commit_id
stringlengths
40
40
project
stringclasses
90 values
commit_message
stringlengths
5
2.21k
type
stringclasses
3 values
url
stringclasses
89 values
git_diff
stringlengths
283
4.32M
7d8ae9dd2bf8e70f0d4421ecadbfbcaf36474f86
Valadoc
libvaladoc: mark @deprecated as deprecated
a
https://github.com/GNOME/vala/
diff --git a/src/libvaladoc/gtkdocrenderer.vala b/src/libvaladoc/gtkdocrenderer.vala index eb4fe6840d..f13b09e9b0 100644 --- a/src/libvaladoc/gtkdocrenderer.vala +++ b/src/libvaladoc/gtkdocrenderer.vala @@ -99,7 +99,6 @@ public class Valadoc.GtkdocRenderer : ContentRenderer { append_exceptions (documentation.find_taglets (null, typeof(Taglets.Throws))); append_see (documentation.find_taglets (null, typeof(Taglets.See))); append_since (documentation.find_taglets (null, typeof(Taglets.Since))); - append_deprecated (documentation.find_taglets (null, typeof(Taglets.Deprecated))); } public override void render_children (ContentElement element) { @@ -405,30 +404,6 @@ public class Valadoc.GtkdocRenderer : ContentRenderer { } } - public void append_deprecated (Gee.List<Content.Taglet> taglets) { - foreach (Content.Taglet _taglet in taglets) { - Taglets.Deprecated taglet = _taglet as Taglets.Deprecated; - if (taglet == null) { - // ignore unexpected taglets - continue ; - } - - if (separated == false) { - writer.text ("\n"); - } - - writer.set_wrap (false); - writer.text ("\nDeprecated: "); - taglet.accept_children (this); - writer.text (": "); - separated = true; - writer.set_wrap (true); - - // ignore multiple occurrences - return ; - } - } - public void append_see (Gee.List<Content.Taglet> taglets) { bool first = true; foreach (Content.Taglet _taglet in taglets) { diff --git a/src/libvaladoc/taglets/tagletdeprecated.vala b/src/libvaladoc/taglets/tagletdeprecated.vala index 468f6ec55f..f685f3fe5e 100755 --- a/src/libvaladoc/taglets/tagletdeprecated.vala +++ b/src/libvaladoc/taglets/tagletdeprecated.vala @@ -31,6 +31,7 @@ public class Valadoc.Taglets.Deprecated : InlineContent, Taglet, Block { public override void check (Api.Tree api_root, Api.Node container, string file_path, ErrorReporter reporter, Settings settings) { base.check (api_root, container, file_path, reporter, settings); + reporter.simple_warning ("warning: @deprecated is deprecated. 
Use [Deprecated]"); } public override void accept (ContentVisitor visitor) {
06058e47d3b42f059340b6e3e4a1156c1fc76036
Mylyn Reviews
342870 TBR extension point für task changeset mapping Initial implementation of the extension point and generic implementation.
a
https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews
diff --git a/tbr/org.eclipse.mylyn.versions.tasks.core/src/org/eclipse/mylyn/versions/tasks/core/IChangeSetMapping.java b/tbr/org.eclipse.mylyn.versions.tasks.core/src/org/eclipse/mylyn/versions/tasks/core/IChangeSetMapping.java new file mode 100644 index 00000000..4acf89e2 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.core/src/org/eclipse/mylyn/versions/tasks/core/IChangeSetMapping.java @@ -0,0 +1,16 @@ +package org.eclipse.mylyn.versions.tasks.core; + +import org.eclipse.mylyn.tasks.core.ITask; +import org.eclipse.mylyn.versions.core.ChangeSet; + +/** + * + * @author mattk + * @noextend + * @implement + */ +public interface IChangeSetMapping { + public ITask getTask(); + + public void addChangeSet(ChangeSet changeset); +} \ No newline at end of file diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.classpath b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.classpath new file mode 100644 index 00000000..64c5e31b --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.classpath @@ -0,0 +1,7 @@ +<?xml version="1.0" encoding="UTF-8"?> +<classpath> + <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/> + <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/> + <classpathentry kind="src" path="src"/> + <classpathentry kind="output" path="bin"/> +</classpath> diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.project b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.project new file mode 100644 index 00000000..e990b987 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.project @@ -0,0 +1,28 @@ +<?xml version="1.0" encoding="UTF-8"?> +<projectDescription> + <name>org.eclipse.mylyn.versions.tasks.mapper.generic</name> + <comment></comment> + <projects> + </projects> + <buildSpec> + <buildCommand> + <name>org.eclipse.jdt.core.javabuilder</name> + <arguments> + </arguments> + 
</buildCommand> + <buildCommand> + <name>org.eclipse.pde.ManifestBuilder</name> + <arguments> + </arguments> + </buildCommand> + <buildCommand> + <name>org.eclipse.pde.SchemaBuilder</name> + <arguments> + </arguments> + </buildCommand> + </buildSpec> + <natures> + <nature>org.eclipse.pde.PluginNature</nature> + <nature>org.eclipse.jdt.core.javanature</nature> + </natures> +</projectDescription> diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.jdt.core.prefs b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 00000000..cc25307b --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,8 @@ +#Tue Feb 22 18:52:35 PST 2011 +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5 +org.eclipse.jdt.core.compiler.compliance=1.5 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.5 diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.mylyn.tasks.ui.prefs b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.mylyn.tasks.ui.prefs new file mode 100644 index 00000000..25ab2c05 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.mylyn.tasks.ui.prefs @@ -0,0 +1,4 @@ +#Thu Mar 24 15:23:44 PDT 2011 +eclipse.preferences.version=1 +project.repository.kind=bugzilla +project.repository.url=https\://bugs.eclipse.org/bugs diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.pde.core.prefs b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.pde.core.prefs new file mode 100644 index 00000000..0cbec6da --- /dev/null +++ 
b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/.settings/org.eclipse.pde.core.prefs @@ -0,0 +1,3 @@ +#Wed Mar 23 17:09:26 PDT 2011 +eclipse.preferences.version=1 +resolve.requirebundle=false diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/META-INF/MANIFEST.MF b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/META-INF/MANIFEST.MF new file mode 100644 index 00000000..65a17072 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/META-INF/MANIFEST.MF @@ -0,0 +1,15 @@ +Manifest-Version: 1.0 +Bundle-ManifestVersion: 2 +Bundle-Name: %Bundle-Name +Bundle-SymbolicName: org.eclipse.mylyn.versions.tasks.mapper.generic;singleton:=true +Bundle-Version: 0.1.0.qualifier +Bundle-RequiredExecutionEnvironment: J2SE-1.5 +Require-Bundle: org.eclipse.mylyn.tasks.core;bundle-version="3.5.0", + org.eclipse.mylyn.versions.core;bundle-version="0.1.0", + org.eclipse.mylyn.versions.tasks.ui, + org.eclipse.core.runtime;bundle-version="3.7.0", + org.eclipse.core.resources;bundle-version="3.7.100", + org.eclipse.mylyn.tasks.ui;bundle-version="3.5.0", + org.eclipse.mylyn.versions.tasks.core;bundle-version="0.1.0" +Export-Package: org.eclipse.mylyn.versions.tasks.mapper.generic;x-internal:=true +Bundle-Vendor: %Bundle-Vendor diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/about.html b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/about.html new file mode 100644 index 00000000..d774b07c --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/about.html @@ -0,0 +1,27 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN"> +<html> +<head> +<title>About</title> +<meta http-equiv=Content-Type content="text/html; charset=ISO-8859-1"> +</head> +<body lang="EN-US"> +<h2>About This Content</h2> + +<p>June 25, 2008</p> +<h3>License</h3> + +<p>The Eclipse Foundation makes available all content in this plug-in (&quot;Content&quot;). 
Unless otherwise +indicated below, the Content is provided to you under the terms and conditions of the +Eclipse Public License Version 1.0 (&quot;EPL&quot;). A copy of the EPL is available +at <a href="http://www.eclipse.org/legal/epl-v10.html">http://www.eclipse.org/legal/epl-v10.html</a>. +For purposes of the EPL, &quot;Program&quot; will mean the Content.</p> + +<p>If you did not receive this Content directly from the Eclipse Foundation, the Content is +being redistributed by another party (&quot;Redistributor&quot;) and different terms and conditions may +apply to your use of any object code in the Content. Check the Redistributor's license that was +provided with the Content. If no such license exists, contact the Redistributor. Unless otherwise +indicated below, the terms and conditions of the EPL still apply to any source code in the Content +and such source code may be obtained at <a href="/">http://www.eclipse.org</a>.</p> + +</body> +</html> \ No newline at end of file diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/build.properties b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/build.properties new file mode 100644 index 00000000..f0d40121 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/build.properties @@ -0,0 +1,6 @@ +source.. = src/ +output.. = bin/ +bin.includes = META-INF/,\ + .,\ + plugin.xml +additional.bundles = org.eclipse.ui diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/plugin.properties b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/plugin.properties new file mode 100644 index 00000000..eee282ab --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/plugin.properties @@ -0,0 +1,12 @@ +############################################################################### +# Copyright (c) 2011 Research Group for Industrial Software (INSO), Vienna University of Technology and others. +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Eclipse Public License v1.0 +# which accompanies this distribution, and is available at +# http://www.eclipse.org/legal/epl-v10.html +# +# Contributors: +# Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation +############################################################################### +Bundle-Vendor = Eclipse Mylyn +Bundle-Name = Mylyn Versions Framework Task Integration diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/plugin.xml b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/plugin.xml new file mode 100644 index 00000000..2fc4c213 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/plugin.xml @@ -0,0 +1,13 @@ +<?xml version="1.0" encoding="UTF-8"?> +<?eclipse version="3.4"?> +<plugin> + <extension + id="org.eclipse.mylyn.versions.tasks.mapper.generic" + name="Generic Implementation" + point="org.eclipse.mylyn.versions.tasks.changesetmapping"> + <changesetMapper + class="org.eclipse.mylyn.versions.tasks.mapper.generic.GenericTaskChangesetMapper"> + </changesetMapper> + </extension> + +</plugin> diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/pom.xml b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/pom.xml new file mode 100644 index 00000000..8eec24c5 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/pom.xml @@ -0,0 +1,29 @@ +<?xml version="1.0" encoding="UTF-8"?> +<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> + <modelVersion>4.0.0</modelVersion> + <parent> + <artifactId>mylyn-reviews-tasks-parent</artifactId> + <groupId>org.eclipse.mylyn.reviews</groupId> + <version>0.7.0-SNAPSHOT</version> + </parent> + 
<artifactId>org.eclipse.mylyn.versions.tasks.mapper.generic</artifactId> + <version>0.1.0-SNAPSHOT</version> + <packaging>eclipse-plugin</packaging> + <build> + <plugins> + <plugin> + <groupId>org.sonatype.tycho</groupId> + <artifactId>maven-osgi-source-plugin</artifactId> + </plugin> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>findbugs-maven-plugin</artifactId> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-pmd-plugin</artifactId> + </plugin> + </plugins> + </build> +</project> diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/EclipsePluginConfiguration.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/EclipsePluginConfiguration.java new file mode 100644 index 00000000..13c848d2 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/EclipsePluginConfiguration.java @@ -0,0 +1,43 @@ +/******************************************************************************* + * Copyright (c) 2010 Research Group for Industrial Software (INSO), Vienna University of Technology. + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation + *******************************************************************************/ +package org.eclipse.mylyn.versions.tasks.mapper.generic; + +import java.util.ArrayList; +import java.util.List; + +import org.eclipse.core.resources.IProject; +import org.eclipse.core.resources.ResourcesPlugin; +import org.eclipse.mylyn.internal.tasks.ui.TasksUiPlugin; +import org.eclipse.mylyn.tasks.core.TaskRepository; + +/** + * + * @author Kilian Matt + * + */ +public class EclipsePluginConfiguration implements IConfiguration { + + public List<IProject> getProjectsForTaskRepository(String connectorKind, + String repositoryUrl) { + List<IProject> projects = new ArrayList<IProject>(); + for (IProject project : ResourcesPlugin.getWorkspace().getRoot() + .getProjects()) { + TaskRepository repo = TasksUiPlugin.getDefault() + .getRepositoryForResource(project); + if (connectorKind.equals(repo.getConnectorKind()) + && repositoryUrl.equals(repo.getRepositoryUrl())) { + projects.add(project); + } + } + return projects; + } + +} diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapper.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapper.java new file mode 100644 index 00000000..b9900d05 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapper.java @@ -0,0 +1,98 @@ +/******************************************************************************* + * Copyright (c) 2010 
Research Group for Industrial Software (INSO), Vienna University of Technology. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation + *******************************************************************************/ +package org.eclipse.mylyn.versions.tasks.mapper.generic; + +import java.util.ArrayList; +import java.util.List; + +import org.eclipse.core.resources.IProject; +import org.eclipse.core.runtime.CoreException; +import org.eclipse.core.runtime.IProgressMonitor; +import org.eclipse.core.runtime.NullProgressMonitor; +import org.eclipse.mylyn.tasks.core.ITask; +import org.eclipse.mylyn.versions.core.ChangeSet; +import org.eclipse.mylyn.versions.core.ScmCore; +import org.eclipse.mylyn.versions.core.ScmRepository; +import org.eclipse.mylyn.versions.core.spi.ScmConnector; +import org.eclipse.mylyn.versions.tasks.core.IChangeSetMapping; +import org.eclipse.mylyn.versions.tasks.ui.AbstractChangesetMappingProvider; + +/** + * + * @author Kilian Matt + * + */ +public class GenericTaskChangesetMapper extends + AbstractChangesetMappingProvider { + + private IConfiguration configuration; + + public GenericTaskChangesetMapper() { + this.configuration = new EclipsePluginConfiguration(); + + } + + public GenericTaskChangesetMapper(IConfiguration configuration) { + this.configuration = configuration; + } + + public void getChangesetsForTask(IChangeSetMapping mapping, + IProgressMonitor monitor) throws CoreException { + ITask task = mapping.getTask(); + if (task == null) + throw new IllegalArgumentException("task must not be null"); + + List<ScmRepository> repos = getRepositoriesFor(task); + for (ScmRepository repo : repos) { + + List<ChangeSet> 
allChangeSets = repo.getConnector().getChangeSets( + repo, new NullProgressMonitor()); + for (ChangeSet cs : allChangeSets) { + if (changeSetMatches(cs, task)) { + mapping.addChangeSet(cs); + } + } + } + } + + private boolean changeSetMatches(ChangeSet cs, ITask task) { + // FIXME better detection + return cs.getMessage().contains(task.getTaskKey()) + || cs.getMessage().contains(task.getUrl()); + } + + private List<ScmRepository> getRepositoriesFor(ITask task) + throws CoreException { + + List<ScmRepository> repos = new ArrayList<ScmRepository>(); + + List<IProject> projects = configuration.getProjectsForTaskRepository( + task.getConnectorKind(), task.getRepositoryUrl()); + for (IProject p : projects) { + ScmRepository repository = getRepositoryForProject(p); + repos.add(repository); + } + return repos; + } + + private ScmRepository getRepositoryForProject(IProject p) + throws CoreException { + ScmConnector connector = ScmCore.getConnector(p); + ScmRepository repository = connector.getRepository(p, + new NullProgressMonitor()); + return repository; + } + + public int getScoreFor(ITask task) { + return 0; + } + +} diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/IConfiguration.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/IConfiguration.java new file mode 100644 index 00000000..eca49106 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/IConfiguration.java @@ -0,0 +1,28 @@ +/******************************************************************************* + * Copyright (c) 2010 Research Group for Industrial Software (INSO), Vienna University of Technology. + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation + *******************************************************************************/ + +package org.eclipse.mylyn.versions.tasks.mapper.generic; + +import java.util.List; + +import org.eclipse.core.resources.IProject; + +/** + * + * @author Kilian Matt + * + */ +public interface IConfiguration { + + List<IProject> getProjectsForTaskRepository(String connectorKind, + String repositoryUrl); + +} diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/TaskChangeSet.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/TaskChangeSet.java new file mode 100644 index 00000000..c3c69a1a --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/TaskChangeSet.java @@ -0,0 +1,37 @@ +/******************************************************************************* + * Copyright (c) 2010 Research Group for Industrial Software (INSO), Vienna University of Technology. + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation + *******************************************************************************/ +package org.eclipse.mylyn.versions.tasks.mapper.generic; + +import org.eclipse.mylyn.tasks.core.ITask; +import org.eclipse.mylyn.versions.core.ChangeSet; + +/** + * + * @author Kilian Matt + * + */ +public class TaskChangeSet { + private ChangeSet changeset; + private ITask task; + + public TaskChangeSet(ITask task, ChangeSet cs) { + this.task = task; + this.changeset = cs; + } + + public ChangeSet getChangeset() { + return changeset; + } + + public ITask getTask() { + return task; + } +} diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/META-INF/MANIFEST.MF b/tbr/org.eclipse.mylyn.versions.tasks.ui/META-INF/MANIFEST.MF index db84438f..8b6ba783 100644 --- a/tbr/org.eclipse.mylyn.versions.tasks.ui/META-INF/MANIFEST.MF +++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/META-INF/MANIFEST.MF @@ -10,7 +10,8 @@ Require-Bundle: org.eclipse.ui, org.eclipse.ui.forms, org.eclipse.mylyn.tasks.core;bundle-version="3.5.0", org.eclipse.mylyn.versions.ui;bundle-version="0.1.0", - org.eclipse.mylyn.versions.tasks.core;bundle-version="0.0.1" + org.eclipse.mylyn.versions.tasks.core;bundle-version="0.0.1", + org.eclipse.mylyn.commons.core;bundle-version="3.5.0" Bundle-ActivationPolicy: lazy Bundle-RequiredExecutionEnvironment: J2SE-1.5 Export-Package: org.eclipse.mylyn.versions.tasks.ui;x-internal:=true diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/plugin.xml b/tbr/org.eclipse.mylyn.versions.tasks.ui/plugin.xml index 2d1fa5ed..5115a687 100644 --- a/tbr/org.eclipse.mylyn.versions.tasks.ui/plugin.xml +++ 
b/tbr/org.eclipse.mylyn.versions.tasks.ui/plugin.xml @@ -1,6 +1,7 @@ <?xml version="1.0" encoding="UTF-8"?> <?eclipse version="3.4"?> <plugin> + <extension-point id="org.eclipse.mylyn.versions.tasks.changesetmapping" name="Task changeset mapping provider" schema="schema/org.eclipse.mylyn.versions.tasks.changesetmapping.exsd"/> <extension point="org.eclipse.mylyn.tasks.ui.editors"> <pageFactory diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/schema/org.eclipse.mylyn.versions.tasks.changesetmapping.exsd b/tbr/org.eclipse.mylyn.versions.tasks.ui/schema/org.eclipse.mylyn.versions.tasks.changesetmapping.exsd new file mode 100644 index 00000000..8c83f362 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/schema/org.eclipse.mylyn.versions.tasks.changesetmapping.exsd @@ -0,0 +1,102 @@ +<?xml version='1.0' encoding='UTF-8'?> +<!-- Schema file written by PDE --> +<schema targetNamespace="org.eclipse.mylyn.versions.tasks.ui" xmlns="http://www.w3.org/2001/XMLSchema"> +<annotation> + <appinfo> + <meta.schema plugin="org.eclipse.mylyn.versions.tasks.ui" id="org.eclipse.mylyn.versions.tasks.changesetmapping" name="Task changeset mapping provider"/> + </appinfo> + <documentation> + [Enter description of this extension point.] 
+ </documentation> + </annotation> + + <element name="extension"> + <annotation> + <appinfo> + <meta.element /> + </appinfo> + </annotation> + <complexType> + <sequence> + <element ref="changesetMapper"/> + </sequence> + <attribute name="point" type="string" use="required"> + <annotation> + <documentation> + + </documentation> + </annotation> + </attribute> + <attribute name="id" type="string"> + <annotation> + <documentation> + + </documentation> + </annotation> + </attribute> + <attribute name="name" type="string"> + <annotation> + <documentation> + + </documentation> + <appinfo> + <meta.attribute translatable="true"/> + </appinfo> + </annotation> + </attribute> + </complexType> + </element> + + <element name="changesetMapper"> + <complexType> + <attribute name="class" type="string" use="required"> + <annotation> + <documentation> + + </documentation> + <appinfo> + <meta.attribute kind="java" basedOn=":org.eclipse.mylyn.versions.tasks.ui.IChangesetMappingProvider"/> + </appinfo> + </annotation> + </attribute> + </complexType> + </element> + + <annotation> + <appinfo> + <meta.section type="since"/> + </appinfo> + <documentation> + [Enter the first release in which this extension point appears.] + </documentation> + </annotation> + + <annotation> + <appinfo> + <meta.section type="examples"/> + </appinfo> + <documentation> + [Enter extension point usage example here.] + </documentation> + </annotation> + + <annotation> + <appinfo> + <meta.section type="apiinfo"/> + </appinfo> + <documentation> + [Enter API information here.] + </documentation> + </annotation> + + <annotation> + <appinfo> + <meta.section type="implementation"/> + </appinfo> + <documentation> + [Enter information about supplied implementation of this extension point.] 
+ </documentation> + </annotation> + + +</schema> diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/AbstractChangesetMappingProvider.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/AbstractChangesetMappingProvider.java new file mode 100644 index 00000000..6e971bd3 --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/AbstractChangesetMappingProvider.java @@ -0,0 +1,14 @@ +package org.eclipse.mylyn.versions.tasks.ui; + +import org.eclipse.core.runtime.CoreException; +import org.eclipse.core.runtime.IProgressMonitor; +import org.eclipse.mylyn.tasks.core.ITask; +import org.eclipse.mylyn.versions.tasks.core.IChangeSetMapping; + +public abstract class AbstractChangesetMappingProvider { + + public abstract void getChangesetsForTask(IChangeSetMapping mapping, IProgressMonitor monitor) throws CoreException ; + + public abstract int getScoreFor(ITask task); +} + diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/ChangesetPart.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/ChangesetPart.java index cd5c8fb1..3272ccca 100644 --- a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/ChangesetPart.java +++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/ChangesetPart.java @@ -15,6 +15,7 @@ import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.NullProgressMonitor; +import org.eclipse.jface.action.ContributionManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.viewers.ArrayContentProvider; import org.eclipse.jface.viewers.ILabelProviderListener; @@ -24,9 +25,7 @@ import org.eclipse.mylyn.tasks.core.ITask; import org.eclipse.mylyn.tasks.ui.editors.AbstractTaskEditorPart; import org.eclipse.mylyn.versions.core.ChangeSet; -import 
org.eclipse.mylyn.versions.core.ScmCore; -import org.eclipse.mylyn.versions.core.ScmRepository; -import org.eclipse.mylyn.versions.core.spi.ScmConnector; +import org.eclipse.mylyn.versions.tasks.core.IChangeSetMapping; import org.eclipse.mylyn.versions.tasks.core.TaskChangeSet; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Image; @@ -42,6 +41,7 @@ * @author Kilian Matt * */ +@SuppressWarnings("restriction") public class ChangesetPart extends AbstractTaskEditorPart { public ChangesetPart() { setPartName("Changeset"); @@ -82,29 +82,23 @@ public void createControl(Composite parent, FormToolkit toolkit) { table.setContentProvider(ArrayContentProvider.getInstance()); table.setLabelProvider(new ITableLabelProvider() { - public void addListener(ILabelProviderListener listener) { } - public void dispose() { } - public boolean isLabelProperty(Object element, String property) { return false; } - public void removeListener(ILabelProviderListener listener) { } - public Image getColumnImage(Object element, int columnIndex) { return null; } - public String getColumnText(Object element, int columnIndex) { TaskChangeSet cs = ((TaskChangeSet) element); switch (columnIndex) { @@ -133,37 +127,39 @@ public String getColumnText(Object element, int columnIndex) { } private List<TaskChangeSet> getInput() { - List<ScmConnector> connectors = ScmCore.getAllRegisteredConnectors(); - for (ScmConnector c : connectors) { - try { - List<ScmRepository> repositories = c - .getRepositories(new NullProgressMonitor()); - for (ScmRepository r : repositories) { - ITask task = getModel().getTask(); - List<TaskChangeSet> changes = new ArrayList<TaskChangeSet>(); - List<ChangeSet> changeSets = c.getChangeSets(r, - new NullProgressMonitor()); - if (changeSets == null) - continue; - for (ChangeSet cs : changeSets) { - if (changeSetMatches(cs)) - changes.add(new TaskChangeSet(task, cs)); - } - if (!changes.isEmpty()) - return changes; - } - } catch (CoreException e) { - // TODO Auto-generated 
catch block - e.printStackTrace(); + int score = Integer.MIN_VALUE; + AbstractChangesetMappingProvider bestProvider = null; + final ITask task = getModel().getTask(); + + for (AbstractChangesetMappingProvider mappingProvider : TaskChangesetUtil + .getMappingProviders()) { + if (score < mappingProvider.getScoreFor(task)) + ; + { + bestProvider = mappingProvider; } - } - return null; - } + final List<TaskChangeSet> changesets = new ArrayList<TaskChangeSet>(); + try { + + IChangeSetMapping changesetsMapping = new IChangeSetMapping() { - private boolean changeSetMatches(ChangeSet cs) { - return cs.getMessage().contains(getModel().getTask().getTaskKey()) - || cs.getMessage().contains(getModel().getTask().getUrl()); + public ITask getTask() { + return task; + } + + public void addChangeSet(ChangeSet changeset) { + changesets.add(new TaskChangeSet(task, changeset)); + } + }; + // FIXME progress monitor + bestProvider.getChangesetsForTask(changesetsMapping, + new NullProgressMonitor()); + } catch (CoreException e) { + // FIXME Auto-generated catch block + e.printStackTrace(); + } + return changesets; } } diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/IChangeSets.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/IChangeSets.java new file mode 100644 index 00000000..5423d5bd --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/IChangeSets.java @@ -0,0 +1,4 @@ +package org.eclipse.mylyn.versions.tasks.ui; +public class IChangeSets { + +} diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/TaskChangesetUtil.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/TaskChangesetUtil.java new file mode 100644 index 00000000..4ff3bc6c --- /dev/null +++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/versions/tasks/ui/TaskChangesetUtil.java @@ -0,0 +1,76 @@ +package 
org.eclipse.mylyn.versions.tasks.ui; + +import java.util.ArrayList; +import java.util.List; + +import org.eclipse.core.runtime.IConfigurationElement; +import org.eclipse.core.runtime.IExtension; +import org.eclipse.core.runtime.IExtensionPoint; +import org.eclipse.core.runtime.IExtensionRegistry; +import org.eclipse.core.runtime.IStatus; +import org.eclipse.core.runtime.MultiStatus; +import org.eclipse.core.runtime.Platform; +import org.eclipse.core.runtime.Status; +import org.eclipse.mylyn.commons.core.StatusHandler; +import org.eclipse.osgi.util.NLS; + +public class TaskChangesetUtil { + private static final String PLUGIN_ID = "org.eclipse.mylyn.versions.tasks.ui"; + + private static List<AbstractChangesetMappingProvider> providers; + + public static List<AbstractChangesetMappingProvider> getMappingProviders() { + if (providers != null) + return providers; + + return providers = loadMappingProviders(); + } + + private synchronized static List<AbstractChangesetMappingProvider> loadMappingProviders() { + List<AbstractChangesetMappingProvider> providers = new ArrayList<AbstractChangesetMappingProvider>(); + + MultiStatus result = new MultiStatus(PLUGIN_ID, 0, + "Task Changeset Mapping Provider failed to load", null); //$NON-NLS-1$ + + IExtensionRegistry registry = Platform.getExtensionRegistry(); + IExtensionPoint connectorsExtensionPoint = registry + .getExtensionPoint("org.eclipse.mylyn.versions.tasks.changesetmapping"); //$NON-NLS-1$ + IExtension[] extensions = connectorsExtensionPoint.getExtensions(); + for (IExtension extension : extensions) { + IConfigurationElement[] elements = extension + .getConfigurationElements(); + for (IConfigurationElement element : elements) { + try { + Object object = element.createExecutableExtension("class"); //$NON-NLS-1$ + if (object instanceof AbstractChangesetMappingProvider) { + providers + .add((AbstractChangesetMappingProvider) object); + } else { + result.add(new Status( + IStatus.ERROR, + PLUGIN_ID, + // FIXME error 
message + NLS.bind( + "Extension ''{0}'' does not extend expected class for extension contributed by {1}", //$NON-NLS-1$ + object.getClass().getCanonicalName(), + element.getContributor().getName()))); + } + } catch (Throwable e) { + result.add(new Status( + IStatus.ERROR, + PLUGIN_ID, + // FIXME error message + NLS.bind( + "Connector core failed to load for extension contributed by {0}", element.getContributor().getName()), e)); //$NON-NLS-1$ + } + } + } + + if (!result.isOK()) { + StatusHandler.log(result); + } + + return providers; + } + +}
24d4d933f72c4c3c3f2f34b1de73575c65913bd2
hadoop
YARN-3100. Made YARN authorization pluggable.- Contributed by Jian He.--(cherry picked from commit 23bf6c72071782e3fd5a628e21495d6b974c7a9e)-
a
https://github.com/apache/hadoop
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 773184034f2a1..a5dfe24ff26bf 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -224,6 +224,8 @@ Release 2.7.0 - UNRELEASED YARN-3155. Refactor the exception handling code for TimelineClientImpl's retryOn method (Li Lu via wangda) + YARN-3100. Made YARN authorization pluggable. (Jian He via zjshen) + OPTIMIZATIONS YARN-2990. FairScheduler's delay-scheduling always waits for node-local and diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index a2a252983544f..d50a7009f1e90 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -435,6 +435,8 @@ private static void addDeprecatedKeys() { public static final String DEFAULT_RM_CONFIGURATION_PROVIDER_CLASS = "org.apache.hadoop.yarn.LocalConfigurationProvider"; + public static final String YARN_AUTHORIZATION_PROVIDER = YARN_PREFIX + + "authorization-provider"; private static final List<String> RM_SERVICES_ADDRESS_CONF_KEYS_HTTP = Collections.unmodifiableList(Arrays.asList( RM_ADDRESS, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AccessType.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AccessType.java new file mode 100644 index 0000000000000..32459b9688bd5 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AccessType.java @@ -0,0 +1,33 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license 
agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.hadoop.yarn.security; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * Access types for a queue or an application. + */ +@Private +@Unstable +public enum AccessType { + // queue + SUBMIT_APP, + ADMINISTER_QUEUE, +} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java index 70c1a6e10c5e6..a386123e6a272 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java @@ -98,15 +98,6 @@ public boolean areACLsEnabled() { return aclsEnabled; } - /** - * Returns the internal structure used to maintain administrator ACLs - * - * @return Structure used to maintain administrator access - */ - public AccessControlList getAdminAcl() { - return adminAcl; - } - /** * Returns whether the specified user/group is an administrator * @@ -117,26 +108,4 @@ 
public AccessControlList getAdminAcl() { public boolean isAdmin(UserGroupInformation callerUGI) { return adminAcl.isUserAllowed(callerUGI); } - - /** - * Returns whether the specified user/group has administrator access - * - * @param callerUGI user/group to to check - * @return <tt>true</tt> if the UserGroupInformation specified - * is a member of the access control list for administrators - * and ACLs are enabled for this cluster - * - * @see #getAdminAcl - * @see #areACLsEnabled - */ - public boolean checkAccess(UserGroupInformation callerUGI) { - - // Any user may perform this operation if authorization is not enabled - if (!areACLsEnabled()) { - return true; - } - - // Administrators may perform any operation - return isAdmin(callerUGI); - } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ConfiguredYarnAuthorizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ConfiguredYarnAuthorizer.java new file mode 100644 index 0000000000000..90ba77a2bbdbe --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ConfiguredYarnAuthorizer.java @@ -0,0 +1,97 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.security; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.security.PrivilegedEntity.EntityType; + +/** + * A YarnAuthorizationProvider implementation based on configuration files. + * + */ +@Private +@Unstable +public class ConfiguredYarnAuthorizer extends YarnAuthorizationProvider { + + private final ConcurrentMap<PrivilegedEntity, Map<AccessType, AccessControlList>> allAcls = + new ConcurrentHashMap<>(); + private volatile AccessControlList adminAcl = null; + + + @Override + public void init(Configuration conf) { + adminAcl = + new AccessControlList(conf.get(YarnConfiguration.YARN_ADMIN_ACL, + YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)); + } + + @Override + public void setPermission(PrivilegedEntity target, + Map<AccessType, AccessControlList> acls, UserGroupInformation ugi) { + allAcls.put(target, acls); + } + + @Override + public boolean checkPermission(AccessType accessType, + PrivilegedEntity target, UserGroupInformation user) { + boolean ret = false; + Map<AccessType, AccessControlList> acls = allAcls.get(target); + if (acls != null) { + AccessControlList list = acls.get(accessType); + if (list != null) { + ret = list.isUserAllowed(user); + } + } + + // recursively look up the queue to see if parent queue has the permission. 
+ if (target.getType() == EntityType.QUEUE && !ret) { + String queueName = target.getName(); + if (!queueName.contains(".")) { + return ret; + } + String parentQueueName = queueName.substring(0, queueName.lastIndexOf(".")); + return checkPermission(accessType, new PrivilegedEntity(target.getType(), + parentQueueName), user); + } + return ret; + } + + @Override + public void setAdmins(AccessControlList acls, UserGroupInformation ugi) { + adminAcl = acls; + } + + @Override + public boolean isAdmin(UserGroupInformation ugi) { + return adminAcl.isUserAllowed(ugi); + } + + public AccessControlList getAdminAcls() { + return this.adminAcl; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/PrivilegedEntity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/PrivilegedEntity.java new file mode 100644 index 0000000000000..580bdf490a3f2 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/PrivilegedEntity.java @@ -0,0 +1,83 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +package org.apache.hadoop.yarn.security; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.api.records.ApplicationAccessType; +import org.apache.hadoop.yarn.api.records.QueueACL; + +/** + * An entity in YARN that can be guarded with ACLs. The entity could be an + * application or a queue etc. An application entity has access types defined in + * {@link ApplicationAccessType}, a queue entity has access types defined in + * {@link QueueACL}. + */ +@Private +@Unstable +public class PrivilegedEntity { + + public enum EntityType { + QUEUE + } + + EntityType type; + String name; + + public PrivilegedEntity(EntityType type, String name) { + this.type = type; + this.name = name; + } + + public EntityType getType() { + return type; + } + + public String getName() { + return name; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((name == null) ? 0 : name.hashCode()); + result = prime * result + ((type == null) ? 
0 : type.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + PrivilegedEntity other = (PrivilegedEntity) obj; + if (name == null) { + if (other.name != null) + return false; + } else if (!name.equals(other.name)) + return false; + if (type != other.type) + return false; + return true; + } +} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/YarnAuthorizationProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/YarnAuthorizationProvider.java new file mode 100644 index 0000000000000..7b2c35cafdb1d --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/YarnAuthorizationProvider.java @@ -0,0 +1,112 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +package org.apache.hadoop.yarn.security; + +import java.util.Map; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.yarn.conf.YarnConfiguration; + +/** + * An implementation of the interface will provide authorization related + * information and enforce permission check. It is excepted that any of the + * methods defined in this interface should be non-blocking call and should not + * involve expensive computation as these method could be invoked in RPC. + */ +@Private +@Unstable +public abstract class YarnAuthorizationProvider { + + private static final Log LOG = LogFactory.getLog(YarnAuthorizationProvider.class); + + private static YarnAuthorizationProvider authorizer = null; + + public static YarnAuthorizationProvider getInstance(Configuration conf) { + synchronized (YarnAuthorizationProvider.class) { + if (authorizer == null) { + Class<?> authorizerClass = + conf.getClass(YarnConfiguration.YARN_AUTHORIZATION_PROVIDER, + ConfiguredYarnAuthorizer.class); + authorizer = + (YarnAuthorizationProvider) ReflectionUtils.newInstance( + authorizerClass, conf); + authorizer.init(conf); + LOG.info(authorizerClass.getName() + " is instiantiated."); + } + } + return authorizer; + } + + /** + * Initialize the provider. Invoked on daemon startup. DefaultYarnAuthorizer is + * initialized based on configurations. + */ + public abstract void init(Configuration conf); + + /** + * Check if user has the permission to access the target object. + * + * @param accessType + * The type of accessing method. + * @param target + * The target object being accessed, e.g. 
app/queue + * @param user + * User who access the target + * @return true if user can access the object, otherwise false. + */ + public abstract boolean checkPermission(AccessType accessType, + PrivilegedEntity target, UserGroupInformation user); + + /** + * Set ACLs for the target object. AccessControlList class encapsulate the + * users and groups who can access the target. + * + * @param target + * The target object. + * @param acls + * A map from access method to a list of users and/or groups who has + * permission to do the access. + * @param ugi User who sets the permissions. + */ + public abstract void setPermission(PrivilegedEntity target, + Map<AccessType, AccessControlList> acls, UserGroupInformation ugi); + + /** + * Set a list of users/groups who have admin access + * + * @param acls users/groups who have admin access + * @param ugi User who sets the admin acls. + */ + public abstract void setAdmins(AccessControlList acls, UserGroupInformation ugi); + + /** + * Check if the user is an admin. 
+ * + * @param ugi the user to be determined if it is an admin + * @return true if the given user is an admin + */ + public abstract boolean isAdmin(UserGroupInformation ugi); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java index f8c6f55c243d4..1b5840f5db42b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java @@ -36,11 +36,10 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.HttpConfig.Policy; -import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.security.AdminACLsManager; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -240,7 +239,9 @@ public void setup() { .addEndpoint( URI.create(httpScheme + bindAddress + ":" + port)).setConf(conf).setFindPort(findPort) - .setACL(new AdminACLsManager(conf).getAdminAcl()) + .setACL(new AccessControlList(conf.get( + YarnConfiguration.YARN_ADMIN_ACL, + YarnConfiguration.DEFAULT_YARN_ADMIN_ACL))) .setPathSpec(pathList.toArray(new String[0])); boolean hasSpnegoConf = spnegoPrincipalKey != null diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java index d79de58b535be..618099546da6e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java @@ -56,6 +56,8 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.ipc.YarnRPC; +import org.apache.hadoop.yarn.security.ConfiguredYarnAuthorizer; +import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol; import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsResponse; @@ -101,7 +103,8 @@ public class AdminService extends CompositeService implements // Address to use for binding. May be a wildcard address. 
private InetSocketAddress masterServiceBindAddress; - private AccessControlList adminAcl; + + private YarnAuthorizationProvider authorizer; private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); @@ -129,10 +132,11 @@ public void serviceInit(Configuration conf) throws Exception { YarnConfiguration.RM_ADMIN_ADDRESS, YarnConfiguration.DEFAULT_RM_ADMIN_ADDRESS, YarnConfiguration.DEFAULT_RM_ADMIN_PORT); - - adminAcl = new AccessControlList(conf.get( - YarnConfiguration.YARN_ADMIN_ACL, - YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)); + authorizer = YarnAuthorizationProvider.getInstance(conf); + authorizer.setAdmins(new AccessControlList(conf.get( + YarnConfiguration.YARN_ADMIN_ACL, + YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)), UserGroupInformation + .getCurrentUser()); rmId = conf.get(YarnConfiguration.RM_HA_ID); super.serviceInit(conf); } @@ -206,7 +210,7 @@ void resetLeaderElection() { } private UserGroupInformation checkAccess(String method) throws IOException { - return RMServerUtils.verifyAccess(adminAcl, method, LOG); + return RMServerUtils.verifyAdminAccess(authorizer, method, LOG); } private UserGroupInformation checkAcls(String method) throws YarnException { @@ -293,7 +297,7 @@ public synchronized void transitionToActive( "transitionToActive", "RMHAProtocolService"); } catch (Exception e) { RMAuditLogger.logFailure(user.getShortUserName(), "transitionToActive", - adminAcl.toString(), "RMHAProtocolService", + "", "RMHAProtocolService", "Exception transitioning to active"); throw new ServiceFailedException( "Error when transitioning to Active mode", e); @@ -318,7 +322,7 @@ public synchronized void transitionToStandby( "transitionToStandby", "RMHAProtocolService"); } catch (Exception e) { RMAuditLogger.logFailure(user.getShortUserName(), "transitionToStandby", - adminAcl.toString(), "RMHAProtocolService", + "", "RMHAProtocolService", "Exception transitioning to standby"); throw new ServiceFailedException( "Error when transitioning to 
Standby mode", e); @@ -446,9 +450,10 @@ private RefreshAdminAclsResponse refreshAdminAcls(boolean checkRMHAState) Configuration conf = getConfiguration(new Configuration(false), YarnConfiguration.YARN_SITE_CONFIGURATION_FILE); - adminAcl = new AccessControlList(conf.get( - YarnConfiguration.YARN_ADMIN_ACL, - YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)); + authorizer.setAdmins(new AccessControlList(conf.get( + YarnConfiguration.YARN_ADMIN_ACL, + YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)), UserGroupInformation + .getCurrentUser()); RMAuditLogger.logSuccess(user.getShortUserName(), argName, "AdminService"); @@ -584,9 +589,10 @@ private void refreshAll() throws ServiceFailedException { } } + // only for testing @VisibleForTesting public AccessControlList getAccessControlList() { - return this.adminAcl; + return ((ConfiguredYarnAuthorizer)authorizer).getAdminAcls(); } @VisibleForTesting @@ -661,7 +667,7 @@ public ReplaceLabelsOnNodeResponse replaceLabelsOnNode( private void checkRMStatus(String user, String argName, String msg) throws StandbyException { if (!isRMActive()) { - RMAuditLogger.logFailure(user, argName, adminAcl.toString(), + RMAuditLogger.logFailure(user, argName, "", "AdminService", "ResourceManager is not active. 
Can not " + msg); throwStandbyException(); } @@ -670,7 +676,7 @@ private void checkRMStatus(String user, String argName, String msg) private YarnException logAndWrapException(IOException ioe, String user, String argName, String msg) throws YarnException { LOG.info("Exception " + msg, ioe); - RMAuditLogger.logFailure(user, argName, adminAcl.toString(), + RMAuditLogger.logFailure(user, argName, "", "AdminService", "Exception " + msg); return RPCUtil.getRemoteException(ioe); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMServerUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMServerUtils.java index c80778cf0a2c7..3d28bb72547f6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMServerUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMServerUtils.java @@ -45,6 +45,7 @@ import org.apache.hadoop.yarn.exceptions.InvalidResourceBlacklistRequestException; import org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; +import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; @@ -140,43 +141,43 @@ public static void validateBlacklistRequest( } } - public static UserGroupInformation verifyAccess( - AccessControlList acl, String method, final Log LOG) + public static UserGroupInformation verifyAdminAccess( + YarnAuthorizationProvider 
authorizer, String method, final Log LOG) throws IOException { // by default, this method will use AdminService as module name - return verifyAccess(acl, method, "AdminService", LOG); + return verifyAdminAccess(authorizer, method, "AdminService", LOG); } /** * Utility method to verify if the current user has access based on the * passed {@link AccessControlList} - * @param acl the {@link AccessControlList} to check against + * @param authorizer the {@link AccessControlList} to check against * @param method the method name to be logged - * @param module, like AdminService or NodeLabelManager + * @param module like AdminService or NodeLabelManager * @param LOG the logger to use * @return {@link UserGroupInformation} of the current user * @throws IOException */ - public static UserGroupInformation verifyAccess( - AccessControlList acl, String method, String module, final Log LOG) + public static UserGroupInformation verifyAdminAccess( + YarnAuthorizationProvider authorizer, String method, String module, + final Log LOG) throws IOException { UserGroupInformation user; try { user = UserGroupInformation.getCurrentUser(); } catch (IOException ioe) { LOG.warn("Couldn't get current user", ioe); - RMAuditLogger.logFailure("UNKNOWN", method, acl.toString(), + RMAuditLogger.logFailure("UNKNOWN", method, "", "AdminService", "Couldn't get current user"); throw ioe; } - if (!acl.isUserAllowed(user)) { + if (!authorizer.isAdmin(user)) { LOG.warn("User " + user.getShortUserName() + " doesn't have permission" + " to call '" + method + "'"); - RMAuditLogger.logFailure(user.getShortUserName(), method, - acl.toString(), module, - RMAuditLogger.AuditConstants.UNAUTHORIZED_USER); + RMAuditLogger.logFailure(user.getShortUserName(), method, "", module, + RMAuditLogger.AuditConstants.UNAUTHORIZED_USER); throw new AccessControlException("User " + user.getShortUserName() + " doesn't have permission" + diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/RMNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/RMNodeLabelsManager.java index 1555291cf1909..9942d80406f4e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/RMNodeLabelsManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/RMNodeLabelsManager.java @@ -33,12 +33,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; import org.apache.hadoop.yarn.nodelabels.NodeLabel; +import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeLabelsUpdateSchedulerEvent; import org.apache.hadoop.yarn.util.resource.Resources; @@ -60,16 +59,13 @@ protected Queue() { ConcurrentMap<String, Queue> queueCollections = new ConcurrentHashMap<String, Queue>(); - protected AccessControlList adminAcl; - + private YarnAuthorizationProvider authorizer; private RMContext rmContext = null; @Override protected void serviceInit(Configuration conf) throws Exception { super.serviceInit(conf); - adminAcl = - new AccessControlList(conf.get(YarnConfiguration.YARN_ADMIN_ACL, - 
YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)); + authorizer = YarnAuthorizationProvider.getInstance(conf); } @Override @@ -479,7 +475,7 @@ private Map<String, Host> cloneNodeMap() { public boolean checkAccess(UserGroupInformation user) { // make sure only admin can invoke // this method - if (adminAcl.isUserAllowed(user)) { + if (authorizer.isAdmin(user)) { return true; } return false; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java index c4900c3976f91..65d68598aead0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/SchedulerUtils.java @@ -28,12 +28,14 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.ContainerStatus; +import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.exceptions.InvalidResourceRequestException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import org.apache.hadoop.yarn.security.AccessType; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.util.resource.ResourceCalculator; import 
org.apache.hadoop.yarn.util.resource.Resources; @@ -348,4 +350,15 @@ public static boolean checkQueueLabelExpression(Set<String> queueLabels, } return true; } + + + public static AccessType toAccessType(QueueACL acl) { + switch (acl) { + case ADMINISTER_QUEUE: + return AccessType.ADMINISTER_QUEUE; + case SUBMIT_APPLICATIONS: + return AccessType.SUBMIT_APP; + } + return null; + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java index e4c26658b0bf5..753fb14bf13c3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractCSQueue.java @@ -34,12 +34,16 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import org.apache.hadoop.yarn.security.AccessType; +import org.apache.hadoop.yarn.security.PrivilegedEntity; +import org.apache.hadoop.yarn.security.PrivilegedEntity.EntityType; +import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceUsage; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils; import 
org.apache.hadoop.yarn.util.resource.ResourceCalculator; -import org.apache.hadoop.yarn.util.resource.Resources; + import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Sets; @@ -60,7 +64,8 @@ public abstract class AbstractCSQueue implements CSQueue { Resource maximumAllocation; QueueState state; final QueueMetrics metrics; - + protected final PrivilegedEntity queueEntity; + final ResourceCalculator resourceCalculator; Set<String> accessibleLabels; RMNodeLabelsManager labelManager; @@ -70,8 +75,8 @@ public abstract class AbstractCSQueue implements CSQueue { Map<String, Float> absoluteMaxCapacityByNodeLabels; Map<String, Float> maxCapacityByNodeLabels; - Map<QueueACL, AccessControlList> acls = - new HashMap<QueueACL, AccessControlList>(); + Map<AccessType, AccessControlList> acls = + new HashMap<AccessType, AccessControlList>(); boolean reservationsContinueLooking; private boolean preemptionDisabled; @@ -81,6 +86,7 @@ public abstract class AbstractCSQueue implements CSQueue { private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); private CapacitySchedulerContext csContext; + protected YarnAuthorizationProvider authorizer = null; public AbstractCSQueue(CapacitySchedulerContext cs, String queueName, CSQueue parent, CSQueue old) throws IOException { @@ -126,6 +132,8 @@ public AbstractCSQueue(CapacitySchedulerContext cs, accessibleLabels, labelManager); this.csContext = cs; queueUsage = new ResourceUsage(); + queueEntity = new PrivilegedEntity(EntityType.QUEUE, getQueuePath()); + authorizer = YarnAuthorizationProvider.getInstance(cs.getConf()); } @Override @@ -181,7 +189,11 @@ public QueueMetrics getMetrics() { public String getQueueName() { return queueName; } - + + public PrivilegedEntity getPrivilegedEntity() { + return queueEntity; + } + @Override public synchronized CSQueue getParent() { return parent; @@ -195,22 +207,13 @@ public synchronized void setParent(CSQueue newParentQueue) { public 
Set<String> getAccessibleNodeLabels() { return accessibleLabels; } - + @Override public boolean hasAccess(QueueACL acl, UserGroupInformation user) { - synchronized (this) { - if (acls.get(acl).isUserAllowed(user)) { - return true; - } - } - - if (parent != null) { - return parent.hasAccess(acl, user); - } - - return false; + return authorizer.checkPermission(SchedulerUtils.toAccessType(acl), + queueEntity, user); } - + @Override public synchronized void setUsedCapacity(float usedCapacity) { this.usedCapacity = usedCapacity; @@ -251,7 +254,7 @@ public String getDefaultNodeLabelExpression() { synchronized void setupQueueConfigs(Resource clusterResource, float capacity, float absoluteCapacity, float maximumCapacity, float absoluteMaxCapacity, - QueueState state, Map<QueueACL, AccessControlList> acls, + QueueState state, Map<AccessType, AccessControlList> acls, Set<String> labels, String defaultLabelExpression, Map<String, Float> nodeLabelCapacities, Map<String, Float> maximumNodeLabelCapacities, @@ -436,7 +439,7 @@ public boolean getReservationContinueLooking() { } @Private - public Map<QueueACL, AccessControlList> getACLs() { + public Map<AccessType, AccessControlList> getACLs() { return acls; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java index 916a4db9101a0..6b9d8460b23b8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java @@ -63,6 +63,7 @@ import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes; +import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState; @@ -124,7 +125,8 @@ public class CapacityScheduler extends PreemptableResourceScheduler, CapacitySchedulerContext, Configurable { private static final Log LOG = LogFactory.getLog(CapacityScheduler.class); - + private YarnAuthorizationProvider authorizer; + private CSQueue root; // timeout to join when we stop this service protected final long THREAD_JOIN_TIMEOUT_MS = 1000; @@ -297,7 +299,7 @@ private synchronized void initScheduler(Configuration configuration) throws new ConcurrentHashMap<ApplicationId, SchedulerApplication<FiCaSchedulerApp>>(); this.labelManager = rmContext.getNodeLabelManager(); - + authorizer = YarnAuthorizationProvider.getInstance(yarnConf); initializeQueues(this.conf); scheduleAsynchronously = this.conf.getScheduleAynschronously(); @@ -474,6 +476,7 @@ private void initializeQueues(CapacitySchedulerConfiguration conf) labelManager.reinitializeQueueLabels(getQueueToLabels()); LOG.info("Initialized root queue " + root); initializeQueueMappings(); + setQueueAcls(authorizer, queues); } @Lock(CapacityScheduler.class) @@ -499,8 +502,19 @@ private void reinitializeQueues(CapacitySchedulerConfiguration conf) root.updateClusterResource(clusterResource); labelManager.reinitializeQueueLabels(getQueueToLabels()); + setQueueAcls(authorizer, queues); } - + + 
@VisibleForTesting + public static void setQueueAcls(YarnAuthorizationProvider authorizer, + Map<String, CSQueue> queues) throws IOException { + for (CSQueue queue : queues.values()) { + AbstractCSQueue csQueue = (AbstractCSQueue) queue; + authorizer.setPermission(csQueue.getPrivilegedEntity(), + csQueue.getACLs(), UserGroupInformation.getCurrentUser()); + } + } + private Map<String, Set<String>> getQueueToLabels() { Map<String, Set<String>> queueToLabels = new HashMap<String, Set<String>>(); for (CSQueue queue : queues.values()) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java index 268cc6cb20a03..b49a60a6b94ee 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java @@ -40,8 +40,10 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; +import org.apache.hadoop.yarn.security.AccessType; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationSchedulerConfiguration; +import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils; import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator; import 
org.apache.hadoop.yarn.util.resource.ResourceCalculator; import org.apache.hadoop.yarn.util.resource.Resources; @@ -530,11 +532,11 @@ public void setAcl(String queue, QueueACL acl, String aclString) { set(queuePrefix + getAclKey(acl), aclString); } - public Map<QueueACL, AccessControlList> getAcls(String queue) { - Map<QueueACL, AccessControlList> acls = - new HashMap<QueueACL, AccessControlList>(); + public Map<AccessType, AccessControlList> getAcls(String queue) { + Map<AccessType, AccessControlList> acls = + new HashMap<AccessType, AccessControlList>(); for (QueueACL acl : QueueACL.values()) { - acls.put(acl, getAcl(queue, acl)); + acls.put(SchedulerUtils.toAccessType(acl), getAcl(queue, acl)); } return acls; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java index c1432101510b3..525822302f788 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java @@ -55,6 +55,7 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; +import org.apache.hadoop.yarn.security.AccessType; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import 
org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; @@ -153,7 +154,7 @@ public LeafQueue(CapacitySchedulerContext cs, QueueState state = cs.getConfiguration().getState(getQueuePath()); - Map<QueueACL, AccessControlList> acls = + Map<AccessType, AccessControlList> acls = cs.getConfiguration().getAcls(getQueuePath()); setupQueueConfigs(cs.getClusterResource(), capacity, absoluteCapacity, @@ -189,7 +190,7 @@ protected synchronized void setupQueueConfigs( int userLimit, float userLimitFactor, int maxApplications, float maxAMResourcePerQueuePercent, int maxApplicationsPerUser, QueueState state, - Map<QueueACL, AccessControlList> acls, int nodeLocalityDelay, + Map<AccessType, AccessControlList> acls, int nodeLocalityDelay, Set<String> labels, String defaultLabelExpression, Map<String, Float> capacitieByLabel, Map<String, Float> maximumCapacitiesByLabel, @@ -247,7 +248,7 @@ protected synchronized void setupQueueConfigs( maximumAllocation); StringBuilder aclsString = new StringBuilder(); - for (Map.Entry<QueueACL, AccessControlList> e : acls.entrySet()) { + for (Map.Entry<AccessType, AccessControlList> e : acls.entrySet()) { aclsString.append(e.getKey() + ":" + e.getValue().getAclString()); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java index 5a2e234436382..29a8ba3e20a9c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/ParentQueue.java @@ -49,6 +49,7 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager; +import org.apache.hadoop.yarn.security.AccessType; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; @@ -107,7 +108,7 @@ public ParentQueue(CapacitySchedulerContext cs, QueueState state = cs.getConfiguration().getState(getQueuePath()); - Map<QueueACL, AccessControlList> acls = + Map<AccessType, AccessControlList> acls = cs.getConfiguration().getAcls(getQueuePath()); setupQueueConfigs(cs.getClusterResource(), capacity, absoluteCapacity, @@ -124,7 +125,7 @@ public ParentQueue(CapacitySchedulerContext cs, synchronized void setupQueueConfigs(Resource clusterResource, float capacity, float absoluteCapacity, float maximumCapacity, float absoluteMaxCapacity, - QueueState state, Map<QueueACL, AccessControlList> acls, + QueueState state, Map<AccessType, AccessControlList> acls, Set<String> accessibleLabels, String defaultLabelExpression, Map<String, Float> nodeLabelCapacities, Map<String, Float> maximumCapacitiesByLabel, @@ -134,7 +135,7 @@ synchronized void setupQueueConfigs(Resource clusterResource, float capacity, defaultLabelExpression, nodeLabelCapacities, maximumCapacitiesByLabel, reservationContinueLooking, maximumAllocation); StringBuilder aclsString = new StringBuilder(); - for (Map.Entry<QueueACL, AccessControlList> e : acls.entrySet()) { + for (Map.Entry<AccessType, AccessControlList> e : acls.entrySet()) { aclsString.append(e.getKey() + ":" + e.getValue().getAclString()); } diff 
--git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java index 72983cac530a1..696ad7a119953 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestParentQueue.java @@ -37,7 +37,6 @@ import java.util.Map; import org.junit.Assert; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.security.UserGroupInformation; @@ -45,6 +44,7 @@ import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; @@ -726,6 +726,9 @@ public void testQueueAcl() throws Exception { CapacityScheduler.parseQueue(csContext, csConf, null, CapacitySchedulerConfiguration.ROOT, queues, queues, TestUtils.spyHook); + YarnAuthorizationProvider authorizer = + YarnAuthorizationProvider.getInstance(conf); + CapacityScheduler.setQueueAcls(authorizer, queues); UserGroupInformation user = UserGroupInformation.getCurrentUser(); // Setup queue configs diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/SCMAdminProtocolService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/SCMAdminProtocolService.java index 3ecca02e732ac..6f2baf649c025 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/SCMAdminProtocolService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-sharedcachemanager/src/main/java/org/apache/hadoop/yarn/server/sharedcachemanager/SCMAdminProtocolService.java @@ -31,6 +31,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.security.YarnAuthorizationProvider; import org.apache.hadoop.yarn.server.api.SCMAdminProtocol; import org.apache.hadoop.yarn.server.api.protocolrecords.RunSharedCacheCleanerTaskRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RunSharedCacheCleanerTaskResponse; @@ -58,8 +59,7 @@ public class SCMAdminProtocolService extends AbstractService implements private Server server; InetSocketAddress clientBindAddress; private final CleanerService cleanerService; - private AccessControlList adminAcl; - + private YarnAuthorizationProvider authorizer; public SCMAdminProtocolService(CleanerService cleanerService) { super(SCMAdminProtocolService.class.getName()); this.cleanerService = cleanerService; @@ -68,9 +68,7 @@ public SCMAdminProtocolService(CleanerService cleanerService) { @Override protected void serviceInit(Configuration conf) throws Exception { this.clientBindAddress = getBindAddress(conf); - adminAcl = new AccessControlList(conf.get( - YarnConfiguration.YARN_ADMIN_ACL, - 
YarnConfiguration.DEFAULT_YARN_ADMIN_ACL)); + authorizer = YarnAuthorizationProvider.getInstance(conf); super.serviceInit(conf); } @@ -119,7 +117,7 @@ private void checkAcls(String method) throws YarnException { throw RPCUtil.getRemoteException(ioe); } - if (!adminAcl.isUserAllowed(user)) { + if (!authorizer.isAdmin(user)) { LOG.warn("User " + user.getShortUserName() + " doesn't have permission" + " to call '" + method + "'");
ede53193d6eba06b1f3ee657cb5777f4ca2f8e76
Delta Spike
DELTASPIKE-403 make MessageBundles PassivationCapable
c
https://github.com/apache/deltaspike
diff --git a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/message/MessageBundleExtension.java b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/message/MessageBundleExtension.java index 213c0f417..6835c84ec 100644 --- a/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/message/MessageBundleExtension.java +++ b/deltaspike/core/impl/src/main/java/org/apache/deltaspike/core/impl/message/MessageBundleExtension.java @@ -194,14 +194,16 @@ protected void installMessageBundleProducerBeans(@Observes AfterBeanDiscovery ab } } - private static <T> Bean<T> createMessageBundleBean(Bean<Object> delegate, - AnnotatedType<T> annotatedType, - BeanManager beanManager) + private <T> Bean<T> createMessageBundleBean(Bean<Object> delegate, + AnnotatedType<T> annotatedType, + BeanManager beanManager) { WrappingBeanBuilder<T> beanBuilder = new WrappingBeanBuilder<T>(delegate, beanManager) .readFromType(annotatedType); //X TODO re-visit type.getBaseType() in combination with #addQualifier beanBuilder.types(annotatedType.getJavaClass(), Object.class, Serializable.class); + beanBuilder.passivationCapable(true); + beanBuilder.id("MessageBundleBean#" + annotatedType.getJavaClass().getName()); return beanBuilder.create(); } diff --git a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/MessageUser.java b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/MessageUser.java new file mode 100644 index 000000000..d91d32d20 --- /dev/null +++ b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/MessageUser.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.deltaspike.test.core.api.message; + +import javax.enterprise.context.SessionScoped; +import javax.inject.Inject; +import java.io.Serializable; + +/** + * A SessionScoped (passivating!) user which uses a DeltaSpike message + */ +@SessionScoped +public class MessageUser implements Serializable +{ + @Inject + private SimpleMessage msg; + + public SimpleMessage getMsg() { + return msg; + } +} diff --git a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/SimpleMessageTest.java b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/SimpleMessageTest.java index 5ee43d738..e5dadd8c2 100644 --- a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/SimpleMessageTest.java +++ b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/SimpleMessageTest.java @@ -52,6 +52,10 @@ public class SimpleMessageTest @Inject private LocaleResolver localeResolver; + @Inject + private MessageUser messageUser; + + /** * X TODO creating a WebArchive is only a workaround because JavaArchive * cannot contain other archives. 
@@ -103,10 +107,18 @@ public void testNullMessage() } @Test - public void testMessageSerialisation() { + public void testMessageSerialisation() + { Serializer<SimpleMessage> simpleMessageSerializer = new Serializer<SimpleMessage>(); SimpleMessage sm2 = simpleMessageSerializer.roundTrip(simpleMessage); assertNotNull(sm2); } + + @Test + public void testPassivationCapability() + { + assertEquals("Welcome to DeltaSpike", messageUser.getMsg().welcomeToDeltaSpike()); + assertEquals("Welcome to DeltaSpike", messageUser.getMsg().welcomeWithStringVariable("DeltaSpike")); + } }
33dfd5bb6b66becf4009345170ead16bfc50abd3
kotlin
Lazy resolve: Package-level objects are handled- properly--
c
https://github.com/JetBrains/kotlin
diff --git a/compiler/frontend/src/org/jetbrains/jet/lang/resolve/lazy/AbstractLazyMemberScope.java b/compiler/frontend/src/org/jetbrains/jet/lang/resolve/lazy/AbstractLazyMemberScope.java index c81721e2559c0..badc29026a09a 100644 --- a/compiler/frontend/src/org/jetbrains/jet/lang/resolve/lazy/AbstractLazyMemberScope.java +++ b/compiler/frontend/src/org/jetbrains/jet/lang/resolve/lazy/AbstractLazyMemberScope.java @@ -145,7 +145,10 @@ public Set<VariableDescriptor> getProperties(@NotNull Name name) { JetClassOrObject classOrObjectDeclaration = declarationProvider.getClassOrObjectDeclaration(name); if (classOrObjectDeclaration instanceof JetObjectDeclaration) { JetObjectDeclaration objectDeclaration = (JetObjectDeclaration) classOrObjectDeclaration; - ClassDescriptor classifier = (ClassDescriptor) getClassifier(name); + ClassDescriptor classifier = getObjectDescriptor(name); + if (classifier == null) { + throw new IllegalStateException("Object declaration " + name + " found in the DeclarationProvider " + declarationProvider + " but not in the scope " + this); + } VariableDescriptor propertyDescriptor = resolveSession.getInjector().getDescriptorResolver() .resolveObjectDeclaration(thisDescriptor, objectDeclaration, classifier, resolveSession.getTrace()); result.add(propertyDescriptor); diff --git a/compiler/testData/lazyResolve/namespaceComparator/packageLevelObject.kt b/compiler/testData/lazyResolve/namespaceComparator/packageLevelObject.kt new file mode 100644 index 0000000000000..dc2194f9876c5 --- /dev/null +++ b/compiler/testData/lazyResolve/namespaceComparator/packageLevelObject.kt @@ -0,0 +1,4 @@ +package foo + +object Bar { +} \ No newline at end of file diff --git a/compiler/testData/lazyResolve/namespaceComparator/packageLevelObject.txt b/compiler/testData/lazyResolve/namespaceComparator/packageLevelObject.txt new file mode 100644 index 0000000000000..10ca6ab510ddb --- /dev/null +++ b/compiler/testData/lazyResolve/namespaceComparator/packageLevelObject.txt @@ 
-0,0 +1,7 @@ +namespace <root> + +// <namespace name="foo"> +namespace foo + +internal final val Bar: foo.Bar +// </namespace name="foo"> diff --git a/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java b/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java index ca8f45fa83b4d..87b06fc4aae9c 100644 --- a/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java +++ b/compiler/tests/org/jetbrains/jet/lang/resolve/lazy/LazyResolveNamespaceComparingTestGenerated.java @@ -1275,6 +1275,11 @@ public void testGenericFunction() throws Exception { doTest("compiler/testData/lazyResolve/namespaceComparator/genericFunction.kt"); } + @TestMetadata("packageLevelObject.kt") + public void testPackageLevelObject() throws Exception { + doTest("compiler/testData/lazyResolve/namespaceComparator/packageLevelObject.kt"); + } + @TestMetadata("simpleClass.kt") public void testSimpleClass() throws Exception { doTest("compiler/testData/lazyResolve/namespaceComparator/simpleClass.kt");
a0a251e62e4abb8f0660e96c654c120506e54bd9
Vala
gtk+-2.0: expose Gtk.CellEditable.start_editing
c
https://github.com/GNOME/vala/
diff --git a/vapi/gtk+-2.0.vapi b/vapi/gtk+-2.0.vapi index df059f22d8..adb26a1731 100644 --- a/vapi/gtk+-2.0.vapi +++ b/vapi/gtk+-2.0.vapi @@ -5699,6 +5699,7 @@ namespace Gtk { } [CCode (cheader_filename = "gtk/gtk.h")] public interface CellEditable : Gtk.Widget { + public abstract void start_editing (Gdk.Event event); public bool editing_canceled { get; set; } [HasEmitter] public signal void editing_done (); diff --git a/vapi/packages/gtk+-2.0/gtk+-2.0.metadata b/vapi/packages/gtk+-2.0/gtk+-2.0.metadata index 1ccbbf7761..0df33dccff 100644 --- a/vapi/packages/gtk+-2.0/gtk+-2.0.metadata +++ b/vapi/packages/gtk+-2.0/gtk+-2.0.metadata @@ -88,7 +88,6 @@ GtkCallbackMarshal hidden="1" GtkCell.u hidden="1" GtkCellEditable::editing_done has_emitter="1" GtkCellEditable::remove_widget has_emitter="1" -GtkCellEditable::start_editing has_emitter="1" gtk_cell_layout_get_cells transfer_ownership="1" type_arguments="unowned CellRenderer" gtk_cell_layout_set_attributes ellipsis="1" gtk_cell_layout_set_cell_data_func.func transfer_ownership="1"
f4cf5a7d4a1b3998632309288777275bc30517bb
elasticsearch
Fix RobinEngineIntegrationTest - missed to- explicitly create the index--
c
https://github.com/elastic/elasticsearch
diff --git a/src/test/java/org/elasticsearch/index/engine/robin/RobinEngineIntegrationTest.java b/src/test/java/org/elasticsearch/index/engine/robin/RobinEngineIntegrationTest.java index d703fd5fd8463..652ee129d86ea 100644 --- a/src/test/java/org/elasticsearch/index/engine/robin/RobinEngineIntegrationTest.java +++ b/src/test/java/org/elasticsearch/index/engine/robin/RobinEngineIntegrationTest.java @@ -35,21 +35,21 @@ public class RobinEngineIntegrationTest extends AbstractIntegrationTest { @Test public void testSetIndexCompoundOnFlush() { - client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.builder().put("number_of_replicas", 0)); + client().admin().indices().prepareCreate("test").setSettings(ImmutableSettings.builder().put("number_of_replicas", 0).put("number_of_shards", 1)).get(); client().prepareIndex("test", "foo").setSource("field", "foo").get(); refresh(); - assertTotalCompoundSegments(2, 2, "test"); + assertTotalCompoundSegments(1, 1, "test"); client().admin().indices().prepareUpdateSettings("test") .setSettings(ImmutableSettings.builder().put(RobinEngine.INDEX_COMPOUND_ON_FLUSH, false)).get(); client().prepareIndex("test", "foo").setSource("field", "foo").get(); refresh(); - assertTotalCompoundSegments(2, 4, "test"); + assertTotalCompoundSegments(1, 2, "test"); client().admin().indices().prepareUpdateSettings("test") .setSettings(ImmutableSettings.builder().put(RobinEngine.INDEX_COMPOUND_ON_FLUSH, true)).get(); client().prepareIndex("test", "foo").setSource("field", "foo").get(); refresh(); - assertTotalCompoundSegments(4, 6, "test"); + assertTotalCompoundSegments(2, 3, "test"); } @@ -61,11 +61,13 @@ private void assertTotalCompoundSegments(int i, int t, String index) { int total = 0; for (IndexShardSegments indexShardSegments : values) { for (ShardSegments s : indexShardSegments) { - for (Segment segment : s.getSegments()) { - if (segment.isCompound()) { - compounds++; + for (Segment segment : s) { + if (segment.isSearch() && 
segment.getNumDocs() > 0) { + if (segment.isCompound()) { + compounds++; + } + total++; } - total++; } } }
d4bc187be90fb4bf65bde43d6166073429041749
elasticsearch
rename node to DiscoveryNode--
p
https://github.com/elastic/elasticsearch
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 88480851d0bdc..a942fced4d2f6 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -21,7 +21,7 @@ import com.google.common.collect.ImmutableMap; import org.elasticsearch.action.support.nodes.NodeOperationResponse; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.util.io.stream.StreamInput; import org.elasticsearch.util.io.stream.StreamOutput; import org.elasticsearch.util.settings.ImmutableSettings; @@ -42,11 +42,11 @@ public class NodeInfo extends NodeOperationResponse { NodeInfo() { } - public NodeInfo(Node node, Map<String, String> attributes, Settings settings) { + public NodeInfo(DiscoveryNode node, Map<String, String> attributes, Settings settings) { this(node, ImmutableMap.copyOf(attributes), settings); } - public NodeInfo(Node node, ImmutableMap<String, String> attributes, Settings settings) { + public NodeInfo(DiscoveryNode node, ImmutableMap<String, String> attributes, Settings settings) { super(node); this.attributes = attributes; this.settings = settings; diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/NodesShutdownResponse.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/NodesShutdownResponse.java index 9b6104df3fe84..4c887230a8891 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/NodesShutdownResponse.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/NodesShutdownResponse.java @@ -22,7 +22,7 @@ import 
org.elasticsearch.action.support.nodes.NodeOperationResponse; import org.elasticsearch.action.support.nodes.NodesOperationResponse; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.util.io.stream.StreamInput; import org.elasticsearch.util.io.stream.StreamOutput; @@ -61,7 +61,7 @@ public static class NodeShutdownResponse extends NodeOperationResponse { NodeShutdownResponse() { } - public NodeShutdownResponse(Node node) { + public NodeShutdownResponse(DiscoveryNode node) { super(node); } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java index a29f247815614..4f46b1e1c382d 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryAndFetchAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.*; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.search.SearchShardTarget; @@ -74,7 +74,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen return "dfs"; } - @Override protected void sendExecuteFirstPhase(Node node, InternalSearchRequest request, SearchServiceListener<DfsSearchResult> listener) { + @Override protected void sendExecuteFirstPhase(DiscoveryNode node, InternalSearchRequest request, SearchServiceListener<DfsSearchResult> listener) { searchService.sendExecuteDfs(node, 
request, listener); } @@ -88,7 +88,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen int localOperations = 0; for (DfsSearchResult dfsResult : dfsResults) { - Node node = nodes.get(dfsResult.shardTarget().nodeId()); + DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { localOperations++; } else { @@ -101,7 +101,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen threadPool.execute(new Runnable() { @Override public void run() { for (DfsSearchResult dfsResult : dfsResults) { - Node node = nodes.get(dfsResult.shardTarget().nodeId()); + DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { QuerySearchRequest querySearchRequest = new QuerySearchRequest(dfsResult.id(), dfs); executeSecondPhase(counter, node, querySearchRequest); @@ -112,7 +112,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen } else { boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD; for (DfsSearchResult dfsResult : dfsResults) { - final Node node = nodes.get(dfsResult.shardTarget().nodeId()); + final DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { final QuerySearchRequest querySearchRequest = new QuerySearchRequest(dfsResult.id(), dfs); if (localAsync) { @@ -130,7 +130,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen } } - private void executeSecondPhase(final AtomicInteger counter, Node node, QuerySearchRequest querySearchRequest) { + private void executeSecondPhase(final AtomicInteger counter, DiscoveryNode node, QuerySearchRequest querySearchRequest) { searchService.sendExecuteFetch(node, querySearchRequest, new SearchServiceListener<QueryFetchSearchResult>() { @Override public void onResult(QueryFetchSearchResult result) { 
queryFetchResults.put(result.shardTarget(), result); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java index 13581b8ed75d2..b282333958171 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchDfsQueryThenFetchAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.*; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.search.SearchShardTarget; @@ -78,7 +78,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen return "dfs"; } - @Override protected void sendExecuteFirstPhase(Node node, InternalSearchRequest request, SearchServiceListener<DfsSearchResult> listener) { + @Override protected void sendExecuteFirstPhase(DiscoveryNode node, InternalSearchRequest request, SearchServiceListener<DfsSearchResult> listener) { searchService.sendExecuteDfs(node, request, listener); } @@ -93,7 +93,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen int localOperations = 0; for (DfsSearchResult dfsResult : dfsResults) { - Node node = nodes.get(dfsResult.shardTarget().nodeId()); + DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { localOperations++; } else { @@ -107,7 +107,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen threadPool.execute(new Runnable() { @Override public void 
run() { for (DfsSearchResult dfsResult : dfsResults) { - Node node = nodes.get(dfsResult.shardTarget().nodeId()); + DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { QuerySearchRequest querySearchRequest = new QuerySearchRequest(dfsResult.id(), dfs); executeQuery(counter, querySearchRequest, node); @@ -118,7 +118,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen } else { boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD; for (DfsSearchResult dfsResult : dfsResults) { - final Node node = nodes.get(dfsResult.shardTarget().nodeId()); + final DiscoveryNode node = nodes.get(dfsResult.shardTarget().nodeId()); if (node.id().equals(nodes.localNodeId())) { final QuerySearchRequest querySearchRequest = new QuerySearchRequest(dfsResult.id(), dfs); if (localAsync) { @@ -136,7 +136,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen } } - private void executeQuery(final AtomicInteger counter, QuerySearchRequest querySearchRequest, Node node) { + private void executeQuery(final AtomicInteger counter, QuerySearchRequest querySearchRequest, DiscoveryNode node) { searchService.sendExecuteQuery(node, querySearchRequest, new SearchServiceListener<QuerySearchResult>() { @Override public void onResult(QuerySearchResult result) { queryResults.put(result.shardTarget(), result); @@ -178,7 +178,7 @@ private void innerExecuteFetchPhase() { final AtomicInteger counter = new AtomicInteger(docIdsToLoad.size()); int localOperations = 0; for (Map.Entry<SearchShardTarget, ExtTIntArrayList> entry : docIdsToLoad.entrySet()) { - Node node = nodes.get(entry.getKey().nodeId()); + DiscoveryNode node = nodes.get(entry.getKey().nodeId()); if (node.id().equals(nodes.localNodeId())) { localOperations++; } else { @@ -192,7 +192,7 @@ private void innerExecuteFetchPhase() { threadPool.execute(new Runnable() { @Override public void run() 
{ for (Map.Entry<SearchShardTarget, ExtTIntArrayList> entry : docIdsToLoad.entrySet()) { - Node node = nodes.get(entry.getKey().nodeId()); + DiscoveryNode node = nodes.get(entry.getKey().nodeId()); if (node.id().equals(nodes.localNodeId())) { FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(queryResults.get(entry.getKey()).id(), entry.getValue()); executeFetch(counter, fetchSearchRequest, node); @@ -203,7 +203,7 @@ private void innerExecuteFetchPhase() { } else { boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD; for (Map.Entry<SearchShardTarget, ExtTIntArrayList> entry : docIdsToLoad.entrySet()) { - final Node node = nodes.get(entry.getKey().nodeId()); + final DiscoveryNode node = nodes.get(entry.getKey().nodeId()); if (node.id().equals(nodes.localNodeId())) { final FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(queryResults.get(entry.getKey()).id(), entry.getValue()); if (localAsync) { @@ -223,7 +223,7 @@ private void innerExecuteFetchPhase() { releaseIrrelevantSearchContexts(queryResults, docIdsToLoad); } - private void executeFetch(final AtomicInteger counter, FetchSearchRequest fetchSearchRequest, Node node) { + private void executeFetch(final AtomicInteger counter, FetchSearchRequest fetchSearchRequest, DiscoveryNode node) { searchService.sendExecuteFetch(node, fetchSearchRequest, new SearchServiceListener<FetchSearchResult>() { @Override public void onResult(FetchSearchResult result) { fetchResults.put(result.shardTarget(), result); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java index 37818542ca7cd..3c4682b68a573 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java +++ 
b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryAndFetchAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.search.SearchShardTarget; @@ -68,7 +68,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen return "query_fetch"; } - @Override protected void sendExecuteFirstPhase(Node node, InternalSearchRequest request, SearchServiceListener<QueryFetchSearchResult> listener) { + @Override protected void sendExecuteFirstPhase(DiscoveryNode node, InternalSearchRequest request, SearchServiceListener<QueryFetchSearchResult> listener) { searchService.sendExecuteFetch(node, request, listener); } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java index adcc6f5b5b150..ef82fa0cc43f9 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchQueryThenFetchAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.*; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.search.SearchShardTarget; @@ -72,7 +72,7 @@ private 
AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen return "query"; } - @Override protected void sendExecuteFirstPhase(Node node, InternalSearchRequest request, SearchServiceListener<QuerySearchResult> listener) { + @Override protected void sendExecuteFirstPhase(DiscoveryNode node, InternalSearchRequest request, SearchServiceListener<QuerySearchResult> listener) { searchService.sendExecuteQuery(node, request, listener); } @@ -93,7 +93,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen int localOperations = 0; for (Map.Entry<SearchShardTarget, ExtTIntArrayList> entry : docIdsToLoad.entrySet()) { - Node node = nodes.get(entry.getKey().nodeId()); + DiscoveryNode node = nodes.get(entry.getKey().nodeId()); if (node.id().equals(nodes.localNodeId())) { localOperations++; } else { @@ -107,7 +107,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen threadPool.execute(new Runnable() { @Override public void run() { for (Map.Entry<SearchShardTarget, ExtTIntArrayList> entry : docIdsToLoad.entrySet()) { - Node node = nodes.get(entry.getKey().nodeId()); + DiscoveryNode node = nodes.get(entry.getKey().nodeId()); if (node.id().equals(nodes.localNodeId())) { FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(queryResults.get(entry.getKey()).id(), entry.getValue()); executeFetch(counter, fetchSearchRequest, node); @@ -118,7 +118,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen } else { boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD; for (Map.Entry<SearchShardTarget, ExtTIntArrayList> entry : docIdsToLoad.entrySet()) { - final Node node = nodes.get(entry.getKey().nodeId()); + final DiscoveryNode node = nodes.get(entry.getKey().nodeId()); if (node.id().equals(nodes.localNodeId())) { final FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(queryResults.get(entry.getKey()).id(), 
entry.getValue()); if (localAsync) { @@ -138,7 +138,7 @@ private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listen releaseIrrelevantSearchContexts(queryResults, docIdsToLoad); } - private void executeFetch(final AtomicInteger counter, FetchSearchRequest fetchSearchRequest, Node node) { + private void executeFetch(final AtomicInteger counter, FetchSearchRequest fetchSearchRequest, DiscoveryNode node) { searchService.sendExecuteFetch(node, fetchSearchRequest, new SearchServiceListener<FetchSearchResult>() { @Override public void onResult(FetchSearchResult result) { fetchResults.put(result.shardTarget(), result); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java index 9d2f0eddbac62..5ccde73ec9f8a 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryAndFetchAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.*; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.action.SearchServiceListener; import org.elasticsearch.search.action.SearchServiceTransportAction; @@ -81,7 +81,7 @@ private class AsyncAction { private final ParsedScrollId scrollId; - private final Nodes nodes; + private final DiscoveryNodes nodes; protected final Collection<ShardSearchFailure> shardFailures = searchCache.obtainShardFailures(); @@ -107,7 +107,7 @@ 
public void start() { int localOperations = 0; for (Tuple<String, Long> target : scrollId.values()) { - Node node = nodes.get(target.v1()); + DiscoveryNode node = nodes.get(target.v1()); if (node != null) { if (nodes.localNodeId().equals(node.id())) { localOperations++; @@ -130,7 +130,7 @@ public void start() { threadPool.execute(new Runnable() { @Override public void run() { for (Tuple<String, Long> target : scrollId.values()) { - Node node = nodes.get(target.v1()); + DiscoveryNode node = nodes.get(target.v1()); if (node != null && nodes.localNodeId().equals(node.id())) { executePhase(node, target.v2()); } @@ -140,7 +140,7 @@ public void start() { } else { boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD; for (final Tuple<String, Long> target : scrollId.values()) { - final Node node = nodes.get(target.v1()); + final DiscoveryNode node = nodes.get(target.v1()); if (node != null && nodes.localNodeId().equals(node.id())) { if (localAsync) { threadPool.execute(new Runnable() { @@ -157,7 +157,7 @@ public void start() { } for (Tuple<String, Long> target : scrollId.values()) { - Node node = nodes.get(target.v1()); + DiscoveryNode node = nodes.get(target.v1()); if (node == null) { if (logger.isDebugEnabled()) { logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.source() + "]"); @@ -171,7 +171,7 @@ public void start() { } } - private void executePhase(Node node, long searchId) { + private void executePhase(DiscoveryNode node, long searchId) { searchService.sendExecuteFetch(node, internalScrollSearchRequest(searchId, request), new SearchServiceListener<QueryFetchSearchResult>() { @Override public void onResult(QueryFetchSearchResult result) { queryFetchResults.put(result.shardTarget(), result); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java 
b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java index a146019e986ab..a2f1a00eb04b1 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchScrollQueryThenFetchAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.*; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.action.SearchServiceListener; import org.elasticsearch.search.action.SearchServiceTransportAction; @@ -85,7 +85,7 @@ private class AsyncAction { private final ParsedScrollId scrollId; - private final Nodes nodes; + private final DiscoveryNodes nodes; protected final Collection<ShardSearchFailure> shardFailures = searchCache.obtainShardFailures(); @@ -113,7 +113,7 @@ public void start() { int localOperations = 0; for (Tuple<String, Long> target : scrollId.values()) { - Node node = nodes.get(target.v1()); + DiscoveryNode node = nodes.get(target.v1()); if (node != null) { if (nodes.localNodeId().equals(node.id())) { localOperations++; @@ -136,7 +136,7 @@ public void start() { threadPool.execute(new Runnable() { @Override public void run() { for (Tuple<String, Long> target : scrollId.values()) { - Node node = nodes.get(target.v1()); + DiscoveryNode node = nodes.get(target.v1()); if (node != null && nodes.localNodeId().equals(node.id())) { executeQueryPhase(counter, node, target.v2()); } @@ -146,7 +146,7 @@ public void start() { } else { boolean localAsync = request.operationThreading() == 
SearchOperationThreading.THREAD_PER_SHARD; for (final Tuple<String, Long> target : scrollId.values()) { - final Node node = nodes.get(target.v1()); + final DiscoveryNode node = nodes.get(target.v1()); if (node != null && nodes.localNodeId().equals(node.id())) { if (localAsync) { threadPool.execute(new Runnable() { @@ -163,7 +163,7 @@ public void start() { } } - private void executeQueryPhase(final AtomicInteger counter, Node node, long searchId) { + private void executeQueryPhase(final AtomicInteger counter, DiscoveryNode node, long searchId) { searchService.sendExecuteQuery(node, internalScrollSearchRequest(searchId, request), new SearchServiceListener<QuerySearchResult>() { @Override public void onResult(QuerySearchResult result) { queryResults.put(result.shardTarget(), result); @@ -199,7 +199,7 @@ private void executeFetchPhase() { SearchShardTarget shardTarget = entry.getKey(); ExtTIntArrayList docIds = entry.getValue(); FetchSearchRequest fetchSearchRequest = new FetchSearchRequest(queryResults.get(shardTarget).id(), docIds); - Node node = nodes.get(shardTarget.nodeId()); + DiscoveryNode node = nodes.get(shardTarget.nodeId()); searchService.sendExecuteFetch(node, fetchSearchRequest, new SearchServiceListener<FetchSearchResult>() { @Override public void onResult(FetchSearchResult result) { fetchResults.put(result.shardTarget(), result); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java index 5e0ca3d3b8a6d..b186a9e9b5aff 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/search/type/TransportSearchTypeAction.java @@ -24,8 +24,8 @@ import org.elasticsearch.action.support.BaseAction; import org.elasticsearch.cluster.ClusterService; import 
org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; @@ -83,7 +83,7 @@ protected abstract class BaseAsyncAction<FirstResult> { protected final SearchRequest request; - protected final Nodes nodes; + protected final DiscoveryNodes nodes; protected final int expectedSuccessfulOps; @@ -172,7 +172,7 @@ private void performFirstPhase(final ShardsIterator shardIt) { // no more active shards... (we should not really get here, but just for safety) onFirstPhaseResult(shard, shardIt, null); } else { - Node node = nodes.get(shard.currentNodeId()); + DiscoveryNode node = nodes.get(shard.currentNodeId()); sendExecuteFirstPhase(node, internalSearchRequest(shard, request), new SearchServiceListener<FirstResult>() { @Override public void onResult(FirstResult result) { onFirstPhaseResult(shard, result, shardIt); @@ -281,7 +281,7 @@ protected void releaseIrrelevantSearchContexts(Map<SearchShardTarget, QuerySearc Map<SearchShardTarget, ExtTIntArrayList> docIdsToLoad) { for (Map.Entry<SearchShardTarget, QuerySearchResultProvider> entry : queryResults.entrySet()) { if (!docIdsToLoad.containsKey(entry.getKey())) { - Node node = nodes.get(entry.getKey().nodeId()); + DiscoveryNode node = nodes.get(entry.getKey().nodeId()); if (node != null) { // should not happen (==null) but safeguard anyhow searchService.sendFreeContext(node, entry.getValue().id()); } @@ -313,7 +313,7 @@ protected void invokeListener(final Throwable t) { } } - protected abstract void sendExecuteFirstPhase(Node node, InternalSearchRequest request, SearchServiceListener<FirstResult> listener); + protected abstract void sendExecuteFirstPhase(DiscoveryNode node, 
InternalSearchRequest request, SearchServiceListener<FirstResult> listener); protected abstract void processFirstPhaseResult(ShardRouting shard, FirstResult result); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java index 585a8270ffdb3..ed26cca460e87 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastOperationAction.java @@ -25,8 +25,8 @@ import org.elasticsearch.action.support.BaseAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; @@ -111,7 +111,7 @@ class AsyncBroadcastAction { private final ClusterState clusterState; - private final Nodes nodes; + private final DiscoveryNodes nodes; private final GroupShardsIterator shardsIts; @@ -216,7 +216,7 @@ private void performOperation(final ShardsIterator shardIt, boolean localAsync) } } } else { - Node node = nodes.get(shard.currentNodeId()); + DiscoveryNode node = nodes.get(shard.currentNodeId()); transportService.sendRequest(node, transportShardAction(), shardRequest, new BaseTransportResponseHandler<ShardResponse>() { @Override public ShardResponse newInstance() { return newShardResponse(); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java 
b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java index 5d0679817754b..8928a5b6ac675 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeOperationAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.BaseAction; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import org.elasticsearch.util.settings.Settings; @@ -60,7 +60,7 @@ protected TransportMasterNodeOperationAction(Settings settings, TransportService protected abstract Response masterOperation(Request request) throws ElasticSearchException; @Override protected void doExecute(final Request request, final ActionListener<Response> listener) { - Nodes nodes = clusterService.state().nodes(); + DiscoveryNodes nodes = clusterService.state().nodes(); if (nodes.localNodeMaster()) { threadPool.execute(new Runnable() { @Override public void run() { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java index 6e15509d5614d..cd58ba28adcdf 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/nodes/NodeOperationResponse.java @@ -19,7 +19,7 @@ package org.elasticsearch.action.support.nodes; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.util.io.stream.StreamInput; 
import org.elasticsearch.util.io.stream.StreamOutput; import org.elasticsearch.util.io.stream.Streamable; @@ -31,21 +31,21 @@ */ public abstract class NodeOperationResponse implements Streamable { - private Node node; + private DiscoveryNode node; protected NodeOperationResponse() { } - protected NodeOperationResponse(Node node) { + protected NodeOperationResponse(DiscoveryNode node) { this.node = node; } - public Node node() { + public DiscoveryNode node() { return node; } @Override public void readFrom(StreamInput in) throws IOException { - node = Node.readNode(in); + node = DiscoveryNode.readNode(in); } @Override public void writeTo(StreamOutput out) throws IOException { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java index 459a8a02e89f1..1406bc171da69 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesOperationAction.java @@ -28,7 +28,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import org.elasticsearch.util.settings.Settings; @@ -108,7 +108,7 @@ private AsyncAction(Request request, ActionListener<Response> listener) { if (nodesIds == null || nodesIds.length == 0 || (nodesIds.length == 1 && nodesIds[0].equals("_all"))) { int index = 0; nodesIds = new String[clusterState.nodes().size()]; - for (Node node : clusterState.nodes()) { + for (DiscoveryNode node : clusterState.nodes()) { nodesIds[index++] = node.id(); } } @@ -118,7 +118,7 
@@ private AsyncAction(Request request, ActionListener<Response> listener) { private void start() { for (final String nodeId : nodesIds) { - final Node node = clusterState.nodes().nodes().get(nodeId); + final DiscoveryNode node = clusterState.nodes().nodes().get(nodeId); if (nodeId.equals("_local") || nodeId.equals(clusterState.nodes().localNodeId())) { threadPool.execute(new Runnable() { @Override public void run() { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/replication/TransportShardReplicationOperationAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/replication/TransportShardReplicationOperationAction.java index 687af98b45983..307c55ba8d373 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/replication/TransportShardReplicationOperationAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/replication/TransportShardReplicationOperationAction.java @@ -29,8 +29,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.TimeoutClusterStateListener; import org.elasticsearch.cluster.action.shard.ShardStateAction; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.index.IndexShardMissingException; @@ -201,7 +201,7 @@ private class AsyncShardOperationAction { private final Request request; - private Nodes nodes; + private DiscoveryNodes nodes; private ShardsIterator shards; @@ -255,7 +255,7 @@ public boolean start(final boolean fromClusterEvent) throws ElasticSearchExcepti performOnPrimary(shard.id(), fromClusterEvent, false, shard); } } else { - Node node = nodes.get(shard.currentNodeId()); + DiscoveryNode node = 
nodes.get(shard.currentNodeId()); transportService.sendRequest(node, transportAction(), request, new BaseTransportResponseHandler<Response>() { @Override public Response newInstance() { @@ -399,7 +399,7 @@ private void performBackups(final Response response, boolean alreadyThreaded) { private void performOnBackup(final Response response, final AtomicInteger counter, final ShardRouting shard, String nodeId) { final ShardOperationRequest shardRequest = new ShardOperationRequest(shards.shardId().id(), request); if (!nodeId.equals(nodes.localNodeId())) { - Node node = nodes.get(nodeId); + DiscoveryNode node = nodes.get(nodeId); transportService.sendRequest(node, transportBackupAction(), shardRequest, new VoidTransportResponseHandler() { @Override public void handleResponse(VoidStreamable vResponse) { finishIfPossible(); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/single/TransportSingleOperationAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/single/TransportSingleOperationAction.java index c9299800119a5..2f658c1057a72 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/single/TransportSingleOperationAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/action/support/single/TransportSingleOperationAction.java @@ -26,8 +26,8 @@ import org.elasticsearch.action.support.BaseAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.indices.IndicesService; @@ -86,7 +86,7 @@ private class AsyncSingleAction { private final Request request; - private final Nodes nodes; + private final 
DiscoveryNodes nodes; private AsyncSingleAction(Request request, ActionListener<Response> listener) { this.request = request; @@ -164,7 +164,7 @@ private void perform(final Exception lastException) { final ShardRouting shard = shardsIt.nextActive(); // no need to check for local nodes, we tried them already in performFirstGet if (!shard.currentNodeId().equals(nodes.localNodeId())) { - Node node = nodes.get(shard.currentNodeId()); + DiscoveryNode node = nodes.get(shard.currentNodeId()); transportService.sendRequest(node, transportShardAction(), new ShardSingleOperationRequest(request, shard.id()), new BaseTransportResponseHandler<Response>() { @Override public Response newInstance() { return newResponse(); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 4d46c434dd2b3..c9761b7179f9e 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -47,7 +47,7 @@ import org.elasticsearch.client.transport.action.ClientTransportActionModule; import org.elasticsearch.client.transport.support.InternalTransportClient; import org.elasticsearch.cluster.ClusterNameModule; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.env.Environment; import org.elasticsearch.env.EnvironmentModule; import org.elasticsearch.server.internal.InternalSettingsPerparer; @@ -166,7 +166,7 @@ public ImmutableList<TransportAddress> transportAddresses() { * <p>The nodes include all the nodes that are currently alive based on the transport * addresses provided. 
*/ - public ImmutableList<Node> connectedNodes() { + public ImmutableList<DiscoveryNode> connectedNodes() { return nodesService.connectedNodes(); } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 2e202c239af3a..de32ec5ca6c4b 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -28,8 +28,8 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BaseTransportResponseHandler; import org.elasticsearch.transport.ConnectTransportException; @@ -65,9 +65,9 @@ public class TransportClientNodesService extends AbstractComponent implements Cl private final Object transportMutex = new Object(); - private volatile ImmutableList<Node> nodes = ImmutableList.of(); + private volatile ImmutableList<DiscoveryNode> nodes = ImmutableList.of(); - private volatile Nodes discoveredNodes; + private volatile DiscoveryNodes discoveredNodes; private final AtomicInteger tempNodeIdGenerator = new AtomicInteger(); @@ -100,7 +100,7 @@ public ImmutableList<TransportAddress> transportAddresses() { return this.transportAddresses; } - public ImmutableList<Node> connectedNodes() { + public ImmutableList<DiscoveryNode> connectedNodes() { return this.nodes; } @@ -128,13 +128,13 @@ public TransportClientNodesService removeTransportAddress(TransportAddress trans } public <T> T 
execute(NodeCallback<T> callback) throws ElasticSearchException { - ImmutableList<Node> nodes = this.nodes; + ImmutableList<DiscoveryNode> nodes = this.nodes; if (nodes.isEmpty()) { throw new NoNodeAvailableException(); } int index = randomNodeGenerator.incrementAndGet(); for (int i = 0; i < nodes.size(); i++) { - Node node = nodes.get((index + i) % nodes.size()); + DiscoveryNode node = nodes.get((index + i) % nodes.size()); try { return callback.doWithNode(node); } catch (ConnectTransportException e) { @@ -151,9 +151,9 @@ public void close() { @Override public void clusterChanged(ClusterChangedEvent event) { transportService.nodesAdded(event.nodesDelta().addedNodes()); this.discoveredNodes = event.state().nodes(); - HashSet<Node> newNodes = new HashSet<Node>(nodes); + HashSet<DiscoveryNode> newNodes = new HashSet<DiscoveryNode>(nodes); newNodes.addAll(discoveredNodes.nodes().values()); - nodes = new ImmutableList.Builder<Node>().addAll(newNodes).build(); + nodes = new ImmutableList.Builder<DiscoveryNode>().addAll(newNodes).build(); transportService.nodesRemoved(event.nodesDelta().removedNodes()); } @@ -163,11 +163,11 @@ private class ScheduledNodesSampler implements Runnable { ImmutableList<TransportAddress> transportAddresses = TransportClientNodesService.this.transportAddresses; final CountDownLatch latch = new CountDownLatch(transportAddresses.size()); final CopyOnWriteArrayList<NodesInfoResponse> nodesInfoResponses = new CopyOnWriteArrayList<NodesInfoResponse>(); - final CopyOnWriteArrayList<Node> tempNodes = new CopyOnWriteArrayList<Node>(); + final CopyOnWriteArrayList<DiscoveryNode> tempNodes = new CopyOnWriteArrayList<DiscoveryNode>(); for (final TransportAddress transportAddress : transportAddresses) { threadPool.execute(new Runnable() { @Override public void run() { - Node tempNode = new Node("#temp#-" + tempNodeIdGenerator.incrementAndGet(), transportAddress); + DiscoveryNode tempNode = new DiscoveryNode("#temp#-" + 
tempNodeIdGenerator.incrementAndGet(), transportAddress); tempNodes.add(tempNode); try { transportService.nodesAdded(ImmutableList.of(tempNode)); @@ -201,10 +201,10 @@ private class ScheduledNodesSampler implements Runnable { return; } - HashSet<Node> newNodes = new HashSet<Node>(); + HashSet<DiscoveryNode> newNodes = new HashSet<DiscoveryNode>(); for (NodesInfoResponse nodesInfoResponse : nodesInfoResponses) { if (nodesInfoResponse.nodes().length > 0) { - Node node = nodesInfoResponse.nodes()[0].node(); + DiscoveryNode node = nodesInfoResponse.nodes()[0].node(); if (!clusterName.equals(nodesInfoResponse.clusterName())) { logger.warn("Node {} not part of the cluster {}, ignoring...", node, clusterName); } else { @@ -218,7 +218,7 @@ private class ScheduledNodesSampler implements Runnable { if (discoveredNodes != null) { newNodes.addAll(discoveredNodes.nodes().values()); } - nodes = new ImmutableList.Builder<Node>().addAll(newNodes).build(); + nodes = new ImmutableList.Builder<DiscoveryNode>().addAll(newNodes).build(); transportService.nodesRemoved(tempNodes); } @@ -226,6 +226,6 @@ private class ScheduledNodesSampler implements Runnable { public static interface NodeCallback<T> { - T doWithNode(Node node) throws ElasticSearchException; + T doWithNode(DiscoveryNode node) throws ElasticSearchException; } } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/action/ClientTransportAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/action/ClientTransportAction.java index 1fcd4a5145664..cb227b4ffce63 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/action/ClientTransportAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/action/ClientTransportAction.java @@ -24,14 +24,14 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import 
org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; /** * @author kimchy (Shay Banon) */ public interface ClientTransportAction<Request extends ActionRequest, Response extends ActionResponse> { - ActionFuture<Response> execute(Node node, Request request) throws ElasticSearchException; + ActionFuture<Response> execute(DiscoveryNode node, Request request) throws ElasticSearchException; - void execute(Node node, Request request, ActionListener<Response> listener); + void execute(DiscoveryNode node, Request request, ActionListener<Response> listener); } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/action/support/BaseClientTransportAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/action/support/BaseClientTransportAction.java index f0d2d5378717d..36c4a853e0c76 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/action/support/BaseClientTransportAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/action/support/BaseClientTransportAction.java @@ -28,7 +28,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.transport.action.ClientTransportAction; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.transport.BaseTransportResponseHandler; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.TransportService; @@ -59,14 +59,14 @@ protected BaseClientTransportAction(Settings settings, TransportService transpor responseConstructor.setAccessible(true); } - @Override public ActionFuture<Response> execute(Node node, Request request) throws ElasticSearchException { + @Override public ActionFuture<Response> execute(DiscoveryNode node, Request request) throws ElasticSearchException { 
PlainActionFuture<Response> future = newFuture(); request.listenerThreaded(false); execute(node, request, future); return future; } - @Override public void execute(Node node, final Request request, final ActionListener<Response> listener) { + @Override public void execute(DiscoveryNode node, final Request request, final ActionListener<Response> listener) { transportService.sendRequest(node, action(), request, new BaseTransportResponseHandler<Response>() { @Override public Response newInstance() { return BaseClientTransportAction.this.newInstance(); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportClient.java b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportClient.java index 03ef658e0c024..9e9a7fc905233 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportClient.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportClient.java @@ -51,7 +51,7 @@ import org.elasticsearch.client.transport.action.search.ClientTransportSearchAction; import org.elasticsearch.client.transport.action.search.ClientTransportSearchScrollAction; import org.elasticsearch.client.transport.action.terms.ClientTransportTermsAction; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.util.component.AbstractComponent; import org.elasticsearch.util.settings.Settings; @@ -112,7 +112,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<IndexResponse> index(final IndexRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<IndexResponse>>() { - @Override public ActionFuture<IndexResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<IndexResponse> 
doWithNode(DiscoveryNode node) throws ElasticSearchException { return indexAction.execute(node, request); } }); @@ -120,7 +120,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void index(final IndexRequest request, final ActionListener<IndexResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { indexAction.execute(node, request, listener); return null; } @@ -129,7 +129,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<DeleteResponse> delete(final DeleteRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<DeleteResponse>>() { - @Override public ActionFuture<DeleteResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<DeleteResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return deleteAction.execute(node, request); } }); @@ -137,7 +137,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void delete(final DeleteRequest request, final ActionListener<DeleteResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { deleteAction.execute(node, request, listener); return null; } @@ -146,7 +146,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<DeleteByQueryResponse> deleteByQuery(final DeleteByQueryRequest request) { return nodesService.execute(new 
TransportClientNodesService.NodeCallback<ActionFuture<DeleteByQueryResponse>>() { - @Override public ActionFuture<DeleteByQueryResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<DeleteByQueryResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return deleteByQueryAction.execute(node, request); } }); @@ -154,7 +154,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void deleteByQuery(final DeleteByQueryRequest request, final ActionListener<DeleteByQueryResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { deleteByQueryAction.execute(node, request, listener); return null; } @@ -163,7 +163,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<GetResponse> get(final GetRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<GetResponse>>() { - @Override public ActionFuture<GetResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<GetResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return getAction.execute(node, request); } }); @@ -171,7 +171,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void get(final GetRequest request, final ActionListener<GetResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Object>() { - @Override public Object doWithNode(Node node) throws ElasticSearchException { + @Override public Object doWithNode(DiscoveryNode node) throws ElasticSearchException { getAction.execute(node, request, listener); return null; } @@ -180,7 +180,7 @@ public class 
InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<CountResponse> count(final CountRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<CountResponse>>() { - @Override public ActionFuture<CountResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<CountResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return countAction.execute(node, request); } }); @@ -188,7 +188,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void count(final CountRequest request, final ActionListener<CountResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { countAction.execute(node, request, listener); return null; } @@ -197,7 +197,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<SearchResponse> search(final SearchRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<SearchResponse>>() { - @Override public ActionFuture<SearchResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<SearchResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return searchAction.execute(node, request); } }); @@ -205,7 +205,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void search(final SearchRequest request, final ActionListener<SearchResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Object>() { - @Override public Object doWithNode(Node node) throws ElasticSearchException { + @Override public Object doWithNode(DiscoveryNode node) 
throws ElasticSearchException { searchAction.execute(node, request, listener); return null; } @@ -214,7 +214,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<SearchResponse> searchScroll(final SearchScrollRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<SearchResponse>>() { - @Override public ActionFuture<SearchResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<SearchResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return searchScrollAction.execute(node, request); } }); @@ -222,7 +222,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void searchScroll(final SearchScrollRequest request, final ActionListener<SearchResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Object>() { - @Override public Object doWithNode(Node node) throws ElasticSearchException { + @Override public Object doWithNode(DiscoveryNode node) throws ElasticSearchException { searchScrollAction.execute(node, request, listener); return null; } @@ -231,7 +231,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<TermsResponse> terms(final TermsRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<TermsResponse>>() { - @Override public ActionFuture<TermsResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<TermsResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return termsAction.execute(node, request); } }); @@ -239,7 +239,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void terms(final TermsRequest request, final ActionListener<TermsResponse> listener) { nodesService.execute(new 
TransportClientNodesService.NodeCallback<ActionFuture<Void>>() { - @Override public ActionFuture<Void> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<Void> doWithNode(DiscoveryNode node) throws ElasticSearchException { termsAction.execute(node, request, listener); return null; } @@ -248,7 +248,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public ActionFuture<SearchResponse> moreLikeThis(final MoreLikeThisRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<SearchResponse>>() { - @Override public ActionFuture<SearchResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<SearchResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return moreLikeThisAction.execute(node, request); } }); @@ -256,7 +256,7 @@ public class InternalTransportClient extends AbstractComponent implements Client @Override public void moreLikeThis(final MoreLikeThisRequest request, final ActionListener<SearchResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { moreLikeThisAction.execute(node, request, listener); return null; } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportClusterAdminClient.java b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportClusterAdminClient.java index 0420e6b47f05d..7fa4dbcb7837b 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportClusterAdminClient.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportClusterAdminClient.java @@ -46,7 +46,7 @@ import 
org.elasticsearch.client.transport.action.admin.cluster.ping.replication.ClientTransportReplicationPingAction; import org.elasticsearch.client.transport.action.admin.cluster.ping.single.ClientTransportSinglePingAction; import org.elasticsearch.client.transport.action.admin.cluster.state.ClientTransportClusterStateAction; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.util.component.AbstractComponent; import org.elasticsearch.util.settings.Settings; @@ -88,7 +88,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public ActionFuture<ClusterHealthResponse> health(final ClusterHealthRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<ClusterHealthResponse>>() { - @Override public ActionFuture<ClusterHealthResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<ClusterHealthResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return clusterHealthAction.execute(node, request); } }); @@ -96,7 +96,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public void health(final ClusterHealthRequest request, final ActionListener<ClusterHealthResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { clusterHealthAction.execute(node, request, listener); return null; } @@ -105,7 +105,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public ActionFuture<ClusterStateResponse> state(final ClusterStateRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<ClusterStateResponse>>() { - @Override public 
ActionFuture<ClusterStateResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<ClusterStateResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return clusterStateAction.execute(node, request); } }); @@ -113,7 +113,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public void state(final ClusterStateRequest request, final ActionListener<ClusterStateResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { clusterStateAction.execute(node, request, listener); return null; } @@ -122,7 +122,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public ActionFuture<SinglePingResponse> ping(final SinglePingRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<SinglePingResponse>>() { - @Override public ActionFuture<SinglePingResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<SinglePingResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return singlePingAction.execute(node, request); } }); @@ -130,7 +130,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public void ping(final SinglePingRequest request, final ActionListener<SinglePingResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { singlePingAction.execute(node, request, listener); return null; } @@ -139,7 +139,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override 
public ActionFuture<BroadcastPingResponse> ping(final BroadcastPingRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<BroadcastPingResponse>>() { - @Override public ActionFuture<BroadcastPingResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<BroadcastPingResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return broadcastPingAction.execute(node, request); } }); @@ -147,7 +147,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public void ping(final BroadcastPingRequest request, final ActionListener<BroadcastPingResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { broadcastPingAction.execute(node, request, listener); return null; } @@ -156,7 +156,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public ActionFuture<ReplicationPingResponse> ping(final ReplicationPingRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<ReplicationPingResponse>>() { - @Override public ActionFuture<ReplicationPingResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<ReplicationPingResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return replicationPingAction.execute(node, request); } }); @@ -164,7 +164,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public void ping(final ReplicationPingRequest request, final ActionListener<ReplicationPingResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { 
+ @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { replicationPingAction.execute(node, request, listener); return null; } @@ -173,7 +173,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public ActionFuture<NodesInfoResponse> nodesInfo(final NodesInfoRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<NodesInfoResponse>>() { - @Override public ActionFuture<NodesInfoResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<NodesInfoResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return nodesInfoAction.execute(node, request); } }); @@ -181,7 +181,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public void nodesInfo(final NodesInfoRequest request, final ActionListener<NodesInfoResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { nodesInfoAction.execute(node, request, listener); return null; } @@ -190,7 +190,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public ActionFuture<NodesShutdownResponse> nodesShutdown(final NodesShutdownRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<NodesShutdownResponse>>() { - @Override public ActionFuture<NodesShutdownResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<NodesShutdownResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return nodesShutdownAction.execute(node, request); } }); @@ -198,7 +198,7 @@ public class InternalTransportClusterAdminClient extends AbstractComponent imple @Override public void 
nodesShutdown(final NodesShutdownRequest request, final ActionListener<NodesShutdownResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<Void>>() { - @Override public ActionFuture<Void> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<Void> doWithNode(DiscoveryNode node) throws ElasticSearchException { nodesShutdownAction.execute(node, request, listener); return null; } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportIndicesAdminClient.java b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportIndicesAdminClient.java index d55ab7210b804..170f711c73a2f 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportIndicesAdminClient.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/client/transport/support/InternalTransportIndicesAdminClient.java @@ -55,7 +55,7 @@ import org.elasticsearch.client.transport.action.admin.indices.optimize.ClientTransportOptimizeAction; import org.elasticsearch.client.transport.action.admin.indices.refresh.ClientTransportRefreshAction; import org.elasticsearch.client.transport.action.admin.indices.status.ClientTransportIndicesStatusAction; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.util.component.AbstractComponent; import org.elasticsearch.util.settings.Settings; @@ -108,7 +108,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<IndicesStatusResponse> status(final IndicesStatusRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<IndicesStatusResponse>>() { - @Override public ActionFuture<IndicesStatusResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public 
ActionFuture<IndicesStatusResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return indicesStatusAction.execute(node, request); } }); @@ -116,7 +116,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void status(final IndicesStatusRequest request, final ActionListener<IndicesStatusResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { indicesStatusAction.execute(node, request, listener); return null; } @@ -125,7 +125,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<CreateIndexResponse> create(final CreateIndexRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<CreateIndexResponse>>() { - @Override public ActionFuture<CreateIndexResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<CreateIndexResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return createIndexAction.execute(node, request); } }); @@ -133,7 +133,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void create(final CreateIndexRequest request, final ActionListener<CreateIndexResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Object>() { - @Override public Object doWithNode(Node node) throws ElasticSearchException { + @Override public Object doWithNode(DiscoveryNode node) throws ElasticSearchException { createIndexAction.execute(node, request, listener); return null; } @@ -142,7 +142,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<DeleteIndexResponse> delete(final DeleteIndexRequest request) { 
return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<DeleteIndexResponse>>() { - @Override public ActionFuture<DeleteIndexResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<DeleteIndexResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return deleteIndexAction.execute(node, request); } }); @@ -150,7 +150,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void delete(final DeleteIndexRequest request, final ActionListener<DeleteIndexResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Object>() { - @Override public Object doWithNode(Node node) throws ElasticSearchException { + @Override public Object doWithNode(DiscoveryNode node) throws ElasticSearchException { deleteIndexAction.execute(node, request, listener); return null; } @@ -159,7 +159,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<RefreshResponse> refresh(final RefreshRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<RefreshResponse>>() { - @Override public ActionFuture<RefreshResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<RefreshResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return refreshAction.execute(node, request); } }); @@ -167,7 +167,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void refresh(final RefreshRequest request, final ActionListener<RefreshResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { refreshAction.execute(node, request, listener); return null; } 
@@ -176,7 +176,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<FlushResponse> flush(final FlushRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<FlushResponse>>() { - @Override public ActionFuture<FlushResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<FlushResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return flushAction.execute(node, request); } }); @@ -184,7 +184,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void flush(final FlushRequest request, final ActionListener<FlushResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Object>() { - @Override public Object doWithNode(Node node) throws ElasticSearchException { + @Override public Object doWithNode(DiscoveryNode node) throws ElasticSearchException { flushAction.execute(node, request, listener); return null; } @@ -193,7 +193,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<OptimizeResponse> optimize(final OptimizeRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<OptimizeResponse>>() { - @Override public ActionFuture<OptimizeResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<OptimizeResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return optimizeAction.execute(node, request); } }); @@ -201,7 +201,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void optimize(final OptimizeRequest request, final ActionListener<OptimizeResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<Void>>() { - @Override public ActionFuture<Void> doWithNode(Node node) 
throws ElasticSearchException { + @Override public ActionFuture<Void> doWithNode(DiscoveryNode node) throws ElasticSearchException { optimizeAction.execute(node, request, listener); return null; } @@ -210,7 +210,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<PutMappingResponse> putMapping(final PutMappingRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<PutMappingResponse>>() { - @Override public ActionFuture<PutMappingResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<PutMappingResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return putMappingAction.execute(node, request); } }); @@ -218,7 +218,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void putMapping(final PutMappingRequest request, final ActionListener<PutMappingResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { putMappingAction.execute(node, request, listener); return null; } @@ -227,7 +227,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<GatewaySnapshotResponse> gatewaySnapshot(final GatewaySnapshotRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<GatewaySnapshotResponse>>() { - @Override public ActionFuture<GatewaySnapshotResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<GatewaySnapshotResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return gatewaySnapshotAction.execute(node, request); } }); @@ -235,7 +235,7 @@ public class InternalTransportIndicesAdminClient 
extends AbstractComponent imple @Override public void gatewaySnapshot(final GatewaySnapshotRequest request, final ActionListener<GatewaySnapshotResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Object>() { - @Override public Object doWithNode(Node node) throws ElasticSearchException { + @Override public Object doWithNode(DiscoveryNode node) throws ElasticSearchException { gatewaySnapshotAction.execute(node, request, listener); return null; } @@ -244,7 +244,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<IndicesAliasesResponse> aliases(final IndicesAliasesRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<IndicesAliasesResponse>>() { - @Override public ActionFuture<IndicesAliasesResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<IndicesAliasesResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return indicesAliasesAction.execute(node, request); } }); @@ -252,7 +252,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void aliases(final IndicesAliasesRequest request, final ActionListener<IndicesAliasesResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { indicesAliasesAction.execute(node, request, listener); return null; } @@ -261,7 +261,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public ActionFuture<ClearIndicesCacheResponse> clearCache(final ClearIndicesCacheRequest request) { return nodesService.execute(new TransportClientNodesService.NodeCallback<ActionFuture<ClearIndicesCacheResponse>>() { - @Override public 
ActionFuture<ClearIndicesCacheResponse> doWithNode(Node node) throws ElasticSearchException { + @Override public ActionFuture<ClearIndicesCacheResponse> doWithNode(DiscoveryNode node) throws ElasticSearchException { return clearIndicesCacheAction.execute(node, request); } }); @@ -269,7 +269,7 @@ public class InternalTransportIndicesAdminClient extends AbstractComponent imple @Override public void clearCache(final ClearIndicesCacheRequest request, final ActionListener<ClearIndicesCacheResponse> listener) { nodesService.execute(new TransportClientNodesService.NodeCallback<Void>() { - @Override public Void doWithNode(Node node) throws ElasticSearchException { + @Override public Void doWithNode(DiscoveryNode node) throws ElasticSearchException { clearIndicesCacheAction.execute(node, request, listener); return null; } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java index 62b70fe9d2fd0..8f829cd758c74 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterChangedEvent.java @@ -19,7 +19,7 @@ package org.elasticsearch.cluster; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNodes; /** * @author kimchy (Shay Banon) @@ -34,7 +34,7 @@ public class ClusterChangedEvent { private final boolean firstMaster; - private final Nodes.Delta nodesDelta; + private final DiscoveryNodes.Delta nodesDelta; public ClusterChangedEvent(String source, ClusterState state, ClusterState previousState, boolean firstMaster) { this.source = source; @@ -75,7 +75,7 @@ public boolean firstMaster() { return firstMaster; } - public Nodes.Delta nodesDelta() { + public DiscoveryNodes.Delta nodesDelta() { return this.nodesDelta; } diff --git 
a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterState.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterState.java index 0b85e1bdc8c46..44232580a4592 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -20,8 +20,8 @@ package org.elasticsearch.cluster; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.util.Nullable; @@ -42,14 +42,14 @@ public class ClusterState { private final RoutingTable routingTable; - private final Nodes nodes; + private final DiscoveryNodes nodes; private final MetaData metaData; // built on demand private volatile RoutingNodes routingNodes; - public ClusterState(long version, MetaData metaData, RoutingTable routingTable, Nodes nodes) { + public ClusterState(long version, MetaData metaData, RoutingTable routingTable, DiscoveryNodes nodes) { this.version = version; this.metaData = metaData; this.routingTable = routingTable; @@ -60,7 +60,7 @@ public long version() { return this.version; } - public Nodes nodes() { + public DiscoveryNodes nodes() { return this.nodes; } @@ -100,13 +100,13 @@ public static class Builder { private RoutingTable routingTable = RoutingTable.EMPTY_ROUTING_TABLE; - private Nodes nodes = Nodes.EMPTY_NODES; + private DiscoveryNodes nodes = DiscoveryNodes.EMPTY_NODES; - public Builder nodes(Nodes.Builder nodesBuilder) { + public Builder nodes(DiscoveryNodes.Builder nodesBuilder) { return nodes(nodesBuilder.build()); } - public Builder nodes(Nodes nodes) { + public Builder nodes(DiscoveryNodes nodes) { this.nodes = 
nodes; return this; } @@ -147,7 +147,7 @@ public static byte[] toBytes(ClusterState state) throws IOException { return os.copiedByteArray(); } - public static ClusterState fromBytes(byte[] data, Settings globalSettings, Node localNode) throws IOException { + public static ClusterState fromBytes(byte[] data, Settings globalSettings, DiscoveryNode localNode) throws IOException { return readFrom(new BytesStreamInput(data), globalSettings, localNode); } @@ -155,15 +155,15 @@ public static void writeTo(ClusterState state, StreamOutput out) throws IOExcept out.writeLong(state.version()); MetaData.Builder.writeTo(state.metaData(), out); RoutingTable.Builder.writeTo(state.routingTable(), out); - Nodes.Builder.writeTo(state.nodes(), out); + DiscoveryNodes.Builder.writeTo(state.nodes(), out); } - public static ClusterState readFrom(StreamInput in, @Nullable Settings globalSettings, @Nullable Node localNode) throws IOException { + public static ClusterState readFrom(StreamInput in, @Nullable Settings globalSettings, @Nullable DiscoveryNode localNode) throws IOException { Builder builder = new Builder(); builder.version = in.readLong(); builder.metaData = MetaData.Builder.readFrom(in, globalSettings); builder.routingTable = RoutingTable.Builder.readFrom(in); - builder.nodes = Nodes.Builder.readFrom(in, localNode); + builder.nodes = DiscoveryNodes.Builder.readFrom(in, localNode); return builder.build(); } } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexCreatedAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexCreatedAction.java index deb15e97a5155..e9affa656f13e 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexCreatedAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexCreatedAction.java @@ -22,7 +22,7 @@ import com.google.inject.Inject; import org.elasticsearch.ElasticSearchException; 
import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BaseTransportRequestHandler; import org.elasticsearch.transport.TransportChannel; @@ -69,7 +69,7 @@ public void remove(Listener listener) { } public void nodeIndexCreated(final String index, final String nodeId) throws ElasticSearchException { - Nodes nodes = clusterService.state().nodes(); + DiscoveryNodes nodes = clusterService.state().nodes(); if (nodes.localNodeMaster()) { threadPool.execute(new Runnable() { @Override public void run() { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java index e6a1a686b9076..8df5b788f315e 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeIndexDeletedAction.java @@ -22,7 +22,7 @@ import com.google.inject.Inject; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BaseTransportRequestHandler; import org.elasticsearch.transport.TransportChannel; @@ -69,7 +69,7 @@ public void remove(Listener listener) { } public void nodeIndexDeleted(final String index, final String nodeId) throws ElasticSearchException { - Nodes nodes = clusterService.state().nodes(); + DiscoveryNodes nodes = clusterService.state().nodes(); if (nodes.localNodeMaster()) { threadPool.execute(new Runnable() { @Override public void run() { diff --git 
a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingCreatedAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingCreatedAction.java index 82c0618416df7..8cef1f8bfbf3d 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingCreatedAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingCreatedAction.java @@ -22,7 +22,7 @@ import com.google.inject.Inject; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BaseTransportRequestHandler; import org.elasticsearch.transport.TransportChannel; @@ -69,7 +69,7 @@ public void remove(Listener listener) { } public void nodeMappingCreated(final NodeMappingCreatedResponse response) throws ElasticSearchException { - Nodes nodes = clusterService.state().nodes(); + DiscoveryNodes nodes = clusterService.state().nodes(); if (nodes.localNodeMaster()) { threadPool.execute(new Runnable() { @Override public void run() { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index be28fc628bb2f..cbd7fc78f734f 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNodes; import 
org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; @@ -75,7 +75,7 @@ public class ShardStateAction extends AbstractComponent { public void shardFailed(final ShardRouting shardRouting, final String reason) throws ElasticSearchException { logger.warn("Sending failed shard for {}, reason [{}]", shardRouting, reason); - Nodes nodes = clusterService.state().nodes(); + DiscoveryNodes nodes = clusterService.state().nodes(); if (nodes.localNodeMaster()) { threadPool.execute(new Runnable() { @Override public void run() { @@ -92,7 +92,7 @@ public void shardStarted(final ShardRouting shardRouting, final String reason) t if (logger.isDebugEnabled()) { logger.debug("Sending shard started for {}, reason [{}]", shardRouting, reason); } - Nodes nodes = clusterService.state().nodes(); + DiscoveryNodes nodes = clusterService.state().nodes(); if (nodes.localNodeMaster()) { threadPool.execute(new Runnable() { @Override public void run() { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/Node.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java similarity index 86% rename from modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/Node.java rename to modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 678ac34445c21..7a6f35380fe72 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/Node.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -33,9 +33,9 @@ /** * @author kimchy (Shay Banon) */ -public class Node implements Streamable, Serializable { +public class DiscoveryNode implements Streamable, Serializable { - public static final ImmutableList<Node> EMPTY_LIST = ImmutableList.of(); + public static final ImmutableList<DiscoveryNode> EMPTY_LIST = ImmutableList.of(); 
private String nodeName = StringHelper.intern(""); @@ -45,14 +45,14 @@ public class Node implements Streamable, Serializable { private boolean dataNode = true; - private Node() { + private DiscoveryNode() { } - public Node(String nodeId, TransportAddress address) { + public DiscoveryNode(String nodeId, TransportAddress address) { this("", true, nodeId, address); } - public Node(String nodeName, boolean dataNode, String nodeId, TransportAddress address) { + public DiscoveryNode(String nodeName, boolean dataNode, String nodeId, TransportAddress address) { if (nodeName == null) { this.nodeName = StringHelper.intern(""); } else { @@ -91,8 +91,8 @@ public boolean dataNode() { return dataNode; } - public static Node readNode(StreamInput in) throws IOException { - Node node = new Node(); + public static DiscoveryNode readNode(StreamInput in) throws IOException { + DiscoveryNode node = new DiscoveryNode(); node.readFrom(in); return node; } @@ -112,10 +112,10 @@ public static Node readNode(StreamInput in) throws IOException { } @Override public boolean equals(Object obj) { - if (!(obj instanceof Node)) + if (!(obj instanceof DiscoveryNode)) return false; - Node other = (Node) obj; + DiscoveryNode other = (DiscoveryNode) obj; return this.nodeId.equals(other.nodeId); } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/Nodes.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java similarity index 73% rename from modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/Nodes.java rename to modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 09e10599c673e..dea023aae1e0e 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/Nodes.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -37,26 +37,26 @@ /** * @author kimchy (Shay Banon) */ -public class Nodes implements Iterable<Node> { +public 
class DiscoveryNodes implements Iterable<DiscoveryNode> { - public static Nodes EMPTY_NODES = newNodesBuilder().build(); + public static DiscoveryNodes EMPTY_NODES = newNodesBuilder().build(); - private final ImmutableMap<String, Node> nodes; + private final ImmutableMap<String, DiscoveryNode> nodes; - private final ImmutableMap<String, Node> dataNodes; + private final ImmutableMap<String, DiscoveryNode> dataNodes; private final String masterNodeId; private final String localNodeId; - private Nodes(ImmutableMap<String, Node> nodes, ImmutableMap<String, Node> dataNodes, String masterNodeId, String localNodeId) { + private DiscoveryNodes(ImmutableMap<String, DiscoveryNode> nodes, ImmutableMap<String, DiscoveryNode> dataNodes, String masterNodeId, String localNodeId) { this.nodes = nodes; this.dataNodes = dataNodes; this.masterNodeId = masterNodeId; this.localNodeId = localNodeId; } - @Override public UnmodifiableIterator<Node> iterator() { + @Override public UnmodifiableIterator<DiscoveryNode> iterator() { return nodes.values().iterator(); } @@ -83,15 +83,15 @@ public int size() { return nodes.size(); } - public ImmutableMap<String, Node> nodes() { + public ImmutableMap<String, DiscoveryNode> nodes() { return this.nodes; } - public ImmutableMap<String, Node> dataNodes() { + public ImmutableMap<String, DiscoveryNode> dataNodes() { return this.dataNodes; } - public Node get(String nodeId) { + public DiscoveryNode get(String nodeId) { return nodes.get(nodeId); } @@ -107,17 +107,17 @@ public String localNodeId() { return this.localNodeId; } - public Node localNode() { + public DiscoveryNode localNode() { return nodes.get(localNodeId); } - public Node masterNode() { + public DiscoveryNode masterNode() { return nodes.get(masterNodeId); } - public Nodes removeDeadMembers(Set<String> newNodes, String masterNodeId) { + public DiscoveryNodes removeDeadMembers(Set<String> newNodes, String masterNodeId) { Builder builder = new 
Builder().masterNodeId(masterNodeId).localNodeId(localNodeId); - for (Node node : this) { + for (DiscoveryNode node : this) { if (newNodes.contains(node.id())) { builder.put(node); } @@ -125,28 +125,28 @@ public Nodes removeDeadMembers(Set<String> newNodes, String masterNodeId) { return builder.build(); } - public Nodes newNode(Node node) { + public DiscoveryNodes newNode(DiscoveryNode node) { return new Builder().putAll(this).put(node).build(); } /** * Returns the changes comparing this nodes to the provided nodes. */ - public Delta delta(Nodes other) { - List<Node> removed = newArrayList(); - List<Node> added = newArrayList(); - for (Node node : other) { + public Delta delta(DiscoveryNodes other) { + List<DiscoveryNode> removed = newArrayList(); + List<DiscoveryNode> added = newArrayList(); + for (DiscoveryNode node : other) { if (!this.nodeExists(node.id())) { removed.add(node); } } - for (Node node : this) { + for (DiscoveryNode node : this) { if (!other.nodeExists(node.id())) { added.add(node); } } - Node previousMasterNode = null; - Node newMasterNode = null; + DiscoveryNode previousMasterNode = null; + DiscoveryNode newMasterNode = null; if (masterNodeId != null) { if (other.masterNodeId == null || !other.masterNodeId.equals(masterNodeId)) { previousMasterNode = other.masterNode(); @@ -159,7 +159,7 @@ public Delta delta(Nodes other) { public String prettyPrint() { StringBuilder sb = new StringBuilder(); sb.append("Nodes: \n"); - for (Node node : this) { + for (DiscoveryNode node : this) { sb.append(" ").append(node); if (node == localNode()) { sb.append(", local"); @@ -173,23 +173,23 @@ public String prettyPrint() { } public Delta emptyDelta() { - return new Delta(null, null, localNodeId, Node.EMPTY_LIST, Node.EMPTY_LIST); + return new Delta(null, null, localNodeId, DiscoveryNode.EMPTY_LIST, DiscoveryNode.EMPTY_LIST); } public static class Delta { private final String localNodeId; - private final Node previousMasterNode; - private final Node newMasterNode; - 
private final ImmutableList<Node> removed; - private final ImmutableList<Node> added; + private final DiscoveryNode previousMasterNode; + private final DiscoveryNode newMasterNode; + private final ImmutableList<DiscoveryNode> removed; + private final ImmutableList<DiscoveryNode> added; - public Delta(String localNodeId, ImmutableList<Node> removed, ImmutableList<Node> added) { + public Delta(String localNodeId, ImmutableList<DiscoveryNode> removed, ImmutableList<DiscoveryNode> added) { this(null, null, localNodeId, removed, added); } - public Delta(@Nullable Node previousMasterNode, @Nullable Node newMasterNode, String localNodeId, ImmutableList<Node> removed, ImmutableList<Node> added) { + public Delta(@Nullable DiscoveryNode previousMasterNode, @Nullable DiscoveryNode newMasterNode, String localNodeId, ImmutableList<DiscoveryNode> removed, ImmutableList<DiscoveryNode> added) { this.previousMasterNode = previousMasterNode; this.newMasterNode = newMasterNode; this.localNodeId = localNodeId; @@ -205,11 +205,11 @@ public boolean masterNodeChanged() { return newMasterNode != null; } - public Node previousMasterNode() { + public DiscoveryNode previousMasterNode() { return previousMasterNode; } - public Node newMasterNode() { + public DiscoveryNode newMasterNode() { return newMasterNode; } @@ -217,7 +217,7 @@ public boolean removed() { return !removed.isEmpty(); } - public ImmutableList<Node> removedNodes() { + public ImmutableList<DiscoveryNode> removedNodes() { return removed; } @@ -225,7 +225,7 @@ public boolean added() { return !added.isEmpty(); } - public ImmutableList<Node> addedNodes() { + public ImmutableList<DiscoveryNode> addedNodes() { return added; } @@ -252,7 +252,7 @@ public String shortSummary() { sb.append(", "); } sb.append("Removed {"); - for (Node node : removedNodes()) { + for (DiscoveryNode node : removedNodes()) { sb.append(node).append(','); } sb.append("}"); @@ -265,7 +265,7 @@ public String shortSummary() { sb.append(", "); } sb.append("Added 
{"); - for (Node node : addedNodes()) { + for (DiscoveryNode node : addedNodes()) { if (!node.id().equals(localNodeId)) { // don't print ourself sb.append(node).append(','); @@ -284,28 +284,28 @@ public static Builder newNodesBuilder() { public static class Builder { - private Map<String, Node> nodes = newHashMap(); + private Map<String, DiscoveryNode> nodes = newHashMap(); private String masterNodeId; private String localNodeId; - public Builder putAll(Nodes nodes) { + public Builder putAll(DiscoveryNodes nodes) { this.masterNodeId = nodes.masterNodeId(); this.localNodeId = nodes.localNodeId(); - for (Node node : nodes) { + for (DiscoveryNode node : nodes) { put(node); } return this; } - public Builder put(Node node) { + public Builder put(DiscoveryNode node) { nodes.put(node.id(), node); return this; } - public Builder putAll(Iterable<Node> nodes) { - for (Node node : nodes) { + public Builder putAll(Iterable<DiscoveryNode> nodes) { + for (DiscoveryNode node : nodes) { put(node); } return this; @@ -326,25 +326,25 @@ public Builder localNodeId(String localNodeId) { return this; } - public Nodes build() { - ImmutableMap.Builder<String, Node> dataNodesBuilder = ImmutableMap.builder(); - for (Map.Entry<String, Node> nodeEntry : nodes.entrySet()) { + public DiscoveryNodes build() { + ImmutableMap.Builder<String, DiscoveryNode> dataNodesBuilder = ImmutableMap.builder(); + for (Map.Entry<String, DiscoveryNode> nodeEntry : nodes.entrySet()) { if (nodeEntry.getValue().dataNode()) { dataNodesBuilder.put(nodeEntry.getKey(), nodeEntry.getValue()); } } - return new Nodes(ImmutableMap.copyOf(nodes), dataNodesBuilder.build(), masterNodeId, localNodeId); + return new DiscoveryNodes(ImmutableMap.copyOf(nodes), dataNodesBuilder.build(), masterNodeId, localNodeId); } - public static void writeTo(Nodes nodes, StreamOutput out) throws IOException { + public static void writeTo(DiscoveryNodes nodes, StreamOutput out) throws IOException { out.writeUTF(nodes.masterNodeId); 
out.writeVInt(nodes.size()); - for (Node node : nodes) { + for (DiscoveryNode node : nodes) { node.writeTo(out); } } - public static Nodes readFrom(StreamInput in, @Nullable Node localNode) throws IOException { + public static DiscoveryNodes readFrom(StreamInput in, @Nullable DiscoveryNode localNode) throws IOException { Builder builder = new Builder(); builder.masterNodeId(in.readUTF()); if (localNode != null) { @@ -352,7 +352,7 @@ public static Nodes readFrom(StreamInput in, @Nullable Node localNode) throws IO } int size = in.readVInt(); for (int i = 0; i < size; i++) { - Node node = Node.readNode(in); + DiscoveryNode node = DiscoveryNode.readNode(in); if (localNode != null && node.id().equals(localNode.id())) { // reuse the same instance of our address and local node id for faster equality node = localNode; diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/routing/strategy/DefaultShardsRoutingStrategy.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/routing/strategy/DefaultShardsRoutingStrategy.java index c22417421916a..6a7052a327681 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/routing/strategy/DefaultShardsRoutingStrategy.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/routing/strategy/DefaultShardsRoutingStrategy.java @@ -20,7 +20,7 @@ package org.elasticsearch.cluster.routing.strategy; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.*; import java.util.Iterator; @@ -54,7 +54,7 @@ public class DefaultShardsRoutingStrategy implements ShardsRoutingStrategy { @Override public RoutingTable reroute(ClusterState clusterState) { RoutingNodes routingNodes = clusterState.routingNodes(); - Iterable<Node> dataNodes = clusterState.nodes().dataNodes().values(); + Iterable<DiscoveryNode> dataNodes = 
clusterState.nodes().dataNodes().values(); boolean changed = false; // first, clear from the shards any node id they used to belong to that is now dead @@ -212,8 +212,8 @@ private boolean allocateUnassigned(RoutingNodes routingNodes) { * * @param liveNodes currently live nodes. */ - private void applyNewNodes(RoutingNodes routingNodes, Iterable<Node> liveNodes) { - for (Node node : liveNodes) { + private void applyNewNodes(RoutingNodes routingNodes, Iterable<DiscoveryNode> liveNodes) { + for (DiscoveryNode node : liveNodes) { if (!routingNodes.nodesToShards().containsKey(node.id())) { RoutingNode routingNode = new RoutingNode(node.id()); routingNodes.nodesToShards().put(node.id(), routingNode); @@ -221,10 +221,10 @@ private void applyNewNodes(RoutingNodes routingNodes, Iterable<Node> liveNodes) } } - private boolean deassociateDeadNodes(RoutingNodes routingNodes, Iterable<Node> liveNodes) { + private boolean deassociateDeadNodes(RoutingNodes routingNodes, Iterable<DiscoveryNode> liveNodes) { boolean changed = false; Set<String> liveNodeIds = newHashSet(); - for (Node liveNode : liveNodes) { + for (DiscoveryNode liveNode : liveNodes) { liveNodeIds.add(liveNode.id()); } Set<String> nodeIdsToRemove = newHashSet(); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java index f6c97ba5f76a1..5fb356c7a39f4 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/cluster/service/InternalClusterService.java @@ -22,7 +22,7 @@ import com.google.inject.Inject; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.cluster.*; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.discovery.DiscoveryService; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -172,7 +172,7 @@ public void submitStateUpdateTask(final String source, final ClusterStateUpdateT ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(source, clusterState, previousClusterState, discoveryService.firstMaster()); // new cluster state, notify all listeners - final Nodes.Delta nodesDelta = clusterChangedEvent.nodesDelta(); + final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta(); if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { String summary = nodesDelta.shortSummary(); if (summary.length() > 0) { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java index cb92631e77484..a4edcb21c4ae3 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/jgroups/JgroupsDiscovery.java @@ -23,8 +23,8 @@ import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchIllegalStateException; import org.elasticsearch.cluster.*; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryException; import org.elasticsearch.discovery.InitialStateDiscoveryListener; @@ -70,7 +70,7 @@ public class JgroupsDiscovery extends AbstractLifecycleComponent<Discovery> impl private volatile boolean addressSet = false; - private Node localNode; + private DiscoveryNode localNode; private volatile boolean firstMaster = false; @@ -142,13 +142,13 @@ public class JgroupsDiscovery extends AbstractLifecycleComponent<Discovery> impl 
channel.connect(clusterName.value()); channel.setReceiver(this); logger.debug("Connected to cluster [{}], address [{}]", channel.getClusterName(), channel.getAddress()); - this.localNode = new Node(settings.get("name"), settings.getAsBoolean("node.data", true), channel.getAddress().toString(), transportService.boundAddress().publishAddress()); + this.localNode = new DiscoveryNode(settings.get("name"), settings.getAsBoolean("node.data", true), channel.getAddress().toString(), transportService.boundAddress().publishAddress()); if (isMaster()) { firstMaster = true; clusterService.submitStateUpdateTask("jgroups-disco-initialconnect(master)", new ProcessedClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { - Nodes.Builder builder = new Nodes.Builder() + DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder() .localNodeId(localNode.id()) .masterNodeId(localNode.id()) // put our local node @@ -164,7 +164,7 @@ public class JgroupsDiscovery extends AbstractLifecycleComponent<Discovery> impl } else { clusterService.submitStateUpdateTask("jgroups-disco-initialconnect", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { - Nodes.Builder builder = new Nodes.Builder() + DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder() .localNodeId(localNode.id()) .put(localNode); return newClusterStateBuilder().state(currentState).nodes(builder).build(); @@ -248,7 +248,7 @@ public String nodeDescription() { if (isMaster()) { try { BytesStreamInput is = new BytesStreamInput(msg.getBuffer()); - final Node newNode = Node.readNode(is); + final DiscoveryNode newNode = DiscoveryNode.readNode(is); is.close(); if (logger.isDebugEnabled()) { @@ -310,8 +310,8 @@ private boolean isMaster() { clusterService.submitStateUpdateTask("jgroups-disco-view", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { - Nodes newNodes = 
currentState.nodes().removeDeadMembers(newMembers, newView.getCreator().toString()); - Nodes.Delta delta = newNodes.delta(currentState.nodes()); + DiscoveryNodes newNodes = currentState.nodes().removeDeadMembers(newMembers, newView.getCreator().toString()); + DiscoveryNodes.Delta delta = newNodes.delta(currentState.nodes()); if (delta.added()) { logger.warn("No new nodes should be created when a new discovery view is accepted"); } @@ -328,7 +328,7 @@ private boolean isMaster() { // check whether I have been removed due to temporary disconnect final String me = channel.getAddress().toString(); boolean foundMe = false; - for (Node node : clusterService.state().nodes()) { + for (DiscoveryNode node : clusterService.state().nodes()) { if (node.id().equals(me)) { foundMe = true; break; diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java index 045c78aabd66c..a04105975a045 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/discovery/local/LocalDiscovery.java @@ -23,8 +23,8 @@ import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchIllegalStateException; import org.elasticsearch.cluster.*; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.InitialStateDiscoveryListener; import org.elasticsearch.transport.TransportService; @@ -54,7 +54,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem private final ClusterName clusterName; - private Node localNode; + private DiscoveryNode localNode; private volatile boolean master = false; @@ -84,7 
+84,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem clusterGroups.put(clusterName, clusterGroup); } logger.debug("Connected to cluster [{}]", clusterName); - this.localNode = new Node(settings.get("name"), settings.getAsBoolean("node.data", true), Long.toString(nodeIdGenerator.incrementAndGet()), transportService.boundAddress().publishAddress()); + this.localNode = new DiscoveryNode(settings.get("name"), settings.getAsBoolean("node.data", true), Long.toString(nodeIdGenerator.incrementAndGet()), transportService.boundAddress().publishAddress()); clusterGroup.members().add(this); if (clusterGroup.members().size() == 1) { @@ -93,7 +93,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem firstMaster = true; clusterService.submitStateUpdateTask("local-disco-initialconnect(master)", new ProcessedClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { - Nodes.Builder builder = new Nodes.Builder() + DiscoveryNodes.Builder builder = new DiscoveryNodes.Builder() .localNodeId(localNode.id()) .masterNodeId(localNode.id()) // put our local node @@ -153,8 +153,8 @@ public class LocalDiscovery extends AbstractLifecycleComponent<Discovery> implem masterDiscovery.clusterService.submitStateUpdateTask("local-disco-update", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { - Nodes newNodes = currentState.nodes().removeDeadMembers(newMembers, masterDiscovery.localNode.id()); - Nodes.Delta delta = newNodes.delta(currentState.nodes()); + DiscoveryNodes newNodes = currentState.nodes().removeDeadMembers(newMembers, masterDiscovery.localNode.id()); + DiscoveryNodes.Delta delta = newNodes.delta(currentState.nodes()); if (delta.added()) { logger.warn("No new nodes should be created when a new discovery view is accepted"); } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/recovery/RecoveryAction.java 
b/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/recovery/RecoveryAction.java index 04d8009a8c12d..825b738321e19 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/recovery/RecoveryAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/recovery/RecoveryAction.java @@ -25,7 +25,7 @@ import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchInterruptedException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.RecoveryEngineException; @@ -135,7 +135,7 @@ public void close() { } } - public synchronized void startRecovery(Node node, Node targetNode, boolean markAsRelocated) throws ElasticSearchException { + public synchronized void startRecovery(DiscoveryNode node, DiscoveryNode targetNode, boolean markAsRelocated) throws ElasticSearchException { sendStartRecoveryThread = Thread.currentThread(); try { // mark the shard as recovering @@ -224,20 +224,20 @@ private void cleanOpenIndex() { private static class StartRecoveryRequest implements Streamable { - private Node node; + private DiscoveryNode node; private boolean markAsRelocated; private StartRecoveryRequest() { } - private StartRecoveryRequest(Node node, boolean markAsRelocated) { + private StartRecoveryRequest(DiscoveryNode node, boolean markAsRelocated) { this.node = node; this.markAsRelocated = markAsRelocated; } @Override public void readFrom(StreamInput in) throws IOException { - node = Node.readNode(in); + node = DiscoveryNode.readNode(in); markAsRelocated = in.readBoolean(); } @@ -255,7 +255,7 @@ private class StartRecoveryTransportRequestHandler extends BaseTransportRequestH @Override public void messageReceived(final StartRecoveryRequest startRecoveryRequest, 
final TransportChannel channel) throws Exception { logger.trace("Starting recovery to {}, markAsRelocated {}", startRecoveryRequest.node, startRecoveryRequest.markAsRelocated); - final Node node = startRecoveryRequest.node; + final DiscoveryNode node = startRecoveryRequest.node; cleanOpenIndex(); final RecoveryStatus recoveryStatus = new RecoveryStatus(); indexShard.recover(new Engine.RecoveryHandler() { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/recovery/RecoveryFailedException.java b/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/recovery/RecoveryFailedException.java index 71c38e34c4722..3ce9bea28f1dc 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/recovery/RecoveryFailedException.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/index/shard/recovery/RecoveryFailedException.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.shard.recovery; import org.elasticsearch.ElasticSearchException; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.index.shard.ShardId; /** @@ -28,7 +28,7 @@ */ public class RecoveryFailedException extends ElasticSearchException { - public RecoveryFailedException(ShardId shardId, Node node, Node targetNode, Throwable cause) { + public RecoveryFailedException(ShardId shardId, DiscoveryNode node, DiscoveryNode targetNode, Throwable cause) { super(shardId + ": Recovery failed from " + targetNode + " into " + node, cause); } } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java index 537bcc814c5f7..60aeec46d69c1 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java +++ 
b/modules/elasticsearch/src/main/java/org/elasticsearch/indices/cluster/IndicesClusterStateService.java @@ -30,8 +30,8 @@ import org.elasticsearch.cluster.action.index.NodeMappingCreatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingTable; @@ -228,7 +228,7 @@ private void applyNewShards(final ClusterChangedEvent event) throws ElasticSearc if (routingNodes == null) { return; } - Nodes nodes = event.state().nodes(); + DiscoveryNodes nodes = event.state().nodes(); for (final ShardRouting shardRouting : routingNodes) { @@ -257,7 +257,7 @@ private void applyNewShards(final ClusterChangedEvent event) throws ElasticSearc } } - private void applyInitializingShard(final RoutingTable routingTable, final Nodes nodes, final ShardRouting shardRouting) throws ElasticSearchException { + private void applyInitializingShard(final RoutingTable routingTable, final DiscoveryNodes nodes, final ShardRouting shardRouting) throws ElasticSearchException { final IndexService indexService = indicesService.indexServiceSafe(shardRouting.index()); final int shardId = shardRouting.id(); @@ -322,7 +322,7 @@ private void applyInitializingShard(final RoutingTable routingTable, final Nodes for (ShardRouting entry : shardRoutingTable) { if (entry.primary() && entry.started()) { // only recover from started primary, if we can't find one, we will do it next round - Node node = nodes.get(entry.currentNodeId()); + DiscoveryNode node = nodes.get(entry.currentNodeId()); try { // we are recovering a backup from a primary, so no need to mark it as relocated 
recoveryAction.startRecovery(nodes.localNode(), node, false); @@ -346,7 +346,7 @@ private void applyInitializingShard(final RoutingTable routingTable, final Nodes } } else { // relocating primaries, recovery from the relocating shard - Node node = nodes.get(shardRouting.relocatingNodeId()); + DiscoveryNode node = nodes.get(shardRouting.relocatingNodeId()); try { // we mark the primary we are going to recover from as relocated at the end of phase 3 // so operations will start moving to the new primary diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/JmxClusterService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/JmxClusterService.java index 936b60b7b8f89..37ee3725a5316 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/JmxClusterService.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/JmxClusterService.java @@ -22,7 +22,7 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.jmx.action.GetJmxServiceUrlAction; import org.elasticsearch.util.component.AbstractComponent; import org.elasticsearch.util.settings.Settings; @@ -60,7 +60,7 @@ public JmxClusterService(Settings settings, ClusterService clusterService, JmxSe if (jmxService.publishUrl() != null) { clusterService.add(new JmxClusterEventListener()); - for (final Node node : clusterService.state().nodes()) { + for (final DiscoveryNode node : clusterService.state().nodes()) { clusterNodesJmxUpdater.execute(new Runnable() { @Override public void run() { String nodeServiceUrl = getJmxServiceUrlAction.obtainPublishUrl(node); @@ -77,7 +77,7 @@ public void close() { } } - private void registerNode(Node node, String nodeServiceUrl) { + private void registerNode(DiscoveryNode node, String nodeServiceUrl) { try { 
JMXServiceURL jmxServiceURL = new JMXServiceURL(nodeServiceUrl); JMXConnector jmxConnector = JMXConnectorFactory.connect(jmxServiceURL, null); @@ -103,7 +103,7 @@ private class JmxClusterEventListener implements ClusterStateListener { if (!event.nodesChanged()) { return; } - for (final Node node : event.nodesDelta().addedNodes()) { + for (final DiscoveryNode node : event.nodesDelta().addedNodes()) { clusterNodesJmxUpdater.execute(new Runnable() { @Override public void run() { String nodeServiceUrl = getJmxServiceUrlAction.obtainPublishUrl(node); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/action/GetJmxServiceUrlAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/action/GetJmxServiceUrlAction.java index 15d60e413b0c1..62f5e172f860d 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/action/GetJmxServiceUrlAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/jmx/action/GetJmxServiceUrlAction.java @@ -22,7 +22,7 @@ import com.google.inject.Inject; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.jmx.JmxService; import org.elasticsearch.transport.BaseTransportRequestHandler; import org.elasticsearch.transport.FutureTransportResponseHandler; @@ -54,7 +54,7 @@ public class GetJmxServiceUrlAction extends AbstractComponent { transportService.registerHandler(GetJmxServiceUrlTransportHandler.ACTION, new GetJmxServiceUrlTransportHandler()); } - public String obtainPublishUrl(final Node node) throws ElasticSearchException { + public String obtainPublishUrl(final DiscoveryNode node) throws ElasticSearchException { if (clusterService.state().nodes().localNodeId().equals(node.id())) { return jmxService.publishUrl(); } else { diff --git 
a/modules/elasticsearch/src/main/java/org/elasticsearch/monitor/dump/SimpleDumpGenerator.java b/modules/elasticsearch/src/main/java/org/elasticsearch/monitor/dump/SimpleDumpGenerator.java index af97d7389b564..2c148c2a27dfb 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/monitor/dump/SimpleDumpGenerator.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/monitor/dump/SimpleDumpGenerator.java @@ -20,7 +20,7 @@ package org.elasticsearch.monitor.dump; import com.google.common.collect.ImmutableMap; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.util.Nullable; import java.io.File; @@ -50,7 +50,7 @@ public Result generateDump(String cause, @Nullable Map<String, Object> context, long timestamp = System.currentTimeMillis(); String fileName = ""; if (context.containsKey("localNode")) { - Node localNode = (Node) context.get("localNode"); + DiscoveryNode localNode = (DiscoveryNode) context.get("localNode"); if (localNode.name() != null) { fileName += localNode.name() + "-"; } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/monitor/dump/cluster/ClusterDumpContributor.java b/modules/elasticsearch/src/main/java/org/elasticsearch/monitor/dump/cluster/ClusterDumpContributor.java index 4f1e22ffbe0c4..1335ada63ee72 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/monitor/dump/cluster/ClusterDumpContributor.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/monitor/dump/cluster/ClusterDumpContributor.java @@ -23,7 +23,7 @@ import com.google.inject.assistedinject.Assisted; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.monitor.dump.Dump; import 
org.elasticsearch.monitor.dump.DumpContributionFailedException; @@ -54,7 +54,7 @@ public class ClusterDumpContributor implements DumpContributor { @Override public void contribute(Dump dump) throws DumpContributionFailedException { ClusterState clusterState = clusterService.state(); - Nodes nodes = clusterState.nodes(); + DiscoveryNodes nodes = clusterState.nodes(); RoutingTable routingTable = clusterState.routingTable(); PrintWriter writer = new PrintWriter(dump.createFileWriter("cluster.txt")); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java index 5d869050ba443..466b642c35a69 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/action/SearchServiceTransportAction.java @@ -21,7 +21,7 @@ import com.google.inject.Inject; import org.elasticsearch.cluster.ClusterService; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchSearchRequest; @@ -65,7 +65,7 @@ public class SearchServiceTransportAction { transportService.registerHandler(SearchFetchByIdTransportHandler.ACTION, new SearchFetchByIdTransportHandler()); } - public void sendFreeContext(Node node, final long contextId) { + public void sendFreeContext(DiscoveryNode node, final long contextId) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { searchService.freeContext(contextId); } else { @@ -73,7 +73,7 @@ public void sendFreeContext(Node node, final long contextId) { } } - public void sendExecuteDfs(Node node, final InternalSearchRequest request, final SearchServiceListener<DfsSearchResult> listener) { + public 
void sendExecuteDfs(DiscoveryNode node, final InternalSearchRequest request, final SearchServiceListener<DfsSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { DfsSearchResult result = searchService.executeDfsPhase(request); @@ -103,7 +103,7 @@ public void sendExecuteDfs(Node node, final InternalSearchRequest request, final } } - public void sendExecuteQuery(Node node, final InternalSearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { + public void sendExecuteQuery(DiscoveryNode node, final InternalSearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QuerySearchResult result = searchService.executeQueryPhase(request); @@ -133,7 +133,7 @@ public void sendExecuteQuery(Node node, final InternalSearchRequest request, fin } } - public void sendExecuteQuery(Node node, final QuerySearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { + public void sendExecuteQuery(DiscoveryNode node, final QuerySearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QuerySearchResult result = searchService.executeQueryPhase(request); @@ -163,7 +163,7 @@ public void sendExecuteQuery(Node node, final QuerySearchRequest request, final } } - public void sendExecuteQuery(Node node, final InternalScrollSearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { + public void sendExecuteQuery(DiscoveryNode node, final InternalScrollSearchRequest request, final SearchServiceListener<QuerySearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QuerySearchResult result = searchService.executeQueryPhase(request); @@ -193,7 +193,7 @@ public void sendExecuteQuery(Node node, final InternalScrollSearchRequest reques } } - public 
void sendExecuteFetch(Node node, final InternalSearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { + public void sendExecuteFetch(DiscoveryNode node, final InternalSearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QueryFetchSearchResult result = searchService.executeFetchPhase(request); @@ -223,7 +223,7 @@ public void sendExecuteFetch(Node node, final InternalSearchRequest request, fin } } - public void sendExecuteFetch(Node node, final QuerySearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { + public void sendExecuteFetch(DiscoveryNode node, final QuerySearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QueryFetchSearchResult result = searchService.executeFetchPhase(request); @@ -253,7 +253,7 @@ public void sendExecuteFetch(Node node, final QuerySearchRequest request, final } } - public void sendExecuteFetch(Node node, final InternalScrollSearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { + public void sendExecuteFetch(DiscoveryNode node, final InternalScrollSearchRequest request, final SearchServiceListener<QueryFetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try { QueryFetchSearchResult result = searchService.executeFetchPhase(request); @@ -283,7 +283,7 @@ public void sendExecuteFetch(Node node, final InternalScrollSearchRequest reques } } - public void sendExecuteFetch(Node node, final FetchSearchRequest request, final SearchServiceListener<FetchSearchResult> listener) { + public void sendExecuteFetch(DiscoveryNode node, final FetchSearchRequest request, final SearchServiceListener<FetchSearchResult> listener) { if (clusterService.state().nodes().localNodeId().equals(node.id())) { try 
{ FetchSearchResult result = searchService.executeFetchPhase(request); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ConnectTransportException.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ConnectTransportException.java index 05a661db4294c..6b0abb69ec676 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ConnectTransportException.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/ConnectTransportException.java @@ -19,25 +19,25 @@ package org.elasticsearch.transport; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; /** * @author kimchy (Shay Banon) */ public class ConnectTransportException extends TransportException { - private final Node node; + private final DiscoveryNode node; - public ConnectTransportException(Node node, String msg) { + public ConnectTransportException(DiscoveryNode node, String msg) { this(node, msg, null); } - public ConnectTransportException(Node node, String msg, Throwable cause) { + public ConnectTransportException(DiscoveryNode node, String msg, Throwable cause) { super(node + ": " + msg, cause); this.node = node; } - public Node node() { + public DiscoveryNode node() { return node; } } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/SendRequestTransportException.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/SendRequestTransportException.java index 5d2c219ca1720..4159ea4dd264f 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/SendRequestTransportException.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/SendRequestTransportException.java @@ -19,14 +19,14 @@ package org.elasticsearch.transport; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; /** * @author kimchy (shay.banon) */ public class SendRequestTransportException extends 
RemoteTransportException { - public SendRequestTransportException(Node node, String action, Throwable cause) { + public SendRequestTransportException(DiscoveryNode node, String action, Throwable cause) { super(node.name(), node.address(), action, cause); } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/Transport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/Transport.java index 272a7ec695d1e..bf1a4e65615be 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/Transport.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/Transport.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.util.component.LifecycleComponent; import org.elasticsearch.util.io.stream.Streamable; import org.elasticsearch.util.transport.BoundTransportAddress; @@ -70,10 +70,10 @@ public static byte setError(byte value) { */ boolean addressSupported(Class<? 
extends TransportAddress> address); - void nodesAdded(Iterable<Node> nodes); + void nodesAdded(Iterable<DiscoveryNode> nodes); - void nodesRemoved(Iterable<Node> nodes); + void nodesRemoved(Iterable<DiscoveryNode> nodes); - <T extends Streamable> void sendRequest(Node node, long requestId, String action, + <T extends Streamable> void sendRequest(DiscoveryNode node, long requestId, String action, Streamable message, TransportResponseHandler<T> handler) throws IOException, TransportException; } diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportService.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportService.java index 88ec72ae6548d..8fce3ea72783a 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/TransportService.java @@ -21,7 +21,7 @@ import com.google.inject.Inject; import org.elasticsearch.ElasticSearchException; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.util.component.AbstractLifecycleComponent; import org.elasticsearch.util.concurrent.highscalelib.NonBlockingHashMapLong; @@ -96,7 +96,7 @@ public BoundTransportAddress boundAddress() { return transport.boundAddress(); } - public void nodesAdded(Iterable<Node> nodes) { + public void nodesAdded(Iterable<DiscoveryNode> nodes) { try { transport.nodesAdded(nodes); } catch (Exception e) { @@ -104,7 +104,7 @@ public void nodesAdded(Iterable<Node> nodes) { } } - public void nodesRemoved(Iterable<Node> nodes) { + public void nodesRemoved(Iterable<DiscoveryNode> nodes) { try { transport.nodesRemoved(nodes); } catch (Exception e) { @@ -123,14 +123,14 @@ public void throwConnectException(boolean throwConnectException) { this.throwConnectException = throwConnectException; } - public <T extends Streamable> 
TransportFuture<T> submitRequest(Node node, String action, Streamable message, + public <T extends Streamable> TransportFuture<T> submitRequest(DiscoveryNode node, String action, Streamable message, TransportResponseHandler<T> handler) throws TransportException { PlainTransportFuture<T> futureHandler = new PlainTransportFuture<T>(handler); sendRequest(node, action, message, futureHandler); return futureHandler; } - public <T extends Streamable> void sendRequest(final Node node, final String action, final Streamable message, + public <T extends Streamable> void sendRequest(final DiscoveryNode node, final String action, final Streamable message, final TransportResponseHandler<T> handler) throws TransportException { final long requestId = newRequestId(); try { diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/local/LocalTransport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/local/LocalTransport.java index 7f3ed39437367..31a9e41c1d209 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/local/LocalTransport.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/local/LocalTransport.java @@ -21,7 +21,7 @@ import com.google.inject.Inject; import org.elasticsearch.ElasticSearchException; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import org.elasticsearch.util.Nullable; @@ -92,13 +92,13 @@ public LocalTransport(ThreadPool threadPool) { return boundAddress; } - @Override public void nodesAdded(Iterable<Node> nodes) { + @Override public void nodesAdded(Iterable<DiscoveryNode> nodes) { } - @Override public void nodesRemoved(Iterable<Node> nodes) { + @Override public void nodesRemoved(Iterable<DiscoveryNode> nodes) { } - @Override public <T extends Streamable> void sendRequest(final Node node, final long requestId, final String action, + 
@Override public <T extends Streamable> void sendRequest(final DiscoveryNode node, final long requestId, final String action, final Streamable message, final TransportResponseHandler<T> handler) throws IOException, TransportException { HandlesStreamOutput stream = BytesStreamOutput.Cached.cachedHandles(); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java index 03820ff645e23..f11bcfbe9d805 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java @@ -23,7 +23,7 @@ import com.google.inject.Inject; import org.elasticsearch.ElasticSearchException; import org.elasticsearch.ElasticSearchIllegalStateException; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import org.elasticsearch.util.SizeValue; @@ -355,7 +355,7 @@ TransportAddress wrapAddress(SocketAddress socketAddress) { private static final byte[] LENGTH_PLACEHOLDER = new byte[4]; - @Override public <T extends Streamable> void sendRequest(Node node, long requestId, String action, + @Override public <T extends Streamable> void sendRequest(DiscoveryNode node, long requestId, String action, Streamable streamable, final TransportResponseHandler<T> handler) throws IOException, TransportException { Channel targetChannel = nodeChannel(node); @@ -391,11 +391,11 @@ TransportAddress wrapAddress(SocketAddress socketAddress) { // }); } - @Override public void nodesAdded(Iterable<Node> nodes) { + @Override public void nodesAdded(Iterable<DiscoveryNode> nodes) { if (!lifecycle.started()) { throw new ElasticSearchIllegalStateException("Can't add nodes to a stopped transport"); } - for (Node node : nodes) { + for 
(DiscoveryNode node : nodes) { try { nodeChannel(node); } catch (Exception e) { @@ -404,8 +404,8 @@ TransportAddress wrapAddress(SocketAddress socketAddress) { } } - @Override public void nodesRemoved(Iterable<Node> nodes) { - for (Node node : nodes) { + @Override public void nodesRemoved(Iterable<DiscoveryNode> nodes) { + for (DiscoveryNode node : nodes) { NodeConnections nodeConnections = clientChannels.remove(node.id()); if (nodeConnections != null) { nodeConnections.close(); @@ -413,7 +413,7 @@ TransportAddress wrapAddress(SocketAddress socketAddress) { } } - private Channel nodeChannel(Node node) throws ConnectTransportException { + private Channel nodeChannel(DiscoveryNode node) throws ConnectTransportException { if (node == null) { throw new ConnectTransportException(node, "Can't connect to a null node"); } diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/SingleShardNoBackupsRoutingStrategyTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/SingleShardNoBackupsRoutingStrategyTests.java index 62db40f7cf4cb..bae7350ea7bb5 100644 --- a/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/SingleShardNoBackupsRoutingStrategyTests.java +++ b/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/SingleShardNoBackupsRoutingStrategyTests.java @@ -21,8 +21,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.MutableShardRouting; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -40,7 +40,7 @@ import static org.elasticsearch.cluster.ClusterState.*; import static 
org.elasticsearch.cluster.metadata.IndexMetaData.*; import static org.elasticsearch.cluster.metadata.MetaData.*; -import static org.elasticsearch.cluster.node.Nodes.*; +import static org.elasticsearch.cluster.node.DiscoveryNodes.*; import static org.elasticsearch.cluster.routing.RoutingBuilders.*; import static org.elasticsearch.cluster.routing.ShardRoutingState.*; import static org.hamcrest.MatcherAssert.*; @@ -231,8 +231,8 @@ public class SingleShardNoBackupsRoutingStrategyTests { } logger.info("Adding " + (numberOfIndices / 2) + " nodes"); - Nodes.Builder nodesBuilder = newNodesBuilder(); - List<Node> nodes = newArrayList(); + DiscoveryNodes.Builder nodesBuilder = newNodesBuilder(); + List<DiscoveryNode> nodes = newArrayList(); for (int i = 0; i < (numberOfIndices / 2); i++) { nodesBuilder.put(newNode("node" + i)); } @@ -436,7 +436,7 @@ public class SingleShardNoBackupsRoutingStrategyTests { } } - private Node newNode(String nodeId) { - return new Node(nodeId, DummyTransportAddress.INSTANCE); + private DiscoveryNode newNode(String nodeId) { + return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE); } } \ No newline at end of file diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/SingleShardOneBackupRoutingStrategyTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/SingleShardOneBackupRoutingStrategyTests.java index 24bc9e03399f5..5a804f9355a55 100644 --- a/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/SingleShardOneBackupRoutingStrategyTests.java +++ b/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/SingleShardOneBackupRoutingStrategyTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNodes; 
import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.util.logging.Loggers; @@ -32,7 +32,7 @@ import static org.elasticsearch.cluster.ClusterState.*; import static org.elasticsearch.cluster.metadata.IndexMetaData.*; import static org.elasticsearch.cluster.metadata.MetaData.*; -import static org.elasticsearch.cluster.node.Nodes.*; +import static org.elasticsearch.cluster.node.DiscoveryNodes.*; import static org.elasticsearch.cluster.routing.RoutingBuilders.*; import static org.elasticsearch.cluster.routing.ShardRoutingState.*; import static org.hamcrest.MatcherAssert.*; @@ -178,7 +178,7 @@ public class SingleShardOneBackupRoutingStrategyTests { assertThat(routingTable.index("test").shard(0).backupsShards().get(0).currentNodeId(), equalTo("node3")); } - private Node newNode(String nodeId) { - return new Node(nodeId, DummyTransportAddress.INSTANCE); + private DiscoveryNode newNode(String nodeId) { + return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE); } } diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/TenShardsOneBackupRoutingTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/TenShardsOneBackupRoutingTests.java index b7abd607b93ff..0f374ec72211c 100644 --- a/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/TenShardsOneBackupRoutingTests.java +++ b/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/routing/strategy/TenShardsOneBackupRoutingTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.util.logging.Loggers; @@ -32,7 +32,7 @@ import static org.elasticsearch.cluster.ClusterState.*; import static 
org.elasticsearch.cluster.metadata.IndexMetaData.*; import static org.elasticsearch.cluster.metadata.MetaData.*; -import static org.elasticsearch.cluster.node.Nodes.*; +import static org.elasticsearch.cluster.node.DiscoveryNodes.*; import static org.elasticsearch.cluster.routing.RoutingBuilders.*; import static org.elasticsearch.cluster.routing.ShardRoutingState.*; import static org.hamcrest.MatcherAssert.*; @@ -182,7 +182,7 @@ public class TenShardsOneBackupRoutingTests { assertThat(routingNodes.node("node3").numberOfShardsWithState(STARTED), equalTo(6)); } - private Node newNode(String nodeId) { - return new Node(nodeId, DummyTransportAddress.INSTANCE); + private DiscoveryNode newNode(String nodeId) { + return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE); } } diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index b360935cbaf74..b245dcb0b90a4 100644 --- a/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/modules/elasticsearch/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -21,8 +21,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.Node; -import org.elasticsearch.cluster.node.Nodes; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.strategy.DefaultShardsRoutingStrategy; import org.elasticsearch.util.io.stream.BytesStreamInput; @@ -52,7 +52,7 @@ public class ClusterSerializationTests { .add(indexRoutingTable("test").initializeEmpty(metaData.index("test"))) .build(); - Nodes nodes = 
Nodes.newNodesBuilder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).localNodeId("node1").masterNodeId("node2").build(); + DiscoveryNodes nodes = DiscoveryNodes.newNodesBuilder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).localNodeId("node1").masterNodeId("node2").build(); ClusterState clusterState = newClusterStateBuilder().nodes(nodes).metaData(metaData).routingTable(routingTable).build(); @@ -74,7 +74,7 @@ public class ClusterSerializationTests { .add(indexRoutingTable("test").initializeEmpty(metaData.index("test"))) .build(); - Nodes nodes = Nodes.newNodesBuilder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).build(); + DiscoveryNodes nodes = DiscoveryNodes.newNodesBuilder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).build(); ClusterState clusterState = newClusterStateBuilder().nodes(nodes).metaData(metaData).routingTable(routingTable).build(); @@ -89,7 +89,7 @@ public class ClusterSerializationTests { assertThat(target.prettyPrint(), equalTo(source.prettyPrint())); } - private Node newNode(String nodeId) { - return new Node(nodeId, DummyTransportAddress.INSTANCE); + private DiscoveryNode newNode(String nodeId) { + return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE); } } diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java index 8d8bc34b32a87..79e36daaf3ee0 100644 --- a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java +++ b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/local/SimpleLocalTransportTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport.local; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.threadpool.scaling.ScalingThreadPool; import org.elasticsearch.transport.*; @@ -41,17 +41,17 @@ public class SimpleLocalTransportTests { private TransportService serviceA; private TransportService serviceB; - private Node serviceANode; - private Node serviceBNode; + private DiscoveryNode serviceANode; + private DiscoveryNode serviceBNode; @BeforeClass public void setUp() { threadPool = new ScalingThreadPool(); serviceA = new TransportService(new LocalTransport(threadPool), threadPool).start(); - serviceANode = new Node("A", serviceA.boundAddress().publishAddress()); + serviceANode = new DiscoveryNode("A", serviceA.boundAddress().publishAddress()); serviceB = new TransportService(new LocalTransport(threadPool), threadPool).start(); - serviceBNode = new Node("B", serviceB.boundAddress().publishAddress()); + serviceBNode = new DiscoveryNode("B", serviceB.boundAddress().publishAddress()); } @AfterClass public void tearDown() { diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java index f238cc6c89147..c004b914165d8 100644 --- a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java +++ b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.transport.netty; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.scaling.ScalingThreadPool; import org.elasticsearch.transport.*; @@ -41,17 +41,17 @@ public class SimpleNettyTransportTests { private TransportService serviceA; private TransportService serviceB; - private Node serviceANode; - private Node serviceBNode; + private DiscoveryNode serviceANode; + private DiscoveryNode 
serviceBNode; @BeforeClass public void setUp() { threadPool = new ScalingThreadPool(); serviceA = new TransportService(new NettyTransport(threadPool), threadPool).start(); - serviceANode = new Node("A", serviceA.boundAddress().publishAddress()); + serviceANode = new DiscoveryNode("A", serviceA.boundAddress().publishAddress()); serviceB = new TransportService(new NettyTransport(threadPool), threadPool).start(); - serviceBNode = new Node("B", serviceB.boundAddress().publishAddress()); + serviceBNode = new DiscoveryNode("B", serviceB.boundAddress().publishAddress()); } @AfterClass public void tearDown() { diff --git a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyClient.java b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyClient.java index ab8ad69f29876..b5fd824dbeddb 100644 --- a/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyClient.java +++ b/modules/elasticsearch/src/test/java/org/elasticsearch/transport/netty/benchmark/BenchmarkNettyClient.java @@ -20,7 +20,7 @@ package org.elasticsearch.transport.netty.benchmark; import com.google.common.collect.Lists; -import org.elasticsearch.cluster.node.Node; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.cached.CachedThreadPool; import org.elasticsearch.transport.BaseTransportResponseHandler; @@ -60,7 +60,7 @@ public static void main(String[] args) { final ThreadPool threadPool = new CachedThreadPool(); final TransportService transportService = new TransportService(new NettyTransport(settings, threadPool), threadPool).start(); - final Node node = new Node("server", new InetSocketTransportAddress("localhost", 9999)); + final DiscoveryNode node = new DiscoveryNode("server", new InetSocketTransportAddress("localhost", 9999)); transportService.nodesAdded(Lists.newArrayList(node));
14bc62302f5db980c5c21bcb64af047ca68241b5
restlet-framework-java
- The CLAP connector didn't set the- expiration date based on its 'timeToLive' parameter. Reported by- Peter Becker.--
c
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet.example/src/org/restlet/example/jaxrs/GuardedExample.java b/modules/org.restlet.example/src/org/restlet/example/jaxrs/GuardedExample.java index 845d842af6..5d88624c9b 100644 --- a/modules/org.restlet.example/src/org/restlet/example/jaxrs/GuardedExample.java +++ b/modules/org.restlet.example/src/org/restlet/example/jaxrs/GuardedExample.java @@ -32,14 +32,14 @@ import org.restlet.Component; import org.restlet.Server; -import org.restlet.security.ChallengeGuard; -import org.restlet.security.MemoryRealm; -import org.restlet.security.Organization; -import org.restlet.security.User; import org.restlet.data.ChallengeScheme; import org.restlet.data.Protocol; import org.restlet.ext.jaxrs.JaxRsApplication; import org.restlet.ext.jaxrs.RoleChecker; +import org.restlet.security.ChallengeGuard; +import org.restlet.security.MemoryRealm; +import org.restlet.security.Organization; +import org.restlet.security.User; /** * <p> @@ -61,6 +61,7 @@ * @see ExampleServer * @see ExampleApplication */ +@SuppressWarnings("deprecation") public class GuardedExample { /** diff --git a/modules/org.restlet/src/org/restlet/engine/local/ClapClientHelper.java b/modules/org.restlet/src/org/restlet/engine/local/ClapClientHelper.java index 0a995072d9..282106ea99 100644 --- a/modules/org.restlet/src/org/restlet/engine/local/ClapClientHelper.java +++ b/modules/org.restlet/src/org/restlet/engine/local/ClapClientHelper.java @@ -168,6 +168,16 @@ protected void handleClassLoader(Request request, Response response, output.setIdentifier(request.getResourceRef()); output.setModificationDate(modificationDate); + // Update the expiration date + long timeToLive = getTimeToLive(); + if (timeToLive == 0) { + output.setExpirationDate(new Date()); + } else if (timeToLive > 0) { + output.setExpirationDate(new Date(System + .currentTimeMillis() + + (1000L * timeToLive))); + } + // Update the metadata based on file extensions final String name = path .substring(path.lastIndexOf('/') + 1); 
diff --git a/modules/org.restlet/src/org/restlet/engine/local/LocalClientHelper.java b/modules/org.restlet/src/org/restlet/engine/local/LocalClientHelper.java index c91301f4de..7f5f217b66 100644 --- a/modules/org.restlet/src/org/restlet/engine/local/LocalClientHelper.java +++ b/modules/org.restlet/src/org/restlet/engine/local/LocalClientHelper.java @@ -39,7 +39,6 @@ import org.restlet.representation.Variant; import org.restlet.service.MetadataService; - /** * Connector to the local resources accessible via file system, class loaders * and similar mechanisms. Here is the list of parameters that are supported: @@ -54,7 +53,8 @@ * <td>timeToLive</td> * <td>int</td> * <td>600</td> - * <td>Time to live for a file representation before it expires (in seconds).</td> + * <td>Time to live for a representation before it expires (in seconds). If you + * set the value to '0', the representation will never expire.</td> * </tr> * <tr> * <td>defaultLanguage</td>
6d224823f7340a1de4160c5127f8f4b46af9c120
eclipse$m2e-core
refactor openXmlEditor() method to open also files coming from local repository, additionally split it's functionality for reuse and further refactoring
p
https://github.com/eclipse-m2e/m2e-core
diff --git a/org.eclipse.m2e.editor.xml/src/main/java/org/eclipse/m2e/editor/xml/PomHyperlinkDetector.java b/org.eclipse.m2e.editor.xml/src/main/java/org/eclipse/m2e/editor/xml/PomHyperlinkDetector.java index 70ff4c0e2b..cc0d5dc043 100644 --- a/org.eclipse.m2e.editor.xml/src/main/java/org/eclipse/m2e/editor/xml/PomHyperlinkDetector.java +++ b/org.eclipse.m2e.editor.xml/src/main/java/org/eclipse/m2e/editor/xml/PomHyperlinkDetector.java @@ -12,6 +12,10 @@ package org.eclipse.m2e.editor.xml; import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.List; @@ -48,6 +52,7 @@ import org.eclipse.jface.text.hyperlink.IHyperlinkDetector; import org.eclipse.osgi.util.NLS; import org.eclipse.swt.widgets.Display; +import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IEditorPart; import org.eclipse.ui.IWorkbenchPage; import org.eclipse.ui.IWorkbenchWindow; @@ -61,6 +66,8 @@ import org.eclipse.m2e.core.MavenPlugin; import org.eclipse.m2e.core.actions.OpenPomAction; +import org.eclipse.m2e.core.actions.OpenPomAction.MavenPathStorageEditorInput; +import org.eclipse.m2e.core.core.MavenLogger; import org.eclipse.m2e.core.project.IMavenProjectFacade; import org.eclipse.m2e.editor.xml.internal.Messages; import org.eclipse.m2e.editor.xml.internal.NodeOperation; @@ -246,7 +253,7 @@ public void open() { File file = XmlUtils.fileForInputLocation(openLocation); if (file != null) { IFileStore fileStore = EFS.getLocalFileSystem().getStore(file.toURI()); - openXmlEditor(fileStore, openLocation.getLineNumber(), openLocation.getColumnNumber()); + openXmlEditor(fileStore, openLocation.getLineNumber(), openLocation.getColumnNumber(), openLocation.getSource().getModelId()); } } } @@ -369,7 +376,7 @@ public void open() { File file = XmlUtils.fileForInputLocation(location); if (file != null) { IFileStore fileStore = 
EFS.getLocalFileSystem().getStore(file.toURI()); - openXmlEditor(fileStore, location.getLineNumber(), location.getColumnNumber()); + openXmlEditor(fileStore, location.getLineNumber(), location.getColumnNumber(), location.getSource().getModelId()); } } } @@ -474,9 +481,15 @@ protected IStatus run(IProgressMonitor monitor) { if (versionString == null) { return Status.OK_STATUS; } - OpenPomAction.openEditor(gridString, + final IEditorPart page = OpenPomAction.openEditor(gridString, artidString, versionString, monitor); +// TODO: it's preferable to open the xml page, but this code will blink and open overview first and later switch. looks bad +// Display.getDefault().syncExec(new Runnable() { +// public void run() { +// selectEditorPage(page); +// } +// }); return Status.OK_STATUS; } }.schedule(); @@ -542,36 +555,31 @@ public String toString() { private void openXmlEditor(final IFileStore fileStore) { - openXmlEditor(fileStore, -1, -1); + openXmlEditor(fileStore, -1, -1, fileStore.getName()); } - private void openXmlEditor(final IFileStore fileStore, int line, int column) { + private void openXmlEditor(final IFileStore fileStore, int line, int column, String name) { + assert fileStore != null; IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow(); if(window != null) { IWorkbenchPage page = window.getActivePage(); if(page != null) { try { - IEditorPart part = IDE.openEditorOnFileStore(page, fileStore); - if(part instanceof FormEditor) { - FormEditor ed = (FormEditor) part; - ed.setActivePage(null); //null means source, always or just in the case of MavenPomEditor? 
- if(line != -1) { - if(ed.getActiveEditor() instanceof StructuredTextEditor) { - StructuredTextEditor structured = (StructuredTextEditor) ed.getActiveEditor(); - // convert the line and Column numbers to an offset: - IDocument doc = structured.getTextViewer().getDocument(); - if (doc instanceof IStructuredDocument) { - IStructuredDocument document = (IStructuredDocument) doc; - try { - int offset = document.getLineOffset(line - 1); - structured.selectAndReveal(offset + column - 1, 0); - } catch(BadLocationException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - } - } + if(fileStore.getName().equals("pom.xml")) { + IEditorPart part = IDE.openEditorOnFileStore(page, fileStore); + reveal(selectEditorPage(part), line, column); + } else { + //we need special EditorInput for stuff from repository + name = name + ".pom"; //$NON-NLS-1$ + File file = new File(fileStore.toURI()); + try { + IEditorInput input = new MavenPathStorageEditorInput(name, name, file.getAbsolutePath(), + readStream(new FileInputStream(file))); + IEditorPart part = OpenPomAction.openEditor(input, name); + reveal(selectEditorPage(part), line, column); + } catch(IOException e) { + MavenLogger.log("failed opening editor", e); + } } } catch(PartInitException e) { MessageDialog.openInformation( @@ -583,6 +591,65 @@ private void openXmlEditor(final IFileStore fileStore, int line, int column) { } } } + + private StructuredTextEditor selectEditorPage(IEditorPart part) { + if (part == null) { + return null; + } + if (part instanceof FormEditor) { + FormEditor ed = (FormEditor) part; + ed.setActivePage(null); //null means source, always or just in the case of MavenPomEditor? 
+ if (ed.getActiveEditor() instanceof StructuredTextEditor) { + return (StructuredTextEditor) ed.getActiveEditor(); + } + } + return null; + } + + private void reveal(StructuredTextEditor structured, int line, int column) { + if (structured == null || line < 0 || column < 0) { + return; + } + IDocument doc = structured.getTextViewer().getDocument(); + if (doc instanceof IStructuredDocument) { + IStructuredDocument document = (IStructuredDocument) doc; + try { + int offset = document.getLineOffset(line - 1); + structured.selectAndReveal(offset + column - 1, 0); + } catch(BadLocationException e) { + MavenLogger.log("failed selecting part of editor", e); + } + } + } + + /** + * duplicate of OpenPomAction method + * @param is + * @return + * @throws IOException + */ + private static byte[] readStream(InputStream is) throws IOException { + byte[] b = new byte[is.available()]; + int len = 0; + while(true) { + int n = is.read(b, len, b.length - len); + if(n == -1) { + if(len < b.length) { + byte[] c = new byte[len]; + System.arraycopy(b, 0, c, 0, len); + b = c; + } + return b; + } + len += n; + if(len == b.length) { + byte[] c = new byte[b.length + 1000]; + System.arraycopy(b, 0, c, 0, len); + b = c; + } + } + } + static class ExpressionRegion implements IRegion {
a47d981c6e88178558d3b07fa53c903654a0e321
hadoop
YARN-975. Added a file-system implementation for- HistoryStorage. Contributed by Zhijie Shen. svn merge --ignore-ancestry -c- 1556727 ../YARN-321--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1562184 13f79535-47bb-0310-9956-ffa450edef68-
a
https://github.com/apache/hadoop
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 7ab49ac347199..48c97fc26d9b0 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -477,6 +477,9 @@ Branch YARN-321: Generic ApplicationHistoryService YARN-1007. Enhance History Reader interface for Containers. (Mayank Bansal via devaraj) + YARN-975. Added a file-system implementation for HistoryStorage. (Zhijie Shen + via vinodkv) + Release 2.2.0 - 2013-10-13 INCOMPATIBLE CHANGES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index dc195858cb827..009dda3c16062 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -931,6 +931,19 @@ public class YarnConfiguration extends Configuration { public static final String YARN_APP_CONTAINER_LOG_BACKUPS = YARN_PREFIX + "app.container.log.backups"; + //////////////////////////////// + // AHS Configs + //////////////////////////////// + + public static final String AHS_PREFIX = YARN_PREFIX + "ahs."; + + /** URI for FileSystemApplicationHistoryStore */ + public static final String FS_HISTORY_STORE_URI = AHS_PREFIX + "fs-history-store.uri"; + + /** T-file compression types used to compress history data.*/ + public static final String FS_HISTORY_STORE_COMPRESSION_TYPE = AHS_PREFIX + "fs-history-store.compression-type"; + public static final String DEFAULT_FS_HISTORY_STORE_COMPRESSION_TYPE = "none"; + //////////////////////////////// // Other Configs //////////////////////////////// diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml index ba6264e0ae31f..b831158460fa4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml @@ -1041,6 +1041,23 @@ <value></value> </property> + <!-- Application History Service's Configuration--> + + <property> + <description>URI pointing to the location of the FileSystem path where + the history will be persisted. This must be supplied when using + org.apache.hadoop.yarn.server.applicationhistoryservice.FileSystemApplicationHistoryStore + as the value for yarn.resourcemanager.ahs.writer.class</description> + <name>yarn.ahs.fs-history-store.uri</name> + <value>${hadoop.log.dir}/yarn/system/ahstore</value> + </property> + + <property> + <description>T-file compression types used to compress history data.</description> + <name>yarn.ahs.fs-history-store.compression-type</name> + <value>none</value> + </property> + <!-- Other configuration --> <property> <description>The interval that the yarn client library uses to poll the diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java new file mode 100644 index 0000000000000..b4d97f314db59 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java @@ -0,0 +1,860 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import java.io.DataInput; +import java.io.DataInputStream; +import java.io.DataOutput; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.io.file.tfile.TFile; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ContainerId; +import 
org.apache.hadoop.yarn.api.records.FinalApplicationStatus; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto; +import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptFinishDataPBImpl; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptStartDataPBImpl; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationFinishDataPBImpl; +import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationStartDataPBImpl; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerFinishDataPBImpl; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerStartDataPBImpl; +import org.apache.hadoop.yarn.util.ConverterUtils; + +import com.google.protobuf.InvalidProtocolBufferException; + +/** + * File system implementation of {@link ApplicationHistoryStore}. In this + * implementation, one application will have just one file in the file system, + * which contains all the history data of one application, and its attempts and + * containers. {@link #applicationStarted(ApplicationStartData)} is supposed to + * be invoked first when writing any history data of one application and it will + * open a file, while {@link #applicationFinished(ApplicationFinishData)} is + * supposed to be last writing operation and will close the file. + */ +@Public +@Unstable +public class FileSystemApplicationHistoryStore extends AbstractService + implements ApplicationHistoryStore { + + private static final Log LOG = LogFactory + .getLog(FileSystemApplicationHistoryStore.class); + + private static final String ROOT_DIR_NAME = "ApplicationHistoryDataRoot"; + private static final int MIN_BLOCK_SIZE = 256 * 1024; + private static final String START_DATA_SUFFIX = "_start"; + private static final String FINISH_DATA_SUFFIX = "_finish"; + private static final FsPermission ROOT_DIR_UMASK = + FsPermission.createImmutable((short) 0740); + private static final FsPermission HISTORY_FILE_UMASK = + FsPermission.createImmutable((short) 0640); + + private FileSystem fs; + private Path rootDirPath; + + private ConcurrentMap<ApplicationId, HistoryFileWriter> outstandingWriters = + new ConcurrentHashMap<ApplicationId, HistoryFileWriter>(); + + public FileSystemApplicationHistoryStore() { + super(FileSystemApplicationHistoryStore.class.getName()); + } + + @Override + 
public void serviceInit(Configuration conf) throws Exception { + Path fsWorkingPath = new Path( + conf.get(YarnConfiguration.FS_HISTORY_STORE_URI)); + rootDirPath = new Path(fsWorkingPath, ROOT_DIR_NAME); + try { + fs = fsWorkingPath.getFileSystem(conf); + fs.mkdirs(rootDirPath); + fs.setPermission(rootDirPath, ROOT_DIR_UMASK); + } catch (IOException e) { + LOG.error("Error when initializing FileSystemHistoryStorage", e); + throw e; + } + super.serviceInit(conf); + } + + @Override + public void serviceStop() throws Exception { + try { + for (Entry<ApplicationId, HistoryFileWriter> entry : outstandingWriters + .entrySet()) { + entry.getValue().close(); + } + outstandingWriters.clear(); + } finally { + IOUtils.cleanup(LOG, fs); + } + super.serviceStop(); + } + + @Override + public ApplicationHistoryData getApplication(ApplicationId appId) + throws IOException { + HistoryFileReader hfReader = getHistoryFileReader(appId); + try { + boolean readStartData = false; + boolean readFinishData = false; + ApplicationHistoryData historyData = + ApplicationHistoryData.newInstance( + appId, null, null, null, null, Long.MIN_VALUE, Long.MIN_VALUE, + Long.MAX_VALUE, null, FinalApplicationStatus.UNDEFINED, null); + while ((!readStartData || !readFinishData) && hfReader.hasNext()) { + HistoryFileReader.Entry entry = hfReader.next(); + if (entry.key.id.equals(appId.toString())) { + if (entry.key.suffix.equals(START_DATA_SUFFIX)) { + ApplicationStartData startData = + parseApplicationStartData(entry.value); + mergeApplicationHistoryData(historyData, startData); + readStartData = true; + } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { + ApplicationFinishData finishData = + parseApplicationFinishData(entry.value); + mergeApplicationHistoryData(historyData, finishData); + readFinishData = true; + } + } + } + if (!readStartData && !readFinishData) { + return null; + } + if (!readStartData) { + LOG.warn("Start information is missing for application " + appId); + } + if 
(!readFinishData) { + LOG.warn("Finish information is missing for application " + appId); + } + LOG.info("Completed reading history information of application " + appId); + return historyData; + } catch (IOException e) { + LOG.error("Error when reading history file of application " + appId); + throw e; + } finally { + hfReader.close(); + } + } + + @Override + public Map<ApplicationId, ApplicationHistoryData> getAllApplications() + throws IOException { + Map<ApplicationId, ApplicationHistoryData> historyDataMap = + new HashMap<ApplicationId, ApplicationHistoryData>(); + FileStatus[] files = fs.listStatus(rootDirPath); + for (FileStatus file : files) { + ApplicationId appId = + ConverterUtils.toApplicationId(file.getPath().getName()); + try { + ApplicationHistoryData historyData = getApplication(appId); + if (historyData != null) { + historyDataMap.put(appId, historyData); + } + } catch (IOException e) { + // Eat the exception not to disturb the getting the next + // ApplicationHistoryData + LOG.error("History information of application " + appId + + " is not included into the result due to the exception", e); + } + } + return historyDataMap; + } + + @Override + public Map<ApplicationAttemptId, ApplicationAttemptHistoryData> + getApplicationAttempts(ApplicationId appId) throws IOException { + Map<ApplicationAttemptId, ApplicationAttemptHistoryData> historyDataMap = + new HashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>(); + Map<ApplicationAttemptId, StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>> startFinshDataMap = + new HashMap<ApplicationAttemptId, StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>>(); + HistoryFileReader hfReader = getHistoryFileReader(appId); + try { + while (hfReader.hasNext()) { + HistoryFileReader.Entry entry = hfReader.next(); + if (entry.key.id.startsWith(ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) { + if (entry.key.suffix.equals(START_DATA_SUFFIX)) { + 
retrieveStartFinishData(appId, entry, startFinshDataMap, true); + } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { + retrieveStartFinishData(appId, entry, startFinshDataMap, false); + } + } + } + LOG.info("Completed reading history information of all application" + + " attempts of application " + appId); + } catch (IOException e) { + LOG.info("Error when reading history information of some application" + + " attempts of application " + appId); + } finally { + hfReader.close(); + } + for (Map.Entry<ApplicationAttemptId, StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>> entry : startFinshDataMap + .entrySet()) { + ApplicationAttemptHistoryData historyData = + ApplicationAttemptHistoryData.newInstance( + entry.getKey(), null, -1, null, null, null, + FinalApplicationStatus.UNDEFINED, null); + mergeApplicationAttemptHistoryData(historyData, + entry.getValue().startData); + mergeApplicationAttemptHistoryData(historyData, + entry.getValue().finishData); + historyDataMap.put(entry.getKey(), historyData); + } + return historyDataMap; + } + + private + void + retrieveStartFinishData( + ApplicationId appId, + HistoryFileReader.Entry entry, + Map<ApplicationAttemptId, StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>> startFinshDataMap, + boolean start) throws IOException { + ApplicationAttemptId appAttemptId = + ConverterUtils.toApplicationAttemptId(entry.key.id); + if (appAttemptId.getApplicationId().equals(appId)) { + StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData> pair = + startFinshDataMap.get(appAttemptId); + if (pair == null) { + pair = + new StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>(); + startFinshDataMap.put(appAttemptId, pair); + } + if (start) { + pair.startData = parseApplicationAttemptStartData(entry.value); + } else { + pair.finishData = parseApplicationAttemptFinishData(entry.value); + } + } + } + + @Override + public 
ApplicationAttemptHistoryData getApplicationAttempt( + ApplicationAttemptId appAttemptId) throws IOException { + HistoryFileReader hfReader = + getHistoryFileReader(appAttemptId.getApplicationId()); + try { + boolean readStartData = false; + boolean readFinishData = false; + ApplicationAttemptHistoryData historyData = + ApplicationAttemptHistoryData.newInstance( + appAttemptId, null, -1, null, null, null, + FinalApplicationStatus.UNDEFINED, null); + while ((!readStartData || !readFinishData) && hfReader.hasNext()) { + HistoryFileReader.Entry entry = hfReader.next(); + if (entry.key.id.equals(appAttemptId.toString())) { + if (entry.key.suffix.equals(START_DATA_SUFFIX)) { + ApplicationAttemptStartData startData = + parseApplicationAttemptStartData(entry.value); + mergeApplicationAttemptHistoryData(historyData, startData); + readStartData = true; + } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { + ApplicationAttemptFinishData finishData = + parseApplicationAttemptFinishData(entry.value); + mergeApplicationAttemptHistoryData(historyData, finishData); + readFinishData = true; + } + } + } + if (!readStartData && !readFinishData) { + return null; + } + if (!readStartData) { + LOG.warn("Start information is missing for application attempt " + + appAttemptId); + } + if (!readFinishData) { + LOG.warn("Finish information is missing for application attempt " + + appAttemptId); + } + LOG.info("Completed reading history information of application attempt " + + appAttemptId); + return historyData; + } catch (IOException e) { + LOG.error("Error when reading history file of application attempt" + + appAttemptId); + throw e; + } finally { + hfReader.close(); + } + } + + @Override + public ContainerHistoryData getContainer(ContainerId containerId) + throws IOException { + HistoryFileReader hfReader = + getHistoryFileReader(containerId.getApplicationAttemptId() + .getApplicationId()); + try { + boolean readStartData = false; + boolean readFinishData = false; + 
ContainerHistoryData historyData = + ContainerHistoryData.newInstance(containerId, null, null, null, + Long.MIN_VALUE, Long.MAX_VALUE, null, null, Integer.MAX_VALUE, + null); + while ((!readStartData || !readFinishData) && hfReader.hasNext()) { + HistoryFileReader.Entry entry = hfReader.next(); + if (entry.key.id.equals(containerId.toString())) { + if (entry.key.suffix.equals(START_DATA_SUFFIX)) { + ContainerStartData startData = + parseContainerStartData(entry.value); + mergeContainerHistoryData(historyData, startData); + readStartData = true; + } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { + ContainerFinishData finishData = + parseContainerFinishData(entry.value); + mergeContainerHistoryData(historyData, finishData); + readFinishData = true; + } + } + } + if (!readStartData && !readFinishData) { + return null; + } + if (!readStartData) { + LOG.warn("Start information is missing for container " + containerId); + } + if (!readFinishData) { + LOG.warn("Finish information is missing for container " + containerId); + } + LOG.info("Completed reading history information of container " + + containerId); + return historyData; + } catch (IOException e) { + LOG.error("Error when reading history file of container " + containerId); + throw e; + } finally { + hfReader.close(); + } + } + + @Override + public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) + throws IOException { + ApplicationAttemptHistoryData attemptHistoryData = + getApplicationAttempt(appAttemptId); + if (attemptHistoryData == null + || attemptHistoryData.getMasterContainerId() == null) { + return null; + } + return getContainer(attemptHistoryData.getMasterContainerId()); + } + + @Override + public Map<ContainerId, ContainerHistoryData> getContainers( + ApplicationAttemptId appAttemptId) throws IOException { + Map<ContainerId, ContainerHistoryData> historyDataMap = + new HashMap<ContainerId, ContainerHistoryData>(); + Map<ContainerId, StartFinishDataPair<ContainerStartData, 
ContainerFinishData>> startFinshDataMap = + new HashMap<ContainerId, StartFinishDataPair<ContainerStartData, ContainerFinishData>>(); + HistoryFileReader hfReader = + getHistoryFileReader(appAttemptId.getApplicationId()); + try { + while (hfReader.hasNext()) { + HistoryFileReader.Entry entry = hfReader.next(); + if (entry.key.id.startsWith(ConverterUtils.CONTAINER_PREFIX)) { + if (entry.key.suffix.equals(START_DATA_SUFFIX)) { + retrieveStartFinishData(appAttemptId, entry, startFinshDataMap, + true); + } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { + retrieveStartFinishData(appAttemptId, entry, startFinshDataMap, + false); + } + } + } + LOG.info("Completed reading history information of all conatiners" + + " of application attempt " + appAttemptId); + } catch (IOException e) { + LOG.info("Error when reading history information of some containers" + + " of application attempt " + appAttemptId); + } finally { + hfReader.close(); + } + for (Map.Entry<ContainerId, StartFinishDataPair<ContainerStartData, ContainerFinishData>> entry : startFinshDataMap + .entrySet()) { + ContainerHistoryData historyData = + ContainerHistoryData.newInstance(entry.getKey(), null, null, null, + Long.MIN_VALUE, Long.MAX_VALUE, null, null, Integer.MAX_VALUE, + null); + mergeContainerHistoryData(historyData, entry.getValue().startData); + mergeContainerHistoryData(historyData, entry.getValue().finishData); + historyDataMap.put(entry.getKey(), historyData); + } + return historyDataMap; + } + + private + void + retrieveStartFinishData( + ApplicationAttemptId appAttemptId, + HistoryFileReader.Entry entry, + Map<ContainerId, StartFinishDataPair<ContainerStartData, ContainerFinishData>> startFinshDataMap, + boolean start) throws IOException { + ContainerId containerId = + ConverterUtils.toContainerId(entry.key.id); + if (containerId.getApplicationAttemptId().equals(appAttemptId)) { + StartFinishDataPair<ContainerStartData, ContainerFinishData> pair = + startFinshDataMap.get(containerId); 
+ if (pair == null) { + pair = + new StartFinishDataPair<ContainerStartData, ContainerFinishData>(); + startFinshDataMap.put(containerId, pair); + } + if (start) { + pair.startData = parseContainerStartData(entry.value); + } else { + pair.finishData = parseContainerFinishData(entry.value); + } + } + } + + @Override + public void applicationStarted(ApplicationStartData appStart) + throws IOException { + HistoryFileWriter hfWriter = + outstandingWriters.get(appStart.getApplicationId()); + if (hfWriter == null) { + Path applicationHistoryFile = + new Path(rootDirPath, appStart.getApplicationId().toString()); + try { + hfWriter = new HistoryFileWriter(applicationHistoryFile); + LOG.info("Opened history file of application " + + appStart.getApplicationId()); + } catch (IOException e) { + LOG.error("Error when openning history file of application " + + appStart.getApplicationId()); + throw e; + } + outstandingWriters.put(appStart.getApplicationId(), hfWriter); + } else { + throw new IOException("History file of application " + + appStart.getApplicationId() + " is already opened"); + } + assert appStart instanceof ApplicationStartDataPBImpl; + try { + hfWriter.writeHistoryData(new HistoryDataKey(appStart.getApplicationId() + .toString(), START_DATA_SUFFIX), + ((ApplicationStartDataPBImpl) appStart) + .getProto().toByteArray()); + LOG.info("Start information of application " + + appStart.getApplicationId() + " is written"); + } catch (IOException e) { + LOG.error("Error when writing start information of application " + + appStart.getApplicationId()); + throw e; + } + } + + @Override + public void applicationFinished(ApplicationFinishData appFinish) + throws IOException { + HistoryFileWriter hfWriter = + getHistoryFileWriter(appFinish.getApplicationId()); + assert appFinish instanceof ApplicationFinishDataPBImpl; + try { + hfWriter.writeHistoryData( + new HistoryDataKey(appFinish.getApplicationId().toString(), + FINISH_DATA_SUFFIX), + ((ApplicationFinishDataPBImpl) 
appFinish).getProto().toByteArray()); + LOG.info("Finish information of application " + + appFinish.getApplicationId() + " is written"); + } catch (IOException e) { + LOG.error("Error when writing finish information of application " + + appFinish.getApplicationId()); + throw e; + } finally { + hfWriter.close(); + outstandingWriters.remove(appFinish.getApplicationId()); + } + } + + @Override + public void applicationAttemptStarted( + ApplicationAttemptStartData appAttemptStart) throws IOException { + HistoryFileWriter hfWriter = + getHistoryFileWriter(appAttemptStart.getApplicationAttemptId() + .getApplicationId()); + assert appAttemptStart instanceof ApplicationAttemptStartDataPBImpl; + try { + hfWriter.writeHistoryData( + new HistoryDataKey(appAttemptStart.getApplicationAttemptId() + .toString(), + START_DATA_SUFFIX), + ((ApplicationAttemptStartDataPBImpl) appAttemptStart).getProto() + .toByteArray()); + LOG.info("Start information of application attempt " + + appAttemptStart.getApplicationAttemptId() + " is written"); + } catch (IOException e) { + LOG.error("Error when writing start information of application attempt " + + appAttemptStart.getApplicationAttemptId()); + throw e; + } + } + + @Override + public void applicationAttemptFinished( + ApplicationAttemptFinishData appAttemptFinish) throws IOException { + HistoryFileWriter hfWriter = + getHistoryFileWriter(appAttemptFinish.getApplicationAttemptId() + .getApplicationId()); + assert appAttemptFinish instanceof ApplicationAttemptFinishDataPBImpl; + try { + hfWriter.writeHistoryData( + new HistoryDataKey(appAttemptFinish.getApplicationAttemptId() + .toString(), + FINISH_DATA_SUFFIX), + ((ApplicationAttemptFinishDataPBImpl) appAttemptFinish).getProto() + .toByteArray()); + LOG.info("Finish information of application attempt " + + appAttemptFinish.getApplicationAttemptId() + " is written"); + } catch (IOException e) { + LOG.error("Error when writing finish information of application attempt " + + 
appAttemptFinish.getApplicationAttemptId()); + throw e; + } + } + + @Override + public void containerStarted(ContainerStartData containerStart) + throws IOException { + HistoryFileWriter hfWriter = + getHistoryFileWriter(containerStart.getContainerId() + .getApplicationAttemptId() + .getApplicationId()); + assert containerStart instanceof ContainerStartDataPBImpl; + try { + hfWriter.writeHistoryData( + new HistoryDataKey(containerStart.getContainerId().toString(), + START_DATA_SUFFIX), + ((ContainerStartDataPBImpl) containerStart).getProto().toByteArray()); + LOG.info("Start information of container " + + containerStart.getContainerId() + " is written"); + } catch (IOException e) { + LOG.error("Error when writing start information of container " + + containerStart.getContainerId()); + throw e; + } + } + + @Override + public void containerFinished(ContainerFinishData containerFinish) + throws IOException { + HistoryFileWriter hfWriter = + getHistoryFileWriter(containerFinish.getContainerId() + .getApplicationAttemptId().getApplicationId()); + assert containerFinish instanceof ContainerFinishDataPBImpl; + try { + hfWriter.writeHistoryData( + new HistoryDataKey(containerFinish.getContainerId().toString(), + FINISH_DATA_SUFFIX), + ((ContainerFinishDataPBImpl) containerFinish).getProto() + .toByteArray()); + LOG.info("Finish information of container " + + containerFinish.getContainerId() + " is written"); + } catch (IOException e) { + LOG.error("Error when writing finish information of container " + + containerFinish.getContainerId()); + } + } + + private static ApplicationStartData parseApplicationStartData(byte[] value) + throws InvalidProtocolBufferException { + return new ApplicationStartDataPBImpl( + ApplicationStartDataProto.parseFrom(value)); + } + + private static ApplicationFinishData parseApplicationFinishData(byte[] value) + throws InvalidProtocolBufferException { + return new ApplicationFinishDataPBImpl( + ApplicationFinishDataProto.parseFrom(value)); + } + 
+ private static ApplicationAttemptStartData parseApplicationAttemptStartData( + byte[] value) throws InvalidProtocolBufferException { + return new ApplicationAttemptStartDataPBImpl( + ApplicationAttemptStartDataProto.parseFrom(value)); + } + + private static ApplicationAttemptFinishData + parseApplicationAttemptFinishData( + byte[] value) throws InvalidProtocolBufferException { + return new ApplicationAttemptFinishDataPBImpl( + ApplicationAttemptFinishDataProto.parseFrom(value)); + } + + private static ContainerStartData parseContainerStartData(byte[] value) + throws InvalidProtocolBufferException { + return new ContainerStartDataPBImpl( + ContainerStartDataProto.parseFrom(value)); + } + + private static ContainerFinishData parseContainerFinishData(byte[] value) + throws InvalidProtocolBufferException { + return new ContainerFinishDataPBImpl( + ContainerFinishDataProto.parseFrom(value)); + } + + private static void mergeApplicationHistoryData( + ApplicationHistoryData historyData, + ApplicationStartData startData) { + historyData.setApplicationName(startData.getApplicationName()); + historyData.setApplicationType(startData.getApplicationType()); + historyData.setQueue(startData.getQueue()); + historyData.setUser(startData.getUser()); + historyData.setSubmitTime(startData.getSubmitTime()); + historyData.setStartTime(startData.getStartTime()); + } + + private static void mergeApplicationHistoryData( + ApplicationHistoryData historyData, + ApplicationFinishData finishData) { + historyData.setFinishTime(finishData.getFinishTime()); + historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); + historyData.setFinalApplicationStatus(finishData + .getFinalApplicationStatus()); + historyData.setYarnApplicationState(finishData.getYarnApplicationState()); + } + + private static void mergeApplicationAttemptHistoryData( + ApplicationAttemptHistoryData historyData, + ApplicationAttemptStartData startData) { + historyData.setHost(startData.getHost()); + 
historyData.setRPCPort(startData.getRPCPort()); + historyData.setMasterContainerId(startData.getMasterContainerId()); + } + + private static void mergeApplicationAttemptHistoryData( + ApplicationAttemptHistoryData historyData, + ApplicationAttemptFinishData finishData) { + historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); + historyData.setTrackingURL(finishData.getTrackingURL()); + historyData.setFinalApplicationStatus(finishData + .getFinalApplicationStatus()); + historyData.setYarnApplicationAttemptState(finishData + .getYarnApplicationAttemptState()); + } + + private static void mergeContainerHistoryData( + ContainerHistoryData historyData, ContainerStartData startData) { + historyData.setAllocatedResource(startData.getAllocatedResource()); + historyData.setAssignedNode(startData.getAssignedNode()); + historyData.setPriority(startData.getPriority()); + historyData.setStartTime(startData.getStartTime()); + } + + private static void mergeContainerHistoryData( + ContainerHistoryData historyData, ContainerFinishData finishData) { + historyData.setFinishTime(finishData.getFinishTime()); + historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); + historyData.setLogURL(finishData.getLogURL()); + historyData.setContainerExitStatus(finishData + .getContainerExitStatus()); + historyData.setContainerState(finishData.getContainerState()); + } + + private HistoryFileWriter getHistoryFileWriter(ApplicationId appId) + throws IOException { + HistoryFileWriter hfWriter = outstandingWriters.get(appId); + if (hfWriter == null) { + throw new IOException("History file of application " + appId + + " is not opened"); + } + return hfWriter; + } + + private HistoryFileReader getHistoryFileReader(ApplicationId appId) + throws IOException { + Path applicationHistoryFile = new Path(rootDirPath, appId.toString()); + if (!fs.exists(applicationHistoryFile)) { + throw new IOException("History file for application " + appId + + " is not found"); + } + // The history file 
is still under writing + if (outstandingWriters.containsKey(appId)) { + throw new IOException("History file for application " + appId + + " is under writing"); + } + return new HistoryFileReader(applicationHistoryFile); + } + + private class HistoryFileReader { + + private class Entry { + + private HistoryDataKey key; + private byte[] value; + + public Entry(HistoryDataKey key, byte[] value) { + this.key = key; + this.value = value; + } + } + + private FSDataInputStream fsdis; + private TFile.Reader reader; + private TFile.Reader.Scanner scanner; + + public HistoryFileReader(Path historyFile) throws IOException { + FSDataInputStream fsdis = fs.open(historyFile); + reader = + new TFile.Reader(fsdis, fs.getFileStatus(historyFile).getLen(), + getConfig()); + reset(); + } + + public boolean hasNext() { + return !scanner.atEnd(); + } + + public Entry next() throws IOException { + TFile.Reader.Scanner.Entry entry = scanner.entry(); + DataInputStream dis = entry.getKeyStream(); + HistoryDataKey key = new HistoryDataKey(); + key.readFields(dis); + dis = entry.getValueStream(); + byte[] value = new byte[entry.getValueLength()]; + dis.read(value); + scanner.advance(); + return new Entry(key, value); + } + + public void reset() throws IOException { + IOUtils.cleanup(LOG, scanner); + scanner = reader.createScanner(); + } + + public void close() { + IOUtils.cleanup(LOG, scanner, reader, fsdis); + } + + } + + private class HistoryFileWriter { + + private FSDataOutputStream fsdos; + private TFile.Writer writer; + + public HistoryFileWriter(Path historyFile) + throws IOException { + if (fs.exists(historyFile)) { + fsdos = fs.append(historyFile); + } else { + fsdos = fs.create(historyFile); + } + fs.setPermission(historyFile, HISTORY_FILE_UMASK); + writer = + new TFile.Writer(fsdos, MIN_BLOCK_SIZE, getConfig().get( + YarnConfiguration.FS_HISTORY_STORE_COMPRESSION_TYPE, + YarnConfiguration.DEFAULT_FS_HISTORY_STORE_COMPRESSION_TYPE), + null, getConfig()); + } + + public synchronized 
void close() { + IOUtils.cleanup(LOG, writer, fsdos); + } + + public synchronized void writeHistoryData(HistoryDataKey key, byte[] value) + throws IOException { + DataOutputStream dos = null; + try { + dos = writer.prepareAppendKey(-1); + key.write(dos); + } finally { + IOUtils.cleanup(LOG, dos); + } + try { + dos = writer.prepareAppendValue(value.length); + dos.write(value); + } finally { + IOUtils.cleanup(LOG, dos); + } + } + + } + + private static class HistoryDataKey implements Writable { + + private String id; + + private String suffix; + + public HistoryDataKey() { + this(null, null); + } + + public HistoryDataKey(String id, String suffix) { + this.id = id; + this.suffix = suffix; + } + + @Override + public void write(DataOutput out) throws IOException { + out.writeUTF(id); + out.writeUTF(suffix); + } + + @Override + public void readFields(DataInput in) throws IOException { + id = in.readUTF(); + suffix = in.readUTF(); + } + + } + + private static class StartFinishDataPair<S, F> { + + private S startData; + private F finishData; + + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java new file mode 100644 index 0000000000000..d4a431f4ecb7f --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java @@ -0,0 +1,198 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice; + +import java.io.IOException; +import java.net.URI; + +import junit.framework.Assert; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.RawLocalFileSystem; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.Priority; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; +import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class TestFileSystemApplicationHistoryStore extends + ApplicationHistoryStoreTestUtils { + + private FileSystem fs; + private Path fsWorkingPath; + + @Before + public void setup() throws Exception { + fs = new RawLocalFileSystem(); + Configuration conf = new Configuration(); + fs.initialize(new 
URI("/"), conf); + fsWorkingPath = new Path("Test"); + fs.delete(fsWorkingPath, true); + conf.set(YarnConfiguration.FS_HISTORY_STORE_URI, fsWorkingPath.toString()); + store = new FileSystemApplicationHistoryStore(); + store.init(conf); + store.start(); + } + + @After + public void tearDown() throws Exception { + store.stop(); + fs.delete(fsWorkingPath, true); + fs.close(); + } + + @Test + public void testReadWriteHistoryData() throws IOException { + testWriteHistoryData(5); + testReadHistoryData(5); + } + + private void testWriteHistoryData(int num) throws IOException { + // write application history data + for (int i = 1; i <= num; ++i) { + ApplicationId appId = ApplicationId.newInstance(0, i); + writeApplicationStartData(appId); + + // write application attempt history data + for (int j = 1; j <= num; ++j) { + ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(appId, j); + writeApplicationAttemptStartData(appAttemptId); + + // write container history data + for (int k = 1; k <= num; ++k) { + ContainerId containerId = ContainerId.newInstance(appAttemptId, k); + writeContainerStartData(containerId); + writeContainerFinishData(containerId); + + writeApplicationAttemptFinishData(appAttemptId); + } + } + + writeApplicationFinishData(appId); + } + } + + private void testReadHistoryData(int num) throws IOException { + // read application history data + Assert.assertEquals(num, store.getAllApplications().size()); + for (int i = 1; i <= num; ++i) { + ApplicationId appId = ApplicationId.newInstance(0, i); + ApplicationHistoryData appData = store.getApplication(appId); + Assert.assertNotNull(appData); + Assert.assertEquals(appId.toString(), appData.getApplicationName()); + Assert.assertEquals(appId.toString(), appData.getDiagnosticsInfo()); + + // read application attempt history data + Assert.assertEquals( + num, store.getApplicationAttempts(appId).size()); + for (int j = 1; j <= num; ++j) { + ApplicationAttemptId appAttemptId = + 
ApplicationAttemptId.newInstance(appId, j); + ApplicationAttemptHistoryData attemptData = + store.getApplicationAttempt(appAttemptId); + Assert.assertNotNull(attemptData); + Assert.assertEquals(appAttemptId.toString(), attemptData.getHost()); + Assert.assertEquals(appAttemptId.toString(), + attemptData.getDiagnosticsInfo()); + + // read container history data + Assert.assertEquals( + num, store.getContainers(appAttemptId).size()); + for (int k = 1; k <= num; ++k) { + ContainerId containerId = ContainerId.newInstance(appAttemptId, k); + ContainerHistoryData containerData = store.getContainer(containerId); + Assert.assertNotNull(containerData); + Assert.assertEquals(Priority.newInstance(containerId.getId()), + containerData.getPriority()); + Assert.assertEquals(containerId.toString(), + containerData.getDiagnosticsInfo()); + } + ContainerHistoryData masterContainer = + store.getAMContainer(appAttemptId); + Assert.assertNotNull(masterContainer); + Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1), + masterContainer.getContainerId()); + } + } + } + + @Test + public void testWriteAfterApplicationFinish() throws IOException { + ApplicationId appId = ApplicationId.newInstance(0, 1); + writeApplicationStartData(appId); + writeApplicationFinishData(appId); + // write application attempt history data + ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(appId, 1); + try { + writeApplicationAttemptStartData(appAttemptId); + Assert.fail(); + } catch (IOException e) { + Assert.assertTrue(e.getMessage().contains("is not opened")); + } + try { + writeApplicationAttemptFinishData(appAttemptId); + Assert.fail(); + } catch (IOException e) { + Assert.assertTrue(e.getMessage().contains("is not opened")); + } + // write container history data + ContainerId containerId = ContainerId.newInstance(appAttemptId, 1); + try { + writeContainerStartData(containerId); + Assert.fail(); + } catch (IOException e) { + Assert.assertTrue(e.getMessage().contains("is not 
opened")); + } + try { + writeContainerFinishData(containerId); + Assert.fail(); + } catch (IOException e) { + Assert.assertTrue(e.getMessage().contains("is not opened")); + } + } + + @Test + public void testMassiveWriteContainerHistoryData() throws IOException { + long mb = 1024 * 1024; + long usedDiskBefore = fs.getContentSummary(fsWorkingPath).getLength() / mb; + ApplicationId appId = ApplicationId.newInstance(0, 1); + writeApplicationStartData(appId); + ApplicationAttemptId appAttemptId = + ApplicationAttemptId.newInstance(appId, 1); + for (int i = 1; i <= 100000; ++i) { + ContainerId containerId = ContainerId.newInstance(appAttemptId, i); + writeContainerStartData(containerId); + writeContainerFinishData(containerId); + } + writeApplicationFinishData(appId); + long usedDiskAfter = fs.getContentSummary(fsWorkingPath).getLength() / mb; + Assert.assertTrue((usedDiskAfter - usedDiskBefore) < 20); + } + +}
3b73c810de961f01ab2dd27710c7e17a0490e208
orientdb
Started support for server-side triggers--
a
https://github.com/orientechnologies/orientdb
diff --git a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java index ffe771f4c58..ab7c460ee98 100644 --- a/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java +++ b/client/src/main/java/com/orientechnologies/orient/client/remote/OStorageRemote.java @@ -1247,12 +1247,11 @@ private ORecordInternal<?> readRecordFromNetwork(final ODatabaseRecord iDatabase final ORecordInternal<?> record = ORecordFactory.newInstance(network.readByte()); if (record instanceof ORecordSchemaAware<?>) - ((ORecordSchemaAware<?>) record).fill(iDatabase, classId, network.readShort(), network.readLong(), network.readInt()); + ((ORecordSchemaAware<?>) record).fill(iDatabase, classId, network.readShort(), network.readLong(), network.readInt(), + network.readBytes()); else // DISCARD CLASS ID - record.fill(iDatabase, network.readShort(), network.readLong(), network.readInt()); - - record.fromStream(network.readBytes()); + record.fill(iDatabase, network.readShort(), network.readLong(), network.readInt(), network.readBytes()); return record; } diff --git a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java index 1de1700b8f4..f6fb23cf754 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java +++ b/core/src/main/java/com/orientechnologies/orient/core/db/record/ODatabaseRecordAbstract.java @@ -392,12 +392,12 @@ public <RET extends ORecordInternal<?>> RET executeReadRecord(final int iCluster if (currDb == null) currDb = (ODatabaseRecord) databaseOwner; - iRecord.fill(currDb, iClusterId, iPosition, recordBuffer.version); + iRecord.fill(currDb, iClusterId, iPosition, recordBuffer.version, recordBuffer.buffer); iRecord.fromStream(recordBuffer.buffer); 
iRecord.setStatus(STATUS.LOADED); callbackHooks(TYPE.AFTER_READ, iRecord); - + if (!iIgnoreCache) { getCache().pushRecord(iRecord); } @@ -482,7 +482,7 @@ public void executeSaveRecord(final ORecordInternal<?> iRecord, final String iCl if (isNew) { // UPDATE INFORMATION: CLUSTER ID+POSITION - iRecord.fill(iRecord.getDatabase(), clusterId, result, 0); + iRecord.fill(iRecord.getDatabase(), clusterId, result, 0, stream); iRecord.setStatus(STATUS.LOADED); if (stream != null && stream.length > 0) callbackHooks(TYPE.AFTER_CREATE, iRecord); @@ -491,7 +491,7 @@ public void executeSaveRecord(final ORecordInternal<?> iRecord, final String iCl iRecord.onAfterIdentityChanged(iRecord); } else { // UPDATE INFORMATION: VERSION - iRecord.fill(iRecord.getDatabase(), clusterId, rid.getClusterPosition(), (int) result); + iRecord.fill(iRecord.getDatabase(), clusterId, rid.getClusterPosition(), (int) result, stream); iRecord.setStatus(STATUS.LOADED); if (stream != null && stream.length > 0) callbackHooks(TYPE.AFTER_UPDATE, iRecord); diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java index 63e751e6f35..0d1a92b3b37 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java +++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordAbstract.java @@ -50,10 +50,14 @@ public ORecordAbstract(final ODatabaseRecord iDatabase, final byte[] iSource) { unsetDirty(); } - public ORecordAbstract<?> fill(final ODatabaseRecord iDatabase, final int iClusterId, final long iPosition, final int iVersion) { + public ORecordAbstract<?> fill(final ODatabaseRecord iDatabase, final int iClusterId, final long iPosition, final int iVersion, + final byte[] iBuffer) { _database = iDatabase; setIdentity(iClusterId, iPosition); _version = iVersion; + _status = STATUS.LOADED; + _source = iBuffer; + return this; } @@ -242,7 +246,7 @@ public 
ORecordInternal<T> reload(final String iFetchPlan) { _database.reload(this, iFetchPlan); // GET CONTENT - //fromStream(toStream()); + // fromStream(toStream()); return this; } catch (Exception e) { diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordInternal.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordInternal.java index 8d090da853d..70f869c3c57 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordInternal.java +++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordInternal.java @@ -23,7 +23,7 @@ * Generic record representation. The object can be reused across call to the database. */ public interface ORecordInternal<T> extends ORecord<T>, OSerializableStream { - public ORecordAbstract<?> fill(ODatabaseRecord iDatabase, int iClusterId, long iPosition, int iVersion); + public ORecordAbstract<?> fill(ODatabaseRecord iDatabase, int iClusterId, long iPosition, int iVersion, byte[] iBuffer); public ORecordAbstract<?> setIdentity(int iClusterId, long iClusterPosition); diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAware.java b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAware.java index baf0b555589..02f13d261f0 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAware.java +++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAware.java @@ -51,5 +51,6 @@ public interface ORecordSchemaAware<T> extends ORecordInternal<T> { public void validate() throws OValidationException; - public ORecordSchemaAware<T> fill(ODatabaseRecord iDatabase, int iClassId, int iClusterId, long iPosition, int iVersion); + public ORecordSchemaAware<T> fill(ODatabaseRecord iDatabase, int iClassId, int iClusterId, long iPosition, int iVersion, + byte[] iBuffer); } diff --git a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAwareAbstract.java 
b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAwareAbstract.java index a3501a6b7d2..01802a8bbc0 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAwareAbstract.java +++ b/core/src/main/java/com/orientechnologies/orient/core/record/ORecordSchemaAwareAbstract.java @@ -40,8 +40,8 @@ public ORecordSchemaAwareAbstract(final ODatabaseRecord iDatabase) { } public ORecordSchemaAwareAbstract<T> fill(final ODatabaseRecord iDatabase, final int iClassId, final int iClusterId, - final long iPosition, final int iVersion) { - super.fill(iDatabase, iClusterId, iPosition, iVersion); + final long iPosition, final int iVersion, final byte[] iBuffer) { + super.fill(iDatabase, iClusterId, iPosition, iVersion, iBuffer); setClass(_database.getMetadata().getSchema().getClassById(iClassId)); return this; } diff --git a/server/src/main/java/com/orientechnologies/orient/server/tx/OTransactionRecordProxy.java b/server/src/main/java/com/orientechnologies/orient/server/tx/OTransactionRecordProxy.java index 6bc891752eb..5e2855ec9bf 100644 --- a/server/src/main/java/com/orientechnologies/orient/server/tx/OTransactionRecordProxy.java +++ b/server/src/main/java/com/orientechnologies/orient/server/tx/OTransactionRecordProxy.java @@ -167,7 +167,8 @@ public void setStatus(com.orientechnologies.orient.core.record.ORecord.STATUS iS } @Override - public ORecordAbstract<?> fill(final ODatabaseRecord iDatabase, final int iClusterId, final long iPosition, final int iVersion) { + public ORecordAbstract<?> fill(final ODatabaseRecord iDatabase, final int iClusterId, final long iPosition, final int iVersion, + final byte[] iBuffer) { return null; }
6bb83374332a7506ecac4d688ea7f95db4aa8ee4
Delta Spike
make sure our MessageBundle mesages are fully Serializable
c
https://github.com/apache/deltaspike
diff --git a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/SimpleMessageTest.java b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/SimpleMessageTest.java index e7767e3e9..5ee43d738 100644 --- a/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/SimpleMessageTest.java +++ b/deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/message/SimpleMessageTest.java @@ -21,6 +21,7 @@ import org.apache.deltaspike.core.api.message.LocaleResolver; import org.apache.deltaspike.core.impl.message.MessageBundleExtension; import org.apache.deltaspike.test.category.SeCategory; +import org.apache.deltaspike.test.category.Serializer; import org.apache.deltaspike.test.util.ArchiveUtils; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.junit.Arquillian; @@ -36,6 +37,7 @@ import javax.inject.Inject; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; /** * Tests for {@link org.apache.deltaspike.core.api.message.MessageTemplate} @@ -99,4 +101,12 @@ public void testNullMessage() String result = simpleMessage.welcomeWithStringVariable(null); assertEquals(expectedResult, result); } + + @Test + public void testMessageSerialisation() { + Serializer<SimpleMessage> simpleMessageSerializer = new Serializer<SimpleMessage>(); + + SimpleMessage sm2 = simpleMessageSerializer.roundTrip(simpleMessage); + assertNotNull(sm2); + } }
886a96aed8bf0594e54197e870fc9d53c1282578
hbase
HBASE-9369 Add support for 1- and 2-byte integers- in OrderedBytes and provide types (He Liangliang)--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1524297 13f79535-47bb-0310-9956-ffa450edef68-
a
https://github.com/apache/hbase
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java new file mode 100644 index 000000000000..d353c15ba146 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java @@ -0,0 +1,73 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.types; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Order; +import org.apache.hadoop.hbase.util.OrderedBytes; +import org.apache.hadoop.hbase.util.PositionedByteRange; + + +/** + * A {@code short} of 16-bits using a fixed-length encoding. Built on + * {@link OrderedBytes#encodeInt16(PositionedByteRange, short, Order)}. 
+ */ [email protected] [email protected] +public class OrderedInt16 extends OrderedBytesBase<Short> { + + public static final OrderedInt16 ASCENDING = new OrderedInt16(Order.ASCENDING); + public static final OrderedInt16 DESCENDING = new OrderedInt16(Order.DESCENDING); + + protected OrderedInt16(Order order) { super(order); } + + @Override + public boolean isNullable() { return false; } + + @Override + public int encodedLength(Short val) { return 3; } + + @Override + public Class<Short> encodedClass() { return Short.class; } + + @Override + public Short decode(PositionedByteRange src) { + return OrderedBytes.decodeInt16(src); + } + + @Override + public int encode(PositionedByteRange dst, Short val) { + if (null == val) throw new IllegalArgumentException("Null values not supported."); + return OrderedBytes.encodeInt16(dst, val, order); + } + + /** + * Read a {@code short} value from the buffer {@code src}. + */ + public short decodeShort(PositionedByteRange src) { + return OrderedBytes.decodeInt16(src); + } + + /** + * Write instance {@code val} into buffer {@code dst}. + */ + public int encodeShort(PositionedByteRange dst, short val) { + return OrderedBytes.encodeInt16(dst, val, order); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java new file mode 100644 index 000000000000..29ed504de8fe --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java @@ -0,0 +1,73 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.types; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Order; +import org.apache.hadoop.hbase.util.OrderedBytes; +import org.apache.hadoop.hbase.util.PositionedByteRange; + + +/** + * A {@code byte} of 8-bits using a fixed-length encoding. Built on + * {@link OrderedBytes#encodeInt8(PositionedByteRange, byte, Order)}. + */ [email protected] [email protected] +public class OrderedInt8 extends OrderedBytesBase<Byte> { + + public static final OrderedInt8 ASCENDING = new OrderedInt8(Order.ASCENDING); + public static final OrderedInt8 DESCENDING = new OrderedInt8(Order.DESCENDING); + + protected OrderedInt8(Order order) { super(order); } + + @Override + public boolean isNullable() { return false; } + + @Override + public int encodedLength(Byte val) { return 2; } + + @Override + public Class<Byte> encodedClass() { return Byte.class; } + + @Override + public Byte decode(PositionedByteRange src) { + return OrderedBytes.decodeInt8(src); + } + + @Override + public int encode(PositionedByteRange dst, Byte val) { + if (null == val) throw new IllegalArgumentException("Null values not supported."); + return OrderedBytes.encodeInt8(dst, val, order); + } + + /** + * Read a {@code byte} value from the buffer {@code src}. + */ + public byte decodeByte(PositionedByteRange src) { + return OrderedBytes.decodeInt8(src); + } + + /** + * Write instance {@code val} into buffer {@code dst}. 
+ */ + public int encodeByte(PositionedByteRange dst, byte val) { + return OrderedBytes.encodeInt8(dst, val, order); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java new file mode 100644 index 000000000000..5091daa1d331 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java @@ -0,0 +1,86 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.types; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Order; +import org.apache.hadoop.hbase.util.PositionedByteRange; + +/** + * An {@code DataType} for interacting with values encoded using + * {@link Bytes#putByte(byte[], int, byte)}. Intended to make it easier to + * transition away from direct use of {@link Bytes}. 
+ * @see Bytes#putByte(byte[], int, byte) + */ [email protected] [email protected] +public class RawByte implements DataType<Byte> { + + @Override + public boolean isOrderPreserving() { return false; } + + @Override + public Order getOrder() { return null; } + + @Override + public boolean isNullable() { return false; } + + @Override + public boolean isSkippable() { return true; } + + @Override + public int encodedLength(Byte val) { return Bytes.SIZEOF_BYTE; } + + @Override + public Class<Byte> encodedClass() { return Byte.class; } + + @Override + public int skip(PositionedByteRange src) { + src.setPosition(src.getPosition() + Bytes.SIZEOF_BYTE); + return Bytes.SIZEOF_BYTE; + } + + @Override + public Byte decode(PositionedByteRange src) { + byte val = src.getBytes()[src.getOffset() + src.getPosition()]; + skip(src); + return val; + } + + @Override + public int encode(PositionedByteRange dst, Byte val) { + Bytes.putByte(dst.getBytes(), dst.getOffset() + dst.getPosition(), val); + return skip(dst); + } + + /** + * Read a {@code byte} value from the buffer {@code buff}. + */ + public byte decodeByte(byte[] buff, int offset) { + return buff[offset]; + } + + /** + * Write instance {@code val} into buffer {@code buff}. + */ + public int encodeByte(byte[] buff, int offset, byte val) { + return Bytes.putByte(buff, offset, val); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java new file mode 100644 index 000000000000..4fae5d74f185 --- /dev/null +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java @@ -0,0 +1,87 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.types; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Order; +import org.apache.hadoop.hbase.util.PositionedByteRange; + +/** + * An {@code DataType} for interacting with values encoded using + * {@link Bytes#putShort(byte[], int, short)}. Intended to make it easier to + * transition away from direct use of {@link Bytes}. 
+ * @see Bytes#putShort(byte[], int, short) + * @see Bytes#toShort(byte[]) + */ [email protected] [email protected] +public class RawShort implements DataType<Short> { + + @Override + public boolean isOrderPreserving() { return false; } + + @Override + public Order getOrder() { return null; } + + @Override + public boolean isNullable() { return false; } + + @Override + public boolean isSkippable() { return true; } + + @Override + public int encodedLength(Short val) { return Bytes.SIZEOF_SHORT; } + + @Override + public Class<Short> encodedClass() { return Short.class; } + + @Override + public int skip(PositionedByteRange src) { + src.setPosition(src.getPosition() + Bytes.SIZEOF_SHORT); + return Bytes.SIZEOF_SHORT; + } + + @Override + public Short decode(PositionedByteRange src) { + short val = Bytes.toShort(src.getBytes(), src.getOffset() + src.getPosition()); + skip(src); + return val; + } + + @Override + public int encode(PositionedByteRange dst, Short val) { + Bytes.putShort(dst.getBytes(), dst.getOffset() + dst.getPosition(), val); + return skip(dst); + } + + /** + * Read a {@code short} value from the buffer {@code buff}. + */ + public short decodeShort(byte[] buff, int offset) { + return Bytes.toShort(buff, offset); + } + + /** + * Write instance {@code val} into buffer {@code buff}. + */ + public int encodeShort(byte[] buff, int offset, short val) { + return Bytes.putShort(buff, offset, val); + } +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java index b99fc7b99169..0bc20c8614c0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java @@ -278,6 +278,7 @@ public class OrderedBytes { * implementations can be inserted into the total ordering enforced here. 
*/ private static final byte NULL = 0x05; + // room for 1 expansion type private static final byte NEG_INF = 0x07; private static final byte NEG_LARGE = 0x08; private static final byte NEG_MED_MIN = 0x09; @@ -289,14 +290,21 @@ public class OrderedBytes { private static final byte POS_MED_MAX = 0x21; private static final byte POS_LARGE = 0x22; private static final byte POS_INF = 0x23; - private static final byte NAN = 0x25; - private static final byte FIXED_INT32 = 0x27; - private static final byte FIXED_INT64 = 0x28; + // room for 2 expansion type + private static final byte NAN = 0x26; + // room for 2 expansion types + private static final byte FIXED_INT8 = 0x29; + private static final byte FIXED_INT16 = 0x2a; + private static final byte FIXED_INT32 = 0x2b; + private static final byte FIXED_INT64 = 0x2c; + // room for 3 expansion types private static final byte FIXED_FLOAT32 = 0x30; private static final byte FIXED_FLOAT64 = 0x31; - private static final byte TEXT = 0x33; - private static final byte BLOB_VAR = 0x35; - private static final byte BLOB_COPY = 0x36; + // room for 2 expansion type + private static final byte TEXT = 0x34; + // room for 2 expansion type + private static final byte BLOB_VAR = 0x37; + private static final byte BLOB_COPY = 0x38; /* * The following constant values are used by encoding implementations @@ -1198,6 +1206,59 @@ public static int encodeNull(PositionedByteRange dst, Order ord) { return 1; } + /** + * Encode an {@code int8} value using the fixed-length encoding. + * @return the number of bytes written. + * @see #encodeInt64(PositionedByteRange, long, Order) + * @see #decodeInt8(PositionedByteRange) + */ + public static int encodeInt8(PositionedByteRange dst, byte val, Order ord) { + final int offset = dst.getOffset(), start = dst.getPosition(); + dst.put(FIXED_INT8) + .put((byte) (val ^ 0x80)); + ord.apply(dst.getBytes(), offset + start, 2); + return 2; + } + + /** + * Decode an {@code int8} value. 
+ * @see #encodeInt8(PositionedByteRange, byte, Order) + */ + public static byte decodeInt8(PositionedByteRange src) { + final byte header = src.get(); + assert header == FIXED_INT8 || header == DESCENDING.apply(FIXED_INT8); + Order ord = header == FIXED_INT8 ? ASCENDING : DESCENDING; + return (byte)((ord.apply(src.get()) ^ 0x80) & 0xff); + } + + /** + * Encode an {@code int16} value using the fixed-length encoding. + * @return the number of bytes written. + * @see #encodeInt64(PositionedByteRange, long, Order) + * @see #decodeInt16(PositionedByteRange) + */ + public static int encodeInt16(PositionedByteRange dst, short val, Order ord) { + final int offset = dst.getOffset(), start = dst.getPosition(); + dst.put(FIXED_INT16) + .put((byte) ((val >> 8) ^ 0x80)) + .put((byte) val); + ord.apply(dst.getBytes(), offset + start, 3); + return 3; + } + + /** + * Decode an {@code int16} value. + * @see #encodeInt16(PositionedByteRange, short, Order) + */ + public static short decodeInt16(PositionedByteRange src) { + final byte header = src.get(); + assert header == FIXED_INT16 || header == DESCENDING.apply(FIXED_INT16); + Order ord = header == FIXED_INT16 ? ASCENDING : DESCENDING; + short val = (short) ((ord.apply(src.get()) ^ 0x80) & 0xff); + val = (short) ((val << 8) + (ord.apply(src.get()) & 0xff)); + return val; + } + /** * Encode an {@code int32} value using the fixed-length encoding. * @return the number of bytes written. 
@@ -1270,14 +1331,14 @@ public static int decodeInt32(PositionedByteRange src) { public static int encodeInt64(PositionedByteRange dst, long val, Order ord) { final int offset = dst.getOffset(), start = dst.getPosition(); dst.put(FIXED_INT64) - .put((byte) ((val >> 56) ^ 0x80)) - .put((byte) (val >> 48)) - .put((byte) (val >> 40)) - .put((byte) (val >> 32)) - .put((byte) (val >> 24)) - .put((byte) (val >> 16)) - .put((byte) (val >> 8)) - .put((byte) val); + .put((byte) ((val >> 56) ^ 0x80)) + .put((byte) (val >> 48)) + .put((byte) (val >> 40)) + .put((byte) (val >> 32)) + .put((byte) (val >> 24)) + .put((byte) (val >> 16)) + .put((byte) (val >> 8)) + .put((byte) val); ord.apply(dst.getBytes(), offset + start, 9); return 9; } @@ -1611,6 +1672,12 @@ public static int skip(PositionedByteRange src) { return 1; case NAN: return 1; + case FIXED_INT8: + src.setPosition(src.getPosition() + 1); + return src.getPosition() - start; + case FIXED_INT16: + src.setPosition(src.getPosition() + 2); + return src.getPosition() - start; case FIXED_INT32: src.setPosition(src.getPosition() + 4); return src.getPosition() - start; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java index 509d76e666fd..5ecc45461da9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java @@ -358,6 +358,142 @@ public void testNumericIntRealCompatibility() { } } + /** + * Test int8 encoding. + */ + @Test + public void testInt8() { + Byte[] vals = + { Byte.MIN_VALUE, Byte.MIN_VALUE / 2, 0, Byte.MAX_VALUE / 2, Byte.MAX_VALUE }; + + /* + * assert encoded values match decoded values. encode into target buffer + * starting at an offset to detect over/underflow conditions. 
+ */ + for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) { + for (int i = 0; i < vals.length; i++) { + // allocate a buffer 3-bytes larger than necessary to detect over/underflow + byte[] a = new byte[2 + 3]; + PositionedByteRange buf1 = new SimplePositionedByteRange(a, 1, 2 + 1); + buf1.setPosition(1); + + // verify encode + assertEquals("Surprising return value.", + 2, OrderedBytes.encodeInt8(buf1, vals[i], ord)); + assertEquals("Broken test: serialization did not consume entire buffer.", + buf1.getLength(), buf1.getPosition()); + assertEquals("Surprising serialized length.", 2, buf1.getPosition() - 1); + assertEquals("Buffer underflow.", 0, a[0]); + assertEquals("Buffer underflow.", 0, a[1]); + assertEquals("Buffer overflow.", 0, a[a.length - 1]); + + // verify skip + buf1.setPosition(1); + assertEquals("Surprising return value.", 2, OrderedBytes.skip(buf1)); + assertEquals("Did not skip enough bytes.", 2, buf1.getPosition() - 1); + + // verify decode + buf1.setPosition(1); + assertEquals("Deserialization failed.", + vals[i].byteValue(), OrderedBytes.decodeInt8(buf1)); + assertEquals("Did not consume enough bytes.", 2, buf1.getPosition() - 1); + } + } + + /* + * assert natural sort order is preserved by the codec. 
+ */ + for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) { + byte[][] encoded = new byte[vals.length][2]; + PositionedByteRange pbr = new SimplePositionedByteRange(); + for (int i = 0; i < vals.length; i++) { + OrderedBytes.encodeInt8(pbr.set(encoded[i]), vals[i], ord); + } + + Arrays.sort(encoded, Bytes.BYTES_COMPARATOR); + Byte[] sortedVals = Arrays.copyOf(vals, vals.length); + if (ord == Order.ASCENDING) Arrays.sort(sortedVals); + else Arrays.sort(sortedVals, Collections.reverseOrder()); + + for (int i = 0; i < sortedVals.length; i++) { + int decoded = OrderedBytes.decodeInt8(pbr.set(encoded[i])); + assertEquals( + String.format( + "Encoded representations do not preserve natural order: <%s>, <%s>, %s", + sortedVals[i], decoded, ord), + sortedVals[i].byteValue(), decoded); + } + } + } + + /** + * Test int16 encoding. + */ + @Test + public void testInt16() { + Short[] vals = + { Short.MIN_VALUE, Short.MIN_VALUE / 2, 0, Short.MAX_VALUE / 2, Short.MAX_VALUE }; + + /* + * assert encoded values match decoded values. encode into target buffer + * starting at an offset to detect over/underflow conditions. 
+ */ + for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) { + for (int i = 0; i < vals.length; i++) { + // allocate a buffer 3-bytes larger than necessary to detect over/underflow + byte[] a = new byte[3 + 3]; + PositionedByteRange buf1 = new SimplePositionedByteRange(a, 1, 3 + 1); + buf1.setPosition(1); + + // verify encode + assertEquals("Surprising return value.", + 3, OrderedBytes.encodeInt16(buf1, vals[i], ord)); + assertEquals("Broken test: serialization did not consume entire buffer.", + buf1.getLength(), buf1.getPosition()); + assertEquals("Surprising serialized length.", 3, buf1.getPosition() - 1); + assertEquals("Buffer underflow.", 0, a[0]); + assertEquals("Buffer underflow.", 0, a[1]); + assertEquals("Buffer overflow.", 0, a[a.length - 1]); + + // verify skip + buf1.setPosition(1); + assertEquals("Surprising return value.", 3, OrderedBytes.skip(buf1)); + assertEquals("Did not skip enough bytes.", 3, buf1.getPosition() - 1); + + // verify decode + buf1.setPosition(1); + assertEquals("Deserialization failed.", + vals[i].shortValue(), OrderedBytes.decodeInt16(buf1)); + assertEquals("Did not consume enough bytes.", 3, buf1.getPosition() - 1); + } + } + + /* + * assert natural sort order is preserved by the codec. 
+ */ + for (Order ord : new Order[] { Order.ASCENDING, Order.DESCENDING }) { + byte[][] encoded = new byte[vals.length][3]; + PositionedByteRange pbr = new SimplePositionedByteRange(); + for (int i = 0; i < vals.length; i++) { + OrderedBytes.encodeInt16(pbr.set(encoded[i]), vals[i], ord); + } + + Arrays.sort(encoded, Bytes.BYTES_COMPARATOR); + Short[] sortedVals = Arrays.copyOf(vals, vals.length); + if (ord == Order.ASCENDING) Arrays.sort(sortedVals); + else Arrays.sort(sortedVals, Collections.reverseOrder()); + + for (int i = 0; i < sortedVals.length; i++) { + int decoded = OrderedBytes.decodeInt16(pbr.set(encoded[i])); + assertEquals( + String.format( + "Encoded representations do not preserve natural order: <%s>, <%s>, %s", + sortedVals[i], decoded, ord), + sortedVals[i].shortValue(), decoded); + } + } + } + /** * Test int32 encoding. */ @@ -898,7 +1034,8 @@ public void testBlobCopy() { @Test(expected = IllegalArgumentException.class) public void testBlobCopyNoZeroBytes() { byte[] val = { 0x01, 0x02, 0x00, 0x03 }; - byte[] ascExpected = { 0x36, 0x01, 0x02, 0x00, 0x03 }; + // TODO: implementation detail leaked here. 
+ byte[] ascExpected = { 0x38, 0x01, 0x02, 0x00, 0x03 }; PositionedByteRange buf = new SimplePositionedByteRange(val.length + 1); OrderedBytes.encodeBlobCopy(buf, val, Order.ASCENDING); assertArrayEquals(ascExpected, buf.getBytes()); @@ -923,6 +1060,8 @@ public void testSkip() { BigDecimal posLarge = negLarge.negate(); double posInf = Double.POSITIVE_INFINITY; double nan = Double.NaN; + byte int8 = 100; + short int16 = 100; int int32 = 100; long int64 = 100l; float float32 = 100.0f; @@ -988,6 +1127,16 @@ public void testSkip() { buff.setPosition(0); assertEquals(o, OrderedBytes.skip(buff)); + buff.setPosition(0); + o = OrderedBytes.encodeInt8(buff, int8, ord); + buff.setPosition(0); + assertEquals(o, OrderedBytes.skip(buff)); + + buff.setPosition(0); + o = OrderedBytes.encodeInt16(buff, int16, ord); + buff.setPosition(0); + assertEquals(o, OrderedBytes.skip(buff)); + buff.setPosition(0); o = OrderedBytes.encodeInt32(buff, int32, ord); buff.setPosition(0);
2a8739c8bc4dd1998fd000033d9ef0d3819eff76
Valadoc
gir-importer: Ignore <annotation> tags
a
https://github.com/GNOME/vala/
diff --git a/src/libvaladoc/importer/girdocumentationimporter.vala b/src/libvaladoc/importer/girdocumentationimporter.vala index 02f9e85ab9..1988b967c1 100644 --- a/src/libvaladoc/importer/girdocumentationimporter.vala +++ b/src/libvaladoc/importer/girdocumentationimporter.vala @@ -176,6 +176,12 @@ public class Valadoc.Importer.GirDocumentationImporter : DocumentationImporter { private void next () { current_token = reader.read_token (out begin, out end); + + // Skip <annotation /> (only generated by valac) + if (current_token == MarkupTokenType.START_ELEMENT && reader.name == "annotation") { + next (); // MarkupTokenType.END_ELEMENT, annotation + next (); + } } private void start_element (string name) {
062042ba83651b8495bc0330023ae7c7c47a38d4
hbase
HBASE-1722 Add support for exporting HBase- metrics via JMX--git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@813229 13f79535-47bb-0310-9956-ffa450edef68-
a
https://github.com/apache/hbase
diff --git a/CHANGES.txt b/CHANGES.txt index 05466fa181fb..d81a17cbca78 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -34,6 +34,7 @@ Release 0.21.0 - Unreleased HBASE-1800 Too many ZK connections HBASE-1819 Update to 0.20.1 hadoop and zk 3.2.1 HBASE-1820 Update jruby from 1.2 to 1.3.1 + HBASE-1722 Add support for exporting HBase metrics via JMX OPTIMIZATIONS HBASE-1765 Delay Result deserialization until asked for and permit diff --git a/src/docs/src/documentation/content/xdocs/metrics.xml b/src/docs/src/documentation/content/xdocs/metrics.xml index c8744f438de5..b01d7bd06cc7 100644 --- a/src/docs/src/documentation/content/xdocs/metrics.xml +++ b/src/docs/src/documentation/content/xdocs/metrics.xml @@ -63,5 +63,118 @@ in ganglia, the stats are aggregated rather than reported per instance. </p> </section> + + <section> + <title> Using with JMX </title> + <p> + In addition to the standard output contexts supported by the Hadoop + metrics package, you can also export HBase metrics via Java Management + Extensions (JMX). This will allow viewing HBase stats in JConsole or + any other JMX client. + </p> + <section> + <title>Enable HBase stats collection</title> + <p> + To enable JMX support in HBase, first edit + <code>$HBASE_HOME/conf/hadoop-metrics.properties</code> to support + metrics refreshing. (If you've already configured + <code>hadoop-metrics.properties</code> for another output context, + you can skip this step). 
+ </p> + <source> +# Configuration of the "hbase" context for null +hbase.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread +hbase.period=60 + +# Configuration of the "jvm" context for null +jvm.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread +jvm.period=60 + +# Configuration of the "rpc" context for null +rpc.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread +rpc.period=60 + </source> + </section> + <section> + <title>Setup JMX remote access</title> + <p> + For remote access, you will need to configure JMX remote passwords + and access profiles. Create the files: + </p> + <dl> + <dt><code>$HBASE_HOME/conf/jmxremote.passwd</code> (set permissions + to 600)</dt> + <dd> + <source> +monitorRole monitorpass +controlRole controlpass + </source> + </dd> + + <dt><code>$HBASE_HOME/conf/jmxremote.access</code></dt> + <dd> + <source> +monitorRole readonly +controlRole readwrite + </source> + </dd> + </dl> + </section> + <section> + <title>Configure JMX in HBase startup</title> + <p> + Finally, edit the <code>$HBASE_HOME/conf/hbase-env.sh</code> and + <code>$HBASE_HOME/bin/hbase</code> scripts for JMX support: + </p> + <dl> + <dt><code>$HBASE_HOME/conf/hbase-env.sh</code></dt> + <dd> + <p>Add the lines:</p> + <source> +JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false" +JMX_OPTS="$JMX_OPTS -Dcom.sun.management.jmxremote.password.file=$HBASE_HOME/conf/jmxremote.passwd" +JMX_OPTS="$JMX_OPTS -Dcom.sun.management.jmxremote.access.file=$HBASE_HOME/conf/jmxremote.access" + +export HBASE_MASTER_OPTS="$JMX_OPTS -Dcom.sun.management.jmxremote.port=10101" +export HBASE_REGIONSERVER_OPTS="$JMX_OPTS -Dcom.sun.management.jmxremote.port=10102" + </source> + </dd> + <dt><code>$HBASE_HOME/bin/hbase</code></dt> + <dd> + <p>Towards the end of the script, replace the lines:</p> + <source> + # figure out which class to run +if [ "$COMMAND" = "shell" ] ; then + CLASS="org.jruby.Main ${HBASE_HOME}/bin/hirb.rb" +elif [ 
"$COMMAND" = "master" ] ; then + CLASS='org.apache.hadoop.hbase.master.HMaster' +elif [ "$COMMAND" = "regionserver" ] ; then + CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer' + </source> + <p> + with the lines: (adding the "HBASE_OPTS=..." lines for "master" and + "regionserver" commands) + </p> + <source> + # figure out which class to run +if [ "$COMMAND" = "shell" ] ; then + CLASS="org.jruby.Main ${HBASE_HOME}/bin/hirb.rb" +elif [ "$COMMAND" = "master" ] ; then + CLASS='org.apache.hadoop.hbase.master.HMaster' + HBASE_OPTS="$HBASE_OPTS $HBASE_MASTER_OPTS" +elif [ "$COMMAND" = "regionserver" ] ; then + CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer' + HBASE_OPTS="$HBASE_OPTS $HBASE_REGIONSERVER_OPTS" + </source> + </dd> + </dl> + <p> + After restarting the processes you want to monitor, you should now be + able to run JConsole (included with the JDK since JDK 5.0) to view + the statistics via JMX. HBase MBeans are exported under the + <strong><code>hadoop</code></strong> domain in JMX. 
+ </p> + </section> + </section> </body> </document> diff --git a/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java b/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java index fcfd13943815..950d02a43900 100644 --- a/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java +++ b/src/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java @@ -47,6 +47,7 @@ public class HBaseRpcMetrics implements Updater { private MetricsRecord metricsRecord; private static Log LOG = LogFactory.getLog(HBaseRpcMetrics.class); + private final HBaseRPCStatistics rpcStatistics; public HBaseRpcMetrics(String hostName, String port) { MetricsContext context = MetricsUtil.getContext("rpc"); @@ -58,6 +59,8 @@ public HBaseRpcMetrics(String hostName, String port) { + hostName + ", port=" + port); context.registerUpdater(this); + + rpcStatistics = new HBaseRPCStatistics(this.registry, hostName, port); } @@ -110,6 +113,7 @@ public void doUpdates(MetricsContext context) { } public void shutdown() { - // Nothing to do + if (rpcStatistics != null) + rpcStatistics.shutdown(); } } \ No newline at end of file diff --git a/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java b/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java index 4d527b0a5b15..62d7cf3888c4 100644 --- a/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java +++ b/src/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java @@ -39,6 +39,7 @@ public class MasterMetrics implements Updater { private final Log LOG = LogFactory.getLog(this.getClass()); private final MetricsRecord metricsRecord; private final MetricsRegistry registry = new MetricsRegistry(); + private final MasterStatistics masterStatistics; /* * Count of requests to the cluster since last call to metrics update */ @@ -52,11 +53,16 @@ public MasterMetrics() { metricsRecord.setTag("Master", name); context.registerUpdater(this); JvmMetrics.init("Master", name); + + // expose the MBean for metrics + masterStatistics = 
new MasterStatistics(this.registry); + LOG.info("Initialized"); } public void shutdown() { - // nought to do. + if (masterStatistics != null) + masterStatistics.shutdown(); } /** diff --git a/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java b/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java index 52ab21ffffef..49e819724960 100644 --- a/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java +++ b/src/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java @@ -47,6 +47,7 @@ public class RegionServerMetrics implements Updater { private long lastUpdate = System.currentTimeMillis(); private static final int MB = 1024*1024; private MetricsRegistry registry = new MetricsRegistry(); + private final RegionServerStatistics statistics; public final MetricsTimeVaryingRate atomicIncrementTime = new MetricsTimeVaryingRate("atomicIncrementTime", registry); @@ -112,13 +113,18 @@ public RegionServerMetrics() { context.registerUpdater(this); // Add jvmmetrics. JvmMetrics.init("RegionServer", name); + + // export for JMX + statistics = new RegionServerStatistics(this.registry, name); + LOG.info("Initialized"); } - + public void shutdown() { - // nought to do. + if (statistics != null) + statistics.shutdown(); } - + /** * Since this object is a registered updater, this method will be called * periodically, e.g. every 5 seconds. @@ -141,7 +147,7 @@ public void doUpdates(MetricsContext unused) { this.metricsRecord.update(); this.lastUpdate = System.currentTimeMillis(); } - + public void resetAllMinMax() { // Nothing to do }
b767617542a2403051a0f884d18462e60bf45e04
adangel$pmd
Preferences * Rule activation checkboxes now operational - only the checked rules are used to evaluate code. No longer need to physically remove rules from the ruleset to bypass them. Settings are stored in preferences and the number of active rules is shown in the dialog when PMD runs a scan. Minor UI bug in group-by mode doesn't render parent node checks initially. * New SelectAll/None buttons and a SortByChecked items button. Replaced several text buttons with icon versions * Selected rules & property tabs retained by UI preferences between sessions Fixes * New column in rule table showing number of available fixes per rule (just two for now). The new 'Fixes' tab in the properties folders needs additional work to complete it. The Fix menu option in the violations context menu works now for the two rules that have them. Need to provide better storage for rule-fix associations - current approach only stores fix classnames - should support classname + static instance var. CPD * Added launch button for the Swing version of the CPD UI to the preference page. Need to migrate its better functionality to the SWT version. git-svn-id: https://pmd.svn.sourceforge.net/svnroot/pmd/trunk@7092 51baf565-9d33-0410-a72c-fc3788e3496d
p
https://github.com/adangel/pmd
diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/add.gif b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/add.gif new file mode 100755 index 00000000000..252d7ebcb8c Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/add.gif differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/btn_prio0.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/btn_prio0.png new file mode 100755 index 00000000000..0648e8a9761 Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/btn_prio0.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/checkAll.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/checkAll.png new file mode 100755 index 00000000000..a6506ab94ce Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/checkAll.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/delete.gif b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/delete.gif new file mode 100755 index 00000000000..b6922ac11cf Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/delete.gif differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/downArrow.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/downArrow.png new file mode 100755 index 00000000000..15786161ee8 Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/downArrow.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/editTool.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/editTool.png new file mode 100755 index 00000000000..7acac0e6a25 Binary files /dev/null and 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/editTool.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/export.gif b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/export.gif new file mode 100755 index 00000000000..5a0837d1e47 Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/export.gif differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/icon_empty.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/icon_empty.png new file mode 100755 index 00000000000..46b493e8512 Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/icon_empty.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/icon_greenCheck.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/icon_greenCheck.png new file mode 100755 index 00000000000..c797ab2374f Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/icon_greenCheck.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/import.gif b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/import.gif new file mode 100755 index 00000000000..d38085ad9c2 Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/import.gif differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/sortChecked.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/sortChecked.png new file mode 100755 index 00000000000..6ef26ec333e Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/sortChecked.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/uncheckAll.png 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/uncheckAll.png new file mode 100755 index 00000000000..eafc98f6ecf Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/uncheckAll.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/upArrow.png b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/upArrow.png new file mode 100755 index 00000000000..c93d4c0706c Binary files /dev/null and b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/icons/upArrow.png differ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/messages.properties b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/messages.properties index 56037a0d353..9d37a290b14 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/messages.properties +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/messages.properties @@ -46,14 +46,19 @@ preference.ruleset.label.rulepropstable = Rule properties preference.ruleset.label.exclude_patterns_table = Exclude patterns preference.ruleset.label.include_patterns_table = Include patterns preference.ruleset.label.rules_grouped_by = Rules grouped by +preference.ruleset.label.active_rule_count = Active rules: + +preference.ruleset.button.tooltip.check.all = Check all rules +preference.ruleset.button.tooltip.uncheck.all = Uncheck all rules + preference.ruleset.column.language = Language preference.ruleset.column.language.tooltip = Target language preference.ruleset.column.dataflow = DFA preference.ruleset.column.dataflow.tooltip = Dataflow analysis preference.ruleset.column.ruleset = Rule set preference.ruleset.column.ruleset.tooltip = Current rule set -preference.ruleset.column.ruleset_name = Rule set name -preference.ruleset.column.rule_name = Rule name +preference.ruleset.column.ruleset_name = Rule set +preference.ruleset.column.rule_name = Rule 
preference.ruleset.column.rule_type = Type preference.ruleset.column.example_count = Examples preference.ruleset.column.example_count.tooltip = Number of examples @@ -67,6 +72,7 @@ preference.ruleset.column.filters.xpath.tooltip = XPath exclusion filters preference.ruleset.column.since = Since preference.ruleset.column.since.tooltip = Incorporation within PMD preference.ruleset.column.priority = Priority +preference.ruleset.column.fixCount = Fixes preference.ruleset.column.priority.tooltip = Relative priority preference.ruleset.column.description = Description preference.ruleset.column.property = Property @@ -104,6 +110,7 @@ preference.ruleedit.tab.properties = Properties preference.ruleedit.tab.description = Description preference.ruleedit.tab.filters = Filters preference.ruleedit.tab.xpath = XPath +preference.ruleedit.tab.fixes = Fixes preference.ruleedit.tab.examples = Examples preference.ruleedit.label.exclusion_regex = Exclusion regular expression diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/plugin.xml b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/plugin.xml index 3f8d3d44825..557035e8a23 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/plugin.xml +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/plugin.xml @@ -90,13 +90,14 @@ <page name="%preferences.rulesets" category="net.sourceforge.pmd.eclipse.ui.preferences.generalPreferencesPage" - class="net.sourceforge.pmd.eclipse.ui.preferences.br.PMDPreferencePage" + class="net.sourceforge.pmd.eclipse.ui.preferences.br.PMDPreferencePage2" id="net.sourceforge.pmd.eclipse.ui.preferences.pmdPreferencePage"> </page> - - <page name="Ruleset preferences - BR" category="net.sourceforge.pmd.ui.preferences.generalPreferencesPage" class="net.sourceforge.pmd.eclipse.ui.preferences.br.PMDPreferencePage" id="net.sourceforge.pmd.ui.preferences.pmdPreferencePage"> + +<!-- + <page name="Ruleset preferences - BR" 
category="net.sourceforge.pmd.ui.preferences.generalPreferencesPage" class="net.sourceforge.pmd.eclipse.ui.preferences.br.PMDPreferencePage2" id="net.sourceforge.pmd.ui.preferences.pmdPreferencePage"> </page> - +--> <page name="%preferences.cpd" category="net.sourceforge.pmd.eclipse.ui.preferences.generalPreferencesPage" @@ -367,13 +368,15 @@ category="net.sourceforge.pmd.eclipse.ui.views" name="%view.overview" id="net.sourceforge.pmd.eclipse.ui.views.violationOverview"/> -<!-- <view temporary placement, under construction +<!-- + <view allowMultiple="false" icon="icons/icon_overview.gif" class="net.sourceforge.pmd.eclipse.ui.views.br.ViolationOverviewBR" category="net.sourceforge.pmd.eclipse.ui.views" name="Violation Overview BR" - id="net.sourceforge.pmd.eclipse.ui.views.violationOverview.br"/> --> + id="net.sourceforge.pmd.eclipse.ui.views.violationOverview.br"/> +--> <view allowMultiple="false" icon="icons/icon_dataflow.png" diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/builder/MarkerUtil.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/builder/MarkerUtil.java index 2af643aff2f..42a1789b26e 100755 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/builder/MarkerUtil.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/builder/MarkerUtil.java @@ -3,10 +3,17 @@ import java.util.ArrayList; import java.util.List; +import net.sourceforge.pmd.Rule; +import net.sourceforge.pmd.RuleSet; +import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; import net.sourceforge.pmd.eclipse.runtime.PMDRuntimeConstants; +import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; +import net.sourceforge.pmd.util.StringUtil; +import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IResource; 
+import org.eclipse.core.resources.IResourceVisitor; import org.eclipse.core.runtime.CoreException; /** @@ -19,6 +26,85 @@ public class MarkerUtil { private MarkerUtil() { } + public static boolean hasAnyRuleMarkers(IResource resource) throws CoreException { + + final boolean foundOne[] = new boolean[] { false }; + + IResourceVisitor ruleMarkerFinder = new IResourceVisitor() { + + public boolean visit(IResource resource) { + + if (foundOne[0]) return false; + + if (resource instanceof IFile) { + + IMarker[] ruleMarkers = null; + try { + ruleMarkers = resource.findMarkers(PMDRuntimeConstants.PMD_MARKER, true, IResource.DEPTH_INFINITE); + } catch (CoreException ex) { + // what do to? + } + if (ruleMarkers.length > 0) { + foundOne[0] = true; + return false; + } + } + + return true; + } + }; + + try { + resource.accept(ruleMarkerFinder); + } catch (CoreException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + return foundOne[0]; + } + + public static String ruleNameFor(IMarker marker) { + return marker.getAttribute(PMDUiConstants.KEY_MARKERATT_RULENAME, ""); + } + + public static int rulePriorityFor(IMarker marker) throws CoreException { + return ((Integer)marker.getAttribute(PMDUiConstants.KEY_MARKERATT_PRIORITY)).intValue(); + } + + public static List<Rule> rulesFor(IMarker[] markers) { + + List<Rule> rules = new ArrayList<Rule>(markers.length); + RuleSet ruleset = PMDPlugin.getDefault().getPreferencesManager().getRuleSet(); + + for (IMarker marker : markers) { + String name = ruleNameFor(marker); + if (StringUtil.isEmpty(name)) continue; + Rule rule = ruleset.getRuleByName(name); + if (rule == null) continue; + rules.add(rule); + } + + return rules; + } + + /** + * Returns the name of the rule that is common to all markers + * or null if any one of them differ. 
+ * + * @param IMarker[] markers + * @return String + */ + public static String commonRuleNameAmong(IMarker[] markers) { + + String ruleName = ruleNameFor(markers[0]); + for (int i=1; i<markers.length; i++) { + if (!ruleName.equals(ruleNameFor(markers[i]))) return null; + } + + return ruleName; + } + public static void deleteAllMarkersIn(IResource resource) throws CoreException { deleteMarkersIn(resource, PMDRuntimeConstants.ALL_MARKER_TYPES); } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/ReviewCodeCmd.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/ReviewCodeCmd.java index 07af8530110..faae2109a3b 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/ReviewCodeCmd.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/cmd/ReviewCodeCmd.java @@ -48,8 +48,10 @@ import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; import net.sourceforge.pmd.eclipse.runtime.PMDRuntimeConstants; import net.sourceforge.pmd.eclipse.runtime.builder.MarkerUtil; +import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; import net.sourceforge.pmd.eclipse.runtime.properties.IProjectProperties; import net.sourceforge.pmd.eclipse.runtime.properties.PropertiesException; +import net.sourceforge.pmd.eclipse.ui.actions.RuleSetUtil; import org.apache.log4j.Logger; import org.eclipse.core.resources.IContainer; @@ -93,6 +95,7 @@ public class ReviewCodeCmd extends AbstractDefaultCommand { private int ruleCount; private int fileCount; private long pmdDuration; + private String onErrorIssue = null; private static final long serialVersionUID = 1L; @@ -121,8 +124,9 @@ public void execute() throws CommandException { this.pmdDuration = 0; beginTask("PMD checking...", getStepCount()); + // Lancer PMD - if (this.resources.isEmpty()) { + if 
(resources.isEmpty()) { processResourceDelta(); } else { processResources(); @@ -155,21 +159,19 @@ public void run() { done(); // Log performance information - if (this.fileCount > 0 && this.ruleCount > 0) { + if (fileCount > 0 && ruleCount > 0) { logInfo( - "Review code command terminated. " + this.ruleCount + " rules were executed against " + this.fileCount - + " files. Actual PMD duration is about " + this.pmdDuration + "ms, that is about " - + (float)this.pmdDuration / this.fileCount + "Review code command terminated. " + ruleCount + " rules were executed against " + fileCount + + " files. Actual PMD duration is about " + pmdDuration + "ms, that is about " + + (float)pmdDuration / fileCount + " ms/file, " - + (float)this.pmdDuration / this.ruleCount + + (float)pmdDuration / ruleCount + " ms/rule, " - + (float)this.pmdDuration / ((long) this.fileCount * (long) this.ruleCount) + + (float)pmdDuration / ((long) fileCount * (long) ruleCount) + " ms/filerule" ); } else { - logInfo( - "Review code command terminated. " + this.ruleCount + " rules were executed against " + this.fileCount - + " files. PMD was not executed."); + logInfo("Review code command terminated. " + ruleCount + " rules were executed against " + fileCount + " files. PMD was not executed."); } } } @@ -185,8 +187,8 @@ public Map<IFile, Set<MarkerInfo>> getMarkers() { * @param resource The resource to set. 
*/ public void setResources(final List<ISchedulingRule> resources) { - this.resources.clear(); - this.resources.addAll(resources); + resources.clear(); + resources.addAll(resources); } /** @@ -199,7 +201,7 @@ public void addResource(final IResource resource) { throw new IllegalArgumentException("Resource parameter can not be null"); } - this.resources.add(resource); + resources.add(resource); } /** @@ -229,10 +231,11 @@ public void setOpenPmdPerspective(boolean openPmdPerspective) { */ @Override public void reset() { - this.resources.clear(); - this.markersByFile = new HashMap<IFile, Set<MarkerInfo>>(); - this.setTerminated(false); - this.openPmdPerspective = false; + resources.clear(); + markersByFile = new HashMap<IFile, Set<MarkerInfo>>(); + setTerminated(false); + openPmdPerspective = false; + onErrorIssue = null; } /** @@ -240,7 +243,7 @@ public void reset() { */ @Override public boolean isReadyToExecute() { - return this.resources.size() != 0 || this.resourceDelta != null; + return resources.size() != 0 || resourceDelta != null; } /** @@ -251,14 +254,14 @@ private ISchedulingRule getschedulingRule() { final IResourceRuleFactory ruleFactory = workspace.getRuleFactory(); ISchedulingRule rule = null; - if (this.resources.isEmpty()) { - rule = ruleFactory.markerRule(this.resourceDelta.getResource().getProject()); + if (resources.isEmpty()) { + rule = ruleFactory.markerRule(resourceDelta.getResource().getProject()); } else { - ISchedulingRule rules[] = new ISchedulingRule[this.resources.size()]; + ISchedulingRule rules[] = new ISchedulingRule[resources.size()]; for (int i = 0; i < rules.length; i++) { - rules[i] = ruleFactory.markerRule((IResource) this.resources.get(i)); + rules[i] = ruleFactory.markerRule((IResource) resources.get(i)); } - rule = new MultiRule(this.resources.toArray(rules)); + rule = new MultiRule(resources.toArray(rules)); } return rule; @@ -270,7 +273,7 @@ private ISchedulingRule getschedulingRule() { * @throws CommandException */ private void 
processResources() throws CommandException { - final Iterator<ISchedulingRule> i = this.resources.iterator(); + final Iterator<ISchedulingRule> i = resources.iterator(); while (i.hasNext()) { final IResource resource = (IResource) i.next(); @@ -290,7 +293,9 @@ private void processResource(IResource resource) throws CommandException { try { final IProject project = resource.getProject(); final IProjectProperties properties = PMDPlugin.getDefault().loadProjectProperties(project); - final RuleSet ruleSet = properties.getProjectRuleSet(); + + final RuleSet ruleSet = filteredRuleSet(properties); //properties.getProjectRuleSet(); + final PMDEngine pmdEngine = getPmdEngineForProject(project); setStepCount(countResourceElement(resource)); log.debug("Visiting resource " + resource.getName() + " : " + getStepCount()); @@ -304,9 +309,9 @@ private void processResource(IResource resource) throws CommandException { visitor.setProjectProperties(properties); resource.accept(visitor); - this.ruleCount = ruleSet.getRules().size(); - this.fileCount += visitor.getProcessedFilesCount(); - this.pmdDuration += visitor.getActualPmdDuration(); + ruleCount = ruleSet.getRules().size(); + fileCount += visitor.getProcessedFilesCount(); + pmdDuration += visitor.getActualPmdDuration(); } catch (PropertiesException e) { throw new CommandException(e); @@ -355,7 +360,23 @@ private void processProject(IProject project) throws CommandException { throw new CommandException(e); } } + + private void taskScope(int activeRuleCount, int totalRuleCount) { + setTaskName("Checking with " + Integer.toString(activeRuleCount) + " out of " + Integer.toString(totalRuleCount) + " rules"); + } + private RuleSet filteredRuleSet(IProjectProperties properties) throws CommandException, PropertiesException { + + final RuleSet ruleSet = properties.getProjectRuleSet(); + IPreferences preferences = PMDPlugin.getDefault().getPreferencesManager().loadPreferences(); + Set<String> activeRuleNames = 
preferences.getActiveRuleNames(); + + RuleSet filteredRuleSet = RuleSetUtil.newCopyOf(ruleSet); + RuleSetUtil.retainOnly(filteredRuleSet, activeRuleNames); + taskScope(filteredRuleSet.getRules().size(), ruleSet.getRules().size()); + return filteredRuleSet; + } + /** * Review a resource delta */ @@ -363,7 +384,9 @@ private void processResourceDelta() throws CommandException { try { final IProject project = this.resourceDelta.getResource().getProject(); final IProjectProperties properties = PMDPlugin.getDefault().loadProjectProperties(project); - final RuleSet ruleSet = properties.getProjectRuleSet(); + + final RuleSet ruleSet = filteredRuleSet(properties); //properties.getProjectRuleSet(); + final PMDEngine pmdEngine = getPmdEngineForProject(project); this.setStepCount(countDeltaElement(this.resourceDelta)); log.debug("Visit of resource delta : " + getStepCount()); diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/IPreferences.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/IPreferences.java index 3fa90bdae8f..94a0f132560 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/IPreferences.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/IPreferences.java @@ -36,6 +36,8 @@ package net.sourceforge.pmd.eclipse.runtime.preferences; +import java.util.Set; + import org.apache.log4j.Level; /** @@ -57,7 +59,16 @@ public interface IPreferences { int MIN_TILE_SIZE_DEFAULT = 25; String LOG_FILENAME_DEFAULT = "pmd-eclipse.log"; Level LOG_LEVEL = Level.WARN; + String ACTIVE_RULES = ""; + boolean isActive(String rulename); + + void isActive(String ruleName, boolean flag); + + Set<String> getActiveRuleNames(); + + void setActiveRuleNames(Set<String> ruleNames); + /** * Should the Project 
Build Path be used? */ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PreferenceUIStore.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferenceUIStore.java similarity index 57% rename from pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PreferenceUIStore.java rename to pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferenceUIStore.java index e0f7e44c1ae..14751cc25f1 100755 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PreferenceUIStore.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferenceUIStore.java @@ -1,11 +1,18 @@ -package net.sourceforge.pmd.eclipse.ui.preferences.br; +package net.sourceforge.pmd.eclipse.runtime.preferences.impl; import java.io.IOException; +import java.util.Collection; import java.util.HashSet; import java.util.Set; +import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; +import net.sourceforge.pmd.eclipse.ui.preferences.br.RuleColumnDescriptor; +import net.sourceforge.pmd.eclipse.ui.preferences.br.TextColumnDescriptor; import net.sourceforge.pmd.eclipse.ui.preferences.editors.SWTUtil; +import org.eclipse.core.resources.IWorkspaceRoot; +import org.eclipse.core.resources.ResourcesPlugin; +import org.eclipse.core.runtime.IPath; import org.eclipse.jface.preference.PreferenceStore; /** @@ -17,10 +24,12 @@ public class PreferenceUIStore { private PreferenceStore preferenceStore; - private static final String tableFraction = "ruletable.fraction"; - private static final String tableHiddenCols = "ruletable.hiddenColumns"; - private static final String tableColumnSortUp = "ruletable.sortUp"; - private 
static final String groupingColumn = "ruletable.groupingColumn"; + private static final String tableFraction = PMDPlugin.PLUGIN_ID + ".ruletable.fraction"; + private static final String tableHiddenCols = PMDPlugin.PLUGIN_ID + ".ruletable.hiddenColumns"; + private static final String tableColumnSortUp = PMDPlugin.PLUGIN_ID + ".ruletable.sortUp"; + private static final String groupingColumn = PMDPlugin.PLUGIN_ID + ".ruletable.groupingColumn"; + private static final String selectedRuleNames = PMDPlugin.PLUGIN_ID + ".ruletable.selectedRules"; + private static final String selectedPropertyTab = PMDPlugin.PLUGIN_ID + ".ruletable.selectedPropertyTab"; private static final int tableFractionDefault = 55; private static final char stringSeparator = ','; @@ -31,10 +40,7 @@ public class PreferenceUIStore { }; private static final boolean defaultSortUp = false; - - // TODO - where to get the proper path? seem to park the file on my Ubuntu desktop for some reason - private static final String filename = "pmd.ui.preferences"; - + public static final PreferenceUIStore instance = new PreferenceUIStore(); private static String defaultHiddenColumnNames() { @@ -50,8 +56,13 @@ private PreferenceUIStore() { } private void initialize() { + + IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot(); + IPath path = root.getLocation(); + String fileName = path.append(PreferencesManagerImpl.NEW_PREFERENCE_LOCATION).toString(); + // TODO - replace this with the existing ViewMemento - preferenceStore = new PreferenceStore(filename); + preferenceStore = new PreferenceStore(fileName); try { preferenceStore.load(); @@ -66,6 +77,8 @@ private void createNewStore() { preferenceStore.setValue(tableHiddenCols, defaultHiddenColumnNames()); preferenceStore.setValue(tableColumnSortUp, defaultSortUp); preferenceStore.setValue(groupingColumn, ""); + preferenceStore.setValue(selectedRuleNames, ""); + preferenceStore.setValue(selectedPropertyTab, 0); save(); } @@ -97,6 +110,24 @@ public void 
hiddenColumnNames(Set<String> names) { preferenceStore.setValue(tableHiddenCols, nameStr); } + public int selectedPropertyTab() { + return preferenceStore.getInt(selectedPropertyTab); + } + + public void selectedPropertyTab(int anIndex) { + preferenceStore.setValue(selectedPropertyTab, anIndex); + } + + public Set<String> selectedRuleNames() { + String names = preferenceStore.getString(selectedRuleNames); + return SWTUtil.asStringSet(names, stringSeparator); + } + + public void selectedRuleNames(Collection<String> ruleNames) { + String nameStr = SWTUtil.asString(ruleNames, stringSeparator); + preferenceStore.setValue(selectedRuleNames, nameStr); + } + public boolean sortDirectionUp() { return preferenceStore.getBoolean(tableColumnSortUp); } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesImpl.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesImpl.java index 9fb921bc990..b7a91c3074b 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesImpl.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesImpl.java @@ -36,11 +36,14 @@ package net.sourceforge.pmd.eclipse.runtime.preferences.impl; -import org.apache.log4j.Level; +import java.util.HashSet; +import java.util.Set; import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferencesManager; +import org.apache.log4j.Level; + /** * Implements the preferences information structure * @@ -49,16 +52,17 @@ */ class PreferencesImpl implements IPreferences { + private IPreferencesManager preferencesManager; - private boolean projectBuildPathEnabled; - private boolean pmdPerspectiveEnabled; - private int 
maxViolationsPerFilePerRule; - private String reviewAdditionalComment; - private boolean reviewPmdStyleEnabled; - private int minTileSize; - private String logFileName; - private Level logLevel; - + private boolean projectBuildPathEnabled; + private boolean pmdPerspectiveEnabled; + private int maxViolationsPerFilePerRule; + private String reviewAdditionalComment; + private boolean reviewPmdStyleEnabled; + private int minTileSize; + private String logFileName; + private Level logLevel; + private Set<String> activeRuleNames = new HashSet<String>(); /** * Is constructed from a preferences manager * @param preferencesManager @@ -187,4 +191,24 @@ public void sync() { this.preferencesManager.storePreferences(this); } + public boolean isActive(String ruleName) { + return activeRuleNames.contains(ruleName); + } + + public void isActive(String ruleName, boolean flag) { + if (flag) { + activeRuleNames.add(ruleName); + } else { + activeRuleNames.remove(ruleName); + } + } + + public Set<String> getActiveRuleNames() { + return activeRuleNames; + } + + public void setActiveRuleNames(Set<String> ruleNames) { + activeRuleNames = ruleNames; + } + } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesManagerImpl.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesManagerImpl.java index 4cc5d879686..11ae3ff4972 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesManagerImpl.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/runtime/preferences/impl/PreferencesManagerImpl.java @@ -60,6 +60,7 @@ import net.sourceforge.pmd.eclipse.runtime.properties.PropertiesException; import net.sourceforge.pmd.eclipse.runtime.writer.IRuleSetWriter; import 
net.sourceforge.pmd.eclipse.runtime.writer.WriterException; +import net.sourceforge.pmd.util.StringUtil; import org.apache.log4j.Level; import org.apache.log4j.Logger; @@ -88,10 +89,11 @@ class PreferencesManagerImpl implements IPreferencesManager { private static final String MIN_TILE_SIZE = PMDPlugin.PLUGIN_ID + ".min_tile_size"; private static final String LOG_FILENAME = PMDPlugin.PLUGIN_ID + ".log_filename"; private static final String LOG_LEVEL = PMDPlugin.PLUGIN_ID + ".log_level"; + private static final String DISABLED_RULES = PMDPlugin.PLUGIN_ID + ".disabled_rules"; private static final String OLD_PREFERENCE_PREFIX = "net.sourceforge.pmd.runtime"; private static final String OLD_PREFERENCE_LOCATION = "/.metadata/.plugins/org.eclipse.core.runtime/.settings/net.sourceforge.pmd.runtime.prefs"; - private static final String NEW_PREFERENCE_LOCATION = "/.metadata/.plugins/org.eclipse.core.runtime/.settings/net.sourceforge.pmd.eclipse.plugin.prefs"; + public static final String NEW_PREFERENCE_LOCATION = "/.metadata/.plugins/org.eclipse.core.runtime/.settings/net.sourceforge.pmd.eclipse.plugin.prefs"; private static final String PREFERENCE_RULESET_FILE = "/ruleset.xml"; @@ -118,6 +120,7 @@ public IPreferences loadPreferences() { loadMinTileSize(); loadLogFileName(); loadLogLevel(); + loadActiveRules(); } return this.preferences; @@ -175,6 +178,7 @@ public void storePreferences(IPreferences preferences) { storeMinTileSize(); storeLogFileName(); storeLogLevel(); + storeActiveRules(); } /** @@ -269,6 +273,44 @@ private void loadLogLevel() { this.preferences.setLogLevel(Level.toLevel(this.loadPreferencesStore.getString(LOG_LEVEL))); } + /** + * Read the disabled rules + * + */ + private void loadActiveRules() { + this.loadPreferencesStore.setDefault(DISABLED_RULES, IPreferences.ACTIVE_RULES); + this.preferences.setActiveRuleNames(asStringSet(loadPreferencesStore.getString(DISABLED_RULES), ",")); + } + + private static Set<String> asStringSet(String delimitedString, 
String delimiter) { + + String[] values = delimitedString.split(delimiter); + Set<String> valueSet = new HashSet<String>(values.length); + for (int i=0; i<values.length; i++) { + String name = values[i].trim(); + if (StringUtil.isEmpty(name)) continue; + valueSet.add(name); + } + return valueSet; + } + + private static String asDelimitedString(Set<String>values, String delimiter) { + + if (values == null || values.isEmpty()) return ""; + + StringBuilder sb = new StringBuilder(); + + for (String value : values) { + sb.append(delimiter).append(value); + } + + return sb.toString(); + } + + private void storeActiveRules() { + storePreferencesStore.setValue(DISABLED_RULES, asDelimitedString(preferences.getActiveRuleNames(), ",")); + } + /** * Write the projectBuildPathEnabled flag */ diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/PMDUiConstants.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/PMDUiConstants.java index 136cac89378..0ba376ebe22 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/PMDUiConstants.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/PMDUiConstants.java @@ -80,7 +80,8 @@ public class PMDUiConstants { public static final String ICON_PROJECT = "icons/obj_project.gif"; public static final String ICON_PACKAGE = "icons/obj_package.gif"; public static final String ICON_JAVACU = "icons/obj_javacu.gif"; - + + public static final String ICON_BUTTON_PRIO0 = "icons/btn_prio0.png"; public static final String ICON_BUTTON_PRIO1 = "icons/btn_prio1.gif"; public static final String ICON_BUTTON_PRIO2 = "icons/btn_prio2.gif"; public static final String ICON_BUTTON_PRIO3 = "icons/btn_prio3.gif"; @@ -91,7 +92,8 @@ public class PMDUiConstants { public static final String ICON_BUTTON_FILEMARKERS = "icons/btn_filemarkers.gif"; public static 
final String ICON_BUTTON_MARKERFILES = "icons/btn_markerfiles.gif"; public static final String ICON_BUTTON_FILES = "icons/btn_files.gif"; - + + public static final String ICON_BUTTON_DISABLE = "icons/btn_disable.gif"; public static final String ICON_BUTTON_COLLAPSE = "icons/btn_collapse.gif"; public static final String ICON_BUTTON_REMVIO = "icons/btn_remvio.gif"; public static final String ICON_BUTTON_QUICKFIX = "icons/btn_quickfix.gif"; @@ -99,6 +101,21 @@ public class PMDUiConstants { public static final String ICON_BUTTON_REFRESH = "icons/btn_refresh.gif"; public static final String ICON_BUTTON_CALCULATE = "icons/btn_calculate.gif"; + + public static final String ICON_BUTTON_UPARROW = "icons/upArrow.png"; + public static final String ICON_BUTTON_DOWNARROW = "icons/downArrow.png"; + public static final String ICON_BUTTON_ADD = "icons/add.gif"; + public static final String ICON_BUTTON_DELETE = "icons/delete.gif"; + public static final String ICON_BUTTON_CHECK_ALL = "icons/checkAll.png"; + public static final String ICON_BUTTON_UNCHECK_ALL = "icons/uncheckAll.png"; + public static final String ICON_BUTTON_IMPORT = "icons/import.gif"; + public static final String ICON_BUTTON_EXPORT = "icons/export.gif"; + public static final String ICON_BUTTON_EDITOR = "icons/editTool.png"; + public static final String ICON_BUTTON_SORT_CHECKED = "icons/sortChecked.png"; + + public static final String ICON_GREEN_CHECK = "icons/icon_greenCheck.png"; + public static final String ICON_EMPTY = "icons/icon_empty.png"; + public static final String ID_PERSPECTIVE = PLUGIN_ID + ".views.pmdPerspective"; public static final String ID_OUTLINE = PLUGIN_ID + ".views.violationOutline"; public static final String ID_OVERVIEW = PLUGIN_ID + ".views.violationOverview"; diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/actions/PMDCheckAction.java 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/actions/PMDCheckAction.java index 8585655f5c3..620265b7abb 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/actions/PMDCheckAction.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/actions/PMDCheckAction.java @@ -79,8 +79,7 @@ public void run(IAction action) { if (selection instanceof IStructuredSelection) { reviewSelectedResources((IStructuredSelection) selection); } else { - log.debug("The selection is not an instance of IStructuredSelection. This is not supported: " - + selection.getClass().getName()); + log.debug("The selection is not an instance of IStructuredSelection. This is not supported: " + selection.getClass().getName()); } } @@ -90,8 +89,7 @@ else if (isEditorPart()) { if (editorInput instanceof IFileEditorInput) { reviewSingleResource(((IFileEditorInput) editorInput).getFile()); } else { - log.debug("The kind of editor input is not supported. The editor input if of type: " - + editorInput.getClass().getName()); + log.debug("The kind of editor input is not supported. 
The editor input if of type: " + editorInput.getClass().getName()); } } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/actions/RuleSetUtil.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/actions/RuleSetUtil.java new file mode 100755 index 00000000000..086624757d7 --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/actions/RuleSetUtil.java @@ -0,0 +1,50 @@ +package net.sourceforge.pmd.eclipse.ui.actions; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +import net.sourceforge.pmd.Rule; +import net.sourceforge.pmd.RuleSet; + +/** + * + * @author Brian Remedios + */ +public class RuleSetUtil { + + public static RuleSet newCopyOf(RuleSet original) { + + RuleSet rs = new RuleSet(); + rs.setName(original.getName()); + rs.setDescription(original.getDescription()); + rs.setFileName(original.getFileName()); + rs.setExcludePatterns(original.getExcludePatterns()); + rs.setIncludePatterns(original.getIncludePatterns()); + rs.addRuleSet(original); + + return rs; + } + + /** + * This should not really work but the ruleset hands out its + * internal container....oops! 
:) + * + * @param ruleSet + * @param unwantedRuleNames + */ + public static void retainOnly(RuleSet ruleSet, Set<String> wantedRuleNames) { + + Collection<Rule> rules = ruleSet.getRules(); + Collection<Rule> ruleCopy = new ArrayList<Rule>(rules.size()); + ruleCopy.addAll(rules); + + for (Rule rule : ruleCopy) { + if (!wantedRuleNames.contains(rule.getName())) { + rules.remove(rule); + } + } + + } +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/model/FileRecord.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/model/FileRecord.java index 88cfbdcdf9b..9bc0a41f67a 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/model/FileRecord.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/model/FileRecord.java @@ -159,12 +159,13 @@ protected final AbstractPMDRecord[] createChildren() { while (markerIterator.hasNext()) { final IMarker marker = markerIterator.next(); - MarkerRecord markerRecord = allMarkerMap.get(marker.getAttribute(PMDUiConstants.KEY_MARKERATT_RULENAME)); + MarkerRecord markerRecord = allMarkerMap.get(MarkerUtil.ruleNameFor(marker)); if (markerRecord == null) { - String ruleName = (String)marker.getAttribute(PMDUiConstants.KEY_MARKERATT_RULENAME); + String ruleName = MarkerUtil.ruleNameFor(marker); markerRecord = new MarkerRecord(this, // NOPMD by Sven on 13.11.06 11:57 ruleName, - ((Integer)marker.getAttribute(PMDUiConstants.KEY_MARKERATT_PRIORITY)).intValue()); + MarkerUtil.rulePriorityFor(marker) + ); markerRecord.addViolation(marker); allMarkerMap.put(ruleName, markerRecord); } else { @@ -430,7 +431,7 @@ public String authorName() { return RepositoryUtil.hasRepositoryAccess() ? 
RepositoryUtil.authorNameFor(resource) : - "<unknown>"; + null; } /** * @see net.sourceforge.pmd.eclipse.ui.model.AbstractPMDRecord#getResourceType() diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/model/RepositoryUtil.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/model/RepositoryUtil.java index dc521348512..2b8807a9b16 100755 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/model/RepositoryUtil.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/model/RepositoryUtil.java @@ -3,6 +3,7 @@ import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.team.core.RepositoryProvider; +import org.eclipse.team.core.history.IFileHistoryProvider; import org.eclipse.team.core.history.IFileRevision; /** @@ -20,7 +21,7 @@ public static boolean hasRepositoryAccess() { if (hasRepositoryAccess != null) return hasRepositoryAccess; try { - Object cls = Class.forName("org.eclipse.team.core.RepositoryProvider"); + Class.forName("org.eclipse.team.core.RepositoryProvider"); hasRepositoryAccess = Boolean.TRUE; } catch (ClassNotFoundException e) { hasRepositoryAccess = Boolean.FALSE; @@ -29,13 +30,25 @@ public static boolean hasRepositoryAccess() { return hasRepositoryAccess; } + /** + * Returns the name of the resource author if the resource was parked in + * a repository or null if it wasn't. 
+ * + * @param resource + * @return String + */ public static String authorNameFor(IResource resource) { IProject project = resource.getProject(); String authorName = null; try { RepositoryProvider provider = RepositoryProvider.getProvider(project); - IFileRevision revision = provider.getFileHistoryProvider().getWorkspaceFileRevision(resource); + if (provider == null) return null; + + IFileHistoryProvider fhProvider = provider.getFileHistoryProvider(); + if (fhProvider == null) return null; + + IFileRevision revision = fhProvider.getWorkspaceFileRevision(resource); authorName = revision.getAuthor(); } catch (Exception ex) { ex.printStackTrace(); diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/nls/StringKeys.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/nls/StringKeys.java index bf48ce915fc..4c80d73e454 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/nls/StringKeys.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/nls/StringKeys.java @@ -96,6 +96,7 @@ public class StringKeys { public static final String MSGKEY_PREF_RULESET_COLUMN_FILTERS_XPATH = "preference.ruleset.column.filters.xpath"; public static final String MSGKEY_PREF_RULESET_COLUMN_MIN_VER = "preference.ruleset.column.minimum_version"; public static final String MSGKEY_PREF_RULESET_COLUMN_PRIORITY = "preference.ruleset.column.priority"; + public static final String MSGKEY_PREF_RULESET_COLUMN_FIXCOUNT = "preference.ruleset.column.fixCount"; public static final String MSGKEY_PREF_RULESET_COLUMN_PROPERTIES = "preference.ruleset.column.properties"; public static final String MSGKEY_PREF_RULESET_COLUMN_DESCRIPTION = "preference.ruleset.column.description"; public static final String MSGKEY_PREF_RULESET_COLUMN_PROPERTY = "preference.ruleset.column.property"; @@ -121,11 +122,16 
@@ public class StringKeys { public static final String MSGKEY_PREF_RULESET_DIALOG_PROPERTY_NAME = "preference.ruleset.dialog.property_name"; public static final String MSGKEY_PREF_RULESET_RULES_GROUPED_BY = "preference.ruleset.label.rules_grouped_by"; + public static final String MSGKEY_PREF_RULESET_ACTIVE_RULE_COUNT = "preference.ruleset.label.active_rule_count"; + public static final String MSGKEY_PREF_RULESET_BUTTON_CHECK_ALL = "preference.ruleset.button.tooltip.check.all"; + public static final String MSGKEY_PREF_RULESET_BUTTON_UNCHECK_ALL = "preference.ruleset.button.tooltip.uncheck.all"; + public static final String MSGKEY_PREF_RULESET_TAB_PROPERTIES = "preference.ruleedit.tab.properties"; public static final String MSGKEY_PREF_RULESET_TAB_DESCRIPTION = "preference.ruleedit.tab.description"; public static final String MSGKEY_PREF_RULESET_TAB_FILTERS = "preference.ruleedit.tab.filters"; public static final String MSGKEY_PREF_RULESET_TAB_XPATH = "preference.ruleedit.tab.xpath"; + public static final String MSGKEY_PREF_RULESET_TAB_FIXES = "preference.ruleedit.tab.fixes"; public static final String MSGKEY_PREF_RULESET_TAB_EXAMPLES = "preference.ruleedit.tab.examples"; public static final String MSGKEY_LABEL_XPATH_EXCLUSION = "preference.ruleedit.label.xpath_exclusion"; @@ -208,6 +214,7 @@ public class StringKeys { public static final String MSGKEY_VIEW_COLUMN_LOCATION = "view.column.location"; public static final String MSGKEY_VIEW_TOOLTIP_PROJECT = "view.tooltip.project"; public static final String MSGKEY_VIEW_TOOLTIP_FILE = "view.tooltip.file"; + public static final String MSGKEY_VIEW_TOOLTIP_DISABLE = "view.tooltip.disable"; public static final String MSGKEY_VIEW_TOOLTIP_ERRORHIGH_FILTER = "view.tooltip.errorhigh_filter"; public static final String MSGKEY_VIEW_TOOLTIP_ERROR_FILTER = "view.tooltip.error_filter"; public static final String MSGKEY_VIEW_TOOLTIP_WARNINGHIGH_FILTER = "view.tooltip.warninghigh_filter"; @@ -219,7 +226,9 @@ public class StringKeys { 
public static final String MSGKEY_VIEW_TOOLTIP_REVIEW = "view.tooltip.review"; public static final String MSGKEY_VIEW_TOOLTIP_QUICKFIX = "view.tooltip.quickfix"; public static final String MSGKEY_VIEW_TOOLTIP_CALCULATE_STATS = "view.tooltip.calc_stats"; + public static final String MSGKEY_VIEW_TOOLTIP_COMPUTE_METRICS = "view.tooltip.compute_metrics"; public static final String MSGKEY_VIEW_ACTION_PROJECT = "view.action.project"; + public static final String MSGKEY_VIEW_ACTION_DISABLE = "view.action.disable"; public static final String MSGKEY_VIEW_ACTION_FILE = "view.action.file"; public static final String MSGKEY_VIEW_ACTION_ERRORHIGH = "view.action.errorhigh"; public static final String MSGKEY_VIEW_ACTION_ERROR = "view.action.error"; diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/CPDPreferencePage.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/CPDPreferencePage.java index c57daf9f2a0..41bc3944d4e 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/CPDPreferencePage.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/CPDPreferencePage.java @@ -1,20 +1,22 @@ package net.sourceforge.pmd.eclipse.ui.preferences; -import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; +import net.sourceforge.pmd.cpd.GUI; import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; +import net.sourceforge.pmd.eclipse.ui.preferences.br.AbstractPMDPreferencePage; import org.eclipse.jface.preference.PreferencePage; import org.eclipse.swt.SWT; +import org.eclipse.swt.events.SelectionAdapter; +import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; +import org.eclipse.swt.widgets.Button; 
import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Spinner; -import org.eclipse.ui.IWorkbench; -import org.eclipse.ui.IWorkbenchPreferencePage; /** * Preference page for CPD properties @@ -23,21 +25,14 @@ * @author Philippe Herlin, Brian Remedios * */ -public class CPDPreferencePage extends PreferencePage implements IWorkbenchPreferencePage { +public class CPDPreferencePage extends AbstractPMDPreferencePage { private Spinner minTileSizeSpinner; private Label minTileLabel; - private IPreferences preferences; - /** - * Insert the method's description here. - * @see PreferencePage#init - */ - public void init(IWorkbench workbench) { - setDescription(getMessage(StringKeys.MSGKEY_PREF_CPD_TITLE)); - this.preferences = PMDPlugin.getDefault().loadPreferences(); + protected String descriptionId() { + return StringKeys.MSGKEY_PREF_CPD_TITLE; } - /** * Insert the method's description here. 
* @see PreferencePage#createContents @@ -56,9 +51,41 @@ protected Control createContents(Composite parent) { // Layout children generalGroup.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); + buildCPDLauncherButton(composite); + return composite; } + public void createControl(Composite parent) { + super.createControl(parent); + + setModified(false); + } + + /** + * Build the CPD launcher button + * @param parent Composite + * @return Button + */ + private Button buildCPDLauncherButton(Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setText("Launch CPD..."); + + button.addSelectionListener(new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent event) { + new Thread(new Runnable() { + public void run() { + GUI.main(new String[] { "-noexitonclose" }); + } + }).start(); + } + }); + + return button; + } + + /** * Build the group of general preferences * @param parent the parent composite @@ -97,19 +124,9 @@ protected void performDefaults() { * @see org.eclipse.jface.preference.IPreferencePage#performOk() */ public boolean performOk() { - this.preferences.setMinTileSize(Integer.valueOf(minTileSizeSpinner.getText()).intValue()); - this.preferences.sync(); - + preferences.setMinTileSize(Integer.valueOf(minTileSizeSpinner.getText()).intValue()); + return super.performOk(); } - /** - * Helper method to shorten message access - * @param key a message key - * @return requested message - */ - private String getMessage(String key) { - return PMDPlugin.getDefault().getStringTable().getString(key); - } - } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/AbstractPMDPreferencePage.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/AbstractPMDPreferencePage.java new file mode 100755 index 00000000000..7eb8aa1d3ef --- /dev/null +++ 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/AbstractPMDPreferencePage.java @@ -0,0 +1,70 @@ +package net.sourceforge.pmd.eclipse.ui.preferences.br; + +import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; +import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; + +import org.eclipse.jface.preference.PreferencePage; +import org.eclipse.ui.IWorkbench; +import org.eclipse.ui.IWorkbenchPreferencePage; + +public abstract class AbstractPMDPreferencePage extends PreferencePage implements IWorkbenchPreferencePage { + + protected IPreferences preferences; + private boolean modified; + + protected static PMDPlugin plugin = PMDPlugin.getDefault(); + + abstract protected String descriptionId(); + + /** + * Returns the isModified. + * @return boolean + */ + public boolean isModified() { + return modified; + } + + public void setModified() { + setModified(true); + } + /** + * Sets the isModified. + * @param isModified The isModified to set + */ + public void setModified(boolean isModified) { + modified = isModified; + + getApplyButton().setEnabled(modified); + getDefaultsButton().setEnabled(!modified); + } + + /** + * Insert the method's description here. 
+ * @see PreferencePage#init + */ + + public void init(IWorkbench workbench) { + setDescription(getMessage(descriptionId())); + preferences = PMDPlugin.getDefault().loadPreferences(); + } + + /** + * @see org.eclipse.jface.preference.IPreferencePage#performOk() + */ + public boolean performOk() { + + preferences.sync(); + + return super.performOk(); + } + + /** + * Helper method to shorten message access + * @param key a message key + * @return requested message + */ + protected String getMessage(String key) { + return PMDPlugin.getDefault().getStringTable().getString(key); + } + +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/AbstractRuleColumnDescriptor.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/AbstractRuleColumnDescriptor.java index 80339598d67..97abc11d376 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/AbstractRuleColumnDescriptor.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/AbstractRuleColumnDescriptor.java @@ -2,6 +2,7 @@ import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.eclipse.ui.preferences.editors.SWTUtil; +import net.sourceforge.pmd.eclipse.util.ResourceManager; import org.eclipse.swt.SWT; import org.eclipse.swt.widgets.Event; @@ -18,13 +19,14 @@ public abstract class AbstractRuleColumnDescriptor implements RuleColumnDescriptor { private final String label; - private final String tooltip; + private final String tooltip; private final int alignment; private final int width; private final RuleFieldAccessor accessor; private final boolean isResizable; - - protected AbstractRuleColumnDescriptor(String labelKey, int theAlignment, int theWidth, RuleFieldAccessor theAccessor, boolean resizableFlag) { + private final String imagePath; + + 
protected AbstractRuleColumnDescriptor(String labelKey, int theAlignment, int theWidth, RuleFieldAccessor theAccessor, boolean resizableFlag, String theImagePath) { super(); label = SWTUtil.stringFor(labelKey); @@ -33,6 +35,7 @@ protected AbstractRuleColumnDescriptor(String labelKey, int theAlignment, int th width = theWidth; accessor = theAccessor; isResizable = resizableFlag; + imagePath = theImagePath; } protected TreeColumn buildTreeColumn(Tree parent, final RuleSortListener sortListener) { @@ -40,11 +43,12 @@ protected TreeColumn buildTreeColumn(Tree parent, final RuleSortListener sortLis final TreeColumn tc = new TreeColumn(parent, alignment); tc.setWidth(width); tc.setResizable(isResizable); - + tc.setToolTipText(tooltip); + if (imagePath != null) tc.setImage(ResourceManager.imageFor(imagePath)); + tc.addListener(SWT.Selection, new Listener() { public void handleEvent(Event e) { - sortListener.sortBy(accessor(), e.widget); - // tc.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_LABEL_ARRDN)); + sortListener.sortBy(accessor(), e.widget); } }); diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/IconColumnDescriptor.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/IconColumnDescriptor.java index 267429b68a4..38292946cdd 100755 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/IconColumnDescriptor.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/IconColumnDescriptor.java @@ -24,15 +24,20 @@ public class IconColumnDescriptor extends AbstractRuleColumnDescriptor { private Map<Object, Image> iconsByValue; - private static final Map<Object, String> iconNamesByValue = CollectionUtil.mapFrom( + private static final Map<Object, String> iconNamesByPriority = CollectionUtil.mapFrom( 
new Object[] { RulePriority.LOW, RulePriority.MEDIUM_LOW, RulePriority.MEDIUM, RulePriority.MEDIUM_HIGH, RulePriority.HIGH }, new String[] {PMDUiConstants.ICON_BUTTON_PRIO5, PMDUiConstants.ICON_BUTTON_PRIO4, PMDUiConstants.ICON_BUTTON_PRIO3, PMDUiConstants.ICON_BUTTON_PRIO2, PMDUiConstants.ICON_BUTTON_PRIO1} ); - public static final RuleColumnDescriptor priority = new IconColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PRIORITY, SWT.RIGHT, 53, RuleFieldAccessor.priority, true, iconNamesByValue); + private static final Map<Object, String> iconNamesByBoolean = CollectionUtil.mapFrom( + new Object[] { Boolean.TRUE, Boolean.FALSE }, + new String[] { PMDUiConstants.ICON_GREEN_CHECK, PMDUiConstants.ICON_EMPTY} + ); + + public static final RuleColumnDescriptor priority = new IconColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PRIORITY, SWT.RIGHT, 53, RuleFieldAccessor.priority, true, PMDUiConstants.ICON_BUTTON_PRIO0, iconNamesByPriority); - public IconColumnDescriptor(String labelKey, int theAlignment, int theWidth, RuleFieldAccessor theAccessor, boolean resizableFlag, Map<Object, String> imageNamesByValue) { - super(labelKey, theAlignment, theWidth, theAccessor, resizableFlag); + public IconColumnDescriptor(String labelKey, int theAlignment, int theWidth, RuleFieldAccessor theAccessor, boolean resizableFlag, String theImagePath, Map<Object, String> imageNamesByValue) { + super(labelKey, theAlignment, theWidth, theAccessor, resizableFlag, theImagePath); iconsByValue = iconsFor(imageNamesByValue); } @@ -53,7 +58,6 @@ public Image imageFor(Rule rule) { public TreeColumn newTreeColumnFor(Tree parent, int columnIndex, RuleSortListener sortListener, Map<Integer, List<Listener>> paintListeners) { TreeColumn tc = buildTreeColumn(parent, sortListener); - tc.setToolTipText(tooltip()); return tc; } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/ImageColumnDescriptor.java 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/ImageColumnDescriptor.java index 929d1a6c5d8..a5fcda65fe9 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/ImageColumnDescriptor.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/ImageColumnDescriptor.java @@ -6,7 +6,6 @@ import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; -import net.sourceforge.pmd.eclipse.util.ResourceManager; import net.sourceforge.pmd.eclipse.util.Util; import org.eclipse.swt.SWT; @@ -21,16 +20,14 @@ */ public class ImageColumnDescriptor extends AbstractRuleColumnDescriptor { - private final String imagePath; private final CellPainterBuilder painterBuilder; public static final RuleColumnDescriptor filterViolationRegex = new ImageColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_FILTERS_REGEX, SWT.LEFT, 25, RuleFieldAccessor.violationRegex, false, PMDUiConstants.ICON_FILTER, Util.textAsColorShapeFor(16, 16, Util.shape.square)); public static final RuleColumnDescriptor filterViolationXPath = new ImageColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_FILTERS_XPATH, SWT.LEFT, 25, RuleFieldAccessor.violationXPath, false, PMDUiConstants.ICON_FILTER_X, Util.textAsColorShapeFor(16, 16, Util.shape.circle)); public ImageColumnDescriptor(String labelKey, int theAlignment, int theWidth, RuleFieldAccessor theAccessor, boolean resizableFlag, String theImagePath, CellPainterBuilder thePainterBuilder) { - super(labelKey, theAlignment, theWidth, theAccessor, resizableFlag); + super(labelKey, theAlignment, theWidth, theAccessor, resizableFlag, theImagePath); - imagePath = theImagePath; painterBuilder = thePainterBuilder; } @@ -39,8 +36,7 @@ public ImageColumnDescriptor(String labelKey, int theAlignment, 
int theWidth, Ru */ public TreeColumn newTreeColumnFor(Tree parent, int columnIndex, final RuleSortListener sortListener, Map<Integer, List<Listener>> paintListeners) { TreeColumn tc = buildTreeColumn(parent, sortListener); - tc.setToolTipText(tooltip()); - if (imagePath != null) tc.setImage(ResourceManager.imageFor(imagePath)); + if (painterBuilder != null) painterBuilder.addPainterFor(tc.getParent(), columnIndex, accessor(), paintListeners); return tc; } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/ModifyListener.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/ModifyListener.java new file mode 100755 index 00000000000..85286959636 --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/ModifyListener.java @@ -0,0 +1,10 @@ +package net.sourceforge.pmd.eclipse.ui.preferences.br; + +/** + * + * @author Brian Remedios + */ +public interface ModifyListener { + + void setModified(); +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PMDPreferencePage.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PMDPreferencePage.java index 176ce8a4d7b..1f10b1360af 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PMDPreferencePage.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PMDPreferencePage.java @@ -17,7 +17,7 @@ import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.RulePriority; import net.sourceforge.pmd.RuleSet; -import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; +import net.sourceforge.pmd.eclipse.runtime.preferences.impl.PreferenceUIStore; import 
net.sourceforge.pmd.eclipse.runtime.writer.IRuleSetWriter; import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; @@ -29,6 +29,7 @@ import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.ExamplePanelManager; import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.ExclusionPanelManager; import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.PerRulePropertyPanelManager; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.QuickFixPanelManager; import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.RulePropertyManager; import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.XPathPanelManager; import net.sourceforge.pmd.eclipse.util.ResourceManager; @@ -45,12 +46,12 @@ import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.dialogs.ProgressMonitorDialog; import org.eclipse.jface.operation.IRunnableWithProgress; -import org.eclipse.jface.preference.IPreferenceStore; -import org.eclipse.jface.preference.PreferencePage; import org.eclipse.jface.viewers.CheckboxTreeViewer; +import org.eclipse.jface.viewers.ICheckStateProvider; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; +import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; @@ -82,6 +83,7 @@ import org.eclipse.swt.widgets.TreeItem; import org.eclipse.ui.IWorkbench; import org.eclipse.ui.IWorkbenchPreferencePage; +import org.eclipse.ui.dialogs.ContainerCheckedTreeViewer; /** * This page is used to modify preferences only. 
They are stored in the preference store that belongs to @@ -91,7 +93,7 @@ * @author Brian Remedios */ -public class PMDPreferencePage extends PreferencePage implements IWorkbenchPreferencePage, ValueChangeListener, RuleSortListener { +public class PMDPreferencePage extends AbstractPMDPreferencePage implements ValueChangeListener, RuleSortListener { public static PMDPreferencePage activeInstance = null; @@ -100,6 +102,7 @@ public class PMDPreferencePage extends PreferencePage implements IWorkbenchPrefe TextColumnDescriptor.name, //TextColumnDescriptor.priorityName, IconColumnDescriptor.priority, + TextColumnDescriptor.fixCount, TextColumnDescriptor.since, TextColumnDescriptor.ruleSetName, TextColumnDescriptor.ruleType, @@ -145,16 +148,15 @@ public class PMDPreferencePage extends PreferencePage implements IWorkbenchPrefe formattersByType.put(Method[].class, ValueFormatter.MultiMethodFormatter); formattersByType.put(Object[].class, ValueFormatter.ObjectArrayFormatter); } - - private CheckboxTreeViewer ruleTreeViewer; - private Button addRuleButton; - private Button removeRuleButton; -// private Button editRuleButton; - private RuleSet ruleSet; // TODO - what is this used for? 
- br - private TabFolder tabFolder; - private Set<Object> checkedRules = new HashSet<Object>(); - private Menu ruleListMenu; - private Set<String> hiddenColumnNames = new HashSet<String>(); + + private ContainerCheckedTreeViewer ruleTreeViewer; + private Button addRuleButton; + private Button removeRuleButton; + private RuleSet ruleSet; + private TabFolder tabFolder; +// private final Set<Rule> checkedRules = new HashSet<Rule>(); + private Menu ruleListMenu; + private Set<String> hiddenColumnNames = new HashSet<String>(); private RulePropertyManager[] rulePropertyManagers; @@ -168,24 +170,42 @@ public class PMDPreferencePage extends PreferencePage implements IWorkbenchPrefe private Map<RulePriority, MenuItem> priorityMenusByPriority; private Map<String, MenuItem> rulesetMenusByName; - private boolean modified = false; - private static PMDPlugin plugin = PMDPlugin.getDefault(); + private RuleFieldAccessor checkedColumnAccessor; + + private Button sortByCheckedButton; + private Button selectAllButton; + private Button unSelectAllButton; /** * @see IWorkbenchPreferencePage#init(org.eclipse.ui.IWorkbench) */ public void init(IWorkbench workbench) { - setDescription(getMessage(StringKeys.MSGKEY_PREF_RULESET_TITLE)); + super.init(workbench); + activeInstance = this; + + hiddenColumnNames = PreferenceUIStore.instance.hiddenColumnNames(); -// hiddenColumnNames.add(TextColumnDescriptor.since.label()); -// hiddenColumnNames.add(TextColumnDescriptor.externalURL.label()); -// hiddenColumnNames.add(TextColumnDescriptor.minLangVers.label()); -// hiddenColumnNames.add(TextColumnDescriptor.exampleCount.label()); + checkedColumnAccessor = createCheckedItemAccessor(); + } + + private RuleFieldAccessor createCheckedItemAccessor() { - hiddenColumnNames = PreferenceUIStore.instance.hiddenColumnNames(); + return new BasicRuleFieldAccessor() { + public Comparable<Boolean> valueFor(Rule rule) { + return preferences.isActive(rule.getName()); + } + }; + } + + private void 
sortByCheckedItems() { + sortBy(checkedColumnAccessor, ruleTreeViewer.getTree().getColumn(0)); + } + + + protected String descriptionId() { + return StringKeys.MSGKEY_PREF_RULESET_TITLE; } - /** * @see org.eclipse.jface.preference.PreferencePage#performDefaults() */ @@ -195,17 +215,31 @@ protected void performDefaults() { super.performDefaults(); } + private void storeActiveRules() { + + Object[] chosenRules = ruleTreeViewer.getCheckedElements(); + for (Object item : chosenRules) { + if (item instanceof Rule) { + preferences.isActive(((Rule)item).getName(), true); + } + } + + System.out.println("Active rules: " + preferences.getActiveRuleNames()); + } + /** * @see org.eclipse.jface.preference.IPreferencePage#performOk() */ @Override public boolean performOk() { + saveRuleSelections(); PreferenceUIStore.instance.save(); - if (modified) { + if (isModified()) { updateRuleSet(); rebuildProjects(); + storeActiveRules(); } return super.performOk(); @@ -213,11 +247,35 @@ public boolean performOk() { @Override public boolean performCancel() { - + + saveRuleSelections(); PreferenceUIStore.instance.save(); return super.performCancel(); } + private void restoreSavedRuleSelections() { + + Set<String> names = PreferenceUIStore.instance.selectedRuleNames(); + List<Rule> rules = new ArrayList<Rule>(); + for (String name : names) rules.add(ruleSet.getRuleByName(name)); + + IStructuredSelection selection = new StructuredSelection(rules); + ruleTreeViewer.setSelection(selection); + } + + private void saveRuleSelections() { + + IStructuredSelection selection = (IStructuredSelection)ruleTreeViewer.getSelection(); + + List<String> ruleNames = new ArrayList<String>(); + for (Object item : selection.toList()) { + if (item instanceof Rule) + ruleNames.add(((Rule)item).getName()); + } + + PreferenceUIStore.instance.selectedRuleNames(ruleNames); + } + /** * @see org.eclipse.jface.preference.PreferencePage#createContents(Composite) */ @@ -228,6 +286,10 @@ protected Control 
createContents(Composite parent) { Composite composite = new Composite(parent, SWT.NULL); layoutControls(composite); + + restoreSavedRuleSelections(); + updateCheckButtons(); + return composite; } @@ -271,7 +333,45 @@ private Composite createRuleSection(Composite parent) { return ruleSection; } + + private int[] selectionRatioIn(Rule[] rules) { + + int selectedCount = 0; + for (Rule rule : rules) { + if (preferences.isActive(rule.getName())) selectedCount++; + } + return new int[] { selectedCount , rules.length }; + } + + private ICheckStateProvider createCheckStateProvider() { + return new ICheckStateProvider() { + + public boolean isChecked(Object item) { + if (item instanceof Rule) { + return preferences.isActive(((Rule)item).getName()); + } else { + if (item instanceof RuleGroup) { + int[] fraction = selectionRatioIn(((RuleGroup)item).rules()); + return (fraction[0] > 0) && (fraction[0] == fraction[1]); + } + } + return false; // should never get here + } + + public boolean isGrayed(Object item) { + + if (item instanceof Rule) return false; + if (item instanceof RuleGroup) { + int[] fraction = selectionRatioIn(((RuleGroup)item).rules()); + return (fraction[0] > 0) && (fraction[0] != fraction[1]); + } + return false; + } + + }; + } + /** * Main layout * @param parent Composite @@ -351,10 +451,6 @@ public static String propertyStringFrom(Rule rule) { } return sb.toString(); } - - public static Rule ruleFrom(String ruleName) { - return null; - } public static String ruleSetNameFrom(Rule rule) { return ruleSetNameFrom( rule.getRuleSetName() ); @@ -385,11 +481,26 @@ private void redrawTable(String sortColumnLabel, int sortDir) { ruleTreeViewer.getTree().setSortDirection(sortDir); } + private void updateCheckButtons() { + + Rule[] rules = new Rule[ruleSet.size()]; + rules = ruleSet.getRules().toArray(rules); + int[] selectionRatio = selectionRatioIn(rules); + + selectAllButton.setEnabled( selectionRatio[0] < selectionRatio[1]); + unSelectAllButton.setEnabled( 
selectionRatio[0] > 0); + sortByCheckedButton.setEnabled( (selectionRatio[0] != 0) && (selectionRatio[0] != selectionRatio[1])); + } + private Composite buildGroupCombo(Composite parent, String comboLabelKey) { Composite panel = new Composite(parent, 0); - GridLayout layout = new GridLayout(2, false); + GridLayout layout = new GridLayout(5, false); panel.setLayout(layout); + + sortByCheckedButton = buildSortByCheckedItemsButton(panel); + selectAllButton = buildSelectAllButton(panel); + unSelectAllButton = buildUnselectAllButton(panel); Label label = new Label(panel, 0); GridData data = new GridData(); @@ -427,8 +538,9 @@ private TabFolder buildTabFolder(Composite parent) { buildPropertyTab(tabFolder, 0, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_PROPERTIES)), buildDescriptionTab(tabFolder, 1, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_DESCRIPTION)), buildUsageTab(tabFolder, 2, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_FILTERS)), - buildXPathTab(tabFolder, 3, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_XPATH)), - buildExampleTab(tabFolder, 4, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_EXAMPLES)), + buildXPathTab(tabFolder, 3, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_XPATH)), + buildQuickFixTab(tabFolder, 4, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_FIXES)), + buildExampleTab(tabFolder, 5, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_EXAMPLES)), }; tabFolder.pack(); @@ -503,6 +615,23 @@ private RulePropertyManager buildExampleTab(TabFolder parent, int index, String return manager; } + /** + * @param parent TabFolder + * @param index int + */ + private RulePropertyManager buildQuickFixTab(TabFolder parent, int index, String title) { + + TabItem tab = new TabItem(parent, 0, index); + tab.setText(title); + + QuickFixPanelManager manager = new QuickFixPanelManager(this); + tab.setControl( + manager.setupOn(parent) + ); + manager.tab(tab); + return manager; + } + /** * * @param parent 
TabFolder @@ -539,38 +668,21 @@ private Composite buildRuleTableButtons(Composite parent) { gridLayout.verticalSpacing = 3; composite.setLayout(gridLayout); - removeRuleButton = buildRemoveRuleButton(composite); -// editRuleButton = buildEditRuleButton(composite); addRuleButton = buildAddRuleButton(composite); + removeRuleButton = buildRemoveRuleButton(composite); Button importRuleSetButton = buildImportRuleSetButton(composite); Button exportRuleSetButton = buildExportRuleSetButton(composite); - Button clearAllButton = buildClearAllButton(composite); Button ruleDesignerButton = buildRuleDesignerButton(composite); GridData data = new GridData(); - data.horizontalAlignment = GridData.FILL; - removeRuleButton.setLayoutData(data); - -// data = new GridData(); -// data.horizontalAlignment = GridData.FILL; -// editRuleButton.setLayoutData(data); - - data = new GridData(); - data.horizontalAlignment = GridData.FILL; addRuleButton.setLayoutData(data); data = new GridData(); - data.horizontalAlignment = GridData.FILL; importRuleSetButton.setLayoutData(data); data = new GridData(); - data.horizontalAlignment = GridData.FILL; exportRuleSetButton.setLayoutData(data); - data = new GridData(); - data.horizontalAlignment = GridData.FILL; - clearAllButton.setLayoutData(data); - data = new GridData(); data.horizontalAlignment = GridData.FILL; data.grabExcessVerticalSpace = true; @@ -604,7 +716,7 @@ private Composite buildRulePropertiesTableButtons(Composite parent) { private Tree buildRuleTreeViewer(Composite parent) { int treeStyle = SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL | SWT.MULTI | SWT.FULL_SELECTION | SWT.CHECK; - ruleTreeViewer = new CheckboxTreeViewer(parent, treeStyle); + ruleTreeViewer = new ContainerCheckedTreeViewer(parent, treeStyle); final Tree ruleTree = ruleTreeViewer.getTree(); ruleTree.setLinesVisible(true); @@ -648,6 +760,8 @@ public void handleEvent(Event event) { } }); + ruleTreeViewer.setCheckStateProvider(createCheckStateProvider()); + return ruleTree; } @@ 
-892,11 +1006,8 @@ private void selectedItems(Object[] items) { ruleSelection = new RuleSelection(items); for (RulePropertyManager manager : rulePropertyManagers) manager.manage(ruleSelection); - - Rule rule = ruleSelection.soleRule(); - removeRuleButton.setEnabled(rule != null); -// editRuleButton.setEnabled(rule != null && ruleSelection.hasOneRule()); + removeRuleButton.setEnabled(items.length > 0); } /** @@ -1007,6 +1118,7 @@ private void checkItems(TreeItem item, boolean checked) { for (TreeItem item2 : items) { checkItems(item2, checked); } + updateCheckButtons(); } /** @@ -1018,77 +1130,48 @@ private void check(TreeItem item, boolean checked) { item.setChecked(checked); if (item.getData() instanceof RuleGroup) return; - if (checked) { - checkedRules.add(item.getData()); - } else { - checkedRules.remove(item.getData()); - } - } + String name = ((Rule)item.getData()).getName(); + + preferences.isActive(name, checked); - private void removeSelectedRules() { - - int removeCount = ruleSelection.removeAllFrom(ruleSet); - if (removeCount == 0) return; - - setModified(true); - - try { - refresh(); - } catch (Throwable t) { - ruleTreeViewer.setSelection(null); - } + updateCheckButtons(); + setModified(true); } - + /** * Build the remove rule button * @param parent Composite * @return Button */ private Button buildRemoveRuleButton(Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); - button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_REMOVERULE)); - button.setEnabled(false); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_DELETE)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_REMOVERULE)); + button.addSelectionListener(new SelectionAdapter() { @Override - public void widgetSelected(SelectionEvent event) { + public void widgetSelected(SelectionEvent event) { removeSelectedRules(); } }); return button; } - - /** - * Build the edit rule button - * @param parent Composite - * @return 
Button - */ - private Button buildEditRuleButton(Composite parent) { - Button button = new Button(parent, SWT.PUSH | SWT.LEFT); - button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_EDITRULE)); - button.setEnabled(false); - - button.addSelectionListener(new SelectionAdapter() { - @Override - public void widgetSelected(SelectionEvent event) { - IStructuredSelection selection = (IStructuredSelection)ruleTreeViewer.getSelection(); - Rule rule = (Rule)selection.getFirstElement(); - - RuleDialog dialog = new RuleDialog(getShell(), rule); - int result = dialog.open(); - if (result == RuleDialog.OK) { - setModified(true); - try { - refresh(); - } catch (Throwable t) { - plugin.logError("Exception when refreshing the rule table", t); - } - } - } - }); - - return button; + + private void removeSelectedRules() { + + int removeCount = ruleSelection.removeAllFrom(ruleSet); + if (removeCount == 0) return; + + setModified(true); + + try { + refresh(); + } catch (Throwable t) { + ruleTreeViewer.setSelection(null); + } } - + /** * Build the edit rule button * @param parent Composite @@ -1096,7 +1179,8 @@ public void widgetSelected(SelectionEvent event) { */ private Button buildAddRuleButton(Composite parent) { Button button = new Button(parent, SWT.PUSH | SWT.LEFT); - button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_ADDRULE)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_ADD)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_ADDRULE)); button.setEnabled(true); button.addSelectionListener(new SelectionAdapter() { @@ -1134,7 +1218,8 @@ public void widgetSelected(SelectionEvent event) { */ private Button buildImportRuleSetButton(Composite parent) { Button button = new Button(parent, SWT.PUSH | SWT.LEFT); - button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_IMPORTRULESET)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_IMPORT)); + 
button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_IMPORTRULESET)); button.setEnabled(true); button.addSelectionListener(new SelectionAdapter() { @Override @@ -1178,7 +1263,8 @@ public void widgetSelected(SelectionEvent event) { */ private Button buildExportRuleSetButton(Composite parent) { Button button = new Button(parent, SWT.PUSH | SWT.LEFT); - button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_EXPORTRULESET)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_EXPORT)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_EXPORTRULESET)); button.setEnabled(true); button.addSelectionListener(new SelectionAdapter() { @Override @@ -1224,34 +1310,75 @@ public void widgetSelected(SelectionEvent event) { return button; } + private CheckboxTreeViewer treeViewer() { return ruleTreeViewer; } + + private Button buildSortByCheckedItemsButton(Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setToolTipText("Sort by checked items"); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_SORT_CHECKED)); + + button.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent event) { + sortByCheckedItems(); + } + }); + + return button; + } + /** - * Build the clear all button + * * @param parent Composite * @return Button */ - private Button buildClearAllButton(Composite parent) { + private Button buildSelectAllButton(Composite parent) { Button button = new Button(parent, SWT.PUSH | SWT.LEFT); - button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_CLEARALL)); +// button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_SELECT_ALL)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_CHECK_ALL)); + button.setEnabled(true); button.addSelectionListener(new SelectionAdapter() { - @Override public void widgetSelected(SelectionEvent event) { - if (MessageDialog.openConfirm(getShell(), 
getMessage(StringKeys.MSGKEY_CONFIRM_TITLE), - getMessage(StringKeys.MSGKEY_CONFIRM_CLEAR_RULESET))) { - ruleSet.getRules().clear(); - setModified(true); - try { - refresh(); - } catch (Throwable t) { - plugin.logError("Exception when refreshing the rule table", t); - } - } + setAllRulesActive(); } }); return button; } + + private void setAllRulesActive() { + for (Rule rule : ruleSet.getRules()) { + preferences.isActive(rule.getName(), true); + } + + treeViewer().setCheckedElements(ruleSet.getRules().toArray()); + setModified(true); + updateCheckButtons(); + } + + /** + * + * @param parent Composite + * @return Button + */ + private Button buildUnselectAllButton(Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); +// button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_SELECT_ALL)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_UNCHECK_ALL)); + + button.setEnabled(true); + button.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent event) { + preferences.getActiveRuleNames().clear(); + treeViewer().setCheckedElements(new Object[0]); + setModified(true); + updateCheckButtons(); + } + }); + return button; + } + /** * Build the Rule Designer button * @param parent Composite @@ -1259,7 +1386,8 @@ public void widgetSelected(SelectionEvent event) { */ private Button buildRuleDesignerButton(Composite parent) { Button button = new Button(parent, SWT.PUSH | SWT.LEFT); - button.setText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_RULEDESIGNER)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_EDITOR)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_RULEDESIGNER)); button.setEnabled(true); button.addSelectionListener(new SelectionAdapter() { @Override @@ -1296,48 +1424,16 @@ private void populateRuleTable() { } private void checkSelections() { - ruleTreeViewer.setCheckedElements(checkedRules.toArray()); - } - - /** - * Helper 
method to shorten message access - * @param key a message key - * @return requested message - */ - protected String getMessage(String key) { - return plugin.getStringTable().getString(key); - } - - /** - * @see org.eclipse.jface.preference.PreferencePage#doGetPreferenceStore() - */ - @Override - protected IPreferenceStore doGetPreferenceStore() { - return plugin.getPreferenceStore(); - } - - /** - * Returns the activeInstance. - * @return PMDPreferencePage - */ - public static PMDPreferencePage getActiveInstance() { - return activeInstance; - } - - /** - * Returns the isModified. - * @return boolean - */ - public boolean isModified() { - return modified; - } - - /** - * Sets the isModified. - * @param isModified The isModified to set - */ - public void setModified(boolean isModified) { - this.modified = isModified; + +// List<Rule> activeRules = new ArrayList<Rule>(); +// +// for (Rule rule : ruleSet.getRules()) { +// if (preferences.isActive(rule.getName())) { +// activeRules.add(rule); +// } +// } +// +// ruleTreeViewer.setCheckedElements(activeRules.toArray()); } /** @@ -1412,14 +1508,14 @@ protected void selectAndShowRule(Rule rule) { } public void changed(Rule rule, PropertyDescriptor<?> desc, Object newValue) { - // TODO enhance to recognize default values - modified = true; + // TODO enhance to recognize default values ruleTreeViewer.update(rule, null); + setModified(); } public void changed(RuleSelection selection, PropertyDescriptor<?> desc, Object newValue) { // TODO enhance to recognize default values - modified = true; + for (Rule rule : selection.allRules()) { if (newValue != null) { // non-reliable update behaviour, alternate trigger option - weird ruleTreeViewer.getTree().redraw(); @@ -1432,6 +1528,8 @@ public void changed(RuleSelection selection, PropertyDescriptor<?> desc, Object for (RulePropertyManager manager : rulePropertyManagers) { manager.validate(); } + + setModified(); } public void sortBy(RuleFieldAccessor accessor, Object context) { diff 
--git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PMDPreferencePage2.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PMDPreferencePage2.java new file mode 100755 index 00000000000..c67b610d065 --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/PMDPreferencePage2.java @@ -0,0 +1,506 @@ +package net.sourceforge.pmd.eclipse.ui.preferences.br; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import net.sourceforge.pmd.PropertyDescriptor; +import net.sourceforge.pmd.Rule; +import net.sourceforge.pmd.RuleSet; +import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; +import net.sourceforge.pmd.eclipse.runtime.preferences.impl.PreferenceUIStore; +import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; +import net.sourceforge.pmd.eclipse.ui.preferences.editors.SWTUtil; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.DescriptionPanelManager; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.ExamplePanelManager; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.ExclusionPanelManager; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.PerRulePropertyPanelManager; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.QuickFixPanelManager; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.RulePropertyManager; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.XPathPanelManager; +import net.sourceforge.pmd.eclipse.util.Util; + +import org.eclipse.core.resources.IncrementalProjectBuilder; +import org.eclipse.core.resources.ResourcesPlugin; +import org.eclipse.core.runtime.CoreException; +import org.eclipse.core.runtime.IProgressMonitor; +import 
org.eclipse.jface.dialogs.MessageDialog; +import org.eclipse.jface.dialogs.ProgressMonitorDialog; +import org.eclipse.jface.operation.IRunnableWithProgress; +import org.eclipse.swt.SWT; +import org.eclipse.swt.events.SelectionAdapter; +import org.eclipse.swt.events.SelectionEvent; +import org.eclipse.swt.layout.FormAttachment; +import org.eclipse.swt.layout.FormData; +import org.eclipse.swt.layout.FormLayout; +import org.eclipse.swt.layout.GridData; +import org.eclipse.swt.layout.GridLayout; +import org.eclipse.swt.layout.RowLayout; +import org.eclipse.swt.widgets.Composite; +import org.eclipse.swt.widgets.Control; +import org.eclipse.swt.widgets.Sash; +import org.eclipse.swt.widgets.TabFolder; +import org.eclipse.swt.widgets.TabItem; +import org.eclipse.swt.widgets.Tree; + +public class PMDPreferencePage2 extends AbstractPMDPreferencePage implements RuleSelectionListener, ModifyListener, ValueChangeListener { + + private TabFolder tabFolder; + private RulePropertyManager[] rulePropertyManagers; + private RuleTableManager tableManager; + + // columns shown in the rule treetable in the desired order + private static final RuleColumnDescriptor[] availableColumns = new RuleColumnDescriptor[] { + TextColumnDescriptor.name, + //TextColumnDescriptor.priorityName, + IconColumnDescriptor.priority, + TextColumnDescriptor.fixCount, + TextColumnDescriptor.since, + TextColumnDescriptor.ruleSetName, + TextColumnDescriptor.ruleType, + TextColumnDescriptor.minLangVers, + TextColumnDescriptor.language, + ImageColumnDescriptor.filterViolationRegex, // regex text -> compact color squares (for comparison) + ImageColumnDescriptor.filterViolationXPath, // xpath text -> compact color circles (for comparison) + TextColumnDescriptor.properties, + }; + + // last item in this list is the grouping used at startup + private static final Object[][] groupingChoices = new Object[][] { + { TextColumnDescriptor.ruleSetName, StringKeys.MSGKEY_PREF_RULESET_COLUMN_RULESET}, + { 
TextColumnDescriptor.since, StringKeys.MSGKEY_PREF_RULESET_GROUPING_PMD_VERSION }, + { TextColumnDescriptor.priorityName, StringKeys.MSGKEY_PREF_RULESET_COLUMN_PRIORITY }, + { TextColumnDescriptor.ruleType, StringKeys.MSGKEY_PREF_RULESET_COLUMN_RULE_TYPE }, + { TextColumnDescriptor.language, StringKeys.MSGKEY_PREF_RULESET_COLUMN_LANGUAGE }, + { ImageColumnDescriptor.filterViolationRegex, StringKeys.MSGKEY_PREF_RULESET_GROUPING_REGEX }, + { null, StringKeys.MSGKEY_PREF_RULESET_GROUPING_NONE } + }; + + private static final Map<Class<?>, ValueFormatter> formattersByType = new HashMap<Class<?>, ValueFormatter>(); + + static { // used to render property values in short form in main table + formattersByType.put(String.class, ValueFormatter.StringFormatter); + formattersByType.put(String[].class, ValueFormatter.MultiStringFormatter); + formattersByType.put(Boolean.class, ValueFormatter.BooleanFormatter); + formattersByType.put(Boolean[].class, ValueFormatter.ObjectArrayFormatter); + formattersByType.put(Integer.class, ValueFormatter.NumberFormatter); + formattersByType.put(Integer[].class, ValueFormatter.ObjectArrayFormatter); + formattersByType.put(Long.class, ValueFormatter.NumberFormatter); + formattersByType.put(Long[].class, ValueFormatter.ObjectArrayFormatter); + formattersByType.put(Float.class, ValueFormatter.NumberFormatter); + formattersByType.put(Float[].class, ValueFormatter.ObjectArrayFormatter); + formattersByType.put(Double.class, ValueFormatter.NumberFormatter); + formattersByType.put(Double[].class, ValueFormatter.ObjectArrayFormatter); + formattersByType.put(Character.class, ValueFormatter.ObjectFormatter); + formattersByType.put(Character[].class, ValueFormatter.ObjectArrayFormatter); + formattersByType.put(Class.class, ValueFormatter.TypeFormatter); + formattersByType.put(Class[].class, ValueFormatter.MultiTypeFormatter); + formattersByType.put(Method.class, ValueFormatter.MethodFormatter); + formattersByType.put(Method[].class, 
ValueFormatter.MultiMethodFormatter); + formattersByType.put(Object[].class, ValueFormatter.ObjectArrayFormatter); + } + + public PMDPreferencePage2() { + + } + + protected String descriptionId() { + return StringKeys.MSGKEY_PREF_RULESET_TITLE; + } + + @Override + protected Control createContents(Composite parent) { + + tableManager = new RuleTableManager(availableColumns, formattersByType, PMDPlugin.getDefault().loadPreferences()); + tableManager.modifyListener(this); + tableManager.selectionListener(this); + + populateRuleset(); + + Composite composite = new Composite(parent, SWT.NULL); + layoutControls(composite); + + tableManager.populateRuleTable(); + int i = PreferenceUIStore.instance.selectedPropertyTab() ; + tabFolder.setSelection( i ); + + return composite; + } + + public void createControl(Composite parent) { + super.createControl(parent); + + setModified(false); + } + /** + * Create buttons for rule properties table management + * @param parent Composite + * @return Composite + */ + private Composite buildRulePropertiesTableButtons(Composite parent) { + Composite composite = new Composite(parent, SWT.NULL); + RowLayout rowLayout = new RowLayout(); + rowLayout.type = SWT.VERTICAL; + rowLayout.wrap = false; + rowLayout.pack = false; + composite.setLayout(rowLayout); + + return composite; + } + + private Composite createRuleSection(Composite parent) { + + Composite ruleSection = new Composite(parent, SWT.NULL); + + // Create the controls (order is important !) 
+ Composite groupCombo = tableManager.buildGroupCombo(ruleSection, StringKeys.MSGKEY_PREF_RULESET_RULES_GROUPED_BY, groupingChoices); + + Tree ruleTree = tableManager.buildRuleTreeViewer(ruleSection); + tableManager.groupBy(null); + + Composite ruleTableButtons = tableManager.buildRuleTableButtons(ruleSection); + Composite rulePropertiesTableButtons = buildRulePropertiesTableButtons(ruleSection); + + // Place controls on the layout + GridLayout gridLayout = new GridLayout(3, false); + ruleSection.setLayout(gridLayout); + + GridData data = new GridData(); + data.horizontalSpan = 3; + groupCombo.setLayoutData(data); + + data = new GridData(); + data.heightHint = 200; data.widthHint = 350; + data.horizontalSpan = 1; + data.horizontalAlignment = GridData.FILL; data.verticalAlignment = GridData.FILL; + data.grabExcessHorizontalSpace = true; data.grabExcessVerticalSpace = true; + ruleTree.setLayoutData(data); + + data = new GridData(); + data.horizontalSpan = 1; + data.horizontalAlignment = GridData.FILL; data.verticalAlignment = GridData.FILL; + ruleTableButtons.setLayoutData(data); + + data = new GridData(); + data.horizontalSpan = 1; + data.horizontalAlignment = GridData.FILL; data.verticalAlignment = GridData.FILL; + rulePropertiesTableButtons.setLayoutData(data); + + return ruleSection; + } + + /** + * Method buildTabFolder. 
+ * @param parent Composite + * @return TabFolder + */ + private TabFolder buildTabFolder(Composite parent) { + + tabFolder = new TabFolder(parent, SWT.TOP); + + rulePropertyManagers = new RulePropertyManager[] { + buildPropertyTab(tabFolder, 0, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_PROPERTIES)), + buildDescriptionTab(tabFolder, 1, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_DESCRIPTION)), + buildUsageTab(tabFolder, 2, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_FILTERS)), + buildXPathTab(tabFolder, 3, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_XPATH)), + buildQuickFixTab(tabFolder, 4, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_FIXES)), + buildExampleTab(tabFolder, 5, SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_TAB_EXAMPLES)), + }; + + tabFolder.pack(); + return tabFolder; + } + + /** + * @param parent TabFolder + * @param index int + */ + private RulePropertyManager buildPropertyTab(TabFolder parent, int index, String title) { + + TabItem tab = new TabItem(parent, 0, index); + tab.setText(title); + + PerRulePropertyPanelManager manager = new PerRulePropertyPanelManager(this); + tab.setControl( + manager.setupOn(parent, this) + ); + manager.tab(tab); + return manager; + } + + /** + * @param parent TabFolder + * @param index int + */ + private RulePropertyManager buildDescriptionTab(TabFolder parent, int index, String title) { + + TabItem tab = new TabItem(parent, 0, index); + tab.setText(title); + + DescriptionPanelManager manager = new DescriptionPanelManager(this); + tab.setControl( + manager.setupOn(parent) + ); + manager.tab(tab); + return manager; + } + + /** + * @param parent TabFolder + * @param index int + */ + private RulePropertyManager buildXPathTab(TabFolder parent, int index, String title) { + + TabItem tab = new TabItem(parent, 0, index); + tab.setText(title); + + XPathPanelManager manager = new XPathPanelManager(this); + tab.setControl( + manager.setupOn(parent) + ); + manager.tab(tab); + 
return manager; + } + + /** + * @param parent TabFolder + * @param index int + */ + private RulePropertyManager buildExampleTab(TabFolder parent, int index, String title) { + + TabItem tab = new TabItem(parent, 0, index); + tab.setText(title); + + ExamplePanelManager manager = new ExamplePanelManager(this); + tab.setControl( + manager.setupOn(parent) + ); + manager.tab(tab); + return manager; + } + + /** + * @param parent TabFolder + * @param index int + */ + private RulePropertyManager buildQuickFixTab(TabFolder parent, int index, String title) { + + TabItem tab = new TabItem(parent, 0, index); + tab.setText(title); + + QuickFixPanelManager manager = new QuickFixPanelManager(this); + tab.setControl( + manager.setupOn(parent) + ); + manager.tab(tab); + return manager; + } + + /** + * + * @param parent TabFolder + * @param index int + * @param title String + */ + private RulePropertyManager buildUsageTab(TabFolder parent, int index, String title) { + + TabItem tab = new TabItem(parent, 0, index); + tab.setText(title); + + ExclusionPanelManager manager = new ExclusionPanelManager(this); + tab.setControl( + manager.setupOn( + parent, + SWTUtil.stringFor(StringKeys.MSGKEY_LABEL_EXCLUSION_REGEX), + SWTUtil.stringFor(StringKeys.MSGKEY_LABEL_XPATH_EXCLUSION), + SWTUtil.stringFor(StringKeys.MSGKEY_LABEL_COLOUR_CODE) + ) + ); + manager.tab(tab); + return manager; + } + + public void changed(Rule rule, PropertyDescriptor<?> desc, Object newValue) { + // TODO enhance to recognize default values + setModified(); + tableManager.updated(rule); + } + + public void changed(RuleSelection selection, PropertyDescriptor<?> desc, Object newValue) { + // TODO enhance to recognize default values + + for (Rule rule : selection.allRules()) { + if (newValue != null) { // non-reliable update behaviour, alternate trigger option - weird + // ruleTreeViewer.getTree().redraw(); + // System.out.println("doing redraw"); + } else { + // ruleTreeViewer.update(rule, null); + // 
System.out.println("viewer update"); + } + } + for (RulePropertyManager manager : rulePropertyManagers) { + manager.validate(); + } + + setModified(); + } + + /** + * Main layout + * @param parent Composite + */ + private void layoutControls(Composite parent) { + + parent.setLayout(new FormLayout()); + int ruleTableFraction = 55; //PreferenceUIStore.instance.tableFraction(); + + // Create the sash first, so the other controls can be attached to it. + final Sash sash = new Sash(parent, SWT.HORIZONTAL); + FormData data = new FormData(); + data.left = new FormAttachment(0, 0); // attach to left + data.right = new FormAttachment(100, 0); // attach to right + data.top = new FormAttachment(ruleTableFraction, 0); + sash.setLayoutData(data); + sash.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent event) { + // Re-attach to the top edge, and we use the y value of the event to determine the offset from the top + ((FormData)sash.getLayoutData()).top = new FormAttachment(0, event.y); +// PreferenceUIStore.instance.tableFraction(event.y); + sash.getParent().layout(); + } + }); + + // Create the first text box and attach its bottom edge to the sash + Composite ruleSection = createRuleSection(parent); + data = new FormData(); + data.top = new FormAttachment(0, 0); + data.bottom = new FormAttachment(sash, 0); + data.left = new FormAttachment(0, 0); + data.right = new FormAttachment(100, 0); + ruleSection.setLayoutData(data); + + // Create the second text box and attach its top edge to the sash + TabFolder propertySection = buildTabFolder(parent); + data = new FormData(); + data.top = new FormAttachment(sash, 0); + data.bottom = new FormAttachment(100, 0); + data.left = new FormAttachment(0, 0); + data.right = new FormAttachment(100, 0); + propertySection.setLayoutData(data); + } + + /** + * @see org.eclipse.jface.preference.IPreferencePage#performOk() + */ + @Override + public boolean performOk() { + + saveUIState(); + + if (isModified()) { 
+ updateRuleSet(); + rebuildProjects(); + storeActiveRules(); + } + + return super.performOk(); + } + + @Override + public boolean performCancel() { + + saveUIState(); + return super.performCancel(); + } + + /** + * @see org.eclipse.jface.preference.PreferencePage#performDefaults() + */ + @Override + protected void performDefaults() { + tableManager.populateRuleTable(); + super.performDefaults(); + } + + private void populateRuleset() { + + RuleSet defaultRuleSet = plugin.getPreferencesManager().getRuleSet(); + RuleSet ruleSet = new RuleSet(); + ruleSet.addRuleSet(defaultRuleSet); + ruleSet.setName(defaultRuleSet.getName()); + ruleSet.setDescription(Util.asCleanString(defaultRuleSet.getDescription())); + ruleSet.addExcludePatterns(defaultRuleSet.getExcludePatterns()); + ruleSet.addIncludePatterns(defaultRuleSet.getIncludePatterns()); + + tableManager.useRuleSet(ruleSet); + } + + public void selection(RuleSelection selection) { + + for (RulePropertyManager manager : rulePropertyManagers) { + manager.manage(selection); + manager.validate(); + } + } + + /** + * If user wants to, rebuild all projects + */ + private void rebuildProjects() { + if (MessageDialog.openQuestion(getShell(), getMessage(StringKeys.MSGKEY_QUESTION_TITLE), + getMessage(StringKeys.MSGKEY_QUESTION_RULES_CHANGED))) { + try { + ProgressMonitorDialog monitorDialog = new ProgressMonitorDialog(getShell()); + monitorDialog.run(true, true, new IRunnableWithProgress() { + public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException { + try { + ResourcesPlugin.getWorkspace().build(IncrementalProjectBuilder.FULL_BUILD, monitor); + } catch (CoreException e) { + plugin.logError("Exception building all projects after a preference change", e); + } + } + }); + } catch (Exception e) { + plugin.logError("Exception building all projects after a preference change", e); + } + } + } + + private void saveUIState() { + tableManager.saveUIState(); + int i = 
tabFolder.getSelectionIndex(); + PreferenceUIStore.instance.selectedPropertyTab( i ); + PreferenceUIStore.instance.save(); + } + + + private void storeActiveRules() { + + List<Rule> chosenRules = tableManager.activeRules(); + for (Rule rule : chosenRules) { + preferences.isActive(rule.getName(), true); + } + + System.out.println("Active rules: " + preferences.getActiveRuleNames()); + } + + /** + * Update the configured rule set + * Update also all configured projects + */ + private void updateRuleSet() { + try { + ProgressMonitorDialog monitorDialog = new ProgressMonitorDialog(getShell()); + monitorDialog.run(true, true, new IRunnableWithProgress() { + public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException { + plugin.getPreferencesManager().setRuleSet(tableManager.ruleSet()); + } + }); + } catch (Exception e) { + plugin.logError("Exception updating all projects after a preference change", e); + } + } + +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleFieldAccessor.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleFieldAccessor.java index 4e0a9245d36..e1e1c1f8406 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleFieldAccessor.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleFieldAccessor.java @@ -4,6 +4,7 @@ import java.util.List; import net.sourceforge.pmd.Rule; +import net.sourceforge.pmd.eclipse.ui.quickfix.PMDResolutionGenerator; import net.sourceforge.pmd.eclipse.util.Util; import net.sourceforge.pmd.lang.Language; import net.sourceforge.pmd.lang.LanguageVersion; @@ -35,7 +36,7 @@ public interface RuleFieldAccessor { String labelFor(Rule rule); RuleFieldAccessor since = new BasicRuleFieldAccessor() { - public 
Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { return rule.getSince(); } }; @@ -47,37 +48,43 @@ public Comparable<?> valueFor(Rule rule) { }; RuleFieldAccessor priorityName = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { return rule.getPriority().getName(); } }; + RuleFieldAccessor fixCount = new BasicRuleFieldAccessor() { + public Comparable<Integer> valueFor(Rule rule) { + return PMDResolutionGenerator.fixCountFor(rule); + } + }; + RuleFieldAccessor name = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { return rule.getName(); } }; RuleFieldAccessor description = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { return rule.getDescription(); } }; RuleFieldAccessor usesDFA = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<Boolean> valueFor(Rule rule) { return rule.usesDFA() ? 
Boolean.TRUE : Boolean.FALSE; } }; RuleFieldAccessor message = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { return rule.getMessage(); } }; RuleFieldAccessor url = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { return rule.getExternalInfoUrl(); } }; @@ -90,7 +97,7 @@ public Comparable<?> valueFor(Rule rule) { }; RuleFieldAccessor ruleType = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { StringBuilder sb = new StringBuilder(3); if (rule.hasDescriptor(XPathRule.XPATH_DESCRIPTOR)) sb.append(ruleTypeXPath[0]); if (rule.usesDFA()) sb.append(ruleTypeDFlow[0]); @@ -107,33 +114,33 @@ public String labelFor(Rule rule) { }; RuleFieldAccessor language = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { Language language = rule.getLanguage(); return language == null ? "" : language.getTerseName(); } }; RuleFieldAccessor minLanguageVersion = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { LanguageVersion version = rule.getMinimumLanguageVersion(); return version == null ? 
"" : version.getTerseName(); } }; RuleFieldAccessor maxLanguageVersion = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<LanguageVersion> valueFor(Rule rule) { return rule.getMaximumLanguageVersion(); } }; RuleFieldAccessor violationRegex = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { return rule.getProperty(Rule.VIOLATION_SUPPRESS_REGEX_DESCRIPTOR); } }; RuleFieldAccessor violationXPath = new BasicRuleFieldAccessor() { - public Comparable<?> valueFor(Rule rule) { + public Comparable<String> valueFor(Rule rule) { return rule.getProperty(Rule.VIOLATION_SUPPRESS_XPATH_DESCRIPTOR); } }; diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleSelectionListener.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleSelectionListener.java new file mode 100755 index 00000000000..e16cb9c61e3 --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleSelectionListener.java @@ -0,0 +1,10 @@ +package net.sourceforge.pmd.eclipse.ui.preferences.br; + +/** + * + * @author Brian Remedios + */ +public interface RuleSelectionListener { + + void selection(RuleSelection selection); +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleSetTreeItemProvider.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleSetTreeItemProvider.java index 4fac5cdb776..f29b3b35f41 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleSetTreeItemProvider.java +++ 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleSetTreeItemProvider.java @@ -158,8 +158,7 @@ public Object[] getElements(Object inputElement) { * @see org.eclipse.jface.viewers.IContentProvider#inputChanged(Viewer, Object, Object) */ public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { - // TODO Auto-generated method stub - + } public void dispose() { diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleTableManager.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleTableManager.java new file mode 100755 index 00000000000..1ea765927cb --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/RuleTableManager.java @@ -0,0 +1,1170 @@ +package net.sourceforge.pmd.eclipse.ui.preferences.br; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import net.sourceforge.pmd.PropertyDescriptor; +import net.sourceforge.pmd.Rule; +import net.sourceforge.pmd.RulePriority; +import net.sourceforge.pmd.RuleSet; +import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; +import net.sourceforge.pmd.eclipse.runtime.preferences.IPreferences; +import net.sourceforge.pmd.eclipse.runtime.preferences.impl.PreferenceUIStore; +import net.sourceforge.pmd.eclipse.runtime.writer.IRuleSetWriter; +import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; +import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; +import net.sourceforge.pmd.eclipse.ui.preferences.RuleDialog; +import net.sourceforge.pmd.eclipse.ui.preferences.RuleSetSelectionDialog; +import 
net.sourceforge.pmd.eclipse.ui.preferences.editors.SWTUtil; +import net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers.Configuration; +import net.sourceforge.pmd.eclipse.util.ResourceManager; +import net.sourceforge.pmd.eclipse.util.Util; +import net.sourceforge.pmd.util.FileUtil; +import net.sourceforge.pmd.util.StringUtil; +import net.sourceforge.pmd.util.designer.Designer; + +import org.eclipse.jface.dialogs.InputDialog; +import org.eclipse.jface.dialogs.MessageDialog; +import org.eclipse.jface.viewers.CheckboxTreeViewer; +import org.eclipse.jface.viewers.ICheckStateProvider; +import org.eclipse.jface.viewers.ISelectionChangedListener; +import org.eclipse.jface.viewers.IStructuredSelection; +import org.eclipse.jface.viewers.SelectionChangedEvent; +import org.eclipse.jface.viewers.StructuredSelection; +import org.eclipse.swt.SWT; +import org.eclipse.swt.events.SelectionAdapter; +import org.eclipse.swt.events.SelectionEvent; +import org.eclipse.swt.events.SelectionListener; +import org.eclipse.swt.graphics.Point; +import org.eclipse.swt.graphics.Rectangle; +import org.eclipse.swt.layout.GridData; +import org.eclipse.swt.layout.GridLayout; +import org.eclipse.swt.widgets.Button; +import org.eclipse.swt.widgets.Combo; +import org.eclipse.swt.widgets.Composite; +import org.eclipse.swt.widgets.Control; +import org.eclipse.swt.widgets.Event; +import org.eclipse.swt.widgets.FileDialog; +import org.eclipse.swt.widgets.Label; +import org.eclipse.swt.widgets.Listener; +import org.eclipse.swt.widgets.Menu; +import org.eclipse.swt.widgets.MenuItem; +import org.eclipse.swt.widgets.Tree; +import org.eclipse.swt.widgets.TreeColumn; +import org.eclipse.swt.widgets.TreeItem; +import org.eclipse.ui.dialogs.ContainerCheckedTreeViewer; + +/** + * Instantiates and manages a tree table widget holding all the rules in a ruleset. 
+ * + * @author Brian Remedios + */ +public class RuleTableManager implements RuleSortListener, ValueChangeListener { + + private RuleSet ruleSet; + + private ContainerCheckedTreeViewer ruleTreeViewer; + + private boolean sortDescending; + private RuleFieldAccessor columnSorter = RuleFieldAccessor.name; // initial sort + private RuleColumnDescriptor groupingColumn; + + private Set<String> hiddenColumnNames = new HashSet<String>(); + private Map<Integer, List<Listener>> paintListeners = new HashMap<Integer, List<Listener>>(); + + private Map<RulePriority, MenuItem> priorityMenusByPriority; + private Map<String, MenuItem> rulesetMenusByName; + + private RuleFieldAccessor checkedColumnAccessor; + private RuleSelection ruleSelection; // may hold rules and/or group nodes + + private Menu ruleListMenu; + private Button addRuleButton; + private Button removeRuleButton; + private Button sortByCheckedButton; + private Button selectAllButton; + private Button unSelectAllButton; + private Label activeCountLabel; + + private final RuleColumnDescriptor[] availableColumns; // columns shown in the rule treetable in the desired order + private final Map<Class<?>, ValueFormatter> formattersByType; + private final IPreferences preferences; + + private ModifyListener modifyListener; + private RuleSelectionListener ruleSelectionListener; + + protected static PMDPlugin plugin = PMDPlugin.getDefault(); + + public static String ruleSetNameFrom(Rule rule) { + return ruleSetNameFrom( rule.getRuleSetName() ); + } + + public List<Rule> activeRules() { + + Object[] checkedItems = ruleTreeViewer.getCheckedElements(); + List<Rule> activeOnes = new ArrayList<Rule>(checkedItems.length); + + for (Object item : checkedItems) { + if (item instanceof Rule) { + activeOnes.add((Rule)item); + } + } + + return activeOnes; + } + + public int activeRuleCount() { + + Object[] checkedItems = ruleTreeViewer.getCheckedElements(); + int count = 0; + + for (Object item : checkedItems) { + if (item instanceof 
Rule) count++; + } + + return count; + } + + public void modifyListener(ModifyListener theListener) { + modifyListener = theListener; + } + + public void selectionListener(RuleSelectionListener theListener) { + ruleSelectionListener = theListener; + } + + public static String ruleSetNameFrom(String rulesetName) { + + int pos = rulesetName.toUpperCase().indexOf("RULES"); + return pos < 0 ? rulesetName : rulesetName.substring(0, pos-1); + } + + public void formatValueOn(StringBuilder target, Object value, Class<?> datatype) { + + ValueFormatter formatter = formattersByType.get(datatype); + if (formatter != null) { + formatter.format(value, target); + return; + } + + target.append(value); // should not get here..breakpoint here + } + + /** + * @param rule Rule + * @return String + */ + public String propertyStringFrom(Rule rule) { + + Map<PropertyDescriptor<?>, Object> valuesByProp = Configuration.filteredPropertiesOf(rule); + + if (valuesByProp.isEmpty()) return ""; + StringBuilder sb = new StringBuilder(); + + Iterator<PropertyDescriptor<?>> iter = valuesByProp.keySet().iterator(); + + PropertyDescriptor<?> desc = iter.next(); + sb.append(desc.name()).append(": "); + formatValueOn(sb, rule.getProperty(desc), desc.type()); + + while (iter.hasNext()) { + desc = iter.next(); + sb.append(", ").append(desc.name()).append(": "); + formatValueOn(sb, rule.getProperty(desc), desc.type()); + } + return sb.toString(); + } + + public RuleTableManager(RuleColumnDescriptor[] theColumns, Map<Class<?>, ValueFormatter> theFormattersByType, IPreferences thePreferences) { + + availableColumns = theColumns; + formattersByType = theFormattersByType; + preferences = thePreferences; + + hiddenColumnNames = PreferenceUIStore.instance.hiddenColumnNames(); + checkedColumnAccessor = createCheckedItemAccessor(); + } + + private RuleFieldAccessor createCheckedItemAccessor() { + + return new BasicRuleFieldAccessor() { + public Comparable<Boolean> valueFor(Rule rule) { + return 
preferences.isActive(rule.getName()); + } + }; + } + + private void addColumnSelectionOptions(Menu menu) { + + MenuItem showMenu = new MenuItem(menu, SWT.CASCADE); + showMenu.setText("Show"); + Menu columnsSubMenu = new Menu(menu); + showMenu.setMenu(columnsSubMenu); + + for (String columnLabel : columnLabels()) { + MenuItem columnItem = new MenuItem(columnsSubMenu, SWT.CHECK); + columnItem.setSelection(!hiddenColumnNames.contains(columnLabel)); + columnItem.setText(columnLabel); + final String nameStr = columnLabel; + columnItem.addSelectionListener( new SelectionAdapter() { + public void widgetSelected(SelectionEvent e) { + toggleColumnVisiblity(nameStr); + } + } + ); + } + } + + private void addRulesetMenuOptions(Menu menu) { + + MenuItem rulesetMenu = new MenuItem(menu, SWT.CASCADE); + rulesetMenu.setText("Ruleset"); + Menu rulesetSubMenu = new Menu(menu); + rulesetMenu.setMenu(rulesetSubMenu); + rulesetMenusByName = new HashMap<String, MenuItem>(); + + MenuItem demoItem = new MenuItem(rulesetSubMenu, SWT.PUSH); + demoItem.setText("---demo only---"); // NO API to re-parent rules to other rulesets (yet) + + for (String rulesetName : rulesetNames()) { + MenuItem rulesetItem = new MenuItem(rulesetSubMenu, SWT.RADIO); + rulesetMenusByName.put(rulesetName, rulesetItem); + rulesetItem.setText(rulesetName); + final String rulesetStr = rulesetName; + rulesetItem.addSelectionListener( new SelectionAdapter() { + public void widgetSelected(SelectionEvent e) { + setRuleset(rulesetStr); + } + } + ); + } + } + + private void adjustMenuPrioritySettings() { + + RulePriority priority = ruleSelection == null ? 
null : ruleSelection.commonPriority(); + Iterator<Map.Entry<RulePriority, MenuItem>> iter = priorityMenusByPriority.entrySet().iterator(); + + while (iter.hasNext()) { + Map.Entry<RulePriority, MenuItem> entry = iter.next(); + MenuItem item = entry.getValue(); + if (entry.getKey() == priority) { + item.setSelection(true); + item.setEnabled(false); + } else { + item.setSelection(false); + item.setEnabled(true); + } + } + } + + // if all the selected rules/ruleGroups reference a common ruleset name + // then check that item and disable it, do the reverse for all others. + private void adjustMenuRulesetSettings() { + + String rulesetName = ruleSelection == null ? null : ruleSetNameFrom(ruleSelection.commonRuleset()); + Iterator<Map.Entry<String, MenuItem>> iter = rulesetMenusByName.entrySet().iterator(); + + while (iter.hasNext()) { + Map.Entry<String, MenuItem> entry = iter.next(); + MenuItem item = entry.getValue(); + if (rulesetName == null) { // allow all entries if none or conflicting + item.setSelection(false); + item.setEnabled(true); + continue; + } + if (StringUtil.areSemanticEquals(entry.getKey(), rulesetName)) { + item.setSelection(true); + item.setEnabled(false); + } else { + item.setSelection(false); + item.setEnabled(true); + } + } + } + + private void adjustMenuUseDefaultsOption() { + + } + + /** + * Build the edit rule button + * @param parent Composite + * @return Button + */ + public Button buildAddRuleButton(final Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_ADD)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_ADDRULE)); + button.setEnabled(true); + + button.addSelectionListener(new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent event) { + RuleDialog dialog = new RuleDialog(parent.getShell()); + int result = dialog.open(); + if (result == RuleDialog.OK) { + Rule addedRule = dialog.getRule(); 
+ ruleSet.addRule(addedRule); + setModified(); + try { + refresh(); + } catch (Throwable t) { + plugin.logError("Exception when refreshing the rule table", t); + } + } + } + }); + + return button; + } + + /** + * Helper method to shorten message access + * @param key a message key + * @return requested message + */ + protected String getMessage(String key) { + return PMDPlugin.getDefault().getStringTable().getString(key); + } + + private void setModified() { + if (modifyListener != null) modifyListener.setModified(); + } + + /** + * Build the remove rule button + * @param parent Composite + * @return Button + */ + public Button buildRemoveRuleButton(Composite parent) { + + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_DELETE)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_REMOVERULE)); + + button.addSelectionListener(new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent event) { + removeSelectedRules(); + } + }); + return button; + } + + private void removeSelectedRules() { + + if (ruleSelection == null) return; + + int removeCount = ruleSelection.removeAllFrom(ruleSet); + if (removeCount == 0) return; + + setModified(); + + try { + refresh(); + } catch (Throwable t) { + ruleTreeViewer.setSelection(null); + } + } + + /** + * Build the export rule set button + * @param parent Composite + * @return Button + */ + private Button buildExportRuleSetButton(final Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_EXPORT)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_EXPORTRULESET)); + button.setEnabled(true); + button.addSelectionListener(new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent event) { + FileDialog dialog = new FileDialog(parent.getShell(), SWT.SAVE); + String fileName = 
dialog.open(); + if (fileName != null) { + try { + File file = new File(fileName); + boolean flContinue = true; + if (file.exists()) { + flContinue = MessageDialog.openConfirm(parent.getShell(), + getMessage(StringKeys.MSGKEY_CONFIRM_TITLE), + getMessage(StringKeys.MSGKEY_CONFIRM_RULESET_EXISTS)); + } + + InputDialog input = null; + if (flContinue) { + input = new InputDialog(parent.getShell(), + getMessage(StringKeys.MSGKEY_PREF_RULESET_DIALOG_TITLE), + getMessage(StringKeys.MSGKEY_PREF_RULESET_DIALOG_RULESET_DESCRIPTION), + ruleSet.getDescription() == null ? "" : ruleSet.getDescription().trim(), null); + flContinue = input.open() == InputDialog.OK; + } + + if (flContinue) { + ruleSet.setName(FileUtil.getFileNameWithoutExtension(file.getName())); + ruleSet.setDescription(input.getValue()); + OutputStream out = new FileOutputStream(fileName); + IRuleSetWriter writer = plugin.getRuleSetWriter(); + writer.write(out, ruleSet); + out.close(); + MessageDialog.openInformation(parent.getShell(), getMessage(StringKeys.MSGKEY_INFORMATION_TITLE), + getMessage(StringKeys.MSGKEY_INFORMATION_RULESET_EXPORTED)); + } + } catch (Exception e) { + plugin.showError(getMessage(StringKeys.MSGKEY_ERROR_EXPORTING_RULESET), e); + } + } + } + }); + + return button; + } + + /** + * Build the import ruleset button + * @param parent Composite + * @return Button + */ + private Button buildImportRuleSetButton(final Composite parent) { + + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_IMPORT)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_IMPORTRULESET)); + button.setEnabled(true); + + button.addSelectionListener(new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent event) { + RuleSetSelectionDialog dialog = new RuleSetSelectionDialog(parent.getShell()); + dialog.open(); + if (dialog.getReturnCode() == RuleSetSelectionDialog.OK) { + try { + RuleSet 
selectedRuleSet = dialog.getSelectedRuleSet(); + if (dialog.isImportByReference()) { + ruleSet.addRuleSetByReference(selectedRuleSet, false); + } else { + // Set pmd-eclipse as new RuleSet name and add the Rule + Iterator<Rule> iter = selectedRuleSet.getRules().iterator(); + while (iter.hasNext()) { + Rule rule = iter.next(); + rule.setRuleSetName("pmd-eclipse"); + ruleSet.addRule(rule); + } + } + setModified(); + try { + refresh(); + } catch (Throwable t) { + plugin.logError("Exception when refreshing the rule table", t); + } + } catch (RuntimeException e) { + plugin.showError(getMessage(StringKeys.MSGKEY_ERROR_IMPORTING_RULESET), e); + } + } + } + }); + + return button; + } + + public Composite buildGroupCombo(Composite parent, String comboLabelKey, final Object[][] groupingChoices) { + + Composite panel = new Composite(parent, 0); + GridLayout layout = new GridLayout(6, false); + panel.setLayout(layout); + + sortByCheckedButton = buildSortByCheckedItemsButton(panel); + selectAllButton = buildSelectAllButton(panel); + unSelectAllButton = buildUnselectAllButton(panel); + + Label label = new Label(panel, 0); + GridData data = new GridData(); + data.horizontalAlignment = SWT.LEFT; + data.verticalAlignment = SWT.CENTER; + label.setLayoutData(data); + label.setText(SWTUtil.stringFor(comboLabelKey)); + + final Combo combo = new Combo(panel, SWT.READ_ONLY); + combo.setItems(SWTUtil.i18lLabelsIn(groupingChoices, 1)); + combo.select(groupingChoices.length - 1); // picks last one by default TODO make it a persistent preference + + combo.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent e) { + int selectionIdx = combo.getSelectionIndex(); + Object[] choice = groupingChoices[selectionIdx]; + groupingColumn = (RuleColumnDescriptor)choice[0]; + redrawTable(); + } + }); + + activeCountLabel = new Label(panel, 0); + activeCountLabel.setText("---"); + data = new GridData(); + data.horizontalAlignment = GridData.FILL; + 
data.grabExcessHorizontalSpace = true; + data.horizontalAlignment = SWT.RIGHT; + activeCountLabel.setLayoutData(data); + + return panel; + } + + /** + * Build the Rule Designer button + * @param parent Composite + * @return Button + */ + private Button buildRuleDesignerButton(Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_EDITOR)); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_RULEDESIGNER)); + button.setEnabled(true); + button.addSelectionListener(new SelectionAdapter() { + @Override + public void widgetSelected(SelectionEvent event) { + // TODO Is this cool from Eclipse? Is there a nicer way to spawn a J2SE Application? + new Thread(new Runnable() { + public void run() { + Designer.main(new String[] { "-noexitonclose" }); + } + }).start(); + } + }); + + return button; + } + + /** + * Create buttons for rule table management + * @param parent Composite + * @return Composite + */ + public Composite buildRuleTableButtons(Composite parent) { + + Composite composite = new Composite(parent, SWT.NULL); + GridLayout gridLayout = new GridLayout(); + gridLayout.numColumns = 1; + gridLayout.verticalSpacing = 3; + composite.setLayout(gridLayout); + + addRuleButton = buildAddRuleButton(composite); + removeRuleButton = buildRemoveRuleButton(composite); + Button importRuleSetButton = buildImportRuleSetButton(composite); + Button exportRuleSetButton = buildExportRuleSetButton(composite); + Button ruleDesignerButton = buildRuleDesignerButton(composite); + + GridData data = new GridData(); + addRuleButton.setLayoutData(data); + + data = new GridData(); + importRuleSetButton.setLayoutData(data); + + data = new GridData(); + exportRuleSetButton.setLayoutData(data); + + data = new GridData(); + data.horizontalAlignment = GridData.FILL; + data.grabExcessVerticalSpace = true; + data.verticalAlignment = GridData.END; + ruleDesignerButton.setLayoutData(data); + 
+ return composite; + } + + /** + * Build rule table viewer + * @param parent Composite + * @return Tree + */ + public Tree buildRuleTreeViewer(Composite parent) { + + int treeStyle = SWT.BORDER | SWT.H_SCROLL | SWT.V_SCROLL | SWT.MULTI | SWT.FULL_SELECTION | SWT.CHECK; + ruleTreeViewer = new ContainerCheckedTreeViewer(parent, treeStyle); + + final Tree ruleTree = ruleTreeViewer.getTree(); + ruleTree.setLinesVisible(true); + ruleTree.setHeaderVisible(true); + + ruleTreeViewer.addSelectionChangedListener(new ISelectionChangedListener() { + public void selectionChanged(SelectionChangedEvent event) { + IStructuredSelection selection = (IStructuredSelection)event.getSelection(); + selectedItems(selection.toArray()); + } + }); + + ruleListMenu = createMenuFor(ruleTree); + ruleTree.setMenu(ruleListMenu); + ruleTree.addListener(SWT.MenuDetect, new Listener () { + public void handleEvent (Event event) { + popupRuleSelectionMenu(event); + } + }); + + ruleTree.addListener(SWT.Selection, new Listener() { + public void handleEvent(Event event) { + if (event.detail == SWT.CHECK) { + TreeItem item = (TreeItem) event.item; + boolean checked = item.getChecked(); + checkItems(item, checked); + checkPath(item.getParentItem(), checked, false); + } + // if (!checkedRules.isEmpty()) System.out.println(checkedRules.iterator().next()); + } + }); + + ruleTree.addListener(SWT.MouseMove, new Listener() { + public void handleEvent(Event event) { + Point point = new Point(event.x, event.y); + TreeItem item = ruleTree.getItem(point); + if (item != null) { + int columnIndex = columnIndexAt(item, event.x); + updateTooltipFor(item, columnIndex); + } + } + }); + + ruleTreeViewer.setCheckStateProvider(createCheckStateProvider()); + + return ruleTree; + } + + /** + * + * @param parent Composite + * @return Button + */ + private Button buildSelectAllButton(Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + 
button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_CHECK_ALL)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_CHECK_ALL)); + + button.setEnabled(true); + button.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent event) { + setAllRulesActive(); + } + }); + + return button; + } + + private Button buildSortByCheckedItemsButton(Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setToolTipText("Sort by checked items"); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_SORT_CHECKED)); + + button.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent event) { + sortByCheckedItems(); + } + }); + + return button; + } + + /** + * + * @param parent Composite + * @return Button + */ + private Button buildUnselectAllButton(Composite parent) { + Button button = new Button(parent, SWT.PUSH | SWT.LEFT); + button.setToolTipText(getMessage(StringKeys.MSGKEY_PREF_RULESET_BUTTON_UNCHECK_ALL)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_UNCHECK_ALL)); + + button.setEnabled(true); + button.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent event) { + preferences.getActiveRuleNames().clear(); + treeViewer().setCheckedElements(new Object[0]); + setModified(); + updateCheckControls(); + } + }); + + return button; + } + + public void changed(Rule rule, PropertyDescriptor<?> desc, Object newValue) { + // TODO enhance to recognize default values + ruleTreeViewer.update(rule, null); + setModified(); + } + + public void changed(RuleSelection selection, PropertyDescriptor<?> desc, Object newValue) { + // TODO enhance to recognize default values + + for (Rule rule : selection.allRules()) { + if (newValue != null) { // non-reliable update behaviour, alternate trigger option - weird + ruleTreeViewer.getTree().redraw(); + // System.out.println("doing redraw"); + 
} else { + ruleTreeViewer.update(rule, null); + // System.out.println("viewer update"); + } + } + setModified(); + } + + /** + * Method checkPath. + * @param item TreeItem + * @param checked boolean + * @param grayed boolean + */ + private void checkPath(TreeItem item, boolean checked, boolean grayed) { + if (item == null) return; + if (grayed) { + checked = true; + } else { + int index = 0; + TreeItem[] items = item.getItems(); + while (index < items.length) { + TreeItem child = items[index]; + if (child.getGrayed() || checked != child.getChecked()) { + checked = grayed = true; + break; + } + index++; + } + } + check(item, checked); + item.setGrayed(grayed); + checkPath(item.getParentItem(), checked, grayed); + } + + /** + * @param item TreeItem + * @param checked boolean + */ + private void checkItems(TreeItem item, boolean checked) { + item.setGrayed(false); + check(item, checked); + TreeItem[] items = item.getItems(); + for (TreeItem item2 : items) { + checkItems(item2, checked); + } + updateCheckControls(); + } + + /** + * @param item TreeItem + * @param checked boolean + */ + private void check(TreeItem item, boolean checked) { + + item.setChecked(checked); + Object itemData = item.getData(); + if (itemData == null || itemData instanceof RuleGroup) return; + + String name = ((Rule)itemData).getName(); + + preferences.isActive(name, checked); + + updateCheckControls(); + setModified(); + } + + private void checkSelections() { + +// List<Rule> activeRules = new ArrayList<Rule>(); +// +// for (Rule rule : ruleSet.getRules()) { +// if (preferences.isActive(rule.getName())) { +// activeRules.add(rule); +// } +// } +// +// ruleTreeViewer.setCheckedElements(activeRules.toArray()); + } + + /** + * Remove all rows, columns, and column painters in preparation + * for new columns. 
+ * + * @return Tree + */ + private Tree cleanupRuleTree() { + + Tree ruleTree = ruleTreeViewer.getTree(); + + ruleTree.clearAll(true); + for(;ruleTree.getColumns().length>0;) { // TODO also dispose any heading icons? + ruleTree.getColumns()[0].dispose(); + } + + // ensure we don't have any previous per-column painters left over + for (Map.Entry<Integer, List<Listener>> entry : paintListeners.entrySet()) { + int eventCode = entry.getKey().intValue(); + List<Listener> listeners = entry.getValue(); + for (Listener listener : listeners) { + ruleTree.removeListener(eventCode, listener); + } + listeners.clear(); + } + + return ruleTree; + } + + private int columnIndexAt(TreeItem item, int xPosition) { + + TreeColumn[] cols = ruleTreeViewer.getTree().getColumns(); + Rectangle bounds = null; + + for(int i = 0; i < cols.length; i++){ + bounds = item.getBounds(i); + if (bounds.x < xPosition && xPosition < (bounds.x + bounds.width)) { + return i; + } + } + return -1; + } + + private String[] columnLabels() { + String[] names = new String[availableColumns.length]; + for (int i=0; i<availableColumns.length; i++) { + names[i] = availableColumns[i].label(); + } + return names; + } + + private ICheckStateProvider createCheckStateProvider() { + + return new ICheckStateProvider() { + + public boolean isChecked(Object item) { + if (item instanceof Rule) { + return preferences.isActive(((Rule)item).getName()); + } else { + if (item instanceof RuleGroup) { + int[] fraction = selectionRatioIn(((RuleGroup)item).rules()); + return (fraction[0] > 0) && (fraction[0] == fraction[1]); + } + } + return false; // should never get here + } + + public boolean isGrayed(Object item) { + + if (item instanceof Rule) return false; + if (item instanceof RuleGroup) { + int[] fraction = selectionRatioIn(((RuleGroup)item).rules()); + return (fraction[0] > 0) && (fraction[0] != fraction[1]); + } + return false; + } + + }; + } + + private Menu createMenuFor(Control control) { + + Menu menu = new 
Menu(control); + + MenuItem priorityMenu = new MenuItem (menu, SWT.CASCADE); + priorityMenu.setText(SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PRIORITY)); + Menu subMenu = new Menu(menu); + priorityMenu.setMenu (subMenu); + priorityMenusByPriority = new HashMap<RulePriority, MenuItem>(RulePriority.values().length); + + for (RulePriority priority : RulePriority.values()) { + MenuItem priorityItem = new MenuItem (subMenu, SWT.RADIO); + priorityMenusByPriority.put(priority, priorityItem); + priorityItem.setText(priority.getName()); // TODO need to internationalize? + // priorityItem.setImage(imageFor(priority)); not visible with radiobuttons + final RulePriority pri = priority; + priorityItem.addSelectionListener( new SelectionListener() { + public void widgetSelected(SelectionEvent e) { + setPriority(pri); + } + public void widgetDefaultSelected(SelectionEvent e) { }} + ); + } + +// MenuItem removeItem = new MenuItem(menu, SWT.PUSH); +// removeItem.setText("Remove"); +// removeItem.addSelectionListener(new SelectionAdapter() { +// public void widgetSelected(SelectionEvent event) { +// removeSelectedRules(); +// } +// }); + + MenuItem useDefaultsItem = new MenuItem(menu, SWT.PUSH); + useDefaultsItem.setText("Use defaults"); + useDefaultsItem.setEnabled(false); + useDefaultsItem.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent event) { + // useDefaultValues(); + } + }); + + return menu; + } + + private TreeColumn columnFor(String tooltipText) { + for (TreeColumn column : ruleTreeViewer.getTree().getColumns()) { + if (column.getToolTipText().equals(tooltipText)) return column; + } + return null; + } + + /** + * Method groupBy. 
+ * @param chosenColumn RuleColumnDescriptor + */ + public void groupBy(RuleColumnDescriptor chosenColumn) { + + List<RuleColumnDescriptor> visibleColumns = new ArrayList<RuleColumnDescriptor>(availableColumns.length); + for (RuleColumnDescriptor desc : availableColumns) { + if (desc == chosenColumn) continue; // redundant, don't include it + if (hiddenColumnNames.contains(desc.label())) continue; + visibleColumns.add(desc); + } + + setupTreeColumns( + visibleColumns.toArray(new RuleColumnDescriptor[visibleColumns.size()]), + chosenColumn == null ? null : chosenColumn.accessor() + ); + } + + private boolean hasPriorityGrouping() { + return + groupingColumn == TextColumnDescriptor.priorityName || + groupingColumn == TextColumnDescriptor.priority; + } + + /** + * Populate the rule table + */ + public void populateRuleTable() { + ruleTreeViewer.setInput(ruleSet); + checkSelections(); + + restoreSavedRuleSelections(); + updateCheckControls(); + } + + private void popupRuleSelectionMenu(Event event) { + + // have to do it here or else the ruleset var is null in the menu setup - timing issue + if (rulesetMenusByName == null) { + addRulesetMenuOptions(ruleListMenu); + new MenuItem(ruleListMenu, SWT.SEPARATOR); + addColumnSelectionOptions(ruleListMenu); + } + + adjustMenuPrioritySettings(); + adjustMenuRulesetSettings(); + adjustMenuUseDefaultsOption(); + ruleListMenu.setLocation(event.x, event.y); + ruleListMenu.setVisible(true); + } + + private void redrawTable() { + redrawTable("-", -1); + } + + private void redrawTable(String sortColumnLabel, int sortDir) { + groupBy(groupingColumn); + + TreeColumn sortColumn = columnFor(sortColumnLabel); + ruleTreeViewer.getTree().setSortColumn(sortColumn); + ruleTreeViewer.getTree().setSortDirection(sortDir); + } + + + /** + * Refresh the list + */ + protected void refresh() { + try { + ruleTreeViewer.getControl().setRedraw(false); + ruleTreeViewer.refresh(); + } catch (ClassCastException e) { + plugin.logError("Ignoring exception 
while refreshing table", e); + } finally { + ruleTreeViewer.getControl().setRedraw(true); + } + } + + private void restoreSavedRuleSelections() { + + Set<String> names = PreferenceUIStore.instance.selectedRuleNames(); + List<Rule> rules = new ArrayList<Rule>(); + for (String name : names) rules.add(ruleSet.getRuleByName(name)); + + IStructuredSelection selection = new StructuredSelection(rules); + ruleTreeViewer.setSelection(selection); + } + + public RuleSet ruleSet() { return ruleSet; } + + private String[] rulesetNames() { + + Set<String> names = new HashSet<String>(); + for (Rule rule : ruleSet.getRules()) { + names.add(ruleSetNameFrom(rule)); // if we strip out the 'Rules' portions then we don't get matches...need to rename rulesets + } + return names.toArray(new String[names.size()]); + } + + private void saveRuleSelections() { + + IStructuredSelection selection = (IStructuredSelection)ruleTreeViewer.getSelection(); + + List<String> ruleNames = new ArrayList<String>(); + for (Object item : selection.toList()) { + if (item instanceof Rule) + ruleNames.add(((Rule)item).getName()); + } + + PreferenceUIStore.instance.selectedRuleNames(ruleNames); + } + + public void saveUIState() { + saveRuleSelections(); + } + + /** + * @param item Object[] + */ + private void selectedItems(Object[] items) { + + ruleSelection = new RuleSelection(items); + if (ruleSelectionListener != null) { + ruleSelectionListener.selection(ruleSelection); + } + + if (removeRuleButton != null) removeRuleButton.setEnabled(items.length > 0); + } + + private int[] selectionRatioIn(Rule[] rules) { + + int selectedCount = 0; + for (Rule rule : rules) { + if (preferences.isActive(rule.getName())) selectedCount++; + } + return new int[] { selectedCount , rules.length }; + } + + private void setAllRulesActive() { + for (Rule rule : ruleSet.getRules()) { + preferences.isActive(rule.getName(), true); + } + + treeViewer().setCheckedElements(ruleSet.getRules().toArray()); + + updateCheckControls(); + 
setModified(); + } + + private void setPriority(RulePriority priority) { + + if (ruleSelection == null) return; + + ruleSelection.setPriority(priority); + + if (hasPriorityGrouping()) { + redrawTable(); + } else { + ruleTreeViewer.update(ruleSelection.allRules().toArray(), null); + } + + setModified(); + } + + private void setRuleset(String rulesetName) { + // TODO - awaiting support in PMD itself + } + /** + * Method setupTreeColumns. + * @param columnDescs RuleColumnDescriptor[] + * @param groupingField RuleFieldAccessor + */ + private void setupTreeColumns(RuleColumnDescriptor[] columnDescs, RuleFieldAccessor groupingField) { + + Tree ruleTree = cleanupRuleTree(); + + for (int i=0; i<columnDescs.length; i++) columnDescs[i].newTreeColumnFor(ruleTree, i, this, paintListeners); + + ruleTreeViewer.setLabelProvider(new RuleLabelProvider(columnDescs)); + ruleTreeViewer.setContentProvider( + new RuleSetTreeItemProvider(groupingField, "??", Util.comparatorFrom(columnSorter, sortDescending)) + ); + + ruleTreeViewer.setInput(ruleSet); + checkSelections(); + + TreeColumn[] columns = ruleTree.getColumns(); + for (TreeColumn column : columns) column.pack(); + } + + private void sortByCheckedItems() { + sortBy(checkedColumnAccessor, ruleTreeViewer.getTree().getColumn(0)); + } + + public void sortBy(RuleFieldAccessor accessor, Object context) { + + TreeColumn column = (TreeColumn)context; + + if (columnSorter == accessor) { + sortDescending = !sortDescending; + } else { + columnSorter = accessor; + } + + redrawTable(column.getToolTipText(), sortDescending ? 
SWT.DOWN : SWT.UP); + } + + private CheckboxTreeViewer treeViewer() { return ruleTreeViewer; } + + private void toggleColumnVisiblity(String columnName) { + + if (hiddenColumnNames.contains(columnName)) { + hiddenColumnNames.remove(columnName); + } else { + hiddenColumnNames.add(columnName); + } + + PreferenceUIStore.instance.hiddenColumnNames(hiddenColumnNames); + redrawTable(); + } + + public void useRuleSet(RuleSet theSet) { + ruleSet = theSet; + } + + public void updated(Rule rule) { + ruleTreeViewer.update(rule, null); + } + + private void updateCheckControls() { + + Rule[] rules = new Rule[ruleSet.size()]; + rules = ruleSet.getRules().toArray(rules); + int[] selectionRatio = selectionRatioIn(rules); + + selectAllButton.setEnabled( selectionRatio[0] < selectionRatio[1]); + unSelectAllButton.setEnabled( selectionRatio[0] > 0); + sortByCheckedButton.setEnabled( (selectionRatio[0] != 0) && (selectionRatio[0] != selectionRatio[1])); + + String label = SWTUtil.stringFor(StringKeys.MSGKEY_PREF_RULESET_ACTIVE_RULE_COUNT); + activeCountLabel.setText(label + " " + activeRuleCount() + " / " + ruleSet.size()); + } + + private void updateTooltipFor(TreeItem item, int columnIndex) { + + RuleLabelProvider provider = (RuleLabelProvider)ruleTreeViewer.getLabelProvider(); + String txt = provider.getDetailText(item.getData(), columnIndex); + ruleTreeViewer.getTree().setToolTipText(txt); + } +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/TODO items b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/TODO items index 1af3fab1692..ccc4525f829 100755 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/TODO items +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/TODO items @@ -1,11 +1,8 @@ -ToDo items: Jan 9th 
2010 - +ToDo items: March 21st 2010 Finish remaining editors and the Add Property dialog -Enable delete key on rule table Highlight non-default values in editors and/or summary column Enable 'Use defaults' option in popup menu -Sort indicators on rule column headings Add selection column selected rules in per-project rule settings table Rework context menu - build on-demand, not before Markup violations using priority icons rather than the yellow yield signs or red Xs @@ -14,7 +11,6 @@ Remember settings for: chosen grouping selection (or none) sort column & order rule table screen fraction -Find standard place to park preference settings file Items needing support in PMD itself: diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/TextColumnDescriptor.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/TextColumnDescriptor.java index 205a5c3deae..ff8fda5c120 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/TextColumnDescriptor.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/br/TextColumnDescriptor.java @@ -30,19 +30,20 @@ public Comparable<?> valueFor(Rule rule) { } }; - public static final RuleColumnDescriptor name = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_RULE_NAME, SWT.LEFT, 210, RuleFieldAccessor.name, true); - public static final RuleColumnDescriptor ruleSetName = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_RULESET_NAME,SWT.LEFT, 160, ruleSetNameAcc, true); - public static final RuleColumnDescriptor priority = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PRIORITY, SWT.RIGHT,53, RuleFieldAccessor.priority, false); - public static final RuleColumnDescriptor priorityName = new 
TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PRIORITY, SWT.LEFT, 80, RuleFieldAccessor.priorityName, true); - public static final RuleColumnDescriptor since = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_SINCE, SWT.RIGHT,46, RuleFieldAccessor.since, false); - public static final RuleColumnDescriptor usesDFA = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_DATAFLOW, SWT.LEFT, 60, RuleFieldAccessor.usesDFA, false); - public static final RuleColumnDescriptor externalURL = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_URL, SWT.LEFT, 100, RuleFieldAccessor.url, true); - public static final RuleColumnDescriptor properties = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PROPERTIES, SWT.LEFT, 40, propertiesAcc, true); - public static final RuleColumnDescriptor language = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_LANGUAGE, SWT.LEFT, 32, RuleFieldAccessor.language, false); - public static final RuleColumnDescriptor ruleType = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_RULE_TYPE, SWT.LEFT, 20, RuleFieldAccessor.ruleType, false); - public static final RuleColumnDescriptor minLangVers = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_MIN_VER, SWT.LEFT, 30, RuleFieldAccessor.minLanguageVersion, false); - public static final RuleColumnDescriptor exampleCount = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_EXAMPLE_CNT, SWT.RIGHT, 20, RuleFieldAccessor.exampleCount, false); - + public static final RuleColumnDescriptor name = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_RULE_NAME, SWT.LEFT, 210, RuleFieldAccessor.name, true, null); + public static final RuleColumnDescriptor ruleSetName = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_RULESET_NAME,SWT.LEFT, 160, ruleSetNameAcc, true, null); + public static final RuleColumnDescriptor priority = new 
TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PRIORITY, SWT.RIGHT,53, RuleFieldAccessor.priority, false, null); + public static final RuleColumnDescriptor priorityName = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PRIORITY, SWT.LEFT, 80, RuleFieldAccessor.priorityName, true, null); + public static final RuleColumnDescriptor since = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_SINCE, SWT.RIGHT,46, RuleFieldAccessor.since, false, null); + public static final RuleColumnDescriptor usesDFA = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_DATAFLOW, SWT.LEFT, 60, RuleFieldAccessor.usesDFA, false, null); + public static final RuleColumnDescriptor externalURL = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_URL, SWT.LEFT, 100, RuleFieldAccessor.url, true, null); + public static final RuleColumnDescriptor properties = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_PROPERTIES, SWT.LEFT, 40, propertiesAcc, true, null); + public static final RuleColumnDescriptor language = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_LANGUAGE, SWT.LEFT, 32, RuleFieldAccessor.language, false, null); + public static final RuleColumnDescriptor ruleType = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_RULE_TYPE, SWT.LEFT, 20, RuleFieldAccessor.ruleType, false, null); + public static final RuleColumnDescriptor minLangVers = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_MIN_VER, SWT.LEFT, 30, RuleFieldAccessor.minLanguageVersion, false, null); + public static final RuleColumnDescriptor exampleCount = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_EXAMPLE_CNT, SWT.RIGHT, 20, RuleFieldAccessor.exampleCount, false, null); + public static final RuleColumnDescriptor fixCount = new TextColumnDescriptor(StringKeys.MSGKEY_PREF_RULESET_COLUMN_FIXCOUNT, SWT.RIGHT, 25, RuleFieldAccessor.fixCount, false, null); + // public static final 
RuleColumnDescriptor violateXPath = new TextColumnDescriptor("Filter", SWT.RIGHT, 20, RuleFieldAccessor.violationXPath, true); /** @@ -52,8 +53,8 @@ public Comparable<?> valueFor(Rule rule) { * @param theAccessor RuleFieldAccessor * @param resizableFlag boolean */ - public TextColumnDescriptor(String theLabel, int theAlignment, int theWidth, RuleFieldAccessor theAccessor, boolean resizableFlag) { - super(theLabel, theAlignment,theWidth,theAccessor,resizableFlag); + public TextColumnDescriptor(String theLabel, int theAlignment, int theWidth, RuleFieldAccessor theAccessor, boolean resizableFlag, String theImagePath) { + super(theLabel, theAlignment,theWidth,theAccessor,resizableFlag, theImagePath); } /* (non-Javadoc) @@ -61,8 +62,7 @@ public TextColumnDescriptor(String theLabel, int theAlignment, int theWidth, Rul */ public TreeColumn newTreeColumnFor(Tree parent, int columnIndex, RuleSortListener sortListener, Map<Integer, List<Listener>> paintListeners) { TreeColumn tc = buildTreeColumn(parent, sortListener); - tc.setText(label()); - tc.setToolTipText(tooltip()); + tc.setText(label()); return tc; } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/editors/SWTUtil.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/editors/SWTUtil.java index 0bbb6251ec5..f3c5a594cb0 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/editors/SWTUtil.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/editors/SWTUtil.java @@ -1,6 +1,7 @@ package net.sourceforge.pmd.eclipse.ui.preferences.editors; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -35,7 +36,9 @@ public static Set<String> asStringSet(String input, char separator) { } // 
TODO move this to to Collections utility - public static String asString(Set<String> values, char separator) { + public static String asString(Collection<String> values, char separator) { + + if (values == null || values.isEmpty()) return ""; String[] strings = values.toArray(new String[values.size()]); StringBuilder sb = new StringBuilder(strings[0]); diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/FormArranger.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/FormArranger.java index 241bc4aab00..fa1cc55da6a 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/FormArranger.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/FormArranger.java @@ -8,6 +8,7 @@ import net.sourceforge.pmd.PropertyDescriptor; import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; +import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; import net.sourceforge.pmd.eclipse.ui.preferences.br.EditorFactory; import net.sourceforge.pmd.eclipse.ui.preferences.br.NewPropertyDialog; import net.sourceforge.pmd.eclipse.ui.preferences.br.SizeChangeListener; @@ -181,7 +182,7 @@ private Control addDeleteButton(Composite parent, final PropertyDescriptor<?> de Button button = new Button(parent, SWT.BORDER); button.setData(desc.name()); // for later reference - button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_REMVIO)); + button.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_DELETE)); button.addSelectionListener( new SelectionListener(){ public void widgetDefaultSelected(SelectionEvent e) { } diff --git 
a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/ListManager.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/ListManager.java new file mode 100755 index 00000000000..e6a4ca2ec1b --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/ListManager.java @@ -0,0 +1,95 @@ +package net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers; + +import org.eclipse.swt.SWT; +import org.eclipse.swt.events.SelectionAdapter; +import org.eclipse.swt.events.SelectionEvent; +import org.eclipse.swt.widgets.Button; +import org.eclipse.swt.widgets.Event; +import org.eclipse.swt.widgets.List; +import org.eclipse.swt.widgets.Listener; + +/** + * + * @author Brian Remedios + */ +public class ListManager { + + private final List list; + private final Button upButton; + private final Button downButton; + private final Button deleteButton; + + public ListManager(List theList, Button theUpButton, Button theDownButton, Button theDeleteButton) { + + list = theList; + upButton = theUpButton; + downButton = theDownButton; + deleteButton = theDeleteButton; + + registerListeners(); + updateButtonStates(); + } + + private void registerListeners() { + list.addListener(SWT.Selection, new Listener() { + + public void handleEvent(Event arg0) { + updateButtonStates(); + } + + }); + upButton.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent arg0) { shiftUp(); } + }); + downButton.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent arg0) { shiftDown(); } + }); + deleteButton.addSelectionListener(new SelectionAdapter() { + public void widgetSelected(SelectionEvent arg0) { delete(); } + }); + } + + private void shiftUp() { + // TODO + updateButtonStates(); + } + + private void shiftDown() { 
+ // TODO + updateButtonStates(); + } + + private void delete() { + int[] indices = list.getSelectionIndices(); + list.remove(indices); + + updateButtonStates(); + } + + private void updateButtonStates() { + + if (!hasSelection()) { + upButton.setEnabled(false); + downButton.setEnabled(false); + deleteButton.setEnabled(false); + return; + } + + upButton.setEnabled(hasSelectionIndex(0)); + downButton.setEnabled(hasSelectionIndex(list.getItemCount()-1)); + deleteButton.setEnabled(true); + } + + private boolean hasSelection() { + return list.getSelectionCount() > 0; + } + + + private boolean hasSelectionIndex(int index) { + + for (int i : list.getSelectionIndices()) { + if (i == index) return false; + } + return true; + } +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/PerRulePropertyPanelManager.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/PerRulePropertyPanelManager.java index 4d4b9a643f2..759624aa0fe 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/PerRulePropertyPanelManager.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/PerRulePropertyPanelManager.java @@ -8,7 +8,6 @@ import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.eclipse.ui.preferences.br.EditorFactory; -import net.sourceforge.pmd.eclipse.ui.preferences.br.PMDPreferencePage; import net.sourceforge.pmd.eclipse.ui.preferences.br.SizeChangeListener; import net.sourceforge.pmd.eclipse.ui.preferences.br.ValueChangeListener; import net.sourceforge.pmd.eclipse.ui.preferences.editors.BooleanEditorFactory; diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/QuickFixPanelManager.java 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/QuickFixPanelManager.java new file mode 100755 index 00000000000..681fad33d78 --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/preferences/panelmanagers/QuickFixPanelManager.java @@ -0,0 +1,148 @@ +package net.sourceforge.pmd.eclipse.ui.preferences.panelmanagers; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import net.sourceforge.pmd.Rule; +import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; +import net.sourceforge.pmd.eclipse.ui.preferences.br.ValueChangeListener; +import net.sourceforge.pmd.eclipse.ui.quickfix.Fix; +import net.sourceforge.pmd.eclipse.ui.quickfix.PMDResolutionGenerator; +import net.sourceforge.pmd.eclipse.util.ResourceManager; + +import org.eclipse.swt.SWT; +import org.eclipse.swt.layout.GridData; +import org.eclipse.swt.layout.GridLayout; +import org.eclipse.swt.widgets.Button; +import org.eclipse.swt.widgets.Composite; +import org.eclipse.swt.widgets.Control; + +public class QuickFixPanelManager extends AbstractRulePanelManager { + + private org.eclipse.swt.widgets.List fixerList; + private ListManager listManager; + + public QuickFixPanelManager(ValueChangeListener theListener) { + super(theListener); + } + + protected List<Fix> commonFixes() { + // TODO finish this + List<Rule> theRules = rules.allRules(); + List<Fix> fixes = new ArrayList<Fix>(); + + Fix[] fixSet = PMDResolutionGenerator.fixesFor(theRules.get(0)); + if (fixSet != null) { + for (Fix fix : fixSet) fixes.add(fix); + } + + return fixes; + } + + @Override + protected void adapt() { + + fixerList.removeAll(); + + List<Fix> fixes = commonFixes(); + + for (Fix fix : fixes) fixerList.add(fix.getLabel()); + } + + protected boolean canManageMultipleRules() { return true; } + + @Override + protected void clearControls() { + fixerList.removeAll(); + } + 
+ protected void setVisible(boolean flag) { + fixerList.setVisible(flag); + } + + public static <T> List<T> shift(List<T> items, int[] indices, int shiftAmt) { + + int[] indexArr = new int[items.size()]; + + int currentIdx = 0; + for (int i=0; i<items.size(); i++) { + if (currentIdx < indices.length && i == indices[currentIdx] + shiftAmt) { + indexArr[i] = indices[currentIdx++]; + } else { + indexArr[i] = i - currentIdx; + } + } + + List<T> out = new ArrayList<T>(indexArr.length); + for (int i=0; i<indexArr.length; i++) { + out.add( items.get(indexArr[i]) ); + } + + return out; + } + + public static <T> List<T> shift(List<T> items, int index, int shiftAmt) { + + int start = Math.min(index, index + shiftAmt); + int end = Math.max(index, index + shiftAmt); + + Collections.rotate(items.subList(start, end), shiftAmt); + + return items; + } + + public Control setupOn(Composite parent) { + + GridData gridData = new GridData(GridData.FILL_HORIZONTAL); + + Composite panel = new Composite(parent, 0); + GridLayout layout = new GridLayout(2, false); + panel.setLayout(layout); + + fixerList = new org.eclipse.swt.widgets.List(panel, SWT.BORDER | SWT.MULTI | SWT.V_SCROLL); + gridData = new GridData(GridData.FILL_BOTH); + gridData.grabExcessHorizontalSpace = true; + gridData.horizontalSpan = 1; + fixerList.setLayoutData(gridData); + + Composite buttonPanel = new Composite(panel, 0); + layout = new GridLayout(1, false); + buttonPanel.setLayout(layout); + gridData = new GridData(); + gridData.horizontalSpan = 1; + gridData.grabExcessHorizontalSpace = false; + buttonPanel.setLayoutData(gridData); + + Button shiftUpButton = new Button(buttonPanel, SWT.PUSH); + shiftUpButton.setToolTipText("Shift up"); + shiftUpButton.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_UPARROW)); + + Button addButton = new Button(buttonPanel, SWT.PUSH); + addButton.setToolTipText("Add"); + addButton.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_ADD)); + + Button removeButton = 
new Button(buttonPanel, SWT.PUSH); + removeButton.setToolTipText("Remove"); + removeButton.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_DELETE)); + + Button shiftDownButton = new Button(buttonPanel, SWT.PUSH); + shiftDownButton.setToolTipText("Shift down"); + shiftDownButton.setImage(ResourceManager.imageFor(PMDUiConstants.ICON_BUTTON_DOWNARROW)); + + listManager = new ListManager(fixerList, shiftUpButton, shiftDownButton, removeButton); + + return panel; + } + + public static void main(String[] args) { + + List<Integer> numbers = new ArrayList<Integer>(); + Collections.addAll(numbers, 0,1,2,3,4,5,6,7,8,9,10); +// int[] shiftSet = new int[] { 4, 6 }; + + List<Integer> newNumbers = shift(numbers, 5, 3); + + System.out.println(newNumbers); + } +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/AbstractFix.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/AbstractFix.java new file mode 100755 index 00000000000..1c6119dd551 --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/AbstractFix.java @@ -0,0 +1,17 @@ +package net.sourceforge.pmd.eclipse.ui.quickfix; + +public abstract class AbstractFix implements Fix { + + private final String label; + protected AbstractFix(String theLabel) { + label = theLabel; + } + + /** + * @see net.sourceforge.pmd.eclipse.Fix#getLabel() + */ + public String getLabel() { + return label; + } + +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/CommentOutLineFix.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/CommentOutLineFix.java new file mode 100755 index 00000000000..91e38d30b1e --- /dev/null +++ 
b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/CommentOutLineFix.java @@ -0,0 +1,49 @@ +/* + * <copyright> + * Copyright 1997-2003 PMD for Eclipse Development team + * under sponsorship of the Defense Advanced Research Projects + * Agency (DARPA). + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the Cougaar Open Source License as published by + * DARPA on the Cougaar Open Source Website (www.cougaar.org). + * + * THE COUGAAR SOFTWARE AND ANY DERIVATIVE SUPPLIED BY LICENSOR IS + * PROVIDED "AS IS" WITHOUT WARRANTIES OF ANY KIND, WHETHER EXPRESS OR + * IMPLIED, INCLUDING (BUT NOT LIMITED TO) ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, AND WITHOUT + * ANY WARRANTIES AS TO NON-INFRINGEMENT. IN NO EVENT SHALL COPYRIGHT + * HOLDER BE LIABLE FOR ANY DIRECT, SPECIAL, INDIRECT OR CONSEQUENTIAL + * DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE OF DATA OR PROFITS, + * TORTIOUS CONDUCT, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + * PERFORMANCE OF THE COUGAAR SOFTWARE. 
+ * + * </copyright> + */ +package net.sourceforge.pmd.eclipse.ui.quickfix; + +import org.eclipse.jface.text.Document; + +/** + * + * @author Brian Remedios + * + */ +public class CommentOutLineFix extends AbstractFix { + + public CommentOutLineFix() { + super("Comment out the line"); + } + + /** + * @see net.sourceforge.pmd.eclipse.Fix#fix(java.lang.String, int) + */ + public String fix(String sourceCode, int lineNumber) { + final Document document = new Document(sourceCode); + + // TODO + + return document.get(); + } + +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/DeleteLineFix.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/DeleteLineFix.java index 3370ec84068..c07f5dbcf88 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/DeleteLineFix.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/DeleteLineFix.java @@ -31,8 +31,12 @@ * @author Philippe Herlin * */ -public class DeleteLineFix implements Fix { +public class DeleteLineFix extends AbstractFix { + public DeleteLineFix() { + super("Delete the line"); + } + /** * @see net.sourceforge.pmd.eclipse.Fix#fix(java.lang.String, int) */ @@ -49,11 +53,4 @@ public String fix(String sourceCode, int lineNumber) { return document.get(); } - /** - * @see net.sourceforge.pmd.eclipse.Fix#getLabel() - */ - public String getLabel() { - return "Delete the line"; - } - } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/PMDResolutionGenerator.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/PMDResolutionGenerator.java index 2d636d82d3f..47af8c9a695 100644 --- 
a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/PMDResolutionGenerator.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/PMDResolutionGenerator.java @@ -23,16 +23,21 @@ package net.sourceforge.pmd.eclipse.ui.quickfix; import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.ResourceBundle; +import java.util.Set; import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.RuleSet; import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; -import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; +import net.sourceforge.pmd.eclipse.runtime.builder.MarkerUtil; import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; +import net.sourceforge.pmd.util.StringUtil; import org.eclipse.core.resources.IMarker; -import org.eclipse.core.runtime.CoreException; import org.eclipse.ui.IMarkerResolution; import org.eclipse.ui.IMarkerResolutionGenerator; @@ -40,29 +45,140 @@ * Implementation of a resolution generator to bring the quick fixes feature * of Eclipse to PMD * - * @author Philippe Herlin + * @author Philippe Herlin, Brian Remedios * + * TODO + * resource bundles are read-only, migrate to a persistence mechanism + * that allows for updates to the fixes associated with the rules. 
*/ public class PMDResolutionGenerator implements IMarkerResolutionGenerator { + private static final Map<String, Fix[]> fixersByRuleName = new HashMap<String, Fix[]>(); + + private static final Set<String> missingFixes = new HashSet<String>(); + private static final Map<String, String> brokenFixes = new HashMap<String, String>(); + + private static String QUICKFIX_BUNDLE = "properties.QuickFix"; // NOPMD + + public static final IMarkerResolution[] EMPTY_RESOLUTIONS = new IMarkerResolution[0]; + + public static Class<Fix> fixClassFor(String className, String ruleName) { + + if (StringUtil.isEmpty(className)) return null; + + try { + Class<?> cls = Class.forName(className); + if (Fix.class.isAssignableFrom(cls)) { + return (Class<Fix>)cls; + } else { + brokenFixes.put(ruleName, className); + return null; + } + } catch (ClassNotFoundException ex) { + return null; + } + } + + private static void add(String ruleName, Fix fix) { + + if (fixersByRuleName.containsKey(ruleName)) { + Fix[] existingFixers = fixersByRuleName.get(ruleName); + Fix[] newFixers = new Fix[existingFixers.length+1]; + System.arraycopy(existingFixers, 0, newFixers, 0, existingFixers.length); + newFixers[newFixers.length-1] = fix; + fixersByRuleName.put(ruleName, newFixers); + } else { + fixersByRuleName.put(ruleName, new Fix[] { fix }); + } + } + + public static int fixCountFor(Rule rule) { + + String ruleName = rule.getName(); + if (missingFixes.contains(ruleName)) return 0; + + loadFixesFor(ruleName); + + if (!fixersByRuleName.containsKey(ruleName)) return 0; + return fixersByRuleName.get(ruleName).length; + } + + public static void saveFixesFor(String ruleName) { + // TODO + } + + private static void loadFixesFor(String ruleName) { + + ResourceBundle bundle = ResourceBundle.getBundle(QUICKFIX_BUNDLE); + if (!bundle.containsKey(ruleName)) { + missingFixes.add(ruleName); + return; + } + + String fixClassNameSet = bundle.getString(ruleName); + String[] fixClassNames = fixClassNameSet.split(","); + + 
for (String fixClassName : fixClassNames) { + if (StringUtil.isEmpty(fixClassName)) continue; + Class<Fix> fixClass = fixClassFor(fixClassName.trim(), ruleName); + if (fixClass != null) { + Fix fix = fixFor(ruleName, fixClass); + if (fix != null) { + add(ruleName, fix); + } + } + } + + if (!fixersByRuleName.containsKey(ruleName)) missingFixes.add(ruleName); + } + + public static boolean hasFixesFor(Rule rule) { + + String ruleName = rule.getName(); + if (fixersByRuleName.containsKey(ruleName)) return true; + + if (missingFixes.contains(ruleName)) return false; + if (brokenFixes.containsKey(ruleName)) return false; + + loadFixesFor(ruleName); + + return fixersByRuleName.containsKey(ruleName); + } + + private static Fix fixFor(String ruleName, Class<Fix> fixClass) { + + try { + return (Fix)fixClass.newInstance(); + } catch (Exception ex) { + brokenFixes.put(ruleName, fixClass.getName()); + return null; + } + } + + public static Fix[] fixesFor(Rule rule) { + return fixersByRuleName.get(rule.getName()); + } + + public static void fixesFor(Rule rule, Fix[] fixes) { + fixersByRuleName.put(rule.getName(), fixes); + } + /** * @see org.eclipse.ui.IMarkerResolutionGenerator#getResolutions(org.eclipse.core.resources.IMarker) */ public IMarkerResolution[] getResolutions(IMarker marker) { + final List<PMDResolution> markerResolutionList = new ArrayList<PMDResolution>(); try { - final String ruleName = (String) marker.getAttribute(PMDUiConstants.KEY_MARKERATT_RULENAME); + final String ruleName = MarkerUtil.ruleNameFor(marker); if (ruleName != null) { final RuleSet ruleSet = PMDPlugin.getDefault().getPreferencesManager().getRuleSet(); final Rule rule = ruleSet.getRuleByName(ruleName); - - // The final implementation should ask the rule to give a list of fixes - if (rule != null && rule.getName().equals("DuplicateImports")) { - markerResolutionList.add(new PMDResolution(new DeleteLineFix())); - } + if (rule == null || !hasFixesFor(rule)) return EMPTY_RESOLUTIONS; + + Fix[] fixes = 
fixesFor(rule); + for (Fix fix : fixes) markerResolutionList.add( new PMDResolution(fix) ); } - } catch (CoreException e) { - PMDPlugin.getDefault().showError(PMDPlugin.getDefault().getStringTable().getString(StringKeys.MSGKEY_ERROR_CORE_EXCEPTION), e); } catch (RuntimeException e) { PMDPlugin.getDefault().showError(PMDPlugin.getDefault().getStringTable().getString(StringKeys.MSGKEY_ERROR_RUNTIME_EXCEPTION), e); } diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/SearchAndReplace.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/SearchAndReplace.java new file mode 100755 index 00000000000..dbc5f645e1d --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/quickfix/SearchAndReplace.java @@ -0,0 +1,29 @@ +package net.sourceforge.pmd.eclipse.ui.quickfix; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class SearchAndReplace extends AbstractFix { + + private final String searchStr; + private final String replaceStr; + private final Pattern pattern; + + public SearchAndReplace(String searchString, String replacement) { + super("Search & replace"); + + searchStr = searchString; + replaceStr = replacement; + pattern = Pattern.compile(searchStr); + } + + /** + * @see net.sourceforge.pmd.eclipse.Fix#fix(java.lang.String, int) + */ + public String fix(String sourceCode, int lineNumber) { + + Matcher matcher = pattern.matcher(sourceCode); + return matcher.replaceAll(replaceStr); + } + +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/ViolationOutline.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/ViolationOutline.java index dedc8a1e570..f51f1959529 100644 --- 
a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/ViolationOutline.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/ViolationOutline.java @@ -136,10 +136,10 @@ private void addFilterControls() { manager.add(filterAction); } } - + /** * Creates a Context Menu for the View - * + * * @param viewer */ public void createContextMenu(final TableViewer viewer) { @@ -148,6 +148,7 @@ public void createContextMenu(final TableViewer viewer) { // here we add the Context Menus Actions manager.addMenuListener(new IMenuListener() { public void menuAboutToShow(IMenuManager manager) { + // show the Rule Dialog Action showRuleAction = new ShowRuleAction(viewer, getSite().getShell()); manager.add(showRuleAction); diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/AbstractViolationSelectionAction.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/AbstractViolationSelectionAction.java index defc6abb513..d8b5edad994 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/AbstractViolationSelectionAction.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/AbstractViolationSelectionAction.java @@ -1,7 +1,10 @@ package net.sourceforge.pmd.eclipse.ui.views.actions; +import java.util.Collections; import java.util.Iterator; +import java.util.List; +import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.eclipse.runtime.builder.MarkerUtil; import org.eclipse.core.resources.IMarker; @@ -49,6 +52,14 @@ protected boolean hasSelections() { return !tableViewer.getSelection().isEmpty(); } + protected List<Rule> getSelectedViolationRules() { + + if (!hasSelections()) return Collections.EMPTY_LIST; + + 
IMarker[] markers = getSelectedViolations(); + return MarkerUtil.rulesFor(markers); + } + /** * Return the selected Violations (Markers) * diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/DisableRuleAction.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/DisableRuleAction.java new file mode 100755 index 00000000000..6268e73ebcd --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/DisableRuleAction.java @@ -0,0 +1,38 @@ +package net.sourceforge.pmd.eclipse.ui.views.actions; + +import java.util.List; + +import net.sourceforge.pmd.Rule; +import net.sourceforge.pmd.eclipse.runtime.builder.MarkerUtil; +import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; +import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; + +import org.eclipse.core.resources.IMarker; +import org.eclipse.jface.viewers.TableViewer; + +public class DisableRuleAction extends AbstractViolationSelectionAction { + + public DisableRuleAction(TableViewer viewer) { + super(viewer); + } + + protected String textId() { return StringKeys.MSGKEY_VIEW_ACTION_DISABLE; } + + protected String imageId() { return PMDUiConstants.ICON_BUTTON_DISABLE; } + + protected String tooltipMsgId() { return StringKeys.MSGKEY_VIEW_TOOLTIP_DISABLE; } + + /** + * @see org.eclipse.jface.action.IAction#run() + */ + public void run() { + final IMarker[] markers = getSelectedViolations(); + if (markers == null) return; + + + List<Rule> rules = MarkerUtil.rulesFor(markers); + + } + + +} diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/ReviewAction.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/ReviewAction.java index fd2ab295650..0fc67b75a1f 100644 --- 
a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/ReviewAction.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/ReviewAction.java @@ -10,6 +10,7 @@ import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; import net.sourceforge.pmd.eclipse.runtime.PMDRuntimeConstants; +import net.sourceforge.pmd.eclipse.runtime.builder.MarkerUtil; import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; @@ -216,7 +217,7 @@ private String addPluginReviewComment(String sourceCode, int offset, IMarker mar // Add the review comment sb.append(computeIndent(sourceCode, offset)); sb.append(PMDRuntimeConstants.PLUGIN_STYLE_REVIEW_COMMENT); - sb.append(marker.getAttribute(PMDUiConstants.KEY_MARKERATT_RULENAME, "")); + sb.append(MarkerUtil.ruleNameFor(marker)); sb.append(": ").append(additionalCommentTxt()); sb.append(System.getProperty("line.separator")); diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/ShowRuleAction.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/ShowRuleAction.java index 8654e8320e0..63fd57c76b8 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/ShowRuleAction.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/ui/views/actions/ShowRuleAction.java @@ -2,7 +2,7 @@ import net.sourceforge.pmd.Rule; import net.sourceforge.pmd.eclipse.plugin.PMDPlugin; -import net.sourceforge.pmd.eclipse.ui.PMDUiConstants; +import net.sourceforge.pmd.eclipse.runtime.builder.MarkerUtil; import net.sourceforge.pmd.eclipse.ui.nls.StringKeys; import net.sourceforge.pmd.eclipse.ui.preferences.RuleDialog; @@ -33,10 +33,6 @@ public 
ShowRuleAction(TableViewer viewer, Shell shell) { protected String tooltipMsgId() { return StringKeys.MSGKEY_VIEW_TOOLTIP_SHOW_RULE; } - private String ruleNameFor(IMarker marker) { - return marker.getAttribute(PMDUiConstants.KEY_MARKERATT_RULENAME, ""); - } - protected boolean canExecute() { return super.canExecute() && allSelectionsDenoteSameRule(); } @@ -44,12 +40,7 @@ protected boolean canExecute() { private boolean allSelectionsDenoteSameRule() { IMarker[] markers = getSelectedViolations(); - String ruleName = ruleNameFor(markers[0]); - for (int i=1; i<markers.length; i++) { - if (!ruleName.equals(ruleNameFor(markers[i]))) return false; - } - - return true; + return MarkerUtil.commonRuleNameAmong(markers) != null; } /** @@ -72,7 +63,8 @@ public Rule getSelectedViolationRule() { IMarker[] markers = getSelectedViolations(); if (markers != null) { rule = PMDPlugin.getDefault().getPreferencesManager().getRuleSet().getRuleByName( - markers[0].getAttribute(PMDUiConstants.KEY_MARKERATT_RULENAME, "")); + MarkerUtil.ruleNameFor(markers[0]) + ); } } catch (RuntimeException e) { logErrorByKey(StringKeys.MSGKEY_ERROR_RUNTIME_EXCEPTION, e); diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/util/Util.java b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/util/Util.java index 36b9662e91c..17a90f9a942 100644 --- a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/util/Util.java +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/net/sourceforge/pmd/eclipse/util/Util.java @@ -145,6 +145,10 @@ private static void signatureFor(Class<?> type, String[] unwantedPrefixes, Strin public static Comparator<?> comparatorFrom(final RuleFieldAccessor accessor, final boolean inverted) { + if (accessor == null) { + throw new IllegalArgumentException("Accessor is required"); + } + return new Comparator() { public int 
compare(Object a, Object b) { diff --git a/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/properties/QuickFix.properties b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/properties/QuickFix.properties new file mode 100755 index 00000000000..e7855b7a404 --- /dev/null +++ b/pmd-eclipse-plugin/plugins/net.sourceforge.pmd.eclipse.plugin/src/properties/QuickFix.properties @@ -0,0 +1,4 @@ +# rule name -> name of class implementing Fix interface + +DuplicateImports = net.sourceforge.pmd.eclipse.ui.quickfix.DeleteLineFix, net.sourceforge.pmd.eclipse.ui.quickfix.CommentOutLineFix +DontImportJavaLang = net.sourceforge.pmd.eclipse.ui.quickfix.DeleteLineFix, net.sourceforge.pmd.eclipse.ui.quickfix.CommentOutLineFix \ No newline at end of file
d07bf38b5f4f65bb7e38e6afae4d41ec304d97e6
restlet-framework-java
- Fixed issue with HTTP client connector not- correctly reporting connection and other IO errors.--
c
https://github.com/restlet/restlet-framework-java
diff --git a/source/main/com/noelios/restlet/impl/HttpClientCallImpl.java b/source/main/com/noelios/restlet/impl/HttpClientCallImpl.java index bbacf77003..f2a0574c81 100644 --- a/source/main/com/noelios/restlet/impl/HttpClientCallImpl.java +++ b/source/main/com/noelios/restlet/impl/HttpClientCallImpl.java @@ -27,7 +27,6 @@ import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.InetAddress; -import java.net.ProtocolException; import java.net.URL; import java.net.UnknownHostException; import java.nio.channels.ReadableByteChannel; @@ -35,8 +34,6 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.logging.Level; -import java.util.logging.Logger; import javax.net.ssl.HttpsURLConnection; @@ -49,9 +46,6 @@ */ public class HttpClientCallImpl extends ConnectorCallImpl implements ClientCall { - /** Obtain a suitable logger. */ - private static Logger logger = Logger.getLogger("com.noelios.restlet.connector.HttpClientCallImpl"); - /** The wrapped HTTP URL connection. */ protected HttpURLConnection connection; @@ -114,17 +108,10 @@ public void setRequestMethod(String method) * Sends the request headers.<br/> * Must be called before sending the request input. 
*/ - public void sendRequestHeaders() + public void sendRequestHeaders() throws IOException { // Set the request method - try - { - getConnection().setRequestMethod(getRequestMethod()); - } - catch(ProtocolException e) - { - logger.log(Level.WARNING, "Unable to set method", e); - } + getConnection().setRequestMethod(getRequestMethod()); // Set the request headers Parameter header; @@ -135,14 +122,7 @@ public void sendRequestHeaders() } // Ensure that the connections is active - try - { - getConnection().connect(); - } - catch(IOException ioe) - { - logger.log(Level.WARNING, "Unable to connect to the server", ioe); - } + getConnection().connect(); } /** @@ -167,7 +147,7 @@ else if(getRequestChannel() != null) * Returns the request entity channel if it exists. * @return The request entity channel if it exists. */ - public WritableByteChannel getRequestChannel() + public WritableByteChannel getRequestChannel() throws IOException { return null; } @@ -176,17 +156,9 @@ public WritableByteChannel getRequestChannel() * Returns the request entity stream if it exists. * @return The request entity stream if it exists. */ - public OutputStream getRequestStream() + public OutputStream getRequestStream() throws IOException { - try - { - return getConnection().getOutputStream(); - } - catch(IOException e) - { - logger.log(Level.WARNING, "Unable to get the request stream", e); - return null; - } + return getConnection().getOutputStream(); } /** @@ -261,7 +233,7 @@ public String getResponseReasonPhrase() * Returns the response channel if it exists. * @return The response channel if it exists. */ - public ReadableByteChannel getResponseChannel() + public ReadableByteChannel getResponseChannel() throws IOException { return null; } @@ -270,16 +242,8 @@ public ReadableByteChannel getResponseChannel() * Returns the response stream if it exists. * @return The response stream if it exists. 
*/ - public InputStream getResponseStream() + public InputStream getResponseStream() throws IOException { - try - { - return getConnection().getInputStream(); - } - catch(IOException e) - { - logger.log(Level.FINE, "Unable to get the response stream", e); - return null; - } + return getConnection().getInputStream(); } } diff --git a/source/main/com/noelios/restlet/impl/HttpClientImpl.java b/source/main/com/noelios/restlet/impl/HttpClientImpl.java index 7b821a8fdc..6ec168d6e1 100644 --- a/source/main/com/noelios/restlet/impl/HttpClientImpl.java +++ b/source/main/com/noelios/restlet/impl/HttpClientImpl.java @@ -23,6 +23,7 @@ package com.noelios.restlet.impl; import java.io.IOException; +import java.net.ConnectException; import java.util.Arrays; import java.util.Date; import java.util.Iterator; @@ -52,6 +53,7 @@ import org.restlet.data.Protocols; import org.restlet.data.Representation; import org.restlet.data.DefaultStatus; +import org.restlet.data.Statuses; import org.restlet.data.Tag; import com.noelios.restlet.data.ContentType; @@ -378,9 +380,15 @@ else if(clientCall.getResponseChannel() != null) } } } + catch(ConnectException ce) + { + logger.log(Level.FINE, "An error occured during the connection to the remote HTTP server.", ce); + call.setStatus(new DefaultStatus(Statuses.SERVER_ERROR_SERVICE_UNAVAILABLE, "Unable to connect to the remote server. " + ce.getMessage())); + } catch(Exception e) { - logger.log(Level.WARNING, "An error occured during the handling of an HTTP client call.", e); + logger.log(Level.FINE, "An error occured during the handling of the HTTP client call.", e); + call.setStatus(new DefaultStatus(Statuses.SERVER_ERROR_INTERNAL, "Unable to complete the call. 
" + e.getMessage())); } } diff --git a/source/main/com/noelios/restlet/impl/RestletMapping.java b/source/main/com/noelios/restlet/impl/RestletMapping.java index a1328ba2db..2f509e23f7 100644 --- a/source/main/com/noelios/restlet/impl/RestletMapping.java +++ b/source/main/com/noelios/restlet/impl/RestletMapping.java @@ -33,38 +33,38 @@ */ public class RestletMapping extends RestletTarget { - /** The path pattern. */ - Pattern pathPattern; + /** The URI pattern. */ + Pattern pattern; /** * Constructor. - * @param pathPattern The path pattern. + * @param pattern The URI pattern. * @param target The target interface. */ - public RestletMapping(String pathPattern, Restlet target) + public RestletMapping(String pattern, Restlet target) { super(target); - this.pathPattern = Pattern.compile(pathPattern, Pattern.CASE_INSENSITIVE); + this.pattern = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE); } /** * Constructor. - * @param pathPattern The path pattern. + * @param pattern The URI pattern. * @param targetClass The target class. */ - public RestletMapping(String pathPattern, Class<? extends Restlet> targetClass) + public RestletMapping(String pattern, Class<? extends Restlet> targetClass) { super(targetClass); - this.pathPattern = Pattern.compile(pathPattern, Pattern.CASE_INSENSITIVE); + this.pattern = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE); } /** - * Returns the path pattern. - * @return The path pattern. + * Returns the URI pattern. + * @return The URI pattern. */ - public Pattern getPathPattern() + public Pattern getPattern() { - return this.pathPattern; + return this.pattern; } }
6858749cfd27f2975ce560e84b29e95d16eb88d2
camel
MailConsumer and MailProducer now use the- endpoint reference from its super class and doesn'n manager its own instance- variable--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1054815 13f79535-47bb-0310-9956-ffa450edef68-
p
https://github.com/apache/camel
diff --git a/components/camel-mail/src/main/java/org/apache/camel/component/mail/MailConsumer.java b/components/camel-mail/src/main/java/org/apache/camel/component/mail/MailConsumer.java index 5bd3677d1f1d0..120dcf1a7634b 100644 --- a/components/camel-mail/src/main/java/org/apache/camel/component/mail/MailConsumer.java +++ b/components/camel-mail/src/main/java/org/apache/camel/component/mail/MailConsumer.java @@ -18,6 +18,7 @@ import java.util.LinkedList; import java.util.Queue; + import javax.mail.Flags; import javax.mail.Folder; import javax.mail.FolderNotFoundException; @@ -49,7 +50,6 @@ public class MailConsumer extends ScheduledPollConsumer implements BatchConsumer public static final long DEFAULT_CONSUMER_DELAY = 60 * 1000L; private static final transient Log LOG = LogFactory.getLog(MailConsumer.class); - private final MailEndpoint endpoint; private final JavaMailSenderImpl sender; private Folder folder; private Store store; @@ -59,7 +59,6 @@ public class MailConsumer extends ScheduledPollConsumer implements BatchConsumer public MailConsumer(MailEndpoint endpoint, Processor processor, JavaMailSenderImpl sender) { super(endpoint, processor); - this.endpoint = endpoint; this.sender = sender; } @@ -89,14 +88,14 @@ protected void poll() throws Exception { if (store == null || folder == null) { throw new IllegalStateException("MailConsumer did not connect properly to the MailStore: " - + endpoint.getConfiguration().getMailStoreLogInformation()); + + getEndpoint().getConfiguration().getMailStoreLogInformation()); } if (LOG.isDebugEnabled()) { - LOG.debug("Polling mailfolder: " + endpoint.getConfiguration().getMailStoreLogInformation()); + LOG.debug("Polling mailfolder: " + getEndpoint().getConfiguration().getMailStoreLogInformation()); } - if (endpoint.getConfiguration().getFetchSize() == 0) { + if (getEndpoint().getConfiguration().getFetchSize() == 0) { LOG.warn("Fetch size is 0 meaning the configuration is set to poll no new messages at all. 
Camel will skip this poll."); return; } @@ -112,7 +111,7 @@ protected void poll() throws Exception { Message[] messages; // should we process all messages or only unseen messages - if (endpoint.getConfiguration().isUnseen()) { + if (getEndpoint().getConfiguration().isUnseen()) { messages = folder.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false)); } else { messages = folder.getMessages(); @@ -226,7 +225,7 @@ public boolean isBatchAllowed() { protected Queue<Exchange> createExchanges(Message[] messages) throws MessagingException { Queue<Exchange> answer = new LinkedList<Exchange>(); - int fetchSize = endpoint.getConfiguration().getFetchSize(); + int fetchSize = getEndpoint().getConfiguration().getFetchSize(); int count = fetchSize == -1 ? messages.length : Math.min(fetchSize, messages.length); if (LOG.isDebugEnabled()) { @@ -236,7 +235,7 @@ protected Queue<Exchange> createExchanges(Message[] messages) throws MessagingEx for (int i = 0; i < count; i++) { Message message = messages[i]; if (!message.getFlags().contains(Flags.Flag.DELETED)) { - Exchange exchange = endpoint.createExchange(message); + Exchange exchange = getEndpoint().createExchange(message); answer.add(exchange); } else { if (LOG.isDebugEnabled()) { @@ -267,7 +266,7 @@ protected void processExchange(Exchange exchange) throws Exception { */ protected void processCommit(Message mail, Exchange exchange) { try { - if (endpoint.getConfiguration().isDelete()) { + if (getEndpoint().getConfiguration().isDelete()) { LOG.debug("Exchange processed, so flagging message as DELETED"); mail.setFlag(Flags.Flag.DELETED, true); } else { @@ -296,7 +295,7 @@ protected void processRollback(Message mail, Exchange exchange) { } private void ensureIsConnected() throws MessagingException { - MailConfiguration config = endpoint.getConfiguration(); + MailConfiguration config = getEndpoint().getConfiguration(); boolean connected = false; try { @@ -305,7 +304,7 @@ private void ensureIsConnected() throws MessagingException { } } 
catch (Exception e) { LOG.debug("Exception while testing for is connected to MailStore: " - + endpoint.getConfiguration().getMailStoreLogInformation() + + getEndpoint().getConfiguration().getMailStoreLogInformation() + ". Caused by: " + e.getMessage(), e); } @@ -315,7 +314,7 @@ private void ensureIsConnected() throws MessagingException { folder = null; if (LOG.isDebugEnabled()) { - LOG.debug("Connecting to MailStore: " + endpoint.getConfiguration().getMailStoreLogInformation()); + LOG.debug("Connecting to MailStore: " + getEndpoint().getConfiguration().getMailStoreLogInformation()); } store = sender.getSession().getStore(config.getProtocol()); store.connect(config.getHost(), config.getPort(), config.getUsername(), config.getPassword()); @@ -332,4 +331,8 @@ private void ensureIsConnected() throws MessagingException { } } -} + @Override + public MailEndpoint getEndpoint() { + return (MailEndpoint) super.getEndpoint(); + } +} \ No newline at end of file diff --git a/components/camel-mail/src/main/java/org/apache/camel/component/mail/MailProducer.java b/components/camel-mail/src/main/java/org/apache/camel/component/mail/MailProducer.java index 2a63ca97c7d3a..0be21d63e6237 100644 --- a/components/camel-mail/src/main/java/org/apache/camel/component/mail/MailProducer.java +++ b/components/camel-mail/src/main/java/org/apache/camel/component/mail/MailProducer.java @@ -33,23 +33,26 @@ */ public class MailProducer extends DefaultProducer { private static final transient Log LOG = LogFactory.getLog(MailProducer.class); - private final MailEndpoint endpoint; private final JavaMailSender sender; public MailProducer(MailEndpoint endpoint, JavaMailSender sender) { super(endpoint); - this.endpoint = endpoint; this.sender = sender; } public void process(final Exchange exchange) { sender.send(new MimeMessagePreparator() { public void prepare(MimeMessage mimeMessage) throws Exception { - endpoint.getBinding().populateMailMessage(endpoint, mimeMessage, exchange); + 
getEndpoint().getBinding().populateMailMessage(getEndpoint(), mimeMessage, exchange); if (LOG.isDebugEnabled()) { LOG.debug("Sending MimeMessage: " + MailUtils.dumpMessage(mimeMessage)); } } }); } -} + + @Override + public MailEndpoint getEndpoint() { + return (MailEndpoint) super.getEndpoint(); + } +} \ No newline at end of file
1c31074ed8a27a91cc794c5ebf26c64fcb0fda75
Vala
libsoup-2.4: update to 2.30.0 Fixes bug 615047.
a
https://github.com/GNOME/vala/
diff --git a/vapi/libsoup-2.4.vapi b/vapi/libsoup-2.4.vapi index dd83ee5316..8568abd5c3 100644 --- a/vapi/libsoup-2.4.vapi +++ b/vapi/libsoup-2.4.vapi @@ -47,6 +47,7 @@ namespace Soup { public bool is_for_proxy { get; construct; } public string realm { get; construct; } public string scheme_name { get; } + public virtual signal void save_password (string p0, string p1); } [CCode (cheader_filename = "libsoup/soup.h")] public class AuthDomain : GLib.Object { @@ -120,6 +121,17 @@ namespace Soup { public unowned string get_host (); public unowned Soup.Socket get_socket (); } + [CCode (cheader_filename = "libsoup/soup.h")] + public class ContentDecoder : GLib.Object, Soup.SessionFeature { + } + [CCode (cheader_filename = "libsoup/soup.h")] + public class ContentSniffer : GLib.Object, Soup.SessionFeature { + [CCode (has_construct_function = false)] + public ContentSniffer (); + [NoWrapper] + public virtual size_t get_buffer_size (); + public virtual string sniff (Soup.Message msg, Soup.Buffer buffer, out GLib.HashTable<string,string>? 
@params); + } [Compact] [CCode (copy_function = "soup_cookie_copy", type_id = "SOUP_TYPE_COOKIE", cheader_filename = "libsoup/soup.h")] public class Cookie { @@ -134,6 +146,7 @@ namespace Soup { public Cookie (string name, string value, string domain, string path, int max_age); public bool applies_to_uri (Soup.URI uri); public Soup.Cookie copy (); + public bool domain_matches (string host); public bool equal (Soup.Cookie cookie2); public static unowned Soup.Cookie parse (string header, Soup.URI origin); public void set_domain (string domain); @@ -154,9 +167,13 @@ namespace Soup { public void add_cookie (Soup.Cookie cookie); public GLib.SList<Soup.Cookie> all_cookies (); public void delete_cookie (Soup.Cookie cookie); + public Soup.CookieJarAcceptPolicy get_accept_policy (); public unowned string get_cookies (Soup.URI uri, bool for_http); public virtual void save (); + public void set_accept_policy (Soup.CookieJarAcceptPolicy policy); public void set_cookie (Soup.URI uri, string cookie); + public void set_cookie_with_first_party (Soup.URI uri, Soup.URI first_party, string cookie); + public Soup.CookieJarAcceptPolicy accept_policy { get; set; } [NoAccessorMethod] public bool read_only { get; construct; } public virtual signal void changed (Soup.Cookie old_cookie, Soup.Cookie new_cookie); @@ -211,14 +228,17 @@ namespace Soup { public Message (string method, string uri_string); public uint add_header_handler (string @signal, string header, GLib.Callback callback); public uint add_status_code_handler (string @signal, uint status_code, GLib.Callback callback); + public void disable_feature (GLib.Type feature_type); [CCode (has_construct_function = false)] public Message.from_uri (string method, Soup.URI uri); public unowned Soup.Address get_address (); + public unowned Soup.URI get_first_party (); public Soup.MessageFlags get_flags (); public Soup.HTTPVersion get_http_version (); public unowned Soup.URI get_uri (); public bool is_keepalive (); public void 
set_chunk_allocator (owned Soup.ChunkAllocator allocator); + public void set_first_party (...); public void set_flags (Soup.MessageFlags flags); public void set_http_version (Soup.HTTPVersion version); public void set_request (string content_type, Soup.MemoryUse req_use, string req_body, size_t req_length); @@ -226,6 +246,7 @@ namespace Soup { public void set_status (uint status_code); public void set_status_full (uint status_code, string reason_phrase); public void set_uri (Soup.URI uri); + public Soup.URI first_party { get; set; } public Soup.MessageFlags flags { get; set; } public Soup.HTTPVersion http_version { get; set; } [NoAccessorMethod] @@ -238,6 +259,8 @@ namespace Soup { public uint status_code { get; set; } public Soup.URI uri { get; set; } [HasEmitter] + public virtual signal void content_sniffed (string p0, GLib.HashTable p1); + [HasEmitter] public virtual signal void finished (); [HasEmitter] public virtual signal void got_body (); @@ -292,9 +315,11 @@ namespace Soup { public bool get_content_disposition (out string disposition, out GLib.HashTable? @params); public int64 get_content_length (); public bool get_content_range (int64 start, int64 end, int64 total_length); - public unowned string get_content_type (out GLib.HashTable? @params); + public unowned string get_content_type (out GLib.HashTable<string,string>? 
@params); public Soup.Encoding get_encoding (); public Soup.Expectation get_expectations (); + public unowned string get_list (string name); + public unowned string get_one (string name); public bool get_ranges (int64 total_length, out unowned Soup.Range ranges, int length); public void remove (string name); public void replace (string name, string value); @@ -371,15 +396,25 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h")] public class Session : GLib.Object { public void abort (); + [NoWrapper] + public virtual void auth_required (Soup.Message msg, Soup.Auth auth, bool retrying); public virtual void cancel_message (Soup.Message msg, uint status_code); public unowned GLib.MainContext get_async_context (); + public unowned Soup.SessionFeature get_feature (GLib.Type feature_type); + public unowned Soup.SessionFeature get_feature_for_message (GLib.Type feature_type, Soup.Message msg); + public GLib.SList<weak Soup.SessionFeature> get_features (GLib.Type feature_type); public void pause_message (Soup.Message msg); + public void prepare_for_uri (Soup.URI uri); public virtual void queue_message (owned Soup.Message msg, Soup.SessionCallback? 
callback); public void remove_feature (Soup.SessionFeature feature); public virtual void requeue_message (Soup.Message msg); public virtual uint send_message (Soup.Message msg); public void unpause_message (Soup.Message msg); [NoAccessorMethod] + public string accept_language { owned get; set; } + [NoAccessorMethod] + public bool accept_language_auto { get; set; } + [NoAccessorMethod] public Soup.SessionFeature add_feature { owned get; set; } [NoAccessorMethod] public GLib.Type add_feature_by_type { get; set; } @@ -397,15 +432,19 @@ namespace Soup { [NoAccessorMethod] public string ssl_ca_file { owned get; set; } [NoAccessorMethod] + public bool ssl_strict { get; set; } + [NoAccessorMethod] public uint timeout { get; set; } [NoAccessorMethod] public bool use_ntlm { get; set; } [NoAccessorMethod] public string user_agent { owned get; set; } public virtual signal void authenticate (Soup.Message msg, Soup.Auth auth, bool retrying); + public virtual signal void connection_created (GLib.Object p0); public virtual signal void request_queued (Soup.Message p0); public virtual signal void request_started (Soup.Message msg, Soup.Socket socket); public virtual signal void request_unqueued (Soup.Message p0); + public virtual signal void tunneling (GLib.Object p0); } [CCode (cheader_filename = "libsoup/soup.h")] public class SessionAsync : Soup.Session { @@ -428,6 +467,7 @@ namespace Soup { public void connect_async (GLib.Cancellable cancellable, Soup.SocketCallback callback); public uint connect_sync (GLib.Cancellable cancellable); public void disconnect (); + public int get_fd (); public unowned Soup.Address get_local_address (); public unowned Soup.Address get_remote_address (); public bool is_connected (); @@ -449,7 +489,11 @@ namespace Soup { [NoAccessorMethod] public void* ssl_creds { get; set; } [NoAccessorMethod] + public bool ssl_strict { get; construct; } + [NoAccessorMethod] public uint timeout { get; set; } + [NoAccessorMethod] + public bool trusted_certificate { 
get; construct; } public virtual signal void disconnected (); public virtual signal void new_connection (Soup.Socket p0); public virtual signal void readable (); @@ -469,9 +513,12 @@ namespace Soup { [CCode (has_construct_function = false)] public URI (string uri_string); public Soup.URI copy (); + public unowned Soup.URI copy_host (); public static string decode (string part); public static string encode (string part, string? escape_extra); public bool equal (Soup.URI uri2); + public static bool host_equal (void* v1, void* v2); + public static uint host_hash (void* key); public static string normalize (string part, string unescape_extra); public void set_fragment (string fragment); public void set_host (string host); @@ -494,6 +541,11 @@ namespace Soup { public abstract uint get_proxy_sync (Soup.Message msg, GLib.Cancellable cancellable, out unowned Soup.Address addr); } [CCode (cheader_filename = "libsoup/soup.h")] + public interface ProxyURIResolver : GLib.Object { + public abstract void get_proxy_uri_async (Soup.URI uri, GLib.MainContext async_context, GLib.Cancellable cancellable, Soup.ProxyURIResolverCallback callback); + public abstract uint get_proxy_uri_sync (Soup.URI uri, GLib.Cancellable cancellable, out unowned Soup.URI proxy_uri); + } + [CCode (cheader_filename = "libsoup/soup.h")] public interface SessionFeature : GLib.Object { public abstract void attach (Soup.Session session); public abstract void detach (Soup.Session session); @@ -510,6 +562,21 @@ namespace Soup { IPV4, IPV6 } + [CCode (cprefix = "SOUP_CONNECTION_", cheader_filename = "libsoup/soup.h")] + public enum ConnectionState { + NEW, + CONNECTING, + IDLE, + IN_USE, + REMOTE_DISCONNECTED, + DISCONNECTED + } + [CCode (cprefix = "SOUP_COOKIE_JAR_ACCEPT_", cheader_filename = "libsoup/soup.h")] + public enum CookieJarAcceptPolicy { + ALWAYS, + NEVER, + NO_THIRD_PARTY + } [CCode (cprefix = "SOUP_DATE_", cheader_filename = "libsoup/soup.h")] public enum DateFormat { HTTP, @@ -621,8 +688,10 @@ 
namespace Soup { [CCode (cprefix = "SOUP_MESSAGE_", cheader_filename = "libsoup/soup.h")] [Flags] public enum MessageFlags { + NO_REDIRECT, OVERWRITE_CHUNKS, - NO_REDIRECT + CONTENT_DECODED, + CERTIFICATE_TRUSTED } [CCode (cprefix = "SOUP_MESSAGE_HEADERS_", cheader_filename = "libsoup/soup.h")] public enum MessageHeadersType { @@ -682,6 +751,8 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h", has_target = false)] public delegate void ProxyResolverCallback (Soup.ProxyResolver p1, Soup.Message p2, uint p3, Soup.Address p4, void* p5); [CCode (cheader_filename = "libsoup/soup.h")] + public delegate void ProxyURIResolverCallback (Soup.ProxyURIResolver resolver, uint status, Soup.URI proxy_uri); + [CCode (cheader_filename = "libsoup/soup.h")] public delegate void ServerCallback (Soup.Server server, Soup.Message msg, string path, GLib.HashTable<string,string> query, Soup.ClientContext client); [CCode (cheader_filename = "libsoup/soup.h")] public delegate void SessionCallback (Soup.Session session, Soup.Message msg); @@ -700,8 +771,6 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h")] public const string ADDRESS_SOCKADDR; [CCode (cheader_filename = "libsoup/soup.h")] - public const int AF_INET6; - [CCode (cheader_filename = "libsoup/soup.h")] public const string AUTH_DOMAIN_ADD_PATH; [CCode (cheader_filename = "libsoup/soup.h")] public const string AUTH_DOMAIN_BASIC_AUTH_CALLBACK; @@ -744,8 +813,14 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h")] public const string AUTH_SCHEME_NAME; [CCode (cheader_filename = "libsoup/soup.h")] + public const int CONTENT_DECODER_H; + [CCode (cheader_filename = "libsoup/soup.h")] + public const int CONTENT_SNIFFER_H; + [CCode (cheader_filename = "libsoup/soup.h")] public const int COOKIE_H; [CCode (cheader_filename = "libsoup/soup.h")] + public const string COOKIE_JAR_ACCEPT_POLICY; + [CCode (cheader_filename = "libsoup/soup.h")] public const int COOKIE_JAR_H; [CCode (cheader_filename = 
"libsoup/soup.h")] public const string COOKIE_JAR_READ_ONLY; @@ -778,6 +853,8 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h")] public const int MESSAGE_BODY_H; [CCode (cheader_filename = "libsoup/soup.h")] + public const string MESSAGE_FIRST_PARTY; + [CCode (cheader_filename = "libsoup/soup.h")] public const string MESSAGE_FLAGS; [CCode (cheader_filename = "libsoup/soup.h")] public const int MESSAGE_H; @@ -802,8 +879,12 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h")] public const int MULTIPART_H; [CCode (cheader_filename = "libsoup/soup.h")] + public const int PASSWORD_MANAGER_H; + [CCode (cheader_filename = "libsoup/soup.h")] public const int PROXY_RESOLVER_H; [CCode (cheader_filename = "libsoup/soup.h")] + public const int PROXY_URI_RESOLVER_H; + [CCode (cheader_filename = "libsoup/soup.h")] public const string SERVER_ASYNC_CONTEXT; [CCode (cheader_filename = "libsoup/soup.h")] public const int SERVER_H; @@ -820,6 +901,10 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h")] public const string SERVER_SSL_KEY_FILE; [CCode (cheader_filename = "libsoup/soup.h")] + public const string SESSION_ACCEPT_LANGUAGE; + [CCode (cheader_filename = "libsoup/soup.h")] + public const string SESSION_ACCEPT_LANGUAGE_AUTO; + [CCode (cheader_filename = "libsoup/soup.h")] public const string SESSION_ADD_FEATURE; [CCode (cheader_filename = "libsoup/soup.h")] public const string SESSION_ADD_FEATURE_BY_TYPE; @@ -844,6 +929,8 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h")] public const string SESSION_SSL_CA_FILE; [CCode (cheader_filename = "libsoup/soup.h")] + public const string SESSION_SSL_STRICT; + [CCode (cheader_filename = "libsoup/soup.h")] public const int SESSION_SYNC_H; [CCode (cheader_filename = "libsoup/soup.h")] public const string SESSION_TIMEOUT; @@ -866,8 +953,12 @@ namespace Soup { [CCode (cheader_filename = "libsoup/soup.h")] public const string SOCKET_SSL_CREDENTIALS; [CCode (cheader_filename = 
"libsoup/soup.h")] + public const string SOCKET_SSL_STRICT; + [CCode (cheader_filename = "libsoup/soup.h")] public const string SOCKET_TIMEOUT; [CCode (cheader_filename = "libsoup/soup.h")] + public const string SOCKET_TRUSTED_CERTIFICATE; + [CCode (cheader_filename = "libsoup/soup.h")] public const int STATUS_H; [CCode (cheader_filename = "libsoup/soup.h")] public const int TYPES_H; diff --git a/vapi/packages/libsoup-2.4/libsoup-2.4.gi b/vapi/packages/libsoup-2.4/libsoup-2.4.gi index e61ee29906..d8f67fb05d 100644 --- a/vapi/packages/libsoup-2.4/libsoup-2.4.gi +++ b/vapi/packages/libsoup-2.4/libsoup-2.4.gi @@ -539,6 +539,15 @@ <parameter name="p5" type="gpointer"/> </parameters> </callback> + <callback name="SoupProxyURIResolverCallback"> + <return-type type="void"/> + <parameters> + <parameter name="resolver" type="SoupProxyURIResolver*"/> + <parameter name="status" type="guint"/> + <parameter name="proxy_uri" type="SoupURI*"/> + <parameter name="user_data" type="gpointer"/> + </parameters> + </callback> <callback name="SoupServerCallback"> <return-type type="void"/> <parameters> @@ -683,6 +692,13 @@ <parameter name="cookie" type="SoupCookie*"/> </parameters> </method> + <method name="domain_matches" symbol="soup_cookie_domain_matches"> + <return-type type="gboolean"/> + <parameters> + <parameter name="cookie" type="SoupCookie*"/> + <parameter name="host" type="char*"/> + </parameters> + </method> <method name="equal" symbol="soup_cookie_equal"> <return-type type="gboolean"/> <parameters> @@ -1032,6 +1048,20 @@ <parameter name="hdrs" type="SoupMessageHeaders*"/> </parameters> </method> + <method name="get_list" symbol="soup_message_headers_get_list"> + <return-type type="char*"/> + <parameters> + <parameter name="hdrs" type="SoupMessageHeaders*"/> + <parameter name="name" type="char*"/> + </parameters> + </method> + <method name="get_one" symbol="soup_message_headers_get_one"> + <return-type type="char*"/> + <parameters> + <parameter name="hdrs" 
type="SoupMessageHeaders*"/> + <parameter name="name" type="char*"/> + </parameters> + </method> <method name="get_ranges" symbol="soup_message_headers_get_ranges"> <return-type type="gboolean"/> <parameters> @@ -1202,6 +1232,12 @@ <parameter name="uri" type="SoupURI*"/> </parameters> </method> + <method name="copy_host" symbol="soup_uri_copy_host"> + <return-type type="SoupURI*"/> + <parameters> + <parameter name="uri" type="SoupURI*"/> + </parameters> + </method> <method name="decode" symbol="soup_uri_decode"> <return-type type="char*"/> <parameters> @@ -1228,6 +1264,19 @@ <parameter name="uri" type="SoupURI*"/> </parameters> </method> + <method name="host_equal" symbol="soup_uri_host_equal"> + <return-type type="gboolean"/> + <parameters> + <parameter name="v1" type="gconstpointer"/> + <parameter name="v2" type="gconstpointer"/> + </parameters> + </method> + <method name="host_hash" symbol="soup_uri_host_hash"> + <return-type type="guint"/> + <parameters> + <parameter name="key" type="gconstpointer"/> + </parameters> + </method> <constructor name="new" symbol="soup_uri_new"> <return-type type="SoupURI*"/> <parameters> @@ -1345,6 +1394,19 @@ <member name="SOUP_ADDRESS_FAMILY_IPV4" value="2"/> <member name="SOUP_ADDRESS_FAMILY_IPV6" value="10"/> </enum> + <enum name="SoupConnectionState" type-name="SoupConnectionState" get-type="soup_connection_state_get_type"> + <member name="SOUP_CONNECTION_NEW" value="0"/> + <member name="SOUP_CONNECTION_CONNECTING" value="1"/> + <member name="SOUP_CONNECTION_IDLE" value="2"/> + <member name="SOUP_CONNECTION_IN_USE" value="3"/> + <member name="SOUP_CONNECTION_REMOTE_DISCONNECTED" value="4"/> + <member name="SOUP_CONNECTION_DISCONNECTED" value="5"/> + </enum> + <enum name="SoupCookieJarAcceptPolicy" type-name="SoupCookieJarAcceptPolicy" get-type="soup_cookie_jar_accept_policy_get_type"> + <member name="SOUP_COOKIE_JAR_ACCEPT_ALWAYS" value="0"/> + <member name="SOUP_COOKIE_JAR_ACCEPT_NEVER" value="1"/> + <member 
name="SOUP_COOKIE_JAR_ACCEPT_NO_THIRD_PARTY" value="2"/> + </enum> <enum name="SoupDateFormat" type-name="SoupDateFormat" get-type="soup_date_format_get_type"> <member name="SOUP_DATE_HTTP" value="1"/> <member name="SOUP_DATE_COOKIE" value="2"/> @@ -1478,8 +1540,10 @@ <member name="SOUP_EXPECTATION_CONTINUE" value="2"/> </flags> <flags name="SoupMessageFlags" type-name="SoupMessageFlags" get-type="soup_message_flags_get_type"> - <member name="SOUP_MESSAGE_OVERWRITE_CHUNKS" value="8"/> <member name="SOUP_MESSAGE_NO_REDIRECT" value="2"/> + <member name="SOUP_MESSAGE_OVERWRITE_CHUNKS" value="8"/> + <member name="SOUP_MESSAGE_CONTENT_DECODED" value="16"/> + <member name="SOUP_MESSAGE_CERTIFICATE_TRUSTED" value="32"/> </flags> <object name="SoupAddress" parent="GObject" type-name="SoupAddress" get-type="soup_address_get_type"> <method name="equal_by_ip" symbol="soup_address_equal_by_ip"> @@ -1670,6 +1734,14 @@ <property name="is-for-proxy" type="gboolean" readable="1" writable="1" construct="0" construct-only="1"/> <property name="realm" type="char*" readable="1" writable="1" construct="0" construct-only="1"/> <property name="scheme-name" type="char*" readable="1" writable="0" construct="0" construct-only="0"/> + <signal name="save-password" when="FIRST"> + <return-type type="void"/> + <parameters> + <parameter name="object" type="SoupAuth*"/> + <parameter name="p0" type="char*"/> + <parameter name="p1" type="char*"/> + </parameters> + </signal> <vfunc name="authenticate"> <return-type type="void"/> <parameters> @@ -1864,6 +1936,43 @@ <property name="auth-callback" type="gpointer" readable="1" writable="1" construct="0" construct-only="0"/> <property name="auth-data" type="gpointer" readable="1" writable="1" construct="0" construct-only="0"/> </object> + <object name="SoupContentDecoder" parent="GObject" type-name="SoupContentDecoder" get-type="soup_content_decoder_get_type"> + <implements> + <interface name="SoupSessionFeature"/> + </implements> + </object> + <object 
name="SoupContentSniffer" parent="GObject" type-name="SoupContentSniffer" get-type="soup_content_sniffer_get_type"> + <implements> + <interface name="SoupSessionFeature"/> + </implements> + <constructor name="new" symbol="soup_content_sniffer_new"> + <return-type type="SoupContentSniffer*"/> + </constructor> + <method name="sniff" symbol="soup_content_sniffer_sniff"> + <return-type type="char*"/> + <parameters> + <parameter name="sniffer" type="SoupContentSniffer*"/> + <parameter name="msg" type="SoupMessage*"/> + <parameter name="buffer" type="SoupBuffer*"/> + <parameter name="params" type="GHashTable**"/> + </parameters> + </method> + <vfunc name="get_buffer_size"> + <return-type type="gsize"/> + <parameters> + <parameter name="sniffer" type="SoupContentSniffer*"/> + </parameters> + </vfunc> + <vfunc name="sniff"> + <return-type type="char*"/> + <parameters> + <parameter name="sniffer" type="SoupContentSniffer*"/> + <parameter name="msg" type="SoupMessage*"/> + <parameter name="buffer" type="SoupBuffer*"/> + <parameter name="params" type="GHashTable**"/> + </parameters> + </vfunc> + </object> <object name="SoupCookieJar" parent="GObject" type-name="SoupCookieJar" get-type="soup_cookie_jar_get_type"> <implements> <interface name="SoupSessionFeature"/> @@ -1888,6 +1997,12 @@ <parameter name="cookie" type="SoupCookie*"/> </parameters> </method> + <method name="get_accept_policy" symbol="soup_cookie_jar_get_accept_policy"> + <return-type type="SoupCookieJarAcceptPolicy"/> + <parameters> + <parameter name="jar" type="SoupCookieJar*"/> + </parameters> + </method> <method name="get_cookies" symbol="soup_cookie_jar_get_cookies"> <return-type type="char*"/> <parameters> @@ -1905,6 +2020,13 @@ <parameter name="jar" type="SoupCookieJar*"/> </parameters> </method> + <method name="set_accept_policy" symbol="soup_cookie_jar_set_accept_policy"> + <return-type type="void"/> + <parameters> + <parameter name="jar" type="SoupCookieJar*"/> + <parameter name="policy" 
type="SoupCookieJarAcceptPolicy"/> + </parameters> + </method> <method name="set_cookie" symbol="soup_cookie_jar_set_cookie"> <return-type type="void"/> <parameters> @@ -1913,6 +2035,16 @@ <parameter name="cookie" type="char*"/> </parameters> </method> + <method name="set_cookie_with_first_party" symbol="soup_cookie_jar_set_cookie_with_first_party"> + <return-type type="void"/> + <parameters> + <parameter name="jar" type="SoupCookieJar*"/> + <parameter name="uri" type="SoupURI*"/> + <parameter name="first_party" type="SoupURI*"/> + <parameter name="cookie" type="char*"/> + </parameters> + </method> + <property name="accept-policy" type="SoupCookieJarAcceptPolicy" readable="1" writable="1" construct="0" construct-only="0"/> <property name="read-only" type="gboolean" readable="1" writable="1" construct="0" construct-only="1"/> <signal name="changed" when="FIRST"> <return-type type="void"/> @@ -2016,6 +2148,21 @@ <parameter name="user_data" type="gpointer"/> </parameters> </method> + <method name="content_sniffed" symbol="soup_message_content_sniffed"> + <return-type type="void"/> + <parameters> + <parameter name="msg" type="SoupMessage*"/> + <parameter name="content_type" type="char*"/> + <parameter name="params" type="GHashTable*"/> + </parameters> + </method> + <method name="disable_feature" symbol="soup_message_disable_feature"> + <return-type type="void"/> + <parameters> + <parameter name="msg" type="SoupMessage*"/> + <parameter name="feature_type" type="GType"/> + </parameters> + </method> <method name="finished" symbol="soup_message_finished"> <return-type type="void"/> <parameters> @@ -2028,6 +2175,12 @@ <parameter name="msg" type="SoupMessage*"/> </parameters> </method> + <method name="get_first_party" symbol="soup_message_get_first_party"> + <return-type type="SoupURI*"/> + <parameters> + <parameter name="msg" type="SoupMessage*"/> + </parameters> + </method> <method name="get_flags" symbol="soup_message_get_flags"> <return-type type="SoupMessageFlags"/> 
<parameters> @@ -2106,6 +2259,13 @@ <parameter name="destroy_notify" type="GDestroyNotify"/> </parameters> </method> + <method name="set_first_party" symbol="soup_message_set_first_party"> + <return-type type="void"/> + <parameters> + <parameter name="msg" type="SoupMessage*"/> + <parameter name="first_party" type="SoupURI*"/> + </parameters> + </method> <method name="set_flags" symbol="soup_message_set_flags"> <return-type type="void"/> <parameters> @@ -2193,6 +2353,7 @@ <parameter name="msg" type="SoupMessage*"/> </parameters> </method> + <property name="first-party" type="SoupURI*" readable="1" writable="1" construct="0" construct-only="0"/> <property name="flags" type="SoupMessageFlags" readable="1" writable="1" construct="0" construct-only="0"/> <property name="http-version" type="SoupHTTPVersion" readable="1" writable="1" construct="0" construct-only="0"/> <property name="method" type="char*" readable="1" writable="1" construct="0" construct-only="0"/> @@ -2200,6 +2361,14 @@ <property name="server-side" type="gboolean" readable="1" writable="1" construct="0" construct-only="1"/> <property name="status-code" type="guint" readable="1" writable="1" construct="0" construct-only="0"/> <property name="uri" type="SoupURI*" readable="1" writable="1" construct="0" construct-only="0"/> + <signal name="content-sniffed" when="FIRST"> + <return-type type="void"/> + <parameters> + <parameter name="object" type="SoupMessage*"/> + <parameter name="p0" type="char*"/> + <parameter name="p1" type="GHashTable*"/> + </parameters> + </signal> <signal name="finished" when="FIRST"> <return-type type="void"/> <parameters> @@ -2445,6 +2614,28 @@ <parameter name="session" type="SoupSession*"/> </parameters> </method> + <method name="get_feature" symbol="soup_session_get_feature"> + <return-type type="SoupSessionFeature*"/> + <parameters> + <parameter name="session" type="SoupSession*"/> + <parameter name="feature_type" type="GType"/> + </parameters> + </method> + <method 
name="get_feature_for_message" symbol="soup_session_get_feature_for_message"> + <return-type type="SoupSessionFeature*"/> + <parameters> + <parameter name="session" type="SoupSession*"/> + <parameter name="feature_type" type="GType"/> + <parameter name="msg" type="SoupMessage*"/> + </parameters> + </method> + <method name="get_features" symbol="soup_session_get_features"> + <return-type type="GSList*"/> + <parameters> + <parameter name="session" type="SoupSession*"/> + <parameter name="feature_type" type="GType"/> + </parameters> + </method> <method name="pause_message" symbol="soup_session_pause_message"> <return-type type="void"/> <parameters> @@ -2452,6 +2643,13 @@ <parameter name="msg" type="SoupMessage*"/> </parameters> </method> + <method name="prepare_for_uri" symbol="soup_session_prepare_for_uri"> + <return-type type="void"/> + <parameters> + <parameter name="session" type="SoupSession*"/> + <parameter name="uri" type="SoupURI*"/> + </parameters> + </method> <method name="queue_message" symbol="soup_session_queue_message"> <return-type type="void"/> <parameters> @@ -2496,6 +2694,8 @@ <parameter name="msg" type="SoupMessage*"/> </parameters> </method> + <property name="accept-language" type="char*" readable="1" writable="1" construct="0" construct-only="0"/> + <property name="accept-language-auto" type="gboolean" readable="1" writable="1" construct="0" construct-only="0"/> <property name="add-feature" type="SoupSessionFeature*" readable="1" writable="1" construct="0" construct-only="0"/> <property name="add-feature-by-type" type="GType" readable="1" writable="1" construct="0" construct-only="0"/> <property name="async-context" type="gpointer" readable="1" writable="1" construct="0" construct-only="1"/> @@ -2505,6 +2705,7 @@ <property name="proxy-uri" type="SoupURI*" readable="1" writable="1" construct="0" construct-only="0"/> <property name="remove-feature-by-type" type="GType" readable="1" writable="1" construct="0" construct-only="0"/> <property 
name="ssl-ca-file" type="char*" readable="1" writable="1" construct="0" construct-only="0"/> + <property name="ssl-strict" type="gboolean" readable="1" writable="1" construct="0" construct-only="0"/> <property name="timeout" type="guint" readable="1" writable="1" construct="0" construct-only="0"/> <property name="use-ntlm" type="gboolean" readable="1" writable="1" construct="0" construct-only="0"/> <property name="user-agent" type="char*" readable="1" writable="1" construct="0" construct-only="0"/> @@ -2517,6 +2718,13 @@ <parameter name="retrying" type="gboolean"/> </parameters> </signal> + <signal name="connection-created" when="FIRST"> + <return-type type="void"/> + <parameters> + <parameter name="object" type="SoupSession*"/> + <parameter name="p0" type="GObject*"/> + </parameters> + </signal> <signal name="request-queued" when="FIRST"> <return-type type="void"/> <parameters> @@ -2539,6 +2747,22 @@ <parameter name="p0" type="SoupMessage*"/> </parameters> </signal> + <signal name="tunneling" when="FIRST"> + <return-type type="void"/> + <parameters> + <parameter name="object" type="SoupSession*"/> + <parameter name="p0" type="GObject*"/> + </parameters> + </signal> + <vfunc name="auth_required"> + <return-type type="void"/> + <parameters> + <parameter name="session" type="SoupSession*"/> + <parameter name="msg" type="SoupMessage*"/> + <parameter name="auth" type="SoupAuth*"/> + <parameter name="retrying" type="gboolean"/> + </parameters> + </vfunc> <vfunc name="cancel_message"> <return-type type="void"/> <parameters> @@ -2616,6 +2840,12 @@ <parameter name="sock" type="SoupSocket*"/> </parameters> </method> + <method name="get_fd" symbol="soup_socket_get_fd"> + <return-type type="int"/> + <parameters> + <parameter name="sock" type="SoupSocket*"/> + </parameters> + </method> <method name="get_local_address" symbol="soup_socket_get_local_address"> <return-type type="SoupAddress*"/> <parameters> @@ -2709,7 +2939,9 @@ <property name="non-blocking" type="gboolean" 
readable="1" writable="1" construct="0" construct-only="0"/> <property name="remote-address" type="SoupAddress*" readable="1" writable="1" construct="0" construct-only="1"/> <property name="ssl-creds" type="gpointer" readable="1" writable="1" construct="0" construct-only="0"/> + <property name="ssl-strict" type="gboolean" readable="1" writable="1" construct="0" construct-only="1"/> <property name="timeout" type="guint" readable="1" writable="1" construct="0" construct-only="0"/> + <property name="trusted-certificate" type="gboolean" readable="1" writable="1" construct="0" construct-only="1"/> <signal name="disconnected" when="LAST"> <return-type type="void"/> <parameters> @@ -2782,6 +3014,51 @@ </parameters> </vfunc> </interface> + <interface name="SoupProxyURIResolver" type-name="SoupProxyURIResolver" get-type="soup_proxy_uri_resolver_get_type"> + <requires> + <interface name="GObject"/> + </requires> + <method name="get_proxy_uri_async" symbol="soup_proxy_uri_resolver_get_proxy_uri_async"> + <return-type type="void"/> + <parameters> + <parameter name="proxy_uri_resolver" type="SoupProxyURIResolver*"/> + <parameter name="uri" type="SoupURI*"/> + <parameter name="async_context" type="GMainContext*"/> + <parameter name="cancellable" type="GCancellable*"/> + <parameter name="callback" type="SoupProxyURIResolverCallback"/> + <parameter name="user_data" type="gpointer"/> + </parameters> + </method> + <method name="get_proxy_uri_sync" symbol="soup_proxy_uri_resolver_get_proxy_uri_sync"> + <return-type type="guint"/> + <parameters> + <parameter name="proxy_uri_resolver" type="SoupProxyURIResolver*"/> + <parameter name="uri" type="SoupURI*"/> + <parameter name="cancellable" type="GCancellable*"/> + <parameter name="proxy_uri" type="SoupURI**"/> + </parameters> + </method> + <vfunc name="get_proxy_uri_async"> + <return-type type="void"/> + <parameters> + <parameter name="p1" type="SoupProxyURIResolver*"/> + <parameter name="p2" type="SoupURI*"/> + <parameter name="p3" 
type="GMainContext*"/> + <parameter name="p4" type="GCancellable*"/> + <parameter name="p5" type="SoupProxyURIResolverCallback"/> + <parameter name="p6" type="gpointer"/> + </parameters> + </vfunc> + <vfunc name="get_proxy_uri_sync"> + <return-type type="guint"/> + <parameters> + <parameter name="p1" type="SoupProxyURIResolver*"/> + <parameter name="p2" type="SoupURI*"/> + <parameter name="p3" type="GCancellable*"/> + <parameter name="p4" type="SoupURI**"/> + </parameters> + </vfunc> + </interface> <interface name="SoupSessionFeature" type-name="SoupSessionFeature" get-type="soup_session_feature_get_type"> <requires> <interface name="GObject"/> @@ -2840,7 +3117,6 @@ </parameters> </vfunc> </interface> - <constant name="AF_INET6" type="int" value="-1"/> <constant name="SOUP_ADDRESS_ANY_PORT" type="int" value="0"/> <constant name="SOUP_ADDRESS_FAMILY" type="char*" value="family"/> <constant name="SOUP_ADDRESS_NAME" type="char*" value="name"/> @@ -2868,7 +3144,10 @@ <constant name="SOUP_AUTH_IS_FOR_PROXY" type="char*" value="is-for-proxy"/> <constant name="SOUP_AUTH_REALM" type="char*" value="realm"/> <constant name="SOUP_AUTH_SCHEME_NAME" type="char*" value="scheme-name"/> + <constant name="SOUP_CONTENT_DECODER_H" type="int" value="1"/> + <constant name="SOUP_CONTENT_SNIFFER_H" type="int" value="1"/> <constant name="SOUP_COOKIE_H" type="int" value="1"/> + <constant name="SOUP_COOKIE_JAR_ACCEPT_POLICY" type="char*" value="accept-policy"/> <constant name="SOUP_COOKIE_JAR_H" type="int" value="1"/> <constant name="SOUP_COOKIE_JAR_READ_ONLY" type="char*" value="read-only"/> <constant name="SOUP_COOKIE_JAR_TEXT_FILENAME" type="char*" value="filename"/> @@ -2885,6 +3164,7 @@ <constant name="SOUP_HEADERS_H" type="int" value="1"/> <constant name="SOUP_LOGGER_H" type="int" value="1"/> <constant name="SOUP_MESSAGE_BODY_H" type="int" value="1"/> + <constant name="SOUP_MESSAGE_FIRST_PARTY" type="char*" value="first-party"/> <constant name="SOUP_MESSAGE_FLAGS" type="char*" 
value="flags"/> <constant name="SOUP_MESSAGE_H" type="int" value="1"/> <constant name="SOUP_MESSAGE_HEADERS_H" type="int" value="1"/> @@ -2897,7 +3177,9 @@ <constant name="SOUP_METHOD_H" type="int" value="1"/> <constant name="SOUP_MISC_H" type="int" value="1"/> <constant name="SOUP_MULTIPART_H" type="int" value="1"/> + <constant name="SOUP_PASSWORD_MANAGER_H" type="int" value="1"/> <constant name="SOUP_PROXY_RESOLVER_H" type="int" value="1"/> + <constant name="SOUP_PROXY_URI_RESOLVER_H" type="int" value="1"/> <constant name="SOUP_SERVER_ASYNC_CONTEXT" type="char*" value="async-context"/> <constant name="SOUP_SERVER_H" type="int" value="1"/> <constant name="SOUP_SERVER_INTERFACE" type="char*" value="interface"/> @@ -2906,6 +3188,8 @@ <constant name="SOUP_SERVER_SERVER_HEADER" type="char*" value="server-header"/> <constant name="SOUP_SERVER_SSL_CERT_FILE" type="char*" value="ssl-cert-file"/> <constant name="SOUP_SERVER_SSL_KEY_FILE" type="char*" value="ssl-key-file"/> + <constant name="SOUP_SESSION_ACCEPT_LANGUAGE" type="char*" value="accept-language"/> + <constant name="SOUP_SESSION_ACCEPT_LANGUAGE_AUTO" type="char*" value="accept-language-auto"/> <constant name="SOUP_SESSION_ADD_FEATURE" type="char*" value="add-feature"/> <constant name="SOUP_SESSION_ADD_FEATURE_BY_TYPE" type="char*" value="add-feature-by-type"/> <constant name="SOUP_SESSION_ASYNC_CONTEXT" type="char*" value="async-context"/> @@ -2918,6 +3202,7 @@ <constant name="SOUP_SESSION_PROXY_URI" type="char*" value="proxy-uri"/> <constant name="SOUP_SESSION_REMOVE_FEATURE_BY_TYPE" type="char*" value="remove-feature-by-type"/> <constant name="SOUP_SESSION_SSL_CA_FILE" type="char*" value="ssl-ca-file"/> + <constant name="SOUP_SESSION_SSL_STRICT" type="char*" value="ssl-strict"/> <constant name="SOUP_SESSION_SYNC_H" type="int" value="1"/> <constant name="SOUP_SESSION_TIMEOUT" type="char*" value="timeout"/> <constant name="SOUP_SESSION_USER_AGENT" type="char*" value="user-agent"/> @@ -2929,7 +3214,9 @@ <constant 
name="SOUP_SOCKET_LOCAL_ADDRESS" type="char*" value="local-address"/> <constant name="SOUP_SOCKET_REMOTE_ADDRESS" type="char*" value="remote-address"/> <constant name="SOUP_SOCKET_SSL_CREDENTIALS" type="char*" value="ssl-creds"/> + <constant name="SOUP_SOCKET_SSL_STRICT" type="char*" value="ssl-strict"/> <constant name="SOUP_SOCKET_TIMEOUT" type="char*" value="timeout"/> + <constant name="SOUP_SOCKET_TRUSTED_CERTIFICATE" type="char*" value="trusted-certificate"/> <constant name="SOUP_STATUS_H" type="int" value="1"/> <constant name="SOUP_TYPES_H" type="int" value="1"/> <constant name="SOUP_URI_H" type="int" value="1"/> diff --git a/vapi/packages/libsoup-2.4/libsoup-2.4.metadata b/vapi/packages/libsoup-2.4/libsoup-2.4.metadata index 4ae8b06798..5b0e2ada95 100644 --- a/vapi/packages/libsoup-2.4/libsoup-2.4.metadata +++ b/vapi/packages/libsoup-2.4/libsoup-2.4.metadata @@ -18,6 +18,8 @@ soup_auth_domain_digest_set_auth_callback.callback transfer_ownership="1" soup_auth_domain_digest_set_auth_callback.dnotify hidden="1" SoupBuffer ref_function="soup_buffer_copy" unref_function="soup_buffer_free" soup_buffer_copy transfer_ownership="1" +soup_content_sniffer_sniff transfer_ownership="1" +soup_content_sniffer_sniff.params is_out="1" transfer_ownership="1" nullable="1" type_arguments="string,string" soup_cookie_copy transfer_ownership="1" soup_cookies_free hidden="1" soup_cookies_to_cookie_header.cookies type_arguments="Cookie" @@ -47,6 +49,7 @@ soup_logger_set_request_filter.destroy hidden="1" soup_logger_set_response_filter.response_filter transfer_ownership="1" soup_logger_set_response_filter.filter_data hidden="1" soup_logger_set_response_filter.destroy hidden="1" +SoupMessage::content_sniffed has_emitter="1" SoupMessage::finished has_emitter="1" SoupMessage::got_body has_emitter="1" SoupMessage::got_chunk has_emitter="1" @@ -63,7 +66,7 @@ soup_cookies_from_response type_arguments="Cookie" transfer_ownership="1" soup_header_parse_semi_param_list 
type_arguments="string,string" transfer_ownership="1" soup_message_headers_get_content_disposition.disposition transfer_ownership="1" soup_message_headers_get_content_disposition.params is_out="1" transfer_ownership="1" nullable="1" -soup_message_headers_get_content_type.params is_out="1" transfer_ownership="1" nullable="1" +soup_message_headers_get_content_type.params is_out="1" transfer_ownership="1" nullable="1" type_arguments="string,string" soup_message_set_chunk_allocator.allocator transfer_ownership="1" soup_message_set_chunk_allocator.destroy_notify hidden="1" SoupMessageBody.data type_name="uint8" is_array="1" @@ -76,6 +79,7 @@ SoupServerCallback.query type_arguments="string,string" SoupSession::add_feature has_emitter="1" SoupSession::add_feature_by_type has_emitter="1" SoupSession::remove_feature_by_type has_emitter="1" +soup_session_get_features transfer_ownership="1" type_arguments="unowned SessionFeature" soup_session_queue_message.msg transfer_ownership="1" soup_session_queue_message.callback nullable="1" soup_session_async_new_with_options ellipsis="1"
d7bf95159df37a3d338ca267dddd3d26b38ec37c
casidiablo$persistence
Now it is possible to specify the sqlite open helper, which is useful when performing custom upgrades
p
https://github.com/casidiablo/persistence
diff --git a/pom.xml b/pom.xml index 394263b..60097c9 100644 --- a/pom.xml +++ b/pom.xml @@ -22,7 +22,7 @@ <groupId>com.codeslap</groupId> <artifactId>persistence</artifactId> - <version>0.9.9.1</version> + <version>0.9.9.2</version> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> diff --git a/src/main/java/com/codeslap/persistence/DbOpenHelper.java b/src/main/java/com/codeslap/persistence/DbOpenHelper.java new file mode 100644 index 0000000..f1ad177 --- /dev/null +++ b/src/main/java/com/codeslap/persistence/DbOpenHelper.java @@ -0,0 +1,52 @@ +/* + * Copyright 2012 CodeSlap + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.codeslap.persistence; + +import android.content.Context; +import android.database.sqlite.SQLiteDatabase; +import android.database.sqlite.SQLiteOpenHelper; + +/** + * This class will allow you to customize the database creation and, more important, + * the upgrades you may want to perform. 
+ * + * @author cristian + */ +public abstract class DbOpenHelper extends SQLiteOpenHelper { + private final String mName; + private final int mVersion; + + public DbOpenHelper(Context context, String name, int version) { + super(context, name, null, version); + mName = name; + mVersion = version; + } + + @Override + public void onCreate(SQLiteDatabase sqLiteDatabase) { + } + + public abstract void onUpgrade(SQLiteDatabase sqLiteDatabase, int oldVersion, int newVersion); + + public String getName() { + return mName; + } + + public int getVersion() { + return mVersion; + } +} diff --git a/src/main/java/com/codeslap/persistence/PersistenceConfig.java b/src/main/java/com/codeslap/persistence/PersistenceConfig.java index 8e8d954..a793311 100644 --- a/src/main/java/com/codeslap/persistence/PersistenceConfig.java +++ b/src/main/java/com/codeslap/persistence/PersistenceConfig.java @@ -26,6 +26,14 @@ public class PersistenceConfig { static String sFirstDatabase; public static SqlPersistence getDatabase(String name, int version) { + return getDatabase(name, version, null); + } + + public static SqlPersistence getDatabase(DbOpenHelper openHelper) { + return getDatabase(openHelper.getName(), openHelper.getVersion(), openHelper); + } + + private static SqlPersistence getDatabase(String name, int version, DbOpenHelper openHelper) { if (name == null) { throw new IllegalArgumentException("You must provide a valid database name"); } @@ -35,7 +43,7 @@ public static SqlPersistence getDatabase(String name, int version) { if (SQL.containsKey(name)) { return SQL.get(name); } - SqlPersistence sqlPersistence = new SqlPersistence(name, version); + SqlPersistence sqlPersistence = new SqlPersistence(name, version, openHelper); SQL.put(name, sqlPersistence); return sqlPersistence; } diff --git a/src/main/java/com/codeslap/persistence/SqlPersistence.java b/src/main/java/com/codeslap/persistence/SqlPersistence.java index ca59b19..640c60b 100644 --- 
a/src/main/java/com/codeslap/persistence/SqlPersistence.java +++ b/src/main/java/com/codeslap/persistence/SqlPersistence.java @@ -31,10 +31,12 @@ public class SqlPersistence { private final String mName; private final int mVersion; + private final DbOpenHelper mOpenHelper; - public SqlPersistence(String name, int version) { + public SqlPersistence(String name, int version, DbOpenHelper openHelper) { mName = name; mVersion = version; + mOpenHelper = openHelper; } public String getName() { @@ -45,6 +47,10 @@ public int getVersion() { return mVersion; } + public DbOpenHelper getOpenHelper() { + return mOpenHelper; + } + /** * Register one or more classes to be added to the Sqlite model. All classes should have an ID which will be treated * as autoincrement if possible. If your class has a field called <code>id</code> then it will be automatically diff --git a/src/main/java/com/codeslap/persistence/SqliteAdapterImpl.java b/src/main/java/com/codeslap/persistence/SqliteAdapterImpl.java index 2012746..e674d51 100644 --- a/src/main/java/com/codeslap/persistence/SqliteAdapterImpl.java +++ b/src/main/java/com/codeslap/persistence/SqliteAdapterImpl.java @@ -47,7 +47,7 @@ class SqliteAdapterImpl implements SqlAdapter { SqliteAdapterImpl(Context context, String name) { mPersistence = PersistenceConfig.getDatabase(name); - SqliteDb helper = SqliteDb.getInstance(context, mPersistence.getName(), mPersistence.getVersion()); + SqliteDb helper = SqliteDb.getInstance(context, mPersistence); mDb = helper.getDatabase(); mInsertHelperMap = new HashMap<String, DatabaseUtils.InsertHelper>(); } diff --git a/src/main/java/com/codeslap/persistence/SqliteDb.java b/src/main/java/com/codeslap/persistence/SqliteDb.java index 4011b8b..6b3d106 100644 --- a/src/main/java/com/codeslap/persistence/SqliteDb.java +++ b/src/main/java/com/codeslap/persistence/SqliteDb.java @@ -18,7 +18,6 @@ import android.content.Context; import android.database.sqlite.SQLiteDatabase; -import 
android.database.sqlite.SQLiteOpenHelper; import java.util.HashMap; import java.util.List; @@ -30,29 +29,50 @@ * created yet, or update it in case the version number changes. */ class SqliteDb { - private static final String TAG = SqliteDb.class.getSimpleName(); - + private static final Map<String, SqliteDb> instances = new HashMap<String, SqliteDb>(); private SQLiteDatabase mSqLiteDatabase; - private final Helper mDbHelper; + private final DbOpenHelper mDbHelper; + + private SqliteDb(Context context, SqlPersistence sqlPersistence) { + String name = sqlPersistence.getName(); + if (sqlPersistence.getOpenHelper() == null) { + mDbHelper = new Helper(context, name, sqlPersistence.getVersion()); + } else { + mDbHelper = sqlPersistence.getOpenHelper(); + } + mSqLiteDatabase = mDbHelper.getWritableDatabase(); + PersistenceLogManager.d(TAG, String.format("Opening \"%s\" database... Open: %s", name, mSqLiteDatabase.isOpen())); + } - private static class Helper extends SQLiteOpenHelper { + static SqliteDb getInstance(Context context, SqlPersistence sqlPersistence) { + String key = sqlPersistence.getName() + sqlPersistence.getVersion(); + if (!instances.containsKey(key)) { + instances.put(key, new SqliteDb(context, sqlPersistence)); + } + return instances.get(key); + } - private final String mName; + public SQLiteDatabase getDatabase() { + if (mSqLiteDatabase.isOpen()) { + return mSqLiteDatabase; + } + return mSqLiteDatabase = mDbHelper.getWritableDatabase(); + } + private static class Helper extends DbOpenHelper { public Helper(Context context, String name, int version) { - super(context, name, null, version); - mName = name; + super(context, name, version); } @Override public void onCreate(SQLiteDatabase db) { // create all tables for registered classes - SqlPersistence sqlPersistence = PersistenceConfig.getDatabase(mName); + SqlPersistence sqlPersistence = PersistenceConfig.getDatabase(getName()); List<Class<?>> objects = sqlPersistence.getSqliteClasses(); for (Class<?> 
clazz : objects) { - db.execSQL(SQLHelper.getCreateTableSentence(mName, clazz)); + db.execSQL(SQLHelper.getCreateTableSentence(getName(), clazz)); } // create all extra table for many to many relations List<ManyToMany> sqliteManyToMany = sqlPersistence.getSqliteManyToMany(); @@ -67,27 +87,4 @@ public void onUpgrade(SQLiteDatabase sqLiteDatabase, int oldVersion, int newVers } } - - private static final Map<String, SqliteDb> instances = new HashMap<String, SqliteDb>(); - - private SqliteDb(Context context, String name, int version) { - mDbHelper = new Helper(context, name, version); - mSqLiteDatabase = mDbHelper.getWritableDatabase(); - PersistenceLogManager.d(TAG, String.format("Opening \"%s\" database... Open: %s", name, mSqLiteDatabase.isOpen())); - } - - static SqliteDb getInstance(Context context, String name, int version) { - String key = name + version; - if (!instances.containsKey(key)) { - instances.put(key, new SqliteDb(context, name, version)); - } - return instances.get(key); - } - - public SQLiteDatabase getDatabase() { - if (mSqLiteDatabase.isOpen()) { - return mSqLiteDatabase; - } - return mSqLiteDatabase = mDbHelper.getWritableDatabase(); - } }
19dea8c85287e1462e4719e5710ed1951d3cde6a
orientdb
Fixed issue about parenthesis in SQL query
c
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilter.java b/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilter.java index e2871394021..bcf8747740d 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilter.java +++ b/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilter.java @@ -174,6 +174,10 @@ protected OSQLFilterCondition extractCondition() { } private OQueryOperator extractConditionOperator() { + if (currentPos >= text.length()) + // END OF PARSING: JUST RETURN + return null; + String word; word = nextWord(true, " 0123456789'\""); diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilterCondition.java b/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilterCondition.java index e0d29a4c0bf..fc2bfecad95 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilterCondition.java +++ b/core/src/main/java/com/orientechnologies/orient/core/sql/filter/OSQLFilterCondition.java @@ -55,6 +55,10 @@ public Object evaluate(final ORecordSchemaAware<?> iRecord) { r = convertedValues[1]; } + if (operator == null) + // UNITARY OPERATOR: JUST RETURN LEFT RESULT + return l; + return operator.evaluateRecord(iRecord, this, l, r); } @@ -137,11 +141,13 @@ public String toString() { buffer.append('('); buffer.append(left); - buffer.append(' '); - buffer.append(operator); - buffer.append(' '); - buffer.append(right); - buffer.append(')'); + if (operator != null) { + buffer.append(' '); + buffer.append(operator); + buffer.append(' '); + buffer.append(right); + buffer.append(')'); + } return buffer.toString(); } diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java index 45b1c3d64ed..b54a80be89d 100644 --- 
a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java +++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/SQLSelectTest.java @@ -54,6 +54,24 @@ public void queryNoWhere() { database.close(); } + + @Test + public void queryParentesisAsRight() { + database.open("admin", "admin"); + + List<ODocument> result = database.command( + new OSQLSynchQuery<ODocument>( + " select from Profile where name = 'Giuseppe' and ( name <> 'Napoleone' and nick is not null ) ")) + .execute(); + + Assert.assertTrue(result.size() != 0); + + for (ODocument d : result) { + Assert.assertEquals(d.getRecordType(), ODocument.RECORD_TYPE); + } + + database.close(); + } @Test public void queryTwoParentesisConditions() {
0b5aa15cb099f065e984b45b890351a7fe6d51f8
Vala
glib-2.0: GLib.static_assert returns void
c
https://github.com/GNOME/vala/
diff --git a/vapi/glib-2.0.vapi b/vapi/glib-2.0.vapi index f1f215780b..6b6ef38283 100644 --- a/vapi/glib-2.0.vapi +++ b/vapi/glib-2.0.vapi @@ -3820,5 +3820,5 @@ namespace GLib { [CCode (cname = "G_UNLIKELY", cheader_filename = "glib.h")] public static bool unlikely (bool expression); [CCode (cname = "G_STATIC_ASSERT", cheader_filename = "glib.h")] - public static bool static_assert (bool expression); + public static void static_assert (bool expression); }
f537b8cceefa694e895dfaa43722ce6b4283948a
elasticsearch
Change default operator to "or" for- "low_freq_operator" and "high_freq_operator" parameters for "common" queries--Closes -3178-
c
https://github.com/elastic/elasticsearch
diff --git a/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java b/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java index 94334f3652278..621f39aaf9ccf 100644 --- a/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java @@ -51,9 +51,9 @@ public class CommonTermsQueryParser implements QueryParser { static final float DEFAULT_MAX_TERM_DOC_FREQ = 0.01f; - static final Occur DEFAULT_HIGH_FREQ_OCCUR = Occur.MUST; + static final Occur DEFAULT_HIGH_FREQ_OCCUR = Occur.SHOULD; - static final Occur DEFAULT_LOW_FREQ_OCCUR = Occur.MUST; + static final Occur DEFAULT_LOW_FREQ_OCCUR = Occur.SHOULD; static final boolean DEFAULT_DISABLE_COORDS = true; @@ -81,7 +81,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars String minimumShouldMatch = null; boolean disableCoords = DEFAULT_DISABLE_COORDS; Occur highFreqOccur = DEFAULT_HIGH_FREQ_OCCUR; - Occur lowFreqOccur = DEFAULT_HIGH_FREQ_OCCUR; + Occur lowFreqOccur = DEFAULT_LOW_FREQ_OCCUR; float maxTermFrequency = DEFAULT_MAX_TERM_DOC_FREQ; token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { @@ -202,4 +202,4 @@ private final Query parseQueryString(ExtendedCommonTermsQuery query, String quer query.setMinimumNumberShouldMatch(minimumShouldMatch); return wrapSmartNameQuery(query, smartNameFieldMappers, parseContext); } -} \ No newline at end of file +} diff --git a/src/test/java/org/elasticsearch/test/integration/search/query/SimpleQueryTests.java b/src/test/java/org/elasticsearch/test/integration/search/query/SimpleQueryTests.java index 13eec4906c733..0f306cbfe7aff 100644 --- a/src/test/java/org/elasticsearch/test/integration/search/query/SimpleQueryTests.java +++ b/src/test/java/org/elasticsearch/test/integration/search/query/SimpleQueryTests.java @@ -127,13 +127,19 @@ public void testCommonTermsQuery() throws Exception { 
client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree").execute().actionGet(); client().prepareIndex("test", "type1", "3").setSource("field1", "quick lazy huge brown", "field2", "the quick lazy huge brown fox jumps over the tree").setRefresh(true).execute().actionGet(); - SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.commonTerms("field1", "the quick brown").cutoffFrequency(3)).execute().actionGet(); - assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.commonTerms("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.OR)).execute().actionGet(); + assertThat(searchResponse.getHits().totalHits(), equalTo(3l)); assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); + searchResponse = client().prepareSearch().setQuery(QueryBuilders.commonTerms("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.AND)).execute().actionGet(); + assertThat(searchResponse.getHits().totalHits(), equalTo(2l)); + assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2")); - searchResponse = client().prepareSearch().setQuery(QueryBuilders.commonTerms("field1", "the quick brown").cutoffFrequency(3).lowFreqOperator(Operator.OR)).execute().actionGet(); + // Default + searchResponse = client().prepareSearch().setQuery(QueryBuilders.commonTerms("field1", "the quick brown").cutoffFrequency(3)).execute().actionGet(); assertThat(searchResponse.getHits().totalHits(), equalTo(3l)); assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1")); assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("2"));
9a320a9a6d7b97c4205871ce80d8e4c6d1a377db
Vala
glib-2.0: revert previous patch against MarkupParser callbacks. The new version sacrificed usability for theoretical correctness, and was not well received.
a
https://github.com/GNOME/vala/
diff --git a/vapi/glib-2.0.vapi b/vapi/glib-2.0.vapi index d170aff739..7083cbc527 100644 --- a/vapi/glib-2.0.vapi +++ b/vapi/glib-2.0.vapi @@ -2830,7 +2830,8 @@ namespace GLib { public string[] fetch_all (); } - /* Simple XML Subset Parser */ + /* Simple XML Subset Parser + See http://live.gnome.org/Vala/MarkupSample for an example */ public errordomain MarkupError { BAD_UTF8, @@ -2860,20 +2861,15 @@ namespace GLib { public void* pop (); } - [CCode (cname = "GCallback")] - public static delegate void MarkupParserStartElementFunc (MarkupParseContext context, string element_name, [CCode (array_length = false, array_null_terminated = true)] string[] attribute_names, [CCode (array_length = false, array_null_terminated = true)] string[] attribute_values, void* user_data) throws MarkupError; + public delegate void MarkupParserStartElementFunc (MarkupParseContext context, string element_name, [CCode (array_length = false, array_null_terminated = true)] string[] attribute_names, [CCode (array_length = false, array_null_terminated = true)] string[] attribute_values) throws MarkupError; - [CCode (cname = "GCallback")] - public static delegate void MarkupParserEndElementFunc (MarkupParseContext context, string element_name, void* user_data) throws MarkupError; + public delegate void MarkupParserEndElementFunc (MarkupParseContext context, string element_name) throws MarkupError; - [CCode (cname = "GCallback")] - public static delegate void MarkupParserTextFunc (MarkupParseContext context, string text, size_t text_len, void* user_data) throws MarkupError; + public delegate void MarkupParserTextFunc (MarkupParseContext context, string text, size_t text_len) throws MarkupError; - [CCode (cname = "GCallback")] - public static delegate void MarkupParserPassthroughFunc (MarkupParseContext context, string passthrough_text, size_t text_len, void* user_data) throws MarkupError; + public delegate void MarkupParserPassthroughFunc (MarkupParseContext context, string passthrough_text, 
size_t text_len) throws MarkupError; - [CCode (cname = "GCallback")] - public static delegate void MarkupParserErrorFunc (MarkupParseContext context, Error error, void* user_data); + public delegate void MarkupParserErrorFunc (MarkupParseContext context, Error error); public struct MarkupParser { public unowned MarkupParserStartElementFunc start_element;
56031cb270834dea0d6e013641c582a3a56c33ff
kotlin
Control-Flow Analysis: Fix bug in finally-block- repetition in the presence of non-local returns -EA-65982 Fixed--
c
https://github.com/JetBrains/kotlin
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/cfg/pseudocode/PseudocodeImpl.java b/compiler/frontend/src/org/jetbrains/kotlin/cfg/pseudocode/PseudocodeImpl.java index 914604348544a..d9d30cb1ad74f 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/cfg/pseudocode/PseudocodeImpl.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/cfg/pseudocode/PseudocodeImpl.java @@ -86,6 +86,10 @@ public Instruction resolveToInstruction() { public PseudocodeLabel copy(int newLabelIndex) { return new PseudocodeLabel("L" + newLabelIndex, "copy of " + name + ", " + comment); } + + public PseudocodeImpl getPseudocode() { + return PseudocodeImpl.this; + } } private final List<Instruction> mutableInstructionList = new ArrayList<Instruction>(); @@ -455,6 +459,8 @@ private Instruction getNextPosition(int currentPosition) { } public int repeatPart(@NotNull Label startLabel, @NotNull Label finishLabel, int labelCount) { + PseudocodeImpl originalPseudocode = ((PseudocodeLabel) startLabel).getPseudocode(); + Integer startIndex = ((PseudocodeLabel) startLabel).getTargetInstructionIndex(); assert startIndex != null; Integer finishIndex = ((PseudocodeLabel) finishLabel).getTargetInstructionIndex(); @@ -462,7 +468,7 @@ public int repeatPart(@NotNull Label startLabel, @NotNull Label finishLabel, int Map<Label, Label> originalToCopy = Maps.newLinkedHashMap(); Multimap<Instruction, Label> originalLabelsForInstruction = HashMultimap.create(); - for (PseudocodeLabel label : labels) { + for (PseudocodeLabel label : originalPseudocode.labels) { Integer index = label.getTargetInstructionIndex(); if (index == null) continue; //label is not bounded yet if (label == startLabel || label == finishLabel) continue; @@ -476,11 +482,13 @@ public int repeatPart(@NotNull Label startLabel, @NotNull Label finishLabel, int labels.add((PseudocodeLabel) label); } for (int index = startIndex; index < finishIndex; index++) { - Instruction originalInstruction = mutableInstructionList.get(index); + 
Instruction originalInstruction = originalPseudocode.mutableInstructionList.get(index); repeatLabelsBindingForInstruction(originalInstruction, originalToCopy, originalLabelsForInstruction); addInstruction(copyInstruction(originalInstruction, originalToCopy)); } - repeatLabelsBindingForInstruction(mutableInstructionList.get(finishIndex), originalToCopy, originalLabelsForInstruction); + repeatLabelsBindingForInstruction(originalPseudocode.mutableInstructionList.get(finishIndex), + originalToCopy, + originalLabelsForInstruction); return labelCount; } diff --git a/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.instructions b/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.instructions new file mode 100644 index 0000000000000..b7155a8026035 --- /dev/null +++ b/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.instructions @@ -0,0 +1,195 @@ +== let == +fun <T: Any, U> T.let(f: (T) -> U): U = f(this) +--------------------- +L0: + 1 <START> + v(f: (T) -> U) + magic[FAKE_INITIALIZER](f: (T) -> U) -> <v0> + w(f|<v0>) + r(f) -> <v1> + r(this) -> <v2> + mark(f(this)) + call(f(this), invoke|<v1>, <v2>) -> <v3> + ret(*|<v3>) L1 +L1: + <END> NEXT:[<SINK>] +error: + <ERROR> PREV:[] +sink: + <SINK> PREV:[<ERROR>, <END>] +===================== +== bar == +fun bar(): Int = 1 +--------------------- +L0: + 1 <START> + r(1) -> <v0> + ret(*|<v0>) L1 +L1: + <END> NEXT:[<SINK>] +error: + <ERROR> PREV:[] +sink: + <SINK> PREV:[<ERROR>, <END>] +===================== +== foo == +fun foo(n: Int): Int { + try { + if (n < 0) return 0 + n.let { return it } + } + finally { + for (i in 1..2) { } + return bar() + } +} +--------------------- +L0: + 1 <START> + v(n: Int) + magic[FAKE_INITIALIZER](n: Int) -> <v0> + w(n|<v0>) + 2 mark({ try { if (n < 0) return 0 n.let { return it } } finally { for (i in 1..2) { } return bar() } }) + mark(try { if (n < 0) return 0 n.let { return it } } finally { for (i in 1..2) { } return 
bar() }) + jmp?(L2) NEXT:[mark({ for (i in 1..2) { } return bar() }), mark({ if (n < 0) return 0 n.let { return it } })] + 3 mark({ if (n < 0) return 0 n.let { return it } }) + mark(if (n < 0) return 0) + r(n) -> <v1> PREV:[mark(if (n < 0) return 0), jmp(L15)] + r(0) -> <v2> + mark(n < 0) + call(n < 0, compareTo|<v1>, <v2>) -> <v3> + jf(L3|<v3>) NEXT:[read (Unit), r(0) -> <v4>] + r(0) -> <v4> +L4 [start finally]: + 4 mark({ for (i in 1..2) { } return bar() }) + 5 r(1) -> <v5> PREV:[mark({ for (i in 1..2) { } return bar() }), jmp?(L16)] + r(2) -> <v6> + mark(1..2) + call(1..2, rangeTo|<v5>, <v6>) -> <v7> + v(i) +L5 [loop entry point]: +L9 [condition entry point]: + jmp?(L6) NEXT:[read (Unit), magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8>] PREV:[v(i), jmp(L5)] + magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8> + w(i|<v8>) + mark(for (i in 1..2) { }) +L7 [body entry point]: + 6 mark({ }) + read (Unit) + 5 jmp(L5) NEXT:[jmp?(L6)] +L6 [loop exit point]: +L8 [body exit point]: + read (Unit) PREV:[jmp?(L6)] + 4 mark(bar()) + call(bar(), bar) -> <v9> + ret(*|<v9>) L1 NEXT:[<END>] +L10 [finish finally]: +- 3 ret(*|<v4>) L1 NEXT:[<END>] PREV:[] +- jmp(L11) NEXT:[merge(if (n < 0) return 0|!<v11>) -> <v12>] PREV:[] +L3 [else branch]: + read (Unit) PREV:[jf(L3|<v3>)] +L11 ['if' expression result]: + merge(if (n < 0) return 0|!<v11>) -> <v12> + mark(n.let { return it }) + r(n) -> <v13> + mark({ return it }) + jmp?(L12) NEXT:[r({ return it }) -> <v14>, d({ return it })] + d({ return it }) NEXT:[<SINK>] +L12 [after local declaration]: + r({ return it }) -> <v14> PREV:[jmp?(L12)] + mark(let { return it }) + call(let { return it }, let|<v13>, <v14>) -> <v15> + 2 jmp(L20) NEXT:[mark({ for (i in 1..2) { } return bar() })] +L2 [onExceptionToFinallyBlock]: + 4 mark({ for (i in 1..2) { } return bar() }) PREV:[jmp?(L2)] + 5 r(1) -> <v5> + r(2) -> <v6> + mark(1..2) + call(1..2, rangeTo|<v5>, <v6>) -> <v7> + v(i) +L21 [copy of L5, loop entry point]: +L25 [copy of L9, condition entry point]: 
+ jmp?(L22) NEXT:[read (Unit), magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8>] PREV:[v(i), jmp(L21)] + magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8> + w(i|<v8>) + mark(for (i in 1..2) { }) +L23 [copy of L7, body entry point]: + 6 mark({ }) + read (Unit) + 5 jmp(L21) NEXT:[jmp?(L22)] +L22 [copy of L6, loop exit point]: +L24 [copy of L8, body exit point]: + read (Unit) PREV:[jmp?(L22)] + 4 mark(bar()) + call(bar(), bar) -> <v9> + ret(*|<v9>) L1 NEXT:[<END>] +- 2 jmp(error) NEXT:[<ERROR>] PREV:[] +L20 [skipFinallyToErrorBlock]: + 4 mark({ for (i in 1..2) { } return bar() }) PREV:[jmp(L20)] + 5 r(1) -> <v5> + r(2) -> <v6> + mark(1..2) + call(1..2, rangeTo|<v5>, <v6>) -> <v7> + v(i) +L26 [copy of L5, loop entry point]: +L30 [copy of L9, condition entry point]: + jmp?(L27) NEXT:[read (Unit), magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8>] PREV:[v(i), jmp(L26)] + magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8> + w(i|<v8>) + mark(for (i in 1..2) { }) +L28 [copy of L7, body entry point]: + 6 mark({ }) + read (Unit) + 5 jmp(L26) NEXT:[jmp?(L27)] +L27 [copy of L6, loop exit point]: +L29 [copy of L8, body exit point]: + read (Unit) PREV:[jmp?(L27)] + 4 mark(bar()) + call(bar(), bar) -> <v9> + ret(*|<v9>) L1 NEXT:[<END>] +- 2 merge(try { if (n < 0) return 0 n.let { return it } } finally { for (i in 1..2) { } return bar() }|<v15>) -> <v16> PREV:[] +L1: + 1 <END> NEXT:[<SINK>] PREV:[ret(*|<v9>) L1, ret(*|<v9>) L1, ret(*|<v9>) L1] +error: + <ERROR> PREV:[] +sink: + <SINK> PREV:[<ERROR>, <END>, d({ return it })] +===================== +== anonymous_0 == +{ return it } +--------------------- +L13: + 4 <START> + 5 mark(return it) + r(it) -> <v0> + 4 mark({ for (i in 1..2) { } return bar() }) + 5 r(1) -> <v5> + r(2) -> <v6> + mark(1..2) + call(1..2, rangeTo|<v5>, <v6>) -> <v7> + v(i) +L15 [copy of L5, loop entry point]: +L19 [copy of L9, condition entry point]: + jmp?(L16) NEXT:[r(1) -> <v5>, magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8>] + magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> 
<v8> + w(i|<v8>) + mark(for (i in 1..2) { }) +L17 [copy of L7, body entry point]: + 6 mark({ }) + read (Unit) + 5 jmp(L15) NEXT:[r(n) -> <v1>] +L16 [copy of L6, loop exit point]: +L18 [copy of L8, body exit point]: +- read (Unit) PREV:[] +- 4 mark(bar()) PREV:[] +- call(bar(), bar) -> <v9> PREV:[] +- ret(*|<v9>) L1 NEXT:[<END>] PREV:[] +- 5 ret(*|<v0>) L1 NEXT:[<END>] PREV:[] +- 4 ret(*|!<v1>) L14 PREV:[] +L14: + <END> NEXT:[<SINK>] PREV:[] +error: + <ERROR> PREV:[] +sink: + <SINK> PREV:[<ERROR>, <END>] +===================== diff --git a/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.kt b/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.kt new file mode 100644 index 0000000000000..cc9c317762489 --- /dev/null +++ b/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.kt @@ -0,0 +1,14 @@ +fun <T: Any, U> T.let(f: (T) -> U): U = f(this) + +fun bar(): Int = 1 + +fun foo(n: Int): Int { + try { + if (n < 0) return 0 + n.let { return it } + } + finally { + for (i in 1..2) { } + return bar() + } +} \ No newline at end of file diff --git a/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.values b/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.values new file mode 100644 index 0000000000000..8419322f99671 --- /dev/null +++ b/compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.values @@ -0,0 +1,56 @@ +== let == +fun <T: Any, U> T.let(f: (T) -> U): U = f(this) +--------------------- + <v0>: {<: (T) -> U} NEW: magic[FAKE_INITIALIZER](f: (T) -> U) -> <v0> +f <v1>: {<: (T) -> U} NEW: r(f) -> <v1> +this <v2>: {<: T} COPY +this <v2>: {<: T} NEW: r(this) -> <v2> +f(this) <v3>: {<: U} NEW: call(f(this), invoke|<v1>, <v2>) -> <v3> +===================== +== bar == +fun bar(): Int = 1 +--------------------- +1 <v0>: Int NEW: r(1) -> <v0> +===================== +== foo == +fun foo(n: Int): Int { + try { + if (n < 0) return 0 + n.let { 
return it } + } + finally { + for (i in 1..2) { } + return bar() + } +} +--------------------- + <v0>: Int NEW: magic[FAKE_INITIALIZER](n: Int) -> <v0> + <v8>: Int NEW: magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8> +n <v1>: {<: Comparable<Int>} NEW: r(n) -> <v1> +0 <v2>: Int NEW: r(0) -> <v2> +n < 0 <v3>: Boolean NEW: call(n < 0, compareTo|<v1>, <v2>) -> <v3> +0 <v4>: Int NEW: r(0) -> <v4> +return 0 !<v11>: * +if (n < 0) return 0 <v12>: * NEW: merge(if (n < 0) return 0|!<v11>) -> <v12> +n <v13>: Int NEW: r(n) -> <v13> +{ return it } <v14>: {<: (Int) -> ???} NEW: r({ return it }) -> <v14> +let { return it } <v15>: * NEW: call(let { return it }, let|<v13>, <v14>) -> <v15> +n.let { return it } <v15>: * COPY +{ if (n < 0) return 0 n.let { return it } } <v15>: * COPY +1 <v5>: Int NEW: r(1) -> <v5> +2 <v6>: Int NEW: r(2) -> <v6> +1..2 <v7>: {<: Iterable<Int>} NEW: call(1..2, rangeTo|<v5>, <v6>) -> <v7> +bar() <v9>: Int NEW: call(bar(), bar) -> <v9> +return bar() !<v10>: * +{ for (i in 1..2) { } return bar() } !<v10>: * COPY +try { if (n < 0) return 0 n.let { return it } } finally { for (i in 1..2) { } return bar() } <v16>: * NEW: merge(try { if (n < 0) return 0 n.let { return it } } finally { for (i in 1..2) { } return bar() }|<v15>) -> <v16> +{ try { if (n < 0) return 0 n.let { return it } } finally { for (i in 1..2) { } return bar() } } <v16>: * COPY +===================== +== anonymous_0 == +{ return it } +--------------------- + <v8>: Int NEW: magic[LOOP_RANGE_ITERATION](1..2|<v7>) -> <v8> +it <v0>: Int NEW: r(it) -> <v0> +return it !<v1>: * +return it !<v1>: * COPY +===================== diff --git a/compiler/tests/org/jetbrains/kotlin/cfg/ControlFlowTestGenerated.java b/compiler/tests/org/jetbrains/kotlin/cfg/ControlFlowTestGenerated.java index 8107c01dce20e..f7d682edb221c 100644 --- a/compiler/tests/org/jetbrains/kotlin/cfg/ControlFlowTestGenerated.java +++ b/compiler/tests/org/jetbrains/kotlin/cfg/ControlFlowTestGenerated.java @@ -204,6 +204,12 @@ public void 
testIf() throws Exception { doTest(fileName); } + @TestMetadata("localAndNonlocalReturnsWithFinally.kt") + public void testLocalAndNonlocalReturnsWithFinally() throws Exception { + String fileName = JetTestUtils.navigationMetadata("compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.kt"); + doTest(fileName); + } + @TestMetadata("OnlyWhileInFunctionBody.kt") public void testOnlyWhileInFunctionBody() throws Exception { String fileName = JetTestUtils.navigationMetadata("compiler/testData/cfg/controlStructures/OnlyWhileInFunctionBody.kt"); diff --git a/compiler/tests/org/jetbrains/kotlin/cfg/PseudoValueTestGenerated.java b/compiler/tests/org/jetbrains/kotlin/cfg/PseudoValueTestGenerated.java index 889e304aa4146..fda6ae9a42026 100644 --- a/compiler/tests/org/jetbrains/kotlin/cfg/PseudoValueTestGenerated.java +++ b/compiler/tests/org/jetbrains/kotlin/cfg/PseudoValueTestGenerated.java @@ -210,6 +210,12 @@ public void testIf() throws Exception { doTest(fileName); } + @TestMetadata("localAndNonlocalReturnsWithFinally.kt") + public void testLocalAndNonlocalReturnsWithFinally() throws Exception { + String fileName = JetTestUtils.navigationMetadata("compiler/testData/cfg/controlStructures/localAndNonlocalReturnsWithFinally.kt"); + doTest(fileName); + } + @TestMetadata("OnlyWhileInFunctionBody.kt") public void testOnlyWhileInFunctionBody() throws Exception { String fileName = JetTestUtils.navigationMetadata("compiler/testData/cfg/controlStructures/OnlyWhileInFunctionBody.kt");
9e7daeca5209937905d9541876c5785a86ead095
Vala
vapigen: Add support for deprecated metadata attributes
a
https://github.com/GNOME/vala/
diff --git a/vapigen/valagidlparser.vala b/vapigen/valagidlparser.vala index 16b79013b2..9712cec35f 100644 --- a/vapigen/valagidlparser.vala +++ b/vapigen/valagidlparser.vala @@ -305,6 +305,14 @@ public class Vala.GIdlParser : CodeVisitor { if (eval (nv[1]) == "1") { return_type.value_owned = true; } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + cb.deprecated = true; + } + } else if (nv[0] == "replacement") { + cb.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + cb.deprecated_since = eval (nv[1]); } else if (nv[0] == "type_arguments") { var type_args = eval (nv[1]).split (","); foreach (string type_arg in type_args) { @@ -472,6 +480,14 @@ public class Vala.GIdlParser : CodeVisitor { if (eval (nv[1]) == "0") { st.has_copy_function = false; } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + st.deprecated = true; + } + } else if (nv[0] == "replacement") { + st.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + st.deprecated_since = eval (nv[1]); } else if (nv[0] == "has_destroy_function") { if (eval (nv[1]) == "0") { st.has_destroy_function = false; @@ -554,6 +570,14 @@ public class Vala.GIdlParser : CodeVisitor { if (eval (nv[1]) == "1") { ref_function_void = true; } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + cl.deprecated = true; + } + } else if (nv[0] == "replacement") { + cl.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + cl.deprecated_since = eval (nv[1]); } else if (nv[0] == "type_parameters") { foreach (string type_param_name in eval (nv[1]).split (",")) { cl.add_type_parameter (new TypeParameter (type_param_name, current_source_reference)); @@ -637,6 +661,14 @@ public class Vala.GIdlParser : CodeVisitor { var nv = attr.split ("=", 2); if (nv[0] == "cheader_filename") { st.add_cheader_filename (eval (nv[1])); + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + st.deprecated = true; + } + } else if (nv[0] 
== "replacement") { + st.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + st.deprecated_since = eval (nv[1]); } else if (nv[0] == "hidden") { if (eval (nv[1]) == "1") { return; @@ -769,6 +801,14 @@ public class Vala.GIdlParser : CodeVisitor { var nv = attr.split ("=", 2); if (nv[0] == "cheader_filename") { st.add_cheader_filename (eval (nv[1])); + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + st.deprecated = true; + } + } else if (nv[0] == "replacement") { + st.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + st.deprecated_since = eval (nv[1]); } else if (nv[0] == "immutable") { if (eval (nv[1]) == "1") { st.is_immutable = true; @@ -834,6 +874,14 @@ public class Vala.GIdlParser : CodeVisitor { if (eval (nv[1]) == "1") { cl.is_immutable = true; } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + cl.deprecated = true; + } + } else if (nv[0] == "replacement") { + cl.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + cl.deprecated_since = eval (nv[1]); } else if (nv[0] == "const_cname") { cl.const_cname = eval (nv[1]); } else if (nv[0] == "free_function") { @@ -978,6 +1026,14 @@ public class Vala.GIdlParser : CodeVisitor { if (eval (nv[1]) == "1") { return; } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + en.deprecated = true; + } + } else if (nv[0] == "replacement") { + en.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + en.deprecated_since = eval (nv[1]); } else if (nv[0] == "rename_to") { en.name = eval (nv[1]); } else if (nv[0] == "errordomain") { @@ -1066,6 +1122,14 @@ public class Vala.GIdlParser : CodeVisitor { } } else if (nv[0] == "type_check_function") { cl.type_check_function = eval (nv[1]); + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + cl.deprecated = true; + } + } else if (nv[0] == "replacement") { + cl.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + 
cl.deprecated_since = eval (nv[1]); } else if (nv[0] == "type_id") { cl.set_type_id (eval (nv[1])); } else if (nv[0] == "abstract") { @@ -1654,6 +1718,14 @@ public class Vala.GIdlParser : CodeVisitor { foreach (string type_arg in type_args) { return_type.add_type_argument (get_type_from_string (type_arg)); } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + m.deprecated = true; + } + } else if (nv[0] == "replacement") { + m.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + m.deprecated_since = eval (nv[1]); } else if (nv[0] == "cheader_filename") { m.add_cheader_filename (eval (nv[1])); } else if (nv[0] == "abstract") { @@ -2036,6 +2108,14 @@ public class Vala.GIdlParser : CodeVisitor { foreach (string type_arg in type_args) { prop.property_type.add_type_argument (get_type_from_string (type_arg)); } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + prop.deprecated = true; + } + } else if (nv[0] == "replacement") { + prop.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + prop.deprecated_since = eval (nv[1]); } else if (nv[0] == "accessor_method") { if (eval (nv[1]) == "0") { prop.no_accessor_method = true; @@ -2072,6 +2152,14 @@ public class Vala.GIdlParser : CodeVisitor { var nv = attr.split ("=", 2); if (nv[0] == "cheader_filename") { c.add_cheader_filename (eval (nv[1])); + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + c.deprecated = true; + } + } else if (nv[0] == "replacement") { + c.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + c.deprecated_since = eval (nv[1]); } else if (nv[0] == "hidden") { if (eval (nv[1]) == "1") { return null; @@ -2099,6 +2187,9 @@ public class Vala.GIdlParser : CodeVisitor { string array_length_cname = null; string array_length_type = null; bool array_null_terminated = false; + bool deprecated = false; + string deprecated_since = null; + string replacement = null; var attributes = get_attributes 
("%s.%s".printf (current_data_type.get_cname (), node.name)); if (attributes != null) { @@ -2134,6 +2225,14 @@ public class Vala.GIdlParser : CodeVisitor { foreach (string type_arg in type_args) { type.add_type_argument (get_type_from_string (type_arg)); } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + deprecated = true; + } + } else if (nv[0] == "replacement") { + replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + deprecated_since = eval (nv[1]); } else if (nv[0] == "cheader_filename") { cheader_filename = eval (nv[1]); } else if (nv[0] == "ctype") { @@ -2171,6 +2270,18 @@ public class Vala.GIdlParser : CodeVisitor { field.set_cname (node.name); } + if (deprecated) { + field.deprecated = true; + + if (deprecated_since != null) { + field.deprecated_since = deprecated_since; + } + + if (replacement != null) { + field.replacement = replacement; + } + } + if (ctype != null) { field.set_ctype (ctype); } @@ -2291,6 +2402,14 @@ public class Vala.GIdlParser : CodeVisitor { if (eval (nv[1]) == "1") { return null; } + } else if (nv[0] == "deprecated") { + if (eval (nv[1]) == "1") { + sig.deprecated = true; + } + } else if (nv[0] == "replacement") { + sig.replacement = eval (nv[1]); + } else if (nv[0] == "deprecated_since") { + sig.deprecated_since = eval (nv[1]); } else if (nv[0] == "transfer_ownership") { if (eval (nv[1]) == "1") { sig.return_type.value_owned = true;
3d53bdc3d320973bcb0ca67047a59e0e58cee0b3
Mylyn Reviews
cleanup warnings * Fix build path issue * Minor Cleanup for Action Change-Id: I4e4bda10624e16d4f69545a953ae80d84939c1dd
p
https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews
diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/build.properties b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/build.properties index e10dcceb..cc91072e 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.dsl/build.properties +++ b/tbr/org.eclipse.mylyn.reviews.tasks.dsl/build.properties @@ -1,5 +1,4 @@ -source.. = src/,\ - src-gen/ +source.. = src/ bin.includes = META-INF/,\ .,\ - plugin.xml \ No newline at end of file + plugin.xml diff --git a/tbr/org.eclipse.mylyn.versions.context.ui/src/org/eclipse/mylyn/versions/tasks/context/ImportAsContextAction.java b/tbr/org.eclipse.mylyn.versions.context.ui/src/org/eclipse/mylyn/versions/tasks/context/ImportAsContextAction.java index 059158ce..68ea94c6 100644 --- a/tbr/org.eclipse.mylyn.versions.context.ui/src/org/eclipse/mylyn/versions/tasks/context/ImportAsContextAction.java +++ b/tbr/org.eclipse.mylyn.versions.context.ui/src/org/eclipse/mylyn/versions/tasks/context/ImportAsContextAction.java @@ -41,7 +41,7 @@ public ImportAsContextAction() { } public void run() { - + throw new java.lang.UnsupportedOperationException(); } private String formatHandleString(Change c) { @@ -71,11 +71,9 @@ public void run(ITaskVersionsModel model) { if (elementNotDeleted(c)) { InteractionEvent interactionEvent = new InteractionEvent( Kind.SELECTION, null, formatHandleString(c), ORIGIN); + ContextCore.getContextManager().processInteractionEvent(interactionEvent); - ContextCore.getContextManager().processInteractionEvent( - interactionEvent); - MonitorUiPlugin.getDefault().notifyInteractionObserved( - interactionEvent); + MonitorUiPlugin.getDefault().notifyInteractionObserved(interactionEvent); } } } diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java index 18c1d070..bc4cb83d 100644 --- 
a/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java @@ -1,3 +1,13 @@ +/******************************************************************************* + * Copyright (c) 2012 Research Group for Industrial Software (INSO), Vienna University of Technology. + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * Research Group for Industrial Software (INSO), Vienna University of Technology - initial API and implementation + *******************************************************************************/ package org.eclipse.mylyn.versions.tasks.mapper.generic; import static org.junit.Assert.*; @@ -16,7 +26,11 @@ import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; - +/** + * + * @author Kilian Matt + * + */ public class GenericTaskChangesetMapperTest { private GenericTaskChangesetMapper mapper; diff --git a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/internal/versions/tasks/ui/ChangesetPart.java b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/internal/versions/tasks/ui/ChangesetPart.java index 49563f3c..bcfffdae 100644 --- a/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/internal/versions/tasks/ui/ChangesetPart.java +++ b/tbr/org.eclipse.mylyn.versions.tasks.ui/src/org/eclipse/mylyn/internal/versions/tasks/ui/ChangesetPart.java @@ -45,14 +45,13 @@ import org.eclipse.ui.forms.widgets.Section; /** - * * @author Kilian Matt - * */ @SuppressWarnings("restriction") public class ChangesetPart extends AbstractTaskEditorPart { 
private TableViewer table; - private ChangesetModel model = new ChangesetModel(); + + private final ChangesetModel model = new ChangesetModel(); public ChangesetPart() { setPartName("Changeset"); @@ -67,8 +66,7 @@ public void createControl(Composite parent, FormToolkit toolkit) { createTable(composite); } - private Composite createContentComposite(FormToolkit toolkit, - Section createSection) { + private Composite createContentComposite(FormToolkit toolkit, Section createSection) { Composite composite = toolkit.createComposite(createSection); createSection.setClient(composite); composite.setLayout(new FillLayout()); @@ -107,10 +105,10 @@ protected void fillToolBar(ToolBarManager toolBarManager) { super.fillToolBar(toolBarManager); toolBarManager.add(new IncludeSubTasksAction(model)); List<ITaskVersionsContributionAction> contributions = InternalExtensionPointLoader.loadActionContributions(); - for(final ITaskVersionsContributionAction action : contributions) { + for (final ITaskVersionsContributionAction action : contributions) { toolBarManager.add(new ActionDelegate(action) { - @Override - public void runWithEvent(Event event) { + @Override + public void runWithEvent(Event event) { action.run(model); } }); @@ -120,37 +118,33 @@ public void runWithEvent(Event event) { private void registerContextMenu(TableViewer table) { MenuManager menuManager = new MenuManager(); menuManager.setRemoveAllWhenShown(true); - getTaskEditorPage().getEditorSite().registerContextMenu( - "org.eclipse.mylyn.versions.changesets", menuManager, table, - true); + getTaskEditorPage().getEditorSite().registerContextMenu("org.eclipse.mylyn.versions.changesets", menuManager, + table, true); Menu menu = menuManager.createContextMenu(table.getControl()); table.getTable().setMenu(menu); } private void addColumn(TableViewer table, String name) { - TableViewerColumn tableViewerColumn = new TableViewerColumn(table, - SWT.LEFT); + TableViewerColumn tableViewerColumn = new TableViewerColumn(table, 
SWT.LEFT); tableViewerColumn.getColumn().setText(name); tableViewerColumn.getColumn().setWidth(100); } - private AbstractChangesetMappingProvider determineBestProvider( - final ITask task) { + private AbstractChangesetMappingProvider determineBestProvider(final ITask task) { AbstractChangesetMappingProvider bestProvider = new NullProvider(); int score = Integer.MIN_VALUE; - for (AbstractChangesetMappingProvider mappingProvider : TaskChangesetUtil - .getMappingProviders()) { + for (AbstractChangesetMappingProvider mappingProvider : TaskChangesetUtil.getMappingProviders()) { if (score < mappingProvider.getScoreFor(task)) { bestProvider = mappingProvider; } } return bestProvider; } - private static class NullProvider extends AbstractChangesetMappingProvider{ + + private static class NullProvider extends AbstractChangesetMappingProvider { @Override - public void getChangesetsForTask(IChangeSetMapping mapping, - IProgressMonitor monitor) throws CoreException { + public void getChangesetsForTask(IChangeSetMapping mapping, IProgressMonitor monitor) throws CoreException { } @Override @@ -160,8 +154,7 @@ public int getScoreFor(ITask task) { } - private IChangeSetMapping createChangeSetMapping(final ITask task, - final List<TaskChangeSet> changesets) { + private IChangeSetMapping createChangeSetMapping(final ITask task, final List<TaskChangeSet> changesets) { return new IChangeSetMapping() { public ITask getTask() { @@ -207,8 +200,7 @@ public List<TaskChangeSet> getInput() { if (task instanceof ITaskContainer) { ITaskContainer taskContainer = (ITaskContainer) task; for (ITask subTask : taskContainer.getChildren()) { - changesetsMapping.add(createChangeSetMapping(subTask, - changesets)); + changesetsMapping.add(createChangeSetMapping(subTask, changesets)); } } } @@ -221,7 +213,8 @@ public void run() { provider.getChangesetsForTask(csm, new NullProgressMonitor()); } } catch (CoreException e) { - getTaskEditorPage().getTaskEditor().setMessage("An exception occurred " + 
e.getMessage(), IMessageProvider.ERROR); + getTaskEditorPage().getTaskEditor().setMessage("An exception occurred " + e.getMessage(), + IMessageProvider.ERROR); } }
c7c8f2fe48d3a16ac70fb98f662d3d77292ba0cd
hadoop
MAPREDUCE-2603. Disable High-Ram emulation in- system tests. (Vinay Kumar Thota via amarrk)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1138301 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/hadoop
diff --git a/mapreduce/CHANGES.txt b/mapreduce/CHANGES.txt index 988c40eda0b45..75c454e91447b 100644 --- a/mapreduce/CHANGES.txt +++ b/mapreduce/CHANGES.txt @@ -186,6 +186,9 @@ Trunk (unreleased changes) BUG FIXES + MAPREDUCE-2603. Disable High-Ram emulation in system tests. + (Vinay Kumar Thota via amarrk) + MAPREDUCE-2539. Fixed NPE in getMapTaskReports in JobClient. (Robert Evans via acmurthy) diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationEnableForAllTypesOfJobs.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationEnableForAllTypesOfJobs.java index 4144bae842e87..3ade9e34e687b 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationEnableForAllTypesOfJobs.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationEnableForAllTypesOfJobs.java @@ -56,6 +56,7 @@ public void testInputCompressionEmualtionEnableForAllJobsWithDefaultRatios() final String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_INPUT_DECOMPRESS_ENABLE + "=true", "-D", GridMixConfig.GRIDMIX_INPUT_COMPRESS_RATIO + "=0.46", "-D", GridMixConfig.GRIDMIX_INTERMEDIATE_COMPRESSION_RATIO + "=0.35", @@ -84,6 +85,7 @@ public void testInputCompressionEmulationEnableForAllJobsWithCustomRatios() final String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForCompressInAndUncompressOut.java 
b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForCompressInAndUncompressOut.java index 6f0dcbff0f056..4b7fc3a15aada 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForCompressInAndUncompressOut.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForCompressInAndUncompressOut.java @@ -56,6 +56,7 @@ public void testCompressionEmulationOfCompressedInputWithDefaultRatios() final String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true" }; @@ -85,6 +86,7 @@ public void testCompressionEmulationOfCompressedInputWithCustomRatios() "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true", "-D", GridMixConfig.GRIDMIX_INPUT_DECOMPRESS_ENABLE + "=true", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_INPUT_COMPRESS_RATIO + "=0.58", "-D", GridMixConfig.GRIDMIX_INTERMEDIATE_COMPRESSION_RATIO + "=0.42" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForUncompressInAndCompressOut.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForUncompressInAndCompressOut.java index 70dc0d1276451..383fc83de4b1d 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForUncompressInAndCompressOut.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestCompressionEmulationForUncompressInAndCompressOut.java @@ -54,6 +54,7 @@ public void testCompressionEmulationOfCompressedOuputWithDefaultRatios() final String [] otherArgs = { "-D", 
GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true" }; @@ -82,6 +83,7 @@ public void testCompressionEmulationOfCompressedOutputWithCustomRatios() final String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_OUTPUT_COMPRESSION_RATIO + "=0.38" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSAndLocalFSDCFiles.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSAndLocalFSDCFiles.java index d98b259177ab4..a1ae1e9dfafe7 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSAndLocalFSDCFiles.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSAndLocalFSDCFiles.java @@ -59,6 +59,7 @@ public void testGenerateDataEmulateHDFSAndLocalFSDCFiles() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -85,6 +86,7 @@ public void testEmulationOfHDFSAndLocalFSDCFiles() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, diff --git 
a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFileUsesMultipleJobs.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFileUsesMultipleJobs.java index 00d2e4825a2a2..7f8938f88a742 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFileUsesMultipleJobs.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFileUsesMultipleJobs.java @@ -58,6 +58,7 @@ public void testGenerateAndEmulationOfHDFSDCFile() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -81,6 +82,7 @@ public void testGridmixEmulationOfHDFSPublicDCFile() tracePath}; final String [] otherArgs = { + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFilesWithDifferentVisibilities.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFilesWithDifferentVisibilities.java index 3840f1bbeafa0..453e5b990815b 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFilesWithDifferentVisibilities.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfHDFSDCFilesWithDifferentVisibilities.java @@ -60,6 +60,7 @@ public void testGenerateAndEmulateOfHDFSDCFilesWithDiffVisibilities() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", + "-D", 
GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -81,6 +82,7 @@ public void testHDFSDCFilesWithoutEnableDCEmulation() "REPLAY", tracePath}; final String [] otherArgs = { + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfLocalFSDCFiles.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfLocalFSDCFiles.java index e50eb6e2e8138..eff47f2d64134 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfLocalFSDCFiles.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestEmulationOfLocalFSDCFiles.java @@ -57,6 +57,7 @@ public void testGenerateInputAndEmulateLocalFSDCFile() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -83,6 +84,7 @@ public void testEmulationOfLocalFSDCFile() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixDataGeneration.java 
b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixDataGeneration.java index f1501bf850b77..ef273b5fd2519 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixDataGeneration.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixDataGeneration.java @@ -93,6 +93,7 @@ public void testGenerateDataWithSTRESSSubmission() throws Exception { String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; int exitCode = @@ -123,6 +124,7 @@ public void testGenerateDataWithREPLAYSubmission() throws Exception { String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; @@ -154,6 +156,7 @@ public void testGenerateDataWithSERIALSubmission() throws Exception { long bytesPerFile = 200 * 1024 * 1024; // 200 mb per file of data String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_BYTES_PER_FILE + "=" + bytesPerFile, + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixFilePool.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixFilePool.java index 1ad10d8af50fe..883feec88fcbe 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixFilePool.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridMixFilePool.java @@ -80,6 +80,7 @@ public void testFilesCountAndSizesForSpecifiedFilePool() throws Exception { 
String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixCompressionEmulationWithCompressInput.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixCompressionEmulationWithCompressInput.java index adaa0d2363be8..3fdd16d7f6f90 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixCompressionEmulationWithCompressInput.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixCompressionEmulationWithCompressInput.java @@ -59,6 +59,7 @@ public void testGridmixCompressionRatiosAgainstDefaultCompressionRatio() final String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -89,6 +90,7 @@ public void testGridmixOuputCompressionRatiosAgainstCustomRatios() "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=true", "-D", GridMixConfig.GRIDMIX_INPUT_DECOMPRESS_ENABLE + "=true", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_INPUT_COMPRESS_RATIO + "=0.68", "-D", GridMixConfig.GRIDMIX_INTERMEDIATE_COMPRESSION_RATIO + "=0.35", "-D", GridMixConfig.GRIDMIX_OUTPUT_COMPRESSION_RATIO + "=0.40" diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPrivateDCFile.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPrivateDCFile.java index 5289bf3c8a140..e6c7e6af46ba3 100644 --- 
a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPrivateDCFile.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPrivateDCFile.java @@ -56,6 +56,7 @@ public void testGenerateAndEmulateOfHDFSPrivateDCFile() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -78,6 +79,7 @@ public void testGridmixEmulationOfHDFSPrivateDCFile() "REPLAY", tracePath}; final String [] otherArgs = { + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPublicDCFile.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPublicDCFile.java index e12180c72e428..0bf07fdf4d208 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPublicDCFile.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfHDFSPublicDCFile.java @@ -55,6 +55,7 @@ public void testGenerateAndEmulationOfSingleHDFSDCFile() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -80,6 +81,7 @@ public void testGridmixEmulationOfSingleHDFSPublicDCFile() tracePath}; final String [] otherArgs = { + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", 
GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPrivateDCFiles.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPrivateDCFiles.java index 4dca1a214ce82..5f464ce39be56 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPrivateDCFiles.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPrivateDCFiles.java @@ -58,6 +58,7 @@ public void testGenerateAndEmulationOfMultipleHDFSPrivateDCFiles() tracePath}; final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -81,6 +82,7 @@ public void testGridmixEmulationOfMultipleHDFSPrivateDCFiles() "STRESS", tracePath}; final String [] otherArgs = { + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPublicDCFiles.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPublicDCFiles.java index 09bbf181226c1..cca5da83ecb48 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPublicDCFiles.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixEmulationOfMultipleHDFSPublicDCFiles.java @@ -59,6 +59,7 
@@ public void testGenerateAndEmulationOfMultipleHDFSDCFiles() final String [] otherArgs = { "-D", MRJobConfig.JOB_CANCEL_DELEGATION_TOKEN + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, @@ -81,6 +82,7 @@ public void testGridmixEmulationOfMulitpleHDFSPublicDCFile() tracePath}; final String [] otherArgs = { + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=true" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath, diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java index c48a7461d3fd2..ec11a2b36e66c 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith10minTrace.java @@ -55,6 +55,7 @@ public void testGridmixWith10minTrace() throws Exception { String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize, "-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=false", "-D", GridMixConfig.GRIDMIX_SLEEPJOB_MAPTASK_ONLY + "=true", diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java index ec2a1377bd6cb..9bcb45a3fbb6c 100644 --- 
a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith12minTrace.java @@ -51,6 +51,7 @@ public void testGridmixWith12minTrace() throws Exception { String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_SLEEP_MAP_MAX_TIME + "=10", "-D", GridMixConfig.GRIDMIX_SLEEP_REDUCE_MAX_TIME + "=5" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java index ed2648448fa5a..c583e6d3a29fc 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith1minTrace.java @@ -49,6 +49,7 @@ public void testGridmixWith1minTrace() throws Exception { String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith2minStreamingJobTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith2minStreamingJobTrace.java index 9628dd2db8d6c..d9fb7c70f7f84 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith2minStreamingJobTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith2minStreamingJobTrace.java @@ -56,6 +56,7 @@ public void 
testGridmixWith2minStreamJobTrace() throws Exception { "-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=true", "-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize, "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; runGridmixAndVerify(runtimeValues, otherArgs, tracePath); diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minStreamingJobTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minStreamingJobTrace.java index 926f795b747df..85dedf6675f96 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minStreamingJobTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minStreamingJobTrace.java @@ -59,6 +59,7 @@ public void testGridmixWith3minStreamJobTrace() throws Exception { "-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=true", "-D", GridMixConfig.GRIDMIX_BYTES_PER_FILE + "=" + bytesPerFile, "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java index bed33d0dd3ebc..5f2171fb40196 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith3minTrace.java @@ -52,6 +52,7 @@ public void testGridmixWith3minTrace() throws Exception { String [] otherArgs = { "-D", 
GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minStreamingJobTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minStreamingJobTrace.java index 370f120aa961a..ef1878c0855b5 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minStreamingJobTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minStreamingJobTrace.java @@ -56,6 +56,7 @@ public void testGridmixWith5minStreamJobTrace() throws Exception { "-D", GridMixConfig.GRIDMIX_KEY_FRC + "=0.5f", "-D", GridMixConfig.GRIDMIX_BYTES_PER_FILE + "=" + bytesPerFile, "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false" }; diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java index 5a141d4c8153d..c55167e3b4f51 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith5minTrace.java @@ -51,6 +51,7 @@ public void testGridmixWith5minTrace() throws Exception { String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize }; diff --git 
a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java index 0791d68aee28b..55be37b17dd89 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/TestGridmixWith7minTrace.java @@ -51,6 +51,7 @@ public void testGridmixWith7minTrace() throws Exception { String [] otherArgs = { "-D", GridMixConfig.GRIDMIX_DISTCACHE_ENABLE + "=false", + "-D", GridmixJob.GRIDMIX_HIGHRAM_EMULATION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_COMPRESSION_ENABLE + "=false", "-D", GridMixConfig.GRIDMIX_MINIMUM_FILE_SIZE + "=" + minFileSize, "-D", GridMixConfig.GRIDMIX_JOB_SUBMISSION_QUEUE_IN_TRACE + "=false" diff --git a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java index 46872b2418cd4..ae71ec5764bde 100644 --- a/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java +++ b/mapreduce/src/contrib/gridmix/src/test/system/org/apache/hadoop/mapred/gridmix/test/system/GridmixJobVerification.java @@ -316,7 +316,9 @@ public JobConf getSimulatedJobConf(JobID simulatedJobID, File tmpJHFolder) Path jhpath = new Path(historyFilePath); fs = jhpath.getFileSystem(conf); fs.copyToLocalFile(jhpath,new Path(tmpJHFolder.toString())); - fs.copyToLocalFile(new Path(historyFilePath + "_conf.xml"), + String historyPath = + historyFilePath.substring(0,historyFilePath.lastIndexOf("_")); + fs.copyToLocalFile(new Path(historyPath + "_conf.xml"), new Path(tmpJHFolder.toString())); JobConf jobConf = new JobConf(); jobConf.addResource(new 
Path(tmpJHFolder.toString() diff --git a/mapreduce/src/contrib/gridmix/src/test/system/resources/highram_mr_jobs_case4.json.gz b/mapreduce/src/contrib/gridmix/src/test/system/resources/highram_mr_jobs_case4.json.gz index c4d4657c3cb7f..229d8d321bc4a 100644 Binary files a/mapreduce/src/contrib/gridmix/src/test/system/resources/highram_mr_jobs_case4.json.gz and b/mapreduce/src/contrib/gridmix/src/test/system/resources/highram_mr_jobs_case4.json.gz differ
e449fb139b70db15ffae182355d5306d90389adb
ReactiveX-RxJava
Fixed javadoc and comments
c
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java b/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java index 3cf2a410c4..87d0cbe0c9 100644 --- a/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java +++ b/rxjava-core/src/main/java/rx/operators/OperationCombineLatest.java @@ -43,6 +43,14 @@ public class OperationCombineLatest { + /** + * Combines the two given observables, emitting an event containing an aggregation of the latest values of each of the source observables + * each time an event is received from one of the source observables, where the aggregation is defined by the given function. + * @param w0 The first source observable. + * @param w1 The second source observable. + * @param combineLatestFunction The aggregation function used to combine the source observable values. + * @return A function from an observer to a subscription. This can be used to create an observable from. + */ public static <T0, T1, R> Func1<Observer<R>, Subscription> combineLatest(Observable<T0> w0, Observable<T1> w1, Func2<T0, T1, R> combineLatestFunction) { Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(combineLatestFunction)); a.addObserver(new CombineObserver<R, T0>(a, w0)); @@ -50,6 +58,9 @@ public static <T0, T1, R> Func1<Observer<R>, Subscription> combineLatest(Observa return a; } + /** + * @see #combineLatest(Observable<T0> w0, Observable<T1> w1, Func2<T0, T1, R> combineLatestFunction) + */ public static <T0, T1, T2, R> Func1<Observer<R>, Subscription> combineLatest(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Func3<T0, T1, T2, R> combineLatestFunction) { Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(combineLatestFunction)); a.addObserver(new CombineObserver<R, T0>(a, w0)); @@ -58,6 +69,9 @@ public static <T0, T1, T2, R> Func1<Observer<R>, Subscription> combineLatest(Obs return a; } + /** + * @see #combineLatest(Observable<T0> w0, Observable<T1> w1, Func2<T0, T1, R> combineLatestFunction) + 
*/ public static <T0, T1, T2, T3, R> Func1<Observer<R>, Subscription> combineLatest(Observable<T0> w0, Observable<T1> w1, Observable<T2> w2, Observable<T3> w3, Func4<T0, T1, T2, T3, R> combineLatestFunction) { Aggregator<R> a = new Aggregator<R>(Functions.fromFunc(combineLatestFunction)); a.addObserver(new CombineObserver<R, T0>(a, w0)); @@ -91,7 +105,7 @@ public void onCompleted() { @Override public void onError(Exception e) { - a.error(this, e); + a.error(e); } @Override @@ -101,32 +115,46 @@ public void onNext(T args) { } /** - * Receive notifications from each of the Observables we are reducing and execute the combineLatestFunction whenever we have received events from all Observables. - * - * @param <R> + * Receive notifications from each of the observables we are reducing and execute the combineLatestFunction + * whenever we have received an event from one of the observables, as soon as each Observable has received + * at least one event. */ private static class Aggregator<R> implements Func1<Observer<R>, Subscription> { + private Observer<R> observer; + private final FuncN<R> combineLatestFunction; - private Observer<R> Observer; - private AtomicBoolean running = new AtomicBoolean(true); + private final AtomicBoolean running = new AtomicBoolean(true); + // used as an internal lock for handling the latest values and the completed state of each observer + private final Object lockObject = new Object(); + /** - * store when a Observer completes + * Store when an observer completes. * <p> - * Note that access to this set MUST BE SYNCHRONIZED + * Note that access to this set MUST BE SYNCHRONIZED via 'lockObject' above. 
* */ - private Set<CombineObserver<R, ?>> completed = new HashSet<CombineObserver<R, ?>>(); + private final Set<CombineObserver<R, ?>> completed = new HashSet<CombineObserver<R, ?>>(); /** - * The last value from a Observer + * The latest value from each observer * <p> - * Note that access to this set MUST BE SYNCHRONIZED + * Note that access to this set MUST BE SYNCHRONIZED via 'lockObject' above. * */ - private Map<CombineObserver<R, ?>, Object> lastValue = new HashMap<CombineObserver<R, ?>, Object>(); + private final Map<CombineObserver<R, ?>, Object> latestValue = new HashMap<CombineObserver<R, ?>, Object>(); - private Set<CombineObserver<R, ?>> hasLastValue = new HashSet<CombineObserver<R, ?>>(); - private List<CombineObserver<R, ?>> observers = new LinkedList<CombineObserver<R, ?>>(); + /** + * Whether each observer has a latest value at all. + * <p> + * Note that access to this set MUST BE SYNCHRONIZED via 'lockObject' above. + * */ + private final Set<CombineObserver<R, ?>> hasLatestValue = new HashSet<CombineObserver<R, ?>>(); + + /** + * Ordered list of observers to combine. + * No synchronization is necessary as these can not be added or changed asynchronously. + */ + private final List<CombineObserver<R, ?>> observers = new LinkedList<CombineObserver<R, ?>>(); public Aggregator(FuncN<R> combineLatestFunction) { this.combineLatestFunction = combineLatestFunction; @@ -135,55 +163,53 @@ public Aggregator(FuncN<R> combineLatestFunction) { /** * Receive notification of a Observer starting (meaning we should require it for aggregation) * - * @param w + * @param w The observer to add. */ - synchronized <T> void addObserver(CombineObserver<R, T> w) { - observers.add(w); + <T> void addObserver(CombineObserver<R, T> w) { + observers.add(w); } /** * Receive notification of a Observer completing its iterations. * - * @param w + * @param w The observer that has completed. 
*/ - synchronized <T> void complete(CombineObserver<R, T> w) { - // store that this CombineLatestObserver is completed - completed.add(w); - // if all CombineObservers are completed, we mark the whole thing as completed - if (completed.size() == observers.size()) { - if (running.get()) { - // mark ourselves as done - Observer.onCompleted(); - // just to ensure we stop processing in case we receive more onNext/complete/error calls after this - running.set(false); + <T> void complete(CombineObserver<R, T> w) { + synchronized(lockObject) { + // store that this CombineLatestObserver is completed + completed.add(w); + // if all CombineObservers are completed, we mark the whole thing as completed + if (completed.size() == observers.size()) { + if (running.get()) { + // mark ourselves as done + observer.onCompleted(); + // just to ensure we stop processing in case we receive more onNext/complete/error calls after this + running.set(false); + } } } } /** * Receive error for a Observer. Throw the error up the chain and stop processing. - * - * @param w */ - synchronized <T> void error(CombineObserver<R, T> w, Exception e) { - Observer.onError(e); - /* tell ourselves to stop processing onNext events, event if the Observers don't obey the unsubscribe we're about to send */ - running.set(false); - /* tell all Observers to unsubscribe since we had an error */ + void error(Exception e) { + observer.onError(e); + /* tell all observers to unsubscribe since we had an error */ stop(); } /** - * Receive the next value from a Observer. + * Receive the next value from an observer. * <p> - * If we have received values from all Observers, trigger the combineLatest function, otherwise store the value and keep waiting. + * If we have received values from all observers, trigger the combineLatest function, otherwise store the value and keep waiting. 
* * @param w * @param arg */ <T> void next(CombineObserver<R, T> w, T arg) { - if (Observer == null) { - throw new RuntimeException("This shouldn't be running if a Observer isn't registered"); + if (observer == null) { + throw new RuntimeException("This shouldn't be running if an Observer isn't registered"); } /* if we've been 'unsubscribed' don't process anything further even if the things we're watching keep sending (likely because they are not responding to the unsubscribe call) */ @@ -194,15 +220,17 @@ <T> void next(CombineObserver<R, T> w, T arg) { // define here so the variable is out of the synchronized scope Object[] argsToCombineLatest = new Object[observers.size()]; - // we synchronize everything that touches receivedValues and the internal LinkedList objects - synchronized (this) { - // remember this as the last value for this Observer - lastValue.put(w, arg); - hasLastValue.add(w); + // we synchronize everything that touches latest values + synchronized (lockObject) { + // remember this as the latest value for this observer + latestValue.put(w, arg); + + // remember that this observer now has a latest value set + hasLatestValue.add(w); - // if all CombineLatestObservers in 'receivedValues' map have a value, invoke the combineLatestFunction + // if all observers in the 'observers' list have a value, invoke the combineLatestFunction for (CombineObserver<R, ?> rw : observers) { - if (!hasLastValue.contains(rw)) { + if (!hasLatestValue.contains(rw)) { // we don't have a value yet for each observer to combine, so we don't have a combined value yet either return; } @@ -210,48 +238,45 @@ <T> void next(CombineObserver<R, T> w, T arg) { // if we get to here this means all the queues have data int i = 0; for (CombineObserver<R, ?> _w : observers) { - argsToCombineLatest[i++] = lastValue.get(_w); + argsToCombineLatest[i++] = latestValue.get(_w); } } // if we did not return above from the synchronized block we can now invoke the combineLatestFunction with all of 
the args // we do this outside the synchronized block as it is now safe to call this concurrently and don't need to block other threads from calling // this 'next' method while another thread finishes calling this combineLatestFunction - Observer.onNext(combineLatestFunction.call(argsToCombineLatest)); + observer.onNext(combineLatestFunction.call(argsToCombineLatest)); } @Override - public Subscription call(Observer<R> Observer) { - if (this.Observer != null) { + public Subscription call(Observer<R> observer) { + if (this.observer != null) { throw new IllegalStateException("Only one Observer can subscribe to this Observable."); } - this.Observer = Observer; + this.observer = observer; - /* start the Observers */ + /* start the observers */ for (CombineObserver<R, ?> rw : observers) { rw.startWatching(); } return new Subscription() { - @Override public void unsubscribe() { stop(); } - }; } private void stop() { /* tell ourselves to stop processing onNext events */ running.set(false); - /* propogate to all Observers to unsubscribe */ + /* propogate to all observers to unsubscribe */ for (CombineObserver<R, ?> rw : observers) { if (rw.subscription != null) { rw.subscription.unsubscribe(); } } } - } public static class UnitTest { @@ -597,7 +622,7 @@ public void testAggregatorError() { verify(aObserver, never()).onCompleted(); verify(aObserver, times(1)).onNext("helloworld"); - a.error(r1, new RuntimeException("")); + a.error(new RuntimeException("")); a.next(r1, "hello"); a.next(r2, "again");
88a70199bfec4d92307bace5e569fd79faadd561
Vala
gstreamer-base-0.10: fix ownership of BaseTransform.transform_caps
c
https://github.com/GNOME/vala/
diff --git a/vapi/gstreamer-base-0.10.vapi b/vapi/gstreamer-base-0.10.vapi index 25d93e0b5d..4bcca86889 100644 --- a/vapi/gstreamer-base-0.10.vapi +++ b/vapi/gstreamer-base-0.10.vapi @@ -226,7 +226,7 @@ namespace Gst { [NoWrapper] public virtual Gst.FlowReturn transform (Gst.Buffer inbuf, Gst.Buffer outbuf); [NoWrapper] - public virtual unowned Gst.Caps transform_caps (Gst.PadDirection direction, Gst.Caps caps); + public virtual Gst.Caps transform_caps (Gst.PadDirection direction, Gst.Caps caps); [NoWrapper] public virtual Gst.FlowReturn transform_ip (Gst.Buffer buf); [NoWrapper] diff --git a/vapi/packages/gstreamer-base-0.10/gstreamer-base-0.10.metadata b/vapi/packages/gstreamer-base-0.10/gstreamer-base-0.10.metadata index efed3717a3..28e416a32f 100644 --- a/vapi/packages/gstreamer-base-0.10/gstreamer-base-0.10.metadata +++ b/vapi/packages/gstreamer-base-0.10/gstreamer-base-0.10.metadata @@ -20,6 +20,7 @@ gst_base_src_newsegment hidden="1" GstBaseTransform cheader_filename="gst/base/gstbasetransform.h" GstBaseTransform.* weak="0" gst_base_transform_get_unit_size.size is_out="1" +gst_base_transform_transform_caps transfer_ownership="1" gst_base_transform_transform_size.othersize is_out="1" GST_BASE_TRANSFORM_SINK_NAME hidden="1" GST_BASE_TRANSFORM_SRC_NAME hidden="1"
34baa742739f0d4371816ee24225398affcf2d39
hadoop
YARN-1718. Fix a couple isTerminals in Fair- Scheduler queue placement rules (Sandy Ryza)--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1569929 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/hadoop
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 1dfa87eee2b09..86f8a891c344b 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -14,6 +14,9 @@ Release 2.5.0 - UNRELEASED BUG FIXES + YARN-1718. Fix a couple isTerminals in Fair Scheduler queue placement rules + (Sandy Ryza) + Release 2.4.0 - UNRELEASED INCOMPATIBLE CHANGES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementRule.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementRule.java index ac0df50954680..6acba27479f49 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementRule.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/QueuePlacementRule.java @@ -162,7 +162,7 @@ protected String getQueueForApp(String requestedQueue, @Override public boolean isTerminal() { - return create; + return false; } } @@ -201,7 +201,7 @@ protected String getQueueForApp(String requestedQueue, String user, @Override public boolean isTerminal() { - return create; + return true; } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java index 5b5a51fa78518..fd807c9d7e147 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java @@ -106,6 +106,17 @@ public void testTerminalRuleInMiddle() throws Exception { parse(sb.toString()); } + @Test + public void testTerminals() throws Exception { + // Should make it through without an exception + StringBuffer sb = new StringBuffer(); + sb.append("<queuePlacementPolicy>"); + sb.append(" <rule name='secondaryGroupExistingQueue' create='true'/>"); + sb.append(" <rule name='default' create='false'/>"); + sb.append("</queuePlacementPolicy>"); + parse(sb.toString()); + } + private QueuePlacementPolicy parse(String str) throws Exception { // Read and parse the allocations file. DocumentBuilderFactory docBuilderFactory =
61466809552f96a83aa19446d4d59cecd0d2cad5
hadoop
YARN-3094. Reset timer for liveness monitors after- RM recovery. Contributed by Jun Gong (cherry picked from commit- 0af6a99a3fcfa4b47d3bcba5e5cc5fe7b312a152)--
c
https://github.com/apache/hadoop
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index bdddbf36b194d..f3bcb8edee02f 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -482,6 +482,9 @@ Release 2.7.0 - UNRELEASED YARN-3143. RM Apps REST API can return NPE or entries missing id and other fields (jlowe) + YARN-3094. Reset timer for liveness monitors after RM recovery. (Jun Gong + via jianhe) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java index c1825319a734b..4f587b348cf11 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/AbstractLivelinessMonitor.java @@ -59,6 +59,7 @@ public AbstractLivelinessMonitor(String name, Clock clock) { @Override protected void serviceStart() throws Exception { assert !stopped : "starting when already stopped"; + resetTimer(); checkerThread = new Thread(new PingChecker()); checkerThread.setName("Ping Checker"); checkerThread.start(); @@ -99,6 +100,13 @@ public synchronized void unregister(O ob) { running.remove(ob); } + public synchronized void resetTimer() { + long time = clock.getTime(); + for (O ob : running.keySet()) { + running.put(ob, time); + } + } + private class PingChecker implements Runnable { @Override diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java index 
4f242e93ae4e2..a93372a72fe72 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java @@ -564,12 +564,14 @@ protected void serviceStart() throws Exception { if(recoveryEnabled) { try { + LOG.info("Recovery started"); rmStore.checkVersion(); if (rmContext.isWorkPreservingRecoveryEnabled()) { rmContext.setEpoch(rmStore.getAndIncrementEpoch()); } RMState state = rmStore.loadState(); recover(state); + LOG.info("Recovery ended"); } catch (Exception e) { // the Exception from loadState() needs to be handled for // HA and we need to give up master status if we got fenced diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/AMLivelinessMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/AMLivelinessMonitor.java index 2c1f7f1f03e3d..76331bf7fec41 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/AMLivelinessMonitor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/AMLivelinessMonitor.java @@ -24,6 +24,7 @@ import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.util.AbstractLivelinessMonitor; +import org.apache.hadoop.yarn.util.Clock; import org.apache.hadoop.yarn.util.SystemClock; public class AMLivelinessMonitor extends 
AbstractLivelinessMonitor<ApplicationAttemptId> { @@ -35,6 +36,11 @@ public AMLivelinessMonitor(Dispatcher d) { this.dispatcher = d.getEventHandler(); } + public AMLivelinessMonitor(Dispatcher d, Clock clock) { + super("AMLivelinessMonitor", clock); + this.dispatcher = d.getEventHandler(); + } + public void serviceInit(Configuration conf) throws Exception { super.serviceInit(conf); int expireIntvl = conf.getInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestAMLivelinessMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestAMLivelinessMonitor.java new file mode 100644 index 0000000000000..e0e6aee022862 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestAMLivelinessMonitor.java @@ -0,0 +1,81 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt; + +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.service.Service; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.event.Dispatcher; +import org.apache.hadoop.yarn.server.resourcemanager.MockRM; +import org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore; +import org.apache.hadoop.yarn.util.ControlledClock; +import org.apache.hadoop.yarn.util.SystemClock; +import org.junit.Assert; +import org.junit.Test; + +import static org.mockito.Mockito.mock; + +public class TestAMLivelinessMonitor { + + @Test(timeout = 10000) + public void testResetTimer() throws Exception { + YarnConfiguration conf = new YarnConfiguration(); + UserGroupInformation.setConfiguration(conf); + conf.set(YarnConfiguration.RECOVERY_ENABLED, "true"); + conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName()); + conf.setBoolean(YarnConfiguration.RM_WORK_PRESERVING_RECOVERY_ENABLED, true); + conf.setInt(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 6000); + final ControlledClock clock = new ControlledClock(new SystemClock()); + clock.setTime(0); + MemoryRMStateStore memStore = new MemoryRMStateStore() { + @Override + public synchronized RMState loadState() throws Exception { + clock.setTime(8000); + return super.loadState(); + } + }; + memStore.init(conf); + final ApplicationAttemptId attemptId = mock(ApplicationAttemptId.class); + final Dispatcher dispatcher = mock(Dispatcher.class); + final boolean[] expired = new boolean[]{false}; + final AMLivelinessMonitor monitor = new AMLivelinessMonitor( + dispatcher, clock) { + @Override + protected void expire(ApplicationAttemptId id) { + Assert.assertEquals(id, attemptId); + expired[0] = true; + } + }; + monitor.register(attemptId); + MockRM rm = new MockRM(conf, memStore) { + @Override + protected 
AMLivelinessMonitor createAMLivelinessMonitor() { + return monitor; + } + }; + rm.start(); + // make sure that monitor has started + while (monitor.getServiceState() != Service.STATE.STARTED) { + Thread.sleep(100); + } + // expired[0] would be set to true without resetTimer + Assert.assertFalse(expired[0]); + rm.stop(); + } +}
6188550a4817e1f8f0f024034d0f0b5f03b6ecc3
spring-framework
ServletRequestAttributes skips well-known- immutable values when updating accessed session attributes--Issue: SPR-11738-
p
https://github.com/spring-projects/spring-framework
diff --git a/spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java b/spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java index 8ad2fdb9979d..20a17c3fcebd 100644 --- a/spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java +++ b/spring-web/src/main/java/org/springframework/web/context/request/ServletRequestAttributes.java @@ -248,7 +248,7 @@ protected void updateAccessedSessionAttributes() { String name = entry.getKey(); Object newValue = entry.getValue(); Object oldValue = this.session.getAttribute(name); - if (oldValue == newValue) { + if (oldValue == newValue && !isImmutableSessionAttribute(name, newValue)) { this.session.setAttribute(name, newValue); } } @@ -260,6 +260,23 @@ protected void updateAccessedSessionAttributes() { this.sessionAttributesToUpdate.clear(); } + /** + * Determine whether the given value is to be considered as an immutable session + * attribute, that is, doesn't have to be re-set via {@code session.setAttribute} + * since its value cannot meaningfully change internally. + * <p>The default implementation returns {@code true} for {@code String}, + * {@code Character}, {@code Boolean} and {@code Number} values. + * @param name the name of the attribute + * @param value the corresponding value to check + * @return {@code true} if the value is to be considered as immutable for the + * purposes of session attribute management; {@code false} otherwise + * @see #updateAccessedSessionAttributes() + */ + protected boolean isImmutableSessionAttribute(String name, Object value) { + return (value instanceof String || value instanceof Character || + value instanceof Boolean || value instanceof Number); + } + /** * Register the given callback as to be executed after session termination. 
* <p>Note: The callback object should be serializable in order to survive diff --git a/spring-web/src/test/java/org/springframework/web/context/request/ServletRequestAttributesTests.java b/spring-web/src/test/java/org/springframework/web/context/request/ServletRequestAttributesTests.java index 2e5688857f82..226b05d3d1ef 100644 --- a/spring-web/src/test/java/org/springframework/web/context/request/ServletRequestAttributesTests.java +++ b/spring-web/src/test/java/org/springframework/web/context/request/ServletRequestAttributesTests.java @@ -1,5 +1,5 @@ /* - * Copyright 2002-2013 the original author or authors. + * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,10 +17,12 @@ package org.springframework.web.context.request; import java.io.Serializable; - +import java.math.BigInteger; import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpSession; import org.junit.Test; + import org.springframework.mock.web.test.MockHttpServletRequest; import org.springframework.mock.web.test.MockHttpSession; @@ -39,23 +41,12 @@ public class ServletRequestAttributesTests { private static final Serializable VALUE = new Serializable() { }; + @Test(expected = IllegalArgumentException.class) public void ctorRejectsNullArg() throws Exception { new ServletRequestAttributes(null); } - @Test - public void updateAccessedAttributes() throws Exception { - MockHttpSession session = new MockHttpSession(); - session.setAttribute(KEY, VALUE); - MockHttpServletRequest request = new MockHttpServletRequest(); - request.setSession(session); - ServletRequestAttributes attrs = new ServletRequestAttributes(request); - Object value = attrs.getAttribute(KEY, RequestAttributes.SCOPE_SESSION); - assertSame(VALUE, value); - attrs.requestCompleted(); - } - @Test public void setRequestScopedAttribute() throws Exception { MockHttpServletRequest 
request = new MockHttpServletRequest(); @@ -162,4 +153,64 @@ public void removeSessionScopedAttributeDoesNotForceCreationOfSession() throws E verify(request).getSession(false); } + @Test + public void updateAccessedAttributes() throws Exception { + HttpServletRequest request = mock(HttpServletRequest.class); + HttpSession session = mock(HttpSession.class); + when(request.getSession(anyBoolean())).thenReturn(session); + when(session.getAttribute(KEY)).thenReturn(VALUE); + + ServletRequestAttributes attrs = new ServletRequestAttributes(request); + assertSame(VALUE, attrs.getAttribute(KEY, RequestAttributes.SCOPE_SESSION)); + attrs.requestCompleted(); + + verify(session, times(2)).getAttribute(KEY); + verify(session).setAttribute(KEY, VALUE); + verifyNoMoreInteractions(session); + } + + @Test + public void skipImmutableString() { + doSkipImmutableValue("someString"); + } + + @Test + public void skipImmutableCharacter() { + doSkipImmutableValue(new Character('x')); + } + + @Test + public void skipImmutableBoolean() { + doSkipImmutableValue(Boolean.TRUE); + } + + @Test + public void skipImmutableInteger() { + doSkipImmutableValue(new Integer(1)); + } + + @Test + public void skipImmutableFloat() { + doSkipImmutableValue(new Float(1.1)); + } + + @Test + public void skipImmutableBigInteger() { + doSkipImmutableValue(new BigInteger("1")); + } + + private void doSkipImmutableValue(Object immutableValue) { + HttpServletRequest request = mock(HttpServletRequest.class); + HttpSession session = mock(HttpSession.class); + when(request.getSession(anyBoolean())).thenReturn(session); + when(session.getAttribute(KEY)).thenReturn(immutableValue); + + ServletRequestAttributes attrs = new ServletRequestAttributes(request); + attrs.getAttribute(KEY, RequestAttributes.SCOPE_SESSION); + attrs.requestCompleted(); + + verify(session, times(2)).getAttribute(KEY); + verifyNoMoreInteractions(session); + } + }
9a40de8e6c1974d4ae187b181055ecd4b1cc93da
camel
Fixed unit test having problem on Windows- deleting files for cleanup.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@888416 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/camel
diff --git a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrAuthTestBase.java b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrAuthTestBase.java index 737ee4a243855..4dc522016fb1f 100644 --- a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrAuthTestBase.java +++ b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrAuthTestBase.java @@ -17,8 +17,6 @@ package org.apache.camel.component.jcr; import java.io.File; -import java.io.IOException; - import javax.jcr.Repository; import javax.jcr.SimpleCredentials; import javax.naming.Context; @@ -30,7 +28,6 @@ import org.apache.jackrabbit.api.security.user.UserManager; import org.apache.jackrabbit.core.SessionImpl; import org.apache.jackrabbit.core.TransientRepository; -import org.apache.jackrabbit.core.fs.local.FileUtil; import org.apache.jackrabbit.core.security.authorization.JackrabbitAccessControlList; import org.junit.Before; @@ -49,14 +46,11 @@ public abstract class JcrAuthTestBase extends CamelTestSupport { private Repository repository; - private void clean() throws IOException { - File[] files = {new File("target/repository_with_auth"), - new File("derby.log") }; - for (File file : files) { - if (file.exists()) { - FileUtil.delete(file); - } - } + @Override + @Before + public void setUp() throws Exception { + deleteDirectory("target/repository"); + super.setUp(); } @Override @@ -106,11 +100,4 @@ protected Repository getRepository() { return repository; } - @Override - @Before - public void setUp() throws Exception { - clean(); - super.setUp(); - } - } \ No newline at end of file diff --git a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrNodePathCreationTest.java b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrNodePathCreationTest.java index bf0e7c3a235b6..6aaef3822793c 100644 --- a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrNodePathCreationTest.java +++ 
b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrNodePathCreationTest.java @@ -16,9 +16,6 @@ */ package org.apache.camel.component.jcr; -import java.io.File; -import java.io.IOException; - import javax.jcr.Node; import javax.jcr.Repository; import javax.jcr.Session; @@ -29,7 +26,6 @@ import org.apache.camel.builder.RouteBuilder; import org.apache.camel.test.junit4.CamelTestSupport; import org.apache.jackrabbit.core.TransientRepository; -import org.apache.jackrabbit.core.fs.local.FileUtil; import org.junit.Before; import org.junit.Test; @@ -40,20 +36,10 @@ public class JcrNodePathCreationTest extends CamelTestSupport { @Override @Before public void setUp() throws Exception { - clean(); + deleteDirectory("target/repository"); super.setUp(); } - private void clean() throws IOException { - File[] files = {new File("target/repository"), new File("target/repository.xml"), - new File("derby.log")}; - for (File file : files) { - if (file.exists()) { - FileUtil.delete(file); - } - } - } - @Test public void testJcrNodePathCreation() throws Exception { Exchange exchange = createExchangeWithBody("<body/>"); diff --git a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrRouteTest.java b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrRouteTest.java index ac2d197973c4d..fdfb35948af49 100644 --- a/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrRouteTest.java +++ b/components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrRouteTest.java @@ -16,9 +16,6 @@ */ package org.apache.camel.component.jcr; -import java.io.File; -import java.io.IOException; - import javax.jcr.Node; import javax.jcr.Repository; import javax.jcr.Session; @@ -29,7 +26,6 @@ import org.apache.camel.builder.RouteBuilder; import org.apache.camel.test.junit4.CamelTestSupport; import org.apache.jackrabbit.core.TransientRepository; -import org.apache.jackrabbit.core.fs.local.FileUtil; import org.junit.Before; import 
org.junit.Test; @@ -40,20 +36,10 @@ public class JcrRouteTest extends CamelTestSupport { @Override @Before public void setUp() throws Exception { - clean(); + deleteDirectory("target/repository"); super.setUp(); } - private void clean() throws IOException { - File[] files = {new File("target/repository"), new File("target/repository.xml"), - new File("derby.log")}; - for (File file : files) { - if (file.exists()) { - FileUtil.delete(file); - } - } - } - @Test public void testJcrRoute() throws Exception { Exchange exchange = createExchangeWithBody("<hello>world!</hello>");
646aa624eaddb4f543a1019f2d5ef49795cf66ce
ReactiveX-RxJava
Fixed issue -417--
c
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/operators/OperationMap.java b/rxjava-core/src/main/java/rx/operators/OperationMap.java index 9eb2520420..940147b0b8 100644 --- a/rxjava-core/src/main/java/rx/operators/OperationMap.java +++ b/rxjava-core/src/main/java/rx/operators/OperationMap.java @@ -15,12 +15,16 @@ */ package rx.operators; -import static org.junit.Assert.*; -import static org.mockito.Matchers.*; -import static org.mockito.Mockito.*; +import static org.junit.Assert.assertEquals; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import java.util.HashMap; import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import org.junit.Before; @@ -33,6 +37,7 @@ import rx.Observable.OnSubscribeFunc; import rx.Observer; import rx.Subscription; +import rx.concurrency.Schedulers; import rx.util.functions.Func1; import rx.util.functions.Func2; @@ -59,17 +64,12 @@ public final class OperationMap { * @return a sequence that is the result of applying the transformation function to each item in the input sequence. */ public static <T, R> OnSubscribeFunc<R> map(final Observable<? extends T> sequence, final Func1<? super T, ? extends R> func) { - return new OnSubscribeFunc<R>() { - @Override - public Subscription onSubscribe(Observer<? super R> observer) { - return new MapObservable<T, R>(sequence, new Func2<T, Integer, R>() { + return mapWithIndex(sequence, new Func2<T, Integer, R>() { @Override public R call(T value, @SuppressWarnings("unused") Integer unused) { return func.call(value); } - }).onSubscribe(observer); - } - }; + }); } /** @@ -136,7 +136,8 @@ public MapObservable(Observable<? extends T> sequence, Func2<? super T, Integer, @Override public Subscription onSubscribe(final Observer<? 
super R> observer) { - return sequence.subscribe(new Observer<T>() { + final SafeObservableSubscription subscription = new SafeObservableSubscription(); + return subscription.wrap(sequence.subscribe(new SafeObserver<T>(subscription, new Observer<T>() { @Override public void onNext(T value) { observer.onNext(func.call(value, index)); @@ -152,7 +153,7 @@ public void onError(Throwable ex) { public void onCompleted() { observer.onCompleted(); } - }); + }))); } } @@ -366,6 +367,41 @@ public String call(String s) { assertEquals(1, c2.get()); } + @Test(expected = IllegalArgumentException.class) + public void testMapWithIssue417() { + Observable.from(1).observeOn(Schedulers.threadPoolForComputation()) + .map(new Func1<Integer, Integer>() { + public Integer call(Integer arg0) { + throw new IllegalArgumentException("any error"); + } + }).toBlockingObservable().single(); + } + + @Test + public void testMapWithErrorInFuncAndThreadPoolScheduler() throws InterruptedException { + // The error will throw in one of threads in the thread pool. + // If map does not handle it, the error will disappear. + // so map needs to handle the error by itself. + final CountDownLatch latch = new CountDownLatch(1); + Observable<String> m = Observable.from("one") + .observeOn(Schedulers.threadPoolForComputation()) + .map(new Func1<String, String>() { + public String call(String arg0) { + try { + throw new IllegalArgumentException("any error"); + } finally { + latch.countDown(); + } + } + }); + + m.subscribe(stringObserver); + latch.await(); + InOrder inorder = inOrder(stringObserver); + inorder.verify(stringObserver, times(1)).onError(any(IllegalArgumentException.class)); + inorder.verifyNoMoreInteractions(); + } + private static Map<String, String> getMap(String prefix) { Map<String, String> m = new HashMap<String, String>(); m.put("firstName", prefix + "First");
a57debf4fe3380f7ad9d46db90d083d979541ebb
intellij-community
fragments with differences in comments only are- considered equivalent--
a
https://github.com/JetBrains/intellij-community
diff --git a/codeInsight/openapi/com/intellij/codeInsight/PsiEquivalenceUtil.java b/codeInsight/openapi/com/intellij/codeInsight/PsiEquivalenceUtil.java index 2c103ee2b9ab6..ed5db910a0736 100644 --- a/codeInsight/openapi/com/intellij/codeInsight/PsiEquivalenceUtil.java +++ b/codeInsight/openapi/com/intellij/codeInsight/PsiEquivalenceUtil.java @@ -7,6 +7,7 @@ import com.intellij.psi.PsiElement; import com.intellij.psi.PsiReference; import com.intellij.psi.PsiWhiteSpace; +import com.intellij.psi.PsiComment; import com.intellij.psi.util.PsiTreeUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -61,7 +62,7 @@ private static PsiElement[] getFilteredChildren(PsiElement element1) { PsiElement[] children1 = element1.getChildren(); ArrayList<PsiElement> array = new ArrayList<PsiElement>(); for (PsiElement child : children1) { - if (!(child instanceof PsiWhiteSpace)) { + if (!(child instanceof PsiWhiteSpace) && !(child instanceof PsiComment)) { array.add(child); } } @@ -95,7 +96,7 @@ private static void addRangeDuplicates(final PsiElement scope, i = j + 1; continue NextChild; } - next = PsiTreeUtil.skipSiblingsForward(next, new Class[]{PsiWhiteSpace.class}); + next = PsiTreeUtil.skipSiblingsForward(next, PsiWhiteSpace.class); } while (true);
3007d21fd5599c913b4c0a37431e297ea490cac7
Search_api
Issue #2130819 by drunken monkey, Bojhan: Added UI improvements for the "View" tabs.
a
https://github.com/lucidworks/drupal_search_api
diff --git a/CHANGELOG.txt b/CHANGELOG.txt index 7a124cf5..23fa74cf 100644 --- a/CHANGELOG.txt +++ b/CHANGELOG.txt @@ -1,5 +1,6 @@ Search API 1.x, dev (xx/xx/xxxx): --------------------------------- +- #2130819 by drunken monkey, Bojhan: Added UI improvements for the "View" tabs. - #2152327 by sirtet, miro_dietiker: Fixed typo in help text for drush sapi-c. - #2144531 by drunken monkey: Fixed cloning of queries to clone filters, too. - #2100671 by drunken monkey: Fixed stopwords processor to ignore missing diff --git a/includes/server_entity.inc b/includes/server_entity.inc index b13d7328..be2a568c 100644 --- a/includes/server_entity.inc +++ b/includes/server_entity.inc @@ -374,4 +374,23 @@ class SearchApiServer extends Entity { return $this->proxy->search($query); } + /** + * Retrieves additional information for the server, if available. + * + * Retrieving such information is only supported if the service class supports + * the "search_api_service_extra" feature. + * + * @return array + * An array containing additional, service class-specific information about + * the server. + * + * @see SearchApiAbstractService::getExtraInformation() + */ + public function getExtraInformation() { + if ($this->proxy->supportsFeature('search_api_service_extra')) { + return $this->proxy->getExtraInformation(); + } + return array(); + } + } diff --git a/includes/service.inc b/includes/service.inc index 6442f9b8..b8d8ca88 100644 --- a/includes/service.inc +++ b/includes/service.inc @@ -370,6 +370,30 @@ abstract class SearchApiAbstractService implements SearchApiServiceInterface { return $output ? "<dl>\n$output</dl>" : ''; } + /** + * Returns additional, service-specific information about this server. + * + * If a service class implements this method and supports the + * "search_api_service_extra" option, this method will be used to add extra + * information to the server's "View" tab. 
+ * + * In the default theme implementation this data will be output in a table + * with two columns along with other, generic information about the server. + * + * @return array + * An array of additional server information, with each piece of information + * being an associative array with the following keys: + * - label: The human-readable label for this data. + * - info: The information, as HTML. + * - status: (optional) The status associated with this information. One of + * "info", "ok", "warning" or "error". Defaults to "info". + * + * @see supportsFeature() + */ + public function getExtraInformation() { + return array(); + } + /** * Implements SearchApiServiceInterface::__construct(). * diff --git a/search_api.admin.css b/search_api.admin.css index 29d3b5e4..b82798df 100644 --- a/search_api.admin.css +++ b/search_api.admin.css @@ -15,10 +15,34 @@ vertical-align: top; } +/* + * VIEW SERVER + */ + +.search-api-server-summary ul.inline { + margin: 0; +} + +.search-api-server-summary ul.inline li { + padding-left: 0; +} + +/* + * VIEW INDEX + */ +.search-api-limit, +.search-api-batch-size { + text-align: center; +} + +.search-api-index-status .progress .filled { + background: #0074BD none; +} + /* * DROPBUTTONS * - * (Largely copied from D8's dropbutton.css. + * (Largely copied from D8's dropbutton.css.) 
*/ /** diff --git a/search_api.admin.inc b/search_api.admin.inc index bc950e47..51071b12 100644 --- a/search_api.admin.inc +++ b/search_api.admin.inc @@ -80,7 +80,7 @@ function search_api_admin_overview() { } $links = array_merge($links, menu_contextual_links('search-api-server', $pre_server, array($server->machine_name))); $row[] = theme('search_api_dropbutton', array('links' => $links)); - $rows[] = $row; + $rows[] = _search_api_deep_copy($row); if (!empty($indexes[$server->machine_name])) { foreach ($indexes[$server->machine_name] as $index) { @@ -90,7 +90,7 @@ function search_api_admin_overview() { if ($show_config_status) { $row[] = theme('entity_status', array('status' => $index->status)); } - $row[] = ''; + $row[] = ' '; $row[] = $t_index; $row[] = l($index->name, $url); $links = array(); @@ -103,7 +103,7 @@ function search_api_admin_overview() { } $links = array_merge($links, menu_contextual_links('search-api-index', $pre_index, array($index->machine_name))); $row[] = theme('search_api_dropbutton', array('links' => $links)); - $rows[] = $row; + $rows[] = _search_api_deep_copy($row); } } } @@ -119,7 +119,7 @@ function search_api_admin_overview() { $row[] = l($index->name, $url); $links = menu_contextual_links('search-api-index', $pre_index, array($index->machine_name)); $row[] = theme('search_api_dropbutton', array('links' => $links)); - $rows[] = $row; + $rows[] = _search_api_deep_copy($row); } } @@ -365,7 +365,7 @@ function search_api_admin_server_view(SearchApiServer $server, $action = NULL) { } else { $ret = drupal_get_form('search_api_admin_confirm', 'server', $action, $server); - if ($ret) { + if (!empty($ret['actions'])) { return $ret; } } @@ -374,7 +374,18 @@ function search_api_admin_server_view(SearchApiServer $server, $action = NULL) { drupal_set_title(search_api_admin_item_title($server)); $class = search_api_get_service_info($server->class); $options = $server->viewSettings(); - return array( + $indexes = array(); + foreach 
(search_api_index_load_multiple(FALSE, array('server' => $server->machine_name)) as $index) { + if (!$indexes) { + $indexes['#theme'] = 'links'; + $indexes['#attributes']['class'] = array('inline'); + } + $indexes['#links'][] = array( + 'title' => $index->name, + 'href' => 'admin/config/search/search_api/index/' . $index->machine_name, + ); + } + $render['view'] = array( '#theme' => 'search_api_server', '#id' => $server->id, '#name' => $server->name, @@ -383,9 +394,16 @@ function search_api_admin_server_view(SearchApiServer $server, $action = NULL) { '#enabled' => $server->enabled, '#class_name' => $class['name'], '#class_description' => $class['description'], + '#indexes' => $indexes, '#options' => $options, '#status' => $server->status, + '#extra' => $server->getExtraInformation(), ); + $render['#attached']['css'][] = drupal_get_path('module', 'search_api') . '/search_api.admin.css'; + if ($server->enabled) { + $render['form'] = drupal_get_form('search_api_server_status_form', $server); + } + return $render; } /** @@ -400,62 +418,140 @@ function search_api_admin_server_view(SearchApiServer $server, $action = NULL) { * - enabled: Boolean indicating whether the server is enabled. * - class_name: The used service class' display name. * - class_description: The used service class' description. + * - indexes: A list of indexes associated with this server, either as an HTML + * string or a render array. * - options: An HTML string or render array containing information about the * server's service-specific settings. * - status: The entity configuration status (in database, in code, etc.). + * - extra: An associative array of additional server information, with the + * keys being the labels and the values being the information. + * + * @return string + * HTML for displaying a server. 
+ * + * @ingroup themeable */ function theme_search_api_server(array $variables) { - extract($variables); + $machine_name = $variables['machine_name']; + $description = $variables['description']; + $enabled = $variables['enabled']; + $class_name = $variables['class_name']; + $indexes = $variables['indexes']; + $options = $variables['options']; + $status = $variables['status']; + $extra = $variables['extra']; + + // First, output the index description if there is one set. $output = ''; - $output .= '<h3>' . check_plain($name) . '</h3>' . "\n"; + if ($description) { + $output .= '<p class="description">' . nl2br(check_plain($description)) . '</p>'; + } - $output .= '<dl>' . "\n"; + // Then, display a table summarizing the index's status. + $rows = array(); + // Create a row template with references so we don't have to deal with the + // complicated structure for each individual row. + $row = array( + 'data' => array( + array('header' => TRUE), + '', + ), + 'class' => array(''), + ); + $label = & $row['data'][0]['data']; + $info = & $row['data'][1]; + $class = & $row['class'][0]; - $output .= '<dt>' . t('Status') . '</dt>' . "\n"; - $output .= '<dd>'; if ($enabled) { - $output .= t('enabled (!disable_link)', array('!disable_link' => l(t('disable'), 'admin/config/search/search_api/server/' . $machine_name . '/disable'))); + $class = 'ok'; + $info = t('enabled (!disable_link)', array('!disable_link' => l(t('disable'), 'admin/config/search/search_api/server/' . $machine_name . '/disable'))); } else { - $output .= t('disabled (!enable_link)', array('!enable_link' => l(t('enable'), 'admin/config/search/search_api/server/' . $machine_name . '/enable', array('query' => array('token' => drupal_get_token($machine_name)))))); + $class = 'warning'; + $info = t('disabled (!enable_link)', array('!enable_link' => l(t('enable'), 'admin/config/search/search_api/server/' . $machine_name . 
'/enable', array('query' => array('token' => drupal_get_token($machine_name)))))); } - $output .= '</dd>' . "\n"; + $label = t('Status'); + $rows[] = _search_api_deep_copy($row); + $class = ''; - $output .= '<dt>' . t('Machine name') . '</dt>' . "\n"; - $output .= '<dd>' . check_plain($machine_name) . '</dd>' . "\n"; + $label = t('Service class'); + if (module_exists('help')) { + $url_options['fragment'] = drupal_clean_css_identifier('search_api_solr_service'); + $info = l($class_name, 'admin/help/search_api', $url_options); + } + else { + $info = check_plain($class_name); + } + $rows[] = _search_api_deep_copy($row); - if (!empty($description)) { - $output .= '<dt>' . t('Description') . '</dt>' . "\n"; - $output .= '<dd>' . nl2br(check_plain($description)) . '</dd>' . "\n"; + if ($indexes) { + $label = t('Search indexes'); + $info = render($indexes); + $rows[] = _search_api_deep_copy($row); } - if (!empty($class_name)) { - $output .= '<dt>' . t('Service class') . '</dt>' . "\n"; - $output .= '<dd><em>' . check_plain($class_name) . '</em>'; - if (!empty($class_description)) { - $output .= '<p class="description">' . $class_description . '</p>'; - } - $output .= '</dd>' . "\n"; + if ($options) { + $label = t('Service options'); + $info = render($options); + $rows[] = _search_api_deep_copy($row); } - if (!empty($options)) { - $output .= '<dt>' . t('Service options') . '</dt>' . "\n"; - $output .= '<dd>' . "\n"; - $output .= render($options); - $output .= '</dd>' . "\n"; + if ($status != ENTITY_CUSTOM) { + $label = t('Configuration status'); + $info = theme('entity_status', array('status' => $status)); + $class = ($status == ENTITY_OVERRIDDEN) ? 'warning' : 'ok'; + $rows[] = _search_api_deep_copy($row); + $class = ''; } - $output .= '<dt>' . t('Configuration status') . '</dt>' . "\n"; - $output .= '<dd>' . "\n"; - $output .= theme('entity_status', array('status' => $status)); - $output .= '</dd>' . 
"\n"; + if ($extra) { + foreach ($extra as $information) { + $label = $information['label']; + $info = $information['info']; + $class = !empty($information['status']) ? $information['status'] : ''; + $rows[] = _search_api_deep_copy($row); + } + } - $output .= '</dl>'; + $theme['rows'] = $rows; + $theme['attributes']['class'][] = 'search-api-summary'; + $theme['attributes']['class'][] = 'search-api-server-summary'; + $theme['attributes']['class'][] = 'system-status-report'; + $output .= theme('table', $theme); return $output; } +/** + * Form constructor for completely clearing a server. + * + * @param SearchApiServer $server + * The server for which the form is displayed. + * + * @ingroup forms + * + * @see search_api_server_status_form_submit() + */ +function search_api_server_status_form(array $form, array &$form_state, SearchApiServer $server) { + $form_state['server'] = $server; + + $form['clear'] = array( + '#type' => 'submit', + '#value' => t('Delete all indexed data on this server'), + ); + + return $form; +} + +/** +* Form submission handler for search_api_server_status_form(). +*/ +function search_api_server_status_form_submit(array $form, array &$form_state) { + $server_id = $form_state['server']->machine_name; + $form_state['redirect'] = "admin/config/search/search_api/server/$server_id/clear"; +} + /** * Form constructor for editing a server's settings. 
* @@ -728,160 +824,286 @@ function search_api_admin_index_view(SearchApiIndex $index, $action = NULL) { } else { $ret = drupal_get_form('search_api_admin_confirm', 'index', $action, $index); - if ($ret) { + if (!empty($ret['actions'])) { return $ret; } } } - return drupal_get_form('search_api_admin_index_status_form', $index); + $status = search_api_index_status($index); + $ret['view'] = array( + '#theme' => 'search_api_index', + '#id' => $index->id, + '#name' => $index->name, + '#machine_name' => $index->machine_name, + '#description' => $index->description, + '#item_type' => $index->item_type, + '#enabled' => $index->enabled, + '#server' => $index->server(), + '#options' => $index->options, + '#fields' => $index->getFields(), + '#indexed_items' => $status['indexed'], + '#on_server' => _search_api_get_items_on_server($index), + '#total_items' => $status['total'], + '#status' => $index->status, + '#read_only' => $index->read_only, + ); + if ($index->enabled && !$index->read_only) { + $ret['form'] = drupal_get_form('search_api_admin_index_status_form', $index, $status); + } + return $ret; } /** - * Form function for displaying an index status form. + * Returns HTML for a search index. * - * @param SearchApiIndex $index - * The index whose status should be displayed. + * @param array $variables + * An associative array containing: + * - id: The index's id. + * - name: The index' name. + * - machine_name: The index' machine name. + * - description: The index' description. + * - item_type: The type of items stored in this index. + * - enabled: Boolean indicating whether the index is enabled. + * - server: The server this index currently rests on, if any. + * - options: The index' options, like cron limit. + * - fields: All indexed fields of the index. + * - indexed_items: The number of items already indexed in their latest + * version on this index. + * - on_server: The number of items actually indexed on the server. 
+ * - total_items: The total number of items that have to be indexed for this + * index. + * - status: The entity configuration status (in database, in code, etc.). + * - read_only: Boolean indicating whether this index is read only. + * + * @return string + * HTML for a search index. + * + * @ingroup themeable */ -function search_api_admin_index_status_form(array $form, array &$form_state, SearchApiIndex $index) { - $enabled = !empty($index->enabled); - $server = $index->server(); - - $form['#attached']['css'][] = drupal_get_path('module', 'search_api') . '/search_api.admin.css'; - $form_state['index'] = $index; +function theme_search_api_index(array $variables) { + $machine_name = $variables['machine_name']; + $description = $variables['description']; + $enabled = $variables['enabled']; + $item_type = $variables['item_type']; + $server = $variables['server']; + $options = $variables['options']; + $status = $variables['status']; + $indexed_items = $variables['indexed_items']; + $on_server = $variables['on_server']; + $total_items = $variables['total_items']; + + // First, output the index description if there is one set. + $output = ''; - if (!empty($index->description)) { - $form['description']['#markup'] = '<p>' . nl2br(check_plain($index->description)) . '</p>'; + if ($description) { + $output .= '<p class="description">' . nl2br(check_plain($description)) . '</p>'; } - $form['info']['#prefix'] = '<dl>'; - $form['info']['#suffix'] = '</dl>'; + // Then, display a table summarizing the index's status. + $rows = array(); + // Create a row template with references so we don't have to deal with the + // complicated structure for each individual row. 
+ $row = array( + 'data' => array( + array('header' => TRUE), + '', + ), + 'class' => array(''), + ); + $label = &$row['data'][0]['data']; + $info = &$row['data'][1]; + $class = &$row['class'][0]; + + $class = 'warning'; if ($enabled) { - $status_message = t('enabled (!disable_link)', array('!disable_link' => l(t('disable'), 'admin/config/search/search_api/index/' . $index->machine_name . '/disable'))); + $info = t('enabled (!disable_link)', array('!disable_link' => l(t('disable'), 'admin/config/search/search_api/index/' . $machine_name . '/disable'))); + $class = 'ok'; } - elseif (!empty($server->enabled)) { - $status_message = t('disabled (!enable_link)', array('!enable_link' => l(t('enable'), 'admin/config/search/search_api/index/' . $index->machine_name . '/enable', array('query' => array('token' => drupal_get_token($index->machine_name)))))); + elseif ($server) { + $info = t('disabled (!enable_link)', array('!enable_link' => l(t('enable'), 'admin/config/search/search_api/index/' . $machine_name . '/enable', array('query' => array('token' => drupal_get_token($machine_name)))))); } else { - $status_message = t('disabled'); + $info = t('disabled'); } - $form['info']['status']['#markup'] = '<dt>' . t('Status') . '</dt>' . "\n"; - $form['info']['status']['#markup'] .= '<dd>' . $status_message . '</dd>' . "\n"; + $label = t('Status'); + $rows[] = _search_api_deep_copy($row); + $class = ''; - $type = search_api_get_item_type_info($index->item_type); - $form['info']['type']['#markup'] = '<dt>' . t('Item type') . '</dt>' . "\n"; - $form['info']['type']['#markup'] .= '<dd>' . check_plain($type['name']) . '</dd>' . "\n"; + $label = t('Item type'); + $type = search_api_get_item_type_info($item_type); + $item_type = !empty($type['name']) ? $type['name'] : $item_type; + $info = check_plain($item_type); + $rows[] = _search_api_deep_copy($row); - if (!empty($server)) { - $form['info']['server']['#markup'] = '<dt>' . t('Server') . '</dt>' . 
"\n"; - $form['info']['server']['#markup'] .= '<dd>' . l($server->name, 'admin/config/search/search_api/server/' . $server->machine_name); - if (!empty($server->description)) { - $form['info']['server']['#markup'] .= '<p class="description">' . nl2br(check_plain($server->description)) . '</p>'; - } - $form['info']['server']['#markup'] .= '</dd>' . "\n"; + if ($server) { + $label = t('Server'); + $info = l($server->name, 'admin/config/search/search_api/server/' . $server->machine_name); + $rows[] = _search_api_deep_copy($row); } - $form['info']['config_status']['#markup'] = '<dt>' . t('Configuration status') . '</dt>' . "\n"; - $form['info']['config_status']['#markup'] .= '<dd>' . theme('entity_status', array('status' => $index->status)) . '</dd>' . "\n"; + if ($enabled) { + $options += array('cron_limit' => SEARCH_API_DEFAULT_CRON_LIMIT); + if ($options['cron_limit']) { + $class = 'ok'; + $info = format_plural( + $options['cron_limit'], + 'During cron runs, 1 item will be indexed per batch.', + 'During cron runs, @count items will be indexed per batch.' + ); + } + else { + $class = 'warning'; + $info = t('No items will be indexed during cron runs.'); + } + $label = t('Cron batch size'); + $rows[] = _search_api_deep_copy($row); - if ($index->read_only) { - $message = t('The index is currently in read-only mode. No new items will be indexed, nor will old ones be deleted.'); - $form['info']['read_only']['#markup'] = '<dt>' . t('Read only') . '</dt>' . "\n"; - $form['info']['read_only']['#markup'] .= '<dd>' . $message . '</dd>'; + $theme = array( + 'percent' => (int) (100 * $indexed_items / $total_items), + 'message' => t('@indexed/@total indexed', array('@indexed' => $indexed_items, '@total' => $total_items)), + ); + $output .= '<h3>' . t('Index status') . '</h3>'; + $output .= '<div class="search-api-index-status">' . theme('progress_bar', $theme) . 
'</div>'; + + if ($on_server != $total_items) { + if ($on_server < $indexed_items || $on_server > $total_items) { + $vars = array(); + $vars['@num'] = $on_server; + if ($on_server < $indexed_items) { + $vars['@diff'] = $indexed_items - $on_server; + $info = t('The index status on the search server has diverged (@num items indexed – @diff less than required). You are strongly advised to mark the index for re-indexing using the form below.', $vars); + } + else { + $vars['@diff'] = $on_server - $total_items; + $info = t('The index status on the search server has diverged (@num items indexed – @diff more than present). You are strongly advised to clear the index using the form below.', $vars); + } + $class = 'error'; + } + else { + $info = format_plural($on_server, 'There is 1 item indexed on the server for this index.', 'There are @count items indexed on the server for this index.'); + $class = 'warning'; + } + $label = t('Server index status'); + $rows[] = _search_api_deep_copy($row); + } + } - return $form; + if ($status != ENTITY_CUSTOM) { + $label = t('Configuration status'); + $info = theme('entity_status', array('status' => $status)); + $class = ($status == ENTITY_OVERRIDDEN) ? 'warning' : 'ok'; + $rows[] = _search_api_deep_copy($row); } - $status = search_api_index_status($index); + $theme['rows'] = $rows; + $theme['attributes']['class'][] = 'search-api-summary'; + $theme['attributes']['class'][] = 'search-api-index-summary'; + $theme['attributes']['class'][] = 'system-status-report'; + $output .= theme('table', $theme); + + return $output; +} + +/** + * Form constructor for an index status form. + * + * Should only be used for enabled indexes which aren't read-only. + * + * @param SearchApiIndex $index + * The index whose status should be displayed. + * @param array $status + * The indexing status of the index, as returned by search_api_index_status(). 
+ * + * @ingroup forms + * + * @see search_api_admin_index_status_form_validate() + * @see search_api_admin_index_status_form_submit() + */ +function search_api_admin_index_status_form(array $form, array &$form_state, SearchApiIndex $index, array $status) { + $form['#attached']['css'][] = drupal_get_path('module', 'search_api') . '/search_api.admin.css'; + $form_state['index'] = $index; + $form['index'] = array( '#type' => 'fieldset', - '#title' => t('Indexing status'), - '#description' => t('This index is disabled. No information about the indexing status is available.'), - '#collapsible' => TRUE, + '#title' => t('Index now'), ); - if ($enabled) { - $all = ($status['indexed'] == $status['total']); - if ($all) { - $form['index']['#description'] = t('All items have been indexed (@total / @total).', - array('@total' => $status['total'])); - } - elseif (!$status['indexed']) { - $form['index']['#description'] = t('All items still need to be indexed (@total total).', - array('@total' => $status['total'])); - } - else { - $percentage = (int) (100 * $status['indexed'] / $status['total']); - $form['index']['#description'] = t('About @percentage% of all items have been indexed in their latest version (@indexed / @total).', - array('@indexed' => $status['indexed'], '@total' => $status['total'], '@percentage' => $percentage)); - } + $form['index']['#attributes']['class'][] = 'container-inline'; - if (!$all) { - $form['index']['index'] = array( - '#type' => 'fieldset', - '#title' => t('Index now'), - '#collapsible' => TRUE, - ); - $form['index']['index']['settings'] = array( - '#type' => 'fieldset', - '#title' => t('Advanced settings'), - '#collapsible' => TRUE, - '#collapsed' => TRUE, - ); - $form['index']['index']['settings']['limit'] = array( - '#type' => 'textfield', - '#title' => t('Number of items to index'), - '#default_value' => -1, - '#size' => 4, - '#attributes' => array('class' => array('search-api-limit')), - '#description' => t('Number of items to index. 
Set to -1 for all items.'), - ); - $batch_size = empty($index->options['cron_limit']) ? SEARCH_API_DEFAULT_CRON_LIMIT : $index->options['cron_limit']; - $form['index']['index']['settings']['batch_size'] = array( - '#type' => 'textfield', - '#title' => t('Number of items per batch run'), - '#default_value' => $batch_size, - '#size' => 4, - '#attributes' => array('class' => array('search-api-batch-size')), - '#description' => t('Number of items per batch run. Set to -1 for all items at once (not recommended). Defaults to the cron batch size of the index.'), - ); - $form['index']['index']['button'] = array( - '#type' => 'submit', - '#value' => t('Index now'), - ); - $form['index']['index']['total'] = array( - '#type' => 'value', - '#value' => $status['total'], - ); - $form['index']['index']['remaining'] = array( - '#type' => 'value', - '#value' => $status['total'] - $status['indexed'], - ); - } - } + $allow_indexing = ($status['indexed'] < $status['total']); + $all = t('all', array(), array('context' => 'items to index')); + $limit = array( + '#type' => 'textfield', + '#default_value' => $all, + '#size' => 4, + '#attributes' => array('class' => array('search-api-limit')), + '#disabled' => !$allow_indexing, + ); + $batch_size = empty($index->options['cron_limit']) ? SEARCH_API_DEFAULT_CRON_LIMIT : $index->options['cron_limit']; + $batch_size = $batch_size > 0 ? 
$batch_size : $all; + $batch_size = array( + '#type' => 'textfield', + '#default_value' => $batch_size, + '#size' => 4, + '#attributes' => array('class' => array('search-api-batch-size')), + '#disabled' => !$allow_indexing, + ); - if ($server) { - $form['index']['reindex'] = array( - '#type' => 'fieldset', - '#title' => t('Re-indexing'), - '#collapsible' => TRUE, - '#collapsed' => TRUE, - '#tree' => TRUE, - ); - $form['index']['reindex']['message'] = array( - '#type' => 'item', - '#description' => t('This will mark all items as "changed" and add them to the index again (overwriting existing data) in subsequent indexing operations.'), - ); - $form['index']['reindex']['clear'] = array( - '#type' => 'checkbox', - '#title' => t('Also clear data on server'), - '#description' => t('If checked, indexed data on the server will be deleted, too. No results will be returned by searches for this index until items are indexed again.<br />Use with care, in most cases rebuilding the index might be enough.'), - '#default_value' => FALSE, - ); - $form['index']['reindex']['button'] = array( - '#type' => 'submit', - '#value' => t('Re-index content'), - ); + // Here it gets complicated. We want to build a sentence from the form input + // elements, but to translate that we have to make the two form elements (for + // limit and batch size) pseudo-variables in the t() call. Since we can't + // pass them directly, we split the translated sentence (which still has the + // two tokens), figure out their order and then put the pieces together again + // using the form elements' #prefix and #suffix properties. 
+ $sentence = t('Index @limit items in batches of @batch_size items'); + $sentence = preg_split('/@(limit|batch_size)/', $sentence, -1, PREG_SPLIT_DELIM_CAPTURE); + if (count($sentence) == 5) { + $first = $sentence[1]; + $form['index'][$first] = $$first; + $form['index'][$first]['#prefix'] = $sentence[0]; + $form['index'][$first]['#suffix'] = $sentence[2]; + $second = $sentence[3]; + $form['index'][$second] = $$second; + $form['index'][$second]['#suffix'] = $sentence[4] . ' '; } + else { + // PANIC! + $limit['#title'] = t('Number of items to index'); + $form['index']['limit'] = $limit; + $batch_size['#title'] = t('Number of items per batch run'); + $form['index']['batch_size'] = $batch_size; + } + + $form['index']['button'] = array( + '#type' => 'submit', + '#value' => t('Index now'), + '#disabled' => !$allow_indexing, + ); + $form['index']['total'] = array( + '#type' => 'value', + '#value' => $status['total'], + ); + $form['index']['remaining'] = array( + '#type' => 'value', + '#value' => $status['total'] - $status['indexed'], + ); + $form['index']['all'] = array( + '#type' => 'value', + '#value' => $all, + ); + + $form['reindex'] = array( + '#type' => 'submit', + '#value' => t('Queue all items for reindexing'), + '#prefix' => '<div>', + '#suffix' => '</div>', + ); + $form['clear'] = array( + '#type' => 'submit', + '#value' => t('Clear all indexed data'), + '#prefix' => '<div>', + '#suffix' => '</div>', + ); return $form; } @@ -892,8 +1114,22 @@ function search_api_admin_index_status_form(array $form, array &$form_state, Sea * @see search_api_admin_index_status_form_submit() */ function search_api_admin_index_status_form_validate(array $form, array &$form_state) { - if ($form_state['values']['op'] == t('Index now') && !$form_state['values']['limit']) { - form_set_error('number', t('You have to set the number of items to index. 
Set to -1 for indexing all items.')); + $values = $form_state['values']; + if ($values['op'] == t('Index now')) { + $all_lower = drupal_strtolower($values['all']); + foreach (array('limit', 'batch_size') as $field) { + $val = trim($values[$field]); + if (drupal_strtolower($val) == $all_lower) { + $val = -1; + } + elseif (!$val || !is_numeric($val) || ((int) $val) != $val) { + form_error($form['index'][$field], t('Enter a non-zero integer. Use "-1" or "@all" for "all items".', array('@all' => $values['all']))); + } + else { + $val = (int) $val; + } + $form_state['values'][$field] = $val; + } } } @@ -903,48 +1139,33 @@ function search_api_admin_index_status_form_validate(array $form, array &$form_s * @see search_api_admin_index_status_form_validate() */ function search_api_admin_index_status_form_submit(array $form, array &$form_state) { - $redirect = &$form_state['redirect']; $values = $form_state['values']; $index = $form_state['index']; - $pre = 'admin/config/search/search_api/index/' . $index->machine_name; + $form_state['redirect'] = 'admin/config/search/search_api/index/' . $index->machine_name; + + // There is a Form API bug here that will let a user submit the form via the + // "Index now" button even if it is disabled, and then just set "op" to the + // value of an arbitrary other button. We therefore have to take care to spot + // this case ourselves. + if ($form_state['input']['op'] == t('Index now') && !empty($form['index']['button']['#disabled'])) { + drupal_set_message(t('All items have already been indexed.'), 'warning'); + return; + } + switch ($values['op']) { - case t('Enable'): - $redirect = array( - $pre . '/enable', - array('query' => array('token' => drupal_get_token($index->machine_name))), - ); - break; - case t('Disable'): - $redirect = $pre . 
'/disable'; - break; case t('Index now'): if (!_search_api_batch_indexing_create($index, $values['batch_size'], $values['limit'], $values['remaining'])) { drupal_set_message(t("Couldn't create a batch, please check the batch size and limit."), 'warning'); } - $redirect = $pre; break; - case t('Re-index content'): - if (empty($values['reindex']['clear'])) { - if ($index->reindex()) { - drupal_set_message(t('The index was successfully scheduled for re-indexing.')); - } - else { - drupal_set_message(t('An error has occurred while performing the desired action. Check the logs for details.'), 'error'); - } - } - else { - if ($index->clear()) { - drupal_set_message(t('The index was successfully cleared.')); - } - else { - drupal_set_message(t('An error has occurred while performing the desired action. Check the logs for details.'), 'error'); - } - } - $redirect = $pre; + + case t('Queue all items for reindexing'): + $form_state['redirect'] .= '/reindex'; break; - default: - throw new SearchApiException(t('Unknown action.')); + case t('Clear all indexed data'): + $form_state['redirect'] .= '/clear'; + break; } } @@ -1760,7 +1981,7 @@ function theme_search_api_admin_fields_table($variables) { } } if (empty($form['fields'][$name]['description']['#value'])) { - $rows[] = $row; + $rows[] = _search_api_deep_copy($row); } else { $rows[] = array( @@ -1870,14 +2091,24 @@ function search_api_admin_confirm(array $form, array &$form_state, $type, $actio switch ($type) { case 'server': switch ($action) { - case 'disable': + case 'clear': $text = array( + t('Clear server @name', array('@name' => $entity->name)), + t('Do you really want to clear all indexed data from this server?'), + t('This will permanently remove all data currently indexed on this server. Before the data is reindexed, searches on the indexes associated with this server will not return any results. This action cannot be undone. 
<strong>Use with caution!</strong>'), + t("The server's indexed data was successfully cleared."), + ); + break; + + case 'disable': + array( t('Disable server @name', array('@name' => $entity->name)), t('Do you really want to disable this server?'), t('This will disconnect all indexes from this server and disable them. Searches on these indexes will not be available until they are added to another server and re-enabled. All indexed data (except for read-only indexes) on this server will be cleared.'), t('The server and its indexes were successfully disabled.'), ); break; + case 'delete': if ($entity->hasStatus(ENTITY_OVERRIDDEN)) { $text = array( @@ -1897,12 +2128,31 @@ function search_api_admin_confirm(array $form, array &$form_state, $type, $actio ); } break; + default: return FALSE; } break; case 'index': switch ($action) { + case 'reindex': + $text = array( + t('Re-index index @name', array('@name' => $entity->name)), + t('Do you really want to queue all items on this index for re-indexing?'), + t('This will mark all items for this index to be marked as needing to be indexed. Searches on this index will continue to yield results while the items are being re-indexed. This action cannot be undone.'), + t('The index was successfully marked for re-indexing.'), + ); + break; + + case 'clear': + $text = array( + t('Clear index @name', array('@name' => $entity->name)), + t('Do you really want to clear the indexed data of this index?'), + t('This will remove all data currently indexed for this index. Before the data is reindexed, searches on the index will not return any results. 
This action cannot be undone.'), + t('The index was successfully cleared.'), + ); + break; + case 'disable': $text = array( t('Disable index @name', array('@name' => $entity->name)), @@ -1911,6 +2161,7 @@ function search_api_admin_confirm(array $form, array &$form_state, $type, $actio t('The index was successfully disabled.'), ); break; + case 'delete': if ($entity->hasStatus(ENTITY_OVERRIDDEN)) { $text = array( @@ -1930,6 +2181,7 @@ function search_api_admin_confirm(array $form, array &$form_state, $type, $actio ); } break; + default: return FALSE; } diff --git a/search_api.api.php b/search_api.api.php index d4211262..c1af2a14 100644 --- a/search_api.api.php +++ b/search_api.api.php @@ -22,7 +22,8 @@ * - description: A translated string to be shown to administrators when * selecting a service class. Should contain all peculiarities of the * service class, like field type support, supported features (like facets), - * the "direct" parse mode and other specific things to keep in mind. + * the "direct" parse mode and other specific things to keep in mind. The + * text can contain HTML. * - class: The service class, which has to implement the * SearchApiServiceInterface interface. * diff --git a/search_api.module b/search_api.module index 431df9f9..1326abe8 100644 --- a/search_api.module +++ b/search_api.module @@ -59,7 +59,6 @@ function search_api_menu() { $items[$pre . '/server/%search_api_server/view'] = array( 'title' => 'View', 'type' => MENU_DEFAULT_LOCAL_TASK, - 'context' => MENU_CONTEXT_INLINE | MENU_CONTEXT_PAGE, 'weight' => -10, ); $items[$pre . '/server/%search_api_server/edit'] = array( @@ -112,7 +111,6 @@ function search_api_menu() { $items[$pre . '/index/%search_api_index/view'] = array( 'title' => 'View', 'type' => MENU_DEFAULT_LOCAL_TASK, - 'context' => MENU_CONTEXT_INLINE | MENU_CONTEXT_PAGE, 'weight' => -10, ); $items[$pre . 
'/index/%search_api_index/edit'] = array( @@ -183,6 +181,20 @@ function search_api_menu() { */ function search_api_help($path) { switch ($path) { + case 'admin/help#search_api': + $classes = array(); + foreach (search_api_get_service_info() as $id => $info) { + $id = drupal_clean_css_identifier($id); + $name = check_plain($info['name']); + $description = isset($info['description']) ? $info['description'] : ''; + $classes[] = "<h2 id=\"$id\">$name</h2>\n$description"; + } + $output = ''; + if ($classes) { + $output .= '<p>' . t('The following service classes are available for creating a search server.') . "</p>\n"; + $output .= implode("\n\n", $classes); + } + return $output; case 'admin/config/search/search_api': return '<p>' . t('A search server and search index are used to execute searches. Several indexes can exist per server.<br />You need at least one server and one index to create searches on your site.') . '</p>'; } @@ -243,8 +255,29 @@ function search_api_theme() { 'enabled' => NULL, 'class_name' => NULL, 'class_description' => NULL, + 'indexes' => array(), + 'options' => array(), + 'status' => ENTITY_CUSTOM, + 'extra' => array(), + ), + 'file' => 'search_api.admin.inc', + ); + $themes['search_api_index'] = array( + 'variables' => array( + 'id' => NULL, + 'name' => '', + 'machine_name' => '', + 'description' => NULL, + 'item_type' => NULL, + 'enabled' => NULL, + 'server' => NULL, 'options' => array(), + 'fields' => array(), + 'indexed_items' => 0, + 'on_server' => 0, + 'total_items' => 0, 'status' => ENTITY_CUSTOM, + 'read_only' => 0, ), 'file' => 'search_api.admin.inc', ); @@ -2397,6 +2430,30 @@ function search_api_server_disable($id) { return $ret ? 1 : $ret; } +/** + * Clears a search server. + * + * Will delete all items stored on the server and mark all associated indexes + * for re-indexing. + * + * @param int|string $id + * The ID or machine name of the server to clear. + * + * @return bool + * TRUE on success, FALSE on failure. 
+ */ +function search_api_server_clear($id) { + $server = search_api_server_load($id); + $success = TRUE; + foreach (search_api_index_load_multiple(FALSE, array('server' => $server->machine_name)) as $index) { + $success &= $index->reindex(); + } + if ($success) { + $server->deleteItems(); + } + return $success; +} + /** * Deletes a search server and disables all associated indexes. * @@ -2728,6 +2785,63 @@ function _search_api_convert_custom_type($callback, $value, $original_type, $typ return $values; } +/** + * Determines the number of items indexed on a server for a certain index. + * + * Used as a helper function in search_api_admin_index_view(). + * + * @param SearchApiIndex $index + * The index + * + * @return int + * The number of items found on the server for this index, if the latter is + * enabled. 0 otherwise. + */ +function _search_api_get_items_on_server(SearchApiIndex $index) { + if (!$index->enabled) { + return 0; + } + // We want the raw count, without facets or other filters. Therefore we don't + // use the query's execute() method but pass it straight to the server for + // evaluation. Since this circumvents the normal preprocessing, which sets the + // fields (on which some service classes might even rely when there are no + // keywords), we set them manually here. + $query = $index->query() + ->fields(array()) + ->range(0, 0); + $response = $index->server()->search($query); + return $response['result count']; +} + +/** + * Returns a deep copy of the input array. + * + * The behavior of PHP regarding arrays with references pointing to it is rather + * weird. Therefore, we use this helper function in theme_search_api_index() to + * create safe copies of such arrays. + * + * @param array $array + * The array to copy. + * + * @return array + * A deep copy of the array. 
+ */ +function _search_api_deep_copy(array $array) { + $copy = array(); + foreach ($array as $k => $v) { + if (is_array($v)) { + $copy[$k] = _search_api_deep_copy($v); + } + elseif (is_object($v)) { + $copy[$k] = clone $v; + } + elseif ($v) { + $copy[$k] = $v; + } + } + return $copy; +} + /** * Creates and sets a batch for indexing items. * diff --git a/search_api.test b/search_api.test index b7e0752e..12d3240d 100644 --- a/search_api.test +++ b/search_api.test @@ -314,7 +314,6 @@ class SearchApiWebTest extends DrupalWebTestCase { $this->assertTitle('Search API test server | Drupal', 'Correct title when viewing server.'); $this->assertText('A server used for testing.', 'Description displayed.'); $this->assertText('search_api_test_service', 'Service name displayed.'); - $this->assertText('search_api_test_service description', 'Service description displayed.'); $this->assertText('search_api_test foo bar', 'Service options displayed.'); } @@ -453,29 +452,40 @@ class SearchApiWebTest extends DrupalWebTestCase { * @param bool $check_buttons * (optional) Whether to check for the correct presence/absence of buttons. * Defaults to TRUE. + * @param int|null $on_server + * (optional) The number of items actually on the server. Defaults to + * $indexed. 
*/ - protected function checkIndexStatus($indexed = 0, $total = 10, $check_buttons = TRUE) { + protected function checkIndexStatus($indexed = 0, $total = 10, $check_buttons = TRUE, $on_server = NULL) { $url = "admin/config/search/search_api/index/{$this->index_id}"; if (strpos($this->url, $url) === FALSE) { $this->drupalGet($url); } - $all = ($indexed == $total); - $correct_status = 'Correct index status displayed.'; - if ($all) { - $this->assertText(t('All items have been indexed (@total / @total).', array('@total' => $total)), $correct_status); + + $index_status = t('@indexed/@total indexed', array('@indexed' => $indexed, '@total' => $total)); + $this->assertText($index_status, 'Correct index status displayed.'); + + if (!isset($on_server)) { + $on_server = $indexed; } - elseif (!$indexed) { - $this->assertText(t('All items still need to be indexed (@total total).', array('@total' => $total)), $correct_status); + if ($on_server == $total) { + $this->assertNoText(t('Server index status'), 'No server index status displayed.'); } else { - $percentage = (int) (100 * $indexed / $total); - $text = t('About @percentage% of all items have been indexed in their latest version (@indexed / @total).', - array( - '@indexed' => $indexed, - '@total' => $total, - '@percentage' => $percentage - )); - $this->assertText($text, $correct_status); + $vars['@num'] = $on_server; + if ($on_server < $indexed) { + $vars['@diff'] = $indexed - $on_server; + $info = t('The index status on the search server has diverged (@num items indexed – @diff less than required). You are strongly advised to mark the index for re-indexing using the form below.', $vars); + } + elseif ($on_server > $total) { + $vars['@diff'] = $on_server - $total; + $info = t('The index status on the search server has diverged (@num items indexed – @diff more than present). 
You are strongly advised to clear the index using the form below.', $vars); + } + else { + $info = format_plural($on_server, 'There is 1 item indexed on the server for this index.', 'There are @count items indexed on the server for this index.'); + } + $this->assertText(t('Server index status'), 'Server index status displayed.'); + $this->assertText($info, 'Correct server index status displayed.'); } if (!$check_buttons) { @@ -483,11 +493,11 @@ class SearchApiWebTest extends DrupalWebTestCase { } $this->assertText(t('enabled'), '"Enabled" status displayed.'); - if ($all) { - $this->assertNoText(t('Index now'), '"Index now" form not displayed.'); + if ($indexed == $total) { + $this->assertRaw('disabled="disabled"', '"Index now" form disabled.'); } else { - $this->assertText(t('Index now'), '"Index now" form displayed.'); + $this->assertNoRaw('disabled="disabled"', '"Index now" form enabled.'); } } @@ -554,7 +564,7 @@ class SearchApiWebTest extends DrupalWebTestCase { $this->insertItems(1); // item 14 // Check whether the status display is right. - $this->checkIndexStatus(7, 14, FALSE); + $this->checkIndexStatus(7, 14, FALSE, 0); // Indexing order should now be: 11, 12, 13, 14, 8, 2, 4. Let's try it out! // First manually index one item, and see if it's 11. @@ -565,7 +575,7 @@ class SearchApiWebTest extends DrupalWebTestCase { $this->assertText(t('Successfully indexed @count item.', array('@count' => 1))); $this->assertNoText(t("Some items couldn't be indexed. Check the logs for details."), "Index errors warning isn't displayed."); $this->assertNoText(t("Couldn't index items. 
Check the logs for details."), "Index error isn't displayed."); - $this->checkIndexStatus(8, 14, FALSE); + $this->checkIndexStatus(8, 14, FALSE, 1); $results = $this->doSearch(); $this->assertEqual($results['result count'], 1, 'Indexing order test 1: correct result count.'); @@ -696,9 +706,10 @@ class SearchApiWebTest extends DrupalWebTestCase { * Tests whether clearing the index works correctly. */ protected function clearIndex() { - $this->drupalPost("admin/config/search/search_api/index/{$this->index_id}", array('reindex[clear]' => TRUE), t('Re-index content')); + $this->drupalPost("admin/config/search/search_api/index/{$this->index_id}", array(), t('Clear all indexed data')); + $this->drupalPost(NULL, array(), t('Confirm')); $this->assertText(t('The index was successfully cleared.')); - $this->assertText(t('All items still need to be indexed (@total total).', array('@total' => 14)), 'Correct index status displayed.'); + $this->assertText(t('@indexed/@total indexed', array('@indexed' => 0, '@total' => 14)), 'Correct index status displayed.'); } /**
2943411152f61129c959c4aaa9e9ac412fecc0a4
apache$maven-plugins
Working on: MNG-662 o Cleaned up resume functionality, and checkpointing for the release:prepare mojo. TODO: - Add testing of some sort - Verify that maven-scm's checkin function is recursive (suspect it's not) git-svn-id: https://svn.apache.org/repos/asf/maven/components/trunk/maven-plugins@227150 13f79535-47bb-0310-9956-ffa450edef68
p
https://github.com/apache/maven-plugins
diff --git a/maven-release-plugin/src/main/java/org/apache/maven/plugins/release/PrepareReleaseMojo.java b/maven-release-plugin/src/main/java/org/apache/maven/plugins/release/PrepareReleaseMojo.java index 9a027fb89d..a6a0215b49 100644 --- a/maven-release-plugin/src/main/java/org/apache/maven/plugins/release/PrepareReleaseMojo.java +++ b/maven-release-plugin/src/main/java/org/apache/maven/plugins/release/PrepareReleaseMojo.java @@ -166,7 +166,7 @@ protected void executeTask() getLog().warn( "Error writing checkpoint.", e ); } - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_PREPARED_RELEASE ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_PREPARED_RELEASE ) ) { checkForLocalModifications(); @@ -228,7 +228,7 @@ protected void executeTask() private void transformPomToSnapshotVersionPom( MavenProject project ) throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_POM_TRANSORMED_FOR_DEVELOPMENT ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_POM_TRANSORMED_FOR_DEVELOPMENT ) ) { if ( isSnapshot( project.getVersion() ) ) { @@ -373,7 +373,7 @@ protected ReleaseProgressTracker getReleaseProgress() try { releaseProgress = ReleaseProgressTracker.load( basedir ); - + releaseProgress.verifyResumeCapable(); } catch ( IOException e ) @@ -395,6 +395,8 @@ protected ReleaseProgressTracker getReleaseProgress() + ". Creating new instance." 
); } + releaseProgress.setResumeAtCheckpoint( resume ); + releaseProgress.setUsername( username ); if ( password != null ) @@ -441,7 +443,7 @@ private boolean isSnapshot( String version ) private void checkForLocalModifications() throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_LOCAL_MODIFICATIONS_CHECKED ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_LOCAL_MODIFICATIONS_CHECKED ) ) { getLog().info( "Verifying there are no local modifications ..." ); @@ -512,7 +514,7 @@ private void checkForLocalModifications() private void checkForPresenceOfSnapshots( MavenProject project ) throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_SNAPSHOTS_CHECKED ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_SNAPSHOTS_CHECKED ) ) { getLog().info( "Checking lineage for snapshots ..." ); @@ -612,7 +614,7 @@ private void checkForPresenceOfSnapshots( MavenProject project ) private void transformPomToReleaseVersionPom( MavenProject project ) throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_POM_TRANSFORMED_FOR_RELEASE ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_POM_TRANSFORMED_FOR_RELEASE ) ) { if ( !isSnapshot( project.getVersion() ) ) { @@ -751,7 +753,7 @@ private void transformPomToReleaseVersionPom( MavenProject project ) private void generateReleasePom( MavenProject project ) throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_GENERATED_RELEASE_POM ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_GENERATED_RELEASE_POM ) ) { MavenProject releaseProject = new MavenProject( project ); Model releaseModel = releaseProject.getModel(); @@ -947,7 +949,7 @@ private String resolveVersion( Artifact artifact, String artifactUsage, MavenPro private void 
checkInRelease() throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_CHECKED_IN_RELEASE_VERSION ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_CHECKED_IN_RELEASE_VERSION ) ) { checkIn( "**/pom.xml,**/release-pom.xml", "[maven-release-plugin] prepare release" ); @@ -965,7 +967,7 @@ private void checkInRelease() private void removeReleasePoms() throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_REMOVED_RELEASE_POM ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_REMOVED_RELEASE_POM ) ) { File currentReleasePomFile = null; @@ -1028,7 +1030,7 @@ private String trimPathForScmCalculation( File file ) private void checkInNextSnapshot() throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_CHECKED_IN_DEVELOPMENT_VERSION ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_CHECKED_IN_DEVELOPMENT_VERSION ) ) { checkIn( "**/pom.xml", "[maven-release-plugin] prepare for next development iteration" ); @@ -1113,7 +1115,7 @@ private String getTagLabel() private void tagRelease() throws MojoExecutionException { - if ( !getReleaseProgress().reachedCheckpoint( ReleaseProgressTracker.CP_TAGGED_RELEASE ) ) + if ( !getReleaseProgress().verifyCheckpoint( ReleaseProgressTracker.CP_TAGGED_RELEASE ) ) { String tag = getTagLabel(); diff --git a/maven-release-plugin/src/main/java/org/apache/maven/plugins/release/helpers/ReleaseProgressTracker.java b/maven-release-plugin/src/main/java/org/apache/maven/plugins/release/helpers/ReleaseProgressTracker.java index e090e26ed5..e07c0788f1 100644 --- a/maven-release-plugin/src/main/java/org/apache/maven/plugins/release/helpers/ReleaseProgressTracker.java +++ b/maven-release-plugin/src/main/java/org/apache/maven/plugins/release/helpers/ReleaseProgressTracker.java @@ -5,11 +5,9 @@ import java.io.File; import 
java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; import java.util.Date; import java.util.Properties; @@ -54,6 +52,8 @@ public class ReleaseProgressTracker private Properties releaseProperties; + private boolean resumeAtCheckpoint = false; + private ReleaseProgressTracker() { } @@ -230,11 +230,16 @@ private void setCheckpoint( String pointName ) releaseProperties.setProperty( CHECKPOINT_PREFIX + pointName, "OK" ); } - public boolean reachedCheckpoint( String pointName ) + public boolean verifyCheckpoint( String pointName ) { checkLoaded(); - return "OK".equals( releaseProperties.getProperty( CHECKPOINT_PREFIX + pointName ) ); + return resumeAtCheckpoint && "OK".equals( releaseProperties.getProperty( CHECKPOINT_PREFIX + pointName ) ); + } + + public void setResumeAtCheckpoint( boolean resumeAtCheckpoint ) + { + this.resumeAtCheckpoint = resumeAtCheckpoint; } }
9cf7838196de04939db4a066fe3f271409df4bb9
agorava$agorava-core
AGOVA-52 Support to programmatically control callback URL per auth request Extended OAuthSession to sore extra info like the internal callback URL
a
https://github.com/agorava/agorava-core
diff --git a/agorava-core-api/src/main/java/org/agorava/api/AgoravaConstants.java b/agorava-core-api/src/main/java/org/agorava/AgoravaConstants.java similarity index 99% rename from agorava-core-api/src/main/java/org/agorava/api/AgoravaConstants.java rename to agorava-core-api/src/main/java/org/agorava/AgoravaConstants.java index eafbaa6..1697461 100644 --- a/agorava-core-api/src/main/java/org/agorava/api/AgoravaConstants.java +++ b/agorava-core-api/src/main/java/org/agorava/AgoravaConstants.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.agorava.api; +package org.agorava; import org.agorava.api.oauth.Token; diff --git a/agorava-core-api/src/main/java/org/agorava/AgoravaContext.java b/agorava-core-api/src/main/java/org/agorava/AgoravaContext.java index 4704f0d..109cc65 100644 --- a/agorava-core-api/src/main/java/org/agorava/AgoravaContext.java +++ b/agorava-core-api/src/main/java/org/agorava/AgoravaContext.java @@ -50,6 +50,9 @@ public class AgoravaContext { private static List<String> listOfServices = null; + /** + * The default internal callback when whe return from OAuth connexion + */ protected static String internalCallBack; diff --git a/agorava-core-api/src/main/java/org/agorava/api/atinject/BeanResolver.java b/agorava-core-api/src/main/java/org/agorava/api/atinject/BeanResolver.java index b42f7b5..78b4f40 100644 --- a/agorava-core-api/src/main/java/org/agorava/api/atinject/BeanResolver.java +++ b/agorava-core-api/src/main/java/org/agorava/api/atinject/BeanResolver.java @@ -18,7 +18,7 @@ /** * - * Provide an agnostic way to resolve a bean accross all JSR 330 engine + * Provide an agnostic way to resolve a bean across all JSR 330 engine * * @author Antoine Sabot-Durand */ @@ -30,7 +30,7 @@ public abstract class BeanResolver { protected static BeanResolver instance; /** - * @return the current resulover + * @return the current resolver */ public static BeanResolver getInstance() { return instance; @@ -58,7 +58,7 @@ public static 
BeanResolver getInstance() { /** - * Rsolve bean by its class. Resolution can be optional + * Resolve bean by its class. Resolution can be optional * * @param name of the bean * @param optional if true and no bean found return null diff --git a/agorava-core-api/src/main/java/org/agorava/api/oauth/OAuthSession.java b/agorava-core-api/src/main/java/org/agorava/api/oauth/OAuthSession.java index 7e15548..08fe9ff 100644 --- a/agorava-core-api/src/main/java/org/agorava/api/oauth/OAuthSession.java +++ b/agorava-core-api/src/main/java/org/agorava/api/oauth/OAuthSession.java @@ -22,7 +22,10 @@ import org.agorava.api.storage.UserSessionRepository; import org.agorava.spi.UserProfile; +import java.io.Serializable; import java.lang.annotation.Annotation; +import java.util.HashMap; +import java.util.Map; import java.util.UUID; /** @@ -55,6 +58,8 @@ public class OAuthSession implements Identifiable { private UserSessionRepository repo; + private final Map<String, Serializable> extraData = new HashMap<String, Serializable>(); + OAuthSession(Annotation qualifier, Token requestToken, Token accessToken, String verifier, UserProfile userProfile, UserSessionRepository repo, String id) { @@ -210,6 +215,14 @@ public boolean equals(Object obj) { } + /** + * + * @return the extra data map that can be associated to this Session + */ + public Map<String, Serializable> getExtraData() { + return extraData; + } + /** * Builder class for {@link org.agorava.api.oauth.OAuthSession} * @@ -362,4 +375,5 @@ public OAuthSession build() { return new OAuthSession(qualifier, requestToken, accessToken, verifier, userProfile, repo, id); } } + } diff --git a/agorava-core-api/src/main/java/org/agorava/api/oauth/Token.java b/agorava-core-api/src/main/java/org/agorava/api/oauth/Token.java index 4dc1c59..876ca74 100644 --- a/agorava-core-api/src/main/java/org/agorava/api/oauth/Token.java +++ b/agorava-core-api/src/main/java/org/agorava/api/oauth/Token.java @@ -16,7 +16,7 @@ package org.agorava.api.oauth; -import 
org.agorava.api.AgoravaConstants; +import org.agorava.AgoravaConstants; import org.agorava.api.service.Preconditions; import java.io.Serializable; diff --git a/agorava-core-api/src/main/java/org/agorava/api/oauth/application/SimpleOAuthAppSettingsBuilder.java b/agorava-core-api/src/main/java/org/agorava/api/oauth/application/SimpleOAuthAppSettingsBuilder.java index 6b0dd07..173c539 100644 --- a/agorava-core-api/src/main/java/org/agorava/api/oauth/application/SimpleOAuthAppSettingsBuilder.java +++ b/agorava-core-api/src/main/java/org/agorava/api/oauth/application/SimpleOAuthAppSettingsBuilder.java @@ -16,8 +16,8 @@ package org.agorava.api.oauth.application; +import org.agorava.AgoravaConstants; import org.agorava.AgoravaContext; -import org.agorava.api.AgoravaConstants; import org.agorava.api.exception.AgoravaException; import java.lang.annotation.Annotation; diff --git a/agorava-core-api/src/main/java/org/agorava/api/service/JsonMapperService.java b/agorava-core-api/src/main/java/org/agorava/api/service/JsonMapperService.java index 5fd10a1..a3a7021 100644 --- a/agorava-core-api/src/main/java/org/agorava/api/service/JsonMapperService.java +++ b/agorava-core-api/src/main/java/org/agorava/api/service/JsonMapperService.java @@ -35,6 +35,7 @@ public interface JsonMapperService extends Serializable { * @param resp the response to de-serialize * @param clazz the target class of the object * @return an object of the given Class with fields coming from the response + * @throws org.agorava.api.exception.ResponseException if provided response is not valid */ <T> T mapToObject(Response resp, Class<T> clazz) throws ResponseException; diff --git a/agorava-core-api/src/main/java/org/agorava/api/service/OAuthLifeCycleService.java b/agorava-core-api/src/main/java/org/agorava/api/service/OAuthLifeCycleService.java index 93f2f2e..1fb0028 100644 --- a/agorava-core-api/src/main/java/org/agorava/api/service/OAuthLifeCycleService.java +++ 
b/agorava-core-api/src/main/java/org/agorava/api/service/OAuthLifeCycleService.java @@ -75,6 +75,16 @@ public interface OAuthLifeCycleService extends Serializable { String startDanceFor(String providerName); + /** + * OAuth Dance entry point. For a given Provider name initialize OAuth workflow by returning authorization url end user + * should connect to in order to grant permission to the OAuth application to use her account or her behalf + * + * @param providerName name of the service provider to connect to + * @param internalCallBack the internal URL to go back to after ending the dance + * @return the Authorization url needed to continue the OAuth Dance workflow + */ + String startDanceFor(String providerName, String internalCallBack); + /** * OAuth dance entry point. For a given Provider {@link org.agorava.api.atinject.ProviderRelated} qualifier, * initializes OAuth workflow by returning authorization url end user should connect to in order to grant permission to diff --git a/agorava-core-api/src/main/java/org/agorava/api/service/Preconditions.java b/agorava-core-api/src/main/java/org/agorava/api/service/Preconditions.java index 78efcda..bc36e9e 100644 --- a/agorava-core-api/src/main/java/org/agorava/api/service/Preconditions.java +++ b/agorava-core-api/src/main/java/org/agorava/api/service/Preconditions.java @@ -16,7 +16,7 @@ package org.agorava.api.service; -import org.agorava.api.AgoravaConstants; +import org.agorava.AgoravaConstants; import java.util.regex.Pattern; @@ -83,7 +83,7 @@ public static void checkValidOAuthCallback(String url, String errorMsg) { } } - public static boolean isUrl(String url) { + private static boolean isUrl(String url) { return URL_PATTERN.matcher(url).matches(); } diff --git a/agorava-core-api/src/main/java/org/agorava/utils/StringUtils.java b/agorava-core-api/src/main/java/org/agorava/api/service/StringUtils.java similarity index 96% rename from agorava-core-api/src/main/java/org/agorava/utils/StringUtils.java rename to 
agorava-core-api/src/main/java/org/agorava/api/service/StringUtils.java index e620fd2..18ba9c2 100644 --- a/agorava-core-api/src/main/java/org/agorava/utils/StringUtils.java +++ b/agorava-core-api/src/main/java/org/agorava/api/service/StringUtils.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package org.agorava.utils; +package org.agorava.api.service; import java.util.Arrays; import java.util.Collection; @@ -51,7 +51,7 @@ public static String join(Collection collection, Character separator) { /** * Join an array of Object with a given character as a separator * - * @param collection collection to join + * @param array array to join * @param separator char to separate values * @return resulting string of the concatained values with separator */ diff --git a/agorava-core-api/src/test/java/org/agorava/utils/StringUtilsTest.java b/agorava-core-api/src/test/java/org/agorava/utils/StringUtilsTest.java index 24f228c..96b84b3 100644 --- a/agorava-core-api/src/test/java/org/agorava/utils/StringUtilsTest.java +++ b/agorava-core-api/src/test/java/org/agorava/utils/StringUtilsTest.java @@ -16,6 +16,7 @@ package org.agorava.utils; +import org.agorava.api.service.StringUtils; import org.junit.Assert; import org.junit.Test; diff --git a/agorava-core-impl-cdi/src/main/java/org/agorava/cdi/OAuthLifeCycleServiceImpl.java b/agorava-core-impl-cdi/src/main/java/org/agorava/cdi/OAuthLifeCycleServiceImpl.java index af76f56..d7c11cd 100644 --- a/agorava-core-impl-cdi/src/main/java/org/agorava/cdi/OAuthLifeCycleServiceImpl.java +++ b/agorava-core-impl-cdi/src/main/java/org/agorava/cdi/OAuthLifeCycleServiceImpl.java @@ -16,6 +16,7 @@ package org.agorava.cdi; +import org.agorava.AgoravaConstants; import org.agorava.api.atinject.Current; import org.agorava.api.event.OAuthComplete; import org.agorava.api.event.SocialEvent; @@ -95,6 +96,11 @@ public UserProfileService getCurrentUserProfileService() { return 
userProfileServices.select(getCurrentSession().getServiceQualifier()).get(); } + @Override + public String startDanceFor(String providerName) { + return startDanceFor(providerName, null); + } + @Override public synchronized void endDance() { @@ -152,8 +158,10 @@ else if (!repository.getCurrent().getServiceQualifier().equals(qualifier)) @Override - public String startDanceFor(String providerName) { - buildSessionFor(providerName); + public String startDanceFor(String providerName, String internalCallBack) { + OAuthSession session = buildSessionFor(providerName); + if (internalCallBack != null && !"".equals(internalCallBack.trim())) + session.getExtraData().put(AgoravaConstants.INTERN_CALLBACK_PARAM_NAME, internalCallBack); return getCurrentService().getAuthorizationUrl(); } diff --git a/agorava-core-impl-cdi/src/main/java/org/agorava/cdi/extensions/AgoravaExtension.java b/agorava-core-impl-cdi/src/main/java/org/agorava/cdi/extensions/AgoravaExtension.java index 896072c..a4b6675 100644 --- a/agorava-core-impl-cdi/src/main/java/org/agorava/cdi/extensions/AgoravaExtension.java +++ b/agorava-core-impl-cdi/src/main/java/org/agorava/cdi/extensions/AgoravaExtension.java @@ -16,8 +16,8 @@ package org.agorava.cdi.extensions; +import org.agorava.AgoravaConstants; import org.agorava.AgoravaContext; -import org.agorava.api.AgoravaConstants; import org.agorava.api.atinject.InjectWithQualifier; import org.agorava.api.atinject.ProviderRelated; import org.agorava.api.exception.AgoravaException; diff --git a/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth10aServiceImpl.java b/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth10aServiceImpl.java index 13bfeb4..abd1b21 100644 --- a/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth10aServiceImpl.java +++ b/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth10aServiceImpl.java @@ -16,7 +16,7 @@ package org.agorava.oauth; -import org.agorava.api.AgoravaConstants; +import org.agorava.AgoravaConstants; import 
org.agorava.api.atinject.GenericBean; import org.agorava.api.atinject.InjectWithQualifier; import org.agorava.api.oauth.OAuth; diff --git a/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth20FinalServiceImpl.java b/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth20FinalServiceImpl.java index 7f24887..e8b8e5f 100644 --- a/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth20FinalServiceImpl.java +++ b/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth20FinalServiceImpl.java @@ -16,7 +16,7 @@ package org.agorava.oauth; -import org.agorava.api.AgoravaConstants; +import org.agorava.AgoravaConstants; import org.agorava.api.atinject.GenericBean; import org.agorava.api.atinject.InjectWithQualifier; import org.agorava.api.oauth.OAuth; diff --git a/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth20ServiceImpl.java b/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth20ServiceImpl.java index 025dfb5..ade3c06 100644 --- a/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth20ServiceImpl.java +++ b/agorava-core-impl/src/main/java/org/agorava/oauth/OAuth20ServiceImpl.java @@ -17,7 +17,7 @@ package org.agorava.oauth; -import org.agorava.api.AgoravaConstants; +import org.agorava.AgoravaConstants; import org.agorava.api.atinject.GenericBean; import org.agorava.api.atinject.InjectWithQualifier; import org.agorava.api.oauth.OAuth; diff --git a/agorava-core-impl/src/main/java/org/agorava/rest/OAuthRequestImpl.java b/agorava-core-impl/src/main/java/org/agorava/rest/OAuthRequestImpl.java index e1db038..4bd51b8 100644 --- a/agorava-core-impl/src/main/java/org/agorava/rest/OAuthRequestImpl.java +++ b/agorava-core-impl/src/main/java/org/agorava/rest/OAuthRequestImpl.java @@ -16,7 +16,7 @@ package org.agorava.rest; -import org.agorava.api.AgoravaConstants; +import org.agorava.AgoravaConstants; import org.agorava.api.oauth.OAuthRequest; import org.agorava.api.rest.Verb; diff --git 
a/agorava-core-impl/src/main/java/org/agorava/servlet/OAuthCallbackServlet.java b/agorava-core-impl/src/main/java/org/agorava/servlet/OAuthCallbackServlet.java index a83c20b..28df62a 100644 --- a/agorava-core-impl/src/main/java/org/agorava/servlet/OAuthCallbackServlet.java +++ b/agorava-core-impl/src/main/java/org/agorava/servlet/OAuthCallbackServlet.java @@ -16,8 +16,8 @@ package org.agorava.servlet; +import org.agorava.AgoravaConstants; import org.agorava.AgoravaContext; -import org.agorava.api.AgoravaConstants; import org.agorava.api.exception.AgoravaException; import org.agorava.api.service.OAuthLifeCycleService; @@ -35,11 +35,12 @@ public class OAuthCallbackServlet extends HttpServlet { @Inject - OAuthLifeCycleService OAuthLifeCycleService; + OAuthLifeCycleService lifeCycleService; protected void renderResponse(HttpServletRequest req, HttpServletResponse resp) { - String internalCallBack = req.getParameter(AgoravaConstants.INTERN_CALLBACK_PARAM_NAME); + String internalCallBack = (String) lifeCycleService.getCurrentSession().getExtraData().get(AgoravaConstants + .INTERN_CALLBACK_PARAM_NAME); if (internalCallBack == null) internalCallBack = AgoravaContext.getInternalCallBack(); @@ -55,8 +56,8 @@ protected void renderResponse(HttpServletRequest req, HttpServletResponse resp) @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { - String verifier = req.getParameter(OAuthLifeCycleService.getVerifierParamName()); - OAuthLifeCycleService.endDance(verifier); + String verifier = req.getParameter(lifeCycleService.getVerifierParamName()); + lifeCycleService.endDance(verifier); renderResponse(req, resp); } } diff --git a/agorava-core-impl/src/test/java/org/agorava/core/mock/ObjectMother.java b/agorava-core-impl/src/test/java/org/agorava/core/mock/ObjectMother.java index 5ee29a6..e0949b9 100644 --- a/agorava-core-impl/src/test/java/org/agorava/core/mock/ObjectMother.java +++ 
b/agorava-core-impl/src/test/java/org/agorava/core/mock/ObjectMother.java @@ -17,7 +17,7 @@ package org.agorava.core.mock; -import org.agorava.api.AgoravaConstants; +import org.agorava.AgoravaConstants; import org.agorava.api.oauth.OAuthRequest; import org.agorava.api.rest.Verb; import org.agorava.rest.OAuthRequestImpl; diff --git a/agorava-core-impl/src/test/java/org/agorava/rest/OAuthRequestTest.java b/agorava-core-impl/src/test/java/org/agorava/rest/OAuthRequestTest.java index a1a4c65..97f6137 100644 --- a/agorava-core-impl/src/test/java/org/agorava/rest/OAuthRequestTest.java +++ b/agorava-core-impl/src/test/java/org/agorava/rest/OAuthRequestTest.java @@ -16,7 +16,7 @@ package org.agorava.rest; -import org.agorava.api.AgoravaConstants; +import org.agorava.AgoravaConstants; import org.agorava.api.oauth.OAuthRequest; import org.agorava.api.rest.Verb; import org.junit.Before;
b792e33fdb52d83b0ce13bf2eaa98c0242bce157
intellij-community
added additional shortcut--
a
https://github.com/JetBrains/intellij-community
diff --git a/EDIDE/src/ru/compscicenter/edide/StudyTaskManager.java b/EDIDE/src/ru/compscicenter/edide/StudyTaskManager.java index b8d72745c9115..4335b5b1a4f08 100644 --- a/EDIDE/src/ru/compscicenter/edide/StudyTaskManager.java +++ b/EDIDE/src/ru/compscicenter/edide/StudyTaskManager.java @@ -115,6 +115,7 @@ public void run() { addShortcut(NextWindowAction.SHORTCUT, NextWindowAction.ACTION_ID); addShortcut(PrevWindowAction.SHORTCUT, PrevWindowAction.ACTION_ID); addShortcut(ShowHintAction.SHORTCUT, ShowHintAction.ACTION_ID); + addShortcut(NextWindowAction.SHORTCUT2, NextWindowAction.ACTION_ID); } } }); diff --git a/EDIDE/src/ru/compscicenter/edide/actions/NextWindowAction.java b/EDIDE/src/ru/compscicenter/edide/actions/NextWindowAction.java index 1053f1be54ed8..7f91b90ec5cbf 100644 --- a/EDIDE/src/ru/compscicenter/edide/actions/NextWindowAction.java +++ b/EDIDE/src/ru/compscicenter/edide/actions/NextWindowAction.java @@ -19,6 +19,7 @@ public class NextWindowAction extends DumbAwareAction { public static final String ACTION_ID = "NextWindow"; public static final String SHORTCUT = "ctrl pressed PERIOD"; + public static final String SHORTCUT2 = "ctrl pressed ENTER"; public void actionPerformed(AnActionEvent e) { Project project = e.getProject();
19184595677fdd08acf35c32ff78a3d97faf3ab2
Vala
Avoid unnecessary copies when using the coalescing operator Fixes bug 661985
a
https://github.com/GNOME/vala/
diff --git a/tests/Makefile.am b/tests/Makefile.am index 5d89abfe75..f8f882ca19 100644 --- a/tests/Makefile.am +++ b/tests/Makefile.am @@ -65,6 +65,7 @@ TESTS = \ control-flow/sideeffects.vala \ control-flow/bug639482.vala \ control-flow/bug652549.vala \ + control-flow/bug661985.vala \ control-flow/bug665904.vala \ control-flow/bug691514.vala \ enums/enums.vala \ diff --git a/tests/control-flow/bug661985.vala b/tests/control-flow/bug661985.vala new file mode 100644 index 0000000000..649af7d46a --- /dev/null +++ b/tests/control-flow/bug661985.vala @@ -0,0 +1,7 @@ +void main () { + string foo = "foo"; + void* foop = foo; + unowned string bar = foo ?? "bar"; + void* barp = bar; + assert (foop == barp); +} diff --git a/vala/valabinaryexpression.vala b/vala/valabinaryexpression.vala index 19766e12ff..9c1ef4c984 100644 --- a/vala/valabinaryexpression.vala +++ b/vala/valabinaryexpression.vala @@ -194,7 +194,12 @@ public class Vala.BinaryExpression : Expression { } if (operator == BinaryOperator.COALESCE) { - var local = new LocalVariable (null, get_temp_name (), left, source_reference); + if (!left.check (context)) { + error = true; + return false; + } + + var local = new LocalVariable (left.value_type != null ? 
left.value_type.copy () : null, get_temp_name (), left, source_reference); var decl = new DeclarationStatement (local, source_reference); var right_stmt = new ExpressionStatement (new Assignment (new MemberAccess.simple (local.name, right.source_reference), right, AssignmentOperator.SIMPLE, right.source_reference), right.source_reference); @@ -220,19 +225,10 @@ public class Vala.BinaryExpression : Expression { return false; } - var ma = new MemberAccess.simple (local.name, source_reference); - Expression replace = ma; - - if (target_type == null) { - replace = new ReferenceTransferExpression (replace, source_reference); - replace.target_type = local.variable_type.copy (); - replace.target_type.value_owned = true; - } else { - replace.target_type = target_type.copy (); - } - replace.check (context); + var temp_access = SemanticAnalyzer.create_temp_access (local, target_type); + temp_access.check (context); - parent_node.replace_expression (this, replace); + parent_node.replace_expression (this, temp_access); return true; }
20928d48aba08edecaed996d1cd7f469d3da1659
arquillian$arquillian-graphene
ARQGRA-84: Sizzle locators support added Implemented ByJquery and own @FindBy, utility class which works with both of @FindBy uniformly, enrichers altered to work with this utility class, enrichers refactored to not contain warnings
a
https://github.com/arquillian/arquillian-graphene
diff --git a/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/page/extension/PageExtensionTestCase.java b/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/page/extension/PageExtensionTestCase.java index ed17ac7c3..371a2a1e0 100644 --- a/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/page/extension/PageExtensionTestCase.java +++ b/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/page/extension/PageExtensionTestCase.java @@ -21,12 +21,16 @@ */ package org.jboss.arquillian.graphene.ftest.page.extension; -import java.util.Collection; -import java.util.List; +import static org.mockito.Mockito.when; + import java.net.URL; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; +import java.util.List; + import junit.framework.Assert; + import org.jboss.arquillian.drone.api.annotation.Drone; import org.jboss.arquillian.graphene.context.GraphenePageExtensionsContext; import org.jboss.arquillian.graphene.page.extension.PageExtensionRegistry; @@ -37,7 +41,6 @@ import org.junit.runner.RunWith; import org.mockito.Mockito; import org.openqa.selenium.WebDriver; -import static org.mockito.Mockito.when; /** * @author <a href="mailto:[email protected]">Jan Papousek</a> diff --git a/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/page/extension/SizzleJSPageExtensionTestCase.java b/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/page/extension/SizzleJSPageExtensionTestCase.java new file mode 100644 index 000000000..bb84963c8 --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/page/extension/SizzleJSPageExtensionTestCase.java @@ -0,0 +1,123 @@ +/** + * JBoss, Home of Professional Open Source + * Copyright 2012, Red Hat, Inc. 
and individual contributors + * by the @authors tag. See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. + */ +package org.jboss.arquillian.graphene.ftest.page.extension; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.net.URL; +import java.util.List; + +import org.jboss.arquillian.drone.api.annotation.Drone; +import org.jboss.arquillian.graphene.enricher.annotation.ByJQuery; +import org.jboss.arquillian.graphene.spi.annotations.FindBy; +import org.jboss.arquillian.junit.Arquillian; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.openqa.selenium.WebDriver; +import org.openqa.selenium.WebElement; + +/** + * @author <a href="mailto:[email protected]">Juraj Huska</a> + */ +@RunWith(Arquillian.class) +public class SizzleJSPageExtensionTestCase { + + @FindBy(jquery = ":header") + private WebElement webElementBySizzle; + + @FindBy(jquery = ":header") + private List<WebElement> listOfWebElementsBySizzle; + + @FindBy(jquery = "div:first") + private SizzleTestPageFragment sizzleTestPageFragment; + + @FindBy(jquery = "div:first") + private List<SizzleTestPageFragment> 
listOfSizzlePageFragments; + + @Drone + private WebDriver browser; + + private static String EXPECTED_SIZZLE_TEXT = "Hello sizzle locators!"; + + public void loadPage() { + URL page = this.getClass().getClassLoader() + .getResource("org/jboss/arquillian/graphene/ftest/page/extension/sample.html"); + browser.get(page.toString()); + } + + @Test + public void testSizzleJSPageExtensionInstalled() { + loadPage(); + + ByJQuery htmlBy = new ByJQuery("html"); + WebElement htmlElement = browser.findElement(htmlBy); + + assertNotNull(htmlElement); + assertEquals("html", htmlElement.getTagName()); + } + + @Test + public void testFindByOnWebElementSizzleLocator() { + loadPage(); + + assertNotNull(webElementBySizzle); + assertEquals("h1", webElementBySizzle.getTagName()); + } + + @Test + public void testFindByOnListOfWebElementSizzleLocator() { + loadPage(); + + assertNotNull(listOfWebElementsBySizzle); + assertEquals("h1", listOfWebElementsBySizzle.get(0).getTagName()); + } + + @Test + public void testFindByOnPageFragmentBySizzleLocator() { + loadPage(); + + assertNotNull(sizzleTestPageFragment); + assertEquals(EXPECTED_SIZZLE_TEXT, sizzleTestPageFragment.getSizzleLocator().getText()); + } + + @Test + public void testFindByOnListOfPageFragments() { + loadPage(); + + assertNotNull(listOfSizzlePageFragments); + assertEquals(EXPECTED_SIZZLE_TEXT, listOfSizzlePageFragments.get(0).getSizzleLocator().getText()); + } + + /* ************* + * Page Fragment + */ + public class SizzleTestPageFragment { + + @FindBy(jquery = "div:contains(sizzle locators)") + private WebElement sizzleLocator; + + public WebElement getSizzleLocator() { + return sizzleLocator; + } + } +} diff --git a/graphene-webdriver/graphene-webdriver-ftest/src/test/resources/org/jboss/arquillian/graphene/ftest/page/extension/sample.html b/graphene-webdriver/graphene-webdriver-ftest/src/test/resources/org/jboss/arquillian/graphene/ftest/page/extension/sample.html index 5dc662464..febc81c19 100644 --- 
a/graphene-webdriver/graphene-webdriver-ftest/src/test/resources/org/jboss/arquillian/graphene/ftest/page/extension/sample.html +++ b/graphene-webdriver/graphene-webdriver-ftest/src/test/resources/org/jboss/arquillian/graphene/ftest/page/extension/sample.html @@ -1,8 +1,11 @@ <!DOCTYPE html> <html> - <head> - </head> - <body> - <h1>Hello World!</h1> - </body> +<head> +</head> +<body> + <h1>Hello World!</h1> + <div> + <div>Hello sizzle locators!</div> + </div> +</body> </html> \ No newline at end of file diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/AbstractSearchContextEnricher.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/AbstractSearchContextEnricher.java index 41f3ae1cc..44996a6a7 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/AbstractSearchContextEnricher.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/AbstractSearchContextEnricher.java @@ -29,19 +29,18 @@ import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.util.Arrays; + import org.jboss.arquillian.core.api.Instance; import org.jboss.arquillian.core.api.annotation.Inject; import org.jboss.arquillian.core.spi.ServiceLoader; import org.jboss.arquillian.graphene.enricher.exception.GrapheneTestEnricherException; import org.jboss.arquillian.graphene.spi.enricher.SearchContextTestEnricher; import org.jboss.arquillian.test.spi.TestEnricher; -import org.openqa.selenium.By; import org.openqa.selenium.SearchContext; -import org.openqa.selenium.support.FindBy; /** * This class should help you to implement {@link SearchContextTestEnricher}. 
- * + * * @author <a href="mailto:[email protected]">Juraj Huska</a> * @author <a href="mailto:[email protected]">Jan Papousek</a> */ @@ -56,30 +55,28 @@ public abstract class AbstractSearchContextEnricher implements SearchContextTest private Instance<ServiceLoader> serviceLoader; /** - * Performs further enrichment on the given instance with the given search context. - * That means all instances {@link TestEnricher} and {@link SearchContextTestEnricher} - * are invoked. - * + * Performs further enrichment on the given instance with the given search context. That means all instances + * {@link TestEnricher} and {@link SearchContextTestEnricher} are invoked. + * * @param searchContext * @param target */ protected final void enrichRecursively(SearchContext searchContext, Object target) { - for (TestEnricher enricher: serviceLoader.get().all(TestEnricher.class)) { + for (TestEnricher enricher : serviceLoader.get().all(TestEnricher.class)) { if (!enricher.getClass().equals(GrapheneEnricher.class)) { enricher.enrich(target); } } - for (SearchContextTestEnricher enricher: serviceLoader.get().all(SearchContextTestEnricher.class)) { + for (SearchContextTestEnricher enricher : serviceLoader.get().all(SearchContextTestEnricher.class)) { enricher.enrich(searchContext, target); } } /** - * It loads a real type of a field defined by parametric type. It searches - * in declaring class and super class. E. g. if a field is declared as 'A fieldName', - * It tries to find type parameter called 'A' in super class declaration - * and its evaluation in the class declaring the given field. - * + * It loads a real type of a field defined by parametric type. It searches in declaring class and super class. E. g. if a + * field is declared as 'A fieldName', It tries to find type parameter called 'A' in super class declaration and its + * evaluation in the class declaring the given field. 
+ * * @param field * @param testCase * @return type of the given field @@ -107,6 +104,7 @@ protected final Class<?> getActualType(Field field, Object testCase) { /** * It loads the concrete type of list items. E.g. for List<String>, String is returned. + * * @param listField * @return * @throws ClassNotFoundException @@ -115,57 +113,9 @@ protected final Class<?> getListType(Field listField) throws ClassNotFoundExcept return Class.forName(listField.getGenericType().toString().split("<")[1].split(">")[0].split("<")[0]); } - /* - * can I do it in better way ?to iterate over all annotations methods and invoke them on what ?obviously it is not possible - * to invoke it on annotation, since it can not be instantiated - */ - protected final By getReferencedBy(FindBy findByAnnotation) { - String value = findByAnnotation.className().trim(); - if (!value.isEmpty()) { - return By.className(value); - } - - value = findByAnnotation.css().trim(); - if (!value.isEmpty()) { - return By.cssSelector(value); - } - - value = findByAnnotation.id().trim(); - if (!value.isEmpty()) { - return By.id(value); - } - - value = findByAnnotation.xpath().trim(); - if (!value.isEmpty()) { - return By.xpath(value); - } - - value = findByAnnotation.name().trim(); - if (!value.isEmpty()) { - return By.name(value); - } - - value = findByAnnotation.tagName().trim(); - if (!value.isEmpty()) { - return By.tagName(value); - } - - value = findByAnnotation.linkText().trim(); - if (!value.isEmpty()) { - return By.linkText(value); - } - - value = findByAnnotation.partialLinkText().trim(); - if (!value.isEmpty()) { - return By.partialLinkText(value); - } - - return null; - } - /** * Initialize given class. 
- * + * * @param clazz to be initialized * @throws IllegalAccessException * @throws InstantiationException @@ -174,8 +124,8 @@ protected final By getReferencedBy(FindBy findByAnnotation) { * @throws SecurityException * @throws NoSuchMethodException */ - protected final <T> T instantiate(Class<T> clazz) throws NoSuchMethodException, SecurityException, - InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException { + protected final <T> T instantiate(Class<T> clazz) throws NoSuchMethodException, SecurityException, InstantiationException, + IllegalAccessException, IllegalArgumentException, InvocationTargetException { Class<?> outerClass = clazz.getDeclaringClass(); @@ -188,7 +138,7 @@ protected final <T> T instantiate(Class<T> clazz) throws NoSuchMethodException, Object outerObject = instantiate(outerClass); - return construtor.newInstance(new Object[]{outerObject}); + return construtor.newInstance(new Object[] { outerObject }); } else { Constructor<T> construtor = clazz.getDeclaredConstructor(); diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/PageFragmentEnricher.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/PageFragmentEnricher.java index 8b57bb23a..513d274d2 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/PageFragmentEnricher.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/PageFragmentEnricher.java @@ -25,44 +25,45 @@ import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; + import org.jboss.arquillian.core.api.Instance; import org.jboss.arquillian.core.api.annotation.Inject; import org.jboss.arquillian.core.spi.ServiceLoader; -import org.jboss.arquillian.graphene.context.GrapheneContext; +import org.jboss.arquillian.graphene.enricher.annotation.FindByUtilities; 
import org.jboss.arquillian.graphene.enricher.exception.PageFragmentInitializationException; import org.jboss.arquillian.graphene.proxy.GrapheneProxy; import org.jboss.arquillian.graphene.spi.annotations.Root; import org.openqa.selenium.By; import org.openqa.selenium.SearchContext; -import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; /** * Enricher injecting page fragments ({@link FindBy} annotation is used) to the fields of the given object. - * + * * @author <a href="mailto:[email protected]">Juraj Huska</a> * @author <a href="mailto:[email protected]">Jan Papousek</a> */ public class PageFragmentEnricher extends AbstractWebElementEnricher { - + + @SuppressWarnings("unused") @Inject private Instance<ServiceLoader> serviceLoader; @Override public void enrich(SearchContext searchContext, Object target) { - List<Field> fields = ReflectionHelper.getFieldsWithAnnotation(target.getClass(), FindBy.class); + List<Field> fields = FindByUtilities.getListOfFieldsAnnotatedWithFindBys(target); for (Field field : fields) { // Page fragment if (isPageFragmentClass(field.getType())) { setupPageFragment(searchContext, target, field); - // List<Page fragment> + // List<Page fragment> } else { try { if (field.getType().isAssignableFrom(List.class) && isPageFragmentClass(getListType(field))) { setupPageFragmentList(searchContext, target, field); } - } catch(ClassNotFoundException e) { + } catch (ClassNotFoundException e) { throw new PageFragmentInitializationException(e.getMessage(), e); } } @@ -77,7 +78,6 @@ protected final <T> List<T> createPageFragmentList(final Class<T> clazz, final S List<T> result = GrapheneProxy.getProxyForFutureTarget(new GrapheneProxy.FutureTarget() { @Override public Object getTarget() { - WebDriver driver = GrapheneContext.getProxy(); List<WebElement> elements = searchContext.findElements(rootBy); List<T> fragments = new ArrayList<T>(); for (int i = 0; i < elements.size(); i++) { @@ 
-92,12 +92,12 @@ public Object getTarget() { protected final <T> T createPageFragment(Class<T> clazz, WebElement root) { try { - T pageFragment = instantiate(clazz); + T pageFragment = instantiate(clazz); List<Field> roots = ReflectionHelper.getFieldsWithAnnotation(clazz, Root.class); if (roots.size() > 1) { - throw new PageFragmentInitializationException("The Page Fragment " + NEW_LINE + pageFragment.getClass() + NEW_LINE - + " can not have more than one field annotated with Root annotation!" + "Your fields with @Root annotation: " - + roots + NEW_LINE); + throw new PageFragmentInitializationException("The Page Fragment " + NEW_LINE + pageFragment.getClass() + + NEW_LINE + " can not have more than one field annotated with Root annotation!" + + "Your fields with @Root annotation: " + roots + NEW_LINE); } if (roots.size() == 1) { setValue(roots.get(0), pageFragment, root); @@ -106,37 +106,38 @@ protected final <T> T createPageFragment(Class<T> clazz, WebElement root) { return pageFragment; } catch (NoSuchMethodException ex) { throw new PageFragmentInitializationException(" Check whether declared Page Fragment has no argument constructor!", - ex); + ex); } catch (IllegalAccessException ex) { throw new PageFragmentInitializationException( - " Check whether declared Page Fragment has public no argument constructor!", ex); + " Check whether declared Page Fragment has public no argument constructor!", ex); } catch (InstantiationException ex) { throw new PageFragmentInitializationException( - " Check whether you did not declare Page Fragment with abstract type!", ex); + " Check whether you did not declare Page Fragment with abstract type!", ex); } catch (Exception ex) { throw new PageFragmentInitializationException(ex); } } - protected final void setupPageFragmentList(SearchContext searchContext, Object target, Field field) throws ClassNotFoundException { - By rootBy = getReferencedBy(field.getAnnotation(FindBy.class)); + protected final void 
setupPageFragmentList(SearchContext searchContext, Object target, Field field) + throws ClassNotFoundException { + By rootBy = FindByUtilities.getCorrectBy(field); if (rootBy == null) { - throw new PageFragmentInitializationException( - "Your declaration of Page Fragment in test "+field.getDeclaringClass().getName()+" is annotated with @FindBy without any " - + "parameters, in other words without reference to root of the particular Page Fragment on the page!" - + NEW_LINE); + throw new PageFragmentInitializationException("Your declaration of Page Fragment in test " + + field.getDeclaringClass().getName() + " is annotated with @FindBy without any " + + "parameters, in other words without reference to root of the particular Page Fragment on the page!" + + NEW_LINE); } List<?> pageFragments = createPageFragmentList(getListType(field), searchContext, rootBy); setValue(field, target, pageFragments); } protected final void setupPageFragment(SearchContext searchContext, Object target, Field field) { - By rootBy = getReferencedBy(field.getAnnotation(FindBy.class)); + By rootBy = FindByUtilities.getCorrectBy(field); if (rootBy == null) { - throw new PageFragmentInitializationException( - "Your declaration of Page Fragment in test "+field.getDeclaringClass().getName()+" is annotated with @FindBy without any " - + "parameters, in other words without reference to root of the particular Page Fragment on the page!" - + NEW_LINE); + throw new PageFragmentInitializationException("Your declaration of Page Fragment in test " + + field.getDeclaringClass().getName() + " is annotated with @FindBy without any " + + "parameters, in other words without reference to root of the particular Page Fragment on the page!" 
+ + NEW_LINE); } WebElement root = createWebElement(rootBy, searchContext); Object pageFragment = createPageFragment(field.getType(), root); diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/PageObjectEnricher.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/PageObjectEnricher.java index 7504619b8..a22cb3104 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/PageObjectEnricher.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/PageObjectEnricher.java @@ -40,6 +40,7 @@ */ public class PageObjectEnricher extends AbstractSearchContextEnricher { + @SuppressWarnings("unused") @Inject private Instance<ServiceLoader> serviceLoader; diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/WebElementEnricher.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/WebElementEnricher.java index a62859c92..b6f3b1ac0 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/WebElementEnricher.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/WebElementEnricher.java @@ -23,11 +23,12 @@ import java.lang.reflect.Field; import java.util.List; + +import org.jboss.arquillian.graphene.enricher.annotation.FindByUtilities; import org.jboss.arquillian.graphene.enricher.exception.GrapheneTestEnricherException; import org.openqa.selenium.By; import org.openqa.selenium.SearchContext; import org.openqa.selenium.WebElement; -import org.openqa.selenium.support.FindBy; /** * @author <a href="mailto:[email protected]">Juraj Huska</a> @@ -38,36 +39,32 @@ public class WebElementEnricher extends AbstractWebElementEnricher { @Override public void enrich(SearchContext 
searchContext, Object target) { try { - List<Field> fields = ReflectionHelper.getFieldsWithAnnotation(target.getClass(), FindBy.class); + List<Field> fields = FindByUtilities.getListOfFieldsAnnotatedWithFindBys(target); for (Field field : fields) { - By by = getReferencedBy(field.getAnnotation(FindBy.class)); - String message = "Your @FindBy annotation over field " + NEW_LINE + field.getClass() - + NEW_LINE + " declared in: " + NEW_LINE + field.getDeclaringClass().getName() + NEW_LINE - + " is annotated with empty @FindBy annotation, in other words it " - + "should contain parameter which will define the strategy for referencing that element."; + By by = FindByUtilities.getCorrectBy(field); + String message = "Your @FindBy annotation over field " + NEW_LINE + field.getClass() + NEW_LINE + + " declared in: " + NEW_LINE + field.getDeclaringClass().getName() + NEW_LINE + + " is annotated with empty @FindBy annotation, in other words it " + + "should contain parameter which will define the strategy for referencing that element."; // WebElement if (field.getType().isAssignableFrom(WebElement.class)) { if (by == null) { throw new GrapheneTestEnricherException(message); } - WebElement element = createWebElement( - by, - searchContext); + WebElement element = createWebElement(by, searchContext); setValue(field, target, element); - // List<WebElement> - } else if (field.getType().isAssignableFrom(List.class) && getListType(field).isAssignableFrom(WebElement.class)) { + // List<WebElement> + } else if (field.getType().isAssignableFrom(List.class) + && getListType(field).isAssignableFrom(WebElement.class)) { if (by == null) { throw new GrapheneTestEnricherException(message); } - List<WebElement> elements = createWebElements( - by, - searchContext); + List<WebElement> elements = createWebElements(by, searchContext); setValue(field, target, elements); } } - } catch(ClassNotFoundException e) { + } catch (ClassNotFoundException e) { throw new IllegalStateException(e.getMessage(), 
e); } } - } diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/annotation/ByJQuery.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/annotation/ByJQuery.java new file mode 100644 index 000000000..e0bd43568 --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/annotation/ByJQuery.java @@ -0,0 +1,62 @@ +/** + * JBoss, Home of Professional Open Source + * Copyright 2012, Red Hat, Inc. and individual contributors + * by the @authors tag. See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
+ */ +package org.jboss.arquillian.graphene.enricher.annotation; + +import java.util.List; + +import org.jboss.arquillian.graphene.context.GrapheneContext; +import org.jboss.arquillian.graphene.context.GraphenePageExtensionsContext; +import org.jboss.arquillian.graphene.page.extension.SizzleJSPageExtension; +import org.openqa.selenium.By; +import org.openqa.selenium.JavascriptExecutor; +import org.openqa.selenium.SearchContext; +import org.openqa.selenium.WebElement; + +/** + * @author <a href="mailto:[email protected]">Juraj Huska</a> + */ +public class ByJQuery extends By { + + private String jquerySelector; + + private JavascriptExecutor executor = GrapheneContext.getProxyForInterfaces(JavascriptExecutor.class); + + public ByJQuery(String jquerySelector) { + this.jquerySelector = jquerySelector; + } + + public static ByJQuery jquerySelector(String selector) { + if(selector == null) { + throw new IllegalArgumentException("Can not find elements when jquerySelector is null!"); + } + return new ByJQuery(selector); + } + + @SuppressWarnings("unchecked") + @Override + public List<WebElement> findElements(SearchContext context) { + SizzleJSPageExtension pageExtension = new SizzleJSPageExtension(); + GraphenePageExtensionsContext.getRegistryProxy().register(pageExtension); + GraphenePageExtensionsContext.getInstallatorProviderProxy().installator(pageExtension.getName()).install(); + + return (List<WebElement>) executor.executeScript("return window.Sizzle('" + jquerySelector + "')"); + }} diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/annotation/FindByUtilities.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/annotation/FindByUtilities.java new file mode 100644 index 000000000..299c45932 --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/enricher/annotation/FindByUtilities.java @@ -0,0 +1,144 @@ +/** + 
* JBoss, Home of Professional Open Source + * Copyright 2012, Red Hat, Inc. and individual contributors + * by the @authors tag. See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
+ */ +package org.jboss.arquillian.graphene.enricher.annotation; + +import java.lang.reflect.Field; +import java.util.List; + +import org.jboss.arquillian.graphene.enricher.ReflectionHelper; +import org.openqa.selenium.By; +import org.openqa.selenium.support.FindBy; + +/** + * @author <a href="mailto:[email protected]">Juraj Huska</a> + */ +public class FindByUtilities { + + public static By getCorrectBy(Field field) { + if (field == null) { + throw new IllegalArgumentException("Parameter field cannot be null!"); + } + By by = null; + if (field.getAnnotation(FindBy.class) != null) { + by = getReferencedBy(field.getAnnotation(FindBy.class)); + } else { + by = getReferencedBy(field.getAnnotation(org.jboss.arquillian.graphene.spi.annotations.FindBy.class)); + } + + return by; + } + + public static List<Field> getListOfFieldsAnnotatedWithFindBys(Object target) { + List<Field> fields = ReflectionHelper.getFieldsWithAnnotation(target.getClass(), FindBy.class); + fields.addAll(ReflectionHelper.getFieldsWithAnnotation(target.getClass(), org.jboss.arquillian.graphene.spi.annotations.FindBy.class)); + return fields; + } + + private static final By getReferencedBy(FindBy findByAnnotation) { + String value = findByAnnotation.className().trim(); + if (!value.isEmpty()) { + return By.className(value); + } + + value = findByAnnotation.css().trim(); + if (!value.isEmpty()) { + return By.cssSelector(value); + } + + value = findByAnnotation.id().trim(); + if (!value.isEmpty()) { + return By.id(value); + } + + value = findByAnnotation.xpath().trim(); + if (!value.isEmpty()) { + return By.xpath(value); + } + + value = findByAnnotation.name().trim(); + if (!value.isEmpty()) { + return By.name(value); + } + + value = findByAnnotation.tagName().trim(); + if (!value.isEmpty()) { + return By.tagName(value); + } + + value = findByAnnotation.linkText().trim(); + if (!value.isEmpty()) { + return By.linkText(value); + } + + value = findByAnnotation.partialLinkText().trim(); + if 
(!value.isEmpty()) { + return By.partialLinkText(value); + } + + return null; + } + + private static final By getReferencedBy(org.jboss.arquillian.graphene.spi.annotations.FindBy findByAnnotation) { + String value = findByAnnotation.className().trim(); + if (!value.isEmpty()) { + return By.className(value); + } + + value = findByAnnotation.css().trim(); + if (!value.isEmpty()) { + return By.cssSelector(value); + } + + value = findByAnnotation.id().trim(); + if (!value.isEmpty()) { + return By.id(value); + } + + value = findByAnnotation.xpath().trim(); + if (!value.isEmpty()) { + return By.xpath(value); + } + + value = findByAnnotation.name().trim(); + if (!value.isEmpty()) { + return By.name(value); + } + + value = findByAnnotation.tagName().trim(); + if (!value.isEmpty()) { + return By.tagName(value); + } + + value = findByAnnotation.linkText().trim(); + if (!value.isEmpty()) { + return By.linkText(value); + } + + value = findByAnnotation.jquery(); + if (!value.isEmpty()) { + return ByJQuery.jquerySelector(value); + } + + return null; + } + +} diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/DefaultExecutionResolver.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/DefaultExecutionResolver.java index 4900e4504..a1cbae0cf 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/DefaultExecutionResolver.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/DefaultExecutionResolver.java @@ -21,17 +21,19 @@ */ package org.jboss.arquillian.graphene.javascript; -import com.google.common.io.Resources; import java.lang.reflect.Method; import java.net.URL; import java.nio.charset.Charset; import java.text.MessageFormat; import java.util.Arrays; + import org.jboss.arquillian.graphene.context.GrapheneContext; import 
org.jboss.arquillian.graphene.context.GraphenePageExtensionsContext; import org.jboss.arquillian.graphene.page.extension.JavaScriptPageExtension; import org.openqa.selenium.JavascriptExecutor; +import com.google.common.io.Resources; + /** * This resolver uses page extension mechanism to install needed JavaScript * and other required extensions. diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/page/extension/SizzleJSPageExtension.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/page/extension/SizzleJSPageExtension.java new file mode 100644 index 000000000..f06ab2fca --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/page/extension/SizzleJSPageExtension.java @@ -0,0 +1,54 @@ +/** + * JBoss, Home of Professional Open Source + * Copyright 2012, Red Hat, Inc. and individual contributors + * by the @authors tag. See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
+ */ +package org.jboss.arquillian.graphene.page.extension; + +import java.util.Collection; +import java.util.Collections; + +import org.jboss.arquillian.graphene.spi.javascript.JavaScript; +import org.jboss.arquillian.graphene.spi.page.PageExtension; + +/** + * @author <a href="mailto:[email protected]">Juraj Huska</a> + */ +public class SizzleJSPageExtension implements PageExtension { + + @Override + public String getName() { + return getClass().getName(); + } + + @Override + public JavaScript getExtensionScript() { + return JavaScript.fromResource("com/sizzlejs/sizzle.js"); + } + + @Override + public JavaScript getInstallationDetectionScript() { + return JavaScript.fromString("return ((typeof window.Sizzle != 'undefined') && (window.Sizzle() != null))"); + } + + @Override + public Collection<String> getRequired() { + return Collections.emptyList(); + } +} diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/javascript/com/sizzlejs/sizzle.js b/graphene-webdriver/graphene-webdriver-impl/src/main/javascript/com/sizzlejs/sizzle.js new file mode 100644 index 000000000..3e3caed4b --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/javascript/com/sizzlejs/sizzle.js @@ -0,0 +1,1710 @@ +/*! 
+ * Sizzle CSS Selector Engine + * Copyright 2012 jQuery Foundation and other contributors + * Released under the MIT license + * http://sizzlejs.com/ + */ +(function( window, undefined ) { + +var cachedruns, + assertGetIdNotName, + Expr, + getText, + isXML, + contains, + compile, + sortOrder, + hasDuplicate, + outermostContext, + + strundefined = "undefined", + + // Used in sorting + MAX_NEGATIVE = 1 << 31, + baseHasDuplicate = true, + + expando = ( "sizcache" + Math.random() ).replace( ".", "" ), + + Token = String, + document = window.document, + docElem = document.documentElement, + dirruns = 0, + done = 0, + pop = [].pop, + push = [].push, + slice = [].slice, + // Use a stripped-down indexOf if a native one is unavailable + indexOf = [].indexOf || function( elem ) { + var i = 0, + len = this.length; + for ( ; i < len; i++ ) { + if ( this[i] === elem ) { + return i; + } + } + return -1; + }, + + // Augment a function for special use by Sizzle + markFunction = function( fn, value ) { + fn[ expando ] = value == null || value; + return fn; + }, + + createCache = function() { + var cache = {}, + keys = []; + + return markFunction(function( key, value ) { + // Only keep the most recent entries + if ( keys.push( key ) > Expr.cacheLength ) { + delete cache[ keys.shift() ]; + } + + // Retrieve with (key + " ") to avoid collision with native Object.prototype properties (see Issue #157) + return (cache[ key + " " ] = value); + }, cache ); + }, + + classCache = createCache(), + tokenCache = createCache(), + compilerCache = createCache(), + + // Regex + + // Whitespace characters http://www.w3.org/TR/css3-selectors/#whitespace + whitespace = "[\\x20\\t\\r\\n\\f]", + // http://www.w3.org/TR/css3-syntax/#characters + characterEncoding = "(?:\\\\.|[-\\w]|[^\\x00-\\xa0])+", + + // Loosely modeled on CSS identifier characters + // An unquoted value should be a CSS identifier (http://www.w3.org/TR/css3-selectors/#attribute-selectors) + // Proper syntax: 
http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier + identifier = characterEncoding.replace( "w", "w#" ), + + // Acceptable operators http://www.w3.org/TR/selectors/#attribute-selectors + operators = "([*^$|!~]?=)", + attributes = "\\[" + whitespace + "*(" + characterEncoding + ")" + whitespace + + "*(?:" + operators + whitespace + "*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|(" + identifier + ")|)|)" + whitespace + "*\\]", + + // Prefer arguments not in parens/brackets, + // then attribute selectors and non-pseudos (denoted by :), + // then anything else + // These preferences are here to reduce the number of selectors + // needing tokenize in the PSEUDO preFilter + pseudos = ":(" + characterEncoding + ")(?:\\((?:(['\"])((?:\\\\.|[^\\\\])*?)\\2|([^()[\\]]*|(?:(?:" + attributes + ")|[^:]|\\\\.)*|.*))\\)|)", + + // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter + rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ), + + rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), + rcombinators = new RegExp( "^" + whitespace + "*([\\x20\\t\\r\\n\\f>+~])" + whitespace + "*" ), + rpseudo = new RegExp( pseudos ), + + // Easily-parseable/retrievable ID or TAG or CLASS selectors + rquickExpr = /^(?:#([\w\-]+)|(\w+)|\.([\w\-]+))$/, + + rsibling = /[\x20\t\r\n\f]*[+~]/, + + rheader = /h\d/i, + rinputs = /input|select|textarea|button/i, + + rbackslash = /\\(?!\\)/g, + + matchExpr = { + "ID": new RegExp( "^#(" + characterEncoding + ")" ), + "CLASS": new RegExp( "^\\.(" + characterEncoding + ")" ), + "NAME": new RegExp( "^\\[name=['\"]?(" + characterEncoding + ")['\"]?\\]" ), + "TAG": new RegExp( "^(" + characterEncoding.replace( "w", "w*" ) + ")" ), + "ATTR": new RegExp( "^" + attributes ), + "PSEUDO": new RegExp( "^" + pseudos ), + "CHILD": new RegExp( "^:(only|nth|first|last)-child(?:\\(" + whitespace + + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" 
+ whitespace + + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), + // For use in libraries implementing .is() + // We use this for POS matching in `select` + "needsContext": new RegExp( "^" + whitespace + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + + whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) + }, + + // Support + + // Used for testing something on an element + assert = function( fn ) { + var div = document.createElement("div"); + + try { + return fn( div ); + } catch (e) { + return false; + } finally { + // release memory in IE + div = null; + } + }, + + // Check if getElementsByTagName("*") returns only elements + assertTagNameNoComments = assert(function( div ) { + div.appendChild( document.createComment("") ); + return !div.getElementsByTagName("*").length; + }), + + // Check if getAttribute returns normalized href attributes + assertHrefNotNormalized = assert(function( div ) { + div.innerHTML = "<a href='#'></a>"; + return div.firstChild && typeof div.firstChild.getAttribute !== strundefined && + div.firstChild.getAttribute("href") === "#"; + }), + + // Check if attributes should be retrieved by attribute nodes + assertAttributes = assert(function( div ) { + div.innerHTML = "<select></select>"; + var type = typeof div.lastChild.getAttribute("multiple"); + // IE8 returns a string for some attributes even when not present + return type !== "boolean" && type !== "string"; + }), + + // Check if getElementsByClassName can be trusted + assertUsableClassName = assert(function( div ) { + // Opera can't find a second classname (in 9.6) + div.innerHTML = "<div class='hidden e'></div><div class='hidden'></div>"; + if ( !div.getElementsByClassName || !div.getElementsByClassName("e").length ) { + return false; + } + + // Safari 3.2 caches class attributes and doesn't catch changes + div.lastChild.className = "e"; + return div.getElementsByClassName("e").length === 2; + }), + + // Check if getElementById returns elements by name + // 
Check if getElementsByName privileges form controls or returns elements by ID + assertUsableName = assert(function( div ) { + // Inject content + div.id = expando + 0; + div.innerHTML = "<a name='" + expando + "'></a><div name='" + expando + "'></div>"; + docElem.insertBefore( div, docElem.firstChild ); + + // Test + var pass = document.getElementsByName && + // buggy browsers will return fewer than the correct 2 + document.getElementsByName( expando ).length === 2 + + // buggy browsers will return more than the correct 0 + document.getElementsByName( expando + 0 ).length; + assertGetIdNotName = !document.getElementById( expando ); + + // Cleanup + docElem.removeChild( div ); + + return pass; + }); + +// If slice is not available, provide a backup +try { + slice.call( docElem.childNodes, 0 )[0].nodeType; +} catch ( e ) { + slice = function( i ) { + var elem, + results = []; + for ( ; (elem = this[i]); i++ ) { + results.push( elem ); + } + return results; + }; +} + +function Sizzle( selector, context, results, seed ) { + results = results || []; + context = context || document; + var match, elem, xml, m, + nodeType = context.nodeType; + + if ( !selector || typeof selector !== "string" ) { + return results; + } + + if ( nodeType !== 1 && nodeType !== 9 ) { + return []; + } + + xml = isXML( context ); + + if ( !xml && !seed ) { + if ( (match = rquickExpr.exec( selector )) ) { + // Speed-up: Sizzle("#ID") + if ( (m = match[1]) ) { + if ( nodeType === 9 ) { + elem = context.getElementById( m ); + // Check parentNode to catch when Blackberry 4.6 returns + // nodes that are no longer in the document #6963 + if ( elem && elem.parentNode ) { + // Handle the case where IE, Opera, and Webkit return items + // by name instead of ID + if ( elem.id === m ) { + results.push( elem ); + return results; + } + } else { + return results; + } + } else { + // Context is not a document + if ( context.ownerDocument && (elem = context.ownerDocument.getElementById( m )) && + contains( 
context, elem ) && elem.id === m ) {
					results.push( elem );
					return results;
				}
			}

			// Speed-up: Sizzle("TAG")
			} else if ( match[2] ) {
				push.apply( results, slice.call(context.getElementsByTagName( selector ), 0) );
				return results;

			// Speed-up: Sizzle(".CLASS")
			} else if ( (m = match[3]) && assertUsableClassName && context.getElementsByClassName ) {
				push.apply( results, slice.call(context.getElementsByClassName( m ), 0) );
				return results;
			}
		}
	}

	// All others: fall back to the full tokenize/compile/select pipeline
	// (rtrim strips leading and non-escaped trailing whitespace first)
	return select( selector.replace( rtrim, "$1" ), context, results, seed, xml );
}

// Filter an element set against a selector expression; returns the matching subset.
Sizzle.matches = function( expr, elements ) {
	return Sizzle( expr, null, null, elements );
};

// True if the single element matches the selector expression.
Sizzle.matchesSelector = function( elem, expr ) {
	return Sizzle( expr, null, null, [ elem ] ).length > 0;
};

// Returns a function to use in pseudos for input types
// (e.g. :radio matches <input type="radio">)
function createInputPseudo( type ) {
	return function( elem ) {
		var name = elem.nodeName.toLowerCase();
		return name === "input" && elem.type === type;
	};
}

// Returns a function to use in pseudos for buttons
// (:submit/:reset match both <input> and <button> of that type)
function createButtonPseudo( type ) {
	return function( elem ) {
		var name = elem.nodeName.toLowerCase();
		return (name === "input" || name === "button") && elem.type === type;
	};
}

// Returns a function to use in pseudos for positionals.
// fn receives ( [], seedLength, argument ) and returns the list of
// seed indexes that should match; everything else is unmatched.
function createPositionalPseudo( fn ) {
	return markFunction(function( argument ) {
		argument = +argument;
		return markFunction(function( seed, matches ) {
			var j,
				matchIndexes = fn( [], seed.length, argument ),
				i = matchIndexes.length;

			// Match elements found at the specified indexes
			// (moves them from seed to matches in place)
			while ( i-- ) {
				if ( seed[ (j = matchIndexes[i]) ] ) {
					seed[j] = !(matches[j] = seed[j]);
				}
			}
		});
	});
}

/**
 * Utility function for retrieving the text value of an array of DOM nodes
 * @param {Array|Element} elem
 */
getText = Sizzle.getText = function( elem ) {
	var node,
		ret = "",
		i = 0,
		nodeType = elem.nodeType;

	if ( nodeType ) {
		if (
nodeType === 1 || nodeType === 9 || nodeType === 11 ) {
			// Use textContent for elements, documents and document fragments
			// innerText usage removed for consistency of new lines (see #11153)
			if ( typeof elem.textContent === "string" ) {
				return elem.textContent;
			} else {
				// No textContent (older IE): traverse its children recursively
				for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {
					ret += getText( elem );
				}
			}
		} else if ( nodeType === 3 || nodeType === 4 ) {
			// Text (3) and CDATA (4) nodes carry their text in nodeValue
			return elem.nodeValue;
		}
		// Do not include comment or processing instruction nodes
	} else {

		// If no nodeType, this is expected to be an array(-like) of nodes
		for ( ; (node = elem[i]); i++ ) {
			// Do not traverse comment nodes
			ret += getText( node );
		}
	}
	return ret;
};

// True when the element lives in a non-HTML (XML) document.
isXML = Sizzle.isXML = function( elem ) {
	// documentElement is verified for cases where it doesn't yet exist
	// (such as loading iframes in IE - #4833)
	var documentElement = elem && (elem.ownerDocument || elem).documentElement;
	return documentElement ? documentElement.nodeName !== "HTML" : false;
};

// Element contains another.
// Picks the fastest available strategy at load time:
// native contains(), then compareDocumentPosition (bit 16 = CONTAINED_BY),
// then a manual parentNode walk as last resort.
contains = Sizzle.contains = docElem.contains ?
	function( a, b ) {
		var adown = a.nodeType === 9 ? a.documentElement : a,
			bup = b && b.parentNode;
		return a === bup || !!( bup && bup.nodeType === 1 && adown.contains && adown.contains(bup) );
	} :
	docElem.compareDocumentPosition ?
	function( a, b ) {
		return b && !!( a.compareDocumentPosition( b ) & 16 );
	} :
	function( a, b ) {
		while ( (b = b.parentNode) ) {
			if ( b === a ) {
				return true;
			}
		}
		return false;
	};

// Cross-browser attribute retrieval, routing through Expr.attrHandle
// for attributes that known-buggy browsers misreport.
Sizzle.attr = function( elem, name ) {
	var val,
		xml = isXML( elem );

	if ( !xml ) {
		// HTML attribute names are case-insensitive
		name = name.toLowerCase();
	}
	if ( (val = Expr.attrHandle[ name ]) ) {
		return val( elem );
	}
	if ( xml || assertAttributes ) {
		return elem.getAttribute( name );
	}
	// IE8-style fallback: read the attribute node directly
	val = elem.getAttributeNode( name );
	return val ?
		typeof elem[ name ] === "boolean" ?
			// Boolean attributes report their name when present (per old IE)
			elem[ name ] ? name : null :
			val.specified ?
val.value : null : + null; +}; + +Expr = Sizzle.selectors = { + + // Can be adjusted by the user + cacheLength: 50, + + createPseudo: markFunction, + + match: matchExpr, + + // IE6/7 return a modified href + attrHandle: assertHrefNotNormalized ? + {} : + { + "href": function( elem ) { + return elem.getAttribute( "href", 2 ); + }, + "type": function( elem ) { + return elem.getAttribute("type"); + } + }, + + find: { + "ID": assertGetIdNotName ? + function( id, context, xml ) { + if ( typeof context.getElementById !== strundefined && !xml ) { + var m = context.getElementById( id ); + // Check parentNode to catch when Blackberry 4.6 returns + // nodes that are no longer in the document #6963 + return m && m.parentNode ? [m] : []; + } + } : + function( id, context, xml ) { + if ( typeof context.getElementById !== strundefined && !xml ) { + var m = context.getElementById( id ); + + return m ? + m.id === id || typeof m.getAttributeNode !== strundefined && m.getAttributeNode("id").value === id ? + [m] : + undefined : + []; + } + }, + + "TAG": assertTagNameNoComments ? 
+ function( tag, context ) { + if ( typeof context.getElementsByTagName !== strundefined ) { + return context.getElementsByTagName( tag ); + } + } : + function( tag, context ) { + var results = context.getElementsByTagName( tag ); + + // Filter out possible comments + if ( tag === "*" ) { + var elem, + tmp = [], + i = 0; + + for ( ; (elem = results[i]); i++ ) { + if ( elem.nodeType === 1 ) { + tmp.push( elem ); + } + } + + return tmp; + } + return results; + }, + + "NAME": assertUsableName && function( tag, context ) { + if ( typeof context.getElementsByName !== strundefined ) { + return context.getElementsByName( name ); + } + }, + + "CLASS": assertUsableClassName && function( className, context, xml ) { + if ( typeof context.getElementsByClassName !== strundefined && !xml ) { + return context.getElementsByClassName( className ); + } + } + }, + + relative: { + ">": { dir: "parentNode", first: true }, + " ": { dir: "parentNode" }, + "+": { dir: "previousSibling", first: true }, + "~": { dir: "previousSibling" } + }, + + preFilter: { + "ATTR": function( match ) { + match[1] = match[1].replace( rbackslash, "" ); + + // Move the given value to match[3] whether quoted or unquoted + match[3] = ( match[4] || match[5] || "" ).replace( rbackslash, "" ); + + if ( match[2] === "~=" ) { + match[3] = " " + match[3] + " "; + } + + return match.slice( 0, 4 ); + }, + + "CHILD": function( match ) { + /* matches from matchExpr["CHILD"] + 1 type (only|nth|...) + 2 argument (even|odd|\d*|\d*n([+-]\d+)?|...) + 3 xn-component of xn+y argument ([+-]?\d*n|) + 4 sign of xn-component + 5 x of xn-component + 6 sign of y-component + 7 y of y-component + */ + match[1] = match[1].toLowerCase(); + + if ( match[1] === "nth" ) { + // nth-child requires argument + if ( !match[2] ) { + Sizzle.error( match[0] ); + } + + // numeric x and y parameters for Expr.filter.CHILD + // remember that false/true cast respectively to 0/1 + match[3] = +( match[3] ? 
match[4] + (match[5] || 1) : 2 * ( match[2] === "even" || match[2] === "odd" ) ); + match[4] = +( ( match[6] + match[7] ) || match[2] === "odd" ); + + // other types prohibit arguments + } else if ( match[2] ) { + Sizzle.error( match[0] ); + } + + return match; + }, + + "PSEUDO": function( match ) { + var unquoted, excess; + if ( matchExpr["CHILD"].test( match[0] ) ) { + return null; + } + + if ( match[3] ) { + match[2] = match[3]; + } else if ( (unquoted = match[4]) ) { + // Only check arguments that contain a pseudo + if ( rpseudo.test(unquoted) && + // Get excess from tokenize (recursively) + (excess = tokenize( unquoted, true )) && + // advance to the next closing parenthesis + (excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) { + + // excess is a negative index + unquoted = unquoted.slice( 0, excess ); + match[0] = match[0].slice( 0, excess ); + } + match[2] = unquoted; + } + + // Return only captures needed by the pseudo filter method (type and argument) + return match.slice( 0, 3 ); + } + }, + + filter: { + "ID": assertGetIdNotName ? 
+ function( id ) { + id = id.replace( rbackslash, "" ); + return function( elem ) { + return elem.getAttribute("id") === id; + }; + } : + function( id ) { + id = id.replace( rbackslash, "" ); + return function( elem ) { + var node = typeof elem.getAttributeNode !== strundefined && elem.getAttributeNode("id"); + return node && node.value === id; + }; + }, + + "TAG": function( nodeName ) { + if ( nodeName === "*" ) { + return function() { return true; }; + } + nodeName = nodeName.replace( rbackslash, "" ).toLowerCase(); + + return function( elem ) { + return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; + }; + }, + + "CLASS": function( className ) { + var pattern = classCache[ expando ][ className + " " ]; + + return pattern || + (pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) && + classCache( className, function( elem ) { + return pattern.test( elem.className || (typeof elem.getAttribute !== strundefined && elem.getAttribute("class")) || "" ); + }); + }, + + "ATTR": function( name, operator, check ) { + return function( elem ) { + var result = Sizzle.attr( elem, name ); + + if ( result == null ) { + return operator === "!="; + } + if ( !operator ) { + return true; + } + + result += ""; + + return operator === "=" ? result === check : + operator === "!=" ? result !== check : + operator === "^=" ? check && result.indexOf( check ) === 0 : + operator === "*=" ? check && result.indexOf( check ) > -1 : + operator === "$=" ? check && result.substr( result.length - check.length ) === check : + operator === "~=" ? ( " " + result + " " ).indexOf( check ) > -1 : + operator === "|=" ? 
result === check || result.substr( 0, check.length + 1 ) === check + "-" : + false; + }; + }, + + "CHILD": function( type, argument, first, last ) { + + if ( type === "nth" ) { + return function( elem ) { + var node, diff, + parent = elem.parentNode; + + if ( first === 1 && last === 0 ) { + return true; + } + + if ( parent ) { + diff = 0; + for ( node = parent.firstChild; node; node = node.nextSibling ) { + if ( node.nodeType === 1 ) { + diff++; + if ( elem === node ) { + break; + } + } + } + } + + // Incorporate the offset (or cast to NaN), then check against cycle size + diff -= last; + return diff === first || ( diff % first === 0 && diff / first >= 0 ); + }; + } + + return function( elem ) { + var node = elem; + + switch ( type ) { + case "only": + case "first": + while ( (node = node.previousSibling) ) { + if ( node.nodeType === 1 ) { + return false; + } + } + + if ( type === "first" ) { + return true; + } + + node = elem; + + /* falls through */ + case "last": + while ( (node = node.nextSibling) ) { + if ( node.nodeType === 1 ) { + return false; + } + } + + return true; + } + }; + }, + + "PSEUDO": function( pseudo, argument ) { + // pseudo-class names are case-insensitive + // http://www.w3.org/TR/selectors/#pseudo-classes + // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters + // Remember that setFilters inherits from pseudos + var args, + fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || + Sizzle.error( "unsupported pseudo: " + pseudo ); + + // The user may use createPseudo to indicate that + // arguments are needed to create the filter function + // just as Sizzle does + if ( fn[ expando ] ) { + return fn( argument ); + } + + // But maintain support for old signatures + if ( fn.length > 1 ) { + args = [ pseudo, pseudo, "", argument ]; + return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
+ markFunction(function( seed, matches ) { + var idx, + matched = fn( seed, argument ), + i = matched.length; + while ( i-- ) { + idx = indexOf.call( seed, matched[i] ); + seed[ idx ] = !( matches[ idx ] = matched[i] ); + } + }) : + function( elem ) { + return fn( elem, 0, args ); + }; + } + + return fn; + } + }, + + pseudos: { + "not": markFunction(function( selector ) { + // Trim the selector passed to compile + // to avoid treating leading and trailing + // spaces as combinators + var input = [], + results = [], + matcher = compile( selector.replace( rtrim, "$1" ) ); + + return matcher[ expando ] ? + markFunction(function( seed, matches, context, xml ) { + var elem, + unmatched = matcher( seed, null, xml, [] ), + i = seed.length; + + // Match elements unmatched by `matcher` + while ( i-- ) { + if ( (elem = unmatched[i]) ) { + seed[i] = !(matches[i] = elem); + } + } + }) : + function( elem, context, xml ) { + input[0] = elem; + matcher( input, null, xml, results ); + return !results.pop(); + }; + }), + + "has": markFunction(function( selector ) { + return function( elem ) { + return Sizzle( selector, elem ).length > 0; + }; + }), + + "contains": markFunction(function( text ) { + return function( elem ) { + return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1; + }; + }), + + "enabled": function( elem ) { + return elem.disabled === false; + }, + + "disabled": function( elem ) { + return elem.disabled === true; + }, + + "checked": function( elem ) { + // In CSS3, :checked should return both checked and selected elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + var nodeName = elem.nodeName.toLowerCase(); + return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected); + }, + + "selected": function( elem ) { + // Accessing this property makes selected-by-default + // options in Safari work properly + if ( elem.parentNode ) { + elem.parentNode.selectedIndex; + } + + return 
elem.selected === true; + }, + + "parent": function( elem ) { + return !Expr.pseudos["empty"]( elem ); + }, + + "empty": function( elem ) { + // http://www.w3.org/TR/selectors/#empty-pseudo + // :empty is only affected by element nodes and content nodes(including text(3), cdata(4)), + // not comment, processing instructions, or others + // Thanks to Diego Perini for the nodeName shortcut + // Greater than "@" means alpha characters (specifically not starting with "#" or "?") + var nodeType; + elem = elem.firstChild; + while ( elem ) { + if ( elem.nodeName > "@" || (nodeType = elem.nodeType) === 3 || nodeType === 4 ) { + return false; + } + elem = elem.nextSibling; + } + return true; + }, + + "header": function( elem ) { + return rheader.test( elem.nodeName ); + }, + + "text": function( elem ) { + var type, attr; + // IE6 and 7 will map elem.type to 'text' for new HTML5 types (search, etc) + // use getAttribute instead to test this case + return elem.nodeName.toLowerCase() === "input" && + (type = elem.type) === "text" && + ( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === type ); + }, + + // Input types + "radio": createInputPseudo("radio"), + "checkbox": createInputPseudo("checkbox"), + "file": createInputPseudo("file"), + "password": createInputPseudo("password"), + "image": createInputPseudo("image"), + + "submit": createButtonPseudo("submit"), + "reset": createButtonPseudo("reset"), + + "button": function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === "button" || name === "button"; + }, + + "input": function( elem ) { + return rinputs.test( elem.nodeName ); + }, + + "focus": function( elem ) { + var doc = elem.ownerDocument; + return elem === doc.activeElement && (!doc.hasFocus || doc.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex); + }, + + "active": function( elem ) { + return elem === elem.ownerDocument.activeElement; + }, + + // Positional types + "first": 
createPositionalPseudo(function() {
			return [ 0 ];
		}),

		"last": createPositionalPseudo(function( matchIndexes, length ) {
			return [ length - 1 ];
		}),

		"eq": createPositionalPseudo(function( matchIndexes, length, argument ) {
			// Negative indexes count from the end, as in :eq(-1)
			return [ argument < 0 ? argument + length : argument ];
		}),

		"even": createPositionalPseudo(function( matchIndexes, length ) {
			for ( var i = 0; i < length; i += 2 ) {
				matchIndexes.push( i );
			}
			return matchIndexes;
		}),

		"odd": createPositionalPseudo(function( matchIndexes, length ) {
			for ( var i = 1; i < length; i += 2 ) {
				matchIndexes.push( i );
			}
			return matchIndexes;
		}),

		"lt": createPositionalPseudo(function( matchIndexes, length, argument ) {
			// All indexes strictly below the (possibly negative) argument
			for ( var i = argument < 0 ? argument + length : argument; --i >= 0; ) {
				matchIndexes.push( i );
			}
			return matchIndexes;
		}),

		"gt": createPositionalPseudo(function( matchIndexes, length, argument ) {
			// All indexes strictly above the (possibly negative) argument
			for ( var i = argument < 0 ? argument + length : argument; ++i < length; ) {
				matchIndexes.push( i );
			}
			return matchIndexes;
		})
	}
};

// Document-order helper for two nodes under the same parent:
// -1 when b appears among a's following siblings, 1 otherwise
// (falls back to -1 when a is falsy).
function siblingCheck( a, b ) {

	if ( a && b ) {
		var cur = a.nextSibling;

		while ( cur ) {
			if ( cur === b ) {
				return -1;
			}

			cur = cur.nextSibling;
		}
	}

	return a ? 1 : -1;
}

// Comparator used by Sizzle.uniqueSort to order results in document order.
// Prefer the native compareDocumentPosition when available.
sortOrder = docElem.compareDocumentPosition ?
	function( a, b ) {
		var compare, parent;
		if ( a === b ) {
			// Flag so uniqueSort knows duplicates exist
			hasDuplicate = true;
			return 0;
		}

		if ( a.compareDocumentPosition && b.compareDocumentPosition ) {
			// Bit 1 = DISCONNECTED; also treat nodes parented by a document
			// fragment (nodeType 11) as disconnected
			if ( (compare = a.compareDocumentPosition( b )) & 1 || (( parent = a.parentNode ) && parent.nodeType === 11) ) {
				// Prefer whichever node is attached to our document
				if ( a === document || contains(document, a) ) {
					return -1;
				}
				if ( b === document || contains(document, b) ) {
					return 1;
				}
				return 0;
			}
			// Bit 4 = FOLLOWING: b comes after a
			return compare & 4 ? -1 : 1;
		}

		return a.compareDocumentPosition ?
-1 : 1;
	} :
	// Fallback comparator for browsers without compareDocumentPosition:
	// walk both ancestor chains and compare at the first divergence.
	function( a, b ) {
		// The nodes are identical, we can exit early
		if ( a === b ) {
			hasDuplicate = true;
			return 0;

		// Fallback to using sourceIndex (in IE) if it's available on both nodes
		} else if ( a.sourceIndex && b.sourceIndex ) {
			// ~x is falsy only for -1, so MAX_NEGATIVE stands in for
			// "not in this document" and sorts last
			return ( ~b.sourceIndex || ( MAX_NEGATIVE ) ) - ( contains( document, a ) && ~a.sourceIndex || ( MAX_NEGATIVE ) );
		}

		var i = 0,
			ap = [ a ],
			bp = [ b ],
			aup = a.parentNode,
			bup = b.parentNode,
			cur = aup;

		// If no parents were found then the nodes are disconnected
		if ( a === document ) {
			return -1;

		} else if ( b === document ) {
			return 1;

		} else if ( !aup && !bup ) {
			return 0;

		} else if ( !bup ) {
			return -1;

		} else if ( !aup ) {
			return 1;

		// If the nodes are siblings (or identical) we can do a quick check
		} else if ( aup === bup ) {
			return siblingCheck( a, b );
		}

		// Otherwise they're somewhere else in the tree so we need
		// to build up a full list of the parentNodes for comparison
		while ( cur ) {
			ap.unshift( cur );
			cur = cur.parentNode;
		}

		cur = bup;

		while ( cur ) {
			bp.unshift( cur );
			cur = cur.parentNode;
		}

		// Walk down the tree looking for a discrepancy
		while ( ap[i] === bp[i] ) {
			i++;
		}

		// Prefer our document
		if ( i === 0 ) {
			if ( ap[0] === document || contains(document, ap[0]) ) {
				return -1;
			}
			if ( bp[0] === document || contains(document, bp[0]) ) {
				return 1;
			}
			return 0;
		}

		// We ended someplace up the tree so do a sibling check
		return siblingCheck( ap[i], bp[i] );
	};

// Always assume the presence of duplicates if sort doesn't
// pass them to our comparison function (as in Google Chrome).
[0, 0].sort( sortOrder );
baseHasDuplicate = !hasDuplicate;

// Document sorting and removing duplicates.
// Sorts results in place with sortOrder, then splices out any element
// equal to its predecessor (only when the probe above proved the
// comparator actually sees duplicate pairs).
Sizzle.uniqueSort = function( results ) {
	var elem,
		duplicates = [],
		i = 1,
		j = 0;

	hasDuplicate = baseHasDuplicate;
	results.sort( sortOrder );

	if ( hasDuplicate ) {
		// Collect indexes of consecutive duplicates, then remove them
		// back-to-front so earlier indexes stay valid
		for ( ; (elem = results[i]); i++ ) {
			if ( elem === results[ i - 1 ] ) {
				j = duplicates.push( i );
			}
		}
		while ( j-- ) {
			results.splice( duplicates[ j ], 1 );
		}
	}

	return results;
};

// Uniform error for selectors the engine cannot parse
Sizzle.error = function( msg ) {
	throw new Error( "Syntax error, unrecognized expression: " + msg );
};

// Parse a selector string into groups (comma-separated) of Token lists.
// With parseOnly, returns the length of the unparseable excess (0 = valid)
// instead of the token groups; results are memoized in tokenCache.
function tokenize( selector, parseOnly ) {
	var matched, match, tokens, type,
		soFar, groups, preFilters,
		cached = tokenCache[ expando ][ selector + " " ];

	if ( cached ) {
		return parseOnly ? 0 : cached.slice( 0 );
	}

	soFar = selector;
	groups = [];
	preFilters = Expr.preFilter;

	while ( soFar ) {

		// Comma and first run
		if ( !matched || (match = rcomma.exec( soFar )) ) {
			if ( match ) {
				// Don't consume trailing commas as valid
				soFar = soFar.slice( match[0].length ) || soFar;
			}
			groups.push( tokens = [] );
		}

		matched = false;

		// Combinators (>, +, ~, descendant space)
		if ( (match = rcombinators.exec( soFar )) ) {
			tokens.push( matched = new Token( match.shift() ) );
			soFar = soFar.slice( matched.length );

			// Cast descendant combinators to space
			matched.type = match[0].replace( rtrim, " " );
		}

		// Filters (ID, TAG, CLASS, ATTR, CHILD, PSEUDO, ...), each run
		// through its preFilter (which may normalize or reject the match)
		for ( type in Expr.filter ) {
			if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] ||
				(match = preFilters[ type ]( match ))) ) {

				tokens.push( matched = new Token( match.shift() ) );
				soFar = soFar.slice( matched.length );
				matched.type = type;
				matched.matches = match;
			}
		}

		// Nothing consumed this iteration: the rest is unparseable
		if ( !matched ) {
			break;
		}
	}

	// Return the length of the invalid excess
	// if we're just parsing
	// Otherwise, throw an error or return tokens
	return parseOnly ?
		soFar.length :
		soFar ?
+ Sizzle.error( selector ) : + // Cache the tokens + tokenCache( selector, groups ).slice( 0 ); +} + +function addCombinator( matcher, combinator, base ) { + var dir = combinator.dir, + checkNonElements = base && combinator.dir === "parentNode", + doneName = done++; + + return combinator.first ? + // Check against closest ancestor/preceding element + function( elem, context, xml ) { + while ( (elem = elem[ dir ]) ) { + if ( checkNonElements || elem.nodeType === 1 ) { + return matcher( elem, context, xml ); + } + } + } : + + // Check against all ancestor/preceding elements + function( elem, context, xml ) { + // We can't set arbitrary data on XML nodes, so they don't benefit from dir caching + if ( !xml ) { + var cache, + dirkey = dirruns + " " + doneName + " ", + cachedkey = dirkey + cachedruns; + while ( (elem = elem[ dir ]) ) { + if ( checkNonElements || elem.nodeType === 1 ) { + if ( (cache = elem[ expando ]) === cachedkey ) { + return elem.sizset; + } else if ( typeof cache === "string" && cache.indexOf(dirkey) === 0 ) { + if ( elem.sizset ) { + return elem; + } + } else { + elem[ expando ] = cachedkey; + if ( matcher( elem, context, xml ) ) { + elem.sizset = true; + return elem; + } + elem.sizset = false; + } + } + } + } else { + while ( (elem = elem[ dir ]) ) { + if ( checkNonElements || elem.nodeType === 1 ) { + if ( matcher( elem, context, xml ) ) { + return elem; + } + } + } + } + }; +} + +function elementMatcher( matchers ) { + return matchers.length > 1 ? 
+ function( elem, context, xml ) { + var i = matchers.length; + while ( i-- ) { + if ( !matchers[i]( elem, context, xml ) ) { + return false; + } + } + return true; + } : + matchers[0]; +} + +function condense( unmatched, map, filter, context, xml ) { + var elem, + newUnmatched = [], + i = 0, + len = unmatched.length, + mapped = map != null; + + for ( ; i < len; i++ ) { + if ( (elem = unmatched[i]) ) { + if ( !filter || filter( elem, context, xml ) ) { + newUnmatched.push( elem ); + if ( mapped ) { + map.push( i ); + } + } + } + } + + return newUnmatched; +} + +function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { + if ( postFilter && !postFilter[ expando ] ) { + postFilter = setMatcher( postFilter ); + } + if ( postFinder && !postFinder[ expando ] ) { + postFinder = setMatcher( postFinder, postSelector ); + } + return markFunction(function( seed, results, context, xml ) { + var temp, i, elem, + preMap = [], + postMap = [], + preexisting = results.length, + + // Get initial elements from seed or context + elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ), + + // Prefilter to get matcher input, preserving a map for seed-results synchronization + matcherIn = preFilter && ( seed || !selector ) ? + condense( elems, preMap, preFilter, context, xml ) : + elems, + + matcherOut = matcher ? + // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, + postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
+ + // ...intermediate processing is necessary + [] : + + // ...otherwise use results directly + results : + matcherIn; + + // Find primary matches + if ( matcher ) { + matcher( matcherIn, matcherOut, context, xml ); + } + + // Apply postFilter + if ( postFilter ) { + temp = condense( matcherOut, postMap ); + postFilter( temp, [], context, xml ); + + // Un-match failing elements by moving them back to matcherIn + i = temp.length; + while ( i-- ) { + if ( (elem = temp[i]) ) { + matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem); + } + } + } + + if ( seed ) { + if ( postFinder || preFilter ) { + if ( postFinder ) { + // Get the final matcherOut by condensing this intermediate into postFinder contexts + temp = []; + i = matcherOut.length; + while ( i-- ) { + if ( (elem = matcherOut[i]) ) { + // Restore matcherIn since elem is not yet a final match + temp.push( (matcherIn[i] = elem) ); + } + } + postFinder( null, (matcherOut = []), temp, xml ); + } + + // Move matched elements from seed to results to keep them synchronized + i = matcherOut.length; + while ( i-- ) { + if ( (elem = matcherOut[i]) && + (temp = postFinder ? indexOf.call( seed, elem ) : preMap[i]) > -1 ) { + + seed[temp] = !(results[temp] = elem); + } + } + } + + // Add elements to results, through postFinder if defined + } else { + matcherOut = condense( + matcherOut === results ? + matcherOut.splice( preexisting, matcherOut.length ) : + matcherOut + ); + if ( postFinder ) { + postFinder( null, results, matcherOut, xml ); + } else { + push.apply( results, matcherOut ); + } + } + }); +} + +function matcherFromTokens( tokens ) { + var checkContext, matcher, j, + len = tokens.length, + leadingRelative = Expr.relative[ tokens[0].type ], + implicitRelative = leadingRelative || Expr.relative[" "], + i = leadingRelative ? 
1 : 0, + + // The foundational matcher ensures that elements are reachable from top-level context(s) + matchContext = addCombinator( function( elem ) { + return elem === checkContext; + }, implicitRelative, true ), + matchAnyContext = addCombinator( function( elem ) { + return indexOf.call( checkContext, elem ) > -1; + }, implicitRelative, true ), + matchers = [ function( elem, context, xml ) { + return ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( + (checkContext = context).nodeType ? + matchContext( elem, context, xml ) : + matchAnyContext( elem, context, xml ) ); + } ]; + + for ( ; i < len; i++ ) { + if ( (matcher = Expr.relative[ tokens[i].type ]) ) { + matchers = [ addCombinator( elementMatcher( matchers ), matcher ) ]; + } else { + matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches ); + + // Return special upon seeing a positional matcher + if ( matcher[ expando ] ) { + // Find the next relative operator (if any) for proper handling + j = ++i; + for ( ; j < len; j++ ) { + if ( Expr.relative[ tokens[j].type ] ) { + break; + } + } + return setMatcher( + i > 1 && elementMatcher( matchers ), + i > 1 && tokens.slice( 0, i - 1 ).join("").replace( rtrim, "$1" ), + matcher, + i < j && matcherFromTokens( tokens.slice( i, j ) ), + j < len && matcherFromTokens( (tokens = tokens.slice( j )) ), + j < len && tokens.join("") + ); + } + matchers.push( matcher ); + } + } + + return elementMatcher( matchers ); +} + +function matcherFromGroupMatchers( elementMatchers, setMatchers ) { + // A counter to specify which element is currently being matched + var matcherCachedRuns = 0, + bySet = setMatchers.length > 0, + byElement = elementMatchers.length > 0, + superMatcher = function( seed, context, xml, results, expandContext ) { + var elem, j, matcher, + setMatched = [], + matchedCount = 0, + i = "0", + unmatched = seed && [], + outermost = expandContext != null, + contextBackup = outermostContext, + // We must always have either seed 
elements or context + elems = seed || byElement && Expr.find["TAG"]( "*", expandContext && context.parentNode || context ), + // Nested matchers should use non-integer dirruns + dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.E); + + if ( outermost ) { + outermostContext = context !== document && context; + cachedruns = matcherCachedRuns; + } + + // Add elements passing elementMatchers directly to results + for ( ; (elem = elems[i]) != null; i++ ) { + if ( byElement && elem ) { + for ( j = 0; (matcher = elementMatchers[j]); j++ ) { + if ( matcher( elem, context, xml ) ) { + results.push( elem ); + break; + } + } + if ( outermost ) { + dirruns = dirrunsUnique; + cachedruns = ++matcherCachedRuns; + } + } + + // Track unmatched elements for set filters + if ( bySet ) { + // They will have gone through all possible matchers + if ( (elem = !matcher && elem) ) { + matchedCount--; + } + + // Lengthen the array for every element, matched or not + if ( seed ) { + unmatched.push( elem ); + } + } + } + + // Apply set filters to unmatched elements + // `i` starts as a string, so matchedCount would equal "00" if there are no elements + matchedCount += i; + if ( bySet && i !== matchedCount ) { + for ( j = 0; (matcher = setMatchers[j]); j++ ) { + matcher( unmatched, setMatched, context, xml ); + } + + if ( seed ) { + // Reintegrate element matches to eliminate the need for sorting + if ( matchedCount > 0 ) { + while ( i-- ) { + if ( !(unmatched[i] || setMatched[i]) ) { + setMatched[i] = pop.call( results ); + } + } + } + + // Discard index placeholder values to get only actual matches + setMatched = condense( setMatched ); + } + + // Add matches to results + push.apply( results, setMatched ); + + // Seedless set matches succeeding multiple successful matchers stipulate sorting + if ( outermost && !seed && setMatched.length > 0 && + ( matchedCount + setMatchers.length ) > 1 ) { + + Sizzle.uniqueSort( results ); + } + } + + // Override manipulation of globals by nested 
matchers + if ( outermost ) { + dirruns = dirrunsUnique; + outermostContext = contextBackup; + } + + return unmatched; + }; + + return bySet ? + markFunction( superMatcher ) : + superMatcher; +} + +compile = Sizzle.compile = function( selector, group /* Internal Use Only */ ) { + var i, + setMatchers = [], + elementMatchers = [], + cached = compilerCache[ expando ][ selector + " " ]; + + if ( !cached ) { + // Generate a function of recursive functions that can be used to check each element + if ( !group ) { + group = tokenize( selector ); + } + i = group.length; + while ( i-- ) { + cached = matcherFromTokens( group[i] ); + if ( cached[ expando ] ) { + setMatchers.push( cached ); + } else { + elementMatchers.push( cached ); + } + } + + // Cache the compiled function + cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) ); + } + return cached; +}; + +function multipleContexts( selector, contexts, results ) { + var i = 0, + len = contexts.length; + for ( ; i < len; i++ ) { + Sizzle( selector, contexts[i], results ); + } + return results; +} + +function select( selector, context, results, seed, xml ) { + var i, tokens, token, type, find, + match = tokenize( selector ); + + if ( !seed ) { + // Try to minimize operations if there is only one group + if ( match.length === 1 ) { + + // Take a shortcut and set the context if the root selector is an ID + tokens = match[0] = match[0].slice( 0 ); + if ( tokens.length > 2 && (token = tokens[0]).type === "ID" && + context.nodeType === 9 && !xml && + Expr.relative[ tokens[1].type ] ) { + + context = Expr.find["ID"]( token.matches[0].replace( rbackslash, "" ), context, xml )[0]; + if ( !context ) { + return results; + } + + selector = selector.slice( tokens.shift().length ); + } + + // Fetch a seed set for right-to-left matching + for ( i = matchExpr["needsContext"].test( selector ) ? 
-1 : tokens.length - 1; i >= 0; i-- ) { + token = tokens[i]; + + // Abort if we hit a combinator + if ( Expr.relative[ (type = token.type) ] ) { + break; + } + if ( (find = Expr.find[ type ]) ) { + // Search, expanding context for leading sibling combinators + if ( (seed = find( + token.matches[0].replace( rbackslash, "" ), + rsibling.test( tokens[0].type ) && context.parentNode || context, + xml + )) ) { + + // If seed is empty or no tokens remain, we can return early + tokens.splice( i, 1 ); + selector = seed.length && tokens.join(""); + if ( !selector ) { + push.apply( results, slice.call( seed, 0 ) ); + return results; + } + + break; + } + } + } + } + } + + // Compile and execute a filtering function + // Provide `match` to avoid retokenization if we modified the selector above + compile( selector, match )( + seed, + context, + xml, + results, + rsibling.test( selector ) + ); + return results; +} + +if ( document.querySelectorAll ) { + (function() { + var disconnectedMatch, + oldSelect = select, + rescape = /'|\\/g, + rattributeQuotes = /\=[\x20\t\r\n\f]*([^'"\]]*)[\x20\t\r\n\f]*\]/g, + + // qSa(:focus) reports false when true (Chrome 21), no need to also add to buggyMatches since matches checks buggyQSA + // A support test would require too much code (would include document ready) + rbuggyQSA = [ ":focus" ], + + // matchesSelector(:active) reports false when true (IE9/Opera 11.5) + // A support test would require too much code (would include document ready) + // just skip matchesSelector for :active + rbuggyMatches = [ ":active" ], + matches = docElem.matchesSelector || + docElem.mozMatchesSelector || + docElem.webkitMatchesSelector || + docElem.oMatchesSelector || + docElem.msMatchesSelector; + + // Build QSA regex + // Regex strategy adopted from Diego Perini + assert(function( div ) { + // Select is set to empty string on purpose + // This is to test IE's treatment of not explictly + // setting a boolean content attribute, + // since its presence should be 
enough + // http://bugs.jquery.com/ticket/12359 + div.innerHTML = "<select><option selected=''></option></select>"; + + // IE8 - Some boolean attributes are not treated correctly + if ( !div.querySelectorAll("[selected]").length ) { + rbuggyQSA.push( "\\[" + whitespace + "*(?:checked|disabled|ismap|multiple|readonly|selected|value)" ); + } + + // Webkit/Opera - :checked should return selected option elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + // IE8 throws error here (do not put tests after this one) + if ( !div.querySelectorAll(":checked").length ) { + rbuggyQSA.push(":checked"); + } + }); + + assert(function( div ) { + + // Opera 10-12/IE9 - ^= $= *= and empty values + // Should not select anything + div.innerHTML = "<p test=''></p>"; + if ( div.querySelectorAll("[test^='']").length ) { + rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:\"\"|'')" ); + } + + // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) + // IE8 throws error here (do not put tests after this one) + div.innerHTML = "<input type='hidden'/>"; + if ( !div.querySelectorAll(":enabled").length ) { + rbuggyQSA.push(":enabled", ":disabled"); + } + }); + + // rbuggyQSA always contains :focus, so no need for a length check + rbuggyQSA = /* rbuggyQSA.length && */ new RegExp( rbuggyQSA.join("|") ); + + select = function( selector, context, results, seed, xml ) { + // Only use querySelectorAll when not filtering, + // when this is not xml, + // and when no QSA bugs apply + if ( !seed && !xml && !rbuggyQSA.test( selector ) ) { + var groups, i, + old = true, + nid = expando, + newContext = context, + newSelector = context.nodeType === 9 && selector; + + // qSA works strangely on Element-rooted queries + // We can work around this by specifying an extra ID on the root + // and working up from there (Thanks to Andrew Dupont for the technique) + // IE 8 doesn't work on object elements + if ( context.nodeType === 1 && 
context.nodeName.toLowerCase() !== "object" ) { + groups = tokenize( selector ); + + if ( (old = context.getAttribute("id")) ) { + nid = old.replace( rescape, "\\$&" ); + } else { + context.setAttribute( "id", nid ); + } + nid = "[id='" + nid + "'] "; + + i = groups.length; + while ( i-- ) { + groups[i] = nid + groups[i].join(""); + } + newContext = rsibling.test( selector ) && context.parentNode || context; + newSelector = groups.join(","); + } + + if ( newSelector ) { + try { + push.apply( results, slice.call( newContext.querySelectorAll( + newSelector + ), 0 ) ); + return results; + } catch(qsaError) { + } finally { + if ( !old ) { + context.removeAttribute("id"); + } + } + } + } + + return oldSelect( selector, context, results, seed, xml ); + }; + + if ( matches ) { + assert(function( div ) { + // Check to see if it's possible to do matchesSelector + // on a disconnected node (IE 9) + disconnectedMatch = matches.call( div, "div" ); + + // This should fail with an exception + // Gecko does not error, returns false instead + try { + matches.call( div, "[test!='']:sizzle" ); + rbuggyMatches.push( "!=", pseudos ); + } catch ( e ) {} + }); + + // rbuggyMatches always contains :active and :focus, so no need for a length check + rbuggyMatches = /* rbuggyMatches.length && */ new RegExp( rbuggyMatches.join("|") ); + + Sizzle.matchesSelector = function( elem, expr ) { + // Make sure that attribute selectors are quoted + expr = expr.replace( rattributeQuotes, "='$1']" ); + + // rbuggyMatches always contains :active, so no need for an existence check + if ( !isXML( elem ) && !rbuggyMatches.test( expr ) && !rbuggyQSA.test( expr ) ) { + try { + var ret = matches.call( elem, expr ); + + // IE 9's matchesSelector returns false on disconnected nodes + if ( ret || disconnectedMatch || + // As well, disconnected nodes are said to be in a document + // fragment in IE 9 + elem.document && elem.document.nodeType !== 11 ) { + return ret; + } + } catch(e) {} + } + + return Sizzle( 
expr, null, null, [ elem ] ).length > 0; + }; + } + })(); +} + +// Deprecated +Expr.pseudos["nth"] = Expr.pseudos["eq"]; + +// Easy API for creating new setFilters +function setFilters() {} +Expr.filters = setFilters.prototype = Expr.pseudos; +Expr.setFilters = new setFilters(); + +// EXPOSE +if ( typeof define === "function" && define.amd ) { + define(function() { return Sizzle; }); +} else { + window.Sizzle = Sizzle; +} +// EXPOSE + +})( window ); diff --git a/graphene-webdriver/graphene-webdriver-spi/src/main/java/org/jboss/arquillian/graphene/spi/annotations/FindBy.java b/graphene-webdriver/graphene-webdriver-spi/src/main/java/org/jboss/arquillian/graphene/spi/annotations/FindBy.java new file mode 100644 index 000000000..12083207d --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-spi/src/main/java/org/jboss/arquillian/graphene/spi/annotations/FindBy.java @@ -0,0 +1,59 @@ +/** + * JBoss, Home of Professional Open Source + * Copyright 2012, Red Hat, Inc. and individual contributors + * by the @authors tag. See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
+ */ +package org.jboss.arquillian.graphene.spi.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.openqa.selenium.support.How; + +/** + * @author <a href="mailto:[email protected]">Juraj Huska</a> + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +public @interface FindBy { + + How how() default How.ID; + + String using() default ""; + + String id() default ""; + + String name() default ""; + + String className() default ""; + + String css() default ""; + + String tagName() default ""; + + String linkText() default ""; + + String partialLinkText() default ""; + + String xpath() default ""; + + String jquery() default ""; +}
6ac5f539c682c974b12a8b3c6c7f9aad649125ad
Vala
gio-2.0: fix callbacks for sources created with Socket.create_source Fixes bug 617069.
c
https://github.com/GNOME/vala/
diff --git a/vapi/gio-2.0.vapi b/vapi/gio-2.0.vapi index f4f9bb6006..4ead2fa889 100644 --- a/vapi/gio-2.0.vapi +++ b/vapi/gio-2.0.vapi @@ -715,7 +715,7 @@ namespace GLib { public GLib.IOCondition condition_check (GLib.IOCondition condition); public bool condition_wait (GLib.IOCondition condition, GLib.Cancellable? cancellable) throws GLib.Error; public bool connect (GLib.SocketAddress address, GLib.Cancellable? cancellable) throws GLib.Error; - public unowned GLib.TimeoutSource create_source (GLib.IOCondition condition, GLib.Cancellable? cancellable); + public GLib.SocketSource create_source (GLib.IOCondition condition, GLib.Cancellable? cancellable); [CCode (has_construct_function = false)] public Socket.from_fd (int fd) throws GLib.Error; public bool get_blocking (); @@ -844,6 +844,12 @@ namespace GLib { public virtual signal bool incoming (GLib.SocketConnection connection, GLib.Object? source_object); } [Compact] + [CCode (cname = "GSource", cheader_filename = "gio/gio.h")] + public class SocketSource : GLib.Source { + [CCode (cname = "g_source_set_callback")] + public void set_callback ([CCode (type = "GSourceFunc")] owned GLib.SocketSourceFunc func); + } + [Compact] [CCode (copy_function = "g_srv_target_copy", type_id = "G_TYPE_SRV_TARGET", cheader_filename = "gio/gio.h")] public class SrvTarget { [CCode (has_construct_function = false)] diff --git a/vapi/packages/gio-2.0/gio-2.0-custom.vala b/vapi/packages/gio-2.0/gio-2.0-custom.vala index 42ab86c390..ae52b16afe 100644 --- a/vapi/packages/gio-2.0/gio-2.0-custom.vala +++ b/vapi/packages/gio-2.0/gio-2.0-custom.vala @@ -34,6 +34,13 @@ namespace GLib { public abstract GLib.FileMonitor monitor_file (GLib.FileMonitorFlags flags, GLib.Cancellable? 
cancellable = null) throws GLib.IOError; } + [Compact] + [CCode (cname = "GSource", ref_function = "g_source_ref", unref_function = "g_source_unref")] + public class SocketSource : GLib.Source { + [CCode (cname = "g_source_set_callback")] + public void set_callback ([CCode (type = "GSourceFunc")] owned SocketSourceFunc func); + } + [CCode (cname = "g_file_hash", cheader_filename = "gio/gio.h")] public static GLib.HashFunc file_hash; [CCode (cname = "g_file_equal", cheader_filename = "gio/gio.h")] diff --git a/vapi/packages/gio-2.0/gio-2.0.metadata b/vapi/packages/gio-2.0/gio-2.0.metadata index 74e9aaed51..f783e1a11e 100644 --- a/vapi/packages/gio-2.0/gio-2.0.metadata +++ b/vapi/packages/gio-2.0/gio-2.0.metadata @@ -113,6 +113,7 @@ g_resolver_lookup_service transfer_ownership="1" type_arguments="SrvTarget" g_resolver_lookup_service_finish transfer_ownership="1" type_arguments="SrvTarget" g_seekable_truncate abstract="1" vfunc_name="truncate_fn" g_seekable_truncate_fn hidden="1" +g_socket_create_source type_name="SocketSource" transfer_ownership="1" g_socket_listener_add_address.source_object nullable="1" g_socket_listener_add_inet_port.source_object nullable="1" g_socket_listener_add_socket.source_object nullable="1"
a8806266b898ee71376ce9722de407ece9ed359a
hbase
HBASE-1200 Add bloomfilters--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@946464 13f79535-47bb-0310-9956-ffa450edef68-
a
https://github.com/apache/hbase
diff --git a/.gitignore b/.gitignore index 4208d615bea4..7b4653af1990 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,5 @@ /core/build/ /core/test/ *.iml +*.orig +*~ diff --git a/CHANGES.txt b/CHANGES.txt index c5d529c21c82..f29ce0762193 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -653,6 +653,7 @@ Release 0.21.0 - Unreleased HBASE-2529 Make OldLogsCleaner easier to extend HBASE-2527 Add the ability to easily extend some HLog actions HBASE-2559 Set hbase.hregion.majorcompaction to 0 to disable + HBASE-1200 Add bloomfilters (Nicolas Spiegelberg via Stack) OPTIMIZATIONS HBASE-410 [testing] Speed up the test suite diff --git a/core/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/core/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index 6269e3e57986..8e3bd5361f4e 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ b/core/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -29,6 +29,8 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.regionserver.StoreFile; +import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableComparable; @@ -50,7 +52,8 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor> // Version 5 was when bloom filter descriptors were removed. // Version 6 adds metadata as a map where keys and values are byte[]. // Version 7 -- add new compression and hfile blocksize to HColumnDescriptor (HBASE-1217) - private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)7; + // Version 8 -- reintroduction of bloom filters, changed from boolean to enum + private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)8; /** * The type of compression. 
@@ -113,7 +116,7 @@ public static enum CompressionType { /** * Default setting for whether or not to use bloomfilters. */ - public static final boolean DEFAULT_BLOOMFILTER = false; + public static final String DEFAULT_BLOOMFILTER = StoreFile.BloomType.NONE.toString(); /** * Default time to live of cell contents. @@ -166,7 +169,7 @@ public HColumnDescriptor(final byte [] familyName) { this (familyName == null || familyName.length <= 0? HConstants.EMPTY_BYTE_ARRAY: familyName, DEFAULT_VERSIONS, DEFAULT_COMPRESSION, DEFAULT_IN_MEMORY, DEFAULT_BLOCKCACHE, - DEFAULT_TTL, false); + DEFAULT_TTL, DEFAULT_BLOOMFILTER); } /** @@ -195,7 +198,7 @@ public HColumnDescriptor(HColumnDescriptor desc) { * @param blockCacheEnabled If true, MapFile blocks should be cached * @param timeToLive Time-to-live of cell contents, in seconds * (use HConstants.FOREVER for unlimited TTL) - * @param bloomFilter Enable the specified bloom filter for this column + * @param bloomFilter Bloom filter type for this column * * @throws IllegalArgumentException if passed a family name that is made of * other than 'word' characters: i.e. 
<code>[a-zA-Z_0-9]</code> or contains @@ -205,7 +208,7 @@ public HColumnDescriptor(HColumnDescriptor desc) { public HColumnDescriptor(final byte [] familyName, final int maxVersions, final String compression, final boolean inMemory, final boolean blockCacheEnabled, - final int timeToLive, final boolean bloomFilter) { + final int timeToLive, final String bloomFilter) { this(familyName, maxVersions, compression, inMemory, blockCacheEnabled, DEFAULT_BLOCKSIZE, timeToLive, bloomFilter, DEFAULT_REPLICATION_SCOPE); } @@ -222,7 +225,7 @@ public HColumnDescriptor(final byte [] familyName, final int maxVersions, * @param blocksize * @param timeToLive Time-to-live of cell contents, in seconds * (use HConstants.FOREVER for unlimited TTL) - * @param bloomFilter Enable the specified bloom filter for this column + * @param bloomFilter Bloom filter type for this column * @param scope The scope tag for this column * * @throws IllegalArgumentException if passed a family name that is made of @@ -233,7 +236,7 @@ public HColumnDescriptor(final byte [] familyName, final int maxVersions, public HColumnDescriptor(final byte [] familyName, final int maxVersions, final String compression, final boolean inMemory, final boolean blockCacheEnabled, final int blocksize, - final int timeToLive, final boolean bloomFilter, final int scope) { + final int timeToLive, final String bloomFilter, final int scope) { isLegalFamilyName(familyName); this.name = familyName; @@ -248,7 +251,8 @@ public HColumnDescriptor(final byte [] familyName, final int maxVersions, setTimeToLive(timeToLive); setCompressionType(Compression.Algorithm. valueOf(compression.toUpperCase())); - setBloomfilter(bloomFilter); + setBloomFilterType(StoreFile.BloomType. 
+ valueOf(bloomFilter.toUpperCase())); setBlocksize(blocksize); setScope(scope); } @@ -464,20 +468,21 @@ public void setBlockCacheEnabled(boolean blockCacheEnabled) { } /** - * @return true if a bloom filter is enabled + * @return bloom filter type used for new StoreFiles in ColumnFamily */ - public boolean isBloomfilter() { - String value = getValue(BLOOMFILTER); - if (value != null) - return Boolean.valueOf(value).booleanValue(); - return DEFAULT_BLOOMFILTER; + public StoreFile.BloomType getBloomFilterType() { + String n = getValue(BLOOMFILTER); + if (n == null) { + n = DEFAULT_BLOOMFILTER; + } + return StoreFile.BloomType.valueOf(n.toUpperCase()); } /** - * @param onOff Enable/Disable bloom filter + * @param toggle bloom filter type */ - public void setBloomfilter(final boolean onOff) { - setValue(BLOOMFILTER, Boolean.toString(onOff)); + public void setBloomFilterType(final StoreFile.BloomType bt) { + setValue(BLOOMFILTER, bt.toString()); } /** @@ -513,10 +518,6 @@ public String toString() { values.entrySet()) { String key = Bytes.toString(e.getKey().get()); String value = Bytes.toString(e.getValue().get()); - if (key != null && key.toUpperCase().equals(BLOOMFILTER)) { - // Don't emit bloomfilter. Its not working. - continue; - } s.append(", "); s.append(key); s.append(" => '"); @@ -576,8 +577,8 @@ public void readFields(DataInput in) throws IOException { int ordinal = in.readInt(); setCompressionType(Compression.Algorithm.values()[ordinal]); setInMemory(in.readBoolean()); - setBloomfilter(in.readBoolean()); - if (isBloomfilter() && version < 5) { + setBloomFilterType(in.readBoolean() ? BloomType.ROW : BloomType.NONE); + if (getBloomFilterType() != BloomType.NONE && version < 5) { // If a bloomFilter is enabled and the column descriptor is less than // version 5, we need to skip over it to read the rest of the column // descriptor. 
There are no BloomFilterDescriptors written to disk for @@ -593,7 +594,7 @@ public void readFields(DataInput in) throws IOException { setTimeToLive(in.readInt()); } } else { - // version 7+ + // version 6+ this.name = Bytes.readByteArray(in); this.values.clear(); int numValues = in.readInt(); @@ -602,6 +603,15 @@ public void readFields(DataInput in) throws IOException { ImmutableBytesWritable value = new ImmutableBytesWritable(); key.readFields(in); value.readFields(in); + + // in version 8, the BloomFilter setting changed from bool to enum + if (version < 8 && Bytes.toString(key.get()).equals(BLOOMFILTER)) { + value.set(Bytes.toBytes( + Boolean.getBoolean(Bytes.toString(value.get())) + ? BloomType.ROW.toString() + : BloomType.NONE.toString())); + } + values.put(key, value); } if (version == 6) { diff --git a/core/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/core/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java index d0c220e9f404..0d57270ba040 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java +++ b/core/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java @@ -33,6 +33,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.hfile.Compression; +import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableComparable; @@ -667,7 +668,8 @@ public static Path getTableDir(Path rootdir, final byte [] tableName) { new HColumnDescriptor[] { new HColumnDescriptor(HConstants.CATALOG_FAMILY, 10, // Ten is arbitrary number. Keep versions to help debuggging. 
Compression.Algorithm.NONE.getName(), true, true, 8 * 1024, - HConstants.FOREVER, false, HConstants.REPLICATION_SCOPE_LOCAL) }); + HConstants.FOREVER, StoreFile.BloomType.NONE.toString(), + HConstants.REPLICATION_SCOPE_LOCAL) }); /** Table descriptor for <code>.META.</code> catalog table */ public static final HTableDescriptor META_TABLEDESC = new HTableDescriptor( @@ -675,9 +677,11 @@ public static Path getTableDir(Path rootdir, final byte [] tableName) { new HColumnDescriptor(HConstants.CATALOG_FAMILY, 10, // Ten is arbitrary number. Keep versions to help debuggging. Compression.Algorithm.NONE.getName(), true, true, 8 * 1024, - HConstants.FOREVER, false, HConstants.REPLICATION_SCOPE_LOCAL), + HConstants.FOREVER, StoreFile.BloomType.NONE.toString(), + HConstants.REPLICATION_SCOPE_LOCAL), new HColumnDescriptor(HConstants.CATALOG_HISTORIAN_FAMILY, HConstants.ALL_VERSIONS, Compression.Algorithm.NONE.getName(), false, false, 8 * 1024, - HConstants.WEEK_IN_SECONDS, false, HConstants.REPLICATION_SCOPE_LOCAL)}); + HConstants.WEEK_IN_SECONDS,StoreFile.BloomType.NONE.toString(), + HConstants.REPLICATION_SCOPE_LOCAL)}); } diff --git a/core/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/core/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 8aac19aa572e..fc5494b4a9eb 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/core/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -945,7 +945,7 @@ public boolean isDeleteFamily() { System.arraycopy(this.bytes, o, result, 0, l); return result; } - + //--------------------------------------------------------------------------- // // KeyValue splitter @@ -1371,7 +1371,7 @@ int compareColumns(final KeyValue left, final short lrowlength, } /** - * Compares the row and column of two keyvalues + * Compares the row and column of two keyvalues for equality * @param left * @param right * @return True if same row and column. 
@@ -1380,10 +1380,10 @@ public boolean matchingRowColumn(final KeyValue left, final KeyValue right) { short lrowlength = left.getRowLength(); short rrowlength = right.getRowLength(); - if (!matchingRows(left, lrowlength, right, rrowlength)) { - return false; - } - return compareColumns(left, lrowlength, right, rrowlength) == 0; + // TsOffset = end of column data. just comparing Row+CF length of each + return left.getTimestampOffset() == right.getTimestampOffset() && + matchingRows(left, lrowlength, right, rrowlength) && + compareColumns(left, lrowlength, right, rrowlength) == 0; } /** @@ -1396,6 +1396,7 @@ public boolean matchingRows(final KeyValue left, final byte [] right) { } /** + * Compares the row of two keyvalues for equality * @param left * @param right * @return True if rows match. @@ -1415,11 +1416,8 @@ public boolean matchingRows(final KeyValue left, final KeyValue right) { */ public boolean matchingRows(final KeyValue left, final short lrowlength, final KeyValue right, final short rrowlength) { - int compare = compareRows(left, lrowlength, right, rrowlength); - if (compare != 0) { - return false; - } - return true; + return lrowlength == rrowlength && + compareRows(left, lrowlength, right, rrowlength) == 0; } public boolean matchingRows(final byte [] left, final int loffset, diff --git a/core/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/core/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 4488cccfd3ba..3433811a7eea 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/core/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -30,6 +30,7 @@ import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.SortedSet; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -45,6 +46,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; +import 
org.apache.hadoop.hbase.KeyValue.KeyComparator; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.KeyValue; @@ -55,6 +57,7 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.RawComparator; +import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; @@ -209,7 +212,7 @@ public static class Writer implements Closeable { private long valuelength = 0; // Used to ensure we write in order. - private final RawComparator<byte []> comparator; + private final RawComparator<byte []> rawComparator; // A stream made per block written. private DataOutputStream out; @@ -239,7 +242,7 @@ public static class Writer implements Closeable { // Meta block system. private ArrayList<byte []> metaNames = new ArrayList<byte []>(); - private ArrayList<byte []> metaData = new ArrayList<byte[]>(); + private ArrayList<Writable> metaData = new ArrayList<Writable>(); // Used compression. Used even if no compression -- 'none'. private final Compression.Algorithm compressAlgo; @@ -273,7 +276,7 @@ public Writer(FileSystem fs, Path path) * @throws IOException */ public Writer(FileSystem fs, Path path, int blocksize, - String compress, final RawComparator<byte []> comparator) + String compress, final KeyComparator comparator) throws IOException { this(fs, path, blocksize, compress == null? 
DEFAULT_COMPRESSION_ALGORITHM: @@ -292,7 +295,7 @@ public Writer(FileSystem fs, Path path, int blocksize, */ public Writer(FileSystem fs, Path path, int blocksize, Compression.Algorithm compress, - final RawComparator<byte []> comparator) + final KeyComparator comparator) throws IOException { this(fs.create(path), blocksize, compress, comparator); this.closeOutputStream = true; @@ -309,7 +312,7 @@ public Writer(FileSystem fs, Path path, int blocksize, * @throws IOException */ public Writer(final FSDataOutputStream ostream, final int blocksize, - final String compress, final RawComparator<byte []> c) + final String compress, final KeyComparator c) throws IOException { this(ostream, blocksize, Compression.getCompressionAlgorithmByName(compress), c); @@ -324,12 +327,12 @@ public Writer(final FSDataOutputStream ostream, final int blocksize, * @throws IOException */ public Writer(final FSDataOutputStream ostream, final int blocksize, - final Compression.Algorithm compress, final RawComparator<byte []> c) + final Compression.Algorithm compress, final KeyComparator c) throws IOException { this.outputStream = ostream; this.closeOutputStream = false; this.blocksize = blocksize; - this.comparator = c == null? Bytes.BYTES_RAWCOMPARATOR: c; + this.rawComparator = c == null? Bytes.BYTES_RAWCOMPARATOR: c; this.name = this.outputStream.toString(); this.compressAlgo = compress == null? DEFAULT_COMPRESSION_ALGORITHM: compress; @@ -423,11 +426,21 @@ private int releaseCompressingStream(final DataOutputStream dos) * small, consider adding to file info using * {@link #appendFileInfo(byte[], byte[])} * @param metaBlockName name of the block - * @param bytes uninterpreted bytes of the block. 
+ * @param content will call readFields to get data later (DO NOT REUSE) */ - public void appendMetaBlock(String metaBlockName, byte [] bytes) { - metaNames.add(Bytes.toBytes(metaBlockName)); - metaData.add(bytes); + public void appendMetaBlock(String metaBlockName, Writable content) { + byte[] key = Bytes.toBytes(metaBlockName); + int i; + for (i = 0; i < metaNames.size(); ++i) { + // stop when the current key is greater than our own + byte[] cur = metaNames.get(i); + if (this.rawComparator.compare(cur, 0, cur.length, key, 0, key.length) + > 0) { + break; + } + } + metaNames.add(i, key); + metaData.add(i, content); } /** @@ -508,7 +521,7 @@ public void append(final byte [] key, final byte [] value) * @param vlength * @throws IOException */ - public void append(final byte [] key, final int koffset, final int klength, + private void append(final byte [] key, final int koffset, final int klength, final byte [] value, final int voffset, final int vlength) throws IOException { boolean dupKey = checkKey(key, koffset, klength); @@ -552,7 +565,7 @@ private boolean checkKey(final byte [] key, final int offset, final int length) MAXIMUM_KEY_LENGTH); } if (this.lastKeyBuffer != null) { - int keyComp = this.comparator.compare(this.lastKeyBuffer, this.lastKeyOffset, + int keyComp = this.rawComparator.compare(this.lastKeyBuffer, this.lastKeyOffset, this.lastKeyLength, key, offset, length); if (keyComp > 0) { throw new IOException("Added a key not lexically larger than" + @@ -595,10 +608,16 @@ public void close() throws IOException { metaOffsets = new ArrayList<Long>(metaNames.size()); metaDataSizes = new ArrayList<Integer>(metaNames.size()); for (int i = 0 ; i < metaNames.size() ; ++ i ) { - metaOffsets.add(Long.valueOf(outputStream.getPos())); - metaDataSizes. 
- add(Integer.valueOf(METABLOCKMAGIC.length + metaData.get(i).length)); - writeMetaBlock(metaData.get(i)); + // store the beginning offset + long curPos = outputStream.getPos(); + metaOffsets.add(curPos); + // write the metadata content + DataOutputStream dos = getCompressingStream(); + dos.write(METABLOCKMAGIC); + metaData.get(i).write(dos); + int size = releaseCompressingStream(dos); + // store the metadata size + metaDataSizes.add(size); } } @@ -632,17 +651,6 @@ public void close() throws IOException { } } - /* Write a metadata block. - * @param metadata - * @throws IOException - */ - private void writeMetaBlock(final byte [] b) throws IOException { - DataOutputStream dos = getCompressingStream(); - dos.write(METABLOCKMAGIC); - dos.write(b); - releaseCompressingStream(dos); - } - /* * Add last bits of metadata to fileinfo and then write it out. * Reader will be expecting to find all below. @@ -668,7 +676,7 @@ private long writeFileInfo(FSDataOutputStream o) throws IOException { appendFileInfo(this.fileinfo, FileInfo.AVG_VALUE_LEN, Bytes.toBytes(avgValueLen), false); appendFileInfo(this.fileinfo, FileInfo.COMPARATOR, - Bytes.toBytes(this.comparator.getClass().getName()), false); + Bytes.toBytes(this.rawComparator.getClass().getName()), false); long pos = o.getPos(); this.fileinfo.write(o); return pos; @@ -710,6 +718,7 @@ public static class Reader implements Closeable { private final BlockCache cache; public int cacheHits = 0; public int blockLoads = 0; + public int metaLoads = 0; // Whether file is from in-memory store private boolean inMemory = false; @@ -717,15 +726,7 @@ public static class Reader implements Closeable { // Name for this object used when logging or in toString. Is either // the result of a toString on the stream or else is toString of passed // file Path plus metadata key/value pairs. - private String name; - - /* - * Do not expose the default constructor. 
- */ - @SuppressWarnings("unused") - private Reader() throws IOException { - this(null, -1, null, false); - } + protected String name; /** * Opens a HFile. You must load the file info before you can @@ -799,7 +800,8 @@ public boolean inMemory() { * See {@link Writer#appendFileInfo(byte[], byte[])}. * @throws IOException */ - public Map<byte [], byte []> loadFileInfo() throws IOException { + public Map<byte [], byte []> loadFileInfo() + throws IOException { this.trailer = readTrailer(); // Read in the fileinfo and get what we need from it. @@ -889,16 +891,19 @@ protected int blockContainingKey(final byte [] key, int offset, int length) { } /** * @param metaBlockName + * @param cacheBlock Add block to cache, if found * @return Block wrapped in a ByteBuffer * @throws IOException */ - public ByteBuffer getMetaBlock(String metaBlockName) throws IOException { + public ByteBuffer getMetaBlock(String metaBlockName, boolean cacheBlock) + throws IOException { if (trailer.metaIndexCount == 0) { return null; // there are no meta blocks } if (metaIndex == null) { throw new IOException("Meta index not loaded"); } + byte [] mbname = Bytes.toBytes(metaBlockName); int block = metaIndex.blockContainingKey(mbname, 0, mbname.length); if (block == -1) @@ -910,19 +915,45 @@ public ByteBuffer getMetaBlock(String metaBlockName) throws IOException { blockSize = metaIndex.blockOffsets[block+1] - metaIndex.blockOffsets[block]; } - ByteBuffer buf = decompress(metaIndex.blockOffsets[block], - longToInt(blockSize), metaIndex.blockDataSizes[block], true); - byte [] magic = new byte[METABLOCKMAGIC.length]; - buf.get(magic, 0, magic.length); + long now = System.currentTimeMillis(); - if (! Arrays.equals(magic, METABLOCKMAGIC)) { - throw new IOException("Meta magic is bad in block " + block); + // Per meta key from any given file, synchronize reads for said block + synchronized (metaIndex.blockKeys[block]) { + metaLoads++; + // Check cache for block. If found return. 
+ if (cache != null) { + ByteBuffer cachedBuf = cache.getBlock(name + "meta" + block); + if (cachedBuf != null) { + // Return a distinct 'shallow copy' of the block, + // so pos doesnt get messed by the scanner + cacheHits++; + return cachedBuf.duplicate(); + } + // Cache Miss, please load. + } + + ByteBuffer buf = decompress(metaIndex.blockOffsets[block], + longToInt(blockSize), metaIndex.blockDataSizes[block], true); + byte [] magic = new byte[METABLOCKMAGIC.length]; + buf.get(magic, 0, magic.length); + + if (! Arrays.equals(magic, METABLOCKMAGIC)) { + throw new IOException("Meta magic is bad in block " + block); + } + + // Create a new ByteBuffer 'shallow copy' to hide the magic header + buf = buf.slice(); + + readTime += System.currentTimeMillis() - now; + readOps++; + + // Cache the block + if(cacheBlock && cache != null) { + cache.cacheBlock(name + "meta" + block, buf.duplicate(), inMemory); + } + + return buf; } - // Toss the header. May have to remove later due to performance. - buf.compact(); - buf.limit(buf.limit() - METABLOCKMAGIC.length); - buf.rewind(); - return buf; } /** @@ -952,8 +983,8 @@ ByteBuffer readBlock(int block, boolean cacheBlock, final boolean pread) if (cache != null) { ByteBuffer cachedBuf = cache.getBlock(name + block); if (cachedBuf != null) { - // Return a distinct 'copy' of the block, so pos doesnt get messed by - // the scanner + // Return a distinct 'shallow copy' of the block, + // so pos doesnt get messed by the scanner cacheHits++; return cachedBuf.duplicate(); } @@ -982,11 +1013,12 @@ ByteBuffer readBlock(int block, boolean cacheBlock, final boolean pread) if (!Arrays.equals(magic, DATABLOCKMAGIC)) { throw new IOException("Data magic is bad in block " + block); } - // Toss the header. May have to remove later due to performance. 
- buf.compact(); - buf.limit(buf.limit() - DATABLOCKMAGIC.length); - buf.rewind(); + // 'shallow copy' to hide the header + // NOTE: you WILL GET BIT if you call buf.array() but don't start + // reading at buf.arrayOffset() + buf = buf.slice(); + readTime += System.currentTimeMillis() - now; readOps++; @@ -1045,6 +1077,9 @@ private ByteBuffer decompress(final long offset, final int compressedSize, return this.blockIndex.isEmpty()? null: this.blockIndex.blockKeys[0]; } + /** + * @return number of KV entries in this HFile + */ public int getEntries() { if (!this.isFileInfoLoaded()) { throw new RuntimeException("File info not loaded"); @@ -1061,6 +1096,13 @@ public int getEntries() { } return this.blockIndex.isEmpty()? null: this.lastkey; } + + /** + * @return number of K entries in this HFile's filter. Returns KV count if no filter. + */ + public int getFilterEntries() { + return getEntries(); + } /** * @return Comparator. @@ -1099,7 +1141,7 @@ public void close() throws IOException { /* * Implementation of {@link HFileScanner} interface. */ - private static class Scanner implements HFileScanner { + protected static class Scanner implements HFileScanner { private final Reader reader; private ByteBuffer block; private int currBlock; @@ -1180,6 +1222,11 @@ public boolean next() throws IOException { return true; } + public boolean shouldSeek(final byte[] row, + final SortedSet<byte[]> columns) { + return true; + } + public int seekTo(byte [] key) throws IOException { return seekTo(key, 0, key.length); } @@ -1333,10 +1380,10 @@ public String getTrailerInfo() { * parts of the file. Also includes basic metadata on this file. */ private static class FixedFileTrailer { - // Offset to the data block index. - long dataIndexOffset; // Offset to the fileinfo data, a small block of vitals.. long fileinfoOffset; + // Offset to the data block index. + long dataIndexOffset; // How many index counts are there (aka: block count) int dataIndexCount; // Offset to the meta block index. 
diff --git a/core/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java b/core/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java index 9d891c6e68f3..f5a5dc0ce0f0 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java +++ b/core/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.nio.ByteBuffer; +import java.util.SortedSet; import org.apache.hadoop.hbase.KeyValue; @@ -64,6 +65,17 @@ public interface HFileScanner { */ public boolean seekBefore(byte [] key) throws IOException; public boolean seekBefore(byte []key, int offset, int length) throws IOException; + /** + * Optimization for single key lookups. If the file has a filter, + * perform a lookup on the key. + * @param row the row to scan + * @param family the column family to scan + * @param columns the array of column qualifiers to scan + * @return False if the key definitely does not exist in this ScanFile + * @throws IOException + */ + public boolean shouldSeek(final byte[] row, + final SortedSet<byte[]> columns); /** * Positions this scanner at the start of the file. * @return False if empty file; i.e. 
a call to next would return false and diff --git a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java index 2c81723d365f..9c8e53ef9060 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java +++ b/core/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java @@ -112,7 +112,10 @@ private HFile.Writer getNewWriter(final HFile.Writer writer, private void close(final HFile.Writer w) throws IOException { if (w != null) { - StoreFile.appendMetadata(w, System.currentTimeMillis(), true); + w.appendFileInfo(StoreFile.MAX_SEQ_ID_KEY, + Bytes.toBytes(System.currentTimeMillis())); + w.appendFileInfo(StoreFile.MAJOR_COMPACTION_KEY, + Bytes.toBytes(true)); w.close(); } } diff --git a/core/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java b/core/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java index 716204ea51b9..70f42dc6fa3e 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java +++ b/core/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java @@ -51,7 +51,7 @@ public class KeyValueHeap implements KeyValueScanner, InternalScanner { * @param scanners * @param comparator */ - public KeyValueHeap(List<KeyValueScanner> scanners, KVComparator comparator) { + public KeyValueHeap(List<? 
extends KeyValueScanner> scanners, KVComparator comparator) { this.comparator = new KVScannerComparator(comparator); this.heap = new PriorityQueue<KeyValueScanner>(scanners.size(), this.comparator); diff --git a/core/src/main/java/org/apache/hadoop/hbase/regionserver/MinorCompactingStoreScanner.java b/core/src/main/java/org/apache/hadoop/hbase/regionserver/MinorCompactingStoreScanner.java index 2cb68cdd83b1..4b16540328f9 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/regionserver/MinorCompactingStoreScanner.java +++ b/core/src/main/java/org/apache/hadoop/hbase/regionserver/MinorCompactingStoreScanner.java @@ -33,23 +33,20 @@ * and optionally the memstore-snapshot. */ public class MinorCompactingStoreScanner implements KeyValueScanner, InternalScanner { - private KeyValueHeap heap; private KeyValue.KVComparator comparator; - MinorCompactingStoreScanner(Store store, - List<KeyValueScanner> scanners) { + MinorCompactingStoreScanner(Store store, List<? extends KeyValueScanner> scanners) { comparator = store.comparator; KeyValue firstKv = KeyValue.createFirstOnRow(HConstants.EMPTY_START_ROW); for (KeyValueScanner scanner : scanners ) { scanner.seek(firstKv); } - heap = new KeyValueHeap(scanners, store.comparator); } MinorCompactingStoreScanner(String cfName, KeyValue.KVComparator comparator, - List<KeyValueScanner> scanners) { + List<? 
extends KeyValueScanner> scanners) { this.comparator = comparator; KeyValue firstKv = KeyValue.createFirstOnRow(HConstants.EMPTY_START_ROW); diff --git a/core/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/core/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java index db4ae3b496fc..6c3153b7c622 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java +++ b/core/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java @@ -101,7 +101,7 @@ public class Store implements HConstants, HeapSize { private final HRegion region; private final HColumnDescriptor family; final FileSystem fs; - private final Configuration conf; + final Configuration conf; // ttl in milliseconds. protected long ttl; private long majorCompactionTime; @@ -144,7 +144,6 @@ public class Store implements HConstants, HeapSize { // Comparing KeyValues final KeyValue.KVComparator comparator; - final KeyValue.KVComparator comparatorIgnoringType; /** * Constructor @@ -179,7 +178,6 @@ protected Store(Path basedir, HRegion region, HColumnDescriptor family, this.blocksize = family.getBlocksize(); this.compression = family.getCompression(); this.comparator = info.getComparator(); - this.comparatorIgnoringType = this.comparator.getComparatorIgnoringType(); // getTimeToLive returns ttl in seconds. Convert to milliseconds. this.ttl = family.getTimeToLive(); if (ttl == HConstants.FOREVER) { @@ -415,7 +413,9 @@ private Map<Long, StoreFile> loadStoreFiles() } StoreFile curfile = null; try { - curfile = new StoreFile(fs, p, blockcache, this.conf, this.inMemory); + curfile = new StoreFile(fs, p, blockcache, this.conf, + this.family.getBloomFilterType(), this.inMemory); + curfile.createReader(); } catch (IOException ioe) { LOG.warn("Failed open of " + p + "; presumption is that file was " + "corrupted at flush and lost edits picked up by commit log replay. " + @@ -492,7 +492,7 @@ List<StoreFile> close() throws IOException { // Clear so metrics doesn't find them. 
this.storefiles.clear(); for (StoreFile f: result) { - f.close(); + f.closeReader(); } LOG.debug("closed " + this.storeNameStr); return result; @@ -534,7 +534,7 @@ private StoreFile flushCache(final long logCacheFlushId, private StoreFile internalFlushCache(final SortedSet<KeyValue> set, final long logCacheFlushId) throws IOException { - HFile.Writer writer = null; + StoreFile.Writer writer = null; long flushed = 0; // Don't flush if there are no entries. if (set.size() == 0) { @@ -546,7 +546,7 @@ private StoreFile internalFlushCache(final SortedSet<KeyValue> set, // if we fail. synchronized (flushLock) { // A. Write the map out to the disk - writer = getWriter(); + writer = createWriter(this.homedir, set.size()); int entries = 0; try { for (KeyValue kv: set) { @@ -559,13 +559,13 @@ private StoreFile internalFlushCache(final SortedSet<KeyValue> set, } finally { // Write out the log sequence number that corresponds to this output // hfile. The hfile is current up to and including logCacheFlushId. - StoreFile.appendMetadata(writer, logCacheFlushId); + writer.appendMetadata(logCacheFlushId, false); writer.close(); } } StoreFile sf = new StoreFile(this.fs, writer.getPath(), blockcache, - this.conf, this.inMemory); - Reader r = sf.getReader(); + this.conf, this.family.getBloomFilterType(), this.inMemory); + Reader r = sf.createReader(); this.storeSize += r.length(); if(LOG.isDebugEnabled()) { LOG.debug("Added " + sf + ", entries=" + r.getEntries() + @@ -577,22 +577,16 @@ private StoreFile internalFlushCache(final SortedSet<KeyValue> set, return sf; } - /** - * @return Writer for this store. - * @throws IOException - */ - HFile.Writer getWriter() throws IOException { - return getWriter(this.homedir); - } - /* * @return Writer for this store. * @param basedir Directory to put writer in. 
* @throws IOException */ - private HFile.Writer getWriter(final Path basedir) throws IOException { - return StoreFile.getWriter(this.fs, basedir, this.blocksize, - this.compression, this.comparator.getRawComparator()); + private StoreFile.Writer createWriter(final Path basedir, int maxKeyCount) + throws IOException { + return StoreFile.createWriter(this.fs, basedir, this.blocksize, + this.compression, this.comparator, this.conf, + this.family.getBloomFilterType(), maxKeyCount); } /* @@ -880,13 +874,25 @@ private boolean isMajorCompaction(final List<StoreFile> filesToCompact) private HFile.Writer compact(final List<StoreFile> filesToCompact, final boolean majorCompaction, final long maxId) throws IOException { + // calculate maximum key count after compaction (for blooms) + int maxKeyCount = 0; + for (StoreFile file : filesToCompact) { + StoreFile.Reader r = file.getReader(); + if (r != null) { + // NOTE: getFilterEntries could cause under-sized blooms if the user + // switches bloom type (e.g. from ROW to ROWCOL) + maxKeyCount += (r.getBloomFilterType() == family.getBloomFilterType()) + ? r.getFilterEntries() : r.getEntries(); + } + } + // For each file, obtain a scanner: - List<KeyValueScanner> scanners = StoreFileScanner.getScannersForStoreFiles( - filesToCompact, false, false); + List<StoreFileScanner> scanners = StoreFileScanner + .getScannersForStoreFiles(filesToCompact, false, false); // Make the instantiation lazy in case compaction produces no product; i.e. // where all source cells are expired or deleted. 
- HFile.Writer writer = null; + StoreFile.Writer writer = null; try { if (majorCompaction) { InternalScanner scanner = null; @@ -901,7 +907,7 @@ private HFile.Writer compact(final List<StoreFile> filesToCompact, // output to writer: for (KeyValue kv : kvs) { if (writer == null) { - writer = getWriter(this.regionCompactionDir); + writer = createWriter(this.regionCompactionDir, maxKeyCount); } writer.append(kv); } @@ -916,7 +922,7 @@ private HFile.Writer compact(final List<StoreFile> filesToCompact, MinorCompactingStoreScanner scanner = null; try { scanner = new MinorCompactingStoreScanner(this, scanners); - writer = getWriter(this.regionCompactionDir); + writer = createWriter(this.regionCompactionDir, maxKeyCount); while (scanner.next(writer)) { // Nothing to do } @@ -927,7 +933,7 @@ private HFile.Writer compact(final List<StoreFile> filesToCompact, } } finally { if (writer != null) { - StoreFile.appendMetadata(writer, maxId, majorCompaction); + writer.appendMetadata(maxId, majorCompaction); writer.close(); } } @@ -971,7 +977,9 @@ private StoreFile completeCompaction(final List<StoreFile> compactedFiles, LOG.error("Failed move of compacted file " + compactedFile.getPath(), e); return null; } - result = new StoreFile(this.fs, p, blockcache, this.conf, this.inMemory); + result = new StoreFile(this.fs, p, blockcache, this.conf, + this.family.getBloomFilterType(), this.inMemory); + result.createReader(); } this.lock.writeLock().lock(); try { @@ -1001,7 +1009,7 @@ private StoreFile completeCompaction(final List<StoreFile> compactedFiles, notifyChangedReadersObservers(); // Finally, delete old store files. 
for (StoreFile hsf: compactedFiles) { - hsf.delete(); + hsf.deleteReader(); } } catch (IOException e) { e = RemoteExceptionHandler.checkIOException(e); @@ -1570,7 +1578,7 @@ public boolean hasTooManyStoreFiles() { } public static final long FIXED_OVERHEAD = ClassSize.align( - ClassSize.OBJECT + (17 * ClassSize.REFERENCE) + + ClassSize.OBJECT + (16 * ClassSize.REFERENCE) + (6 * Bytes.SIZEOF_LONG) + (3 * Bytes.SIZEOF_INT) + Bytes.SIZEOF_BOOLEAN + ClassSize.align(ClassSize.ARRAY)); diff --git a/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java index 038c09e4f9cf..80bf09a19ad0 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java +++ b/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java @@ -21,26 +21,37 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; -import org.apache.hadoop.hbase.io.HalfHFileReader; +import org.apache.hadoop.hbase.KeyValue.KVComparator; +import org.apache.hadoop.hbase.KeyValue.KeyComparator; +import org.apache.hadoop.hbase.io.HalfStoreFileReader; import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.io.hfile.BlockCache; import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.HFile; -import org.apache.hadoop.hbase.io.hfile.HFile.Reader; +import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.io.hfile.LruBlockCache; +import org.apache.hadoop.hbase.util.BloomFilter; +import org.apache.hadoop.hbase.util.ByteBloomFilter; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.Hash; import 
org.apache.hadoop.util.StringUtils; import java.io.FileNotFoundException; import java.io.IOException; import java.lang.management.ManagementFactory; import java.lang.management.MemoryUsage; +import java.nio.ByteBuffer; +import java.text.DecimalFormat; +import java.text.NumberFormat; +import java.util.Arrays; import java.util.Map; +import java.util.SortedSet; import java.util.Random; import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Matcher; @@ -49,11 +60,11 @@ /** * A Store data file. Stores usually have one or more of these files. They * are produced by flushing the memstore to disk. To - * create, call {@link #getWriter(FileSystem, Path)} and append data. Be + * create, call {@link #createWriter(FileSystem, Path, int)} and append data. Be * sure to add any metadata before calling close on the Writer * (Use the appendMetadata convenience methods). On close, a StoreFile is * sitting in the Filesystem. To refer to it, create a StoreFile instance - * passing filesystem and path. To read, call {@link #getReader()}. + * passing filesystem and path. To read, call {@link #createReader()}. * <p>StoreFiles may also reference store files in another Store. */ public class StoreFile implements HConstants { @@ -65,7 +76,7 @@ public class StoreFile implements HConstants { // Make default block size for StoreFiles 8k while testing. TODO: FIX! // Need to make it 8k for testing. - private static final int DEFAULT_BLOCKSIZE_SMALL = 8 * 1024; + public static final int DEFAULT_BLOCKSIZE_SMALL = 8 * 1024; private final FileSystem fs; // This file's path. @@ -80,16 +91,23 @@ public class StoreFile implements HConstants { private boolean inMemory; // Keys for metadata stored in backing HFile. - private static final byte [] MAX_SEQ_ID_KEY = Bytes.toBytes("MAX_SEQ_ID_KEY"); + /** Constant for the max sequence ID meta */ + public static final byte [] MAX_SEQ_ID_KEY = Bytes.toBytes("MAX_SEQ_ID_KEY"); // Set when we obtain a Reader. 
private long sequenceid = -1; - private static final byte [] MAJOR_COMPACTION_KEY = + /** Constant for major compaction meta */ + public static final byte [] MAJOR_COMPACTION_KEY = Bytes.toBytes("MAJOR_COMPACTION_KEY"); // If true, this file was product of a major compaction. Its then set // whenever you get a Reader. private AtomicBoolean majorCompaction = null; + static final String BLOOM_FILTER_META_KEY = "BLOOM_FILTER_META"; + static final String BLOOM_FILTER_DATA_KEY = "BLOOM_FILTER_DATA"; + static final byte[] BLOOM_FILTER_TYPE_KEY = + Bytes.toBytes("BLOOM_FILTER_TYPE"); + /* * Regex that will work for straight filenames and for reference names. * If reference, then the regex has more than just one group. Group 1 is @@ -98,11 +116,12 @@ public class StoreFile implements HConstants { private static final Pattern REF_NAME_PARSER = Pattern.compile("^(\\d+)(?:\\.(.+))?$"); - private volatile HFile.Reader reader; + private volatile StoreFile.Reader reader; // Used making file ids. private final static Random rand = new Random(); private final Configuration conf; + private final BloomType bloomType; /** * Constructor, loads a reader and it's indices, etc. May allocate a @@ -112,10 +131,11 @@ public class StoreFile implements HConstants { * @param p The path of the file. * @param blockcache <code>true</code> if the block cache is enabled. * @param conf The current configuration. + * @param bt The bloom type to use for this store file * @throws IOException When opening the reader fails. 
*/ StoreFile(final FileSystem fs, final Path p, final boolean blockcache, - final Configuration conf, final boolean inMemory) + final Configuration conf, final BloomType bt, final boolean inMemory) throws IOException { this.conf = conf; this.fs = fs; @@ -126,7 +146,14 @@ public class StoreFile implements HConstants { this.reference = Reference.read(fs, p); this.referencePath = getReferredToFile(this.path); } - this.reader = open(); + // ignore if the column family config says "no bloom filter" + // even if there is one in the hfile. + if (conf.getBoolean("io.hfile.bloom.enabled", true)) { + this.bloomType = bt; + } else { + this.bloomType = BloomType.NONE; + LOG.info("Ignoring bloom filter check for file (disabled in config)"); + } } /** @@ -255,18 +282,18 @@ public BlockCache getBlockCache() { * Opens reader on this store file. Called by Constructor. * @return Reader for the store file. * @throws IOException - * @see #close() + * @see #closeReader() */ - protected HFile.Reader open() + private StoreFile.Reader open() throws IOException { if (this.reader != null) { throw new IllegalAccessError("Already open"); } if (isReference()) { - this.reader = new HalfHFileReader(this.fs, this.referencePath, + this.reader = new HalfStoreFileReader(this.fs, this.referencePath, getBlockCache(), this.reference); } else { - this.reader = new Reader(this.fs, this.path, getBlockCache(), + this.reader = new StoreFile.Reader(this.fs, this.path, getBlockCache(), this.inMemory); } // Load up indices and fileinfo. @@ -296,44 +323,59 @@ protected HFile.Reader open() this.majorCompaction.set(mc); } } + + if (this.bloomType != BloomType.NONE) { + this.reader.loadBloomfilter(); + } - // TODO read in bloom filter here, ignore if the column family config says - // "no bloom filter" even if there is one in the hfile. + return this.reader; + } + + /** + * @return Reader for StoreFile. 
creates if necessary + * @throws IOException + */ + public StoreFile.Reader createReader() throws IOException { + if (this.reader == null) { + this.reader = open(); + } return this.reader; } /** - * @return Current reader. Must call open first else returns null. + * @return Current reader. Must call createReader first else returns null. + * @throws IOException + * @see {@link #createReader()} */ - public HFile.Reader getReader() { + public StoreFile.Reader getReader() { return this.reader; } /** * @throws IOException */ - public synchronized void close() throws IOException { + public synchronized void closeReader() throws IOException { if (this.reader != null) { this.reader.close(); this.reader = null; } } - @Override - public String toString() { - return this.path.toString() + - (isReference()? "-" + this.referencePath + "-" + reference.toString(): ""); - } - /** * Delete this file * @throws IOException */ - public void delete() throws IOException { - close(); + public void deleteReader() throws IOException { + closeReader(); this.fs.delete(getPath(), true); } + @Override + public String toString() { + return this.path.toString() + + (isReference()? "-" + this.referencePath + "-" + reference.toString(): ""); + } + /** * Utility to help with rename. * @param fs @@ -361,38 +403,47 @@ public static Path rename(final FileSystem fs, final Path src, * @param fs * @param dir Path to family directory. Makes the directory if doesn't exist. * Creates a file with a unique name in this directory. + * @param blocksize size per filesystem block * @return HFile.Writer * @throws IOException */ - public static HFile.Writer getWriter(final FileSystem fs, final Path dir) + public static StoreFile.Writer createWriter(final FileSystem fs, final Path dir, + final int blocksize) throws IOException { - return getWriter(fs, dir, DEFAULT_BLOCKSIZE_SMALL, null, null); + return createWriter(fs,dir,blocksize,null,null,null,BloomType.NONE,0); } /** - * Get a store file writer. 
Client is responsible for closing file when done. - * If metadata, add BEFORE closing using - * {@link #appendMetadata(org.apache.hadoop.hbase.io.hfile.HFile.Writer, long)}. + * Create a store file writer. Client is responsible for closing file when done. + * If metadata, add BEFORE closing using appendMetadata() * @param fs * @param dir Path to family directory. Makes the directory if doesn't exist. * Creates a file with a unique name in this directory. * @param blocksize * @param algorithm Pass null to get default. + * @param conf HBase system configuration. used with bloom filters + * @param bloomType column family setting for bloom filters * @param c Pass null to get default. + * @param maxKeySize peak theoretical entry size (maintains error rate) * @return HFile.Writer * @throws IOException */ - public static HFile.Writer getWriter(final FileSystem fs, final Path dir, - final int blocksize, final Compression.Algorithm algorithm, - final KeyValue.KeyComparator c) + public static StoreFile.Writer createWriter(final FileSystem fs, final Path dir, + final int blocksize, final Compression.Algorithm algorithm, + final KeyValue.KVComparator c, final Configuration conf, + BloomType bloomType, int maxKeySize) throws IOException { if (!fs.exists(dir)) { fs.mkdirs(dir); } Path path = getUniqueFile(fs, dir); - return new HFile.Writer(fs, path, blocksize, - algorithm == null? HFile.DEFAULT_COMPRESSION_ALGORITHM: algorithm, - c == null? KeyValue.KEY_COMPARATOR: c); + if(conf == null || !conf.getBoolean("io.hfile.bloom.enabled", true)) { + bloomType = BloomType.NONE; + } + + return new StoreFile.Writer(fs, path, blocksize, + algorithm == null? HFile.DEFAULT_COMPRESSION_ALGORITHM: algorithm, + conf, c == null? KeyValue.COMPARATOR: c, bloomType, maxKeySize); } /** @@ -442,35 +493,6 @@ static Path getRandomFilename(final FileSystem fs, final Path dir, return p; } - /** - * Write file metadata. 
- * Call before you call close on the passed <code>w</code> since its written - * as metadata to that file. - * - * @param w hfile writer - * @param maxSequenceId Maximum sequence id. - * @throws IOException - */ - static void appendMetadata(final HFile.Writer w, final long maxSequenceId) - throws IOException { - appendMetadata(w, maxSequenceId, false); - } - - /** - * Writes metadata. - * Call before you call close on the passed <code>w</code> since its written - * as metadata to that file. - * @param maxSequenceId Maximum sequence id. - * @param mc True if this file is product of a major compaction - * @throws IOException - */ - public static void appendMetadata(final HFile.Writer w, final long maxSequenceId, - final boolean mc) - throws IOException { - w.appendFileInfo(MAX_SEQ_ID_KEY, Bytes.toBytes(maxSequenceId)); - w.appendFileInfo(MAJOR_COMPACTION_KEY, Bytes.toBytes(mc)); - } - /* * Write out a split reference. * @param fs @@ -497,4 +519,298 @@ static Path split(final FileSystem fs, final Path splitDir, Path p = new Path(splitDir, f.getPath().getName() + "." + parentRegionName); return r.write(fs, p); } + + public static enum BloomType { + /** + * Bloomfilters disabled + */ + NONE, + /** + * Bloom enabled with Table row as Key + */ + ROW, + /** + * Bloom enabled with Table row & column (family+qualifier) as Key + */ + ROWCOL + } + + /** + * + */ + public static class Reader extends HFile.Reader { + /** Bloom Filter class. Caches only meta, pass in data */ + protected BloomFilter bloomFilter = null; + /** Type of bloom filter (e.g. 
ROW vs ROWCOL) */ + protected BloomType bloomFilterType; + + public Reader(FileSystem fs, Path path, BlockCache cache, + boolean inMemory) + throws IOException { + super(fs, path, cache, inMemory); + } + + public Reader(final FSDataInputStream fsdis, final long size, + final BlockCache cache, final boolean inMemory) { + super(fsdis,size,cache,inMemory); + bloomFilterType = BloomType.NONE; + } + + @Override + public Map<byte [], byte []> loadFileInfo() + throws IOException { + Map<byte [], byte []> fi = super.loadFileInfo(); + + byte[] b = fi.get(BLOOM_FILTER_TYPE_KEY); + if (b != null) { + bloomFilterType = BloomType.valueOf(Bytes.toString(b)); + } + + return fi; + } + + /** + * Load the bloom filter for this HFile into memory. + * Assumes the HFile has already been loaded + */ + public void loadBloomfilter() { + if (this.bloomFilter != null) { + return; // already loaded + } + + // see if bloom filter information is in the metadata + try { + ByteBuffer b = getMetaBlock(BLOOM_FILTER_META_KEY, false); + if (b != null) { + if (bloomFilterType == BloomType.NONE) { + throw new IOException("valid bloom filter type not found in FileInfo"); + } + this.bloomFilter = new ByteBloomFilter(b); + LOG.info("Loaded " + (bloomFilterType==BloomType.ROW? "row":"col") + + " bloom filter metadata for " + name); + } + } catch (IOException e) { + LOG.error("Error reading bloom filter meta -- proceeding without", e); + this.bloomFilter = null; + } catch (IllegalArgumentException e) { + LOG.error("Bad bloom filter meta -- proceeding without", e); + this.bloomFilter = null; + } + } + + BloomFilter getBloomFilter() { + return this.bloomFilter; + } + + /** + * @return bloom type information associated with this store file + */ + public BloomType getBloomFilterType() { + return this.bloomFilterType; + } + + @Override + public int getFilterEntries() { + return (this.bloomFilter != null) ? 
this.bloomFilter.getKeyCount() + : super.getFilterEntries(); + } + + @Override + public HFileScanner getScanner(boolean cacheBlocks, final boolean pread) { + return new Scanner(this, cacheBlocks, pread); + } + + protected class Scanner extends HFile.Reader.Scanner { + public Scanner(Reader r, boolean cacheBlocks, final boolean pread) { + super(r, cacheBlocks, pread); + } + + @Override + public boolean shouldSeek(final byte[] row, + final SortedSet<byte[]> columns) { + if (bloomFilter == null) { + return true; + } + + byte[] key; + switch(bloomFilterType) { + case ROW: + key = row; + break; + case ROWCOL: + if (columns.size() == 1) { + byte[] col = columns.first(); + key = Bytes.add(row, col); + break; + } + //$FALL-THROUGH$ + default: + return true; + } + + try { + ByteBuffer bloom = getMetaBlock(BLOOM_FILTER_DATA_KEY, true); + if (bloom != null) { + return bloomFilter.contains(key, bloom); + } + } catch (IOException e) { + LOG.error("Error reading bloom filter data -- proceeding without", + e); + bloomFilter = null; + } catch (IllegalArgumentException e) { + LOG.error("Bad bloom filter data -- proceeding without", e); + bloomFilter = null; + } + + return true; + } + + } + } + + /** + * + */ + public static class Writer extends HFile.Writer { + private final BloomFilter bloomFilter; + private final BloomType bloomType; + private KVComparator kvComparator; + private KeyValue lastKv = null; + private byte[] lastByteArray = null; + + /** + * Creates an HFile.Writer that also write helpful meta data. 
+ * @param fs file system to write to + * @param path file name to create + * @param blocksize HDFS block size + * @param compress HDFS block compression + * @param conf user configuration + * @param comparator key comparator + * @param bloomType bloom filter setting + * @param maxKeys maximum amount of keys to add (for blooms) + * @throws IOException problem writing to FS + */ + public Writer(FileSystem fs, Path path, int blocksize, + Compression.Algorithm compress, final Configuration conf, + final KVComparator comparator, BloomType bloomType, int maxKeys) + throws IOException { + super(fs, path, blocksize, compress, comparator.getRawComparator()); + + this.kvComparator = comparator; + + if (bloomType != BloomType.NONE && conf != null) { + float err = conf.getFloat("io.hfile.bloom.error.rate", (float)0.01); + int maxFold = conf.getInt("io.hfile.bloom.max.fold", 7); + + this.bloomFilter = new ByteBloomFilter(maxKeys, err, + Hash.getHashType(conf), maxFold); + this.bloomFilter.allocBloom(); + this.bloomType = bloomType; + } else { + this.bloomFilter = null; + this.bloomType = BloomType.NONE; + } + } + + /** + * Writes meta data. + * Call before {@link #close()} since its written as meta data to this file. + * @param maxSequenceId Maximum sequence id. + * @param majorCompaction True if this file is product of a major compaction + * @throws IOException problem writing to FS + */ + public void appendMetadata(final long maxSequenceId, + final boolean majorCompaction) + throws IOException { + appendFileInfo(MAX_SEQ_ID_KEY, Bytes.toBytes(maxSequenceId)); + appendFileInfo(MAJOR_COMPACTION_KEY, Bytes.toBytes(majorCompaction)); + } + + @Override + public void append(final KeyValue kv) + throws IOException { + if (this.bloomFilter != null) { + // only add to the bloom filter on a new, unique key + boolean newKey = true; + if (this.lastKv != null) { + switch(bloomType) { + case ROW: + newKey = ! kvComparator.matchingRows(kv, lastKv); + break; + case ROWCOL: + newKey = ! 
kvComparator.matchingRowColumn(kv, lastKv); + break; + case NONE: + newKey = false; + } + } + if (newKey) { + /* + * http://2.bp.blogspot.com/_Cib_A77V54U/StZMrzaKufI/AAAAAAAAADo/ZhK7bGoJdMQ/s400/KeyValue.png + * Key = RowLen + Row + FamilyLen + Column [Family + Qualifier] + TimeStamp + * + * 2 Types of Filtering: + * 1. Row = Row + * 2. RowCol = Row + Qualifier + */ + switch (bloomType) { + case ROW: + this.bloomFilter.add(kv.getBuffer(), kv.getRowOffset(), + kv.getRowLength()); + break; + case ROWCOL: + // merge(row, qualifier) + int ro = kv.getRowOffset(); + int rl = kv.getRowLength(); + int qo = kv.getQualifierOffset(); + int ql = kv.getQualifierLength(); + byte [] result = new byte[rl + ql]; + System.arraycopy(kv.getBuffer(), ro, result, 0, rl); + System.arraycopy(kv.getBuffer(), qo, result, rl, ql); + + this.bloomFilter.add(result); + break; + default: + } + this.lastKv = kv; + } + } + super.append(kv); + } + + @Override + public void append(final byte [] key, final byte [] value) + throws IOException { + if (this.bloomFilter != null) { + // only add to the bloom filter on a new row + if(this.lastByteArray == null || !Arrays.equals(key, lastByteArray)) { + this.bloomFilter.add(key); + this.lastByteArray = key; + } + } + super.append(key, value); + } + + @Override + public void close() + throws IOException { + // make sure we wrote something to the bloom before adding it + if (this.bloomFilter != null && this.bloomFilter.getKeyCount() > 0) { + bloomFilter.finalize(); + if (this.bloomFilter.getMaxKeys() > 0) { + int b = this.bloomFilter.getByteSize(); + int k = this.bloomFilter.getKeyCount(); + int m = this.bloomFilter.getMaxKeys(); + StoreFile.LOG.info("Bloom added to HFile. 
" + b + "B, " + + k + "/" + m + " (" + NumberFormat.getPercentInstance().format( + ((double)k) / ((double)m)) + ")"); + } + appendMetaBlock(BLOOM_FILTER_META_KEY, bloomFilter.getMetaWriter()); + appendMetaBlock(BLOOM_FILTER_DATA_KEY, bloomFilter.getDataWriter()); + appendFileInfo(BLOOM_FILTER_TYPE_KEY, Bytes.toBytes(bloomType.toString())); + } + super.close(); + } + + } } diff --git a/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java b/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java index d9e866ad4951..52d228bbda80 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java +++ b/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java @@ -53,12 +53,12 @@ private StoreFileScanner(HFileScanner hfs) { * Return an array of scanners corresponding to the given * set of store files. */ - public static List<KeyValueScanner> getScannersForStoreFiles( + public static List<StoreFileScanner> getScannersForStoreFiles( Collection<StoreFile> filesToCompact, boolean cacheBlocks, boolean usePread) { - List<KeyValueScanner> scanners = - new ArrayList<KeyValueScanner>(filesToCompact.size()); + List<StoreFileScanner> scanners = + new ArrayList<StoreFileScanner>(filesToCompact.size()); for (StoreFile file : filesToCompact) { Reader r = file.getReader(); if (r == null) { @@ -72,6 +72,10 @@ public static List<KeyValueScanner> getScannersForStoreFiles( return scanners; } + public HFileScanner getHFileScanner() { + return this.hfs; + } + public String toString() { return "StoreFileScanner[" + hfs.toString() + ", cur=" + cur + "]"; } diff --git a/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java index 32daa7768df9..fde872c54b53 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java +++ 
b/core/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java @@ -61,10 +61,10 @@ class StoreScanner implements KeyValueScanner, InternalScanner, ChangedReadersOb store.versionsToReturn(scan.getMaxVersions())); this.isGet = scan.isGetScan(); - List<KeyValueScanner> scanners = getScanners(); + // pass columns = try to filter out unnecessary ScanFiles + List<KeyValueScanner> scanners = getScanners(scan, columns); // Seek all scanners to the initial key - // TODO if scan.isGetScan, use bloomfilters to skip seeking for(KeyValueScanner scanner : scanners) { scanner.seek(matcher.getStartKey()); } @@ -83,7 +83,7 @@ class StoreScanner implements KeyValueScanner, InternalScanner, ChangedReadersOb * @param scan the spec * @param scanners ancilliary scanners */ - StoreScanner(Store store, Scan scan, List<KeyValueScanner> scanners) { + StoreScanner(Store store, Scan scan, List<? extends KeyValueScanner> scanners) { this.store = store; this.cacheBlocks = false; this.isGet = false; @@ -124,9 +124,37 @@ class StoreScanner implements KeyValueScanner, InternalScanner, ChangedReadersOb private List<KeyValueScanner> getScanners() { // First the store file scanners Map<Long, StoreFile> map = this.store.getStorefiles().descendingMap(); + List<StoreFileScanner> sfScanners = StoreFileScanner + .getScannersForStoreFiles(map.values(), cacheBlocks, isGet); List<KeyValueScanner> scanners = - StoreFileScanner.getScannersForStoreFiles(map.values(), - cacheBlocks, isGet); + new ArrayList<KeyValueScanner>(sfScanners.size()+1); + scanners.addAll(sfScanners); + // Then the memstore scanners + scanners.addAll(this.store.memstore.getScanners()); + return scanners; + } + + /* + * @return List of scanners to seek, possibly filtered by StoreFile. 
+ */ + private List<KeyValueScanner> getScanners(Scan scan, + final NavigableSet<byte[]> columns) { + // First the store file scanners + Map<Long, StoreFile> map = this.store.getStorefiles().descendingMap(); + List<StoreFileScanner> sfScanners = StoreFileScanner + .getScannersForStoreFiles(map.values(), cacheBlocks, isGet); + List<KeyValueScanner> scanners = + new ArrayList<KeyValueScanner>(sfScanners.size()+1); + + // exclude scan files that have failed file filters + for(StoreFileScanner sfs : sfScanners) { + if (isGet && + !sfs.getHFileScanner().shouldSeek(scan.getStartRow(), columns)) { + continue; // exclude this hfs + } + scanners.add(sfs); + } + // Then the memstore scanners scanners.addAll(this.store.memstore.getScanners()); return scanners; diff --git a/core/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java b/core/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java index 4547c469c98f..caf53683e95c 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java +++ b/core/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java @@ -146,16 +146,15 @@ public int __getBlocksize() { } /** - * @return true if the BLOOMFILTER attribute is present and true + * @return the value of the BLOOMFILTER attribute or its default if unset */ - public boolean __getBloomfilter() { + public String __getBloomfilter() { Object o = attrs.get(BLOOMFILTER); - return o != null ? - Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOOMFILTER; + return o != null ? 
o.toString() : HColumnDescriptor.DEFAULT_BLOOMFILTER; } /** - * @return the value of the COMPRESSION attribute or its default if it is unset + * @return the value of the COMPRESSION attribute or its default if unset */ public String __getCompression() { Object o = attrs.get(COMPRESSION); @@ -203,8 +202,8 @@ public void __setBlockcache(boolean value) { attrs.put(BLOCKCACHE, Boolean.toString(value)); } - public void __setBloomfilter(boolean value) { - attrs.put(BLOOMFILTER, Boolean.toString(value)); + public void __setBloomfilter(String value) { + attrs.put(BLOOMFILTER, value); } /** diff --git a/core/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/core/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java index 9be58d7d51eb..f319751a5c31 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java +++ b/core/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java @@ -26,6 +26,8 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.hfile.Compression; +import org.apache.hadoop.hbase.regionserver.StoreFile; +import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType; import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor; import org.apache.hadoop.hbase.thrift.generated.IllegalArgument; import org.apache.hadoop.hbase.thrift.generated.TCell; @@ -47,10 +49,8 @@ static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in) throws IllegalArgument { Compression.Algorithm comp = Compression.getCompressionAlgorithmByName(in.compression.toLowerCase()); - boolean bloom = false; - if (in.bloomFilterType.compareTo("NONE") != 0) { - bloom = true; - } + StoreFile.BloomType bt = + BloomType.valueOf(in.bloomFilterType); if (in.name == null || in.name.length <= 0) { throw new IllegalArgument("column name is empty"); @@ -58,7 +58,7 @@ static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in) byte [] parsedName = 
KeyValue.parseColumn(in.name)[0]; HColumnDescriptor col = new HColumnDescriptor(parsedName, in.maxVersions, comp.getName(), in.inMemory, in.blockCacheEnabled, - in.timeToLive, bloom); + in.timeToLive, bt.toString()); return col; } @@ -77,7 +77,7 @@ static public ColumnDescriptor colDescFromHbase(HColumnDescriptor in) { col.compression = in.getCompression().toString(); col.inMemory = in.isInMemory(); col.blockCacheEnabled = in.isBlockCacheEnabled(); - col.bloomFilterType = Boolean.toString(in.isBloomfilter()); + col.bloomFilterType = in.getBloomFilterType().toString(); return col; } @@ -147,4 +147,4 @@ static public List<TRowResult> rowResultFromHBase(Result in) { Result [] result = { in }; return rowResultFromHBase(result); } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/apache/hadoop/hbase/util/Hash.java b/core/src/main/java/org/apache/hadoop/hbase/util/Hash.java index 9e1d9e29fffe..0a533d9f4101 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/util/Hash.java +++ b/core/src/main/java/org/apache/hadoop/hbase/util/Hash.java @@ -106,16 +106,29 @@ public int hash(byte[] bytes) { * @return hash value */ public int hash(byte[] bytes, int initval) { - return hash(bytes, bytes.length, initval); + return hash(bytes, 0, bytes.length, initval); } /** * Calculate a hash using bytes from 0 to <code>length</code>, and * the provided seed value * @param bytes input bytes - * @param length length of the valid bytes to consider + * @param length length of the valid bytes after offset to consider * @param initval seed value * @return hash value */ - public abstract int hash(byte[] bytes, int length, int initval); + public int hash(byte[] bytes, int length, int initval) { + return hash(bytes, 0, length, initval); + } + + /** + * Calculate a hash using bytes from 0 to <code>length</code>, and + * the provided seed value + * @param bytes input bytes + * @param offset the offset into the array to start consideration + * @param length length of the valid 
bytes after offset to consider + * @param initval seed value + * @return hash value + */ + public abstract int hash(byte[] bytes, int offset, int length, int initval); } diff --git a/core/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java b/core/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java index 0c6c6070283c..1e673717c9f2 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java +++ b/core/src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java @@ -80,11 +80,11 @@ private static long rot(long val, int pos) { */ @Override @SuppressWarnings("fallthrough") - public int hash(byte[] key, int nbytes, int initval) { + public int hash(byte[] key, int off, int nbytes, int initval) { int length = nbytes; long a, b, c; // We use longs because we don't have unsigned ints a = b = c = (0x00000000deadbeefL + length + initval) & INT_MASK; - int offset = 0; + int offset = off; for (; length > 12; offset += 12, length -= 12) { //noinspection PointlessArithmeticExpression a = (a + (key[offset + 0] & BYTE_MASK)) & INT_MASK; diff --git a/core/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java b/core/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java index fcf543e39b77..085bf1e34299 100644 --- a/core/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java +++ b/core/src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java @@ -35,7 +35,7 @@ public static Hash getInstance() { } @Override - public int hash(byte[] data, int length, int seed) { + public int hash(byte[] data, int offset, int length, int seed) { int m = 0x5bd1e995; int r = 24; @@ -44,7 +44,7 @@ public int hash(byte[] data, int length, int seed) { int len_4 = length >> 2; for (int i = 0; i < len_4; i++) { - int i_4 = i << 2; + int i_4 = (i << 2) + offset; int k = data[i_4 + 3]; k = k << 8; k = k | (data[i_4 + 2] & 0xff); @@ -63,16 +63,17 @@ public int hash(byte[] data, int length, int seed) { // avoid calculating modulo int len_m = len_4 << 2; int left = 
length - len_m; + int i_m = len_m + offset; if (left != 0) { if (left >= 3) { - h ^= data[len_m + 2] << 16; + h ^= data[i_m + 2] << 16; } if (left >= 2) { - h ^= data[len_m + 1] << 8; + h ^= data[i_m + 1] << 8; } if (left >= 1) { - h ^= data[len_m]; + h ^= data[i_m]; } h *= m; diff --git a/core/src/main/ruby/hbase/admin.rb b/core/src/main/ruby/hbase/admin.rb index 75490b739a9b..04da078aeb02 100644 --- a/core/src/main/ruby/hbase/admin.rb +++ b/core/src/main/ruby/hbase/admin.rb @@ -334,7 +334,7 @@ def hcd(arg) arg[HColumnDescriptor::BLOCKCACHE]? JBoolean.valueOf(arg[HColumnDescriptor::BLOCKCACHE]): HColumnDescriptor::DEFAULT_BLOCKCACHE, arg[HColumnDescriptor::BLOCKSIZE]? JInteger.valueOf(arg[HColumnDescriptor::BLOCKSIZE]): HColumnDescriptor::DEFAULT_BLOCKSIZE, arg[HColumnDescriptor::TTL]? JInteger.new(arg[HColumnDescriptor::TTL]): HColumnDescriptor::DEFAULT_TTL, - arg[HColumnDescriptor::BLOOMFILTER]? JBoolean.valueOf(arg[HColumnDescriptor::BLOOMFILTER]): HColumnDescriptor::DEFAULT_BLOOMFILTER, + arg[HColumnDescriptor::BLOOMFILTER]? arg[HColumnDescriptor::BLOOMFILTER]: HColumnDescriptor::DEFAULT_BLOOMFILTER) arg[HColumnDescriptor::REPLICATION_SCOPE]? 
JInteger.new(arg[REPLICATION_SCOPE]): HColumnDescriptor::DEFAULT_REPLICATION_SCOPE) end diff --git a/core/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java b/core/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java index 83be0974c0dd..e8c1d0cc960c 100644 --- a/core/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java +++ b/core/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java @@ -194,14 +194,19 @@ protected HTableDescriptor createTableDescriptor(final String name, HTableDescriptor htd = new HTableDescriptor(name); htd.addFamily(new HColumnDescriptor(fam1, versions, HColumnDescriptor.DEFAULT_COMPRESSION, false, false, - Integer.MAX_VALUE, HConstants.FOREVER, false, HConstants.REPLICATION_SCOPE_LOCAL)); + Integer.MAX_VALUE, HConstants.FOREVER, + HColumnDescriptor.DEFAULT_BLOOMFILTER, + HConstants.REPLICATION_SCOPE_LOCAL)); htd.addFamily(new HColumnDescriptor(fam2, versions, HColumnDescriptor.DEFAULT_COMPRESSION, false, false, - Integer.MAX_VALUE, HConstants.FOREVER, false, HConstants.REPLICATION_SCOPE_LOCAL)); + Integer.MAX_VALUE, HConstants.FOREVER, + HColumnDescriptor.DEFAULT_BLOOMFILTER, + HConstants.REPLICATION_SCOPE_LOCAL)); htd.addFamily(new HColumnDescriptor(fam3, versions, HColumnDescriptor.DEFAULT_COMPRESSION, false, false, Integer.MAX_VALUE, HConstants.FOREVER, - false, HConstants.REPLICATION_SCOPE_LOCAL)); + HColumnDescriptor.DEFAULT_BLOOMFILTER, + HConstants.REPLICATION_SCOPE_LOCAL)); return htd; } diff --git a/core/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/core/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index a6aca1d85bae..f60010458cdd 100644 --- a/core/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/core/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -333,7 +333,8 @@ public HTable createTable(byte[] tableName, byte[][] families, HColumnDescriptor.DEFAULT_IN_MEMORY, HColumnDescriptor.DEFAULT_BLOCKCACHE, Integer.MAX_VALUE, 
HColumnDescriptor.DEFAULT_TTL, - false, HColumnDescriptor.DEFAULT_REPLICATION_SCOPE); + HColumnDescriptor.DEFAULT_BLOOMFILTER, + HColumnDescriptor.DEFAULT_REPLICATION_SCOPE); desc.addFamily(hcd); } (new HBaseAdmin(getConfiguration())).createTable(desc); @@ -359,7 +360,8 @@ public HTable createTable(byte[] tableName, byte[][] families, HColumnDescriptor.DEFAULT_IN_MEMORY, HColumnDescriptor.DEFAULT_BLOCKCACHE, Integer.MAX_VALUE, HColumnDescriptor.DEFAULT_TTL, - false, HColumnDescriptor.DEFAULT_REPLICATION_SCOPE); + HColumnDescriptor.DEFAULT_BLOOMFILTER, + HColumnDescriptor.DEFAULT_REPLICATION_SCOPE); desc.addFamily(hcd); i++; } diff --git a/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java b/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java index 6b32b258878c..acb4fdc0455b 100644 --- a/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java +++ b/core/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java @@ -19,6 +19,8 @@ */ package org.apache.hadoop.hbase.io.hfile; +import java.io.DataInput; +import java.io.DataOutput; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; @@ -29,12 +31,14 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestCase; +import org.apache.hadoop.hbase.KeyValue.KeyComparator; import org.apache.hadoop.hbase.io.hfile.HFile.BlockIndex; import org.apache.hadoop.hbase.io.hfile.HFile.Reader; import org.apache.hadoop.hbase.io.hfile.HFile.Writer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.RawComparator; +import org.apache.hadoop.io.Writable; /** * test hfile features. 
@@ -170,7 +174,18 @@ public void testTFileFeatures() throws IOException { private void writeNumMetablocks(Writer writer, int n) { for (int i = 0; i < n; i++) { - writer.appendMetaBlock("HFileMeta" + i, ("something to test" + i).getBytes()); + writer.appendMetaBlock("HFileMeta" + i, new Writable() { + private int val; + public Writable setVal(int val) { this.val = val; return this; } + + @Override + public void write(DataOutput out) throws IOException { + out.write(("something to test" + val).getBytes()); + } + + @Override + public void readFields(DataInput in) throws IOException { } + }.setVal(i)); } } @@ -180,10 +195,10 @@ private void someTestingWithMetaBlock(Writer writer) { private void readNumMetablocks(Reader reader, int n) throws IOException { for (int i = 0; i < n; i++) { - ByteBuffer b = reader.getMetaBlock("HFileMeta" + i); - byte [] found = Bytes.toBytes(b); - assertTrue("failed to match metadata", Arrays.equals( - ("something to test" + i).getBytes(), found)); + ByteBuffer actual = reader.getMetaBlock("HFileMeta" + i, false); + ByteBuffer expected = + ByteBuffer.wrap(("something to test" + i).getBytes()); + assertTrue("failed to match metadata", actual.compareTo(expected) == 0); } } @@ -227,7 +242,7 @@ public void testNullMetaBlocks() throws Exception { fout.close(); Reader reader = new Reader(fs, mFile, null, false); reader.loadFileInfo(); - assertNull(reader.getMetaBlock("non-existant")); + assertNull(reader.getMetaBlock("non-existant", false)); } /** @@ -244,7 +259,7 @@ public void testComparator() throws IOException { Path mFile = new Path(ROOT_DIR, "meta.tfile"); FSDataOutputStream fout = createFSOutput(mFile); Writer writer = new Writer(fout, minBlockSize, (Compression.Algorithm) null, - new RawComparator<byte []>() { + new KeyComparator() { @Override public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) { diff --git a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java 
b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java index 142c14511832..7ff6a2e6c6c3 100644 --- a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java +++ b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java @@ -68,7 +68,8 @@ public class TestScanner extends HBaseTestCase { TESTTABLEDESC.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY, 10, // Ten is arbitrary number. Keep versions to help debuggging. Compression.Algorithm.NONE.getName(), false, true, 8 * 1024, - HConstants.FOREVER, false, HConstants.REPLICATION_SCOPE_LOCAL)); + HConstants.FOREVER, StoreFile.BloomType.NONE.toString(), + HConstants.REPLICATION_SCOPE_LOCAL)); } /** HRegionInfo for root region */ public static final HRegionInfo REGION_INFO = diff --git a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java index 9f240f265b5e..d14a27fd1adf 100644 --- a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java +++ b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java @@ -134,8 +134,9 @@ public void testEmptyStoreFile() throws IOException { long seqid = f.getMaxSequenceId(); HBaseConfiguration c = new HBaseConfiguration(); FileSystem fs = FileSystem.get(c); - Writer w = StoreFile.getWriter(fs, storedir); - StoreFile.appendMetadata(w, seqid + 1); + StoreFile.Writer w = StoreFile.createWriter(fs, storedir, + StoreFile.DEFAULT_BLOCKSIZE_SMALL); + w.appendMetadata(seqid + 1, false); w.close(); this.store.close(); // Reopen it... 
should pick up two files diff --git a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java index 0c6efde1bead..dcda84cce701 100644 --- a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java +++ b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java @@ -21,10 +21,13 @@ import java.io.IOException; import java.nio.ByteBuffer; +import java.util.TreeSet; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -69,11 +72,11 @@ public void tearDown() throws Exception { */ public void testBasicHalfMapFile() throws Exception { // Make up a directory hierarchy that has a regiondir and familyname. - HFile.Writer writer = StoreFile.getWriter(this.fs, - new Path(new Path(this.testDir, "regionname"), "familyname"), - 2 * 1024, null, null); + HFile.Writer writer = StoreFile.createWriter(this.fs, + new Path(new Path(this.testDir, "regionname"), "familyname"), 2 * 1024); writeStoreFile(writer); - checkHalfHFile(new StoreFile(this.fs, writer.getPath(), true, conf, false)); + checkHalfHFile(new StoreFile(this.fs, writer.getPath(), true, conf, + StoreFile.BloomType.NONE, false)); } /* @@ -109,11 +112,11 @@ public void testReference() Path storedir = new Path(new Path(this.testDir, "regionname"), "familyname"); Path dir = new Path(storedir, "1234567890"); // Make a store file and write data to it. 
- HFile.Writer writer = StoreFile.getWriter(this.fs, dir, 8 * 1024, null, - null); + HFile.Writer writer = StoreFile.createWriter(this.fs, dir, 8 * 1024); writeStoreFile(writer); - StoreFile hsf = new StoreFile(this.fs, writer.getPath(), true, conf, false); - HFile.Reader reader = hsf.getReader(); + StoreFile hsf = new StoreFile(this.fs, writer.getPath(), true, conf, + StoreFile.BloomType.NONE, false); + HFile.Reader reader = hsf.createReader(); // Split on a row, not in middle of row. Midkey returned by reader // may be in middle of row. Create new one with empty column and // timestamp. @@ -123,10 +126,11 @@ public void testReference() byte [] finalRow = kv.getRow(); // Make a reference Path refPath = StoreFile.split(fs, dir, hsf, midRow, Range.top); - StoreFile refHsf = new StoreFile(this.fs, refPath, true, conf, false); + StoreFile refHsf = new StoreFile(this.fs, refPath, true, conf, + StoreFile.BloomType.NONE, false); // Now confirm that I can read from the reference and that it only gets // keys from top half of the file. - HFileScanner s = refHsf.getReader().getScanner(false, false); + HFileScanner s = refHsf.createReader().getScanner(false, false); for(boolean first = true; (!s.isSeeked() && s.seekTo()) || s.next();) { ByteBuffer bb = s.getKey(); kv = KeyValue.createKeyValueFromKey(bb); @@ -140,7 +144,7 @@ public void testReference() private void checkHalfHFile(final StoreFile f) throws IOException { - byte [] midkey = f.getReader().midkey(); + byte [] midkey = f.createReader().midkey(); KeyValue midKV = KeyValue.createKeyValueFromKey(midkey); byte [] midRow = midKV.getRow(); // Create top split. @@ -159,8 +163,10 @@ private void checkHalfHFile(final StoreFile f) Path bottomPath = StoreFile.split(this.fs, bottomDir, f, midRow, Range.bottom); // Make readers on top and bottom. 
- HFile.Reader top = new StoreFile(this.fs, topPath, true, conf, false).getReader(); - HFile.Reader bottom = new StoreFile(this.fs, bottomPath, true, conf, false).getReader(); + HFile.Reader top = new StoreFile(this.fs, topPath, true, conf, + StoreFile.BloomType.NONE, false).createReader(); + HFile.Reader bottom = new StoreFile(this.fs, bottomPath, true, conf, + StoreFile.BloomType.NONE, false).createReader(); ByteBuffer previous = null; LOG.info("Midkey: " + midKV.toString()); ByteBuffer bbMidkeyBytes = ByteBuffer.wrap(midkey); @@ -212,8 +218,10 @@ private void checkHalfHFile(final StoreFile f) topPath = StoreFile.split(this.fs, topDir, f, badmidkey, Range.top); bottomPath = StoreFile.split(this.fs, bottomDir, f, badmidkey, Range.bottom); - top = new StoreFile(this.fs, topPath, true, conf, false).getReader(); - bottom = new StoreFile(this.fs, bottomPath, true, conf, false).getReader(); + top = new StoreFile(this.fs, topPath, true, conf, + StoreFile.BloomType.NONE, false).createReader(); + bottom = new StoreFile(this.fs, bottomPath, true, conf, + StoreFile.BloomType.NONE, false).createReader(); bottomScanner = bottom.getScanner(false, false); int count = 0; while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || @@ -256,8 +264,10 @@ private void checkHalfHFile(final StoreFile f) topPath = StoreFile.split(this.fs, topDir, f, badmidkey, Range.top); bottomPath = StoreFile.split(this.fs, bottomDir, f, badmidkey, Range.bottom); - top = new StoreFile(this.fs, topPath, true, conf, false).getReader(); - bottom = new StoreFile(this.fs, bottomPath, true, conf, false).getReader(); + top = new StoreFile(this.fs, topPath, true, conf, + StoreFile.BloomType.NONE, false).createReader(); + bottom = new StoreFile(this.fs, bottomPath, true, conf, + StoreFile.BloomType.NONE, false).createReader(); first = true; bottomScanner = bottom.getScanner(false, false); while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || @@ -296,4 +306,138 @@ private void checkHalfHFile(final 
StoreFile f) fs.delete(f.getPath(), true); } } -} \ No newline at end of file + + private static String ROOT_DIR = + System.getProperty("test.build.data", "/tmp/TestStoreFile"); + private static String localFormatter = "%010d"; + + public void testBloomFilter() throws Exception { + FileSystem fs = FileSystem.getLocal(conf); + conf.setFloat("io.hfile.bloom.error.rate", (float)0.01); + conf.setBoolean("io.hfile.bloom.enabled", true); + + // write the file + Path f = new Path(ROOT_DIR, getName()); + StoreFile.Writer writer = new StoreFile.Writer(fs, f, + StoreFile.DEFAULT_BLOCKSIZE_SMALL, HFile.DEFAULT_COMPRESSION_ALGORITHM, + conf, KeyValue.COMPARATOR, StoreFile.BloomType.ROW, 2000); + + long now = System.currentTimeMillis(); + for (int i = 0; i < 2000; i += 2) { + String row = String.format(localFormatter, Integer.valueOf(i)); + KeyValue kv = new KeyValue(row.getBytes(), "family".getBytes(), + "col".getBytes(), now, "value".getBytes()); + writer.append(kv); + } + writer.close(); + + StoreFile.Reader reader = new StoreFile.Reader(fs, f, null, false); + reader.loadFileInfo(); + reader.loadBloomfilter(); + HFileScanner scanner = reader.getScanner(false, false); + + // check false positives rate + int falsePos = 0; + int falseNeg = 0; + for (int i = 0; i < 2000; i++) { + String row = String.format(localFormatter, Integer.valueOf(i)); + TreeSet<byte[]> columns = new TreeSet<byte[]>(); + columns.add("family:col".getBytes()); + + boolean exists = scanner.shouldSeek(row.getBytes(), columns); + if (i % 2 == 0) { + if (!exists) falseNeg++; + } else { + if (exists) falsePos++; + } + } + reader.close(); + fs.delete(f, true); + System.out.println("False negatives: " + falseNeg); + assertEquals(0, falseNeg); + System.out.println("False positives: " + falsePos); + assertTrue(falsePos < 2); + } + + public void testBloomTypes() throws Exception { + float err = (float) 0.01; + FileSystem fs = FileSystem.getLocal(conf); + conf.setFloat("io.hfile.bloom.error.rate", err); + 
conf.setBoolean("io.hfile.bloom.enabled", true); + + int rowCount = 50; + int colCount = 10; + int versions = 2; + + // run once using columns and once using rows + StoreFile.BloomType[] bt = + {StoreFile.BloomType.ROWCOL, StoreFile.BloomType.ROW}; + int[] expKeys = {rowCount*colCount, rowCount}; + // below line deserves commentary. it is expected bloom false positives + // column = rowCount*2*colCount inserts + // row-level = only rowCount*2 inserts, but failures will be magnified by + // 2nd for loop for every column (2*colCount) + float[] expErr = {2*rowCount*colCount*err, 2*rowCount*2*colCount*err}; + + for (int x : new int[]{0,1}) { + // write the file + Path f = new Path(ROOT_DIR, getName()); + StoreFile.Writer writer = new StoreFile.Writer(fs, f, + StoreFile.DEFAULT_BLOCKSIZE_SMALL, + HFile.DEFAULT_COMPRESSION_ALGORITHM, + conf, KeyValue.COMPARATOR, bt[x], expKeys[x]); + + long now = System.currentTimeMillis(); + for (int i = 0; i < rowCount*2; i += 2) { // rows + for (int j = 0; j < colCount*2; j += 2) { // column qualifiers + String row = String.format(localFormatter, Integer.valueOf(i)); + String col = String.format(localFormatter, Integer.valueOf(j)); + for (int k= 0; k < versions; ++k) { // versions + KeyValue kv = new KeyValue(row.getBytes(), + "family".getBytes(), ("col" + col).getBytes(), + now-k, Bytes.toBytes((long)-1)); + writer.append(kv); + } + } + } + writer.close(); + + StoreFile.Reader reader = new StoreFile.Reader(fs, f, null, false); + reader.loadFileInfo(); + reader.loadBloomfilter(); + HFileScanner scanner = reader.getScanner(false, false); + assertEquals(expKeys[x], reader.getBloomFilter().getKeyCount()); + + // check false positives rate + int falsePos = 0; + int falseNeg = 0; + for (int i = 0; i < rowCount*2; ++i) { // rows + for (int j = 0; j < colCount*2; ++j) { // column qualifiers + String row = String.format(localFormatter, Integer.valueOf(i)); + String col = String.format(localFormatter, Integer.valueOf(j)); + TreeSet<byte[]> 
columns = new TreeSet<byte[]>(); + columns.add(("col" + col).getBytes()); + + boolean exists = scanner.shouldSeek(row.getBytes(), columns); + boolean shouldRowExist = i % 2 == 0; + boolean shouldColExist = j % 2 == 0; + shouldColExist = shouldColExist || bt[x] == StoreFile.BloomType.ROW; + if (shouldRowExist && shouldColExist) { + if (!exists) falseNeg++; + } else { + if (exists) falsePos++; + } + } + } + reader.close(); + fs.delete(f, true); + System.out.println(bt[x].toString()); + System.out.println(" False negatives: " + falseNeg); + System.out.println(" False positives: " + falsePos); + assertEquals(0, falseNeg); + assertTrue(falsePos < 2*expErr[x]); + } + + } + +} diff --git a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java index 7a7ec331a7d0..0d5a17a55ed9 100644 --- a/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java +++ b/core/src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java @@ -52,7 +52,8 @@ public class TestWideScanner extends HBaseTestCase { TESTTABLEDESC.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY, 10, // Ten is arbitrary number. Keep versions to help debuggging. 
Compression.Algorithm.NONE.getName(), false, true, 8 * 1024, - HConstants.FOREVER, false, HColumnDescriptor.DEFAULT_REPLICATION_SCOPE)); + HConstants.FOREVER, StoreFile.BloomType.NONE.toString(), + HColumnDescriptor.DEFAULT_REPLICATION_SCOPE)); } /** HRegionInfo for root region */ public static final HRegionInfo REGION_INFO = diff --git a/core/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java b/core/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java index 2c2ccc20ebd9..517b1421cef0 100644 --- a/core/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java +++ b/core/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java @@ -33,7 +33,7 @@ public class TestColumnSchemaModel extends TestCase { protected static final String COLUMN_NAME = "testcolumn"; protected static final boolean BLOCKCACHE = true; protected static final int BLOCKSIZE = 16384; - protected static final boolean BLOOMFILTER = false; + protected static final String BLOOMFILTER = "none"; protected static final String COMPRESSION = "GZ"; protected static final boolean IN_MEMORY = false; protected static final int TTL = 86400; @@ -42,7 +42,7 @@ public class TestColumnSchemaModel extends TestCase { protected static final String AS_XML = "<ColumnSchema name=\"testcolumn\"" + " BLOCKSIZE=\"16384\"" + - " BLOOMFILTER=\"false\"" + + " BLOOMFILTER=\"none\"" + " BLOCKCACHE=\"true\"" + " COMPRESSION=\"GZ\"" + " VERSIONS=\"1\"" +
90a5181501cbcf506c34a7870220f6f9a18b30df
hadoop
MAPREDUCE-2899. Replace major parts of- ApplicationSubmissionContext with a ContainerLaunchContext (Arun Murthy via- mahadev) - Merging r1170459 from trunk--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1170460 13f79535-47bb-0310-9956-ffa450edef68-
p
https://github.com/apache/hadoop
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt index c27e0a9629249..135aba975b99d 100644 --- a/hadoop-mapreduce-project/CHANGES.txt +++ b/hadoop-mapreduce-project/CHANGES.txt @@ -263,6 +263,9 @@ Release 0.23.0 - Unreleased MAPREDUCE-2676. MR-279: JobHistory Job page needs reformatted. (Robert Evans via mahadev) + MAPREDUCE-2899. Replace major parts of ApplicationSubmissionContext with a + ContainerLaunchContext (Arun Murthy via mahadev) + OPTIMIZATIONS MAPREDUCE-2026. Make JobTracker.getJobCounters() and diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java index 3d4dcb5ed0e11..17cef5a26a6eb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java @@ -579,13 +579,12 @@ private ContainerLaunchContext createContainerLaunchContext() { + remoteJobConfPath.toUri().toASCIIString()); // //////////// End of JobConf setup - // Setup DistributedCache - setupDistributedCache(remoteFS, conf, localResources, environment); + MRApps.setupDistributedCache(conf, localResources, environment); // Set local-resources and environment container.setLocalResources(localResources); - container.setEnv(environment); + container.setEnvironment(environment); // Setup up tokens Credentials taskCredentials = new Credentials(); @@ -618,7 +617,7 @@ private ContainerLaunchContext createContainerLaunchContext() { ShuffleHandler.serializeServiceData(jobToken)); container.setServiceData(serviceData); - 
MRApps.addToClassPath(container.getEnv(), getInitialClasspath()); + MRApps.addToClassPath(container.getEnvironment(), getInitialClasspath()); } catch (IOException e) { throw new YarnException(e); } @@ -645,7 +644,7 @@ private ContainerLaunchContext createContainerLaunchContext() { taskAttemptListener.getAddress(), remoteTask, javaHome, workDir.toString(), containerLogDir, childTmpDir, jvmID)); - MapReduceChildJVM.setVMEnv(container.getEnv(), classPaths, + MapReduceChildJVM.setVMEnv(container.getEnvironment(), classPaths, workDir.toString(), containerLogDir, nmLdLibraryPath, remoteTask, localizedApplicationTokensFile); @@ -656,116 +655,6 @@ private ContainerLaunchContext createContainerLaunchContext() { return container; } - private static long[] parseTimeStamps(String[] strs) { - if (null == strs) { - return null; - } - long[] result = new long[strs.length]; - for(int i=0; i < strs.length; ++i) { - result[i] = Long.parseLong(strs[i]); - } - return result; - } - - private void setupDistributedCache(FileSystem remoteFS, - Configuration conf, - Map<String, LocalResource> localResources, - Map<String, String> env) - throws IOException { - - // Cache archives - parseDistributedCacheArtifacts(remoteFS, localResources, env, - LocalResourceType.ARCHIVE, - DistributedCache.getCacheArchives(conf), - parseTimeStamps(DistributedCache.getArchiveTimestamps(conf)), - getFileSizes(conf, MRJobConfig.CACHE_ARCHIVES_SIZES), - DistributedCache.getArchiveVisibilities(conf), - DistributedCache.getArchiveClassPaths(conf)); - - // Cache files - parseDistributedCacheArtifacts(remoteFS, - localResources, env, - LocalResourceType.FILE, - DistributedCache.getCacheFiles(conf), - parseTimeStamps(DistributedCache.getFileTimestamps(conf)), - getFileSizes(conf, MRJobConfig.CACHE_FILES_SIZES), - DistributedCache.getFileVisibilities(conf), - DistributedCache.getFileClassPaths(conf)); - } - - // TODO - Move this to MR! 
- // Use TaskDistributedCacheManager.CacheFiles.makeCacheFiles(URI[], - // long[], boolean[], Path[], FileType) - private void parseDistributedCacheArtifacts( - FileSystem remoteFS, - Map<String, LocalResource> localResources, - Map<String, String> env, - LocalResourceType type, - URI[] uris, long[] timestamps, long[] sizes, boolean visibilities[], - Path[] pathsToPutOnClasspath) throws IOException { - - if (uris != null) { - // Sanity check - if ((uris.length != timestamps.length) || (uris.length != sizes.length) || - (uris.length != visibilities.length)) { - throw new IllegalArgumentException("Invalid specification for " + - "distributed-cache artifacts of type " + type + " :" + - " #uris=" + uris.length + - " #timestamps=" + timestamps.length + - " #visibilities=" + visibilities.length - ); - } - - Map<String, Path> classPaths = new HashMap<String, Path>(); - if (pathsToPutOnClasspath != null) { - for (Path p : pathsToPutOnClasspath) { - p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(), - remoteFS.getWorkingDirectory())); - classPaths.put(p.toUri().getPath().toString(), p); - } - } - for (int i = 0; i < uris.length; ++i) { - URI u = uris[i]; - Path p = new Path(u); - p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(), - remoteFS.getWorkingDirectory())); - // Add URI fragment or just the filename - Path name = new Path((null == u.getFragment()) - ? p.getName() - : u.getFragment()); - if (name.isAbsolute()) { - throw new IllegalArgumentException("Resource name must be relative"); - } - String linkName = name.toUri().getPath(); - localResources.put( - linkName, - BuilderUtils.newLocalResource( - p.toUri(), type, - visibilities[i] - ? LocalResourceVisibility.PUBLIC - : LocalResourceVisibility.PRIVATE, - sizes[i], timestamps[i]) - ); - if (classPaths.containsKey(u.getPath())) { - MRApps.addToClassPath(env, linkName); - } - } - } - } - - // TODO - Move this to MR! 
- private static long[] getFileSizes(Configuration conf, String key) { - String[] strs = conf.getStrings(key); - if (strs == null) { - return null; - } - long[] result = new long[strs.length]; - for(int i=0; i < strs.length; ++i) { - result[i] = Long.parseLong(strs[i]); - } - return result; - } - @Override public ContainerId getAssignedContainerID() { readLock.lock(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index 5dfa1dcfe460b..68499497ac30d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -25,14 +25,20 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.net.URI; import java.util.Arrays; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.MRJobConfig; +import org.apache.hadoop.mapreduce.filecache.DistributedCache; import org.apache.hadoop.mapreduce.v2.MRConstants; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; @@ -42,12 +48,18 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.yarn.YarnException; import org.apache.hadoop.yarn.api.records.ApplicationId; +import 
org.apache.hadoop.yarn.api.records.LocalResource; +import org.apache.hadoop.yarn.api.records.LocalResourceType; +import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.util.Apps; +import org.apache.hadoop.yarn.util.BuilderUtils; /** * Helper class for MR applications */ +@Private +@Unstable public class MRApps extends Apps { public static final String JOB = "job"; public static final String TASK = "task"; @@ -232,4 +244,121 @@ public static String getJobFile(Configuration conf, String user, jobId.toString() + Path.SEPARATOR + MRConstants.JOB_CONF_FILE); return jobFile.toString(); } + + + + private static long[] parseTimeStamps(String[] strs) { + if (null == strs) { + return null; + } + long[] result = new long[strs.length]; + for(int i=0; i < strs.length; ++i) { + result[i] = Long.parseLong(strs[i]); + } + return result; + } + + public static void setupDistributedCache( + Configuration conf, + Map<String, LocalResource> localResources, + Map<String, String> env) + throws IOException { + + // Cache archives + parseDistributedCacheArtifacts(conf, localResources, env, + LocalResourceType.ARCHIVE, + DistributedCache.getCacheArchives(conf), + parseTimeStamps(DistributedCache.getArchiveTimestamps(conf)), + getFileSizes(conf, MRJobConfig.CACHE_ARCHIVES_SIZES), + DistributedCache.getArchiveVisibilities(conf), + DistributedCache.getArchiveClassPaths(conf)); + + // Cache files + parseDistributedCacheArtifacts(conf, + localResources, env, + LocalResourceType.FILE, + DistributedCache.getCacheFiles(conf), + parseTimeStamps(DistributedCache.getFileTimestamps(conf)), + getFileSizes(conf, MRJobConfig.CACHE_FILES_SIZES), + DistributedCache.getFileVisibilities(conf), + DistributedCache.getFileClassPaths(conf)); + } + + // TODO - Move this to MR! 
+ // Use TaskDistributedCacheManager.CacheFiles.makeCacheFiles(URI[], + // long[], boolean[], Path[], FileType) + private static void parseDistributedCacheArtifacts( + Configuration conf, + Map<String, LocalResource> localResources, + Map<String, String> env, + LocalResourceType type, + URI[] uris, long[] timestamps, long[] sizes, boolean visibilities[], + Path[] pathsToPutOnClasspath) throws IOException { + + if (uris != null) { + // Sanity check + if ((uris.length != timestamps.length) || (uris.length != sizes.length) || + (uris.length != visibilities.length)) { + throw new IllegalArgumentException("Invalid specification for " + + "distributed-cache artifacts of type " + type + " :" + + " #uris=" + uris.length + + " #timestamps=" + timestamps.length + + " #visibilities=" + visibilities.length + ); + } + + Map<String, Path> classPaths = new HashMap<String, Path>(); + if (pathsToPutOnClasspath != null) { + for (Path p : pathsToPutOnClasspath) { + FileSystem remoteFS = p.getFileSystem(conf); + p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(), + remoteFS.getWorkingDirectory())); + classPaths.put(p.toUri().getPath().toString(), p); + } + } + for (int i = 0; i < uris.length; ++i) { + URI u = uris[i]; + Path p = new Path(u); + FileSystem remoteFS = p.getFileSystem(conf); + p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(), + remoteFS.getWorkingDirectory())); + // Add URI fragment or just the filename + Path name = new Path((null == u.getFragment()) + ? p.getName() + : u.getFragment()); + if (name.isAbsolute()) { + throw new IllegalArgumentException("Resource name must be relative"); + } + String linkName = name.toUri().getPath(); + localResources.put( + linkName, + BuilderUtils.newLocalResource( + p.toUri(), type, + visibilities[i] + ? 
LocalResourceVisibility.PUBLIC + : LocalResourceVisibility.PRIVATE, + sizes[i], timestamps[i]) + ); + if (classPaths.containsKey(u.getPath())) { + MRApps.addToClassPath(env, linkName); + } + } + } + } + + // TODO - Move this to MR! + private static long[] getFileSizes(Configuration conf, String key) { + String[] strs = conf.getStrings(key); + if (strs == null) { + return null; + } + long[] result = new long[strs.length]; + for(int i=0; i < strs.length; ++i) { + result[i] = Long.parseLong(strs[i]); + } + return result; + } + + + } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java index fa167a0acf13d..3751646010394 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java @@ -19,7 +19,6 @@ package org.apache.hadoop.mapred; import java.io.IOException; -import java.net.URI; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; @@ -33,7 +32,6 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.io.DataOutputBuffer; @@ -55,7 +53,6 @@ import org.apache.hadoop.mapreduce.TaskTrackerInfo; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.TypeConverter; -import org.apache.hadoop.mapreduce.filecache.DistributedCache; import org.apache.hadoop.mapreduce.protocol.ClientProtocol; import 
org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.mapreduce.v2.MRConstants; @@ -72,6 +69,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationState; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; +import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; @@ -237,7 +235,6 @@ public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts) // Construct necessary information to start the MR AM ApplicationSubmissionContext appContext = createApplicationSubmissionContext(conf, jobSubmitDir, ts); - setupDistributedCache(conf, appContext); // XXX Remove in.close(); @@ -273,16 +270,18 @@ private LocalResource createApplicationResource(FileContext fs, Path p) public ApplicationSubmissionContext createApplicationSubmissionContext( Configuration jobConf, String jobSubmitDir, Credentials ts) throws IOException { - ApplicationSubmissionContext appContext = - recordFactory.newRecordInstance(ApplicationSubmissionContext.class); ApplicationId applicationId = resMgrDelegate.getApplicationId(); - appContext.setApplicationId(applicationId); + + // Setup resource requirements Resource capability = recordFactory.newRecordInstance(Resource.class); capability.setMemory(conf.getInt(MRJobConfig.MR_AM_VMEM_MB, MRJobConfig.DEFAULT_MR_AM_VMEM_MB)); LOG.info("AppMaster capability = " + capability); - appContext.setMasterCapability(capability); + // Setup LocalResources + Map<String, LocalResource> localResources = + new HashMap<String, LocalResource>(); + Path jobConfPath = new Path(jobSubmitDir, MRConstants.JOB_CONF_FILE); URL yarnUrlForJobSubmitDir = ConverterUtils @@ -292,14 +291,11 @@ public ApplicationSubmissionContext 
createApplicationSubmissionContext( LOG.debug("Creating setup context, jobSubmitDir url is " + yarnUrlForJobSubmitDir); - appContext.setResource(MRConstants.JOB_SUBMIT_DIR, - yarnUrlForJobSubmitDir); - - appContext.setResourceTodo(MRConstants.JOB_CONF_FILE, + localResources.put(MRConstants.JOB_CONF_FILE, createApplicationResource(defaultFileContext, jobConfPath)); if (jobConf.get(MRJobConfig.JAR) != null) { - appContext.setResourceTodo(MRConstants.JOB_JAR, + localResources.put(MRConstants.JOB_JAR, createApplicationResource(defaultFileContext, new Path(jobSubmitDir, MRConstants.JOB_JAR))); } else { @@ -312,30 +308,21 @@ public ApplicationSubmissionContext createApplicationSubmissionContext( // TODO gross hack for (String s : new String[] { "job.split", "job.splitmetainfo", MRConstants.APPLICATION_TOKENS_FILE }) { - appContext.setResourceTodo( + localResources.put( MRConstants.JOB_SUBMIT_DIR + "/" + s, - createApplicationResource(defaultFileContext, new Path(jobSubmitDir, s))); + createApplicationResource(defaultFileContext, + new Path(jobSubmitDir, s))); } - - // TODO: Only if security is on. - List<String> fsTokens = new ArrayList<String>(); - for (Token<? extends TokenIdentifier> token : ts.getAllTokens()) { - fsTokens.add(token.encodeToUrlString()); - } - - // TODO - Remove this! 
- appContext.addAllFsTokens(fsTokens); - DataOutputBuffer dob = new DataOutputBuffer(); - ts.writeTokenStorageToStream(dob); - appContext.setFsTokensTodo(ByteBuffer.wrap(dob.getData(), 0, dob.getLength())); - - // Add queue information - appContext.setQueue(jobConf.get(JobContext.QUEUE_NAME, JobConf.DEFAULT_QUEUE_NAME)); - - // Add job name - appContext.setApplicationName(jobConf.get(JobContext.JOB_NAME, "N/A")); - // Add the command line + // Setup security tokens + ByteBuffer securityTokens = null; + if (UserGroupInformation.isSecurityEnabled()) { + DataOutputBuffer dob = new DataOutputBuffer(); + ts.writeTokenStorageToStream(dob); + securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength()); + } + + // Setup the command to run the AM String javaHome = "$JAVA_HOME"; Vector<CharSequence> vargs = new Vector<CharSequence>(8); vargs.add(javaHome + "/bin/java"); @@ -346,13 +333,6 @@ public ApplicationSubmissionContext createApplicationSubmissionContext( vargs.add(conf.get(MRJobConfig.MR_AM_COMMAND_OPTS, MRJobConfig.DEFAULT_MR_AM_COMMAND_OPTS)); - // Add { job jar, MR app jar } to classpath. - Map<String, String> environment = new HashMap<String, String>(); - MRApps.setInitialClasspath(environment); - MRApps.addToClassPath(environment, MRConstants.JOB_JAR); - MRApps.addToClassPath(environment, - MRConstants.YARN_MAPREDUCE_APP_JAR_PATH); - appContext.addAllEnvironment(environment); vargs.add("org.apache.hadoop.mapreduce.v2.app.MRAppMaster"); vargs.add(String.valueOf(applicationId.getClusterTimestamp())); vargs.add(String.valueOf(applicationId.getId())); @@ -370,140 +350,43 @@ public ApplicationSubmissionContext createApplicationSubmissionContext( LOG.info("Command to launch container for ApplicationMaster is : " + mergedCommand); + + // Setup the environment - Add { job jar, MR app jar } to classpath. 
+ Map<String, String> environment = new HashMap<String, String>(); + MRApps.setInitialClasspath(environment); + MRApps.addToClassPath(environment, MRConstants.JOB_JAR); + MRApps.addToClassPath(environment, + MRConstants.YARN_MAPREDUCE_APP_JAR_PATH); - appContext.addAllCommands(vargsFinal); - // TODO: RM should get this from RPC. - appContext.setUser(UserGroupInformation.getCurrentUser().getShortUserName()); - return appContext; - } + // Parse distributed cache + MRApps.setupDistributedCache(jobConf, localResources, environment); - /** - * * TODO: Copied for now from TaskAttemptImpl.java ... fixme - * @param strs - * @return - */ - private static long[] parseTimeStamps(String[] strs) { - if (null == strs) { - return null; - } - long[] result = new long[strs.length]; - for(int i=0; i < strs.length; ++i) { - result[i] = Long.parseLong(strs[i]); - } - return result; - } + // Setup ContainerLaunchContext for AM container + ContainerLaunchContext amContainer = + recordFactory.newRecordInstance(ContainerLaunchContext.class); + amContainer.setResource(capability); // Resource (mem) required + amContainer.setLocalResources(localResources); // Local resources + amContainer.setEnvironment(environment); // Environment + amContainer.setCommands(vargsFinal); // Command for AM + amContainer.setContainerTokens(securityTokens); // Security tokens - /** - * TODO: Copied for now from TaskAttemptImpl.java ... fixme - * - * TODO: This is currently needed in YarnRunner as user code like setupJob, - * cleanupJob may need access to dist-cache. Once we separate distcache for - * maps, reduces, setup etc, this can include only a subset of artificats. - * This is also needed for uberAM case where we run everything inside AM. 
- */ - private void setupDistributedCache(Configuration conf, - ApplicationSubmissionContext container) throws IOException { - - // Cache archives - parseDistributedCacheArtifacts(conf, container, LocalResourceType.ARCHIVE, - DistributedCache.getCacheArchives(conf), - parseTimeStamps(DistributedCache.getArchiveTimestamps(conf)), - getFileSizes(conf, MRJobConfig.CACHE_ARCHIVES_SIZES), - DistributedCache.getArchiveVisibilities(conf), - DistributedCache.getArchiveClassPaths(conf)); - - // Cache files - parseDistributedCacheArtifacts(conf, container, LocalResourceType.FILE, - DistributedCache.getCacheFiles(conf), - parseTimeStamps(DistributedCache.getFileTimestamps(conf)), - getFileSizes(conf, MRJobConfig.CACHE_FILES_SIZES), - DistributedCache.getFileVisibilities(conf), - DistributedCache.getFileClassPaths(conf)); - } - - // TODO - Move this to MR! - // Use TaskDistributedCacheManager.CacheFiles.makeCacheFiles(URI[], long[], boolean[], Path[], FileType) - private void parseDistributedCacheArtifacts(Configuration conf, - ApplicationSubmissionContext container, LocalResourceType type, - URI[] uris, long[] timestamps, long[] sizes, boolean visibilities[], - Path[] pathsToPutOnClasspath) throws IOException { - - if (uris != null) { - // Sanity check - if ((uris.length != timestamps.length) || (uris.length != sizes.length) || - (uris.length != visibilities.length)) { - throw new IllegalArgumentException("Invalid specification for " + - "distributed-cache artifacts of type " + type + " :" + - " #uris=" + uris.length + - " #timestamps=" + timestamps.length + - " #visibilities=" + visibilities.length - ); - } - - Map<String, Path> classPaths = new HashMap<String, Path>(); - if (pathsToPutOnClasspath != null) { - for (Path p : pathsToPutOnClasspath) { - FileSystem fs = p.getFileSystem(conf); - p = p.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - classPaths.put(p.toUri().getPath().toString(), p); - } - } - for (int i = 0; i < uris.length; ++i) { - URI u = uris[i]; - 
Path p = new Path(u); - FileSystem fs = p.getFileSystem(conf); - p = fs.resolvePath( - p.makeQualified(fs.getUri(), fs.getWorkingDirectory())); - // Add URI fragment or just the filename - Path name = new Path((null == u.getFragment()) - ? p.getName() - : u.getFragment()); - if (name.isAbsolute()) { - throw new IllegalArgumentException("Resource name must be relative"); - } - String linkName = name.toUri().getPath(); - container.setResourceTodo( - linkName, - createLocalResource( - p.toUri(), type, - visibilities[i] - ? LocalResourceVisibility.PUBLIC - : LocalResourceVisibility.PRIVATE, - sizes[i], timestamps[i]) - ); - if (classPaths.containsKey(u.getPath())) { - Map<String, String> environment = container.getAllEnvironment(); - MRApps.addToClassPath(environment, linkName); - } - } - } - } + // Set up the ApplicationSubmissionContext + ApplicationSubmissionContext appContext = + recordFactory.newRecordInstance(ApplicationSubmissionContext.class); + appContext.setApplicationId(applicationId); // ApplicationId + appContext.setUser( // User name + UserGroupInformation.getCurrentUser().getShortUserName()); + appContext.setQueue( // Queue name + jobConf.get(JobContext.QUEUE_NAME, + YarnConfiguration.DEFAULT_QUEUE_NAME)); + appContext.setApplicationName( // Job name + jobConf.get(JobContext.JOB_NAME, + YarnConfiguration.DEFAULT_APPLICATION_NAME)); + appContext.setAMContainerSpec(amContainer); // AM Container - // TODO - Move this to MR! 
- private static long[] getFileSizes(Configuration conf, String key) { - String[] strs = conf.getStrings(key); - if (strs == null) { - return null; - } - long[] result = new long[strs.length]; - for(int i=0; i < strs.length; ++i) { - result[i] = Long.parseLong(strs[i]); - } - return result; - } - - private LocalResource createLocalResource(URI uri, - LocalResourceType type, LocalResourceVisibility visibility, - long size, long timestamp) throws IOException { - LocalResource resource = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(LocalResource.class); - resource.setResource(ConverterUtils.getYarnUrlFromURI(uri)); - resource.setType(type); - resource.setVisibility(visibility); - resource.setSize(size); - resource.setTimestamp(timestamp); - return resource; + return appContext; } - + @Override public void setJobPriority(JobID arg0, String arg1) throws IOException, InterruptedException { diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java index 46511ca0d270c..0f1243fd9fb83 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java @@ -18,14 +18,8 @@ package org.apache.hadoop.yarn.api.records; -import java.nio.ByteBuffer; -import java.util.List; -import java.util.Map; - -import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Stable; -import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.api.ClientRMProtocol; /** @@ 
-36,26 +30,17 @@ * <p>It includes details such as: * <ul> * <li>{@link ApplicationId} of the application.</li> - * <li> - * {@link Resource} necessary to run the <code>ApplicationMaster</code>. - * </li> * <li>Application user.</li> * <li>Application name.</li> * <li>{@link Priority} of the application.</li> - * <li>Security tokens (if security is enabled).</li> - * <li> - * {@link LocalResource} necessary for running the - * <code>ApplicationMaster</code> container such - * as binaries, jar, shared-objects, side-files etc. - * </li> * <li> - * Environment variables for the launched <code>ApplicationMaster</code> - * process. + * {@link ContainerLaunchContext} of the container in which the + * <code>ApplicationMaster</code> is executed. * </li> - * <li>Command to launch the <code>ApplicationMaster</code>.</li> * </ul> * </p> * + * @see ContainerLaunchContext * @see ClientRMProtocol#submitApplication(org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest) */ @Public @@ -143,198 +128,25 @@ public interface ApplicationSubmissionContext { public void setUser(String user); /** - * Get the <code>Resource</code> required to run the - * <code>ApplicationMaster</code>. - * @return <code>Resource</code> required to run the - * <code>ApplicationMaster</code> - */ - @Public - @Stable - public Resource getMasterCapability(); - - /** - * Set <code>Resource</code> required to run the - * <code>ApplicationMaster</code>. 
- * @param masterCapability <code>Resource</code> required to run the - * <code>ApplicationMaster</code> - */ - @Public - @Stable - public void setMasterCapability(Resource masterCapability); - - @Private - @Unstable - public Map<String, URL> getAllResources(); - - @Private - @Unstable - public URL getResource(String key); - - @Private - @Unstable - public void addAllResources(Map<String, URL> resources); - - @Private - @Unstable - public void setResource(String key, URL url); - - @Private - @Unstable - public void removeResource(String key); - - @Private - @Unstable - public void clearResources(); - - /** - * Get all the <code>LocalResource</code> required to run the - * <code>ApplicationMaster</code>. - * @return <code>LocalResource</code> required to run the - * <code>ApplicationMaster</code> - */ - @Public - @Stable - public Map<String, LocalResource> getAllResourcesTodo(); - - @Private - @Unstable - public LocalResource getResourceTodo(String key); - - /** - * Add all the <code>LocalResource</code> required to run the - * <code>ApplicationMaster</code>. 
- * @param resources all <code>LocalResource</code> required to run the - * <code>ApplicationMaster</code> - */ - @Public - @Stable - public void addAllResourcesTodo(Map<String, LocalResource> resources); - - @Private - @Unstable - public void setResourceTodo(String key, LocalResource localResource); - - @Private - @Unstable - public void removeResourceTodo(String key); - - @Private - @Unstable - public void clearResourcesTodo(); - - @Private - @Unstable - public List<String> getFsTokenList(); - - @Private - @Unstable - public String getFsToken(int index); - - @Private - @Unstable - public int getFsTokenCount(); - - @Private - @Unstable - public void addAllFsTokens(List<String> fsTokens); - - @Private - @Unstable - public void addFsToken(String fsToken); - - @Private - @Unstable - public void removeFsToken(int index); - - @Private - @Unstable - public void clearFsTokens(); - - /** - * Get <em>file-system tokens</em> for the <code>ApplicationMaster</code>. - * @return file-system tokens for the <code>ApplicationMaster</code> - */ - @Public - @Stable - public ByteBuffer getFsTokensTodo(); - - /** - * Set <em>file-system tokens</em> for the <code>ApplicationMaster</code>. - * @param fsTokens file-system tokens for the <code>ApplicationMaster</code> + * Get the <code>ContainerLaunchContext</code> to describe the + * <code>Container</code> with which the <code>ApplicationMaster</code> is + * launched. + * @return <code>ContainerLaunchContext</code> for the + * <code>ApplicationMaster</code> container */ @Public @Stable - public void setFsTokensTodo(ByteBuffer fsTokens); - - /** - * Get the <em>environment variables</em> for the - * <code>ApplicationMaster</code>. 
- * @return environment variables for the <code>ApplicationMaster</code> - */ - @Public - @Stable - public Map<String, String> getAllEnvironment(); - - @Private - @Unstable - public String getEnvironment(String key); + public ContainerLaunchContext getAMContainerSpec(); /** - * Add all of the <em>environment variables</em> for the - * <code>ApplicationMaster</code>. - * @param environment environment variables for the - * <code>ApplicationMaster</code> + * Set the <code>ContainerLaunchContext</code> to describe the + * <code>Container</code> with which the <code>ApplicationMaster</code> is + * launched. + * @param amContainer <code>ContainerLaunchContext</code> for the + * <code>ApplicationMaster</code> container */ @Public @Stable - public void addAllEnvironment(Map<String, String> environment); + public void setAMContainerSpec(ContainerLaunchContext amContainer); - @Private - @Unstable - public void setEnvironment(String key, String env); - - @Private - @Unstable - public void removeEnvironment(String key); - - @Private - @Unstable - public void clearEnvironment(); - - /** - * Get the <em>commands</em> to launch the <code>ApplicationMaster</code>. - * @return commands to launch the <code>ApplicationMaster</code> - */ - @Public - @Stable - public List<String> getCommandList(); - - @Private - @Unstable - public String getCommand(int index); - - @Private - @Unstable - public int getCommandCount(); - - /** - * Add all of the <em>commands</em> to launch the - * <code>ApplicationMaster</code>. 
- * @param commands commands to launch the <code>ApplicationMaster</code> - */ - @Public - @Stable - public void addAllCommands(List<String> commands); - - @Private - @Unstable - public void addCommand(String command); - - @Private - @Unstable - public void removeCommand(int index); - - @Private - @Unstable - public void clearCommands(); } \ No newline at end of file diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java index 0339df9af1f1c..52452b54e11f0 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerLaunchContext.java @@ -156,7 +156,7 @@ public interface ContainerLaunchContext { */ @Public @Stable - Map<String, String> getEnv(); + Map<String, String> getEnvironment(); /** * Add <em>environment variables</em> for the container. @@ -164,7 +164,7 @@ public interface ContainerLaunchContext { */ @Public @Stable - void setEnv(Map<String, String> environment); + void setEnvironment(Map<String, String> environment); /** * Get the list of <em>commands</em> for launching the container. 
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java index 2b4841888a720..1f8b5c24b1f94 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java @@ -18,56 +18,35 @@ package org.apache.hadoop.yarn.api.records.impl.pb; - -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; -import org.apache.hadoop.yarn.api.records.LocalResource; +import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder; -import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto; import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto; -import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto; -import org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto; -import 
org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto; -import org.apache.hadoop.yarn.proto.YarnProtos.StringURLMapProto; -import org.apache.hadoop.yarn.proto.YarnProtos.URLProto; - - -public class ApplicationSubmissionContextPBImpl extends ProtoBase<ApplicationSubmissionContextProto> implements ApplicationSubmissionContext { - ApplicationSubmissionContextProto proto = ApplicationSubmissionContextProto.getDefaultInstance(); +public class ApplicationSubmissionContextPBImpl +extends ProtoBase<ApplicationSubmissionContextProto> +implements ApplicationSubmissionContext { + ApplicationSubmissionContextProto proto = + ApplicationSubmissionContextProto.getDefaultInstance(); ApplicationSubmissionContextProto.Builder builder = null; boolean viaProto = false; private ApplicationId applicationId = null; - private Resource masterCapability = null; - private Map<String, URL> resources = null; - private Map<String, LocalResource> resourcesTodo = null; - private List<String> fsTokenList = null; - private ByteBuffer fsTokenTodo = null; - private Map<String, String> environment = null; - private List<String> commandList = null; private Priority priority = null; - - + private ContainerLaunchContext amContainer = null; public ApplicationSubmissionContextPBImpl() { builder = ApplicationSubmissionContextProto.newBuilder(); } - public ApplicationSubmissionContextPBImpl(ApplicationSubmissionContextProto proto) { + public ApplicationSubmissionContextPBImpl( + ApplicationSubmissionContextProto proto) { this.proto = proto; viaProto = true; } @@ -83,30 +62,12 @@ private void mergeLocalToBuilder() { if (this.applicationId != null) { builder.setApplicationId(convertToProtoFormat(this.applicationId)); } - if (this.masterCapability != null) { - builder.setMasterCapability(convertToProtoFormat(this.masterCapability)); - } - if (this.resources != null) { - addResourcesToProto(); - } - if (this.resourcesTodo != null) { - addResourcesTodoToProto(); - } - if (this.fsTokenList != null) { - 
addFsTokenListToProto(); - } - if (this.fsTokenTodo != null) { - builder.setFsTokensTodo(convertToProtoFormat(this.fsTokenTodo)); - } - if (this.environment != null) { - addEnvironmentToProto(); - } - if (this.commandList != null) { - addCommandsToProto(); - } if (this.priority != null) { builder.setPriority(convertToProtoFormat(this.priority)); } + if (this.amContainer != null) { + builder.setAmContainerSpec(convertToProtoFormat(this.amContainer)); + } } private void mergeLocalToProto() { @@ -145,6 +106,7 @@ public void setPriority(Priority priority) { builder.clearPriority(); this.priority = priority; } + @Override public ApplicationId getApplicationId() { ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder; @@ -165,6 +127,7 @@ public void setApplicationId(ApplicationId applicationId) { builder.clearApplicationId(); this.applicationId = applicationId; } + @Override public String getApplicationName() { ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder; @@ -183,403 +146,7 @@ public void setApplicationName(String applicationName) { } builder.setApplicationName((applicationName)); } - @Override - public Resource getMasterCapability() { - ApplicationSubmissionContextProtoOrBuilder p = viaProto ? 
proto : builder; - if (this.masterCapability != null) { - return masterCapability; - } // Else via proto - if (!p.hasMasterCapability()) { - return null; - } - masterCapability = convertFromProtoFormat(p.getMasterCapability()); - return this.masterCapability; - } - - @Override - public void setMasterCapability(Resource masterCapability) { - maybeInitBuilder(); - if (masterCapability == null) - builder.clearMasterCapability(); - this.masterCapability = masterCapability; - } - @Override - public Map<String, URL> getAllResources() { - initResources(); - return this.resources; - } - @Override - public URL getResource(String key) { - initResources(); - return this.resources.get(key); - } - - private void initResources() { - if (this.resources != null) { - return; - } - ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder; - List<StringURLMapProto> mapAsList = p.getResourcesList(); - this.resources = new HashMap<String, URL>(); - - for (StringURLMapProto c : mapAsList) { - this.resources.put(c.getKey(), convertFromProtoFormat(c.getValue())); - } - } - - @Override - public void addAllResources(final Map<String, URL> resources) { - if (resources == null) - return; - initResources(); - this.resources.putAll(resources); - } - - private void addResourcesToProto() { - maybeInitBuilder(); - builder.clearResources(); - if (this.resources == null) - return; - Iterable<StringURLMapProto> iterable = new Iterable<StringURLMapProto>() { - - @Override - public Iterator<StringURLMapProto> iterator() { - return new Iterator<StringURLMapProto>() { - - Iterator<String> keyIter = resources.keySet().iterator(); - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public StringURLMapProto next() { - String key = keyIter.next(); - return StringURLMapProto.newBuilder().setKey(key).setValue(convertToProtoFormat(resources.get(key))).build(); - } - - @Override - public boolean hasNext() { - return keyIter.hasNext(); - } - }; 
- } - }; - builder.addAllResources(iterable); - } - @Override - public void setResource(String key, URL val) { - initResources(); - this.resources.put(key, val); - } - @Override - public void removeResource(String key) { - initResources(); - this.resources.remove(key); - } - @Override - public void clearResources() { - initResources(); - this.resources.clear(); - } - @Override - public Map<String, LocalResource> getAllResourcesTodo() { - initResourcesTodo(); - return this.resourcesTodo; - } - @Override - public LocalResource getResourceTodo(String key) { - initResourcesTodo(); - return this.resourcesTodo.get(key); - } - - private void initResourcesTodo() { - if (this.resourcesTodo != null) { - return; - } - ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder; - List<StringLocalResourceMapProto> mapAsList = p.getResourcesTodoList(); - this.resourcesTodo = new HashMap<String, LocalResource>(); - - for (StringLocalResourceMapProto c : mapAsList) { - this.resourcesTodo.put(c.getKey(), convertFromProtoFormat(c.getValue())); - } - } - - @Override - public void addAllResourcesTodo(final Map<String, LocalResource> resourcesTodo) { - if (resourcesTodo == null) - return; - initResourcesTodo(); - this.resourcesTodo.putAll(resourcesTodo); - } - - private void addResourcesTodoToProto() { - maybeInitBuilder(); - builder.clearResourcesTodo(); - if (resourcesTodo == null) - return; - Iterable<StringLocalResourceMapProto> iterable = new Iterable<StringLocalResourceMapProto>() { - - @Override - public Iterator<StringLocalResourceMapProto> iterator() { - return new Iterator<StringLocalResourceMapProto>() { - - Iterator<String> keyIter = resourcesTodo.keySet().iterator(); - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public StringLocalResourceMapProto next() { - String key = keyIter.next(); - return 
StringLocalResourceMapProto.newBuilder().setKey(key).setValue(convertToProtoFormat(resourcesTodo.get(key))).build(); - } - - @Override - public boolean hasNext() { - return keyIter.hasNext(); - } - }; - } - }; - builder.addAllResourcesTodo(iterable); - } - @Override - public void setResourceTodo(String key, LocalResource val) { - initResourcesTodo(); - this.resourcesTodo.put(key, val); - } - @Override - public void removeResourceTodo(String key) { - initResourcesTodo(); - this.resourcesTodo.remove(key); - } - @Override - public void clearResourcesTodo() { - initResourcesTodo(); - this.resourcesTodo.clear(); - } - @Override - public List<String> getFsTokenList() { - initFsTokenList(); - return this.fsTokenList; - } - @Override - public String getFsToken(int index) { - initFsTokenList(); - return this.fsTokenList.get(index); - } - @Override - public int getFsTokenCount() { - initFsTokenList(); - return this.fsTokenList.size(); - } - - private void initFsTokenList() { - if (this.fsTokenList != null) { - return; - } - ApplicationSubmissionContextProtoOrBuilder p = viaProto ? 
proto : builder; - List<String> list = p.getFsTokensList(); - this.fsTokenList = new ArrayList<String>(); - - for (String c : list) { - this.fsTokenList.add(c); - } - } - - @Override - public void addAllFsTokens(final List<String> fsTokens) { - if (fsTokens == null) - return; - initFsTokenList(); - this.fsTokenList.addAll(fsTokens); - } - - private void addFsTokenListToProto() { - maybeInitBuilder(); - builder.clearFsTokens(); - builder.addAllFsTokens(this.fsTokenList); - } - - @Override - public void addFsToken(String fsTokens) { - initFsTokenList(); - this.fsTokenList.add(fsTokens); - } - @Override - public void removeFsToken(int index) { - initFsTokenList(); - this.fsTokenList.remove(index); - } - @Override - public void clearFsTokens() { - initFsTokenList(); - this.fsTokenList.clear(); - } - @Override - public ByteBuffer getFsTokensTodo() { - ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder; - if (this.fsTokenTodo != null) { - return this.fsTokenTodo; - } - if (!p.hasFsTokensTodo()) { - return null; - } - this.fsTokenTodo = convertFromProtoFormat(p.getFsTokensTodo()); - return this.fsTokenTodo; - } - @Override - public void setFsTokensTodo(ByteBuffer fsTokensTodo) { - maybeInitBuilder(); - if (fsTokensTodo == null) - builder.clearFsTokensTodo(); - this.fsTokenTodo = fsTokensTodo; - } - @Override - public Map<String, String> getAllEnvironment() { - initEnvironment(); - return this.environment; - } - @Override - public String getEnvironment(String key) { - initEnvironment(); - return this.environment.get(key); - } - - private void initEnvironment() { - if (this.environment != null) { - return; - } - ApplicationSubmissionContextProtoOrBuilder p = viaProto ? 
proto : builder; - List<StringStringMapProto> mapAsList = p.getEnvironmentList(); - this.environment = new HashMap<String, String>(); - - for (StringStringMapProto c : mapAsList) { - this.environment.put(c.getKey(), c.getValue()); - } - } - - @Override - public void addAllEnvironment(Map<String, String> environment) { - if (environment == null) - return; - initEnvironment(); - this.environment.putAll(environment); - } - - private void addEnvironmentToProto() { - maybeInitBuilder(); - builder.clearEnvironment(); - if (environment == null) - return; - Iterable<StringStringMapProto> iterable = new Iterable<StringStringMapProto>() { - - @Override - public Iterator<StringStringMapProto> iterator() { - return new Iterator<StringStringMapProto>() { - - Iterator<String> keyIter = environment.keySet().iterator(); - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public StringStringMapProto next() { - String key = keyIter.next(); - return StringStringMapProto.newBuilder().setKey(key).setValue((environment.get(key))).build(); - } - - @Override - public boolean hasNext() { - return keyIter.hasNext(); - } - }; - } - }; - builder.addAllEnvironment(iterable); - } - @Override - public void setEnvironment(String key, String val) { - initEnvironment(); - this.environment.put(key, val); - } - @Override - public void removeEnvironment(String key) { - initEnvironment(); - this.environment.remove(key); - } - @Override - public void clearEnvironment() { - initEnvironment(); - this.environment.clear(); - } - @Override - public List<String> getCommandList() { - initCommandList(); - return this.commandList; - } - @Override - public String getCommand(int index) { - initCommandList(); - return this.commandList.get(index); - } - @Override - public int getCommandCount() { - initCommandList(); - return this.commandList.size(); - } - - private void initCommandList() { - if (this.commandList != null) { - return; - } - 
ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder; - List<String> list = p.getCommandList(); - this.commandList = new ArrayList<String>(); - - for (String c : list) { - this.commandList.add(c); - } - } - - @Override - public void addAllCommands(final List<String> command) { - if (command == null) - return; - initCommandList(); - this.commandList.addAll(command); - } - - private void addCommandsToProto() { - maybeInitBuilder(); - builder.clearCommand(); - if (this.commandList == null) - return; - builder.addAllCommand(this.commandList); - } - @Override - public void addCommand(String command) { - initCommandList(); - this.commandList.add(command); - } - @Override - public void removeCommand(int index) { - initCommandList(); - this.commandList.remove(index); - } - @Override - public void clearCommands() { - initCommandList(); - this.commandList.clear(); - } @Override public String getQueue() { ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder; @@ -598,6 +165,7 @@ public void setQueue(String queue) { } builder.setQueue((queue)); } + @Override public String getUser() { ApplicationSubmissionContextProtoOrBuilder p = viaProto ? proto : builder; @@ -617,6 +185,28 @@ public void setUser(String user) { builder.setUser((user)); } + @Override + public ContainerLaunchContext getAMContainerSpec() { + ApplicationSubmissionContextProtoOrBuilder p = viaProto ? 
proto : builder; + if (this.amContainer != null) { + return amContainer; + } // Else via proto + if (!p.hasAmContainerSpec()) { + return null; + } + amContainer = convertFromProtoFormat(p.getAmContainerSpec()); + return amContainer; + } + + @Override + public void setAMContainerSpec(ContainerLaunchContext amContainer) { + maybeInitBuilder(); + if (amContainer == null) { + builder.clearAmContainerSpec(); + } + this.amContainer = amContainer; + } + private PriorityPBImpl convertFromProtoFormat(PriorityProto p) { return new PriorityPBImpl(p); } @@ -633,28 +223,12 @@ private ApplicationIdProto convertToProtoFormat(ApplicationId t) { return ((ApplicationIdPBImpl)t).getProto(); } - private ResourcePBImpl convertFromProtoFormat(ResourceProto p) { - return new ResourcePBImpl(p); - } - - private ResourceProto convertToProtoFormat(Resource t) { - return ((ResourcePBImpl)t).getProto(); - } - - private URLPBImpl convertFromProtoFormat(URLProto p) { - return new URLPBImpl(p); + private ContainerLaunchContextPBImpl convertFromProtoFormat( + ContainerLaunchContextProto p) { + return new ContainerLaunchContextPBImpl(p); } - private URLProto convertToProtoFormat(URL t) { - return ((URLPBImpl)t).getProto(); + private ContainerLaunchContextProto convertToProtoFormat(ContainerLaunchContext t) { + return ((ContainerLaunchContextPBImpl)t).getProto(); } - - private LocalResourcePBImpl convertFromProtoFormat(LocalResourceProto p) { - return new LocalResourcePBImpl(p); - } - - private LocalResourceProto convertToProtoFormat(LocalResource t) { - return ((LocalResourcePBImpl)t).getProto(); - } - } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.java index 0696d8327bdda..de292ad98e02c 100644 --- 
a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.java @@ -39,8 +39,6 @@ import org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto; import org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto; - - public class ContainerLaunchContextPBImpl extends ProtoBase<ContainerLaunchContextProto> implements ContainerLaunchContext { @@ -54,10 +52,9 @@ public class ContainerLaunchContextPBImpl private Map<String, LocalResource> localResources = null; private ByteBuffer containerTokens = null; private Map<String, ByteBuffer> serviceData = null; - private Map<String, String> env = null; + private Map<String, String> environment = null; private List<String> commands = null; - public ContainerLaunchContextPBImpl() { builder = ContainerLaunchContextProto.newBuilder(); } @@ -94,7 +91,7 @@ private void mergeLocalToBuilder() { if (this.serviceData != null) { addServiceDataToProto(); } - if (this.env != null) { + if (this.environment != null) { addEnvToProto(); } if (this.commands != null) { @@ -364,37 +361,37 @@ public boolean hasNext() { } @Override - public Map<String, String> getEnv() { + public Map<String, String> getEnvironment() { initEnv(); - return this.env; + return this.environment; } private void initEnv() { - if (this.env != null) { + if (this.environment != null) { return; } ContainerLaunchContextProtoOrBuilder p = viaProto ? 
proto : builder; - List<StringStringMapProto> list = p.getEnvList(); - this.env = new HashMap<String, String>(); + List<StringStringMapProto> list = p.getEnvironmentList(); + this.environment = new HashMap<String, String>(); for (StringStringMapProto c : list) { - this.env.put(c.getKey(), c.getValue()); + this.environment.put(c.getKey(), c.getValue()); } } @Override - public void setEnv(final Map<String, String> env) { + public void setEnvironment(final Map<String, String> env) { if (env == null) return; initEnv(); - this.env.clear(); - this.env.putAll(env); + this.environment.clear(); + this.environment.putAll(env); } private void addEnvToProto() { maybeInitBuilder(); - builder.clearEnv(); - if (env == null) + builder.clearEnvironment(); + if (environment == null) return; Iterable<StringStringMapProto> iterable = new Iterable<StringStringMapProto>() { @@ -403,7 +400,7 @@ private void addEnvToProto() { public Iterator<StringStringMapProto> iterator() { return new Iterator<StringStringMapProto>() { - Iterator<String> keyIter = env.keySet().iterator(); + Iterator<String> keyIter = environment.keySet().iterator(); @Override public void remove() { @@ -414,7 +411,7 @@ public void remove() { public StringStringMapProto next() { String key = keyIter.next(); return StringStringMapProto.newBuilder().setKey(key).setValue( - (env.get(key))).build(); + (environment.get(key))).build(); } @Override @@ -424,7 +421,7 @@ public boolean hasNext() { }; } }; - builder.addAllEnv(iterable); + builder.addAllEnvironment(iterable); } private ResourcePBImpl convertFromProtoFormat(ResourceProto p) { diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto index 61e3d1f5b94ea..cdcd1a747b816 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto +++ 
b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto @@ -188,17 +188,11 @@ message AMResponseProto { //////////////////////////////////////////////////////////////////////// message ApplicationSubmissionContextProto { optional ApplicationIdProto application_id = 1; - optional string application_name = 2; - optional ResourceProto master_capability = 3; - repeated StringURLMapProto resources = 4; - repeated StringLocalResourceMapProto resources_todo = 5; - repeated string fs_tokens = 6; - optional bytes fs_tokens_todo = 7; - repeated StringStringMapProto environment = 8; - repeated string command = 9; - optional string queue = 10; - optional PriorityProto priority = 11; - optional string user = 12; + optional string application_name = 2 [default = "N/A"]; + optional string user = 3; + optional string queue = 4 [default = "default"]; + optional PriorityProto priority = 5; + optional ContainerLaunchContextProto am_container_spec = 6; } message YarnClusterMetricsProto { @@ -242,7 +236,7 @@ message ContainerLaunchContextProto { repeated StringLocalResourceMapProto localResources = 4; optional bytes container_tokens = 5; repeated StringBytesMapProto service_data = 6; - repeated StringStringMapProto env = 7; + repeated StringStringMapProto environment = 7; repeated string command = 8; } diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index 2169ee3e90829..ba23134170ffa 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -219,6 +219,12 @@ public class YarnConfiguration extends Configuration { RM_PREFIX + 
"max-completed-applications"; public static final int DEFAULT_RM_MAX_COMPLETED_APPLICATIONS = 10000; + /** Default application name */ + public static final String DEFAULT_APPLICATION_NAME = "N/A"; + + /** Default queue name */ + public static final String DEFAULT_QUEUE_NAME = "default"; + //////////////////////////////// // Node Manager Configs //////////////////////////////// diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java index 1a34247c306ab..497460d3e7d0b 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java @@ -89,7 +89,7 @@ public Integer call() { final Map<Path,String> localResources = container.getLocalizedResources(); String containerIdStr = ConverterUtils.toString(container.getContainerID()); final String user = launchContext.getUser(); - final Map<String,String> env = launchContext.getEnv(); + final Map<String,String> env = launchContext.getEnvironment(); final List<String> command = launchContext.getCommands(); int ret = -1; @@ -109,7 +109,7 @@ public Integer call() { } launchContext.setCommands(newCmds); - Map<String, String> envs = launchContext.getEnv(); + Map<String, String> envs = launchContext.getEnvironment(); Map<String, String> newEnvs = new HashMap<String, String>(envs.size()); for (Entry<String, String> entry : envs.entrySet()) { newEnvs.put( @@ -118,7 +118,7 @@ 
public Integer call() { ApplicationConstants.LOG_DIR_EXPANSION_VAR, containerLogDir.toUri().getPath())); } - launchContext.setEnv(newEnvs); + launchContext.setEnvironment(newEnvs); // /////////////////////////// End of variable expansion FileContext lfs = FileContext.getLocalFSFileContext(); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java index 593d6525a6894..a31bef8af9dc5 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java @@ -71,7 +71,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType; -import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.AMLivelinessMonitor; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler; import org.apache.hadoop.yarn.service.AbstractService; @@ -90,7 +89,6 @@ public class ClientRMService extends AbstractService implements final private AtomicInteger applicationCounter = new AtomicInteger(0); final private YarnScheduler scheduler; final private RMContext rmContext; - private final AMLivelinessMonitor amLivelinessMonitor; private final RMAppManager rmAppManager; private String clientServiceBindAddress; @@ -106,7 +104,6 @@ public ClientRMService(RMContext rmContext, 
YarnScheduler scheduler, super(ClientRMService.class.getName()); this.scheduler = scheduler; this.rmContext = rmContext; - this.amLivelinessMonitor = rmContext.getAMLivelinessMonitor(); this.rmAppManager = rmAppManager; } @@ -195,15 +192,18 @@ public SubmitApplicationResponse submitApplication( SubmitApplicationRequest request) throws YarnRemoteException { ApplicationSubmissionContext submissionContext = request .getApplicationSubmissionContext(); - ApplicationId applicationId = null; - String user = null; + ApplicationId applicationId = submissionContext.getApplicationId(); + String user = submissionContext.getUser(); try { user = UserGroupInformation.getCurrentUser().getShortUserName(); - applicationId = submissionContext.getApplicationId(); if (rmContext.getRMApps().get(applicationId) != null) { throw new IOException("Application with id " + applicationId + " is already present! Cannot add a duplicate!"); } + + // Safety + submissionContext.setUser(user); + // This needs to be synchronous as the client can query // immediately following the submission to get the application status. // So call handle directly and do not send an event. 
@@ -226,6 +226,7 @@ public SubmitApplicationResponse submitApplication( return response; } + @SuppressWarnings("unchecked") @Override public FinishApplicationResponse finishApplication( FinishApplicationRequest request) throws YarnRemoteException { diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java index 9a86dfd45799a..d0cd0a7ff86c6 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java @@ -210,7 +210,9 @@ protected synchronized void checkAppNumCompletedLimit() { } } - protected synchronized void submitApplication(ApplicationSubmissionContext submissionContext) { + @SuppressWarnings("unchecked") + protected synchronized void submitApplication( + ApplicationSubmissionContext submissionContext) { ApplicationId applicationId = submissionContext.getApplicationId(); RMApp application = null; try { @@ -224,27 +226,37 @@ protected synchronized void submitApplication(ApplicationSubmissionContext submi clientTokenStr = clientToken.encodeToUrlString(); LOG.debug("Sending client token as " + clientTokenStr); } - submissionContext.setQueue(submissionContext.getQueue() == null - ? "default" : submissionContext.getQueue()); - submissionContext.setApplicationName(submissionContext - .getApplicationName() == null ? 
"N/A" : submissionContext - .getApplicationName()); + + // Sanity checks + if (submissionContext.getQueue() == null) { + submissionContext.setQueue(YarnConfiguration.DEFAULT_QUEUE_NAME); + } + if (submissionContext.getApplicationName() == null) { + submissionContext.setApplicationName( + YarnConfiguration.DEFAULT_APPLICATION_NAME); + } + + // Store application for recovery ApplicationStore appStore = rmContext.getApplicationsStore() .createApplicationStore(submissionContext.getApplicationId(), submissionContext); + + // Create RMApp application = new RMAppImpl(applicationId, rmContext, this.conf, submissionContext.getApplicationName(), user, submissionContext.getQueue(), submissionContext, clientTokenStr, - appStore, rmContext.getAMLivelinessMonitor(), this.scheduler, + appStore, this.scheduler, this.masterService); - if (rmContext.getRMApps().putIfAbsent(applicationId, application) != null) { + if (rmContext.getRMApps().putIfAbsent(applicationId, application) != + null) { LOG.info("Application with id " + applicationId + " is already present! Cannot add a duplicate!"); - // don't send event through dispatcher as it will be handled by app already - // present with this id. + // don't send event through dispatcher as it will be handled by app + // already present with this id. application.handle(new RMAppRejectedEvent(applicationId, - "Application with this id is already present! Cannot add a duplicate!")); + "Application with this id is already present! 
" + + "Cannot add a duplicate!")); } else { this.rmContext.getDispatcher().getEventHandler().handle( new RMAppEvent(applicationId, RMAppEventType.START)); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManagerSubmitEvent.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManagerSubmitEvent.java index 99b3d77fd4b4a..495e78442808d 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManagerSubmitEvent.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManagerSubmitEvent.java @@ -18,7 +18,6 @@ package org.apache.hadoop.yarn.server.resourcemanager; -import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; public class RMAppManagerSubmitEvent extends RMAppManagerEvent { diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java index 1a10993bb08e7..b394faa85d26b 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java @@ 
-23,7 +23,6 @@ import java.nio.ByteBuffer; import java.security.PrivilegedAction; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; @@ -120,7 +119,8 @@ private void launch() throws IOException { + " for AM " + application.getAppAttemptId()); ContainerLaunchContext launchContext = createAMContainerLaunchContext(applicationContext, masterContainerID); - StartContainerRequest request = recordFactory.newRecordInstance(StartContainerRequest.class); + StartContainerRequest request = + recordFactory.newRecordInstance(StartContainerRequest.class); request.setContainerLaunchContext(launchContext); containerMgrProxy.startContainer(request); LOG.info("Done launching container " + application.getMasterContainer() @@ -130,7 +130,8 @@ private void launch() throws IOException { private void cleanup() throws IOException { connect(); ContainerId containerId = application.getMasterContainer().getId(); - StopContainerRequest stopRequest = recordFactory.newRecordInstance(StopContainerRequest.class); + StopContainerRequest stopRequest = + recordFactory.newRecordInstance(StopContainerRequest.class); stopRequest.setContainerId(containerId); containerMgrProxy.stopContainer(stopRequest); } @@ -145,7 +146,7 @@ private ContainerManager getContainerMgrProxy( final YarnRPC rpc = YarnRPC.create(conf); // TODO: Don't create again and again. 
UserGroupInformation currentUser = - UserGroupInformation.createRemoteUser("TODO"); // TODO + UserGroupInformation.createRemoteUser("yarn"); // TODO if (UserGroupInformation.isSecurityEnabled()) { ContainerToken containerToken = container.getContainerToken(); Token<ContainerTokenIdentifier> token = @@ -170,8 +171,8 @@ private ContainerLaunchContext createAMContainerLaunchContext( ContainerId containerID) throws IOException { // Construct the actual Container - ContainerLaunchContext container = recordFactory.newRecordInstance(ContainerLaunchContext.class); - container.setCommands(applicationMasterContext.getCommandList()); + ContainerLaunchContext container = + applicationMasterContext.getAMContainerSpec(); StringBuilder mergedCommand = new StringBuilder(); String failCount = Integer.toString(application.getAppAttemptId() .getAttemptId()); @@ -189,34 +190,28 @@ private ContainerLaunchContext createAMContainerLaunchContext( LOG.info("Command to launch container " + containerID + " : " + mergedCommand); - Map<String, String> environment = - applicationMasterContext.getAllEnvironment(); - environment.putAll(setupTokensInEnv(applicationMasterContext)); - container.setEnv(environment); - - // Construct the actual Container + + // Finalize the container container.setContainerId(containerID); container.setUser(applicationMasterContext.getUser()); - container.setResource(applicationMasterContext.getMasterCapability()); - container.setLocalResources(applicationMasterContext.getAllResourcesTodo()); - container.setContainerTokens(applicationMasterContext.getFsTokensTodo()); + setupTokensAndEnv(container); + return container; } - private Map<String, String> setupTokensInEnv( - ApplicationSubmissionContext asc) + private void setupTokensAndEnv( + ContainerLaunchContext container) throws IOException { - Map<String, String> env = - new HashMap<String, String>(); + Map<String, String> environment = container.getEnvironment(); if (UserGroupInformation.isSecurityEnabled()) { // 
TODO: Security enabled/disabled info should come from RM. Credentials credentials = new Credentials(); DataInputByteBuffer dibb = new DataInputByteBuffer(); - if (asc.getFsTokensTodo() != null) { + if (container.getContainerTokens() != null) { // TODO: Don't do this kind of checks everywhere. - dibb.reset(asc.getFsTokensTodo()); + dibb.reset(container.getContainerTokens()); credentials.readTokenStorageStream(dibb); } @@ -236,14 +231,16 @@ private Map<String, String> setupTokensInEnv( token.setService(new Text(resolvedAddr)); String appMasterTokenEncoded = token.encodeToUrlString(); LOG.debug("Putting appMaster token in env : " + appMasterTokenEncoded); - env.put(ApplicationConstants.APPLICATION_MASTER_TOKEN_ENV_NAME, + environment.put( + ApplicationConstants.APPLICATION_MASTER_TOKEN_ENV_NAME, appMasterTokenEncoded); // Add the RM token credentials.addToken(new Text(resolvedAddr), token); DataOutputBuffer dob = new DataOutputBuffer(); credentials.writeTokenStorageToStream(dob); - asc.setFsTokensTodo(ByteBuffer.wrap(dob.getData(), 0, dob.getLength())); + container.setContainerTokens( + ByteBuffer.wrap(dob.getData(), 0, dob.getLength())); ApplicationTokenIdentifier identifier = new ApplicationTokenIdentifier( application.getAppAttemptId().getApplicationId()); @@ -252,9 +249,10 @@ private Map<String, String> setupTokensInEnv( String encoded = Base64.encodeBase64URLSafeString(clientSecretKey.getEncoded()); LOG.debug("The encoded client secret-key to be put in env : " + encoded); - env.put(ApplicationConstants.APPLICATION_CLIENT_SECRET_ENV_NAME, encoded); + environment.put( + ApplicationConstants.APPLICATION_CLIENT_SECRET_ENV_NAME, + encoded); } - return env; } @SuppressWarnings("unchecked") diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java 
b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java index 015c76163e458..65ee9945e29c7 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java @@ -86,7 +86,6 @@ public class RMAppImpl implements RMApp { // Mutable fields private long startTime; private long finishTime; - private AMLivelinessMonitor amLivelinessMonitor; private RMAppAttempt currentAttempt; private static final FinalTransition FINAL_TRANSITION = new FinalTransition(); @@ -163,7 +162,7 @@ RMAppEventType.KILL, new AppKilledTransition()) public RMAppImpl(ApplicationId applicationId, RMContext rmContext, Configuration config, String name, String user, String queue, ApplicationSubmissionContext submissionContext, String clientTokenStr, - ApplicationStore appStore, AMLivelinessMonitor amLivelinessMonitor, + ApplicationStore appStore, YarnScheduler scheduler, ApplicationMasterService masterService) { this.applicationId = applicationId; @@ -176,7 +175,6 @@ public RMAppImpl(ApplicationId applicationId, RMContext rmContext, this.submissionContext = submissionContext; this.clientTokenStr = clientTokenStr; this.appStore = appStore; - this.amLivelinessMonitor = amLivelinessMonitor; this.scheduler = scheduler; this.masterService = masterService; this.startTime = System.currentTimeMillis(); @@ -380,6 +378,7 @@ public void handle(RMAppEvent event) { } } + @SuppressWarnings("unchecked") private void createNewAttempt() { ApplicationAttemptId appAttemptId = Records .newRecord(ApplicationAttemptId.class); @@ -434,6 +433,7 @@ private Set<NodeId> getNodesOnWhichAttemptRan(RMAppImpl app) { return 
nodes; } + @SuppressWarnings("unchecked") public void transition(RMAppImpl app, RMAppEvent event) { Set<NodeId> nodes = getNodesOnWhichAttemptRan(app); for (NodeId nodeId : nodes) { diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java index 6daff1d88e766..12eca4d82f3b1 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java @@ -84,6 +84,7 @@ public class RMAppAttemptImpl implements RMAppAttempt { RMAppAttemptEvent> stateMachine; private final RMContext rmContext; + @SuppressWarnings("rawtypes") private final EventHandler eventHandler; private final YarnScheduler scheduler; private final ApplicationMasterService masterService; @@ -459,7 +460,7 @@ public void transition(RMAppAttemptImpl appAttempt, // Request a container for the AM. 
ResourceRequest request = BuilderUtils.newResourceRequest( AM_CONTAINER_PRIORITY, "*", appAttempt.submissionContext - .getMasterCapability(), 1); + .getAMContainerSpec().getResource(), 1); LOG.debug("About to request resources for AM of " + appAttempt.applicationAttemptId + " required " + request); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java index 94649923cb3f5..afdec298a1dc5 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java @@ -23,7 +23,6 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; -import org.apache.hadoop.yarn.util.Apps; import org.apache.hadoop.yarn.webapp.hamlet.Hamlet; import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE; import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY; @@ -59,7 +58,8 @@ class AppsBlock extends HtmlBlock { String appId = app.getApplicationId().toString(); String trackingUrl = app.getTrackingUrl(); String ui = trackingUrl == null || trackingUrl.isEmpty() ? "UNASSIGNED" : - (app.getFinishTime() == 0 ? "ApplicationMaster" : "JobHistory"); + (app.getFinishTime() == 0 ? + "ApplicationMaster URL" : "JobHistory URL"); String percent = String.format("%.1f", app.getProgress() * 100); tbody. tr(). 
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java index 901948fab706a..4be273996728b 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java @@ -29,6 +29,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; +import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEvent; import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher; @@ -81,13 +82,17 @@ public RMApp submitApp(int masterMemory) throws Exception { ApplicationId appId = resp.getApplicationId(); SubmitApplicationRequest req = Records.newRecord(SubmitApplicationRequest.class); - ApplicationSubmissionContext sub = Records.newRecord(ApplicationSubmissionContext.class); + ApplicationSubmissionContext sub = + Records.newRecord(ApplicationSubmissionContext.class); sub.setApplicationId(appId); sub.setApplicationName(""); sub.setUser(""); + ContainerLaunchContext clc = + Records.newRecord(ContainerLaunchContext.class); Resource capability = Records.newRecord(Resource.class); capability.setMemory(masterMemory); - sub.setMasterCapability(capability); + clc.setResource(capability); + sub.setAMContainerSpec(clc); 
req.setApplicationSubmissionContext(sub); client.submitApplication(req); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java index bd66a6337f151..afdeb161775ae 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java @@ -18,19 +18,12 @@ package org.apache.hadoop.yarn.server.resourcemanager; -import static org.mockito.Mockito.*; -import java.util.ArrayList; import java.util.List; -import java.util.LinkedList; -import java.util.Map; import java.util.concurrent.ConcurrentMap; - import junit.framework.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; @@ -44,7 +37,6 @@ import org.apache.hadoop.yarn.security.ApplicationTokenSecretManager; import org.apache.hadoop.yarn.security.client.ClientToAMSecretManager; import org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService; -import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.RMAppManagerEvent; import org.apache.hadoop.yarn.server.resourcemanager.RMAppManagerEventType; import org.apache.hadoop.yarn.server.resourcemanager.RMAppManager; @@ -63,8 +55,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; import 
org.apache.hadoop.yarn.service.Service; -import org.junit.After; -import org.junit.Before; import org.junit.Test; import com.google.common.collect.Maps; import com.google.common.collect.Lists; @@ -75,7 +65,6 @@ */ public class TestAppManager{ - private static final Log LOG = LogFactory.getLog(TestAppManager.class); private static RMAppEventType appEventType = RMAppEventType.KILL; public synchronized RMAppEventType getAppEventType() { @@ -117,10 +106,8 @@ public ConcurrentMap<ApplicationId, RMApp> getRMApps() { public class TestAppManagerDispatcher implements EventHandler<RMAppManagerEvent> { - private final RMContext rmContext; - public TestAppManagerDispatcher(RMContext rmContext) { - this.rmContext = rmContext; + public TestAppManagerDispatcher() { } @Override @@ -132,15 +119,11 @@ public void handle(RMAppManagerEvent event) { public class TestDispatcher implements EventHandler<RMAppEvent> { - private final RMContext rmContext; - - public TestDispatcher(RMContext rmContext) { - this.rmContext = rmContext; + public TestDispatcher() { } @Override public void handle(RMAppEvent event) { - ApplicationId appID = event.getApplicationId(); //RMApp rmApp = this.rmContext.getRMApps().get(appID); setAppEventType(event.getType()); System.out.println("in handle routine " + getAppEventType().toString()); @@ -178,7 +161,8 @@ public int getCompletedAppsListSize() { public void setCompletedAppsMax(int max) { super.setCompletedAppsMax(max); } - public void submitApplication(ApplicationSubmissionContext submissionContext) { + public void submitApplication( + ApplicationSubmissionContext submissionContext) { super.submitApplication(submissionContext); } } @@ -336,8 +320,9 @@ public void testRMAppRetireZeroSetting() throws Exception { } protected void setupDispatcher(RMContext rmContext, Configuration conf) { - TestDispatcher testDispatcher = new TestDispatcher(rmContext); - TestAppManagerDispatcher testAppManagerDispatcher = new TestAppManagerDispatcher(rmContext); + TestDispatcher 
testDispatcher = new TestDispatcher(); + TestAppManagerDispatcher testAppManagerDispatcher = + new TestAppManagerDispatcher(); rmContext.getDispatcher().register(RMAppEventType.class, testDispatcher); rmContext.getDispatcher().register(RMAppManagerEventType.class, testAppManagerDispatcher); ((Service)rmContext.getDispatcher()).init(conf); @@ -359,7 +344,8 @@ public void testRMAppSubmit() throws Exception { ApplicationId appID = MockApps.newAppID(1); RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); - ApplicationSubmissionContext context = recordFactory.newRecordInstance(ApplicationSubmissionContext.class); + ApplicationSubmissionContext context = + recordFactory.newRecordInstance(ApplicationSubmissionContext.class); context.setApplicationId(appID); setupDispatcher(rmContext, conf); @@ -367,8 +353,12 @@ public void testRMAppSubmit() throws Exception { RMApp app = rmContext.getRMApps().get(appID); Assert.assertNotNull("app is null", app); Assert.assertEquals("app id doesn't match", appID, app.getApplicationId()); - Assert.assertEquals("app name doesn't match", "N/A", app.getName()); - Assert.assertEquals("app queue doesn't match", "default", app.getQueue()); + Assert.assertEquals("app name doesn't match", + YarnConfiguration.DEFAULT_APPLICATION_NAME, + app.getName()); + Assert.assertEquals("app queue doesn't match", + YarnConfiguration.DEFAULT_QUEUE_NAME, + app.getQueue()); Assert.assertEquals("app state doesn't match", RMAppState.NEW, app.getState()); Assert.assertNotNull("app store is null", app.getApplicationStore()); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java index 56bac77209990..56b3f4b18afa7 100644 --- 
a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java @@ -128,7 +128,7 @@ protected RMApp createNewTestApp() { RMApp application = new RMAppImpl(applicationId, rmContext, conf, name, user, queue, submissionContext, clientTokenStr, - appStore, rmContext.getAMLivelinessMonitor(), scheduler, + appStore, scheduler, masterService); testAppStartState(applicationId, user, name, queue, application); diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerTokenSecretManager.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerTokenSecretManager.java index 321489827744f..989f3483d91a2 100644 --- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerTokenSecretManager.java +++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerTokenSecretManager.java @@ -27,6 +27,8 @@ import java.net.InetSocketAddress; import java.security.PrivilegedAction; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.List; import junit.framework.Assert; @@ -54,10 +56,10 @@ import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ApplicationMaster; import 
org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerToken; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; @@ -77,6 +79,7 @@ import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.security.SchedulerSecurityInfo; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; +import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; @@ -137,15 +140,11 @@ public void test() throws IOException, InterruptedException { ApplicationSubmissionContext appSubmissionContext = recordFactory.newRecordInstance(ApplicationSubmissionContext.class); appSubmissionContext.setApplicationId(appID); - appSubmissionContext.setMasterCapability(recordFactory - .newRecordInstance(Resource.class)); - appSubmissionContext.getMasterCapability().setMemory(1024); -// appSubmissionContext.resources = new HashMap<String, URL>(); + ContainerLaunchContext amContainer = + recordFactory.newRecordInstance(ContainerLaunchContext.class); + amContainer.setResource(Resources.createResource(1024)); + amContainer.setCommands(Arrays.asList("sleep", "100")); appSubmissionContext.setUser("testUser"); -// appSubmissionContext.environment = new HashMap<String, String>(); -// appSubmissionContext.command = new ArrayList<String>(); - appSubmissionContext.addCommand("sleep"); - appSubmissionContext.addCommand("100"); // TODO: Use a resource to work around bugs. 
Today NM doesn't create local // app-dirs if there are no file to download!! @@ -162,10 +161,11 @@ public void test() throws IOException, InterruptedException { rsrc.setTimestamp(file.lastModified()); rsrc.setType(LocalResourceType.FILE); rsrc.setVisibility(LocalResourceVisibility.PRIVATE); - appSubmissionContext.setResourceTodo("testFile", rsrc); + amContainer.setLocalResources(Collections.singletonMap("testFile", rsrc)); SubmitApplicationRequest submitRequest = recordFactory .newRecordInstance(SubmitApplicationRequest.class); submitRequest.setApplicationSubmissionContext(appSubmissionContext); + appSubmissionContext.setAMContainerSpec(amContainer); resourceManager.getClientRMService().submitApplication(submitRequest); // Wait till container gets allocated for AM
05f4312a0fd7a993277c47362726bb027282a198
tapiji
Adapts the namespace of all TapiJI plug-ins to org.eclipselabs.tapiji.*.
p
https://github.com/tapiji/tapiji
diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/.project b/at.ac.tuwien.inso.eclipse.i18n.jsf/.project index b777dc68..df955d26 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/.project +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/.project @@ -1,6 +1,6 @@ <?xml version="1.0" encoding="UTF-8"?> <projectDescription> - <name>at.ac.tuwien.inso.eclipse.i18n.jsf</name> + <name>org.eclipselabs.tapiji.tools.jsf</name> <comment></comment> <projects> </projects> diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/META-INF/MANIFEST.MF b/at.ac.tuwien.inso.eclipse.i18n.jsf/META-INF/MANIFEST.MF index d14d4da7..41ac1153 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/META-INF/MANIFEST.MF +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/META-INF/MANIFEST.MF @@ -1,20 +1,20 @@ Manifest-Version: 1.0 Bundle-ManifestVersion: 2 Bundle-Name: JsfBuilderExtension -Bundle-SymbolicName: at.ac.tuwien.inso.eclipse.i18n.jsf;singleton:=true +Bundle-SymbolicName: org.eclipselabs.tapiji.tools.jsf;singleton:=true Bundle-Version: 0.0.1.qualifier Bundle-RequiredExecutionEnvironment: JavaSE-1.6 -Require-Bundle: at.ac.tuwien.inso.eclipse.i18n;bundle-version="0.0.1", - org.eclipse.core.resources;bundle-version="3.6.0", +Require-Bundle: org.eclipse.core.resources;bundle-version="3.6.0", org.eclipse.jface, - at.ac.tuwien.inso.eclipse.rbe;bundle-version="0.0.1", + org.eclipselabs.tapiji.translator.rbe;bundle-version="0.0.1", org.eclipse.jface.text, org.eclipse.jdt.core;bundle-version="3.6.0", org.eclipse.jdt.ui;bundle-version="3.6.0", org.eclipse.jst.jsf.common;bundle-version="1.2.0", org.eclipse.jst.jsp.core;bundle-version="1.2.300", org.eclipse.wst.sse.ui, - org.eclipse.core.runtime;bundle-version="3.6.0" + org.eclipse.core.runtime;bundle-version="3.6.0", + org.eclipselabs.tapiji.tools.core;bundle-version="0.0.1" Import-Package: org.eclipse.core.filebuffers, org.eclipse.core.runtime, org.eclipse.jdt.core, diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/plugin.xml b/at.ac.tuwien.inso.eclipse.i18n.jsf/plugin.xml index 
8c99619e..6c7bf706 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/plugin.xml +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/plugin.xml @@ -21,7 +21,7 @@ <extension point="org.eclipse.ui.ide.markerResolution"> <markerResolutionGenerator - class="at.ac.tuwien.inso.eclipse.i18n.builder.JSFViolationResolutionGenerator" + class="org.eclipselabs.tapiji.tools.jsf.builder.JSFViolationResolutionGenerator" markerType="org.eclipse.jst.jsf.ui.JSPSemanticsValidatorMarker"> </markerResolutionGenerator> </extension> @@ -40,7 +40,7 @@ </validator> </extension> <extension - point="at.ac.tuwien.inso.eclipse.i18n.builderExtension"> + point="org.eclipselabs.tapiji.tools.core.builderExtension"> <i18nResourceAuditor class="auditor.JSFResourceAuditor"> </i18nResourceAuditor> diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/auditor/JSFResourceAuditor.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/auditor/JSFResourceAuditor.java index 4038d280..5325d891 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/auditor/JSFResourceAuditor.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/auditor/JSFResourceAuditor.java @@ -8,17 +8,17 @@ import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.ui.IMarkerResolution; +import org.eclipselabs.tapiji.tools.core.builder.quickfix.CreateResourceBundle; +import org.eclipselabs.tapiji.tools.core.builder.quickfix.CreateResourceBundleEntry; +import org.eclipselabs.tapiji.tools.core.builder.quickfix.IncludeResource; +import org.eclipselabs.tapiji.tools.core.extensions.I18nResourceAuditor; +import org.eclipselabs.tapiji.tools.core.extensions.ILocation; +import org.eclipselabs.tapiji.tools.core.extensions.IMarkerConstants; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; import quickfix.ExportToResourceBundleResolution; import quickfix.ReplaceResourceBundleDefReference; import quickfix.ReplaceResourceBundleReference; -import 
at.ac.tuwien.inso.eclipse.i18n.builder.quickfix.CreateResourceBundle; -import at.ac.tuwien.inso.eclipse.i18n.builder.quickfix.CreateResourceBundleEntry; -import at.ac.tuwien.inso.eclipse.i18n.builder.quickfix.IncludeResource; -import at.ac.tuwien.inso.eclipse.i18n.extensions.I18nResourceAuditor; -import at.ac.tuwien.inso.eclipse.i18n.extensions.ILocation; -import at.ac.tuwien.inso.eclipse.i18n.extensions.IMarkerConstants; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; public class JSFResourceAuditor extends I18nResourceAuditor { @@ -29,8 +29,6 @@ public String[] getFileEndings () { public void audit(IResource resource) { parse (resource); } - - private void parse (IResource resource) { @@ -57,9 +55,9 @@ public String getContextId() { } @Override - public List<IMarkerResolution> getMarkerResolutions(IMarker marker, - int cause) { + public List<IMarkerResolution> getMarkerResolutions(IMarker marker) { List<IMarkerResolution> resolutions = new ArrayList<IMarkerResolution>(); + int cause = marker.getAttribute("cause", -1); switch (cause) { case IMarkerConstants.CAUSE_CONSTANT_LITERAL: diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/auditor/model/SLLocation.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/auditor/model/SLLocation.java index 97269552..3bfd8d28 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/auditor/model/SLLocation.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/auditor/model/SLLocation.java @@ -3,8 +3,8 @@ import java.io.Serializable; import org.eclipse.core.resources.IFile; +import org.eclipselabs.tapiji.tools.core.extensions.ILocation; -import at.ac.tuwien.inso.eclipse.i18n.extensions.ILocation; public class SLLocation implements Serializable, ILocation { @@ -13,7 +13,7 @@ public class SLLocation implements Serializable, ILocation { private int startPos = -1; private int endPos = -1; private String literal; - private Object data; + private Serializable data; public SLLocation(IFile file, int startPos, int endPos, 
String literal) { super(); @@ -43,10 +43,10 @@ public void setEndPos(int endPos) { public String getLiteral() { return literal; } - public Object getData () { + public Serializable getData () { return data; } - public void setData (Object data) { + public void setData (Serializable data) { this.data = data; } diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ExportToResourceBundleResolution.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ExportToResourceBundleResolution.java index 0482e80c..bced7ad7 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ExportToResourceBundleResolution.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ExportToResourceBundleResolution.java @@ -12,9 +12,9 @@ import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.Display; import org.eclipse.ui.IMarkerResolution2; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; +import org.eclipselabs.tapiji.tools.core.ui.dialogs.CreateResourceBundleEntryDialog; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; -import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.CreateResourceBundleEntryDialog; import auditor.JSFResourceBundleDetector; public class ExportToResourceBundleResolution implements IMarkerResolution2 { diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ReplaceResourceBundleDefReference.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ReplaceResourceBundleDefReference.java index 8e010192..e165e8ba 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ReplaceResourceBundleDefReference.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ReplaceResourceBundleDefReference.java @@ -14,10 +14,10 @@ import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.Display; import org.eclipse.ui.IMarkerResolution2; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; +import 
org.eclipselabs.tapiji.tools.core.ui.dialogs.InsertResourceBundleReferenceDialog; +import org.eclipselabs.tapiji.tools.core.ui.dialogs.ResourceBundleSelectionDialog; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; -import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.InsertResourceBundleReferenceDialog; -import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.ResourceBundleSelectionDialog; public class ReplaceResourceBundleDefReference implements IMarkerResolution2 { diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ReplaceResourceBundleReference.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ReplaceResourceBundleReference.java index a43d9133..c2f2c6ca 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ReplaceResourceBundleReference.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/quickfix/ReplaceResourceBundleReference.java @@ -14,9 +14,9 @@ import org.eclipse.swt.graphics.Image; import org.eclipse.swt.widgets.Display; import org.eclipse.ui.IMarkerResolution2; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; +import org.eclipselabs.tapiji.tools.core.ui.dialogs.InsertResourceBundleReferenceDialog; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; -import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.InsertResourceBundleReferenceDialog; import auditor.JSFResourceBundleDetector; public class ReplaceResourceBundleReference implements IMarkerResolution2 { diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/JSFELMessageHover.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/JSFELMessageHover.java index 9271274c..4a18f152 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/JSFELMessageHover.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/JSFELMessageHover.java @@ -11,9 +11,9 @@ import org.eclipse.jst.jsf.context.structureddocument.IStructuredDocumentContext; import org.eclipse.jst.jsf.context.structureddocument.IStructuredDocumentContextFactory; import 
org.eclipse.jst.jsp.core.internal.regions.DOMJSPRegionContexts; +import org.eclipselabs.tapiji.tools.core.builder.InternationalizationNature; import util.ELUtils; -import at.ac.tuwien.inso.eclipse.i18n.builder.InternationalizationNature; import auditor.JSFResourceBundleDetector; /** diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/BundleNameProposal.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/BundleNameProposal.java index b74747fc..5afd59d0 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/BundleNameProposal.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/BundleNameProposal.java @@ -19,11 +19,11 @@ import org.eclipse.jst.jsf.context.structureddocument.IStructuredDocumentContext; import org.eclipse.jst.jsf.context.structureddocument.IStructuredDocumentContextFactory; import org.eclipse.wst.xml.core.internal.regions.DOMRegionContext; +import org.eclipselabs.tapiji.tools.core.builder.InternationalizationNature; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; import org.w3c.dom.Attr; import org.w3c.dom.Node; -import at.ac.tuwien.inso.eclipse.i18n.builder.InternationalizationNature; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; public class BundleNameProposal implements IContentAssistProcessor { diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/MessageCompletionProposal.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/MessageCompletionProposal.java index d034358d..7c43856d 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/MessageCompletionProposal.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/MessageCompletionProposal.java @@ -5,8 +5,8 @@ import org.eclipse.jface.text.contentassist.IContextInformation; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.graphics.Point; +import org.eclipselabs.tapiji.tools.core.util.ImageUtils; -import 
at.ac.tuwien.inso.eclipse.i18n.util.ImageUtils; public class MessageCompletionProposal implements IJavaCompletionProposal { diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/NewResourceBundleEntryProposal.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/NewResourceBundleEntryProposal.java index 60bb6061..797c1e87 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/NewResourceBundleEntryProposal.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/NewResourceBundleEntryProposal.java @@ -10,9 +10,9 @@ import org.eclipse.swt.widgets.Display; import org.eclipse.ui.ISharedImages; import org.eclipse.ui.PlatformUI; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; +import org.eclipselabs.tapiji.tools.core.ui.dialogs.CreateResourceBundleEntryDialog; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; -import at.ac.tuwien.inso.eclipse.i18n.ui.dialogs.CreateResourceBundleEntryDialog; public class NewResourceBundleEntryProposal implements IJavaCompletionProposal { diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/jsf/MessageCompletionProposal.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/jsf/MessageCompletionProposal.java index e4371172..cf9fcdc4 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/jsf/MessageCompletionProposal.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/ui/autocompletion/jsf/MessageCompletionProposal.java @@ -16,11 +16,11 @@ import org.eclipse.jst.jsf.context.resolver.structureddocument.IWorkspaceContextResolver; import org.eclipse.jst.jsf.context.structureddocument.IStructuredDocumentContext; import org.eclipse.jst.jsf.context.structureddocument.IStructuredDocumentContextFactory; +import org.eclipselabs.tapiji.tools.core.builder.InternationalizationNature; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; import 
ui.autocompletion.NewResourceBundleEntryProposal; -import at.ac.tuwien.inso.eclipse.i18n.builder.InternationalizationNature; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; import auditor.JSFResourceBundleDetector; public class MessageCompletionProposal implements IContentAssistProcessor { diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/util/ELUtils.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/util/ELUtils.java index 55640a84..09b6f211 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/util/ELUtils.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/util/ELUtils.java @@ -1,8 +1,8 @@ package util; import org.eclipse.core.resources.IProject; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; public class ELUtils { diff --git a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/validator/JSFInternationalizationValidator.java b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/validator/JSFInternationalizationValidator.java index ada68dbe..07d06586 100644 --- a/at.ac.tuwien.inso.eclipse.i18n.jsf/src/validator/JSFInternationalizationValidator.java +++ b/at.ac.tuwien.inso.eclipse.i18n.jsf/src/validator/JSFInternationalizationValidator.java @@ -13,10 +13,10 @@ import org.eclipse.wst.validation.internal.provisional.core.IReporter; import org.eclipse.wst.validation.internal.provisional.core.IValidationContext; import org.eclipse.wst.validation.internal.provisional.core.IValidator; +import org.eclipselabs.tapiji.tools.core.extensions.IMarkerConstants; +import org.eclipselabs.tapiji.tools.core.model.manager.ResourceBundleManager; +import org.eclipselabs.tapiji.tools.core.util.EditorUtils; -import at.ac.tuwien.inso.eclipse.i18n.extensions.IMarkerConstants; -import at.ac.tuwien.inso.eclipse.i18n.model.manager.ResourceBundleManager; -import at.ac.tuwien.inso.eclipse.i18n.util.EditorUtils; import auditor.JSFResourceBundleDetector; import auditor.model.SLLocation;
8fcbc4e91c9dd1e7b5a69a416146960f7af1e711
hbase
HBASE-4169 FSUtils LeaseRecovery for non HDFS- FileSystems; added 4169-correction.txt correction--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1155441 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/hbase
diff --git a/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java b/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java index edca7f4d5ef3..e70b0d476e5f 100644 --- a/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java +++ b/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java @@ -29,10 +29,10 @@ /** * <a href="http://www.mapr.com">MapR</a> implementation. */ -public class FSMapRUtils { +public class FSMapRUtils extends FSUtils { private static final Log LOG = LogFactory.getLog(FSMapRUtils.class); - public static void recoverFileLease(final FileSystem fs, final Path p, + public void recoverFileLease(final FileSystem fs, final Path p, Configuration conf) throws IOException { LOG.info("Recovering file " + p.toString() + " by changing permission to readonly");
23bfe3323f5e34ba5d4e7c9d1d600913908b1d9c
Vala
gtk+-2.0, gtk+-3.0: Make Gtk.Scale.add_mark's markup argument nullable Fixes bug 624442.
c
https://github.com/GNOME/vala/
diff --git a/vapi/gtk+-2.0.vapi b/vapi/gtk+-2.0.vapi index 0cfefc0a37..e0621c081f 100644 --- a/vapi/gtk+-2.0.vapi +++ b/vapi/gtk+-2.0.vapi @@ -3320,7 +3320,7 @@ namespace Gtk { } [CCode (cheader_filename = "gtk/gtk.h")] public class Scale : Gtk.Range, Atk.Implementor, Gtk.Buildable, Gtk.Orientable { - public void add_mark (double value, Gtk.PositionType position, string markup); + public void add_mark (double value, Gtk.PositionType position, string? markup); public void clear_marks (); public int get_digits (); public bool get_draw_value (); diff --git a/vapi/gtk+-3.0.vapi b/vapi/gtk+-3.0.vapi index 1c3388cdce..0776037a2e 100644 --- a/vapi/gtk+-3.0.vapi +++ b/vapi/gtk+-3.0.vapi @@ -2983,7 +2983,7 @@ namespace Gtk { public class Scale : Gtk.Range, Atk.Implementor, Gtk.Buildable, Gtk.SizeRequest, Gtk.Orientable { [CCode (type = "GtkWidget*", has_construct_function = false)] public Scale (Gtk.Orientation orientation, Gtk.Adjustment adjustment); - public void add_mark (double value, Gtk.PositionType position, string markup); + public void add_mark (double value, Gtk.PositionType position, string? 
markup); public void clear_marks (); public int get_digits (); public bool get_draw_value (); diff --git a/vapi/packages/gtk+-2.0/gtk+-2.0.metadata b/vapi/packages/gtk+-2.0/gtk+-2.0.metadata index c1a37f4c0f..c16739a2a3 100644 --- a/vapi/packages/gtk+-2.0/gtk+-2.0.metadata +++ b/vapi/packages/gtk+-2.0/gtk+-2.0.metadata @@ -447,6 +447,7 @@ gtk_rc_get_style_by_paths.widget_path nullable="1" gtk_rc_get_style_by_paths.class_path nullable="1" gtk_rc_style_copy transfer_ownership="1" gtk_rc_style_create_style transfer_ownership="1" +gtk_scale_add_mark.markup nullable="1" gtk_scale_button_new.icons is_array="1" no_array_length="1" gtk_scale_button_set_icons.icons is_array="1" no_array_length="1" gtk_scale_draw_value hidden="1" diff --git a/vapi/packages/gtk+-3.0/gtk+-3.0.metadata b/vapi/packages/gtk+-3.0/gtk+-3.0.metadata index 6c34f0bb1f..1b8d4c450d 100644 --- a/vapi/packages/gtk+-3.0/gtk+-3.0.metadata +++ b/vapi/packages/gtk+-3.0/gtk+-3.0.metadata @@ -450,6 +450,7 @@ gtk_rc_get_style_by_paths.widget_path nullable="1" gtk_rc_get_style_by_paths.class_path nullable="1" gtk_rc_style_copy transfer_ownership="1" gtk_rc_style_create_style transfer_ownership="1" +gtk_scale_add_mark.markup nullable="1" gtk_scale_button_new.icons is_array="1" no_array_length="1" gtk_scale_button_set_icons.icons is_array="1" no_array_length="1" gtk_scale_draw_value hidden="1"
a72066f13f76503665abc5d93fcc6edb65ff3f28
hbase
HBASE-9600 TestColumnSchemaModel and- TestTableSchemaModel test cases are failing with IBM IBM Java 6--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1525179 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/hbase
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java index e7dc05d032e5..05c2dc0d2444 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.rest.model; import java.io.Serializable; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.Map; import javax.xml.bind.annotation.XmlAnyAttribute; @@ -57,7 +57,7 @@ public class ColumnSchemaModel implements Serializable { private static QName VERSIONS = new QName(HConstants.VERSIONS); private String name; - private Map<QName,Object> attrs = new HashMap<QName,Object>(); + private Map<QName,Object> attrs = new LinkedHashMap<QName,Object>(); /** * Default constructor diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java index 7812c602017b..6241db91c8cd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; -import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -74,7 +74,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler { new QName(HColumnDescriptor.COMPRESSION); private String name; - private Map<QName,Object> attrs = new HashMap<QName,Object>(); + private Map<QName,Object> attrs = new LinkedHashMap<QName,Object>(); private List<ColumnSchemaModel> columns = new ArrayList<ColumnSchemaModel>(); /** diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java index cb022d12c053..15e165285340 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java @@ -59,13 +59,13 @@ public TestColumnSchemaModel() throws Exception { protected ColumnSchemaModel buildTestModel() { ColumnSchemaModel model = new ColumnSchemaModel(); model.setName(COLUMN_NAME); - model.__setBlockcache(BLOCKCACHE); model.__setBlocksize(BLOCKSIZE); model.__setBloomfilter(BLOOMFILTER); + model.__setBlockcache(BLOCKCACHE); model.__setCompression(COMPRESSION); - model.__setInMemory(IN_MEMORY); - model.__setTTL(TTL); model.__setVersions(VERSIONS); + model.__setTTL(TTL); + model.__setInMemory(IN_MEMORY); return model; }
bbac34179e1ce5b7a136b4b5ebc6101dfb3eb65d
Mylyn Reviews
Fix broken test Change-Id: I15affe2d03ac77ff86e8f2ac01dde6539f6a64d9
c
https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews
diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapper.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapper.java index aae50b2f..3191e91b 100644 --- a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapper.java +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapper.java @@ -31,9 +31,9 @@ import org.eclipse.mylyn.versions.tasks.ui.AbstractChangesetMappingProvider; /** - * + * * @author Kilian Matt - * + * */ public class GenericTaskChangesetMapper extends AbstractChangesetMappingProvider { @@ -56,39 +56,14 @@ public void getChangesetsForTask(final IChangeSetMapping mapping, if (task == null) throw new IllegalArgumentException("task must not be null"); - List<ScmRepository> repos = getRepositoriesFor(task); - for (final ScmRepository repo : repos) { + for (final ScmRepository repo : configuration.getRepositoriesFor(task)) { ChangeSetProvider provider = new ChangeSetProvider(repo); indexSearch.search(task, repo.getUrl(), 10, new MappingChangeSetCollector(monitor, mapping, provider)); } } - private List<ScmRepository> getRepositoriesFor(ITask task) - throws CoreException { - Set<ScmRepository> repos = new HashSet<ScmRepository>(); - List<IProject> projects = configuration.getProjectsForTaskRepository( - task.getConnectorKind(), task.getRepositoryUrl()); - for (IProject p : projects) { - ScmRepository repository = getRepositoryForProject(p); - if(repository!=null) { - repos.add(repository); - } - } - return new ArrayList<ScmRepository>(repos); - } - - private ScmRepository getRepositoryForProject(IProject p) - throws CoreException { - ScmConnector connector = ScmCore.getConnector(p); - if(connector==null) { - return null; - } - ScmRepository 
repository = connector.getRepository(p, - new NullProgressMonitor()); - return repository; - } public int getScoreFor(ITask task) { return 0; diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/IConfiguration.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/IConfiguration.java index eca49106..a2c0aabe 100644 --- a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/IConfiguration.java +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/generic/IConfiguration.java @@ -13,16 +13,17 @@ import java.util.List; -import org.eclipse.core.resources.IProject; +import org.eclipse.core.runtime.CoreException; +import org.eclipse.mylyn.tasks.core.ITask; +import org.eclipse.mylyn.versions.core.ScmRepository; /** - * + * * @author Kilian Matt * */ public interface IConfiguration { - List<IProject> getProjectsForTaskRepository(String connectorKind, - String repositoryUrl); + List<ScmRepository> getRepositoriesFor(ITask task) throws CoreException; } diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/EclipsePluginConfiguration.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/EclipsePluginConfiguration.java index 6b0219ec..f11c7e12 100644 --- a/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/EclipsePluginConfiguration.java +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.generic/src/org/eclipse/mylyn/versions/tasks/mapper/internal/EclipsePluginConfiguration.java @@ -11,29 +11,52 @@ package org.eclipse.mylyn.versions.tasks.mapper.internal; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; +import java.util.Set; import 
org.eclipse.core.resources.IProject; import org.eclipse.core.resources.ResourcesPlugin; +import org.eclipse.core.runtime.CoreException; +import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.mylyn.internal.tasks.ui.TasksUiPlugin; +import org.eclipse.mylyn.tasks.core.ITask; import org.eclipse.mylyn.tasks.core.TaskRepository; +import org.eclipse.mylyn.versions.core.ScmCore; +import org.eclipse.mylyn.versions.core.ScmRepository; +import org.eclipse.mylyn.versions.core.spi.ScmConnector; import org.eclipse.mylyn.versions.tasks.mapper.generic.IConfiguration; /** - * + * * @author Kilian Matt * */ public class EclipsePluginConfiguration implements IConfiguration { - public List<IProject> getProjectsForTaskRepository(String connectorKind, + public List<ScmRepository> getRepositoriesFor(ITask task) + throws CoreException { + Set<ScmRepository> repos = new HashSet<ScmRepository>(); + + List<IProject> projects = getProjectsForTaskRepository( + task.getConnectorKind(), task.getRepositoryUrl()); + for (IProject p : projects) { + ScmRepository repository = getRepositoryForProject(p); + if(repository!=null) { + repos.add(repository); + } + } + return new ArrayList<ScmRepository>(repos); + } + + private List<IProject> getProjectsForTaskRepository(String connectorKind, String repositoryUrl) { List<IProject> projects = new ArrayList<IProject>(); for (IProject project : ResourcesPlugin.getWorkspace().getRoot() .getProjects()) { TaskRepository repo = TasksUiPlugin.getDefault() .getRepositoryForResource(project); - + if (repo!=null && connectorKind.equals(repo.getConnectorKind()) && repositoryUrl.equals(repo.getRepositoryUrl())) { projects.add(project); @@ -42,4 +65,14 @@ public List<IProject> getProjectsForTaskRepository(String connectorKind, return projects; } + private ScmRepository getRepositoryForProject(IProject p) + throws CoreException { + ScmConnector connector = ScmCore.getConnector(p); + if(connector==null) { + return null; + } + ScmRepository repository = 
connector.getRepository(p, + new NullProgressMonitor()); + return repository; + } } diff --git a/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java b/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java index bc4cb83d..41a309ab 100644 --- a/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java +++ b/tbr/org.eclipse.mylyn.versions.tasks.mapper.tests/src/org/eclipse/mylyn/versions/tasks/mapper/generic/GenericTaskChangesetMapperTest.java @@ -14,12 +14,15 @@ import static org.mockito.Mockito.*; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.mylyn.tasks.core.ITask; import org.eclipse.mylyn.versions.core.ChangeSet; +import org.eclipse.mylyn.versions.core.ScmRepository; +import org.eclipse.mylyn.versions.core.spi.ScmConnector; import org.eclipse.mylyn.versions.tasks.core.IChangeSetMapping; import org.junit.After; import org.junit.Before; @@ -35,12 +38,15 @@ public class GenericTaskChangesetMapperTest { private GenericTaskChangesetMapper mapper; private IChangeSetIndexSearcher indexSearcher; - + private IConfiguration configuration; @Before public void setUp() throws Exception { - IConfiguration config = mock(IConfiguration.class); + configuration = mock(IConfiguration.class); indexSearcher = mock(IChangeSetIndexSearcher.class); - mapper = new GenericTaskChangesetMapper(config, indexSearcher); + mapper = new GenericTaskChangesetMapper(configuration, indexSearcher); + + ScmConnector connector = mock(ScmConnector.class); + when(configuration.getRepositoriesFor(any(ITask.class))).thenReturn(Collections.singletonList(new ScmRepository(connector,"test", "http://localhost"))); 
} @Test @@ -56,15 +62,16 @@ public void testGetChangesetsForTask_TaskNullNotAllowed() } @Test - public void testGetChangesetsForTask_() throws CoreException { + public void testGetChangesetsForTask() throws CoreException { + ITask task = mock(ITask.class); TestChangeSetMapping mapping = new TestChangeSetMapping(task); doAnswer(new Answer<Object>() { @Override public Object answer(InvocationOnMock invocation) throws Throwable { - ((IChangeSetCollector) invocation.getArguments()[3]).collect( - "123", (String) invocation.getArguments()[1]); + IChangeSetCollector collector = (IChangeSetCollector) invocation.getArguments()[3]; + collector.collect("123", (String) invocation.getArguments()[1]); return null; }
663d9f43b0f5b49088102781ae8ce7123932e97b
intellij-community
Cheaper ProgressManager.checkCanceled().- Mostly
a
https://github.com/JetBrains/intellij-community
diff --git a/python/src/com/jetbrains/python/inspections/PyUnresolvedReferencesInspection.java b/python/src/com/jetbrains/python/inspections/PyUnresolvedReferencesInspection.java index 4772e6ae4abe0..d4ba0adc9abd6 100644 --- a/python/src/com/jetbrains/python/inspections/PyUnresolvedReferencesInspection.java +++ b/python/src/com/jetbrains/python/inspections/PyUnresolvedReferencesInspection.java @@ -149,7 +149,7 @@ static List<LocalQuickFix> proposeImportFixes(final PyElement node, String ref_t } } // maybe some unimported file has it, too - ProgressManager.getInstance().checkCanceled(); // before expensive index searches + ProgressManager.checkCanceled(); // before expensive index searches // NOTE: current indices have limitations, only finding direct definitions of classes and functions. Project project = node.getProject(); GlobalSearchScope scope = null; // GlobalSearchScope.projectScope(project); diff --git a/python/src/com/jetbrains/python/psi/resolve/PyResolveUtil.java b/python/src/com/jetbrains/python/psi/resolve/PyResolveUtil.java index e391f31c07b11..73cd77033c9f1 100644 --- a/python/src/com/jetbrains/python/psi/resolve/PyResolveUtil.java +++ b/python/src/com/jetbrains/python/psi/resolve/PyResolveUtil.java @@ -121,7 +121,7 @@ public static PsiElement treeCrawlUp(PsiScopeProcessor processor, boolean fromun PsiElement seeker = elt; PsiElement cap = getConcealingParent(elt); do { - ProgressManager.getInstance().checkCanceled(); + ProgressManager.checkCanceled(); if (!seeker.isValid()) return null; if (fromunder) { fromunder = false; // only honour fromunder once per call diff --git a/python/src/com/jetbrains/python/testing/PythonUnitTestTestIdUrlProvider.java b/python/src/com/jetbrains/python/testing/PythonUnitTestTestIdUrlProvider.java index 05db4a5916bcd..6d6b1ec3f401c 100644 --- a/python/src/com/jetbrains/python/testing/PythonUnitTestTestIdUrlProvider.java +++ b/python/src/com/jetbrains/python/testing/PythonUnitTestTestIdUrlProvider.java @@ -54,7 +54,7 @@ 
public List<Location> getLocation(@NotNull final String protocolId, @NotNull fin final List<Location> locations = new ArrayList<Location>(); for (PyClass cls : getClassesByName(project, className)) { - ProgressManager.getInstance().checkCanceled(); + ProgressManager.checkCanceled(); final PyFunction method = locateMethodInHierarchy(cls, methodName); if (method == null) {
a8a2815cdf14fbcc8854338bf98e344f7b4f149c
Delta Spike
DELTASPIKE-266 refactor JSF test setup
c
https://github.com/apache/deltaspike
diff --git a/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/ViewScopedContextTest.java b/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/ViewScopedContextTest.java index e00fa890c..1fb578fc2 100644 --- a/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/ViewScopedContextTest.java +++ b/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/ViewScopedContextTest.java @@ -65,9 +65,8 @@ public static WebArchive deploy() .create(WebArchive.class, "viewScopedContextTest.war") .addPackage(BackingBean.class.getPackage()) .addAsLibraries(ArchiveUtils.getDeltaSpikeCoreAndJsfArchive()) - .addAsWebInfResource("viewScopedContextTest/WEB-INF/web.xml", "web.xml") - .addAsWebResource("viewScopedContextTest/index.html", "index.html") - .addAsWebResource("viewScopedContextTest/page1.xhtml", "page1.xhtml") + .addAsWebInfResource("default/WEB-INF/web.xml", "web.xml") + .addAsWebResource("viewScopedContextTest/page.xhtml", "page.xhtml") .addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml"); } @@ -76,7 +75,7 @@ public static WebArchive deploy() @RunAsClient public void testViewScopedContext() throws Exception { - driver.get(new URL(contextPath, "page1.xhtml").toString()); + driver.get(new URL(contextPath, "page.xhtml").toString()); WebElement inputField = driver.findElement(By.id("test:valueInput")); inputField.sendKeys("23"); diff --git a/deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/WEB-INF/web.xml b/deltaspike/modules/jsf/impl/src/test/resources/default/WEB-INF/web.xml similarity index 100% rename from deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/WEB-INF/web.xml rename to deltaspike/modules/jsf/impl/src/test/resources/default/WEB-INF/web.xml diff --git a/deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/index.html 
b/deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/index.html deleted file mode 100644 index f2d6e352f..000000000 --- a/deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/index.html +++ /dev/null @@ -1,24 +0,0 @@ -<!-- - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. ---> - -<html> -<body> -index page works! -</body> -</html> diff --git a/deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/page1.xhtml b/deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/page.xhtml similarity index 100% rename from deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/page1.xhtml rename to deltaspike/modules/jsf/impl/src/test/resources/viewScopedContextTest/page.xhtml
3e20fd82c05daa60005aaff11411cd1814beb778
Vala
girparser: Support scope=async parameters. Based on patch by Evan. Fixes bug 704176.
a
https://github.com/GNOME/vala/
diff --git a/vala/valagirparser.vala b/vala/valagirparser.vala index b6cbe1634b..fde6b68fba 100644 --- a/vala/valagirparser.vala +++ b/vala/valagirparser.vala @@ -2734,13 +2734,14 @@ public class Vala.GirParser : CodeVisitor { } class ParameterInfo { - public ParameterInfo (Parameter param, int array_length_idx, int closure_idx, int destroy_idx) { + public ParameterInfo (Parameter param, int array_length_idx, int closure_idx, int destroy_idx, bool is_async = false) { this.param = param; this.array_length_idx = array_length_idx; this.closure_idx = closure_idx; this.destroy_idx = destroy_idx; this.vala_idx = 0.0F; this.keep = true; + this.is_async = is_async; } public Parameter param; @@ -2749,6 +2750,7 @@ public class Vala.GirParser : CodeVisitor { public int closure_idx; public int destroy_idx; public bool keep; + public bool is_async; } void parse_function (string element_name) { @@ -2940,7 +2942,7 @@ public class Vala.GirParser : CodeVisitor { comment.add_content_for_parameter ((param.ellipsis)? "..." 
: param.name, param_comment); } - var info = new ParameterInfo (param, array_length_idx, closure_idx, destroy_idx); + var info = new ParameterInfo (param, array_length_idx, closure_idx, destroy_idx, scope == "async"); if (s is Method && scope == "async") { var unresolved_type = param.variable_type as UnresolvedType; @@ -3387,42 +3389,61 @@ public class Vala.GirParser : CodeVisitor { } foreach (ParameterInfo info in parameters) { - if (info.keep) { - - /* add_parameter sets carray_length_parameter_position and cdelegate_target_parameter_position - so do it first*/ - if (s is Method) { - ((Method) s).add_parameter (info.param); - } else if (s is Delegate) { - ((Delegate) s).add_parameter (info.param); - } else if (s is Signal) { - ((Signal) s).add_parameter (info.param); - } - - if (info.array_length_idx != -1) { - if ((info.array_length_idx) >= parameters.size) { - Report.error (get_current_src (), "invalid array_length index"); - continue; - } - set_array_ccode (info.param, parameters[info.array_length_idx]); + if (!info.keep) { + continue; + } + + /* add_parameter sets carray_length_parameter_position and cdelegate_target_parameter_position + so do it first*/ + if (s is Method) { + ((Method) s).add_parameter (info.param); + } else if (s is Delegate) { + ((Delegate) s).add_parameter (info.param); + } else if (s is Signal) { + ((Signal) s).add_parameter (info.param); + } + + if (info.array_length_idx != -1) { + if ((info.array_length_idx) >= parameters.size) { + Report.error (get_current_src (), "invalid array_length index"); + continue; } + set_array_ccode (info.param, parameters[info.array_length_idx]); + } - if (info.closure_idx != -1) { - if ((info.closure_idx) >= parameters.size) { - Report.error (get_current_src (), "invalid closure index"); - continue; - } - if ("%g".printf (parameters[info.closure_idx].vala_idx) != "%g".printf (info.vala_idx + 0.1)) { - info.param.set_attribute_double ("CCode", "delegate_target_pos", parameters[info.closure_idx].vala_idx); - 
} + if (info.closure_idx != -1) { + if ((info.closure_idx) >= parameters.size) { + Report.error (get_current_src (), "invalid closure index"); + continue; + } + if ("%g".printf (parameters[info.closure_idx].vala_idx) != "%g".printf (info.vala_idx + 0.1)) { + info.param.set_attribute_double ("CCode", "delegate_target_pos", parameters[info.closure_idx].vala_idx); + } + } + if (info.destroy_idx != -1) { + if (info.destroy_idx >= parameters.size) { + Report.error (get_current_src (), "invalid destroy index"); + continue; } - if (info.destroy_idx != -1) { - if (info.destroy_idx >= parameters.size) { - Report.error (get_current_src (), "invalid destroy index"); - continue; + if ("%g".printf (parameters[info.destroy_idx].vala_idx) != "%g".printf (info.vala_idx + 0.2)) { + info.param.set_attribute_double ("CCode", "destroy_notify_pos", parameters[info.destroy_idx].vala_idx); + } + } + + if (info.is_async) { + var resolved_type = info.param.variable_type; + if (resolved_type is UnresolvedType) { + var resolved_symbol = resolve_symbol (node.parent, ((UnresolvedType) resolved_type).unresolved_symbol); + if (resolved_symbol is Delegate) { + resolved_type = new DelegateType ((Delegate) resolved_symbol); } - if ("%g".printf (parameters[info.destroy_idx].vala_idx) != "%g".printf (info.vala_idx + 0.2)) { - info.param.set_attribute_double ("CCode", "destroy_notify_pos", parameters[info.destroy_idx].vala_idx); + } + + if (resolved_type is DelegateType) { + var d = ((DelegateType) resolved_type).delegate_symbol; + if (!(d.name == "DestroyNotify" && d.parent_symbol.name == "GLib")) { + info.param.set_attribute_string ("CCode", "scope", "async"); + info.param.variable_type.value_owned = true; } } } diff --git a/vapi/gstreamer-1.0.vapi b/vapi/gstreamer-1.0.vapi index 54d4aa5a4f..e5bc8cf720 100644 --- a/vapi/gstreamer-1.0.vapi +++ b/vapi/gstreamer-1.0.vapi @@ -1772,7 +1772,7 @@ namespace Gst { public virtual void cleanup (); public virtual void join (void* id); public virtual void 
prepare () throws GLib.Error; - public virtual void* push (Gst.TaskPoolFunction func) throws GLib.Error; + public virtual void* push ([CCode (scope = "async")] owned Gst.TaskPoolFunction func) throws GLib.Error; } [CCode (cheader_filename = "gst/gst.h", ref_function = "gst_toc_ref", type_id = "gst_toc_get_type ()", unref_function = "gst_toc_unref")] [Compact] @@ -1961,7 +1961,7 @@ namespace Gst { public static bool api_type_has_tag (GLib.Type api, GLib.Quark tag); public static GLib.Type api_type_register (string api, string tags); public static unowned Gst.MetaInfo? get_info (string impl); - public static unowned Gst.MetaInfo? register (GLib.Type api, string impl, size_t size, Gst.MetaInitFunction init_func, Gst.MetaFreeFunction free_func, Gst.MetaTransformFunction transform_func); + public static unowned Gst.MetaInfo? register (GLib.Type api, string impl, size_t size, [CCode (scope = "async")] owned Gst.MetaInitFunction init_func, [CCode (scope = "async")] owned Gst.MetaFreeFunction free_func, [CCode (scope = "async")] owned Gst.MetaTransformFunction transform_func); } [CCode (cheader_filename = "gst/gst.h", has_type_id = false)] public struct MetaInfo { diff --git a/vapi/libsoup-2.4.vapi b/vapi/libsoup-2.4.vapi index c78e6343fe..334a8add96 100644 --- a/vapi/libsoup-2.4.vapi +++ b/vapi/libsoup-2.4.vapi @@ -90,7 +90,7 @@ namespace Soup { public uint hash_by_ip (); public uint hash_by_name (); public bool is_resolved (); - public void resolve_async (GLib.MainContext? async_context, GLib.Cancellable? cancellable, Soup.AddressCallback callback); + public void resolve_async (GLib.MainContext? async_context, GLib.Cancellable? cancellable, [CCode (scope = "async")] owned Soup.AddressCallback callback); public uint resolve_sync (GLib.Cancellable? 
cancellable = null); [NoAccessorMethod] public Soup.AddressFamily family { get; construct; } @@ -645,10 +645,10 @@ namespace Soup { [NoWrapper] public virtual void kick (); public void pause_message (Soup.Message msg); - public void prefetch_dns (string hostname, GLib.Cancellable? cancellable, Soup.AddressCallback? callback); + public void prefetch_dns (string hostname, GLib.Cancellable? cancellable, [CCode (scope = "async")] owned Soup.AddressCallback? callback); [Deprecated (since = "2.38")] public void prepare_for_uri (Soup.URI uri); - public virtual void queue_message (owned Soup.Message msg, Soup.SessionCallback? callback); + public virtual void queue_message (owned Soup.Message msg, [CCode (scope = "async")] owned Soup.SessionCallback? callback); public bool redirect_message (Soup.Message msg); public void remove_feature (Soup.SessionFeature feature); public void remove_feature_by_type (GLib.Type feature_type); @@ -728,7 +728,7 @@ namespace Soup { public class Socket : GLib.Object { [CCode (has_construct_function = false)] public Socket (string optname1, ...); - public void connect_async (GLib.Cancellable? cancellable, Soup.SocketCallback callback); + public void connect_async (GLib.Cancellable? cancellable, [CCode (scope = "async")] owned Soup.SocketCallback callback); public uint connect_sync (GLib.Cancellable? cancellable = null); public void disconnect (); public int get_fd (); @@ -822,7 +822,7 @@ namespace Soup { } [CCode (cheader_filename = "libsoup/soup.h", type_cname = "SoupPasswordManagerInterface", type_id = "soup_password_manager_get_type ()")] public interface PasswordManager : Soup.SessionFeature, GLib.Object { - public abstract void get_passwords_async (Soup.Message msg, Soup.Auth auth, bool retrying, GLib.MainContext async_context, GLib.Cancellable? cancellable, Soup.PasswordManagerCallback callback); + public abstract void get_passwords_async (Soup.Message msg, Soup.Auth auth, bool retrying, GLib.MainContext async_context, GLib.Cancellable? 
cancellable, [CCode (scope = "async")] owned Soup.PasswordManagerCallback callback); public abstract void get_passwords_sync (Soup.Message msg, Soup.Auth auth, GLib.Cancellable? cancellable = null); } [CCode (cheader_filename = "libsoup/soup.h", type_cname = "SoupProxyResolverInterface", type_id = "soup_proxy_resolver_get_type ()")] @@ -833,7 +833,7 @@ namespace Soup { } [CCode (cheader_filename = "libsoup/soup.h", type_cname = "SoupProxyURIResolverInterface", type_id = "soup_proxy_uri_resolver_get_type ()")] public interface ProxyURIResolver : Soup.SessionFeature, GLib.Object { - public abstract void get_proxy_uri_async (Soup.URI uri, GLib.MainContext? async_context, GLib.Cancellable? cancellable, Soup.ProxyURIResolverCallback callback); + public abstract void get_proxy_uri_async (Soup.URI uri, GLib.MainContext? async_context, GLib.Cancellable? cancellable, [CCode (scope = "async")] owned Soup.ProxyURIResolverCallback callback); public abstract uint get_proxy_uri_sync (Soup.URI uri, GLib.Cancellable? cancellable, out Soup.URI proxy_uri); } [CCode (cheader_filename = "libsoup/soup.h", type_cname = "SoupSessionFeatureInterface", type_id = "soup_session_feature_get_type ()")]
895ccff4f350d1e8c7acc121bb9bb118e164adfa
hbase
Fixing broken build... forgot to add- JVMClusterUtil
c
https://github.com/apache/hbase
diff --git a/contrib/stargate/core/src/test/java/org/apache/hadoop/hbase/stargate/MiniClusterTestBase.java b/contrib/stargate/core/src/test/java/org/apache/hadoop/hbase/stargate/MiniClusterTestBase.java index 939d2475f491..24600c5bc552 100644 --- a/contrib/stargate/core/src/test/java/org/apache/hadoop/hbase/stargate/MiniClusterTestBase.java +++ b/contrib/stargate/core/src/test/java/org/apache/hadoop/hbase/stargate/MiniClusterTestBase.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.util.StringUtils; import org.apache.log4j.Level; diff --git a/contrib/transactional/src/test/java/org/apache/hadoop/hbase/regionserver/transactional/TestTHLogRecovery.java b/contrib/transactional/src/test/java/org/apache/hadoop/hbase/regionserver/transactional/TestTHLogRecovery.java index 09b7d983495b..192ce69775a9 100644 --- a/contrib/transactional/src/test/java/org/apache/hadoop/hbase/regionserver/transactional/TestTHLogRecovery.java +++ b/contrib/transactional/src/test/java/org/apache/hadoop/hbase/regionserver/transactional/TestTHLogRecovery.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.JVMClusterUtil; public class TestTHLogRecovery extends HBaseClusterTestCase { private static final Log LOG = LogFactory.getLog(TestTHLogRecovery.class); @@ -141,8 +142,8 @@ public void testWithFlushBeforeCommit() throws IOException, // } private void flushRegionServer() { - List<LocalHBaseCluster.RegionServerThread> regionThreads = cluster - .getRegionThreads(); + List<JVMClusterUtil.RegionServerThread> regionThreads = cluster + .getRegionServerThreads(); HRegion region = null; int server = -1; 
@@ -171,8 +172,8 @@ private void flushRegionServer() { * just shut down. */ private void stopOrAbortRegionServer(final boolean abort) { - List<LocalHBaseCluster.RegionServerThread> regionThreads = cluster - .getRegionThreads(); + List<JVMClusterUtil.RegionServerThread> regionThreads = cluster + .getRegionServerThreads(); int server = -1; for (int i = 0; i < regionThreads.size(); i++) { diff --git a/core/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java b/core/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java new file mode 100644 index 000000000000..40e993764aef --- /dev/null +++ b/core/src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java @@ -0,0 +1,162 @@ +/** + * Copyright 2010 The Apache Software Foundation + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.util; + +import java.io.IOException; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.master.HMaster; +import org.apache.hadoop.hbase.regionserver.HRegionServer; + +/** + * Utility used running a cluster all in the one JVM. 
+ */ +public class JVMClusterUtil { + private static final Log LOG = LogFactory.getLog(JVMClusterUtil.class); + + /** + * Datastructure to hold RegionServer Thread and RegionServer instance + */ + public static class RegionServerThread extends Thread { + private final HRegionServer regionServer; + + public RegionServerThread(final HRegionServer r, final int index) { + super(r, "RegionServer:" + index); + this.regionServer = r; + } + + /** @return the region server */ + public HRegionServer getRegionServer() { + return this.regionServer; + } + + /** + * Block until the region server has come online, indicating it is ready + * to be used. + */ + public void waitForServerOnline() { + while (!regionServer.isOnline()) { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + // continue waiting + } + } + } + } + + /** + * Creates a {@link RegionServerThread}. + * Call 'start' on the returned thread to make it run. + * @param c Configuration to use. + * @param hrsc Class to create. + * @param index Used distingushing the object returned. + * @throws IOException + * @return Region server added. + */ + public static JVMClusterUtil.RegionServerThread createRegionServerThread(final HBaseConfiguration c, + final Class<? extends HRegionServer> hrsc, final int index) + throws IOException { + HRegionServer server; + try { + server = hrsc.getConstructor(HBaseConfiguration.class).newInstance(c); + } catch (Exception e) { + IOException ioe = new IOException(); + ioe.initCause(e); + throw ioe; + } + return new JVMClusterUtil.RegionServerThread(server, index); + } + + /** + * Start the cluster. + * @param m + * @param regionServers + * @return Address to use contacting master. + */ + public static String startup(final HMaster m, + final List<JVMClusterUtil.RegionServerThread> regionservers) { + if (m != null) m.start(); + if (regionservers != null) { + for (JVMClusterUtil.RegionServerThread t: regionservers) { + t.start(); + } + } + return m == null? 
null: m.getMasterAddress().toString(); + } + + /** + * @param master + * @param regionservers + */ + public static void shutdown(final HMaster master, + final List<RegionServerThread> regionservers) { + LOG.debug("Shutting down HBase Cluster"); + // Be careful how the hdfs shutdown thread runs in context where more than + // one regionserver in the mix. + Thread hdfsClientFinalizer = null; + for (JVMClusterUtil.RegionServerThread t: regionservers) { + Thread tt = t.getRegionServer().setHDFSShutdownThreadOnExit(null); + if (hdfsClientFinalizer == null && tt != null) { + hdfsClientFinalizer = tt; + } + } + if (master != null) { + master.shutdown(); + } + // regionServerThreads can never be null because they are initialized when + // the class is constructed. + for(Thread t: regionservers) { + if (t.isAlive()) { + try { + t.join(); + } catch (InterruptedException e) { + // continue + } + } + } + if (master != null) { + while (master.isAlive()) { + try { + // The below has been replaced to debug sometime hangs on end of + // tests. + // this.master.join(): + Threads.threadDumpingIsAlive(master); + } catch(InterruptedException e) { + // continue + } + } + } + if (hdfsClientFinalizer != null) { + // Don't run the shutdown thread. Plays havoc if we try to start a + // minihbasecluster immediately after this one has gone down (In + // Filesystem, the shutdown thread is kept in a static and is created + // on classloading. Can only run it once). + // hdfsClientFinalizer.start(); + // Threads.shutdown(hdfsClientFinalizer); + } + LOG.info("Shutdown " + + ((regionservers != null)? master.getName(): "0 masters") + + " " + regionservers.size() + " region server(s)"); + } +} \ No newline at end of file
82bd585bb78ab580d1bd16e0e9ae7402c5348579
drools
[DROOLS-114] Support Defeasible rules--
a
https://github.com/kiegroup/drools
diff --git a/drools-beliefs/src/main/java/org/drools/beliefs/bayes/BayesBeliefSet.java b/drools-beliefs/src/main/java/org/drools/beliefs/bayes/BayesBeliefSet.java index 9247cf6f69a..a5f6dcd8087 100644 --- a/drools-beliefs/src/main/java/org/drools/beliefs/bayes/BayesBeliefSet.java +++ b/drools-beliefs/src/main/java/org/drools/beliefs/bayes/BayesBeliefSet.java @@ -98,7 +98,12 @@ public boolean isNegated() { @Override public boolean isUndecided() { - return conflictCounter > 0 ; + return isConflicting(); + } + + @Override + public boolean isConflicting() { + return conflictCounter > 0; } @Override diff --git a/drools-compiler/src/test/java/org/drools/compiler/beliefsystem/defeasible/DefeasibilityTest.java b/drools-compiler/src/test/java/org/drools/compiler/beliefsystem/defeasible/DefeasibilityTest.java index 246cc6c5bad..374f1c9666a 100644 --- a/drools-compiler/src/test/java/org/drools/compiler/beliefsystem/defeasible/DefeasibilityTest.java +++ b/drools-compiler/src/test/java/org/drools/compiler/beliefsystem/defeasible/DefeasibilityTest.java @@ -911,4 +911,31 @@ public void testManyDefeasibles() { } + + @Test + public void testRetractNegativeDefeaters() { + + String drl = "declare Foo end " + + + "rule Def " + + " @Defeater " + + "when " + + " String() " + + "then " + + " insertLogical( new Foo(), 'neg' ); " + + "end "; + StatefulKnowledgeSession session = getSessionFromString( drl ); + + FactHandle h = session.insert( "foo" ); + + session.fireAllRules(); + assertEquals( 1, session.getObjects().size() ); + + session.delete( h ); + + session.fireAllRules(); + assertEquals( 0, session.getObjects().size() ); + } + + } diff --git a/drools-core/src/main/java/org/drools/core/beliefsystem/BeliefSet.java b/drools-core/src/main/java/org/drools/core/beliefsystem/BeliefSet.java index 40624e678cc..f782d0b551f 100644 --- a/drools-core/src/main/java/org/drools/core/beliefsystem/BeliefSet.java +++ b/drools-core/src/main/java/org/drools/core/beliefsystem/BeliefSet.java @@ -41,6 +41,8 
@@ public interface BeliefSet { boolean isUndecided(); + boolean isConflicting(); + boolean isPositive(); } diff --git a/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSetImpl.java b/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSetImpl.java index 687843e12a9..cc332141852 100644 --- a/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSetImpl.java +++ b/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSetImpl.java @@ -90,6 +90,11 @@ public boolean isNegated() { @Override public boolean isUndecided() { + return isConflicting(); + } + + @Override + public boolean isConflicting() { return posCounter > 0 && negCounter > 0; } diff --git a/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSystem.java b/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSystem.java index 8642f233ccb..477336914f9 100644 --- a/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSystem.java +++ b/drools-core/src/main/java/org/drools/core/beliefsystem/jtms/JTMSBeliefSystem.java @@ -147,7 +147,7 @@ public void delete(LogicalDependency<T> node, BeliefSet beliefSet, PropagationContext context) { JTMSBeliefSet jtmsBeliefSet = (JTMSBeliefSet) beliefSet; - boolean wasConflicting = jtmsBeliefSet.isUndecided(); + boolean wasUndecided = jtmsBeliefSet.isUndecided(); boolean wasNegated = jtmsBeliefSet.isNegated(); // If the prime object is removed, we need to update the FactHandle, and tell the callback to update @@ -160,7 +160,7 @@ public void delete(LogicalDependency<T> node, beliefSet.remove( (JTMSMode) node.getMode() ); if ( beliefSet.isEmpty() ) { - if ( wasNegated ) { + if ( wasNegated && ! 
wasUndecided ) { defEP.getObjectStore().addHandle( beliefSet.getFactHandle(), beliefSet.getFactHandle().getObject() ); // was negated, so add back in, so main retract works InternalFactHandle fh = jtmsBeliefSet.getNegativeFactHandle(); ((NamedEntryPoint) fh.getEntryPoint()).delete( fh, context.getRuleOrigin(), node.getJustifier() ); @@ -172,14 +172,14 @@ public void delete(LogicalDependency<T> node, ((NamedEntryPoint) fh.getEntryPoint()).delete( fh, context.getRuleOrigin(), node.getJustifier() ); } - } else if ( wasConflicting && !jtmsBeliefSet.isUndecided() ) { + } else if ( wasUndecided && !jtmsBeliefSet.isUndecided() ) { insertBelief( node, defEP.getObjectTypeConfigurationRegistry().getObjectTypeConf( defEP.getEntryPoint(), node.getObject() ), jtmsBeliefSet, context, false, wasNegated, - wasConflicting ); + wasUndecided ); } else if ( primeChanged ) { // we know there must be at least one more of the same type, as they are still in conflict diff --git a/drools-core/src/main/java/org/drools/core/beliefsystem/simple/SimpleBeliefSet.java b/drools-core/src/main/java/org/drools/core/beliefsystem/simple/SimpleBeliefSet.java index ea40aad79f3..dfd1ab5f48f 100644 --- a/drools-core/src/main/java/org/drools/core/beliefsystem/simple/SimpleBeliefSet.java +++ b/drools-core/src/main/java/org/drools/core/beliefsystem/simple/SimpleBeliefSet.java @@ -74,6 +74,11 @@ public boolean isUndecided() { return false; } + @Override + public boolean isConflicting() { + return false; + } + @Override public boolean isPositive() { return ! isEmpty();
fcb64742e9fd38e7fe0729c7835acbf0c4c6312c
Search_api
Issue #2150779 by hefox: Fixed "Overridden" detection for index features.
a
https://github.com/lucidworks/drupal_search_api
diff --git a/CHANGELOG.txt b/CHANGELOG.txt index 3388b363..61acf160 100644 --- a/CHANGELOG.txt +++ b/CHANGELOG.txt @@ -1,5 +1,6 @@ Search API 1.x, dev (xx/xx/xxxx): --------------------------------- +- #2150779 by hefox: Fixed "Overridden" detection for index features. - #1227702 by drunken monkey: Improved error handling. Search API 1.11 (12/25/2013): diff --git a/includes/index_entity.inc b/includes/index_entity.inc index 10d5c901..7afb9123 100644 --- a/includes/index_entity.inc +++ b/includes/index_entity.inc @@ -264,6 +264,10 @@ class SearchApiIndex extends Entity { $this->server = NULL; $this->enabled = FALSE; } + if (!empty($this->options['fields'])) { + ksort($this->options['fields']); + } + $this->resetCaches(); return parent::save();
c029bfc5918c47a8c9a97e23bf94184ef964a017
drools
BZ-1074672: Fixing legacy API to properly handle- resource configuration. (cherry picked from commit- 645d0e5cb86225a898391960350588d1817b1de2)--
c
https://github.com/kiegroup/drools
diff --git a/drools-core/src/main/java/org/drools/core/builder/conf/impl/JaxbConfigurationImpl.java b/drools-core/src/main/java/org/drools/core/builder/conf/impl/JaxbConfigurationImpl.java index ca1008fdaa9..abf9cec54ff 100644 --- a/drools-core/src/main/java/org/drools/core/builder/conf/impl/JaxbConfigurationImpl.java +++ b/drools-core/src/main/java/org/drools/core/builder/conf/impl/JaxbConfigurationImpl.java @@ -25,12 +25,12 @@ import java.util.List; import java.util.Properties; -import com.sun.tools.xjc.Language; -import org.kie.internal.builder.JaxbConfiguration; import org.kie.api.io.ResourceConfiguration; +import org.kie.internal.builder.JaxbConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.sun.tools.xjc.Language; import com.sun.tools.xjc.Options; public class JaxbConfigurationImpl extends ResourceConfigurationImpl implements JaxbConfiguration { @@ -70,6 +70,14 @@ public void setClasses(List<String> classes) { this.classes = classes; } + public void setSystemId(String systemId) { + this.systemId = systemId; + } + + public void setXjcOpts(Options xjcOpts) { + this.xjcOpts = xjcOpts; + } + public byte[] toByteArray() { ByteArrayOutputStream buf = new ByteArrayOutputStream(); try { diff --git a/knowledge-api-legacy5-adapter/pom.xml b/knowledge-api-legacy5-adapter/pom.xml index 2cea9dac988..467b1aa4252 100644 --- a/knowledge-api-legacy5-adapter/pom.xml +++ b/knowledge-api-legacy5-adapter/pom.xml @@ -28,6 +28,10 @@ <groupId>org.drools</groupId> <artifactId>drools-reteoo</artifactId> </dependency> + <dependency> + <groupId>org.drools</groupId> + <artifactId>drools-decisiontables</artifactId> + </dependency> <dependency> <!-- External dependencies --> <groupId>org.osgi</groupId> diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/JaxbConfigurationImpl.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/JaxbConfigurationImpl.java new file mode 100644 index 00000000000..652aff173c1 --- /dev/null +++ 
b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/JaxbConfigurationImpl.java @@ -0,0 +1,77 @@ +/* + * Copyright 2010 JBoss Inc + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.drools.impl; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.util.ArrayList; +import java.util.List; + +import org.drools.builder.JaxbConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.sun.tools.xjc.Options; + +public class JaxbConfigurationImpl implements JaxbConfiguration { + private final Logger logger = LoggerFactory.getLogger( JaxbConfigurationImpl.class ); + + private Options xjcOpts; + private String systemId; + + private List<String> classes; + + public JaxbConfigurationImpl() { } + + public JaxbConfigurationImpl(Options xjcOpts, + String systemId) { + this.xjcOpts = xjcOpts; + this.systemId = systemId; + this.classes = new ArrayList<String>(); + } + + + public Options getXjcOpts() { + return xjcOpts; + } + + + public String getSystemId() { + return systemId; + } + + + public List<String> getClasses() { + return classes; + } + + + public void setClasses(List<String> classes) { + this.classes = classes; + } + + public void setSystemId(String systemId) { + this.systemId = systemId; + } + + public void setXjcOpts(Options xjcOpts) { + this.xjcOpts = xjcOpts; + } +} diff 
--git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderFactoryServiceImpl.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderFactoryServiceImpl.java index 249683410d9..76a8385bd3e 100644 --- a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderFactoryServiceImpl.java +++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderFactoryServiceImpl.java @@ -1,5 +1,7 @@ package org.drools.impl; +import java.util.Properties; + import org.drools.KnowledgeBase; import org.drools.builder.DecisionTableConfiguration; import org.drools.builder.JaxbConfiguration; @@ -8,14 +10,10 @@ import org.drools.builder.KnowledgeBuilderFactoryService; import org.drools.compiler.compiler.PackageBuilder; import org.drools.compiler.compiler.PackageBuilderConfiguration; -import org.drools.core.builder.conf.impl.JaxbConfigurationImpl; import org.drools.core.impl.KnowledgeBaseImpl; - -import java.util.Properties; +import org.drools.impl.adapters.KnowledgeBuilderConfigurationAdapter; import com.sun.tools.xjc.Options; -import org.drools.impl.adapters.JaxbConfigurationAdapter; -import org.drools.impl.adapters.KnowledgeBuilderConfigurationAdapter; public class KnowledgeBuilderFactoryServiceImpl implements KnowledgeBuilderFactoryService { @@ -58,6 +56,6 @@ public KnowledgeBuilder newKnowledgeBuilder(KnowledgeBase kbase, public JaxbConfiguration newJaxbConfiguration(Options xjcOpts, String systemId) { - return new JaxbConfigurationAdapter(new JaxbConfigurationImpl( xjcOpts, systemId )); + return new org.drools.impl.JaxbConfigurationImpl( xjcOpts, systemId ); } } diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderImpl.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderImpl.java index bb3b0e9b541..9110fda9110 100644 --- a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderImpl.java +++ 
b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/KnowledgeBuilderImpl.java @@ -1,7 +1,14 @@ package org.drools.impl; +import static org.drools.impl.adapters.AdapterUtil.adaptResultSeverity; +import static org.drools.impl.adapters.KnowledgePackageAdapter.adaptKnowledgePackages; + +import java.util.Collection; + import org.drools.KnowledgeBase; import org.drools.builder.CompositeKnowledgeBuilder; +import org.drools.builder.DecisionTableConfiguration; +import org.drools.builder.JaxbConfiguration; import org.drools.builder.KnowledgeBuilder; import org.drools.builder.KnowledgeBuilderErrors; import org.drools.builder.KnowledgeBuilderResults; @@ -11,17 +18,14 @@ import org.drools.compiler.compiler.PackageBuilder; import org.drools.definition.KnowledgePackage; import org.drools.impl.adapters.CompositeKnowledgeBuilderAdapter; +import org.drools.impl.adapters.DecisionTableConfigurationAdapter; +import org.drools.impl.adapters.JaxbConfigurationAdapter; import org.drools.impl.adapters.KnowledgeBaseAdapter; import org.drools.impl.adapters.KnowledgeBuilderErrorsAdapter; import org.drools.impl.adapters.KnowledgeBuilderResultsAdapter; import org.drools.impl.adapters.ResourceAdapter; import org.drools.io.Resource; -import java.util.Collection; - -import static org.drools.impl.adapters.AdapterUtil.adaptResultSeverity; -import static org.drools.impl.adapters.KnowledgePackageAdapter.adaptKnowledgePackages; - public class KnowledgeBuilderImpl implements KnowledgeBuilder { private final org.drools.compiler.builder.impl.KnowledgeBuilderImpl delegate; @@ -35,7 +39,15 @@ public void add(Resource resource, ResourceType type) { } public void add(Resource resource, ResourceType type, ResourceConfiguration configuration) { - delegate.add(((ResourceAdapter)resource).getDelegate(), type.toKieResourceType(), null); + org.kie.api.io.ResourceConfiguration conf = null; + if( configuration != null ) { + if( configuration instanceof DecisionTableConfiguration ) { + conf = new 
DecisionTableConfigurationAdapter( (DecisionTableConfiguration) configuration ); + } else if( configuration instanceof JaxbConfiguration ) { + conf = new JaxbConfigurationAdapter((JaxbConfiguration) configuration); + } + } + delegate.add(((ResourceAdapter)resource).getDelegate(), type.toKieResourceType(), conf ); } public Collection<KnowledgePackage> getKnowledgePackages() { diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/DecisionTableConfigurationAdapter.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/DecisionTableConfigurationAdapter.java new file mode 100644 index 00000000000..ce91dd23fb3 --- /dev/null +++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/DecisionTableConfigurationAdapter.java @@ -0,0 +1,60 @@ +package org.drools.impl.adapters; + +import java.util.Properties; + +import org.drools.core.builder.conf.impl.DecisionTableConfigurationImpl; +import org.drools.core.builder.conf.impl.ResourceConfigurationImpl; +import org.kie.api.io.ResourceConfiguration; +import org.kie.api.io.ResourceType; +import org.kie.internal.builder.DecisionTableConfiguration; +import org.kie.internal.builder.DecisionTableInputType; + + +public class DecisionTableConfigurationAdapter extends ResourceConfigurationImpl implements DecisionTableConfiguration { + + private static final long serialVersionUID = -2052308765193190359L; + + private final org.drools.builder.DecisionTableConfiguration delegate; + + public DecisionTableConfigurationAdapter( org.drools.builder.DecisionTableConfiguration delegate ) { + super.setResourceType(ResourceType.DTABLE); + this.delegate = delegate; + } + + public void setInputType(org.drools.builder.DecisionTableInputType inputType) { + delegate.setInputType(inputType); + } + + public DecisionTableInputType getInputType() { + return delegate.getInputType() == org.drools.builder.DecisionTableInputType.CSV ? 
DecisionTableInputType.CSV : DecisionTableInputType.XLS; + } + + public void setWorksheetName(String name) { + delegate.setWorksheetName(name); + } + + public String getWorksheetName() { + return delegate.getWorksheetName(); + } + + public Properties toProperties() { + Properties prop = super.toProperties(); + prop.setProperty( DecisionTableConfigurationImpl.DROOLS_DT_TYPE, getInputType().toString() ); + if( getWorksheetName() != null ) { + prop.setProperty( DecisionTableConfigurationImpl.DROOLS_DT_WORKSHEET, getWorksheetName() ); + } + return prop; + } + + public ResourceConfiguration fromProperties(Properties prop) { + super.fromProperties(prop); + setInputType( DecisionTableInputType.valueOf( prop.getProperty( DecisionTableConfigurationImpl.DROOLS_DT_TYPE, DecisionTableInputType.XLS.toString() ) ) ); + setWorksheetName( prop.getProperty( DecisionTableConfigurationImpl.DROOLS_DT_WORKSHEET, null ) ); + return this; + } + + @Override + public void setInputType(DecisionTableInputType inputType) { + delegate.setInputType( inputType == DecisionTableInputType.CSV ? 
org.drools.builder.DecisionTableInputType.CSV : org.drools.builder.DecisionTableInputType.XLS); + } +} diff --git a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/JaxbConfigurationAdapter.java b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/JaxbConfigurationAdapter.java index ec6917d4327..1fda38ba3cd 100644 --- a/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/JaxbConfigurationAdapter.java +++ b/knowledge-api-legacy5-adapter/src/main/java/org/drools/impl/adapters/JaxbConfigurationAdapter.java @@ -1,15 +1,23 @@ package org.drools.impl.adapters; -import com.sun.tools.xjc.Options; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import org.drools.core.builder.conf.impl.JaxbConfigurationImpl; +import org.drools.core.builder.conf.impl.ResourceConfigurationImpl; +import org.kie.api.io.ResourceConfiguration; import org.kie.internal.builder.JaxbConfiguration; -import java.util.List; +import com.sun.tools.xjc.Language; +import com.sun.tools.xjc.Options; -public class JaxbConfigurationAdapter implements org.drools.builder.JaxbConfiguration { +public class JaxbConfigurationAdapter extends ResourceConfigurationImpl implements JaxbConfiguration { - private final JaxbConfiguration delegate; + private static final long serialVersionUID = -1425447385459529502L; + private final org.drools.builder.JaxbConfiguration delegate; - public JaxbConfigurationAdapter(JaxbConfiguration delegate) { + public JaxbConfigurationAdapter(org.drools.builder.JaxbConfiguration delegate) { this.delegate = delegate; } @@ -24,4 +32,51 @@ public String getSystemId() { public List<String> getClasses() { return delegate.getClasses(); } + + public Properties toProperties() { + Properties prop = super.toProperties(); + prop.setProperty( "drools.jaxb.conf.systemId", getSystemId() ); + prop.setProperty( "drools.jaxb.conf.classes", getClass().toString() ); + Options xjcOpts = getXjcOpts(); + if (xjcOpts != null) 
{ + // how to serialize Options to a property file??? + prop.setProperty( "drools.jaxb.conf.opts.class", xjcOpts.getClass().getName() ); + if (xjcOpts.getSchemaLanguage() != null) { + prop.setProperty( "drools.jaxb.conf.opts.lang", xjcOpts.getSchemaLanguage().toString() ); + } + } + return prop; + } + + public ResourceConfiguration fromProperties(Properties prop) { + super.fromProperties(prop); + ((JaxbConfigurationImpl)delegate).setSystemId( prop.getProperty( "drools.jaxb.conf.systemId", null ) ); + String classesStr = prop.getProperty( "drools.jaxb.conf.classes", "[]" ); + classesStr = classesStr.substring( 1, classesStr.length()-1 ).trim(); + List<String> classes = new ArrayList<String>(); + if( classesStr != null && classesStr.length() > 1 ) { + // can't use Arrays.asList() because have to trim() each element + for( String clz : classesStr.split( "," ) ) { + classes.add( clz.trim() ); + } + } + ((JaxbConfigurationImpl)delegate).setClasses(classes); + + // how to deserialize Options from a properties file? 
+ String optsClass = prop.getProperty( "drools.jaxb.conf.opts.class", null ); + if (optsClass != null) { + try { + Options xjcOpts = (Options) Class.forName( optsClass ).newInstance(); + String optsLang = prop.getProperty( "drools.jaxb.conf.opts.lang", null ); + if (optsLang != null) { + xjcOpts.setSchemaLanguage( Language.valueOf(optsLang) ); + } + ((JaxbConfigurationImpl)delegate).setXjcOpts(xjcOpts); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + return this; + } } diff --git a/knowledge-api-legacy5-adapter/src/test/java/org/drools/integrationtests/ResourceCompilationTest.java b/knowledge-api-legacy5-adapter/src/test/java/org/drools/integrationtests/ResourceCompilationTest.java new file mode 100644 index 00000000000..f116afa75cd --- /dev/null +++ b/knowledge-api-legacy5-adapter/src/test/java/org/drools/integrationtests/ResourceCompilationTest.java @@ -0,0 +1,42 @@ +package org.drools.integrationtests; + +import org.drools.builder.DecisionTableConfiguration; +import org.drools.builder.DecisionTableInputType; +import org.junit.Test; +import org.drools.builder.KnowledgeBuilder; +import org.drools.builder.KnowledgeBuilderFactory; +import org.drools.builder.ResourceType; +import org.drools.io.ResourceFactory; + +/** + * Illustrates knowledge-api resource compilation problems. 
+ */ +public class ResourceCompilationTest { + + @Test + public void testDecisionTableXls() { + DecisionTableConfiguration dtconf = KnowledgeBuilderFactory.newDecisionTableConfiguration(); + dtconf.setInputType(DecisionTableInputType.XLS); + + KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); + kbuilder.add(ResourceFactory.newClassPathResource("sample.xls", getClass()), ResourceType.DTABLE, dtconf); + + if (kbuilder.hasErrors()) { + throw new RuntimeException("Drools compile errors: " + kbuilder.getErrors().toString()); + } + } + + @Test + public void testDecisionTableCsv() { + DecisionTableConfiguration dtconf = KnowledgeBuilderFactory.newDecisionTableConfiguration(); + dtconf.setInputType(DecisionTableInputType.CSV); + + KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder(); + kbuilder.add(ResourceFactory.newClassPathResource("sample.csv", getClass()), ResourceType.DTABLE, dtconf); + + if (kbuilder.hasErrors()) { + throw new RuntimeException("Drools compile errors: " + kbuilder.getErrors().toString()); + } + } + +} diff --git a/knowledge-api-legacy5-adapter/src/test/java/org/drools/model/Person.java b/knowledge-api-legacy5-adapter/src/test/java/org/drools/model/Person.java new file mode 100644 index 00000000000..8eabfec84fc --- /dev/null +++ b/knowledge-api-legacy5-adapter/src/test/java/org/drools/model/Person.java @@ -0,0 +1,109 @@ +package org.drools.model; + +import java.io.Serializable; +import javax.xml.bind.annotation.XmlRootElement; + +/** + * Sample fact for person. 
+ */ +@XmlRootElement +public class Person implements Serializable { + + private static final long serialVersionUID = -5411807328989112195L; + + private int id = 0; + private String name = ""; + private int age; + private String likes; + + public Person() { + } + + public Person(String name) { + super(); + this.name = name; + } + + public Person(String name, int age) { + this.name = name; + this.age = age; + } + + public Person(String name, String likes, int age) { + this.name = name; + this.likes = likes; + this.age = age; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public String toString() { + return String.format("%s[id='%s', name='%s']", getClass().getName(), id, name); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + id; + result = prime * result + ((name == null) ? 
0 : name.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Person other = (Person) obj; + if (id != other.id) { + return false; + } + if (name == null) { + if (other.name != null) { + return false; + } + } else if (!name.equals(other.name)) { + return false; + } + return true; + } + + public void setAge(int age) { + this.age = age; + } + + public int getAge() { + return age; + } + + public void setLikes(String likes) { + this.likes = likes; + } + + public String getLikes() { + return likes; + } + +} diff --git a/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.csv b/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.csv new file mode 100644 index 00000000000..35931d14344 --- /dev/null +++ b/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.csv @@ -0,0 +1,13 @@ +, +"RuleSet","org.drools.knowledgeapi" +"Import","org.drools.model.Person" +"Notes", +, +"RuleTable ID change", +"CONDITION","ACTION" +"person:Person","person" +"id == $param","setId($param)" +"ID","new ID" +0,1 +1,2 +2,3 diff --git a/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.xls b/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.xls new file mode 100644 index 00000000000..06090780a4b Binary files /dev/null and b/knowledge-api-legacy5-adapter/src/test/resources/org/drools/integrationtests/sample.xls differ
4fa174541fd3402cc067ebab5fb44c9b5ce2587e
camel
CAMEL-3203: Fixed adding routes with quartz- endpoints to already started camel should add jobs to scheduler.--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1005489 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/camel
diff --git a/components/camel-quartz/src/main/java/org/apache/camel/component/quartz/QuartzComponent.java b/components/camel-quartz/src/main/java/org/apache/camel/component/quartz/QuartzComponent.java index 9ff9bf8cb7515..1ef0cdf81b779 100644 --- a/components/camel-quartz/src/main/java/org/apache/camel/component/quartz/QuartzComponent.java +++ b/components/camel-quartz/src/main/java/org/apache/camel/component/quartz/QuartzComponent.java @@ -186,9 +186,14 @@ protected void doStop() throws Exception { } } - public void addJob(JobDetail job, Trigger trigger) { - // add job to internal list because we will defer adding to the scheduler when camel context has been fully started - jobsToAdd.add(new JobToAdd(job, trigger)); + public void addJob(JobDetail job, Trigger trigger) throws SchedulerException { + if (scheduler == null) { + // add job to internal list because we will defer adding to the scheduler when camel context has been fully started + jobsToAdd.add(new JobToAdd(job, trigger)); + } else { + // add job directly to scheduler + doAddJob(job, trigger); + } } private void doAddJob(JobDetail job, Trigger trigger) throws SchedulerException { diff --git a/components/camel-quartz/src/test/java/org/apache/camel/component/quartz/QuartzAddRoutesAfterCamelContextStartedTest.java b/components/camel-quartz/src/test/java/org/apache/camel/component/quartz/QuartzAddRoutesAfterCamelContextStartedTest.java new file mode 100644 index 0000000000000..52503df24ec5a --- /dev/null +++ b/components/camel-quartz/src/test/java/org/apache/camel/component/quartz/QuartzAddRoutesAfterCamelContextStartedTest.java @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.quartz; + +import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.component.mock.MockEndpoint; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.junit.Test; + +/** + * @version $Revision$ + */ +public class QuartzAddRoutesAfterCamelContextStartedTest extends CamelTestSupport { + + @Test + public void testAddRoutes() throws Exception { + // camel context should already be started + assertTrue(context.getStatus().isStarted()); + + MockEndpoint mock = getMockEndpoint("mock:result"); + mock.expectedMessageCount(2); + + // add the quartz router after CamelContext has been started + context.addRoutes(new RouteBuilder() { + @Override + public void configure() throws Exception { + from("quartz://myGroup/myTimerName?trigger.repeatInterval=2&trigger.repeatCount=1").to("mock:result"); + } + }); + + // it should also work + assertMockEndpointsSatisfied(); + } + +}
97a8657ae17bf9105a41d4930ee26fb2dbd1e3cc
orientdb
Supported new syntax in SQL UPDATE to use a inner- query as target. Example: update ( traverse V.in
a
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java index 338f1613f4b..aac0e71c057 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java +++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLDelete.java @@ -76,7 +76,7 @@ public OCommandExecutorSQLDelete parse(final OCommandRequest iRequest) { String subjectName = parserRequiredWord(false, "Syntax error", " =><,\r\n"); if (subjectName == null) - throwSyntaxErrorException("Invalid subject name. Expected cluster, class or index"); + throwSyntaxErrorException("Invalid subject name. Expected cluster, class, index or sub-query"); if (OStringParser.startsWithIgnoreCase(subjectName, OCommandExecutorSQLAbstract.INDEX_PREFIX)) { // INDEX diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java index a0cb59f4a30..f5befe3c4ea 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java +++ b/core/src/main/java/com/orientechnologies/orient/core/sql/OCommandExecutorSQLUpdate.java @@ -37,6 +37,7 @@ import com.orientechnologies.orient.core.query.OQuery; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.serialization.serializer.OStringSerializerHelper; +import com.orientechnologies.orient.core.sql.filter.OSQLFilter; import com.orientechnologies.orient.core.sql.filter.OSQLFilterItem; import com.orientechnologies.orient.core.sql.functions.OSQLFunctionRuntime; import com.orientechnologies.orient.core.sql.query.OSQLAsynchQuery; @@ -61,6 +62,7 @@ public class OCommandExecutorSQLUpdate extends OCommandExecutorSQLSetAware imple private Map<String, Number> incrementEntries = new LinkedHashMap<String, 
Number>(); private OQuery<?> query; + private OSQLFilter compiledFilter; private int recordCount = 0; private String subjectName; private static final Object EMPTY_VALUE = new Object(); @@ -82,9 +84,11 @@ public OCommandExecutorSQLUpdate parse(final OCommandRequest iRequest) { query = null; recordCount = 0; - parserRequiredKeyword("UPDATE"); + parserRequiredKeyword(KEYWORD_UPDATE); - subjectName = parserRequiredWord(true, "Invalid target"); + subjectName = parserRequiredWord(true, "Invalid target", " =><,\r\n"); + if (subjectName == null) + throwSyntaxErrorException("Invalid subject name. Expected cluster, class, index or sub-query"); parserNextWord(true); String word = parserGetLastWord(); @@ -116,7 +120,16 @@ else if (word.equals(KEYWORD_INCREMENT)) final String additionalStatement = parserGetLastWord(); - if (additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE) + if (subjectName.startsWith("(")) { + subjectName = subjectName.trim(); + query = database.command(new OSQLAsynchQuery<ODocument>(subjectName.substring(1, subjectName.length() - 1), this)); + + if (additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE) + || additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_LIMIT)) + compiledFilter = OSQLEngine.getInstance().parseCondition(parserText.substring(parserGetCurrentPosition()), getContext(), + KEYWORD_WHERE); + + } else if (additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_WHERE) || additionalStatement.equals(OCommandExecutorSQLAbstract.KEYWORD_LIMIT)) query = new OSQLAsynchQuery<ODocument>("select from " + subjectName + " " + additionalStatement + " " + parserText.substring(parserGetCurrentPosition()), this); @@ -149,6 +162,12 @@ public Object execute(final Map<Object, Object> iArgs) { public boolean result(final Object iRecord) { final ODocument record = (ODocument) iRecord; + if (compiledFilter != null) { + // ADDITIONAL FILTERING + if (!(Boolean) compiledFilter.evaluate(record, null, context)) + 
return false; + } + boolean recordUpdated = false; parameters.reset();
b09e981f1e236c08194c5871214d5431c98eb260
drools
[BZ-1007977] when returning a cached KieModule from- the KieRepository referring to a snapshot release check if there is a newer- release on the maven repository--
c
https://github.com/kiegroup/drools
diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/InternalKieScanner.java b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/InternalKieScanner.java index 0b3a11e27eb..3194f75b493 100644 --- a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/InternalKieScanner.java +++ b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/InternalKieScanner.java @@ -14,4 +14,6 @@ public interface InternalKieScanner extends KieScanner { KieModule loadArtifact(ReleaseId releaseId); KieModule loadArtifact(ReleaseId releaseId, InputStream pomXML); + + String getArtifactVersion(ReleaseId releaseId); } diff --git a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieRepositoryImpl.java b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieRepositoryImpl.java index 5c335a60843..708d0d1adf6 100644 --- a/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieRepositoryImpl.java +++ b/drools-compiler/src/main/java/org/drools/compiler/kie/builder/impl/KieRepositoryImpl.java @@ -46,14 +46,19 @@ public class KieRepositoryImpl public static final KieRepository INSTANCE = new KieRepositoryImpl(); - private final KieModuleRepo kieModuleRepo = new KieModuleRepo(); + private final KieModuleRepo kieModuleRepo; + + private InternalKieScanner internalKieScanner; + + public KieRepositoryImpl() { + internalKieScanner = getInternalKieScanner(); + kieModuleRepo = new KieModuleRepo(internalKieScanner); + } private final AtomicReference<ReleaseId> defaultGAV = new AtomicReference(new ReleaseIdImpl(DEFAULT_GROUP, DEFAULT_ARTIFACT, DEFAULT_VERSION)); - private InternalKieScanner internalKieScanner; - public void setDefaultGAV(ReleaseId releaseId) { this.defaultGAV.set(releaseId); } @@ -122,10 +127,6 @@ private static class DummyKieScanner implements InternalKieScanner { - public KieModule loadArtifact(ReleaseId releaseId) { - return null; - } - public void start(long 
pollingInterval) { } @@ -138,9 +139,17 @@ public void scanNow() { public void setKieContainer(KieContainer kieContainer) { } + public KieModule loadArtifact(ReleaseId releaseId) { + return null; + } + public KieModule loadArtifact(ReleaseId releaseId, InputStream pomXML) { return null; } + + public String getArtifactVersion(ReleaseId releaseId) { + return null; + } } public KieModule addKieModule(Resource resource, Resource... dependencies) { @@ -195,9 +204,14 @@ public KieModule getKieModule(Resource resource) { private static class KieModuleRepo { + private final InternalKieScanner kieScanner; private final Map<String, TreeMap<ComparableVersion, KieModule>> kieModules = new HashMap<String, TreeMap<ComparableVersion, KieModule>>(); private final Map<ReleaseId, KieModule> oldKieModules = new HashMap<ReleaseId, KieModule>(); + private KieModuleRepo(InternalKieScanner kieScanner) { + this.kieScanner = kieScanner; + } + void store(KieModule kieModule) { ReleaseId releaseId = kieModule.getReleaseId(); String ga = releaseId.getGroupId() + ":" + releaseId.getArtifactId(); @@ -225,12 +239,23 @@ KieModule load(ReleaseId releaseId) { KieModule load(ReleaseId releaseId, VersionRange versionRange) { String ga = releaseId.getGroupId() + ":" + releaseId.getArtifactId(); TreeMap<ComparableVersion, KieModule> artifactMap = kieModules.get(ga); - if (artifactMap == null) { + if ( artifactMap == null ) { return null; } if (versionRange.fixed) { - return artifactMap.get(new ComparableVersion(releaseId.getVersion())); + KieModule kieModule = artifactMap.get(new ComparableVersion(releaseId.getVersion())); + if ( kieModule != null && releaseId.isSnapshot() ) { + String oldSnapshotVersion = ((ReleaseIdImpl)kieModule.getReleaseId()).getSnapshotVersion(); + String currentSnapshotVersion = kieScanner.getArtifactVersion(releaseId); + if ( oldSnapshotVersion != null && currentSnapshotVersion != null && + new ComparableVersion(currentSnapshotVersion).compareTo(new 
ComparableVersion(oldSnapshotVersion)) > 0) { + // if the snapshot currently available on the maven repo is newer than the cached one + // return null to enforce the building of this newer version + return null; + } + } + return kieModule; } if (versionRange.upperBound == null) { @@ -241,11 +266,11 @@ KieModule load(ReleaseId releaseId, VersionRange versionRange) { artifactMap.ceilingEntry(new ComparableVersion(versionRange.upperBound)) : artifactMap.lowerEntry(new ComparableVersion(versionRange.upperBound)); - if (entry == null) { + if ( entry == null ) { return null; } - if (versionRange.lowerBound == null) { + if ( versionRange.lowerBound == null ) { return entry.getValue(); } diff --git a/drools-compiler/src/main/java/org/drools/compiler/kproject/ReleaseIdImpl.java b/drools-compiler/src/main/java/org/drools/compiler/kproject/ReleaseIdImpl.java index 6777edbd6db..58f204c9625 100644 --- a/drools-compiler/src/main/java/org/drools/compiler/kproject/ReleaseIdImpl.java +++ b/drools-compiler/src/main/java/org/drools/compiler/kproject/ReleaseIdImpl.java @@ -12,6 +12,8 @@ public class ReleaseIdImpl implements ReleaseId { private final String artifactId; private final String version; + private String snapshotVersion; + public ReleaseIdImpl(String releaseId) { String[] split = releaseId.split(":"); this.groupId = split[0]; @@ -60,6 +62,10 @@ public String getCompilationCachePathPrefix() { //return "META-INF/maven/" + groupId + "/" + artifactId + "/"; return "META-INF/"; } + + public boolean isSnapshot() { + return version.endsWith("-SNAPSHOT"); + } public static ReleaseId fromPropertiesString(String string) { Properties props = new Properties(); @@ -101,4 +107,12 @@ public int hashCode() { result = 31 * result + (version != null ? 
version.hashCode() : 0); return result; } + + public String getSnapshotVersion() { + return snapshotVersion; + } + + public void setSnapshotVersion(String snapshotVersion) { + this.snapshotVersion = snapshotVersion; + } } diff --git a/kie-ci/src/main/java/org/kie/scanner/KieRepositoryScannerImpl.java b/kie-ci/src/main/java/org/kie/scanner/KieRepositoryScannerImpl.java index 6eef4d1d340..b7917859b5b 100644 --- a/kie-ci/src/main/java/org/kie/scanner/KieRepositoryScannerImpl.java +++ b/kie-ci/src/main/java/org/kie/scanner/KieRepositoryScannerImpl.java @@ -1,5 +1,6 @@ package org.kie.scanner; +import org.drools.compiler.kproject.ReleaseIdImpl; import org.drools.compiler.kproject.models.KieModuleModelImpl; import org.kie.api.builder.ReleaseId; import org.kie.api.builder.KieModule; @@ -87,6 +88,14 @@ public KieModule loadArtifact(ReleaseId releaseId, InputStream pomXml) { Artifact artifact = resolver.resolveArtifact(artifactName); return artifact != null ? buildArtifact(releaseId, artifact, resolver) : loadPomArtifact(releaseId); } + + public String getArtifactVersion(ReleaseId releaseId) { + if (!releaseId.isSnapshot()) { + return releaseId.getVersion(); + } + Artifact artifact = getArtifactResolver().resolveArtifact(releaseId.toString()); + return artifact != null ? 
artifact.getVersion() : null; + } private KieModule loadPomArtifact(ReleaseId releaseId) { ArtifactResolver resolver = getResolverFor(releaseId, false); @@ -101,6 +110,9 @@ private KieModule loadPomArtifact(ReleaseId releaseId) { } private InternalKieModule buildArtifact(ReleaseId releaseId, Artifact artifact, ArtifactResolver resolver) { + if (releaseId.isSnapshot()) { + ((ReleaseIdImpl)releaseId).setSnapshotVersion(artifact.getVersion()); + } ZipKieModule kieModule = createZipKieModule(releaseId, artifact.getFile()); if (kieModule != null) { addDependencies(kieModule, resolver, resolver.getArtifactDependecies(new DependencyDescriptor(artifact).toString())); diff --git a/kie-ci/src/test/java/org/kie/scanner/KieModuleMavenTest.java b/kie-ci/src/test/java/org/kie/scanner/KieModuleMavenTest.java index 636c97cbaf7..3c09126fae0 100644 --- a/kie-ci/src/test/java/org/kie/scanner/KieModuleMavenTest.java +++ b/kie-ci/src/test/java/org/kie/scanner/KieModuleMavenTest.java @@ -24,8 +24,6 @@ import org.kie.api.builder.ReleaseId; import org.kie.api.builder.model.KieBaseModel; import org.kie.api.definition.KiePackage; -import org.kie.api.definition.process.*; -import org.kie.api.definition.process.Process; import org.kie.api.definition.rule.Rule; import org.kie.api.runtime.KieContainer; import org.kie.api.runtime.KieSession; @@ -138,8 +136,9 @@ public void testKieModulePojoDependencies() throws Exception { assertEquals(1, list.size()); } - @Test @Ignore + @Test public void testKieContainerBeforeAndAfterDeployOfSnapshot() throws Exception { + // BZ-1007977 KieServices ks = KieServices.Factory.get(); String group = "org.kie.test"; @@ -180,7 +179,6 @@ public void testKieContainerBeforeAndAfterDeployOfSnapshot() throws Exception { assertEquals(1, packages2.size()); Collection<Rule> rules2 = packages2.iterator().next().getRules(); assertEquals(4, rules2.size()); - } public static String generatePomXml(ReleaseId releaseId, ReleaseId... dependencies) {
58cad7799654a6155914000934d50d850e303adf
Vala
mysql: mark Database.fetch_row as null terminated, several other fixes Fixes bug 615925.
c
https://github.com/GNOME/vala/
diff --git a/vapi/mysql.vapi b/vapi/mysql.vapi index 7997016b31..fcb47c5c8b 100644 --- a/vapi/mysql.vapi +++ b/vapi/mysql.vapi @@ -54,7 +54,7 @@ namespace Mysql { public int options (int option, string arg); public int ping (); public int query (string stmt_str); - public bool real_connect (string host, string username, string passwd, string dbname, uint port, string? unix_socket, ulong client_flag); + public bool real_connect (string? host = null, string? username = null, string? passwd = null, string? dbname = null, uint port = 0, string? unix_socket = null, ulong client_flag = 0); public ulong real_escape_string (string to, string from, ulong length); public int real_query (string query, ulong len); public int reload (); @@ -80,7 +80,7 @@ namespace Mysql { public Field fetch_field_direct (uint field_nbr); public Field[] fetch_fields (); public ulong[] fetch_lengths (); - [CCode (array_length = false)] + [CCode (array_length = false, array_null_terminated = true)] public unowned string[]? fetch_row (); public uint fetch_count (); public uint num_fields (); @@ -97,9 +97,9 @@ namespace Mysql { public void debug (string msg); public ulong hex_string (string to, string from, ulong length); public void library_end (); - public int library_init (int argc, string[] argv, string[] groups); + public int library_init ([CCode (array_length_pos = 0.1)] string[] argv, [CCode (array_length = false, array_null_terminated = true)] string[]? groups = null); public void server_end (); - public int server_init (int argc, string[] argv, string[] groups); + public int server_init ([CCode (array_length_pos = 0.1)] string[] argv, [CCode (array_length = false, array_null_terminated = true)] string[]? groups = null); public void thread_end (); public bool thread_init (); public uint thread_safe ();
d703ec89cf73fa94cbd8993b05c5f46ec7bd84ff
orientdb
Issue -1607 WAL page change tracking was- reimplemented.--
p
https://github.com/orientechnologies/orientdb
diff --git a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/OPageChanges.java b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/OPageChanges.java index a296a502646..0f4a65b7cf1 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/OPageChanges.java +++ b/core/src/main/java/com/orientechnologies/orient/core/storage/impl/local/paginated/wal/OPageChanges.java @@ -1,368 +1,163 @@ package com.orientechnologies.orient.core.storage.impl.local.paginated.wal; +import java.util.ArrayList; +import java.util.List; +import java.util.ListIterator; + import com.orientechnologies.common.directmemory.ODirectMemory; import com.orientechnologies.common.directmemory.ODirectMemoryFactory; import com.orientechnologies.common.serialization.types.OIntegerSerializer; public class OPageChanges { - private static final int INITIAL_SIZE = 16; - - private final ODirectMemory directMemory = ODirectMemoryFactory.INSTANCE.directMemory(); - private ChangesBucket[] changesBuckets = new ChangesBucket[INITIAL_SIZE]; - - private int size = 0; - - public void addChanges(int pageOffset, byte[] newValues, byte[] oldValues) { - assert newValues.length == oldValues.length; - - if (size == 0) { - changesBuckets[0] = new ChangesBucket(pageOffset, newValues, oldValues); - size = 1; - } else { - ChangesBucket bucketToUse; - int bucketIndex; - - final int insertionIndex = binarySearch(pageOffset); - if (insertionIndex >= 0) { - bucketIndex = insertionIndex; - bucketToUse = changesBuckets[bucketIndex]; - bucketToUse.updateValues(pageOffset, newValues, oldValues); - } else { - bucketIndex = -insertionIndex - 1; - - if (bucketIndex < size) { - final ChangesBucket bucket = changesBuckets[bucketIndex]; - if (bucket.startPosition < pageOffset) { - bucketToUse = bucket; - bucketToUse.updateValues(pageOffset, newValues, oldValues); - } else { - bucketToUse = new ChangesBucket(pageOffset, newValues, 
oldValues); - } - } else { - bucketToUse = new ChangesBucket(pageOffset, newValues, oldValues); - } - } - - int shiftBackFrom = -1; - int shiftBackTo = -1; - - int startIndex; - if (bucketIndex < size && bucketToUse == changesBuckets[bucketIndex]) { - startIndex = bucketIndex + 1; - } else { - startIndex = bucketIndex; - } - - for (int i = startIndex; i < size; i++) { - ChangesBucket bucketToMerge = changesBuckets[i]; - if (bucketToUse.endPosition >= bucketToMerge.startPosition) { - bucketToUse.merge(bucketToMerge); - if (i == startIndex) { - shiftBackFrom = startIndex; - shiftBackTo = startIndex; - } else - shiftBackTo = i; - } else - break; - } - - if (shiftBackFrom == bucketIndex) { - shiftBackFrom++; - changesBuckets[bucketIndex] = bucketToUse; - - if (shiftBackFrom <= shiftBackTo) - collapse(shiftBackFrom, shiftBackTo); - } else { - if (shiftBackFrom >= 0) - collapse(shiftBackFrom, shiftBackTo); - } - - if (bucketIndex >= size || bucketToUse != changesBuckets[bucketIndex]) - insert(bucketIndex, bucketToUse); - } - } + private final ODirectMemory directMemory = ODirectMemoryFactory.INSTANCE.directMemory(); + private List<ChangeUnit> changeUnits = new ArrayList<ChangeUnit>(); + private int serializedSize = OIntegerSerializer.INT_SIZE; - public boolean isEmpty() { - return size == 0; - } + public void addChanges(int pageOffset, byte[] newValues, byte[] oldValues) { + assert newValues.length == oldValues.length; + changeUnits.add(new ChangeUnit(pageOffset, oldValues, newValues)); - public void applyChanges(long pointer) { - for (int i = 0; i < size; i++) { - ChangesBucket bucket = changesBuckets[i]; - directMemory.set(pointer + bucket.startPosition, bucket.newValues, 0, bucket.newValues.length); + serializedSize += compressedIntegerSize(pageOffset) + compressedIntegerSize(newValues.length) + newValues.length + + oldValues.length; } - } - public void revertChanges(long pointer) { - for (int i = 0; i < size; i++) { - ChangesBucket bucket = changesBuckets[i]; - 
directMemory.set(pointer + bucket.startPosition, bucket.oldValues, 0, bucket.oldValues.length); + public boolean isEmpty() { + return changeUnits.isEmpty(); } - } - - private void insert(int bucketIndex, ChangesBucket bucket) { - assert bucketIndex <= size; - if (size < changesBuckets.length) { - System.arraycopy(changesBuckets, bucketIndex, changesBuckets, bucketIndex + 1, size - bucketIndex); - changesBuckets[bucketIndex] = bucket; - } else { - ChangesBucket[] oldChangesBuckets = changesBuckets; - changesBuckets = new ChangesBucket[changesBuckets.length << 1]; - - if (bucketIndex > 0) - System.arraycopy(oldChangesBuckets, 0, changesBuckets, 0, bucketIndex); - - if (bucketIndex < size) - System.arraycopy(oldChangesBuckets, bucketIndex, changesBuckets, bucketIndex + 1, size - bucketIndex); - - changesBuckets[bucketIndex] = bucket; + public void applyChanges(long pointer) { + for (ChangeUnit changeUnit : changeUnits) { + directMemory.set(pointer + changeUnit.pageOffset, changeUnit.newValues, 0, changeUnit.newValues.length); + } } - size++; - } - - private int binarySearch(int startPosition) { - int low = 0; - int high = size - 1; - - while (low <= high) { - int mid = (low + high) >>> 1; - ChangesBucket midBucket = changesBuckets[mid]; - if (midBucket.endPosition < startPosition) - low = mid + 1; - else if (midBucket.endPosition > startPosition) - high = mid - 1; - else - return mid; + public void revertChanges(long pointer) { + ListIterator<ChangeUnit> iterator = changeUnits.listIterator(changeUnits.size()); + while (iterator.hasPrevious()) { + ChangeUnit changeUnit = iterator.previous(); + directMemory.set(pointer + changeUnit.pageOffset, changeUnit.oldValues, 0, changeUnit.oldValues.length); + } } - return -(low + 1); - } - - private void collapse(int shiftBackFrom, int shiftBackTo) { - assert shiftBackTo >= shiftBackFrom; - int sizeDiff = shiftBackTo - shiftBackFrom + 1; - if (shiftBackTo < size - 1) { - System.arraycopy(changesBuckets, shiftBackTo + 1, 
changesBuckets, shiftBackFrom, size - (shiftBackTo + 1)); - - for (int i = size - sizeDiff; i < size; i++) - changesBuckets[i] = null; - } else { - for (int i = shiftBackFrom; i <= shiftBackTo; i++) - changesBuckets[i] = null; + public int serializedSize() { + return serializedSize; } - size -= sizeDiff; - } - - public int serializedSize() { - int serializedSize = OIntegerSerializer.INT_SIZE; + public int toStream(byte[] content, int offset) { + OIntegerSerializer.INSTANCE.serializeNative(changeUnits.size(), content, offset); + offset += OIntegerSerializer.INT_SIZE; - serializedSize += compressedIntegerSize(size); - for (int i = 0; i < size; i++) { - ChangesBucket changesBucket = changesBuckets[i]; + for (ChangeUnit changeUnit : changeUnits) { + offset = serializeCompressedInteger(content, offset, changeUnit.pageOffset); + offset = serializeCompressedInteger(content, offset, changeUnit.newValues.length); - serializedSize += compressedIntegerSize(changesBucket.startPosition); - serializedSize += compressedIntegerSize(changesBucket.newValues.length); - - assert changesBucket.newValues.length == changesBucket.oldValues.length; - - serializedSize += 2 * changesBucket.newValues.length; - } + System.arraycopy(changeUnit.newValues, 0, content, offset, changeUnit.newValues.length); + offset += changeUnit.newValues.length; - return serializedSize; - } - - public int serializedSize(byte[] content, int offset) { - return OIntegerSerializer.INSTANCE.deserializeNative(content, offset); - } - - public int toStream(byte[] content, int offset) { - int initialOffset = offset; - - offset += OIntegerSerializer.INT_SIZE; - - offset = serializeCompressedInteger(content, offset, size); - for (int i = 0; i < size; i++) { - ChangesBucket changesBucket = changesBuckets[i]; - offset = serializeCompressedInteger(content, offset, changesBucket.startPosition); - offset = serializeCompressedInteger(content, offset, changesBucket.newValues.length); - - System.arraycopy(changesBucket.newValues, 
0, content, offset, changesBucket.newValues.length); - offset += changesBucket.newValues.length; + System.arraycopy(changeUnit.oldValues, 0, content, offset, changeUnit.oldValues.length); + offset += changeUnit.oldValues.length; + } - System.arraycopy(changesBucket.oldValues, 0, content, offset, changesBucket.oldValues.length); - offset += changesBucket.oldValues.length; + return offset; } - OIntegerSerializer.INSTANCE.serializeNative(offset - initialOffset, content, initialOffset); - - return offset; - } + public int fromStream(byte[] content, int offset) { + final int changesSize = OIntegerSerializer.INSTANCE.deserializeNative(content, offset); + offset += OIntegerSerializer.INT_SIZE; - public int fromStream(byte[] content, int offset) { - offset += OIntegerSerializer.INT_SIZE; + changeUnits = new ArrayList<ChangeUnit>(changesSize); - int[] decompressionResult = deserializeCompressedInteger(content, offset); + int[] decompressResult; + for (int i = 0; i < changesSize; i++) { + decompressResult = deserializeCompressedInteger(content, offset); - size = decompressionResult[0]; - changesBuckets = new ChangesBucket[size]; + int pageOffset = decompressResult[0]; + offset = decompressResult[1]; - offset = decompressionResult[1]; - for (int i = 0; i < size; i++) { - decompressionResult = deserializeCompressedInteger(content, offset); - - int startPosition = decompressionResult[0]; - offset = decompressionResult[1]; - - decompressionResult = deserializeCompressedInteger(content, offset); - int changesSize = decompressionResult[0]; - offset = decompressionResult[1]; - - byte[] newValues = new byte[changesSize]; - byte[] oldValues = new byte[changesSize]; - - System.arraycopy(content, offset, newValues, 0, changesSize); - offset += changesSize; - - System.arraycopy(content, offset, oldValues, 0, changesSize); - offset += changesSize; - - changesBuckets[i] = new ChangesBucket(startPosition, newValues, oldValues); - } + decompressResult = deserializeCompressedInteger(content, 
offset); + int dataLength = decompressResult[0]; + offset = decompressResult[1]; - return offset; - } + byte[] newValues = new byte[dataLength]; + System.arraycopy(content, offset, newValues, 0, dataLength); + offset += dataLength; - private int compressedIntegerSize(int value) { - if (value <= 127) - return 1; - if (value <= 16383) - return 2; - if (value <= 2097151) - return 3; + byte[] oldValues = new byte[dataLength]; + System.arraycopy(content, offset, oldValues, 0, dataLength); + offset += dataLength; - throw new IllegalArgumentException("Values more than 2097151 are not supported."); - } + changeUnits.add(new ChangeUnit(pageOffset, oldValues, newValues)); + } - private int serializeCompressedInteger(byte[] content, int offset, int value) { - if (value <= 127) { - content[offset] = (byte) value; - return offset + 1; + return offset; } - if (value <= 16383) { - content[offset + 1] = (byte) (0xFF & value); + private int compressedIntegerSize(int value) { + if (value <= 127) + return 1; + if (value <= 16383) + return 2; + if (value <= 2097151) + return 3; - value = value >>> 8; - content[offset] = (byte) (0x80 | value); - return offset + 2; + throw new IllegalArgumentException("Values more than 2097151 are not supported."); } - if (value <= 2097151) { - content[offset + 2] = (byte) (0xFF & value); - value = value >>> 8; - - content[offset + 1] = (byte) (0xFF & value); - value = value >>> 8; - - content[offset] = (byte) (0xC0 | value); - - return offset + 3; - } - - throw new IllegalArgumentException("Values more than 2097151 are not supported."); - } - - private int[] deserializeCompressedInteger(byte[] content, int offset) { - if ((content[offset] & 0x80) == 0) - return new int[] { content[offset], offset + 1 }; + private int serializeCompressedInteger(byte[] content, int offset, int value) { + if (value <= 127) { + content[offset] = (byte) value; + return offset + 1; + } - if ((content[offset] & 0xC0) == 0x80) { - final int value = (0xFF & content[offset + 1]) 
| ((content[offset] & 0x3F) << 8); - return new int[] { value, offset + 2 }; - } + if (value <= 16383) { + content[offset + 1] = (byte) (0xFF & value); - if ((content[offset] & 0xE0) == 0xC0) { - final int value = (0xFF & content[offset + 2]) | ((0xFF & content[offset + 1]) << 8) | ((content[offset] & 0x1F) << 16); - return new int[] { value, offset + 3 }; - } + value = value >>> 8; + content[offset] = (byte) (0x80 | value); + return offset + 2; + } - throw new IllegalArgumentException("Invalid integer format."); - } + if (value <= 2097151) { + content[offset + 2] = (byte) (0xFF & value); + value = value >>> 8; - private static final class ChangesBucket { - private final int startPosition; - private int endPosition; + content[offset + 1] = (byte) (0xFF & value); + value = value >>> 8; - private byte[] newValues; - private byte[] oldValues; + content[offset] = (byte) (0xC0 | value); - private ChangesBucket(int startPosition, byte[] newValues, byte[] oldValues) { - assert newValues.length == oldValues.length; + return offset + 3; + } - this.startPosition = startPosition; - this.endPosition = startPosition + newValues.length; - this.newValues = newValues; - this.oldValues = oldValues; + throw new IllegalArgumentException("Values more than 2097151 are not supported."); } - public void updateValues(int startPosition, byte[] newValues, byte[] oldValues) { - assert startPosition <= this.endPosition; - assert startPosition >= this.startPosition; - assert newValues.length == oldValues.length; - - int endPosition = startPosition + newValues.length; - - if (endPosition > this.endPosition) { - int lenDiff = endPosition - this.endPosition; + private int[] deserializeCompressedInteger(byte[] content, int offset) { + if ((content[offset] & 0x80) == 0) + return new int[]{content[offset], offset + 1}; - byte[] oldNewValues = this.newValues; - byte[] oldOldValues = this.oldValues; - - this.newValues = new byte[this.newValues.length + lenDiff]; - System.arraycopy(oldNewValues, 0, 
this.newValues, 0, oldNewValues.length); - - this.oldValues = new byte[this.oldValues.length + lenDiff]; - System.arraycopy(oldOldValues, 0, this.oldValues, 0, oldOldValues.length); - - System.arraycopy(oldValues, oldValues.length - lenDiff, this.oldValues, this.oldValues.length - lenDiff, lenDiff); + if ((content[offset] & 0xC0) == 0x80) { + final int value = (0xFF & content[offset + 1]) | ((content[offset] & 0x3F) << 8); + return new int[]{value, offset + 2}; + } - this.endPosition = endPosition; - } + if ((content[offset] & 0xE0) == 0xC0) { + final int value = (0xFF & content[offset + 2]) | ((0xFF & content[offset + 1]) << 8) | ((content[offset] & 0x1F) << 16); + return new int[]{value, offset + 3}; + } - final int dataOffset = startPosition - this.startPosition; - System.arraycopy(newValues, 0, this.newValues, dataOffset, newValues.length); + throw new IllegalArgumentException("Invalid integer format."); } - public void merge(ChangesBucket bucketToMerge) { - assert bucketToMerge.startPosition <= endPosition; - assert bucketToMerge.startPosition >= startPosition; - - if (endPosition < bucketToMerge.endPosition) { - int newValuesDiff = bucketToMerge.endPosition - this.endPosition; - - byte[] oldNewValues = this.newValues; - byte[] oldOldValues = this.oldValues; - - this.newValues = new byte[this.newValues.length + newValuesDiff]; - System.arraycopy(oldNewValues, 0, this.newValues, 0, oldNewValues.length); - - this.oldValues = new byte[this.oldValues.length + newValuesDiff]; - System.arraycopy(oldOldValues, 0, this.oldValues, 0, oldOldValues.length); - - System.arraycopy(bucketToMerge.newValues, bucketToMerge.newValues.length - newValuesDiff, this.newValues, - this.newValues.length - newValuesDiff, newValuesDiff); - - this.endPosition = bucketToMerge.endPosition; - } - - int oldValuesFrom = bucketToMerge.startPosition - this.startPosition; - - assert oldValuesFrom + bucketToMerge.oldValues.length <= this.oldValues.length; - 
System.arraycopy(bucketToMerge.oldValues, 0, this.oldValues, oldValuesFrom, bucketToMerge.oldValues.length); + private final static class ChangeUnit { + private final int pageOffset; + private final byte[] oldValues; + private final byte[] newValues; + private ChangeUnit(int pageOffset, byte[] oldValues, byte[] newValues) { + this.pageOffset = pageOffset; + this.oldValues = oldValues; + this.newValues = newValues; + } } - } }
c7cbcd3136e0e5a914f6541db0267b9bb93a07ec
isa-tools$isacreator
Decoupled functionality for creating user profile from the GUI class.
p
https://github.com/isa-tools/isacreator
diff --git a/src/main/java/org/isatools/isacreator/api/CreateProfile.java b/src/main/java/org/isatools/isacreator/api/CreateProfile.java new file mode 100644 index 00000000..d29bf01b --- /dev/null +++ b/src/main/java/org/isatools/isacreator/api/CreateProfile.java @@ -0,0 +1,70 @@ +package org.isatools.isacreator.api; + +import org.isatools.isacreator.gui.ISAcreator; +import org.isatools.isacreator.io.UserProfile; +import org.isatools.isacreator.managers.ApplicationManager; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Created by the ISATeam. + * User: agbeltran + * Date: 22/08/2012 + * Time: 12:38 + * + * Functionality for validating user profile fields and creating user profile + * + * @author <a href="mailto:[email protected]">Alejandra Gonzalez-Beltran</a> + */ +public class CreateProfile { + + private static ISAcreator main = ApplicationManager.getCurrentApplicationInstance(); + + + public static boolean emptyPassword(char[] cpassword){ + String password = new String(cpassword); + return password.equals(""); + } + + public static boolean emptyField(String field){ + return field.equals(""); + } + + public static boolean matchingPasswords(char[] cpassword1, char[] cpassword2){ + String password1 = new String(cpassword1); + String password2 = new String(cpassword2); + return password1.equals(password2); + } + + public static boolean validEmail(String email){ + Pattern p = Pattern.compile("[.]*@[.]*"); + Matcher m = p.matcher(email); + return m.find(); + } + + public static boolean duplicateUser(String username){ + for (UserProfile up : main.getUserProfiles()) { + if (up.getUsername().equals(username)) { + return true; + } + } + return false; + } + + public static void createProfile(String username,char[] password, String firstname, String surname, String institution, String email){ + + UserProfile newUser = new UserProfile(username, + new String(password).hashCode(), + firstname, + surname, + institution, + email); + + 
main.getUserProfiles().add(newUser); + main.setCurrentUser(newUser); + main.setUserOntologyHistory(newUser.getUserHistory()); + main.saveUserProfiles(); + } + +} diff --git a/src/main/java/org/isatools/isacreator/gui/menu/CreateProfile.java b/src/main/java/org/isatools/isacreator/gui/menu/CreateProfileMenu.java similarity index 80% rename from src/main/java/org/isatools/isacreator/gui/menu/CreateProfile.java rename to src/main/java/org/isatools/isacreator/gui/menu/CreateProfileMenu.java index 79614ab9..49cbd2bf 100644 --- a/src/main/java/org/isatools/isacreator/gui/menu/CreateProfile.java +++ b/src/main/java/org/isatools/isacreator/gui/menu/CreateProfileMenu.java @@ -5,7 +5,7 @@ ISAcreator is a component of the ISA software suite (http://www.isa-tools.org) ISAcreator is licensed under the Common Public Attribution License version 1.0 (CPAL) EXHIBIT A. CPAL version 1.0 - ÒThe contents of this file are subject to the CPAL version 1.0 (the ÒLicenseÓ); + �The contents of this file are subject to the CPAL version 1.0 (the �License�); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://isa-tools.org/licenses/ISAcreator-license.html. The License is based on the Mozilla Public License version 1.1 but Sections @@ -13,7 +13,7 @@ ISAcreator is a component of the ISA software suite (http://www.isa-tools.org) provide for limited attribution for the Original Developer. In addition, Exhibit A has been modified to be consistent with Exhibit B. - Software distributed under the License is distributed on an ÒAS ISÓ basis, + Software distributed under the License is distributed on an �AS IS� basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the specific language governing rights and limitations under the License. 
@@ -37,9 +37,9 @@ The ISA Team and the ISA software suite have been funded by the EU Carcinogenomi package org.isatools.isacreator.gui.menu; +import org.isatools.isacreator.api.CreateProfile; import org.isatools.isacreator.common.UIHelper; import org.isatools.isacreator.effects.components.RoundedJPasswordField; -import org.isatools.isacreator.io.UserProfile; import org.jdesktop.fuse.InjectedResource; import javax.swing.*; @@ -47,8 +47,7 @@ The ISA Team and the ISA software suite have been funded by the EU Carcinogenomi import java.awt.event.ActionEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; -import java.util.regex.Matcher; -import java.util.regex.Pattern; + /** * CreateProfileGUI provides interface to allow users to construct a new profile @@ -59,7 +58,7 @@ The ISA Team and the ISA software suite have been funded by the EU Carcinogenomi */ -public class CreateProfile extends MenuUIComponent { +public class CreateProfileMenu extends MenuUIComponent { @InjectedResource private ImageIcon createProfileButton, createProfileButtonOver, backButtonSml, backButtonSmlOver; @@ -75,7 +74,7 @@ public class CreateProfile extends MenuUIComponent { private JTextField surnameVal; private JTextField usernameVal; - public CreateProfile(ISAcreatorMenu menu) { + public CreateProfileMenu(ISAcreatorMenu menu) { super(menu); status = new JLabel(" "); status.setForeground(UIHelper.RED_COLOR); @@ -254,56 +253,30 @@ private void assignKeyActionToComponent(Action action, JComponent field) { private void createProfile() { // check password is not empty and that the password and the confirmation match! 
- String password = new String(passwordVal.getPassword()); - if (!password.equals("")) { - String passwordConfirmation = new String(confirmPasswordVal.getPassword()); - if (!password.equals(passwordConfirmation)) { - status.setText( - "<html><b>passwords do not match!</b> the password and confirmation must match!</html>"); - return; - } - } else { + if (CreateProfile.emptyPassword(passwordVal.getPassword())) { status.setText( "<html><b>password is required!</b></html>"); return; } + if (!CreateProfile.matchingPasswords(passwordVal.getPassword(),confirmPasswordVal.getPassword())){ + status.setText( + "<html><b>passwords do not match!</b> the password and confirmation must match!</html>"); + return; + }; // check the rest of the fields to ensure values have been entered and proceed to creating the // profile if everything is ok! - if (!usernameVal.getText().equals("")) { - if (!firstnameVal.getText().equals("")) { - if (!surnameVal.getText().equals("")) { - if (!institutionVal.getText().equals("")) { - if (!emailVal.getText().equals("")) { - Pattern p = Pattern.compile("[.]*@[.]*"); - Matcher m = p.matcher(emailVal.getText()); - - if (m.find()) { - UserProfile newUser = new UserProfile(usernameVal.getText(), - new String(passwordVal.getPassword()).hashCode(), - firstnameVal.getText(), - surnameVal.getText(), - institutionVal.getText(), - emailVal.getText()); - boolean dupUser = false; - - for (UserProfile up : menu.getMain().getUserProfiles()) { - if (up.getUsername() - .equals(usernameVal.getText())) { - dupUser = true; - status.setText( - "<html><b>user name taken!</b> this username is already in use</html>"); - - break; - } - } - - if (!dupUser) { - menu.getMain().getUserProfiles().add(newUser); - menu.getMain().setCurrentUser(newUser); - menu.getMain().setUserOntologyHistory(newUser.getUserHistory()); - menu.getMain().saveUserProfiles(); - + if (!CreateProfile.emptyField(usernameVal.getText())) { + if (!CreateProfile.emptyField(firstnameVal.getText())) { + if 
(!CreateProfile.emptyField(surnameVal.getText())) { + if (!CreateProfile.emptyField(institutionVal.getText())) { + if (!CreateProfile.emptyField(emailVal.getText())) { + if (CreateProfile.validEmail(emailVal.getText())) { + if (CreateProfile.duplicateUser(usernameVal.getText())){ + status.setText( + "<html><b>user name taken!</b> this username is already in use</html>"); + }else{ + CreateProfile.createProfile(usernameVal.getText(), passwordVal.getPassword(),firstnameVal.getText(),surnameVal.getText(),institutionVal.getText(),emailVal.getText()); menu.changeView(menu.getMainMenuGUI()); } } else { @@ -330,6 +303,6 @@ private void createProfile() { status.setText( "<html><b>username is required!</b> please enter a username</html>"); } - } + }//createProfile method } diff --git a/src/main/resources/dependency-injections/gui-package.properties b/src/main/resources/dependency-injections/gui-package.properties index a7540c70..3f289acf 100644 --- a/src/main/resources/dependency-injections/gui-package.properties +++ b/src/main/resources/dependency-injections/gui-package.properties @@ -99,8 +99,8 @@ SaveAsDialog.saveSubmissionOver=/images/gui/savesubmission_over.png # MenuUIComponent images. 
This class is extended by AuthenticationMenu, CreateISATabMenu, CreateProfie, # ImportConfiguration, ImportFilesMenu & MainMenu -MenuUIComponent.backButton={CreateProfile.backButtonSml} -MenuUIComponent.backButtonOver={CreateProfile.backButtonSmlOver} +MenuUIComponent.backButton={CreateProfileMenu.backButtonSml} +MenuUIComponent.backButtonOver={CreateProfileMenu.backButtonSmlOver} MenuUIComponent.exitButtonSml=/images/gui/menu_new/exit_sml.png MenuUIComponent.exitButtonSmlOver=/images/gui/menu_new/exit_sml_over.png MenuUIComponent.createProfileButton=/images/gui/menu_new/create_profile.png @@ -114,10 +114,10 @@ AuthenticationMenu.createProfileButtonOver={MenuUIComponent.createProfileButtonO AuthenticationMenu.exitButtonSml={MenuUIComponent.exitButtonSml} AuthenticationMenu.exitButtonSmlOver={MenuUIComponent.exitButtonSmlOver} -CreateProfile.createProfileButton={AuthenticationMenu.createProfileButton} -CreateProfile.createProfileButtonOver={AuthenticationMenu.createProfileButtonOver} -CreateProfile.backButtonSml=/images/gui/menu_new/back_sml.png -CreateProfile.backButtonSmlOver=/images/gui/menu_new/back_sml_over.png +CreateProfileMenu.createProfileButton={AuthenticationMenu.createProfileButton} +CreateProfileMenu.createProfileButtonOver={AuthenticationMenu.createProfileButtonOver} +CreateProfileMenu.backButtonSml=/images/gui/menu_new/back_sml.png +CreateProfileMenu.backButtonSmlOver=/images/gui/menu_new/back_sml_over.png MainMenu.panelHeader=/images/gui/mainmenu.png MainMenu.createNew=/images/gui/menu_new/create_new.png
c1dcdf131031ab6903abf399a710744bae53f239
Mylyn Reviews
Cleanup -Renamed interface ReviewScopeItem -Moved xtext setup to core -removed now unecessary dependencies in ui
p
https://github.com/eclipse-mylyn/org.eclipse.mylyn.reviews
diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ReviewScopeItem.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/IReviewScopeItem.java similarity index 96% rename from tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ReviewScopeItem.java rename to tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/IReviewScopeItem.java index 87bf3993..3b6dae06 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ReviewScopeItem.java +++ b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/IReviewScopeItem.java @@ -18,7 +18,7 @@ * @author mattk * */ -public interface ReviewScopeItem { +public interface IReviewScopeItem { List<IReviewFile> getReviewFiles(NullProgressMonitor monitor)throws CoreException; diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/PatchScopeItem.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/PatchScopeItem.java index fd2459f7..13f92a7e 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/PatchScopeItem.java +++ b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/PatchScopeItem.java @@ -31,7 +31,7 @@ * @author mattk * */ -public class PatchScopeItem implements ReviewScopeItem { +public class PatchScopeItem implements IReviewScopeItem { private Attachment attachment; diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ResourceScopeItem.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ResourceScopeItem.java index 195f30a0..05ec47fd 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ResourceScopeItem.java +++ 
b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ResourceScopeItem.java @@ -32,7 +32,7 @@ * @author mattk * */ -public class ResourceScopeItem implements ReviewScopeItem { +public class ResourceScopeItem implements IReviewScopeItem { private Attachment attachment; public ResourceScopeItem(Attachment attachment) { diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ReviewScope.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ReviewScope.java index d9a42225..7cc85ee3 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ReviewScope.java +++ b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/ReviewScope.java @@ -19,13 +19,13 @@ * */ public class ReviewScope { - private List<ReviewScopeItem> items = new ArrayList<ReviewScopeItem>(); + private List<IReviewScopeItem> items = new ArrayList<IReviewScopeItem>(); private String creator; - public List<ReviewScopeItem> getItems() { + public List<IReviewScopeItem> getItems() { return Collections.unmodifiableList( items ); } - public void addScope(ReviewScopeItem item) { + public void addScope(IReviewScopeItem item) { this.items.add(item); } public String getCreator() { diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewScopeNode.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewScopeNode.java index 8299e9b4..504c2c51 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewScopeNode.java +++ b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewScopeNode.java @@ -18,7 +18,7 @@ import org.eclipse.mylyn.reviews.tasks.core.Rating; import org.eclipse.mylyn.reviews.tasks.core.ReviewResult; import 
org.eclipse.mylyn.reviews.tasks.core.ReviewScope; -import org.eclipse.mylyn.reviews.tasks.core.ReviewScopeItem; +import org.eclipse.mylyn.reviews.tasks.core.IReviewScopeItem; /** * * @author mattk @@ -47,15 +47,15 @@ public String getDescription() { } private static class Counter { int counter; - ReviewScopeItem item; - public Counter(ReviewScopeItem item) { + IReviewScopeItem item; + public Counter(IReviewScopeItem item) { this.item=item; } } private String convertScopeToDescription() { StringBuilder sb = new StringBuilder(); Map<String, Counter> counts = new TreeMap<String, Counter>(); - for (ReviewScopeItem item : scope.getItems()) { + for (IReviewScopeItem item : scope.getItems()) { String key = item.getType(1); if (!counts.containsKey(key)) { counts.put(key, new Counter(item)); diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewTaskMapper.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewTaskMapper.java index ec559bf7..65975a64 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewTaskMapper.java +++ b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewTaskMapper.java @@ -22,7 +22,7 @@ import org.eclipse.mylyn.reviews.tasks.core.Rating; import org.eclipse.mylyn.reviews.tasks.core.ResourceScopeItem; import org.eclipse.mylyn.reviews.tasks.core.ReviewScope; -import org.eclipse.mylyn.reviews.tasks.core.ReviewScopeItem; +import org.eclipse.mylyn.reviews.tasks.core.IReviewScopeItem; import org.eclipse.mylyn.reviews.tasks.core.TaskComment; import org.eclipse.mylyn.reviews.tasks.dsl.parser.antlr.ReviewDslParser; import org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.AttachmentSource; @@ -149,14 +149,14 @@ private org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScope mapScope( ReviewScope scope) { org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScope 
scope2 = ReviewDslFactory.eINSTANCE .createReviewScope(); - for (ReviewScopeItem item : scope.getItems()) { + for (IReviewScopeItem item : scope.getItems()) { scope2.getScope().add(mapScopeItem(item)); } return scope2; } private org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScopeItem mapScopeItem( - ReviewScopeItem item) { + IReviewScopeItem item) { if (item instanceof PatchScopeItem) { PatchScopeItem patchItem = (PatchScopeItem) item; PatchDef patch = ReviewDslFactory.eINSTANCE.createPatchDef(); @@ -174,7 +174,6 @@ private org.eclipse.mylyn.reviews.tasks.dsl.reviewDsl.ReviewScopeItem mapScopeIt resource.setSource(source); return resource; } - System.err.println("would return null"); return null; } diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewsUtil.java b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewsUtil.java index f0096105..5907e378 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewsUtil.java +++ b/tbr/org.eclipse.mylyn.reviews.tasks.core/src/org/eclipse/mylyn/reviews/tasks/core/internal/ReviewsUtil.java @@ -23,9 +23,14 @@ import org.eclipse.mylyn.reviews.tasks.core.patch.GitPatchPathFindingStrategy; import org.eclipse.mylyn.reviews.tasks.core.patch.ITargetPathStrategy; import org.eclipse.mylyn.reviews.tasks.core.patch.SimplePathFindingStrategy; +import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslStandaloneSetup; +import org.eclipse.mylyn.reviews.tasks.dsl.parser.antlr.ReviewDslParser; import org.eclipse.mylyn.tasks.core.ITask; import org.eclipse.mylyn.tasks.core.ITaskContainer; import org.eclipse.mylyn.tasks.core.data.ITaskDataManager; +import org.eclipse.xtext.parsetree.reconstr.Serializer; + +import com.google.inject.Injector; /** * @author Kilian Matt @@ -48,9 +53,8 @@ public static ITreeNode getReviewSubTasksFor(ITask task, if (task instanceof ITaskContainer) { ITaskContainer 
taskContainer = (ITaskContainer) task; for (ITask subTask : taskContainer.getChildren()) { - current.addChildren( - getReviewSubTasksFor(subTask, taskDataManager, - mapper, monitor)); + current.addChildren(getReviewSubTasksFor(subTask, + taskDataManager, mapper, monitor)); } } return current; @@ -87,4 +91,14 @@ public static Attachment findAttachment(String filename, String author, return null; } + public static ReviewTaskMapper createMapper() { + Injector createInjectorAndDoEMFRegistration = new ReviewDslStandaloneSetup() + .createInjectorAndDoEMFRegistration(); + ReviewDslParser parser = createInjectorAndDoEMFRegistration + .getInstance(ReviewDslParser.class); + Serializer serializer = createInjectorAndDoEMFRegistration + .getInstance(Serializer.class); + return new ReviewTaskMapper(parser, serializer); + } + } diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.ui/META-INF/MANIFEST.MF b/tbr/org.eclipse.mylyn.reviews.tasks.ui/META-INF/MANIFEST.MF index 30bc017f..3d08a164 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.ui/META-INF/MANIFEST.MF +++ b/tbr/org.eclipse.mylyn.reviews.tasks.ui/META-INF/MANIFEST.MF @@ -11,11 +11,6 @@ Require-Bundle: org.eclipse.ui, org.eclipse.mylyn.tasks.core;bundle-version="3.3.0", org.eclipse.compare;bundle-version="3.5.0", org.eclipse.core.resources;bundle-version="3.6.0", - org.eclipse.mylyn.reviews.tasks.dsl;bundle-version="0.0.1", - org.eclipse.emf.common;bundle-version="2.6.0", - org.eclipse.emf.ecore;bundle-version="2.6.1", - org.eclipse.xtext;bundle-version="1.0.1", - org.eclipse.xtext.util;bundle-version="1.0.1", org.eclipse.mylyn.reviews.tasks.core;bundle-version="0.0.1" Bundle-ActivationPolicy: lazy Bundle-RequiredExecutionEnvironment: JavaSE-1.6 diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/ReviewsUiPlugin.java b/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/ReviewsUiPlugin.java index 8a02ceb4..8651c34c 100644 --- 
a/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/ReviewsUiPlugin.java +++ b/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/ReviewsUiPlugin.java @@ -12,14 +12,10 @@ import org.eclipse.mylyn.reviews.tasks.core.IReviewMapper; import org.eclipse.mylyn.reviews.tasks.core.internal.ReviewTaskMapper; -import org.eclipse.mylyn.reviews.tasks.dsl.ReviewDslStandaloneSetup; -import org.eclipse.mylyn.reviews.tasks.dsl.parser.antlr.ReviewDslParser; +import org.eclipse.mylyn.reviews.tasks.core.internal.ReviewsUtil; import org.eclipse.ui.plugin.AbstractUIPlugin; -import org.eclipse.xtext.parsetree.reconstr.Serializer; import org.osgi.framework.BundleContext; -import com.google.inject.Injector; - /** * The activator class controls the plug-in life cycle * @@ -78,13 +74,7 @@ public static ReviewsUiPlugin getDefault() { public static IReviewMapper getMapper() { if (mapper == null) { - Injector createInjectorAndDoEMFRegistration = new ReviewDslStandaloneSetup() - .createInjectorAndDoEMFRegistration(); - ReviewDslParser parser = createInjectorAndDoEMFRegistration - .getInstance(ReviewDslParser.class); - Serializer serializer = createInjectorAndDoEMFRegistration - .getInstance(Serializer.class); - mapper = new ReviewTaskMapper(parser, serializer); + mapper=ReviewsUtil.createMapper(); } return mapper; } diff --git a/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/editors/ReviewTaskEditorPart.java b/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/editors/ReviewTaskEditorPart.java index 2881a1ed..327e148d 100644 --- a/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/editors/ReviewTaskEditorPart.java +++ b/tbr/org.eclipse.mylyn.reviews.tasks.ui/src/org/eclipse/mylyn/reviews/tasks/ui/editors/ReviewTaskEditorPart.java @@ -44,7 +44,7 @@ import org.eclipse.mylyn.reviews.tasks.core.Rating; import org.eclipse.mylyn.reviews.tasks.core.ReviewResult; 
import org.eclipse.mylyn.reviews.tasks.core.ReviewScope; -import org.eclipse.mylyn.reviews.tasks.core.ReviewScopeItem; +import org.eclipse.mylyn.reviews.tasks.core.IReviewScopeItem; import org.eclipse.mylyn.reviews.tasks.core.internal.TaskProperties; import org.eclipse.mylyn.reviews.tasks.ui.Images; import org.eclipse.mylyn.reviews.tasks.ui.ReviewsUiPlugin; @@ -118,8 +118,8 @@ public String getColumnText(Object node, int columnIndex) { Object element = ((TreeNode) node).getValue(); switch (columnIndex) { case COLUMN_GROUP: - if (element instanceof ReviewScopeItem) { - return ((ReviewScopeItem) element).getDescription(); + if (element instanceof IReviewScopeItem) { + return ((IReviewScopeItem) element).getDescription(); } break; case COLUMN_FILE: @@ -223,11 +223,11 @@ public void run() throws Exception { section.setExpanded(false); return; } - List<ReviewScopeItem> files = reviewScope.getItems(); + List<IReviewScopeItem> files = reviewScope.getItems(); final TreeNode[] rootNodes = new TreeNode[files.size()]; int index = 0; - for (ReviewScopeItem item : files) { + for (IReviewScopeItem item : files) { TreeNode node = new TreeNode(item); List<IReviewFile> reviewFiles = item .getReviewFiles(new NullProgressMonitor());
be560b58da1c89e56091cd109f620d7eda3a24fb
ReactiveX-RxJava
Incorporate review suggestions.--- Changes finally0 to finallyDo.-- Removes unnecessary subscription-wrapping.-- Handle exceptions in onCompleted/onError-
p
https://github.com/ReactiveX/RxJava
diff --git a/rxjava-core/src/main/java/rx/Observable.java b/rxjava-core/src/main/java/rx/Observable.java index 631e175751..fd14a42ce3 100644 --- a/rxjava-core/src/main/java/rx/Observable.java +++ b/rxjava-core/src/main/java/rx/Observable.java @@ -1192,8 +1192,8 @@ public static <T> Observable<T> concat(Observable<T>... source) { * @return an Observable that emits the same objects, then calls the action. * @see <a href="http://msdn.microsoft.com/en-us/library/hh212133(v=vs.103).aspx">MSDN: Observable.Finally Method</a> */ - public static <T> Observable<T> finally0(Observable source, Action0 action) { - return _create(OperationFinally.finally0(source, action)); + public static <T> Observable<T> finallyDo(Observable source, Action0 action) { + return _create(OperationFinally.finallyDo(source, action)); } /** @@ -2463,8 +2463,8 @@ public Observable<T> filter(Func1<T, Boolean> predicate) { * @return an Observable that emits the same objects as this observable, then calls the action. * @see <a href="http://msdn.microsoft.com/en-us/library/hh212133(v=vs.103).aspx">MSDN: Observable.Finally Method</a> */ - public Observable<T> finally0(Action0 action) { - return _create(OperationFinally.finally0(this, action)); + public Observable<T> finallyDo(Action0 action) { + return _create(OperationFinally.finallyDo(this, action)); } /** diff --git a/rxjava-core/src/main/java/rx/operators/OperationFinally.java b/rxjava-core/src/main/java/rx/operators/OperationFinally.java index d90b0572a6..636a8e61ae 100644 --- a/rxjava-core/src/main/java/rx/operators/OperationFinally.java +++ b/rxjava-core/src/main/java/rx/operators/OperationFinally.java @@ -33,12 +33,10 @@ public final class OperationFinally { /** * Call a given action when a sequence completes (with or without an * exception). 
The returned observable is exactly as threadsafe as the - * source observable; in particular, any situation allowing the source to - * call onComplete or onError multiple times allows the returned observable - * to call the final action multiple times. + * source observable. * <p/> * Note that "finally" is a Java reserved word and cannot be an identifier, - * so we use "finally0". + * so we use "finallyDo". * * @param sequence An observable sequence of elements * @param action An action to be taken when the sequence is complete or throws an exception @@ -48,7 +46,7 @@ public final class OperationFinally { * the given action will be called. * @see <a href="http://msdn.microsoft.com/en-us/library/hh212133(v=vs.103).aspx">MSDN Observable.Finally method</a> */ - public static <T> Func1<Observer<T>, Subscription> finally0(final Observable<T> sequence, final Action0 action) { + public static <T> Func1<Observer<T>, Subscription> finallyDo(final Observable<T> sequence, final Action0 action) { return new Func1<Observer<T>, Subscription>() { @Override public Subscription call(Observer<T> observer) { @@ -60,26 +58,14 @@ public Subscription call(Observer<T> observer) { private static class Finally<T> implements Func1<Observer<T>, Subscription> { private final Observable<T> sequence; private final Action0 finalAction; - private Subscription s; Finally(final Observable<T> sequence, Action0 finalAction) { this.sequence = sequence; this.finalAction = finalAction; } - private final AtomicObservableSubscription Subscription = new AtomicObservableSubscription(); - - private final Subscription actualSubscription = new Subscription() { - @Override - public void unsubscribe() { - if (null != s) - s.unsubscribe(); - } - }; - public Subscription call(Observer<T> observer) { - s = sequence.subscribe(new FinallyObserver(observer)); - return Subscription.wrap(actualSubscription); + return sequence.subscribe(new FinallyObserver(observer)); } private class FinallyObserver implements 
Observer<T> { @@ -91,14 +77,20 @@ private class FinallyObserver implements Observer<T> { @Override public void onCompleted() { - observer.onCompleted(); - finalAction.call(); + try { + observer.onCompleted(); + } finally { + finalAction.call(); + } } @Override public void onError(Exception e) { - observer.onError(e); - finalAction.call(); + try { + observer.onError(e); + } finally { + finalAction.call(); + } } @Override @@ -117,7 +109,7 @@ public void before() { aObserver = mock(Observer.class); } private void checkActionCalled(Observable<String> input) { - Observable.create(finally0(input, aAction0)).subscribe(aObserver); + Observable.create(finallyDo(input, aAction0)).subscribe(aObserver); verify(aAction0, times(1)).call(); } @Test
610c4bbad87af05da8ebd7581f64c8fb3d2388a7
restlet-framework-java
Fixed potential infinite loops while reading- headers (issues -599 and -656). Reported by weiweiwang.--
c
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet/src/org/restlet/engine/header/HeaderReader.java b/modules/org.restlet/src/org/restlet/engine/header/HeaderReader.java index ee42dc4cee..e0ad1aafd3 100644 --- a/modules/org.restlet/src/org/restlet/engine/header/HeaderReader.java +++ b/modules/org.restlet/src/org/restlet/engine/header/HeaderReader.java @@ -303,6 +303,8 @@ public void addValues(Collection<V> values) { skipSpaces(); do { + int i = index; + // Read the first value V nextValue = readValue(); if (canAdd(nextValue, values)) { @@ -312,6 +314,11 @@ public void addValues(Collection<V> values) { // Attempt to skip the value separator skipValueSeparator(); + if (i == index) { + // Infinite loop + throw new IOException( + "The reading of one header initiates an infinite loop"); + } } while (peek() != -1); } catch (IOException ioe) { Context.getCurrentLogger().log(Level.INFO,
42393d7aacf98e0e1657aadebdc8d4961db7a447
Vala
codegen: Inherit array_length and array_null_terminated of methods Fixes bug 726744
a
https://github.com/GNOME/vala/
diff --git a/codegen/valaccodeattribute.vala b/codegen/valaccodeattribute.vala index 05745df0cf..ce1dcc7836 100644 --- a/codegen/valaccodeattribute.vala +++ b/codegen/valaccodeattribute.vala @@ -1303,6 +1303,13 @@ public class Vala.CCodeAttribute : AttributeCache { if (param.base_parameter != null) { return CCodeBaseModule.get_ccode_array_length (param.base_parameter); } + } else if (node is Method) { + var method = (Method) node; + if (method.base_method != null && method.base_method != method) { + return CCodeBaseModule.get_ccode_array_length (method.base_method); + } else if (method.base_interface_method != null && method.base_interface_method != method) { + return CCodeBaseModule.get_ccode_array_length (method.base_interface_method); + } } return true; } @@ -1313,6 +1320,13 @@ public class Vala.CCodeAttribute : AttributeCache { if (param.base_parameter != null) { return CCodeBaseModule.get_ccode_array_null_terminated (param.base_parameter); } + } else if (node is Method) { + var method = (Method) node; + if (method.base_method != null && method.base_method != method) { + return CCodeBaseModule.get_ccode_array_null_terminated (method.base_method); + } else if (method.base_interface_method != null && method.base_interface_method != method) { + return CCodeBaseModule.get_ccode_array_null_terminated (method.base_interface_method); + } } return false; }
e4671f5ff0a01bd22e6d21f666188380646821c2
camel
CAMEL-751 Get the CamelContext ErrorHandler to- work--git-svn-id: https://svn.apache.org/repos/asf/activemq/camel/trunk@679345 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/camel
diff --git a/camel-core/src/main/java/org/apache/camel/CamelContext.java b/camel-core/src/main/java/org/apache/camel/CamelContext.java index f248808bbbe77..17d31c112e246 100644 --- a/camel-core/src/main/java/org/apache/camel/CamelContext.java +++ b/camel-core/src/main/java/org/apache/camel/CamelContext.java @@ -208,10 +208,10 @@ public interface CamelContext extends Service { /** * Gets the default error handler builder which is inherited by the routes */ - public ErrorHandlerBuilder getErrorHandlerBuilder(); + ErrorHandlerBuilder getErrorHandlerBuilder(); /** * Sets the default error handler builder which is inherited by the routes */ - public void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder); + void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder); } diff --git a/camel-core/src/main/java/org/apache/camel/builder/BuilderSupport.java b/camel-core/src/main/java/org/apache/camel/builder/BuilderSupport.java index 9524700335890..21a5592c1665f 100644 --- a/camel-core/src/main/java/org/apache/camel/builder/BuilderSupport.java +++ b/camel-core/src/main/java/org/apache/camel/builder/BuilderSupport.java @@ -247,11 +247,7 @@ public ErrorHandlerBuilder getErrorHandlerBuilder() { protected ErrorHandlerBuilder createErrorHandlerBuilder() { if (isInheritErrorHandler()) { - ErrorHandlerBuilder errorHandler= context.getErrorHandlerBuilder(); - if (errorHandler == null) { - errorHandler = new DeadLetterChannelBuilder(); - } - return errorHandler; + return new DeadLetterChannelBuilder(); } else { return new NoErrorHandlerBuilder(); } diff --git a/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java b/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java index d188e8360436c..7b690fbab1a12 100644 --- a/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java +++ b/camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java @@ -173,6 +173,11 @@ public void setErrorHandlerBuilder(ErrorHandlerBuilder 
errorHandlerBuilder) { // ----------------------------------------------------------------------- protected void checkInitialized() throws Exception { if (initialized.compareAndSet(false, true)) { + // Set the CamelContext ErrorHandler here + CamelContext camelContext = getContext(); + if (camelContext.getErrorHandlerBuilder() != null) { + setErrorHandlerBuilder(camelContext.getErrorHandlerBuilder()); + } configure(); populateRoutes(routes); } @@ -194,21 +199,21 @@ public void setRouteCollection(RoutesType routeCollection) { public RoutesType getRouteCollection() { return this.routeCollection; } - + /** - * Completely disable stream caching for all routes being defined in the same RouteBuilder after this. + * Completely disable stream caching for all routes being defined in the same RouteBuilder after this. */ public void noStreamCaching() { StreamCachingInterceptor.noStreamCaching(routeCollection.getInterceptors()); } - + /** * Enable stream caching for all routes being defined in the same RouteBuilder after this call. */ public void streamCaching() { routeCollection.intercept(new StreamCachingInterceptor()); } - + /** * Factory method */ diff --git a/camel-core/src/main/java/org/apache/camel/builder/xml/MessageVariableResolver.java b/camel-core/src/main/java/org/apache/camel/builder/xml/MessageVariableResolver.java index b56a358323ec2..b7ab43dd60bbc 100644 --- a/camel-core/src/main/java/org/apache/camel/builder/xml/MessageVariableResolver.java +++ b/camel-core/src/main/java/org/apache/camel/builder/xml/MessageVariableResolver.java @@ -37,7 +37,7 @@ * A variable resolver for XPath expressions which support properties on the * messge, exchange as well as making system properties and environment * properties available. 
- * + * * @version $Revision$ */ public class MessageVariableResolver implements XPathVariableResolver { @@ -75,8 +75,8 @@ public Object resolveVariable(QName name) { try { answer = System.getProperty(localPart); } catch (Exception e) { - LOG.debug("Security exception evaluating system property: " + localPart + - ". Reason: " + e, e); + LOG.debug("Security exception evaluating system property: " + localPart + + ". Reason: " + e, e); } } else if (uri.equals(ENVIRONMENT_VARIABLES)) { answer = System.getenv().get(localPart); diff --git a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java index 5884c1487119d..5f4719afa6a49 100644 --- a/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java +++ b/camel-core/src/main/java/org/apache/camel/impl/DefaultCamelContext.java @@ -338,7 +338,7 @@ public void addRoutes(Collection<Route> routes) throws Exception { public void addRoutes(Routes builder) throws Exception { // lets now add the routes from the builder - builder.setContext(this); + builder.setContext(this); List<Route> routeList = builder.getRouteList(); LOG.debug("Adding routes from: " + builder + " routes: " + routeList); addRoutes(routeList); diff --git a/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java b/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java index 6d3e09327ae6d..1d4d3b635aab5 100644 --- a/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java +++ b/camel-core/src/main/java/org/apache/camel/model/ProcessorType.java @@ -1596,7 +1596,7 @@ protected ErrorHandlerBuilder createErrorHandlerBuilder() { if (errorHandlerRef != null) { return new ErrorHandlerBuilderRef(errorHandlerRef); } - if (isInheritErrorHandler()) { + if (isInheritErrorHandler()) { return new DeadLetterChannelBuilder(); } else { return new NoErrorHandlerBuilder(); diff --git 
a/camel-core/src/test/java/org/apache/camel/builder/ContextErrorHandlerTest.java b/camel-core/src/test/java/org/apache/camel/builder/ContextErrorHandlerTest.java new file mode 100644 index 0000000000000..70a7406424728 --- /dev/null +++ b/camel-core/src/test/java/org/apache/camel/builder/ContextErrorHandlerTest.java @@ -0,0 +1,115 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.camel.builder; + +import java.util.List; + +import org.apache.camel.CamelContext; +import org.apache.camel.ContextTestSupport; +import org.apache.camel.Endpoint; +import org.apache.camel.Processor; +import org.apache.camel.Route; +import org.apache.camel.TestSupport; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.impl.EventDrivenConsumerRoute; +import org.apache.camel.processor.DeadLetterChannel; +import org.apache.camel.processor.LoggingErrorHandler; +import org.apache.camel.processor.RedeliveryPolicy; +import org.apache.camel.processor.SendProcessor; + +public class ContextErrorHandlerTest extends ContextTestSupport { + + protected void setUp() throws Exception { + setUseRouteBuilder(false); + super.setUp(); + RedeliveryPolicy redeliveryPolicy = new RedeliveryPolicy(); + redeliveryPolicy.maximumRedeliveries(1); + redeliveryPolicy.setUseExponentialBackOff(true); + DeadLetterChannelBuilder deadLetterChannelBuilder = new DeadLetterChannelBuilder(); + deadLetterChannelBuilder.setRedeliveryPolicy(redeliveryPolicy); + context.setErrorHandlerBuilder(deadLetterChannelBuilder); + } + + protected void startCamelContext() throws Exception { + // do nothing here + } + + protected void stopCamelContext() throws Exception { + // do nothing here + } + + protected List<Route> getRouteList(RouteBuilder builder) throws Exception { + context.addRoutes(builder); + context.start(); + List<Route> answer = context.getRoutes(); + context.stop(); + return answer; + } + + public void testOverloadingTheDefaultErrorHandler() throws Exception { + + RouteBuilder builder = new RouteBuilder() { + public void configure() { + errorHandler(loggingErrorHandler("FOO.BAR")); + from("seda:a").to("seda:b"); + } + }; + + List<Route> list = getRouteList(builder); + assertEquals("Number routes created" + list, 1, list.size()); + for (Route route : list) { + Endpoint key = route.getEndpoint(); + assertEquals("From endpoint", "seda:a", 
key.getEndpointUri()); + + EventDrivenConsumerRoute consumerRoute = assertIsInstanceOf(EventDrivenConsumerRoute.class, route); + Processor processor = consumerRoute.getProcessor(); + processor = unwrap(processor); + LoggingErrorHandler loggingProcessor = assertIsInstanceOf(LoggingErrorHandler.class, processor); + processor = unwrap(loggingProcessor.getOutput()); + SendProcessor sendProcessor = assertIsInstanceOf(SendProcessor.class, processor); + log.debug("Found sendProcessor: " + sendProcessor); + } + } + + public void testGetTheDefaultErrorHandlerFromContext() throws Exception { + + RouteBuilder builder = new RouteBuilder() { + public void configure() { + from("seda:a").to("seda:b"); + } + }; + + List<Route> list = getRouteList(builder); + assertEquals("Number routes created" + list, 1, list.size()); + for (Route route : list) { + Endpoint key = route.getEndpoint(); + assertEquals("From endpoint", "seda:a", key.getEndpointUri()); + + EventDrivenConsumerRoute consumerRoute = assertIsInstanceOf(EventDrivenConsumerRoute.class, route); + Processor processor = consumerRoute.getProcessor(); + processor = unwrap(processor); + + DeadLetterChannel deadLetterChannel = assertIsInstanceOf(DeadLetterChannel.class, processor); + + RedeliveryPolicy redeliveryPolicy = deadLetterChannel.getRedeliveryPolicy(); + + assertEquals("getMaximumRedeliveries()", 1, redeliveryPolicy.getMaximumRedeliveries()); + assertEquals("isUseExponentialBackOff()", true, redeliveryPolicy.isUseExponentialBackOff()); + } + } + +}
50e3ca62e5b5cceb13ead212f50aaae57e8990f5
orientdb
Working to fix corrupted data in sockets--
c
https://github.com/orientechnologies/orientdb
diff --git a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java index 7b3c2430ba3..4d2373a9126 100644 --- a/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java +++ b/server/src/main/java/com/orientechnologies/orient/server/network/protocol/binary/OBinaryNetworkProtocolAbstract.java @@ -154,6 +154,12 @@ protected void sendOk(final int iClientTxId) throws IOException { } protected void sendError(final int iClientTxId, final Throwable t) throws IOException { + if (t instanceof SocketException) { + // DON'T SEND TO THE CLIENT BECAUSE THE SOCKET HAS PROBLEMS + shutdown(); + return; + } + channel.acquireExclusiveLock(); try {
19b650c78a1c76f4fd90274d7f163f863c0d39e4
hdiv$hdiv
Memory and performance optimizations
p
https://github.com/hdiv/hdiv
diff --git a/hdiv-config/src/main/java/org/hdiv/config/xml/ConfigBeanDefinitionParser.java b/hdiv-config/src/main/java/org/hdiv/config/xml/ConfigBeanDefinitionParser.java index 31cc5be7..56fb4279 100644 --- a/hdiv-config/src/main/java/org/hdiv/config/xml/ConfigBeanDefinitionParser.java +++ b/hdiv-config/src/main/java/org/hdiv/config/xml/ConfigBeanDefinitionParser.java @@ -30,7 +30,7 @@ import org.hdiv.config.multipart.SpringMVCMultipartConfig; import org.hdiv.context.RedirectHelper; import org.hdiv.dataComposer.DataComposerFactory; -import org.hdiv.dataValidator.DataValidatorFactory; +import org.hdiv.dataValidator.DataValidator; import org.hdiv.dataValidator.ValidationResult; import org.hdiv.events.HDIVFacesEventListener; import org.hdiv.filter.DefaultRequestInitializer; @@ -51,6 +51,7 @@ import org.hdiv.session.SessionHDIV; import org.hdiv.session.StateCache; import org.hdiv.state.StateUtil; +import org.hdiv.urlProcessor.BasicUrlProcessor; import org.hdiv.urlProcessor.FormUrlProcessor; import org.hdiv.urlProcessor.LinkUrlProcessor; import org.hdiv.util.EncodingUtil; @@ -137,13 +138,15 @@ public class ConfigBeanDefinitionParser implements BeanDefinitionParser { protected RuntimeBeanReference stateUtilRef; - protected RuntimeBeanReference dataValidatorFactoryRef; + protected RuntimeBeanReference dataValidatorRef; protected RuntimeBeanReference dataComposerFactoryRef; protected RuntimeBeanReference linkUrlProcessorRef; protected RuntimeBeanReference formUrlProcessorRef; + + protected RuntimeBeanReference basicUrlProcessorRef; protected RuntimeBeanReference loggerRef; @@ -176,10 +179,11 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { this.createCipher(element, source, parserContext); this.createSimpleBean(element, source, parserContext, ValidationResult.class); this.stateUtilRef = this.createStateUtil(element, source, parserContext); - this.dataValidatorFactoryRef = this.createDataValidatorFactory(element, source, parserContext); + 
this.dataValidatorRef = this.createDataValidator(element, source, parserContext); this.dataComposerFactoryRef = this.createDataComposerFactory(element, source, parserContext); this.linkUrlProcessorRef = this.createLinkUrlProcessor(element, source, parserContext); this.formUrlProcessorRef = this.createFormUrlProcessor(element, source, parserContext); + this.basicUrlProcessorRef = this.createBasicUrlProcessor(element, source, parserContext); this.createRequestInitializer(element, source, parserContext); // register Spring MVC beans if we are using Spring MVC web framework @@ -351,11 +355,11 @@ protected RuntimeBeanReference createStateUtil(Element element, Object source, P return new RuntimeBeanReference(name); } - protected RuntimeBeanReference createDataValidatorFactory(Element element, Object source, ParserContext parserContext) { - RootBeanDefinition bean = new RootBeanDefinition(DataValidatorFactory.class); + protected RuntimeBeanReference createDataValidator(Element element, Object source, ParserContext parserContext) { + RootBeanDefinition bean = new RootBeanDefinition(DataValidator.class); bean.setSource(source); bean.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); - bean.getPropertyValues().addPropertyValue("hdivConfig", this.configRef); + bean.getPropertyValues().addPropertyValue("config", this.configRef); String name = parserContext.getReaderContext().generateBeanName(bean); parserContext.getRegistry().registerBeanDefinition(name, bean); return new RuntimeBeanReference(name); @@ -392,7 +396,8 @@ protected RuntimeBeanReference createValidatorHelper(Element element, Object sou bean.getPropertyValues().addPropertyValue("stateUtil", this.stateUtilRef); bean.getPropertyValues().addPropertyValue("hdivConfig", this.configRef); bean.getPropertyValues().addPropertyValue("session", this.sessionRef); - bean.getPropertyValues().addPropertyValue("dataValidatorFactory", this.dataValidatorFactoryRef); + bean.getPropertyValues().addPropertyValue("dataValidator", 
this.dataValidatorRef); + bean.getPropertyValues().addPropertyValue("urlProcessor", this.basicUrlProcessorRef); bean.getPropertyValues().addPropertyValue("dataComposerFactory", this.dataComposerFactoryRef); parserContext.getRegistry().registerBeanDefinition(VALIDATOR_HELPER_NAME, bean); return new RuntimeBeanReference(VALIDATOR_HELPER_NAME); @@ -460,6 +465,17 @@ protected RuntimeBeanReference createFormUrlProcessor(Element element, Object so return new RuntimeBeanReference(FORM_URL_PROCESSOR_NAME); } } + + protected RuntimeBeanReference createBasicUrlProcessor(Element element, Object source, ParserContext parserContext) { + RootBeanDefinition bean = new RootBeanDefinition(BasicUrlProcessor.class); + bean.setSource(source); + bean.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); + bean.getPropertyValues().addPropertyValue("config", this.configRef); + + String name = parserContext.getReaderContext().generateBeanName(bean); + parserContext.getRegistry().registerBeanDefinition(name, bean); + return new RuntimeBeanReference(name); + } protected RuntimeBeanReference createRequestDataValueProcessor(Element element, Object source, ParserContext parserContext) { @@ -622,7 +638,8 @@ protected RuntimeBeanReference createJsfValidatorHelper(Element element, Object bean.getPropertyValues().addPropertyValue("stateUtil", this.stateUtilRef); bean.getPropertyValues().addPropertyValue("hdivConfig", this.configRef); bean.getPropertyValues().addPropertyValue("session", this.sessionRef); - bean.getPropertyValues().addPropertyValue("dataValidatorFactory", this.dataValidatorFactoryRef); + bean.getPropertyValues().addPropertyValue("dataValidator", this.dataValidatorRef); + bean.getPropertyValues().addPropertyValue("urlProcessor", this.basicUrlProcessorRef); bean.getPropertyValues().addPropertyValue("dataComposerFactory", this.dataComposerFactoryRef); String name = parserContext.getReaderContext().generateBeanName(bean); diff --git 
a/hdiv-core/src/main/java/org/hdiv/dataComposer/AbstractDataComposer.java b/hdiv-core/src/main/java/org/hdiv/dataComposer/AbstractDataComposer.java index 3259d2ac..2f7899d5 100644 --- a/hdiv-core/src/main/java/org/hdiv/dataComposer/AbstractDataComposer.java +++ b/hdiv-core/src/main/java/org/hdiv/dataComposer/AbstractDataComposer.java @@ -87,8 +87,8 @@ public void init() { */ public void initPage() { this.page = new Page(); - String pageId = this.session.getPageId(); - this.page.setName(pageId); + int pageId = this.session.getPageId(); + this.page.setId(pageId); this.page.setRandomToken(this.uidGenerator.generateUid().toString()); } diff --git a/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerCipher.java b/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerCipher.java index cfd2fe0f..384e2773 100644 --- a/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerCipher.java +++ b/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerCipher.java @@ -71,7 +71,7 @@ public class DataComposerCipher extends DataComposerMemory { public String endRequest() { IState state = super.getStatesStack().pop(); - state.setPageId(this.getPage().getName()); + state.setPageId(this.getPage().getId()); String stateData = this.encodingUtil.encode64Cipher(state); String id = null; @@ -89,9 +89,9 @@ public String endRequest() { super.startPage(); this.getPage().addState(state); - state.setPageId(this.getPage().getName()); + state.setPageId(this.getPage().getId()); - id = this.getPage().getName() + DASH + state.getId() + DASH + this.getHdivStateSuffix(); + id = this.getPage().getId() + DASH + state.getId() + DASH + this.getHdivStateSuffix(); } return (id != null) ? 
id : stateData; diff --git a/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerFactory.java b/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerFactory.java index 495f4de7..8bec5410 100644 --- a/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerFactory.java +++ b/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerFactory.java @@ -132,12 +132,12 @@ protected void initDataComposer(IDataComposer dataComposer, HttpServletRequest r // We are modifying an existing state, preload dataComposer with it IState state = this.stateUtil.restoreState(preState); - if(state.getPageId() != null){ - IPage page = this.session.getPage(state.getPageId()); + if (state.getPageId() > 0) { + IPage page = this.session.getPage(state.getPageId() + ""); if (page != null) { dataComposer.startPage(page); } - } + } if (state != null) { dataComposer.beginRequest(state); } diff --git a/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerHash.java b/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerHash.java index b22c9273..420f59a2 100644 --- a/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerHash.java +++ b/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerHash.java @@ -55,7 +55,7 @@ public class DataComposerHash extends DataComposerMemory { public String endRequest() { IState state = super.getStatesStack().pop(); - state.setPageId(this.getPage().getName()); + state.setPageId(this.getPage().getId()); String id = null; String stateWithSuffix = null; @@ -72,9 +72,9 @@ public String endRequest() { } this.getPage().addState(state); - state.setPageId(this.getPage().getName()); + state.setPageId(this.getPage().getId()); - id = this.getPage().getName() + DASH + state.getId() + DASH + this.getHdivStateSuffix(); + id = this.getPage().getId() + DASH + state.getId() + DASH + this.getHdivStateSuffix(); } else { // generate hash to add to the page that will be stored in session diff --git 
a/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerMemory.java b/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerMemory.java index f8f5945b..26b23ae3 100644 --- a/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerMemory.java +++ b/hdiv-core/src/main/java/org/hdiv/dataComposer/DataComposerMemory.java @@ -17,6 +17,8 @@ import java.io.UnsupportedEncodingException; import java.net.URLDecoder; +import java.util.HashMap; +import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -215,6 +217,76 @@ public String compose(String parameter, String value, boolean editable, String e return this.compose(parameter, value, editable, editableName, isActionParam, method, Constants.ENCODING_UTF_8); } + /* + * (non-Javadoc) + * + * @see org.hdiv.dataComposer.IDataComposer#composeParams(java.lang.String, java.lang.String, java.lang.String) + */ + public String composeParams(String parameters, String method, String charEncoding) { + + if (parameters == null || parameters.length() == 0) { + return null; + } + + // Get actual IState + IState state = this.getStatesStack().peek(); + + state.setParams(parameters); + + if (this.hdivConfig.getConfidentiality()) { + // replace real values with confidential ones + parameters = this.applyConfidentialityToParams(parameters, method); + } + + return parameters; + } + + /** + * Apply confidentiality to parameters String. Replaces real values with confidential ones. + * + * @param parameters + * parameters in query format + * @param method + * HTTP method + * @return parameters in query format with confidential values + */ + private String applyConfidentialityToParams(String parameters, String method) { + + Map<String, Integer> pCount = new HashMap<String, Integer>(); + + String newParameters = parameters; + + // Init indexes + int beginIndex = 0; + int endIndex = parameters.indexOf("&") > 0 ? 
parameters.indexOf("&") : parameters.length(); + do { + String param = parameters.substring(beginIndex, endIndex); + int index = param.indexOf("="); + index = index < 0 ? param.length() : index; + String name = param.substring(0, index); + + if (this.isConfidentialParam(name, method)) { + // Parameter is not a start parameter + Integer count = pCount.get(name); + int num = (count == null) ? 0 : count + 1; + pCount.put(name, num); + + // Replace parameter with confidential values + newParameters = newParameters.replaceFirst(param, name + "=" + num); + } + + // Update indexes + beginIndex = endIndex + 1; + endIndex = parameters.indexOf("&", endIndex + 1); + if (endIndex < 0) { + endIndex = parameters.length(); + } + + } while (endIndex > beginIndex); + + return newParameters; + } + /** * Adds a new IParameter object, generated from the values passed as parameters, to the current state * <code>state</code>. If confidentiality is activated it generates a new encoded value that will be returned by the @@ -518,7 +590,7 @@ public String beginRequest(IState state) { this.requestCounter = state.getId() + 1; - String id = this.getPage().getName() + DASH + state.getId() + DASH + this.getHdivStateSuffix(); + String id = this.getPage().getId() + DASH + state.getId() + DASH + this.getHdivStateSuffix(); return id; } @@ -534,7 +606,7 @@ public String endRequest() { IState state = this.getStatesStack().pop(); IPage page = this.getPage(); - state.setPageId(page.getName()); + state.setPageId(page.getId()); page.addState(state); // Save Page in session if this is the first state to add @@ -544,7 +616,7 @@ public String endRequest() { super.session.addPage(page.getName(), page); } - String id = this.getPage().getName() + DASH + state.getId() + DASH + this.getHdivStateSuffix(); + String id = this.getPage().getId() + DASH + state.getId() + DASH + this.getHdivStateSuffix(); return id; } diff --git a/hdiv-core/src/main/java/org/hdiv/dataComposer/IDataComposer.java 
b/hdiv-core/src/main/java/org/hdiv/dataComposer/IDataComposer.java index bbf21f2e..6d3d0886 100644 --- a/hdiv-core/src/main/java/org/hdiv/dataComposer/IDataComposer.java +++ b/hdiv-core/src/main/java/org/hdiv/dataComposer/IDataComposer.java @@ -119,6 +119,20 @@ public interface IDataComposer { public String compose(String parameter, String value, boolean editable, String editableName, boolean isActionParam, String method, String charEncoding); + /** + * Compose parameters in request query format. For example: param1=val1&param2=val2 + * + * @param parameters + * parameters in query format + * @param method + * http method + * @param charEncoding + * character encoding + * @return parameters in query format with confidential values + * @since HDIV 2.1.7 + */ + public String composeParams(String parameters, String method, String charEncoding); + /** * Adds a new IParameter object, generated from the values passed as parameters, to the current state * <code>state</code>. If confidentiality is activated it generates a new encoded value that will be returned by the diff --git a/hdiv-core/src/main/java/org/hdiv/dataValidator/DataValidator.java b/hdiv-core/src/main/java/org/hdiv/dataValidator/DataValidator.java index c7c3b5ea..3e27fbab 100644 --- a/hdiv-core/src/main/java/org/hdiv/dataValidator/DataValidator.java +++ b/hdiv-core/src/main/java/org/hdiv/dataValidator/DataValidator.java @@ -22,12 +22,11 @@ import org.apache.commons.logging.LogFactory; import org.hdiv.config.HDIVConfig; import org.hdiv.state.IParameter; -import org.hdiv.state.IState; import org.hdiv.util.HDIVUtil; /** - * It uses an object of type IState and validates all the entry data, besides to replacing the relative values by its - * real values. + * Validates that one parameter value or values are correct, besides to replacing the relative values by its real + * values. 
* * @author Roberto Velasco * @author Oscar Ocariz @@ -39,41 +38,19 @@ public class DataValidator implements IDataValidator { */ private Log log = LogFactory.getLog(DataValidator.class); - /** - * Object that represents the result of the validation. - */ - private IValidationResult validationResult; - - /** - * State that represents all the data that composes a request or a form. - */ - private IState state; - /** * HDIV general configuration. */ private HDIVConfig config; - /** - * <p> - * Checks if the value <code>data</code> sent by the user to the server in the parameter <code>parameter</code> is - * correct or not. The received value is checked with the one stored in the state to decide if it is correct. - * </p> - * <p> - * In the encoded and hash strategies, the state is obtained from the user request. However, in the memory strategy - * the state is obtained from the user session, using the state identifier received within the request. - * </p> + /* + * (non-Javadoc) * - * @param value - * value sent by the client - * @param target - * target action name - * @param parameter - * parameter name - * @return object that represents the result of the validation process for the parameter <code>parameter</code> and - * the value <code>data</code>. 
+ * @see org.hdiv.ee.dataValidator.IDataValidator#validate(java.lang.String, java.lang.String, java.lang.String, + * org.hdiv.state.IParameter, java.lang.String[]) */ - public IValidationResult validate(String value, String target, String parameter) { + public IValidationResult validate(String value, String target, String parameter, IParameter stateParameter, + String[] actionParamValues) { boolean confidentiality = this.config.getConfidentiality(); boolean noConfidentiality = this.config.isParameterWithoutConfidentiality(parameter); @@ -81,41 +58,70 @@ public IValidationResult validate(String value, String target, String parameter) log.debug("Parameter [" + parameter + "] is ParameterWithoutConfidentiality."); } - IParameter stateParameter = this.state.getParameter(parameter); + IValidationResult result = new ValidationResult(); + + // TODO include here checking that there are no more values. Currently done in the helper + if (!confidentiality || noConfidentiality) { // Confidentiality = false - if (stateParameter.existValue(value)) { - validationResult.setResult(value); - validationResult.setLegal(true); + if (stateParameter != null) { + if (stateParameter.existValue(value)) { + result.setResult(value); + result.setLegal(true); + } else { + result.setLegal(false); + } + return result; } else { - validationResult.setLegal(false); + // actionParamValues contains values + for (int i = 0; i < actionParamValues.length; i++) { + if (value.equals(actionParamValues[i])) { + result.setResult(value); + result.setLegal(true); + return result; + } + } + result.setLegal(false); + return result; } - return validationResult; - } else { // Confidentiality = true if (!this.isInt(value)) { - validationResult.setLegal(false); - return validationResult; + result.setLegal(false); + return result; } // Confidentiality assures that data is int value int position = new Integer(value).intValue(); - if (stateParameter.existPosition(position)) { + if (stateParameter != null) { + + if 
(stateParameter.existPosition(position)) { - validationResult.setLegal(true); + result.setLegal(true); - // update position value with the original value - validationResult.setResult(stateParameter.getValuePosition(position)); - return validationResult; + // update position value with the original value + result.setResult(stateParameter.getValuePosition(position)); + return result; + } else { + result.setLegal(false); + return result; + } } else { - validationResult.setLegal(false); - return validationResult; + + if (actionParamValues.length > position) { + + result.setLegal(true); + result.setResult(actionParamValues[position]); + return result; + } + result.setLegal(false); + return result; } + } } @@ -132,22 +138,6 @@ private boolean isInt(String data) { return m.matches(); } - public IValidationResult getValidationResult() { - return validationResult; - } - - public void setValidationResult(IValidationResult validationResult) { - this.validationResult = validationResult; - } - - /** - * @param state - * The validation process state to set. - */ - public void setState(IState state) { - this.state = state; - } - /** * @param config * the config to set diff --git a/hdiv-core/src/main/java/org/hdiv/dataValidator/DataValidatorFactory.java b/hdiv-core/src/main/java/org/hdiv/dataValidator/DataValidatorFactory.java deleted file mode 100644 index fa4f247f..00000000 --- a/hdiv-core/src/main/java/org/hdiv/dataValidator/DataValidatorFactory.java +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Copyright 2005-2013 hdiv.org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.hdiv.dataValidator; - -import org.hdiv.config.HDIVConfig; -import org.hdiv.state.IState; - -/** - * DataValidator object factory, more efficient than to use the Spring factory. - * - * @author Gotzon Illarramendi - * @since HDIV 2.1.0 - */ -public class DataValidatorFactory { - - /** - * HDIV configuration object. - */ - private HDIVConfig hdivConfig; - - /** - * Creates a new instance of IDataValidator that validates the request over the state. - * - * @param state - * IState object - * @return IDataValidator instance - */ - public IDataValidator newInstance(IState state) { - - IValidationResult result = new ValidationResult(); - DataValidator dataValidator = new DataValidator(); - dataValidator.setValidationResult(result); - dataValidator.setConfig(this.hdivConfig); - dataValidator.setState(state); - return dataValidator; - } - - public void setHdivConfig(HDIVConfig hdivConfig) { - this.hdivConfig = hdivConfig; - } - -} diff --git a/hdiv-core/src/main/java/org/hdiv/dataValidator/IDataValidator.java b/hdiv-core/src/main/java/org/hdiv/dataValidator/IDataValidator.java index 5919ff79..53f3a4b7 100644 --- a/hdiv-core/src/main/java/org/hdiv/dataValidator/IDataValidator.java +++ b/hdiv-core/src/main/java/org/hdiv/dataValidator/IDataValidator.java @@ -15,40 +15,42 @@ */ package org.hdiv.dataValidator; - -import org.hdiv.state.IState; +import org.hdiv.state.IParameter; /** - * Interface to validate the data sent by the user. - * + * Interface to validate a parameter sent by the user. + * * @author Roberto Velasco * @author Oscar Ocariz */ public interface IDataValidator { - + /** * <p> - * Checks if the value <code>data</code> sent by the user to the server in the - * parameter <code>parameter</code> is correct or not. The received value is - * checked with the one stored in the state to decide if it is correct. 
+ * Checks if the value <code>data</code> sent by the user to the server in the parameter <code>parameter</code> is + * correct or not. The received value is checked with the one stored in the state to decide if it is correct. * </p> * <p> - * In the encoded and hash strategies, the state is obtained from the user - * request. However, in the memory strategy the state is obtained from the user - * session, using the state identifier receiced within the request. + * In the encoded and hash strategies, the state is obtained from the user request. However, in the memory strategy + * the state is obtained from the user session, using the state identifier received within the request. * </p> * - * @param value value sent by the client - * @param target target action name - * @param parameter parameter name - * @return object that represents the result of the validation process for the - * parameter <code>parameter</code> and the value <code>data</code>. - */ - public IValidationResult validate(String value, String target, String parameter); - - /** - * @param state The validation process state to set. + * @param value + * value sent by the client + * @param target + * target action name + * @param parameter + * parameter name + * @param stateParameter + * {@link IParameter} object with parameters data + * @param actionParamValues + * values for the action parameters + * @return object that represents the result of the validation process for the parameter <code>parameter</code> and + * the value <code>data</code>. 
*/ - public void setState(IState state); - + public IValidationResult validate(String value, String target, String parameter, IParameter stateParameter, + String[] actionParamValues); + // TODO merge stateParameter and actionParamValues parameters + } + diff --git a/hdiv-core/src/main/java/org/hdiv/filter/ValidatorHelperRequest.java b/hdiv-core/src/main/java/org/hdiv/filter/ValidatorHelperRequest.java index 809a5e41..b9f0c607 100644 --- a/hdiv-core/src/main/java/org/hdiv/filter/ValidatorHelperRequest.java +++ b/hdiv-core/src/main/java/org/hdiv/filter/ValidatorHelperRequest.java @@ -18,6 +18,7 @@ import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.ArrayList; +import java.util.Arrays; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; @@ -37,7 +38,6 @@ import org.hdiv.config.HDIVConfig; import org.hdiv.dataComposer.DataComposerFactory; import org.hdiv.dataComposer.IDataComposer; -import org.hdiv.dataValidator.DataValidatorFactory; import org.hdiv.dataValidator.IDataValidator; import org.hdiv.dataValidator.IValidationResult; import org.hdiv.exception.HDIVException; @@ -47,6 +47,8 @@ import org.hdiv.state.IParameter; import org.hdiv.state.IState; import org.hdiv.state.StateUtil; +import org.hdiv.urlProcessor.BasicUrlData; +import org.hdiv.urlProcessor.BasicUrlProcessor; import org.hdiv.util.Constants; import org.hdiv.util.HDIVErrorCodes; import org.hdiv.util.HDIVUtil; @@ -80,27 +82,32 @@ public class ValidatorHelperRequest implements IValidationHelper { /** * Utility methods for state */ - private StateUtil stateUtil; + protected StateUtil stateUtil; /** * State that represents all the data of a request or a form existing in a page <code>page</code> */ - private ISession session; + protected ISession session; /** * IDataValidator factory */ - private DataValidatorFactory dataValidatorFactory; + protected IDataValidator dataValidator; /** * {@link IDataComposer} factory */ - private DataComposerFactory 
dataComposerFactory; + protected DataComposerFactory dataComposerFactory; /** * Compiled numeric <code>Pattern</code> */ - private Pattern numberPattern = Pattern.compile("[0-9]+"); + protected Pattern numberPattern = Pattern.compile("[0-9]+"); + + /** + * URL String processor. + */ + protected BasicUrlProcessor urlProcessor; /** * Initialization of the objects needed for the validation process. @@ -139,8 +146,8 @@ public void init() { public ValidatorHelperResult validate(HttpServletRequest request) { String target = this.getTarget(request); - target = this.decodeUrl(target); target = this.getTargetWithoutContextPath(request, target); + target = decodeUrl(target); // Hook before the validation ValidatorHelperResult result = this.preValidate(request, target); @@ -205,6 +212,10 @@ public ValidatorHelperResult validate(HttpServletRequest request) { // Hdiv parameter name String hdivParameter = this.getHdivParameter(request); + // Extract url params from State + BasicUrlData urlData = this.urlProcessor.processUrl(request, "?" 
+ state.getParams()); + Map<String, String[]> stateParams = urlData.getUrlParams(); + Map<String, String[]> unauthorizedEditableParameters = new HashMap<String, String[]>(); Enumeration<?> parameters = request.getParameterNames(); while (parameters.hasMoreElements()) { @@ -212,8 +223,8 @@ public ValidatorHelperResult validate(HttpServletRequest request) { String parameter = (String) parameters.nextElement(); // Validate parameter - result = this.validateParameter(request, state, unauthorizedEditableParameters, hdivParameter, target, - parameter); + result = this.validateParameter(request, state.getParameter(parameter), stateParams.get(parameter), + unauthorizedEditableParameters, hdivParameter, target, parameter); if (!result.isValid()) { return result; } @@ -233,7 +244,7 @@ public ValidatorHelperResult validate(HttpServletRequest request) { * * @param url * url to decode - * @return decoded url + * @return decoder url */ protected String decodeUrl(String url) { try { @@ -267,9 +278,11 @@ protected ValidatorHelperResult isTheSameAction(HttpServletRequest request, Stri } } - if (log.isDebugEnabled()) { - log.debug("target:" + target); - log.debug("state action:" + state.getAction()); + if (state.getAction().contains(" ")) { + String action = state.getAction().replace(" ", "%20"); + if (action.equalsIgnoreCase(target)) { + return ValidatorHelperResult.VALID; + } } this.logger.log(HDIVErrorCodes.ACTION_ERROR, target, null, null); @@ -491,8 +504,10 @@ private ValidatorHelperResult allRequiredParametersReceived(HttpServletRequest r * * @param request * HttpServletRequest to validate - * @param state - * IState The restored state for this url + * @param stateParameter + * IParameter The restored state for this url + * @param actionParamValues + * actio params values * @param unauthorizedEditableParameters * Editable parameters with errors * @param hdivParameter @@ -504,8 +519,9 @@ private ValidatorHelperResult allRequiredParametersReceived(HttpServletRequest r * @return 
Valid if parameter has not errors * @since HDIV 2.1.5 */ - protected ValidatorHelperResult validateParameter(HttpServletRequest request, IState state, - Map<String, String[]> unauthorizedEditableParameters, String hdivParameter, String target, String parameter) { + protected ValidatorHelperResult validateParameter(HttpServletRequest request, IParameter stateParameter, + String[] actionParamValues, Map<String, String[]> unauthorizedEditableParameters, String hdivParameter, + String target, String parameter) { // Check if the HDIV validation must be applied to the parameter if (!this.hdivConfig.needValidation(parameter, hdivParameter)) { @@ -521,8 +537,7 @@ protected ValidatorHelperResult validateParameter(HttpServletRequest request, IS return ValidatorHelperResult.VALIDATION_NOT_REQUIRED; } - IParameter stateParameter = state.getParameter(parameter); - if (stateParameter == null) { + if (stateParameter == null && actionParamValues == null) { // If the parameter is not defined in the state, it is an error. // With this verification we guarantee that no extra parameters are added. 
@@ -540,7 +555,7 @@ protected ValidatorHelperResult validateParameter(HttpServletRequest request, IS String[] values = request.getParameterValues(parameter); // Check if the parameter is editable - if (stateParameter.isEditable()) { + if (stateParameter != null && stateParameter.isEditable()) { // Mark parameter as editable this.addEditableParameter(request, parameter); @@ -553,8 +568,8 @@ protected ValidatorHelperResult validateParameter(HttpServletRequest request, IS } try { - ValidatorHelperResult result = this.validateParameterValues(request, target, state, stateParameter, - parameter, values); + ValidatorHelperResult result = this.validateParameterValues(request, target, stateParameter, + actionParamValues, parameter, values); return result; } catch (HDIVException e) { String errorMessage = HDIVUtil.getMessage("validation.error", e.getMessage()); @@ -566,15 +581,15 @@ protected ValidatorHelperResult validateParameter(HttpServletRequest request, IS * Checks if the parameter <code>parameter</code> is defined by the user as a no required validation parameter for * the action <code>this.target</code>. * - * @param targetWithoutContextPath - * target with the ContextPath stripped + * @param target + * target * @param parameter * parameter name * @return True If it is parameter that needs no validation. False otherwise. */ - private boolean isUserDefinedNonValidationParameter(String targetWithoutContextPath, String parameter) { + private boolean isUserDefinedNonValidationParameter(String target, String parameter) { - if (this.hdivConfig.isParameterWithoutValidation(targetWithoutContextPath, parameter)) { + if (this.hdivConfig.isParameterWithoutValidation(target, parameter)) { if (log.isDebugEnabled()) { log.debug("parameter " + parameter + " doesn't need validation. It is user defined parameter."); @@ -704,8 +719,8 @@ protected boolean validateHDIVSuffix(String value) { * @throws HDIVException * if there is an error in parameter validation process. 
*/ - private ValidatorHelperResult validateParameterValues(HttpServletRequest request, String target, IState state, - IParameter stateParameter, String parameter, String[] values) { + private ValidatorHelperResult validateParameterValues(HttpServletRequest request, String target, + IParameter stateParameter, String[] actionParamValues, String parameter, String[] values) { try { // Only for required parameters must be checked if the number of received @@ -713,19 +728,25 @@ private ValidatorHelperResult validateParameterValues(HttpServletRequest request // taken into account, this verification will be done for every parameter, // including for example, a multiple combo where hardly ever are all its // values received. - if (stateParameter.isActionParam()) { + if (actionParamValues != null) { - if (values.length != stateParameter.getValues().size()) { + if (values.length != actionParamValues.length) { - String valueMessage = (values.length > stateParameter.getValues().size()) ? "extra value" + String valueMessage = (values.length > actionParamValues.length) ? "extra value" : "more values expected"; this.logger.log(HDIVErrorCodes.VALUE_LENGTH_INCORRECT, target, parameter, valueMessage); return new ValidatorHelperResult(HDIVErrorCodes.VALUE_LENGTH_INCORRECT); } } - ValidatorHelperResult result = this.hasRepeatedOrInvalidValues(target, parameter, values, - stateParameter.getValues()); + List<String> stateParamValues = null; + if (stateParameter != null) { + stateParamValues = stateParameter.getValues(); + } else { + stateParamValues = Arrays.asList(actionParamValues); + } + + ValidatorHelperResult result = this.hasRepeatedOrInvalidValues(target, parameter, values, stateParamValues); if (!result.isValid()) { return result; } @@ -733,7 +754,8 @@ private ValidatorHelperResult validateParameterValues(HttpServletRequest request // At this point, we know that the number of received values is the same // as the number of values sent to the client. 
Now we have to check if // the received values are all tha ones stored in the state. - return this.validateReceivedValuesInState(request, target, state, parameter, values); + return this.validateReceivedValuesInState(request, target, stateParameter, actionParamValues, parameter, + values); } catch (HDIVException e) { String errorMessage = HDIVUtil.getMessage("validation.error", e.getMessage()); @@ -898,17 +920,15 @@ private boolean isInRange(String target, String parameter, String value, List<St * @return True If the <code>values</code> validation is correct. False otherwise. */ private ValidatorHelperResult validateReceivedValuesInState(HttpServletRequest request, String target, - IState state, String parameter, String[] values) { + IParameter stateParameter, String[] actionParamValues, String parameter, String[] values) { int size = values.length; String[] originalValues = new String[size]; - IDataValidator dataValidator = this.dataValidatorFactory.newInstance(state); - - IValidationResult result = null; for (int i = 0; i < size; i++) { - result = dataValidator.validate(values[i], target, parameter); + IValidationResult result = this.dataValidator.validate(values[i], target, parameter, stateParameter, + actionParamValues); if (!result.getLegal()) { this.logger.log(HDIVErrorCodes.PARAMETER_VALUE_INCORRECT, target, parameter, values[i]); @@ -1123,11 +1143,11 @@ public void setSession(ISession session) { } /** - * @param dataValidatorFactory - * the dataValidatorFactory to set + * @param dataValidator + * the dataValidator to set */ - public void setDataValidatorFactory(DataValidatorFactory dataValidatorFactory) { - this.dataValidatorFactory = dataValidatorFactory; + public void setDataValidator(IDataValidator dataValidator) { + this.dataValidator = dataValidator; } /** @@ -1147,17 +1167,11 @@ public void setDataComposerFactory(DataComposerFactory dataComposerFactory) { } /** - * @return the hdivConfig - */ - protected HDIVConfig getHdivConfig() { - return 
hdivConfig; - } - - /** - * @return the logger + * @param urlProcessor + * the urlProcessor to set */ - protected Logger getLogger() { - return logger; + public void setUrlProcessor(BasicUrlProcessor urlProcessor) { + this.urlProcessor = urlProcessor; } -} \ No newline at end of file +} diff --git a/hdiv-core/src/main/java/org/hdiv/idGenerator/PageIdGenerator.java b/hdiv-core/src/main/java/org/hdiv/idGenerator/PageIdGenerator.java index 9ff63e48..6ac54c98 100644 --- a/hdiv-core/src/main/java/org/hdiv/idGenerator/PageIdGenerator.java +++ b/hdiv-core/src/main/java/org/hdiv/idGenerator/PageIdGenerator.java @@ -26,10 +26,15 @@ public interface PageIdGenerator extends Serializable { /** + * <p> * Create a new page id. + * </p> + * <p> + * The identifier must be an integer greater than 0. + * </p> * * @return new id */ - String getNextPageId(); + int getNextPageId(); } diff --git a/hdiv-core/src/main/java/org/hdiv/idGenerator/SequentialPageIdGenerator.java b/hdiv-core/src/main/java/org/hdiv/idGenerator/SequentialPageIdGenerator.java index b68bd949..54222e12 100644 --- a/hdiv-core/src/main/java/org/hdiv/idGenerator/SequentialPageIdGenerator.java +++ b/hdiv-core/src/main/java/org/hdiv/idGenerator/SequentialPageIdGenerator.java @@ -30,7 +30,7 @@ public class SequentialPageIdGenerator implements PageIdGenerator { /** * Sequence number */ - private long id; + private int id; /** * Constructor that initializes the sequence number in a non-constant value. @@ -44,19 +44,18 @@ public SequentialPageIdGenerator() { * * @see org.hdiv.util.PageIdGenerator#getNextPageId() */ - public synchronized String getNextPageId() { + public synchronized int getNextPageId() { this.id = this.id + 1; - return Long.toString(this.id); + return this.id; } /** - * Generate the initial number of sequencer, which is based on a random value - * between 1 and 20. + * Generate the initial number of sequencer, which is based on a random value between 1 and 20. 
* * @return valor sequencer initial value */ - protected long generateInitialPageId() { + protected int generateInitialPageId() { Random r = new Random(); int i = r.nextInt(20); diff --git a/hdiv-core/src/main/java/org/hdiv/listener/InitListener.java b/hdiv-core/src/main/java/org/hdiv/listener/InitListener.java index 0bcfa99d..fb368ddd 100644 --- a/hdiv-core/src/main/java/org/hdiv/listener/InitListener.java +++ b/hdiv-core/src/main/java/org/hdiv/listener/InitListener.java @@ -29,7 +29,6 @@ import org.hdiv.cipher.Key; import org.hdiv.config.HDIVConfig; import org.hdiv.idGenerator.PageIdGenerator; -import org.hdiv.session.ISession; import org.hdiv.urlProcessor.FormUrlProcessor; import org.hdiv.urlProcessor.LinkUrlProcessor; import org.hdiv.util.Constants; diff --git a/hdiv-core/src/main/java/org/hdiv/session/ISession.java b/hdiv-core/src/main/java/org/hdiv/session/ISession.java index c55846c1..367a9869 100644 --- a/hdiv-core/src/main/java/org/hdiv/session/ISession.java +++ b/hdiv-core/src/main/java/org/hdiv/session/ISession.java @@ -71,7 +71,7 @@ public interface ISession { * * @return Returns the pageId. */ - public String getPageId(); + public int getPageId(); /** * Returns the page with id <code>pageId</code>. diff --git a/hdiv-core/src/main/java/org/hdiv/session/SessionHDIV.java b/hdiv-core/src/main/java/org/hdiv/session/SessionHDIV.java index a010232b..900531d0 100644 --- a/hdiv-core/src/main/java/org/hdiv/session/SessionHDIV.java +++ b/hdiv-core/src/main/java/org/hdiv/session/SessionHDIV.java @@ -72,7 +72,7 @@ public class SessionHDIV implements ISession, BeanFactoryAware { * * @return Returns the pageId. 
*/ - public String getPageId() { + public int getPageId() { HttpSession session = this.getHttpSession(); @@ -81,7 +81,13 @@ public String getPageId() { throw new HDIVException("session.nopageidgenerator"); } - String id = pageIdGenerator.getNextPageId(); + int id = pageIdGenerator.getNextPageId(); + + // PageId must be greater than 0 + if(id <= 0){ + throw new HDIVException("Incorrect PageId generated [" + id + "]. PageId must be greater than 0."); + } + session.setAttribute(this.pageIdGeneratorName, pageIdGenerator); return id; @@ -122,7 +128,6 @@ public void addPage(String pageId, IPage page) { IStateCache cache = this.getStateCache(session); - page.setName(pageId); String removedPageId = cache.addPage(pageId); // if it returns a page identifier it is because the cache has reached @@ -250,7 +255,7 @@ protected void addPageToSession(HttpSession session, IPage page) { session.setAttribute(page.getName(), page); if (log.isDebugEnabled()) { - log.debug("Added new page with id:" + page.getName()); + log.debug("Added new page with id:" + page.getId()); } } diff --git a/hdiv-core/src/main/java/org/hdiv/state/IPage.java b/hdiv-core/src/main/java/org/hdiv/state/IPage.java index a1c44eea..1013d1ff 100644 --- a/hdiv-core/src/main/java/org/hdiv/state/IPage.java +++ b/hdiv-core/src/main/java/org/hdiv/state/IPage.java @@ -67,12 +67,17 @@ public interface IPage { * @return Returns the page name. */ public String getName(); + + /** + * @return Returns the page id. + */ + public int getId(); /** - * @param name - * The page name to set. + * @param id + * The page id to set. */ - public void setName(String name); + public void setId(int id); /** * @return Returns the page states. 
diff --git a/hdiv-core/src/main/java/org/hdiv/state/IState.java b/hdiv-core/src/main/java/org/hdiv/state/IState.java index 204fba59..a5f223ab 100644 --- a/hdiv-core/src/main/java/org/hdiv/state/IState.java +++ b/hdiv-core/src/main/java/org/hdiv/state/IState.java @@ -60,12 +60,12 @@ public interface IState { /** * @return Returns the page identifier which the state <code>this</code> belongs to. */ - public String getPageId(); + public int getPageId(); /** * @param pageId The pageId to set. */ - public void setPageId(String pageId); + public void setPageId(int pageId); /** * Checks if exists a parameter with the given identifier <code>key</code>. @@ -81,4 +81,8 @@ public interface IState { */ public List<String> getRequiredParams(); + public String getParams(); + + public void setParams(String params); + } diff --git a/hdiv-core/src/main/java/org/hdiv/state/Page.java b/hdiv-core/src/main/java/org/hdiv/state/Page.java index 0a621cd6..f21277df 100644 --- a/hdiv-core/src/main/java/org/hdiv/state/Page.java +++ b/hdiv-core/src/main/java/org/hdiv/state/Page.java @@ -16,8 +16,10 @@ package org.hdiv.state; import java.io.Serializable; +import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; /** @@ -34,14 +36,16 @@ public class Page implements IPage, Serializable { private static final long serialVersionUID = -5701140762067196143L; /** - * Map with the states of the page <code>this</code>. + * List with the states of the page <code>this</code>. */ - protected Map<Integer, Object> states = new HashMap<Integer, Object>(); + protected List<IState> states = new ArrayList<IState>(); + + protected Map<Integer, String> hashStates; /** * Page <code>this</code> identifier. */ - private String name; + private int id; /** * Unique id of flow @@ -55,6 +59,11 @@ public class Page implements IPage, Serializable { */ private String randomToken; + /** + * Page size. 
+ */ + private long size; + /** * Adds a new state to the page <code>this</code>. * @@ -62,7 +71,19 @@ public class Page implements IPage, Serializable { * State that represents all the data that composes a possible request. */ public void addState(IState state) { - this.states.put(state.getId(), state); + int id = state.getId(); + if (this.states.size() < id) { + // There are empty positions before id + this.states.add(id - 1, null); + this.states.add(id, state); + + } else if (this.states.size() > id) { + this.states.set(id, state); + + } else { + // list size == id + this.states.add(id, state); + } } /** @@ -74,28 +95,31 @@ public void addState(IState state) { * Hash of a state that represents all the data that composes a possible request. */ public void addState(int id, String stateHash) { - this.states.put(id, stateHash); + if (this.hashStates == null) { + this.hashStates = new HashMap<Integer, String>(); + } + this.hashStates.put(id, stateHash); } /** - * Checks if exists a state with the given identifier <code>key</code>. + * Checks if exists a state with the given identifier <code>id</code>. * * @param id * State identifier */ public boolean existState(int id) { - return this.states.containsKey(id); + return this.states.get(id) != null; } /** - * Returns the state with the given identifier <code>key</code> from the map of states + * Returns the state with the given identifier <code>id</code> from the map of states * * @param id * State identifier - * @return IState State with the identifier <code>key</code>. + * @return IState State with the identifier <code>id</code>. */ public IState getState(int id) { - return (IState) this.states.get(id); + return this.states.get(id); } /** @@ -106,36 +130,50 @@ public IState getState(int id) { * @return String hash with the identifier <code>key</code>. 
*/ public String getStateHash(int key) { - return (String) this.states.get(key); + if (this.hashStates == null) { + return null; + } + return this.hashStates.get(key); } /** * @return Returns the page name. */ public String getName() { - return name; + return this.id + ""; + } + + /** + * @return Returns the page id. + */ + public int getId() { + return this.id; } /** - * @param name - * The page name to set. + * @param id + * The page id to set. */ - public void setName(String name) { - this.name = name; + public void setId(int id) { + this.id = id; } /** * @return Returns the page states. */ public Collection<Object> getStates() { - return states.values(); + return (List) states; } /** - * @return Returns the page states. + * @return Returns number of states. */ public int getStatesCount() { - return states.size(); + int count = states.size(); + if (hashStates != null) { + count = count + hashStates.size(); + } + return count; } /** @@ -172,12 +210,27 @@ public void setRandomToken(String randomToken) { this.randomToken = randomToken; } + /** + * @return the size + */ + public long getSize() { + return size; + } + + /** + * @param size + * the size to set + */ + public void setSize(long size) { + this.size = size; + } + public String toString() { StringBuffer result = new StringBuffer(); - result.append("Page:" + this.name + " "); + result.append("Page:" + this.id + " "); - for (Object state : states.values()) { + for (IState state : states) { result.append(" " + state.toString()); } diff --git a/hdiv-core/src/main/java/org/hdiv/state/Parameter.java b/hdiv-core/src/main/java/org/hdiv/state/Parameter.java index 472bae3f..d8330361 100644 --- a/hdiv-core/src/main/java/org/hdiv/state/Parameter.java +++ b/hdiv-core/src/main/java/org/hdiv/state/Parameter.java @@ -17,66 +17,73 @@ import java.io.Serializable; import java.util.ArrayList; -import java.util.Arrays; +import java.util.Collections; import java.util.List; /** - * Data struture to store all the values of a 
parameter + * Data structure to store all the values of a parameter * * @author Roberto Velasco */ public class Parameter implements IParameter, Serializable { /** - * Universal version identifier. Deserialization uses this number to ensure that - * a loaded class corresponds exactly to a serialized object. + * Universal version identifier. Deserialization uses this number to ensure that a loaded class corresponds exactly + * to a serialized object. */ private static final long serialVersionUID = 1390866699507616631L; + private static final int VALUES_LIST_SIZE = 3; + + public Parameter(String name, String[] values, boolean editable, String editableDataType, boolean actionParam) { + + this(name, values[0], editable, editableDataType, actionParam); + + if (values.length > 1) { + for (int i = 1; i < values.length; i++) { + this.addValue(values[i]); + } + } + } + public Parameter(String name, String value, boolean editable, String editableDataType, boolean actionParam) { this.name = name; - this.addValue(value); + this.value = value; this.editable = editable; this.editableDataType = editableDataType; this.actionParam = actionParam; } - + /** * parameter name */ private String name; + private String value; + /** * List of values for parameter <code>this</code> */ - private List<String> values = new ArrayList<String>(); + private List<String> values; /** * Indicates if the parameter <code>this</code> is editable or not. * <p> - * A parameter is editable when the user can modify the value or values returned - * by the server, and it is noneditable when the data returned by the server - * cannot be modified by the user under no circumstance. + * A parameter is editable when the user can modify the value or values returned by the server, and it is + * noneditable when the data returned by the server cannot be modified by the user under no circumstance. * </p> */ private boolean editable; - + /** * Parameter type. Only for editable parameters. 
*/ private String editableDataType; /** - * Counter to be able to change real values for relative ones. Used to guarantee - * confidentiality - */ - private int count = 0; - - /** - * Indicates if <code>this</code> is a parameter added in the action atribute - * of a link or form. If it is <code>actionParam</code> ALL the values of this - * parameter must arrived within the request. If not, it means that the user has - * modified the request on purpose. + * Indicates if <code>this</code> is a parameter added in the action atribute of a link or form. If it is + * <code>actionParam</code> ALL the values of this parameter must arrived within the request. If not, it means that + * the user has modified the request on purpose. */ private boolean actionParam; @@ -84,43 +91,49 @@ public Parameter(String name, String value, boolean editable, String editableDat * Adds the value <code>value</code> to the parameter <code>this</code>. */ public void addValue(String value) { + if (this.values == null) { + this.values = new ArrayList<String>(VALUES_LIST_SIZE); + this.values.add(this.value); + } + this.values.add(value); - count++; } /** * Checks if parameter has <code>value</code>. * - * @return True if <code>value</code> exists in the array of values - * <code>values</code>. False otherwise. + * @return True if <code>value</code> exists in the array of values <code>values</code>. False otherwise. */ public boolean existValue(String value) { + if (this.values == null) { + return this.value.equalsIgnoreCase(value); + } - for (String val: this.values) { - if (val.equalsIgnoreCase(value)) { + for (int i = 0; i < this.values.size(); i++) { + String tempValue = values.get(i); + if (tempValue.equalsIgnoreCase(value)) { return true; } } + return false; } /** - * Checks if the position <code>position</code> exists in the array of values - * <code>values</code>. - * - * @return True if <code>position</code> is valid position in the array of values - * <code>values</code>. 
False otherwise. + * Checks if the position <code>position</code> exists in the array of values <code>values</code>. + * + * @return True if <code>position</code> is valid position in the array of values <code>values</code>. False + * otherwise. */ public boolean existPosition(int position) { - return (position < values.size()); + return (position == 0) || (values != null && position < values.size()); } /** - * @return Obtains the value of the position <code>position</code> in the list - * of values of the parameter. + * @return Obtains the value of the position <code>position</code> in the list of values of the parameter. */ public String getValuePosition(int position) { - return this.values.get(position); + return (position == 0 ? this.value : this.values.get(position)); } /** @@ -130,73 +143,48 @@ public String getName() { return name; } - /** - * @param name The parameter name to set. - */ - public void setName(String name) { - this.name = name; - } - /** * @return Returns the values of parameter <code>this</code>. */ public List<String> getValues() { - return this.values; + return (this.values == null ? Collections.singletonList(value) : this.values); } /** - * @param values The values to set. - */ - public void setValues(List<String> values) { - this.values = values; - } - - /** - * @return Returns if parameter <code>this</code> is editable or not. + * @return Returns if parameter <code>this</code> is editbale or not. */ public boolean isEditable() { return editable; } /** - * @param editable The editable to set. + * @return Returns confidential value. */ - public void setEditable(boolean editable) { - this.editable = editable; - } - public String getConfidentialValue() { - if (count == 0) { + if (this.values == null) { return "0"; } - return (count-1) +""; - } - /** - * @param count The count to set. 
- */ - public void setCount(int count) { - this.count = count; + return Integer.toString(this.values.size() - 1); } /** - * Indicates if the parameter has been added to the action attribute of a link or - * form. + * Indicates if the parameter has been added to the action attribute of a link or form. * - * @return True if the parameter has been added to the action attribute of a link - * or form. False in otherwise. + * @return True if the parameter has been added to the action attribute of a link or form. False in otherwise. */ public boolean isActionParam() { return actionParam; } /** - * @param actionParam The actionParam to set. + * @param actionParam + * The actionParam to set. */ public void setActionParam(boolean actionParam) { this.actionParam = actionParam; } - + /** * @return Returns the editable data type. */ @@ -204,25 +192,54 @@ public String getEditableDataType() { return editableDataType; } - /** - * @param editableDataType The editable data type to set. - */ - public void setEditableDataType(String editableDataType) { - this.editableDataType = editableDataType; - } - public String toString() { StringBuffer result = new StringBuffer(); result.append(" Parameter:" + this.getName() + " Values:"); - for (int i = 0; i < this.values.size(); i++) { - String value = (String) this.values.get(i); - result.append(value); - if (!(i + 1 == this.values.size())) { - result.append(","); + + if (values == null) { + result.append(this.value); + } else { + for (int i = 0; i < this.values.size(); i++) { + String value = this.values.get(i); + result.append(value); + if (!(i + 1 == this.values.size())) { + result.append(","); + } } } + return result.toString(); } + @Override + public boolean equals(Object obj) { + + if (obj instanceof Parameter) { + + IParameter param = (IParameter) obj; + + if (!this.name.equals(param.getName())) { + return false; + } + if (this.actionParam != param.isActionParam()) { + return false; + } + if (this.editable != param.isEditable()) { + 
return false; + } + List<String> values = param.getValues(); + if (values.size() != this.getValues().size()) { + return false; + } + for (String paramValue : values) { + if (!this.existValue(paramValue)) { + return false; + } + } + return true; + } + return false; + } + } diff --git a/hdiv-core/src/main/java/org/hdiv/state/State.java b/hdiv-core/src/main/java/org/hdiv/state/State.java index e268c9ea..40263373 100644 --- a/hdiv-core/src/main/java/org/hdiv/state/State.java +++ b/hdiv-core/src/main/java/org/hdiv/state/State.java @@ -16,35 +16,44 @@ package org.hdiv.state; import java.io.Serializable; +import java.io.UnsupportedEncodingException; import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; +import java.util.Collections; import java.util.List; -import java.util.Map; + +import org.hdiv.exception.HDIVException; +import org.hdiv.util.Constants; /** - * Data structure to store all data related with one request (parameters, parameter - * values, ...) + * Data structure to store all data related with one request (parameters, parameter values, ...) * * @author Roberto Velasco */ public class State implements IState, Serializable { /** - * Universal version identifier. Deserialization uses this number to ensure that - * a loaded class corresponds exactly to a serialized object. + * Universal version identifier. Deserialization uses this number to ensure that a loaded class corresponds exactly + * to a serialized object. 
*/ private static final long serialVersionUID = -5179573248448214135L; + private static final int PARAMETERS_LIST_SIZE = 3; + /** * Name of the action related with the state <code>this</code> - */ + */ private String action; + /** + * State url parameters in UTF-8 + */ + private byte[] params; + /** * Map to store all the parameters in a HTTP (GET or POST) request */ - private Map<String, IParameter> parameters = new HashMap<String, IParameter>(); + private List<IParameter> parameters; /** * State identifier <code>this</code> @@ -54,82 +63,106 @@ public class State implements IState, Serializable { /** * Page identifier which the state <code>this</code> belongs to */ - private String pageId; - + private int pageId; + /** - * List with the required parameters to be able to do a correct request with state - * <code>this</code>. We consider required parameters all of the parameters - * that can be sent via GET or those that are added to the name of an action. + * Flag to initialize the lists */ - private List<String> requiredParams = new ArrayList<String>(); + private boolean parametersInitialized = false; + + /** + * Map with the required parameters to be able to do a correct request with state <code>this</code>. We consider + * required parameters all of the parameters that can be sent via GET or those that are added to the name of an + * action. + */ + private List<String> requiredParams; public State(int id) { this.id = id; } + public List<IParameter> getParameters() { + return this.parameters; + } + /** - * Adds a new parameter to the state <code>this</code>. If it is a required parameter - * <code>parameter</code>, it is also added to the required parameters map. - * - * @param parameter The parameter + * Adds a new parameter to the state <code>this</code>. If it is a required parameter <code>parameter</code>, it is + * also added to the required parameters map. 
+ * + * @param parameter + * The parameter */ public void addParameter(IParameter parameter) { - - String paramName = parameter.getName(); - + if (!parametersInitialized) { + parametersInitialized = true; + this.parameters = new ArrayList<IParameter>(PARAMETERS_LIST_SIZE); + this.requiredParams = new ArrayList<String>(PARAMETERS_LIST_SIZE); + } + if (parameter.isActionParam()) { - this.requiredParams.add(paramName); + this.requiredParams.add(parameter.getName()); } - this.parameters.put(paramName, parameter); - } - /** - * Checks if exists a parameter with the given identifier <code>key</code>. - * - * @param key parameter identifier - * @return True if exists a parameter with this identifier <code>key</code>. False - * otherwise. - */ - public boolean existParameter(String key) { - return this.parameters.containsKey(key); + this.parameters.add(parameter); } /** - * Returns the parameter that matches the given identifier <code>key</code>. - * - * @param key parameter identifier + * Returns the parameter that matches the given identifier <code>key</code>. Null is returned if the parameter name + * is not found. + * + * @param key + * parameter identifier * @return IParameter object that matches the given identifier <code>key</code>. */ public IParameter getParameter(String key) { - return (IParameter) this.parameters.get(key); + if (parameters != null) { + for (IParameter parameter : parameters) { + if (parameter.getName().equalsIgnoreCase(key)) { + return parameter; + } + } + } + + return null; } /** * @return Returns the action asociated to state <code>this</code>. */ public String getAction() { - return action; + return this.action; } /** - * @param action The action to set. + * @param action + * The action to set. */ public void setAction(String action) { this.action = action; } - /** - * @return Returns the parameters asociated to state <code>this</code>. 
- */ - public Collection<IParameter> getParameters() { - return parameters.values(); + public String getParams() { + if (this.params == null) { + return null; + } + + try { + return new String(params, Constants.ENCODING_UTF_8); + } catch (UnsupportedEncodingException e) { + throw new HDIVException("Error converting parameters to String", e); + } } - /** - * @param parameters The parameters to set. - */ - public void setParameters(Map<String, IParameter> parameters) { - this.parameters = parameters; + public void setParams(String params) { + try { + if (params != null) { + this.params = params.getBytes(Constants.ENCODING_UTF_8); + } else { + this.params = null; + } + } catch (UnsupportedEncodingException e) { + throw new HDIVException("Error converting action to byte array", e); + } } /** @@ -139,24 +172,15 @@ public int getId() { return id; } - /** - * @return Returns the page identifier which the state <code>this</code> belongs to. - */ - public String getPageId() { - return pageId; - } - - /** - * @param pageId The pageId to set. - */ - public void setPageId(String pageId) { - this.pageId = pageId; - } - /** * @return Returns required parameters map. 
*/ + @SuppressWarnings("unchecked") public List<String> getRequiredParams() { + if (!parametersInitialized) { + return Collections.EMPTY_LIST; + } + return requiredParams; } @@ -166,8 +190,63 @@ public String toString() { sb.append("action: ").append(this.action); sb.append("parameters: ").append(this.parameters); sb.append("requiredParams: ").append(this.requiredParams); - sb.append("pageId: ").append(this.pageId); return super.toString(); } + @Override + public boolean equals(Object obj) { + + if (obj instanceof IState) { + + IState state = (IState) obj; + + // Same action + if (!(this.getAction().equals(state.getAction()))) { + return false; + } + + // Same Parameters + Collection<IParameter> otherParams = state.getParameters(); + if (otherParams != null && this.parameters != null) { + if (otherParams.size() != this.parameters.size()) { + return false; + } + for (IParameter param : this.parameters) { + + if (!otherParams.contains(param)) { + return false; + } + } + } + + // Same required Parameters + List<String> otherRequiredParams = state.getRequiredParams(); + if (otherRequiredParams != null && this.requiredParams != null) { + if (otherRequiredParams.size() != this.requiredParams.size()) { + return false; + } + for (String requiredParam : this.requiredParams) { + if (!otherRequiredParams.contains(requiredParam)) { + return false; + } + } + } + + return true; + } + return false; + } + + public int getPageId() { + return this.pageId; + } + + public void setPageId(int pageId) { + this.pageId = pageId; + } + + public boolean existParameter(String key) { + throw new UnsupportedOperationException(); + } + } diff --git a/hdiv-core/src/main/java/org/hdiv/state/StateUtil.java b/hdiv-core/src/main/java/org/hdiv/state/StateUtil.java index 50ab0ca3..75fd493d 100644 --- a/hdiv-core/src/main/java/org/hdiv/state/StateUtil.java +++ b/hdiv-core/src/main/java/org/hdiv/state/StateUtil.java @@ -211,7 +211,7 @@ protected IState restoreHashState(String value) { String 
restoredStateHash = this.encodingUtil.calculateStateHash(value); IState decodedState = (IState) encodingUtil.decode64(value); - String sessionStateHash = this.session.getStateHash(decodedState.getPageId(), decodedState.getId()); + String sessionStateHash = this.session.getStateHash(decodedState.getPageId() + "", decodedState.getId()); if (restoredStateHash.equals(sessionStateHash)) { return decodedState; diff --git a/hdiv-core/src/main/java/org/hdiv/urlProcessor/AbstractUrlProcessor.java b/hdiv-core/src/main/java/org/hdiv/urlProcessor/AbstractUrlProcessor.java index 0087c8f6..cdd8eaa7 100644 --- a/hdiv-core/src/main/java/org/hdiv/urlProcessor/AbstractUrlProcessor.java +++ b/hdiv-core/src/main/java/org/hdiv/urlProcessor/AbstractUrlProcessor.java @@ -79,8 +79,8 @@ public UrlData createUrlData(String url, String method, HttpServletRequest reque int paramInit = url.indexOf("?"); if (paramInit > -1) { String urlParams = url.substring(paramInit + 1); - Map<String, String[]> ulrParamsMap = this.getUrlParamsAsMap(request, urlParams); - urlData.setOriginalUrlParams(ulrParamsMap); + urlData.setUrlParams(urlParams); + url = url.substring(0, paramInit); } @@ -227,34 +227,15 @@ public String getProcessedUrlWithHdivState(HttpServletRequest request, UrlData u */ protected String getParamsQueryString(UrlData urlData) { - Map<String, String[]> params = null; - if (urlData.getProcessedUrlParams() != null) { - params = urlData.getProcessedUrlParams(); - } else { - params = urlData.getOriginalUrlParams(); - } - - if (params == null || params.size() == 0) { - return ""; - } + UrlData eeUrlData = (UrlData) urlData; - StringBuffer sb = new StringBuffer(); + String queryString = ""; - String separator = "?"; - - for (String key : params.keySet()) { - String[] values = (String[]) params.get(key); - - for (int i = 0; i < values.length; i++) { - String value = values[i]; - sb.append(separator).append(key).append("=").append(value); - if (separator.equals("?")) { - separator = "&"; - } - } + 
if (eeUrlData.getUrlParams() != null) { + queryString = "?" + eeUrlData.getUrlParams(); } - return sb.toString(); + return queryString; } /** diff --git a/hdiv-core/src/main/java/org/hdiv/urlProcessor/BasicUrlData.java b/hdiv-core/src/main/java/org/hdiv/urlProcessor/BasicUrlData.java new file mode 100644 index 00000000..3f1d63ba --- /dev/null +++ b/hdiv-core/src/main/java/org/hdiv/urlProcessor/BasicUrlData.java @@ -0,0 +1,67 @@ +/** + * Copyright 2005-2013 hdiv.org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.hdiv.urlProcessor; + +import java.util.Map; + +/** + * Contains the data of an url. Comparing to {@link UrlData}, only contains basic data. 
+ * + * @author Gotzon Illarramendi + */ +public class BasicUrlData { + + /** + * Url that starts with contextPath + */ + private String contextPathRelativeUrl; + + /** + * Map with original url parameter name and values + */ + private Map<String, String[]> urlParams; + + /** + * @return the contextPathRelativeUrl + */ + public String getContextPathRelativeUrl() { + return contextPathRelativeUrl; + } + + /** + * @param contextPathRelativeUrl + * the contextPathRelativeUrl to set + */ + public void setContextPathRelativeUrl(String contextPathRelativeUrl) { + this.contextPathRelativeUrl = contextPathRelativeUrl; + } + + /** + * @return the urlParams + */ + public Map<String, String[]> getUrlParams() { + return urlParams; + } + + /** + * @param urlParams + * the urlParams to set + */ + public void setUrlParams(Map<String, String[]> urlParams) { + this.urlParams = urlParams; + } + +} diff --git a/hdiv-core/src/main/java/org/hdiv/urlProcessor/BasicUrlProcessor.java b/hdiv-core/src/main/java/org/hdiv/urlProcessor/BasicUrlProcessor.java new file mode 100644 index 00000000..7586a309 --- /dev/null +++ b/hdiv-core/src/main/java/org/hdiv/urlProcessor/BasicUrlProcessor.java @@ -0,0 +1,73 @@ +/** + * Copyright 2005-2013 hdiv.org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.hdiv.urlProcessor; + +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +import org.hdiv.state.IState; + +/** + * UrlProcessor implementation for {@link IState} restore and URL validation purpose method. + * + * @since 2.1.7 + */ +public class BasicUrlProcessor extends AbstractUrlProcessor { + + /** + * Create {@link UrlData} instance only with the ContextPath relative url and parameters in a Map. + * + * @param url + * original url, must be context relative + * @param request + * {@link HttpServletRequest} object + * @return new instance of {@link BasicUrlData} + */ + public BasicUrlData createBasicUrlData(String url, HttpServletRequest request) { + + BasicUrlData urlData = new BasicUrlData(); + + // Remove parameters + int paramInit = url.indexOf("?"); + if (paramInit > -1) { + String urlParams = url.substring(paramInit + 1); + Map<String, String[]> ulrParamsMap = this.getUrlParamsAsMap(request, urlParams); + urlData.setUrlParams(ulrParamsMap); + url = url.substring(0, paramInit); + } + + urlData.setContextPathRelativeUrl(url); + + return urlData; + } + + /** + * Creates {@link BasicUrlData} instance with contextPath relative URL and parameters processed. 
+ * + * @param request + * {@link HttpServletRequest} object + * @param url + * URL to process + * @return {@link BasicUrlData} instance + */ + public BasicUrlData processUrl(HttpServletRequest request, String url) { + + BasicUrlData urlData = this.createBasicUrlData(url, request); + return urlData; + } + +} diff --git a/hdiv-core/src/main/java/org/hdiv/urlProcessor/FormUrlProcessor.java b/hdiv-core/src/main/java/org/hdiv/urlProcessor/FormUrlProcessor.java index 9649fb09..8b69c995 100644 --- a/hdiv-core/src/main/java/org/hdiv/urlProcessor/FormUrlProcessor.java +++ b/hdiv-core/src/main/java/org/hdiv/urlProcessor/FormUrlProcessor.java @@ -15,8 +15,6 @@ */ package org.hdiv.urlProcessor; -import java.util.Map; - import javax.servlet.http.HttpServletRequest; import org.apache.commons.logging.Log; @@ -88,21 +86,9 @@ public String processUrl(HttpServletRequest request, String url, String method) request.setAttribute(FORM_STATE_ID, stateId); // Process url params - Map<String, String[]> params = urlData.getOriginalUrlParams(); - if (params != null) { - for (String key : params.keySet()) { - String[] values = (String[]) params.get(key); - - for (int i = 0; i < values.length; i++) { - String value = values[i]; - String composedParam = dataComposer.compose(key, value, false, null, true, method, - Constants.ENCODING_UTF_8); - values[i] = composedParam; - } - params.put(key, values); - } - urlData.setProcessedUrlParams(params); - } + String processedParams = dataComposer.composeParams(urlData.getUrlParams(), method, + Constants.ENCODING_UTF_8); + urlData.setUrlParams(processedParams); // Action url with confidential values url = this.getProcessedUrl(urlData); diff --git a/hdiv-core/src/main/java/org/hdiv/urlProcessor/LinkUrlProcessor.java b/hdiv-core/src/main/java/org/hdiv/urlProcessor/LinkUrlProcessor.java index ab342e92..09603949 100644 --- a/hdiv-core/src/main/java/org/hdiv/urlProcessor/LinkUrlProcessor.java +++ 
b/hdiv-core/src/main/java/org/hdiv/urlProcessor/LinkUrlProcessor.java @@ -15,8 +15,6 @@ */ package org.hdiv.urlProcessor; -import java.util.Map; - import javax.servlet.http.HttpServletRequest; import org.apache.commons.logging.Log; @@ -78,20 +76,8 @@ public String processUrl(HttpServletRequest request, String url, String encoding // the url needs protection dataComposer.beginRequest(urlData.getUrlWithoutContextPath()); - Map<String, String[]> params = urlData.getOriginalUrlParams(); - if (params != null) { - for (String key : params.keySet()) { - String[] values = (String[]) params.get(key); - - for (int i = 0; i < values.length; i++) { - String value = values[i]; - String composedParam = dataComposer.compose(key, value, false, false, encoding); - values[i] = composedParam; - } - params.put(key, values); - } - urlData.setProcessedUrlParams(params); - } + String processedParams = dataComposer.composeParams(urlData.getUrlParams(), "GET", encoding); + urlData.setUrlParams(processedParams); // Hdiv state param value String stateParam = dataComposer.endRequest(); diff --git a/hdiv-core/src/main/java/org/hdiv/urlProcessor/UrlData.java b/hdiv-core/src/main/java/org/hdiv/urlProcessor/UrlData.java index 2175b4e8..5425f225 100644 --- a/hdiv-core/src/main/java/org/hdiv/urlProcessor/UrlData.java +++ b/hdiv-core/src/main/java/org/hdiv/urlProcessor/UrlData.java @@ -49,6 +49,13 @@ public class UrlData { */ private String urlWithoutContextPath; + /** + * URL parameters in query string format. For example: param1=val1&param2=val2 + * + * @since 2.1.7 + */ + private String urlParams; + /** * Map with original url parameter name and values */ @@ -103,7 +110,7 @@ public boolean isGetMethod() { * @return has parameters? 
*/ public boolean containsParams() { - return originalUrlParams != null && originalUrlParams.size() > 0; + return (originalUrlParams != null && originalUrlParams.size() > 0) || urlParams != null; } /** @@ -256,4 +263,19 @@ public void setjSessionId(String jSessionId) { this.jSessionId = jSessionId; } + /** + * @return the urlParams + */ + public String getUrlParams() { + return urlParams; + } + + /** + * @param urlParams + * the urlParams to set + */ + public void setUrlParams(String urlParams) { + this.urlParams = urlParams; + } + } diff --git a/hdiv-core/src/test/java/org/hdiv/dataComposer/DataComposerMemoryTest.java b/hdiv-core/src/test/java/org/hdiv/dataComposer/DataComposerMemoryTest.java index aa4dfb87..324c8665 100644 --- a/hdiv-core/src/test/java/org/hdiv/dataComposer/DataComposerMemoryTest.java +++ b/hdiv-core/src/test/java/org/hdiv/dataComposer/DataComposerMemoryTest.java @@ -123,7 +123,7 @@ public void testComposeExistingState() { // New request IState state = this.stateUtil.restoreState(stateId); - IPage page = this.session.getPage(state.getPageId()); + IPage page = this.session.getPage(state.getPageId() + ""); dataComposer = this.dataComposerFactory.newInstance(request); HDIVUtil.setDataComposer(dataComposer, request); diff --git a/hdiv-core/src/test/java/org/hdiv/dataValidator/DataValidatorTest.java b/hdiv-core/src/test/java/org/hdiv/dataValidator/DataValidatorTest.java index cf16d181..a913cff1 100644 --- a/hdiv-core/src/test/java/org/hdiv/dataValidator/DataValidatorTest.java +++ b/hdiv-core/src/test/java/org/hdiv/dataValidator/DataValidatorTest.java @@ -16,11 +16,8 @@ package org.hdiv.dataValidator; import org.hdiv.AbstractHDIVTestCase; -import org.hdiv.dataComposer.IDataComposer; import org.hdiv.state.IParameter; -import org.hdiv.state.IState; import org.hdiv.state.Parameter; -import org.hdiv.state.State; /** * Unit tests for the <code>org.hdiv.dataValidator.DataValidator</code> class. 
@@ -29,13 +26,11 @@ */ public class DataValidatorTest extends AbstractHDIVTestCase { - protected DataValidatorFactory dataValidatorFactory; - - protected IDataComposer composer; + protected IDataValidator dataValidator; protected void onSetUp() throws Exception { - this.dataValidatorFactory = this.getApplicationContext().getBean(DataValidatorFactory.class); + this.dataValidator = this.getApplicationContext().getBean(IDataValidator.class); } /** @@ -44,44 +39,33 @@ protected void onSetUp() throws Exception { */ public void testValidateDataIsNotInt() { - IState state = new State(0); - IDataValidator validator = this.dataValidatorFactory.newInstance(state); - - Parameter param1 = new Parameter("param1", "value1" , false, null, false); - - state.addParameter(param1); + // XXX String editableDataType, boolean actionParam no estaban definidos + IParameter param1 = new Parameter("param1", "value1", false, null, false); - validator.setState(state); - - IValidationResult result = validator.validate("dataIsNotInt", "simpleAction", "param1"); + IValidationResult result = dataValidator.validate("dataIsNotInt", "simpleAction", "param1", param1, null); assertFalse(result.getLegal()); } - /** - * Validation test with a noneditable parameter. It should not pass the validation as the received parameter doesn't - * exists. - */ - public void testValidateParameterDoesNotExist() { - - IState state = new State(0); - IDataValidator validator = this.dataValidatorFactory.newInstance(state); - - Parameter param1 = new Parameter("param1", "value1" , false, null, false); - state.addParameter(param1); - - validator.setState(state); - - boolean confidentiality = this.getConfig().getConfidentiality(); - String value = (confidentiality) ? 
"0" : "value1"; - try { - IValidationResult result = validator.validate(value, "simpleAction", "parameterDoesNotExist"); - assertFalse(result != null); - } catch (NullPointerException e) { - assertTrue(true); - return; - } - assertFalse(true); - } +// /** +// * Validation test with a noneditable parameter. It should not pass the validation as the received parameter doesn't +// * exists. +// */ +// public void testValidateParameterDoesNotExist() { +// +// IParameter param1 = new Parameter("param1", "value1", false, null, false); +// +// boolean confidentiality = this.getConfig().getConfidentiality(); +// String value = (confidentiality) ? "0" : "value1"; +// try { +// IValidationResult result = dataValidator.validate(value, "simpleAction", "parameterDoesNotExist", param1, +// null); +// assertFalse(result != null); +// } catch (NullPointerException e) { +// assertTrue(true); +// return; +// } +// assertFalse(true); +// } /** * Validation test with a noneditable parameter. It should not pass the validation as the received parameter doesn't @@ -89,16 +73,9 @@ public void testValidateParameterDoesNotExist() { */ public void testValidatePositionDoesNotExist() { - IState state = new State(0); - IDataValidator validator = this.dataValidatorFactory.newInstance(state); - - Parameter param1 = new Parameter("param1", "value1" , false, null, false); - - state.addParameter(param1); - - validator.setState(state); + IParameter param1 = new Parameter("param1", "value1", false, null, false); - IValidationResult result = validator.validate("1", "simpleAction", "param1"); + IValidationResult result = dataValidator.validate("1", "simpleAction", "param1", param1, null); assertFalse(result.getLegal()); } @@ -107,21 +84,25 @@ public void testValidatePositionDoesNotExist() { */ public void testValidateCorrectData() { - IState state = new State(0); - IDataValidator validator = this.dataValidatorFactory.newInstance(state); + IParameter param1 = new Parameter("param1", "value1", false, null, 
false); - Parameter param1 = new Parameter("param1", "value1" , false, null, false); + boolean confidentiality = this.getConfig().getConfidentiality(); + String value = (confidentiality) ? "0" : "value1"; + IValidationResult result = dataValidator.validate(value, "simpleAction", "param1", param1, null); - state = new State(0); - state.addParameter(param1); + assertEquals(((String) result.getResult()), "value1"); + assertTrue(result.getLegal()); + } + + public void testValidateActionParams() { - validator.setState(state); + String[] values = new String[]{"value1"}; boolean confidentiality = this.getConfig().getConfidentiality(); String value = (confidentiality) ? "0" : "value1"; - IValidationResult result = validator.validate(value, "simpleAction", "param1"); + IValidationResult result = dataValidator.validate(value, "simpleAction", "param1", null, values); assertEquals(((String) result.getResult()), "value1"); assertTrue(result.getLegal()); } -} +} \ No newline at end of file diff --git a/hdiv-core/src/test/java/org/hdiv/session/SessionTest.java b/hdiv-core/src/test/java/org/hdiv/session/SessionTest.java new file mode 100644 index 00000000..9aca5a9f --- /dev/null +++ b/hdiv-core/src/test/java/org/hdiv/session/SessionTest.java @@ -0,0 +1,97 @@ +/** + * Copyright 2005-2013 hdiv.org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.hdiv.session; + +import org.hdiv.AbstractHDIVTestCase; +import org.hdiv.state.IPage; +import org.hdiv.state.IParameter; +import org.hdiv.state.IState; +import org.hdiv.state.Page; +import org.hdiv.state.Parameter; +import org.hdiv.state.State; + +public class SessionTest extends AbstractHDIVTestCase { + + private ISession session; + + protected void onSetUp() throws Exception { + + this.session = this.getApplicationContext().getBean(ISession.class); + } + + public void testGetPageId() { + + int pageId = session.getPageId(); + + assertTrue(pageId > 0); + } + + public void testAddPage() { + + IPage page = new Page(); + page.setId(20); + + IState state = new State(0); + state.setAction("/action"); + IParameter param = new Parameter("name", "value", false, null, true); + state.addParameter(param); + page.addState(state); + + session.addPage("20", page); + + } + + public void testGetState() { + + IPage page = new Page(); + page.setId(20); + + IState state = new State(0); + state.setAction("/action"); + IParameter param = new Parameter("name", "value", false, null, true); + state.addParameter(param); + page.addState(state); + + session.addPage("20", page); + + // Restore state + IState restored = session.getState("20", 0); + + assertNotNull(restored); + assertEquals(state, restored); + } + + public void testGetPage() { + + IPage page = new Page(); + page.setId(20); + + IState state = new State(0); + state.setAction("/action"); + IParameter param = new Parameter("name", "value", false, null, true); + state.addParameter(param); + page.addState(state); + + session.addPage("20", page); + + // Restore page + IPage restored = session.getPage("20"); + + assertNotNull(restored); + assertEquals(page, restored); + } + +} diff --git a/hdiv-core/src/test/java/org/hdiv/session/StateCacheTest.java b/hdiv-core/src/test/java/org/hdiv/session/StateCacheTest.java index 6b71545e..0bc52293 100644 --- a/hdiv-core/src/test/java/org/hdiv/session/StateCacheTest.java +++ 
b/hdiv-core/src/test/java/org/hdiv/session/StateCacheTest.java @@ -45,19 +45,19 @@ public void testAddPage() { IPage page3 = new Page(); IState state1 = new State(0); - IState state2 = new State(1); - IState state3 = new State(2); + IState state2 = new State(0); + IState state3 = new State(0); page1.addState(state1); - page1.setName("page1"); + page1.setId(1); cache.addPage("1"); page2.addState(state2); - page2.setName("page2"); + page2.setId(2); cache.addPage("2"); page3.addState(state3); - page3.setName("page3"); + page3.setId(3); cache.addPage("3"); log.info("cache:" + cache.toString()); diff --git a/hdiv-core/src/test/java/org/hdiv/urlProcessor/BasicUrlProcessorTest.java b/hdiv-core/src/test/java/org/hdiv/urlProcessor/BasicUrlProcessorTest.java new file mode 100644 index 00000000..de9538a0 --- /dev/null +++ b/hdiv-core/src/test/java/org/hdiv/urlProcessor/BasicUrlProcessorTest.java @@ -0,0 +1,56 @@ +/** + * Copyright 2005-2013 hdiv.org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.hdiv.urlProcessor; + +import javax.servlet.http.HttpServletRequest; + +import org.hdiv.AbstractHDIVTestCase; +import org.hdiv.util.HDIVUtil; + +public class BasicUrlProcessorTest extends AbstractHDIVTestCase { + + private BasicUrlProcessor urlProcessor; + + protected void onSetUp() throws Exception { + this.urlProcessor = this.getApplicationContext().getBean(BasicUrlProcessor.class); + } + + public void testProcessAction() { + + HttpServletRequest request = HDIVUtil.getHttpServletRequest(); + String url = "/testAction.do?par1=val1&par2=val2"; + + BasicUrlData result = this.urlProcessor.processUrl(request, url); + + assertEquals(result.getContextPathRelativeUrl(), "/testAction.do"); + assertEquals(2, result.getUrlParams().size()); + assertEquals("val1", result.getUrlParams().get("par1")[0]); + assertEquals("val2", result.getUrlParams().get("par2")[0]); + } + + public void testOnlyParams() { + + HttpServletRequest request = HDIVUtil.getHttpServletRequest(); + String url = "?par1=val1&par2=val2"; + + BasicUrlData result = this.urlProcessor.processUrl(request, url); + + assertEquals(result.getContextPathRelativeUrl(), ""); + assertEquals(2, result.getUrlParams().size()); + assertEquals("val1", result.getUrlParams().get("par1")[0]); + assertEquals("val2", result.getUrlParams().get("par2")[0]); + } +} diff --git a/hdiv-core/src/test/resources/org/hdiv/config/hdiv-core-applicationContext.xml b/hdiv-core/src/test/resources/org/hdiv/config/hdiv-core-applicationContext.xml index 921b8a3e..7d265518 100644 --- a/hdiv-core/src/test/resources/org/hdiv/config/hdiv-core-applicationContext.xml +++ b/hdiv-core/src/test/resources/org/hdiv/config/hdiv-core-applicationContext.xml @@ -55,18 +55,20 @@ </property> </bean> - <bean id="results" class="org.hdiv.dataValidator.ValidationResult" /> - <bean id="stateUtil" class="org.hdiv.state.StateUtil" init-method="init"> <property name="encodingUtil" ref="encoding" /> <property name="config" ref="config" /> <property 
name="session" ref="sessionHDIV" /> - </bean> - <bean id="dataValidatorFactory" class="org.hdiv.dataValidator.DataValidatorFactory"> - <property name="hdivConfig" ref="config" /> + </bean> + + <!-- DATAVALIDATOR --> + <bean id="results" class="org.hdiv.dataValidator.ValidationResult" /> + + <bean id="dataValidator" class="org.hdiv.dataValidator.DataValidator"> + <property name="config" ref="config" /> </bean> - <!-- DATACOMPOSERS, DATAVALIDATOR --> + <!-- DATACOMPOSERS --> <bean id="dataComposerFactory" class="org.hdiv.dataComposer.DataComposerFactory"> <property name="hdivConfig" ref="config" /> @@ -88,8 +90,9 @@ <property name="stateUtil" ref="stateUtil" /> <property name="hdivConfig" ref="config" /> <property name="session" ref="sessionHDIV" /> - <property name="dataValidatorFactory" ref="dataValidatorFactory" /> + <property name="dataValidator" ref="dataValidator" /> <property name="dataComposerFactory" ref="dataComposerFactory" /> + <property name="urlProcessor" ref="basicUrlProcessor"/> </bean> <bean id="validatorErrorHandler" class="org.hdiv.filter.DefaultValidatorErrorHandler"> @@ -117,6 +120,10 @@ <bean id="formUrlProcessor" class="org.hdiv.urlProcessor.FormUrlProcessor"> <property name="config" ref="config" /> </bean> + + <bean id="basicUrlProcessor" class="org.hdiv.urlProcessor.BasicUrlProcessor"> + <property name="config" ref="config" /> + </bean> <!-- RegExp executor --> <bean id="patternMatcherFactory" class="org.hdiv.regex.PatternMatcherFactory" /> diff --git a/hdiv-jsf/src/main/java/org/hdiv/components/support/OutcomeTargetComponentProcessor.java b/hdiv-jsf/src/main/java/org/hdiv/components/support/OutcomeTargetComponentProcessor.java index d21abbeb..ce30a245 100644 --- a/hdiv-jsf/src/main/java/org/hdiv/components/support/OutcomeTargetComponentProcessor.java +++ b/hdiv-jsf/src/main/java/org/hdiv/components/support/OutcomeTargetComponentProcessor.java @@ -15,8 +15,6 @@ */ package org.hdiv.components.support; -import java.util.Map; - import 
javax.faces.FacesException; import javax.faces.component.UIOutcomeTarget; import javax.faces.component.UIParameter; @@ -63,24 +61,10 @@ public void processOutcomeTargetLinkComponent(FacesContext context, UIOutcomeTar } IDataComposer dataComposer = HDIVUtil.getDataComposer(request); - dataComposer.beginRequest(urlData.getContextPathRelativeUrl()); - - Map<String, String[]> params = urlData.getOriginalUrlParams(); - if (params != null) { - // Process url params - for (String key : params.keySet()) { - String[] values = params.get(key); - - for (int i = 0; i < values.length; i++) { - String value = values[i]; - String composedParam = dataComposer.compose(key, value, false, true, - Constants.ENCODING_UTF_8); - values[i] = composedParam; - } - params.put(key, values); - } - urlData.setProcessedUrlParams(params); - } + dataComposer.beginRequest(urlData.getUrlWithoutContextPath()); + + String processedParams = dataComposer.composeParams(urlData.getUrlParams(), "GET", Constants.ENCODING_UTF_8); + urlData.setUrlParams(processedParams); String stateParam = dataComposer.endRequest(); diff --git a/hdiv-jsf/src/main/java/org/hdiv/components/support/OutputLinkComponentProcessor.java b/hdiv-jsf/src/main/java/org/hdiv/components/support/OutputLinkComponentProcessor.java index 61852b9f..e56f7ad1 100644 --- a/hdiv-jsf/src/main/java/org/hdiv/components/support/OutputLinkComponentProcessor.java +++ b/hdiv-jsf/src/main/java/org/hdiv/components/support/OutputLinkComponentProcessor.java @@ -15,8 +15,6 @@ */ package org.hdiv.components.support; -import java.util.Map; - import javax.faces.FacesException; import javax.faces.component.UIComponent; import javax.faces.component.UIParameter; @@ -62,24 +60,10 @@ public void processOutputLink(FacesContext context, HtmlOutputLink component) { } IDataComposer dataComposer = HDIVUtil.getDataComposer(request); - dataComposer.beginRequest(urlData.getContextPathRelativeUrl()); - - Map<String, String[]> params = urlData.getOriginalUrlParams(); - if 
(params != null) { - // Process url params - for (String key : params.keySet()) { - String[] values = params.get(key); - - for (int i = 0; i < values.length; i++) { - String value = values[i]; - String composedParam = dataComposer.compose(key, value, false, true, - Constants.ENCODING_UTF_8); - values[i] = composedParam; - } - params.put(key, values); - } - urlData.setProcessedUrlParams(params); - } + dataComposer.beginRequest(urlData.getUrlWithoutContextPath()); + + String processedParams = dataComposer.composeParams(urlData.getUrlParams(), "GET", Constants.ENCODING_UTF_8); + urlData.setUrlParams(processedParams); if (hasUIParams) { diff --git a/hdiv-jsf/src/main/java/org/hdiv/filter/JsfValidatorHelper.java b/hdiv-jsf/src/main/java/org/hdiv/filter/JsfValidatorHelper.java index 7c312232..705a4ac7 100644 --- a/hdiv-jsf/src/main/java/org/hdiv/filter/JsfValidatorHelper.java +++ b/hdiv-jsf/src/main/java/org/hdiv/filter/JsfValidatorHelper.java @@ -73,7 +73,7 @@ protected ValidatorHelperResult preValidate(HttpServletRequest request, String t log.debug("Request contains view state"); } - if (this.getHdivConfig().isStartPage(target, request.getMethod())) { + if (this.hdivConfig.isStartPage(target, request.getMethod())) { // It is an init page if (log.isDebugEnabled()) { log.debug("Request is start page"); @@ -133,7 +133,7 @@ public ValidatorHelperResult isTheSameAction(HttpServletRequest request, String log.debug(" target:" + target); log.debug(" state action:" + state.getAction()); } - this.getLogger().log(HDIVErrorCodes.ACTION_ERROR, target, null, null); + this.logger.log(HDIVErrorCodes.ACTION_ERROR, target, null, null); return new ValidatorHelperResult(HDIVErrorCodes.ACTION_ERROR); } diff --git a/hdiv-jsf/src/test/resources/org/hdiv/config/applicationContext-extra.xml b/hdiv-jsf/src/test/resources/org/hdiv/config/applicationContext-extra.xml index 9af059e3..3c51a51a 100644 --- a/hdiv-jsf/src/test/resources/org/hdiv/config/applicationContext-extra.xml +++ 
b/hdiv-jsf/src/test/resources/org/hdiv/config/applicationContext-extra.xml @@ -36,8 +36,9 @@ <property name="stateUtil" ref="stateUtil" /> <property name="hdivConfig" ref="config" /> <property name="session" ref="sessionHDIV" /> - <property name="dataValidatorFactory" ref="dataValidatorFactory" /> + <property name="dataValidator" ref="dataValidator" /> <property name="dataComposerFactory" ref="dataComposerFactory" /> + <property name="urlProcessor" ref="basicUrlProcessor"/> </bean> </beans> \ No newline at end of file
ade1458676e7170ccc296476edc734508a098e3f
Valadoc
gtkdoc-importer: Add support for figures
a
https://github.com/GNOME/vala/
diff --git a/src/libvaladoc/documentation/gtkdoccommentparser.vala b/src/libvaladoc/documentation/gtkdoccommentparser.vala index b60f3ecae5..629aebbc29 100644 --- a/src/libvaladoc/documentation/gtkdoccommentparser.vala +++ b/src/libvaladoc/documentation/gtkdoccommentparser.vala @@ -771,9 +771,20 @@ public class Valadoc.Gtkdoc.Parser : Object, ResourceLocator { return p; } - private Embedded? parse_docbook_inlinegraphic () { - if (!check_xml_open_tag ("inlinegraphic")) { - this.report_unexpected_token (current, "<inlinegraphic>"); + private Paragraph? parse_docbook_graphic () { + var tmp = parse_docbook_inlinegraphic ("graphic"); + if (tmp == null) { + return null; + } + + Paragraph? p = factory.create_paragraph (); + p.content.add (tmp); + return p; + } + + private Embedded? parse_docbook_inlinegraphic (string tag_name = "inlinegraphic") { + if (!check_xml_open_tag (tag_name)) { + this.report_unexpected_token (current, "<%s>".printf (tag_name)); return null; } @@ -783,8 +794,8 @@ public class Valadoc.Gtkdoc.Parser : Object, ResourceLocator { next (); parse_docbook_spaces (); - if (!check_xml_close_tag ("inlinegraphic")) { - this.report_unexpected_token (current, "</inlinegrapic>"); + if (!check_xml_close_tag (tag_name)) { + this.report_unexpected_token (current, "</%s>".printf (tag_name)); return e; } @@ -942,6 +953,52 @@ public class Valadoc.Gtkdoc.Parser : Object, ResourceLocator { return content; } + private LinkedList<Block>? parse_docbook_figure () { + if (!check_xml_open_tag ("figure")) { + this.report_unexpected_token (current, "<figure>"); + return null; + } + next (); + + LinkedList<Block> content = new LinkedList<Block> (); + parse_docbook_spaces (); + + if (current.type == TokenType.XML_OPEN && current.content == "title") { + append_block_content_not_null (content, parse_docbook_title ()); + parse_docbook_spaces (); + } + + while (current.type == TokenType.XML_OPEN) { + if (current.content == "inlinegraphic") { + Paragraph p = (content.size > 0)? 
content[0] as Paragraph : null; + if (p == null) { + p = factory.create_paragraph (); + } + + while (current.type == TokenType.XML_OPEN && current.content == "inlinegraphic") { + p.content.add (parse_docbook_inlinegraphic ()); + next (); + parse_docbook_spaces (); + } + } else if (current.content == "graphic") { + append_block_content_not_null (content, parse_docbook_graphic ()); + next (); + } else { + break; + } + + parse_docbook_spaces (); + } + + if (!check_xml_close_tag ("figure")) { + this.report_unexpected_token (current, "</figure>"); + return content; + } + + next (); + return content; + } + private Run? parse_docbook_footnote () { if (!check_xml_open_tag ("footnote")) { this.report_unexpected_token (current, "<footnote>"); @@ -1030,6 +1087,8 @@ public class Valadoc.Gtkdoc.Parser : Object, ResourceLocator { this.append_block_content_not_null (content, parse_docbook_important ()); } else if (current.type == TokenType.XML_OPEN && current.content == "refsect2") { this.append_block_content_not_null_all (content, parse_docbook_refsect2 ()); + } else if (current.type == TokenType.XML_OPEN && current.content == "figure") { + this.append_block_content_not_null_all (content, parse_docbook_figure ()); } else if (current.type == TokenType.GTKDOC_PARAGRAPH) { this.append_block_content_not_null (content, parse_gtkdoc_paragraph ()); } else if (current.type == TokenType.GTKDOC_SOURCE_OPEN) {
e1a2fd0eb19fca68caba2bfcdbfb069789a302a9
Valadoc
libvala: Add support for libvala-0.16
a
https://github.com/GNOME/vala/
diff --git a/src/libvaladoc/moduleloader.vala b/src/libvaladoc/moduleloader.vala index 2bb427644b..bcad4d5552 100755 --- a/src/libvaladoc/moduleloader.vala +++ b/src/libvaladoc/moduleloader.vala @@ -143,7 +143,7 @@ public class Valadoc.ModuleLoader : Object { DriverMetaData (0, 10, 0, 10, "0.10.x"), DriverMetaData (0, 11, 0, 12, "0.12.x"), DriverMetaData (0, 13, 0, 14, "0.14.x"), - DriverMetaData (0, 15, 0, 15, "0.16.x") + DriverMetaData (0, 15, 0, 16, "0.16.x") };
497c7441ad1370b383d3c00c614d32dec4ef1952
Delta Spike
DELTASPIKE-277 fix JsfMessageProducer type detection
c
https://github.com/apache/deltaspike
diff --git a/deltaspike/modules/jsf/impl/src/main/java/org/apache/deltaspike/jsf/impl/message/JsfMessageProducer.java b/deltaspike/modules/jsf/impl/src/main/java/org/apache/deltaspike/jsf/impl/message/JsfMessageProducer.java index 6dc58b8a7..2a26c82d4 100644 --- a/deltaspike/modules/jsf/impl/src/main/java/org/apache/deltaspike/jsf/impl/message/JsfMessageProducer.java +++ b/deltaspike/modules/jsf/impl/src/main/java/org/apache/deltaspike/jsf/impl/message/JsfMessageProducer.java @@ -23,6 +23,9 @@ import javax.enterprise.inject.Produces; import javax.enterprise.inject.spi.InjectionPoint; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; + import org.apache.deltaspike.core.util.ReflectionUtils; import org.apache.deltaspike.jsf.message.JsfMessage; @@ -36,13 +39,24 @@ public class JsfMessageProducer @Dependent public JsfMessage createJsfMessage(InjectionPoint injectionPoint) { - return createJsfMessageFor(injectionPoint, ReflectionUtils.getRawType(injectionPoint.getType())); + if (! 
(injectionPoint.getType() instanceof ParameterizedType)) + { + throw new IllegalArgumentException("JsfMessage must be used as generic type"); + } + ParameterizedType paramType = (ParameterizedType) injectionPoint.getType(); + Type[] actualTypes = paramType.getActualTypeArguments(); + if (actualTypes.length != 1) + { + throw new IllegalArgumentException("JsfMessage must have the MessageBundle as generic type parameter"); + } + + return createJsfMessageFor(injectionPoint, actualTypes[0]); } - private JsfMessage createJsfMessageFor(InjectionPoint injectionPoint, Class<Object> rawType) + private JsfMessage createJsfMessageFor(InjectionPoint injectionPoint, Type rawType) { //X TODO check if the JsfMessage should get injected into a UIComponent and use #getClientId() - return new DefaultJsfMessage(rawType, null); + return new DefaultJsfMessage((Class) rawType, null); } } diff --git a/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/beans/BackingBean.java b/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/beans/ViewScopedBackingBean.java similarity index 95% rename from deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/beans/BackingBean.java rename to deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/beans/ViewScopedBackingBean.java index cf6eb6cea..cb59e88b5 100644 --- a/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/beans/BackingBean.java +++ b/deltaspike/modules/jsf/impl/src/test/java/org/apache/deltaspike/test/jsf/impl/scope/view/beans/ViewScopedBackingBean.java @@ -27,7 +27,7 @@ */ @ViewScoped @Named("viewScopedBean") -public class BackingBean implements Serializable +public class ViewScopedBackingBean implements Serializable { private int i = 0;
9b53aafc716b44c0d905ec60c44c4f266d718131
tapiji
Adds the new project `tapiji.tools.java`.
p
https://github.com/tapiji/tapiji
diff --git a/org.eclipse.babel.tapiji.tools.java/.classpath b/org.eclipse.babel.tapiji.tools.java/.classpath new file mode 100644 index 00000000..ad32c83a --- /dev/null +++ b/org.eclipse.babel.tapiji.tools.java/.classpath @@ -0,0 +1,7 @@ +<?xml version="1.0" encoding="UTF-8"?> +<classpath> + <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/> + <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/> + <classpathentry kind="src" path="src"/> + <classpathentry kind="output" path="bin"/> +</classpath> diff --git a/org.eclipse.babel.tapiji.tools.java/.project b/org.eclipse.babel.tapiji.tools.java/.project new file mode 100644 index 00000000..72635d93 --- /dev/null +++ b/org.eclipse.babel.tapiji.tools.java/.project @@ -0,0 +1,28 @@ +<?xml version="1.0" encoding="UTF-8"?> +<projectDescription> + <name>org.eclipse.babel.tapiji.tools.java</name> + <comment></comment> + <projects> + </projects> + <buildSpec> + <buildCommand> + <name>org.eclipse.jdt.core.javabuilder</name> + <arguments> + </arguments> + </buildCommand> + <buildCommand> + <name>org.eclipse.pde.ManifestBuilder</name> + <arguments> + </arguments> + </buildCommand> + <buildCommand> + <name>org.eclipse.pde.SchemaBuilder</name> + <arguments> + </arguments> + </buildCommand> + </buildSpec> + <natures> + <nature>org.eclipse.pde.PluginNature</nature> + <nature>org.eclipse.jdt.core.javanature</nature> + </natures> +</projectDescription> diff --git a/org.eclipse.babel.tapiji.tools.java/.settings/org.eclipse.jdt.core.prefs b/org.eclipse.babel.tapiji.tools.java/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 00000000..c537b630 --- /dev/null +++ b/org.eclipse.babel.tapiji.tools.java/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,7 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 
+org.eclipse.jdt.core.compiler.compliance=1.6 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.6 diff --git a/org.eclipse.babel.tapiji.tools.java/META-INF/MANIFEST.MF b/org.eclipse.babel.tapiji.tools.java/META-INF/MANIFEST.MF new file mode 100644 index 00000000..1e66b103 --- /dev/null +++ b/org.eclipse.babel.tapiji.tools.java/META-INF/MANIFEST.MF @@ -0,0 +1,7 @@ +Manifest-Version: 1.0 +Bundle-ManifestVersion: 2 +Bundle-Name: TapiJI Tools Java extension +Bundle-SymbolicName: org.eclipse.babel.tapiji.tools.java +Bundle-Version: 0.0.2.qualifier +Bundle-Vendor: Vienna University of Technology +Bundle-RequiredExecutionEnvironment: JavaSE-1.6 diff --git a/org.eclipse.babel.tapiji.tools.java/build.properties b/org.eclipse.babel.tapiji.tools.java/build.properties new file mode 100644 index 00000000..34d2e4d2 --- /dev/null +++ b/org.eclipse.babel.tapiji.tools.java/build.properties @@ -0,0 +1,4 @@ +source.. = src/ +output.. = bin/ +bin.includes = META-INF/,\ + .
3ca94cabb68789cb20e37e0c9ecea7acc8b9487e
Valadoc
libvaladoc/html: Add "All known members inherited from "
a
https://github.com/GNOME/vala/
diff --git a/src/libvaladoc/api/class.vala b/src/libvaladoc/api/class.vala index 6c19ea0786..6eec1370d6 100644 --- a/src/libvaladoc/api/class.vala +++ b/src/libvaladoc/api/class.vala @@ -160,7 +160,7 @@ public class Valadoc.Api.Class : TypeSymbol { */ public Collection<TypeReference> get_full_implemented_interface_list () { if (_full_implemented_interfaces == null) { - _full_implemented_interfaces = new HashSet<TypeReference> (); + _full_implemented_interfaces = new LinkedList<TypeReference> (); _full_implemented_interfaces.add_all (this.interfaces); if (base_type != null) { diff --git a/src/libvaladoc/api/interface.vala b/src/libvaladoc/api/interface.vala index b8c339ee9a..51b6e00095 100644 --- a/src/libvaladoc/api/interface.vala +++ b/src/libvaladoc/api/interface.vala @@ -67,9 +67,10 @@ public class Valadoc.Api.Interface : TypeSymbol { /** * Returns a list of all preconditioned interfaces */ + // TODO: rename to get_full_... public Collection<TypeReference> get_all_implemented_interface_list () { if (_full_implemented_interfaces == null) { - _full_implemented_interfaces = new HashSet<TypeReference> (); + _full_implemented_interfaces = new LinkedList<TypeReference> (); _full_implemented_interfaces.add_all (this.interfaces); if (base_type != null) { diff --git a/src/libvaladoc/api/node.vala b/src/libvaladoc/api/node.vala index e28a898792..8c5345c864 100644 --- a/src/libvaladoc/api/node.vala +++ b/src/libvaladoc/api/node.vala @@ -144,6 +144,7 @@ public abstract class Valadoc.Api.Node : Item, Browsable, Documentation, Compara private Map<string,Node> per_name_children; private Map<NodeType?, Gee.List<Node>> per_type_children; + public Node (Node? parent, SourceFile? file, string? 
name, void* data) { base (data); @@ -165,6 +166,7 @@ public abstract class Valadoc.Api.Node : Item, Browsable, Documentation, Compara /** * {@inheritDoc} */ + // TODO: rename to is_visible public abstract bool is_browsable (Settings settings); /** @@ -210,13 +212,46 @@ public abstract class Valadoc.Api.Node : Item, Browsable, Documentation, Compara } } + /** + * Specifies whether this node has at least one visible child with the given type + * + * @param type a node type + */ + public bool has_visible_children_by_type (NodeType type, Settings settings) { + Gee.List<Node>? all_children = per_type_children.get (type); + if (all_children != null) { + foreach (Node node in all_children) { + if (node.is_browsable (settings)) { + return true; + } + } + } + + return false; + } + + /** + * Specifies whether this node has at least one visible child with the given types + * + * @param types a list of node types + */ + public bool has_visible_children_by_types (NodeType[] types, Settings settings) { + foreach (NodeType type in types) { + if (has_visible_children_by_type (type, settings)) { + return true; + } + } + + return false; + } + /** * Specifies whether this node has at least one child with the given type * * @param type a node type */ public bool has_children_by_type (NodeType type) { - Gee.List<Node> all_children = per_type_children.get (type); + Gee.List<Node>? all_children = per_type_children.get (type); return all_children != null && !all_children.is_empty; } @@ -306,6 +341,7 @@ public abstract class Valadoc.Api.Node : Item, Browsable, Documentation, Compara * Visits all children of this node with the specified Visitor. 
* * @param visitor the visitor to be called while traversing + * @param filtered specifies whether nodes which are not browsable should appear in the list */ public void accept_all_children (Visitor visitor, bool filtered = true) { foreach (Gee.List<Node> children in per_type_children.values) { diff --git a/src/libvaladoc/html/basicdoclet.vala b/src/libvaladoc/html/basicdoclet.vala index 4f6d7c2b64..c63edc7438 100644 --- a/src/libvaladoc/html/basicdoclet.vala +++ b/src/libvaladoc/html/basicdoclet.vala @@ -128,6 +128,25 @@ public abstract class Valadoc.Html.BasicDoclet : Api.Visitor, Doclet { } + private TypeSymbol? unpack_type_reference (TypeReference? type_reference) { + Api.Item pos = type_reference; + + while (pos != null) { + if (pos is TypeReference) { + pos = ((TypeReference) pos).data_type; + } else if (pos is Api.Array) { + pos = ((Api.Array) pos).data_type; + } else if (pos is Pointer) { + pos = ((Pointer) pos).data_type; + } else { + assert (pos is TypeSymbol); + return (TypeSymbol) pos; + } + } + + return null; + } + protected void write_navi_entry_html_template (string style, string content, bool is_deprecated) { writer.start_tag ("li", {"class", style}); @@ -536,7 +555,7 @@ public abstract class Valadoc.Html.BasicDoclet : Api.Visitor, Doclet { return list; } - private void write_known_symbols_node (Gee.Collection<Api.Node> nodes2, Api.Node container, string headline) { + private void write_known_symbols_note (Gee.Collection<Api.Node> nodes2, Api.Node container, string headline) { var nodes = get_accessible_nodes_from_list (nodes2); if (nodes.size == 0) { return ; @@ -578,7 +597,12 @@ public abstract class Valadoc.Html.BasicDoclet : Api.Visitor, Doclet { for (int p = 0; p < list_sizes[i] && iter.next (); p++) { var node = iter.get (); writer.start_tag ("li", {"class", cssresolver.resolve (node)}); - writer.link (get_link (node, container), node.name); + string link = get_link (node, container); + if (link == null) { + writer.text (node.name); + } else { 
+ writer.link (link, node.name); + } writer.end_tag ("li"); } @@ -607,13 +631,15 @@ public abstract class Valadoc.Html.BasicDoclet : Api.Visitor, Doclet { if (node is Class) { var cl = node as Class; - write_known_symbols_node (cl.get_known_child_classes (), cl, "All known sub-classes:"); - write_known_symbols_node (cl.get_known_derived_interfaces (), cl, "Required by:"); + write_known_symbols_note (cl.get_known_child_classes (), cl, "All known sub-classes:"); + write_known_symbols_note (cl.get_known_derived_interfaces (), cl, "Required by:"); } else if (node is Interface) { var iface = node as Interface; - write_known_symbols_node (iface.get_known_implementations (), iface, "All known implementing classes:"); - write_known_symbols_node (iface.get_known_related_interfaces (), iface, "All known sub-interfaces:"); + write_known_symbols_note (iface.get_known_implementations (), iface, "All known implementing classes:"); + write_known_symbols_note (iface.get_known_related_interfaces (), iface, "All known sub-interfaces:"); } + // TODO: All known sub-structs + if (node.parent is Namespace) { writer.simple_tag ("br"); @@ -621,7 +647,9 @@ public abstract class Valadoc.Html.BasicDoclet : Api.Visitor, Doclet { write_package_note (node); } - if (!(node is Method || node is Delegate || node is Api.Signal)) { // avoids exception listings & implementations + if (!(node is Method || node is Delegate || node is Api.Signal)) { + // avoids exception listings & implementations + if (node.has_children ({ Api.NodeType.ERROR_CODE, Api.NodeType.ENUM_VALUE, @@ -653,9 +681,124 @@ public abstract class Valadoc.Html.BasicDoclet : Api.Visitor, Doclet { write_children (node, Api.NodeType.FIELD, "Fields", node); } } + + if (node is Class) { + write_inherited_symbols_note_for_class ((Class) node, node); + } else if (node is Interface) { + write_inherited_symbols_note_for_interface ((Interface) node, node); + } else if (node is Struct) { + write_inherited_symbols_note_for_struct ((Struct) node, 
node); + } + writer.end_tag ("div"); } + private static NodeType[] inheritable_members = { + NodeType.CONSTANT, + NodeType.PROPERTY, + NodeType.DELEGATE, + NodeType.STATIC_METHOD, + NodeType.METHOD, + NodeType.SIGNAL, + NodeType.FIELD, + }; + + private inline bool has_visible_inheritable_children (TypeSymbol symbol) { + return symbol.has_visible_children_by_types (inheritable_members, _settings); + } + + private void write_inherited_members_headline () { + writer.start_tag ("h3", {"class", css_title}).text ("Inherited Members:").end_tag ("h3"); + } + + private void write_inherited_symbols_note_for_class (Class cl, Api.Node container) { + bool headline_printed = false; + + // class hierarchy: + Class base_class = unpack_type_reference (cl.base_type) as Class; + while (base_class != null) { + if (!headline_printed && has_visible_inheritable_children (base_class)) { + write_inherited_members_headline (); + headline_printed = true; + } + + write_inherited_symbols_note (base_class, "class", container); + base_class = unpack_type_reference (base_class.base_type) as Class; + } + + + // implemented interfaces + Gee.LinkedList<Interface> printed_interfaces = new Gee.LinkedList<Interface> (); + foreach (TypeReference iface_ref in cl.get_full_implemented_interface_list ()) { + Interface iface = (Interface) unpack_type_reference (iface_ref); + + if (!headline_printed && has_visible_inheritable_children (iface)) { + write_inherited_members_headline (); + headline_printed = true; + } else if (printed_interfaces.contains (iface)) { + continue ; + } + + write_inherited_symbols_note (iface, "interface", container); + printed_interfaces.add (iface); + } + } + + private void write_inherited_symbols_note_for_interface (Interface iface, Api.Node container) { + bool headline_printed = false; + + // class hierarchy: + Class base_class = unpack_type_reference (iface.base_type) as Class; + while (base_class != null) { + if (!headline_printed && has_visible_inheritable_children 
(base_class)) { + write_inherited_members_headline (); + headline_printed = true; + } + + write_inherited_symbols_note (base_class, "class", container); + base_class = unpack_type_reference (base_class.base_type) as Class; + } + + + // interfaces: + Gee.LinkedList<Interface> printed_interfaces = new Gee.LinkedList<Interface> (); + foreach (TypeReference pre_ref in iface.get_all_implemented_interface_list ()) { + Interface pre = (Interface) unpack_type_reference (pre_ref); + + if (!headline_printed && has_visible_inheritable_children (pre)) { + write_inherited_members_headline (); + headline_printed = true; + } else if (printed_interfaces.contains (pre)) { + continue ; + } + + write_inherited_symbols_note (pre, "interface", container); + printed_interfaces.add (pre); + } + } + + private void write_inherited_symbols_note_for_struct (Struct str, Api.Node container) { + Struct base_struct = unpack_type_reference (str.base_type) as Struct; + if (base_struct != null && has_visible_inheritable_children (base_struct)) { + write_inherited_members_headline (); + write_inherited_symbols_note (base_struct, "struct", container); + } + } + + private void write_inherited_symbols_note (TypeSymbol symbol, string type, Api.Node container) { + write_known_symbols_note (symbol.get_children_by_types (inheritable_members, false), container, "All known members inherited from %s %s".printf (type, symbol.get_full_name ())); + + /* + write_known_symbols_note (symbol.get_children_by_type (NodeType.CONSTANT, false), container, "All known constants inherited from %s %s".printf (type, symbol.get_full_name ())); + write_known_symbols_note (symbol.get_children_by_type (NodeType.PROPERTY, false), container, "All known properties inherited from %s %s".printf (type, symbol.get_full_name ())); + write_known_symbols_note (symbol.get_children_by_type (NodeType.DELEGATE, false), container, "All known delegates inherited from %s %s".printf (type, symbol.get_full_name ())); + write_known_symbols_note 
(symbol.get_children_by_type (NodeType.STATIC_METHOD, false), container, "All known static methods inherited from %s %s".printf (type, symbol.get_full_name ())); + write_known_symbols_note (symbol.get_children_by_type (NodeType.METHOD, false), container, "All known methods inherited from %s %s".printf (type, symbol.get_full_name ())); + write_known_symbols_note (symbol.get_children_by_type (NodeType.SIGNAL, false), container, "All known signals inherited from %s %s".printf (type, symbol.get_full_name ())); + write_known_symbols_note (symbol.get_children_by_type (NodeType.FIELD, false), container, "All known fields inherited from %s %s".printf (type, symbol.get_full_name ())); + */ + } + protected void write_child_namespaces (Api.Node node, Api.Node? parent) { Gee.ArrayList<Namespace> namespaces = new Gee.ArrayList<Namespace> (); this.fetch_subnamespace_names (node, namespaces); @@ -698,7 +841,7 @@ public abstract class Valadoc.Html.BasicDoclet : Api.Visitor, Doclet { } protected void write_child_dependencies (Package package, Api.Node? parent) { - Gee.Collection<Package> deps = package.get_full_dependency_list (); + Gee.Collection<Package>? deps = package.get_full_dependency_list (); if (deps.size == 0) { return; } @@ -706,7 +849,7 @@ public abstract class Valadoc.Html.BasicDoclet : Api.Visitor, Doclet { writer.start_tag ("h2", {"class", css_title}).text ("Dependencies:").end_tag ("h2"); writer.start_tag ("ul", {"class", css_inline_navigation}); foreach (Package p in deps) { - string link = this.get_link (p, parent); + string? link = this.get_link (p, parent); if (link == null) { writer.start_tag ("li", {"class", cssresolver.resolve (p), "id", p.name}).text (p.name).end_tag ("li"); } else {
0ace17c659c5e73c9de4003718ba4860bfa3be43
restlet-framework-java
- Initial code for new default HTTP connector and- SIP connector.--
a
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/AcceptorTask.java b/modules/org.restlet/src/org/restlet/engine/http/connector/AcceptorTask.java index 4d9b6e0d2a..5ab1b6022d 100644 --- a/modules/org.restlet/src/org/restlet/engine/http/connector/AcceptorTask.java +++ b/modules/org.restlet/src/org/restlet/engine/http/connector/AcceptorTask.java @@ -99,9 +99,8 @@ public void run() { if ((getHelper().getMaxTotalConnections() == -1) || (connectionsCount <= getHelper() .getMaxTotalConnections())) { - final Connection<?> connection = getHelper() - .createConnection(getHelper(), - client.socket()); + Connection<?> connection = getHelper().createConnection( + getHelper(), client.socket()); connection.open(); getHelper().getConnections().add(connection); } else { diff --git a/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java b/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java index 1a9eb2dc42..200a8b2377 100644 --- a/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java +++ b/modules/org.restlet/src/org/restlet/engine/http/connector/ControllerTask.java @@ -146,6 +146,10 @@ public void run() { } }); } + + if (conn.getState() == ConnectionState.CLOSED) { + getHelper().getConnections().remove(conn); + } } // Control if there are some pending requests that could
a421be8c740645d1c09402bdf94463b4319fcec1
Delta Spike
DELTASPIKE-385 fix for getBeanManager for ClassLoaders which have not been used in Extension loading
c
https://github.com/apache/deltaspike
diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/provider/BeanManagerProvider.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/provider/BeanManagerProvider.java index 3db6f3a81..cfa590746 100644 --- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/provider/BeanManagerProvider.java +++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/provider/BeanManagerProvider.java @@ -167,15 +167,22 @@ public BeanManager getBeanManager() // warn the user if he tries to use the BeanManager before container startup if (!bmi.booted) { - LOG.warning("When using the BeanManager to retrieve Beans before the Container is started," + - " non-portable behaviour results!"); + if (!isParentBeanManagerBooted()) + { + LOG.warning("When using the BeanManager to retrieve Beans before the Container is started," + + " non-portable behaviour results!"); + + // reset the flag to only issue the warning once. + // this is a workaround for some containers which mess up EAR handling. + bmi.booted = true; + } } BeanManager result = bmi.finalBm; if (result == null) { - synchronized (this) + synchronized (bmi) { result = bmi.finalBm; if (result == null) @@ -211,7 +218,13 @@ public BeanManager getBeanManager() * container might not be fully setup yet. * * This might happen if someone uses the BeanManagerProvider during Extension - * startup. + * startup. This should generally avoided but instead you should just use + * an injected BeanManager in your Extension and propagate the BeanManager + * via setters. + * + * In EARs with multiple webapps you might get different Extensions per WAR. + * This depends on the container you use. 
By resetting <i>all</i> known + * BeanManagerInfos we try to */ public void cleanupFinalBeanManagers(@Observes AfterDeploymentValidation adv) { @@ -303,4 +316,40 @@ private static BeanManagerProvider setBeanManagerProvider(BeanManagerProvider be return bmpSingleton; } + + /** + * @return whether a BeanManagerInfo for a parent classloader is available and has the booted flag set. + */ + private boolean isParentBeanManagerBooted() + { + ClassLoader classLoader = ClassUtils.getClassLoader(null); + BeanManagerInfo parentBmi = getParentBeanManagerInfo(classLoader); + + return parentBmi != null && parentBmi.booted; + } + + /** + * This method recurses into the parent ClassLoaders and will check if a + * BeanManagerInfo for it exists. + * @return the BeanManagerInfo of the parent ClassLoader hierarchy if any exists, + * or <code>null</code> if there is no {@link BeanManagerInfo} for the ClassLoaders in the hierarchy. + */ + private BeanManagerInfo getParentBeanManagerInfo(ClassLoader classLoader) + { + ClassLoader parentClassLoader = classLoader.getParent(); + if (parentClassLoader.equals(ClassLoader.getSystemClassLoader())) + { + return null; + } + + BeanManagerInfo bmi = getBeanManagerInfo(parentClassLoader); + if (bmi == null) + { + bmi = getParentBeanManagerInfo(parentClassLoader); + } + + return bmi; + } + + }
49ac71ce130c7b647bad6dee7319d10a5ac905f5
lmco$eurekastreams
Improved centering of dialogs. Initially highlight selected notif source. Split up notif dialog for testability/cleanliness. (B-08706)
p
https://github.com/lmco/eurekastreams
diff --git a/web/src/main/java/org/eurekastreams/web/client/ui/common/dialog/Dialog.java b/web/src/main/java/org/eurekastreams/web/client/ui/common/dialog/Dialog.java index b0c026fff..0da951234 100644 --- a/web/src/main/java/org/eurekastreams/web/client/ui/common/dialog/Dialog.java +++ b/web/src/main/java/org/eurekastreams/web/client/ui/common/dialog/Dialog.java @@ -19,6 +19,8 @@ import org.eurekastreams.web.client.ui.Session; import org.eurekastreams.web.client.ui.pages.master.StaticResourceBundle; +import com.google.gwt.core.client.Scheduler; +import com.google.gwt.core.client.Scheduler.ScheduledCommand; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.event.dom.client.KeyCodes; @@ -141,8 +143,14 @@ public void showCentered() { popupPanel.addStyleName(StaticResourceBundle.INSTANCE.coreCss().hidden()); show(); - center(); - popupPanel.removeStyleName(StaticResourceBundle.INSTANCE.coreCss().hidden()); + Scheduler.get().scheduleDeferred(new ScheduledCommand() + { + public void execute() + { + center(); + popupPanel.removeStyleName(StaticResourceBundle.INSTANCE.coreCss().hidden()); + } + }); } /** diff --git a/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/NotificationsDialogContent.java b/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/NotificationsDialogContent.java old mode 100755 new mode 100644 index 3e3a989d3..6f81c1e9a --- a/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/NotificationsDialogContent.java +++ b/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/NotificationsDialogContent.java @@ -17,13 +17,9 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; import java.util.List; import java.util.Map; -import org.eurekastreams.server.domain.EntityType; import 
org.eurekastreams.server.domain.InAppNotificationDTO; import org.eurekastreams.web.client.events.DialogLinkClickedEvent; import org.eurekastreams.web.client.events.EventBus; @@ -54,26 +50,9 @@ */ public class NotificationsDialogContent extends BaseDialogContent { - /** For sorting source filters. */ - private static final Comparator<Source> SOURCE_SORTER = new Comparator<Source>() - { - public int compare(final Source inO1, final Source inO2) - { - return inO1.getDisplayName().compareTo(inO2.getDisplayName()); - } - }; - /** Main content widget. */ private final Widget main; - // /** Notification list wrapper. */ - // @UiField - // ScrollPanel scrollPanel; - - // - // /** Notification list. */ - // private final Panel listPanel = new FlowPanel(); - /** To unwire the observer when done with dialog. */ private Observer<DialogLinkClickedEvent> linkClickedObserver; @@ -88,10 +67,6 @@ public int compare(final Source inO1, final Source inO2) @UiField(provided = true) CoreCss coreCss; - // /** Notification list wrapper. */ - // @UiField - // ScrollPanel scrollPanel; - /** The list of sources. */ @UiField FlowPanel sourceFiltersPanel; @@ -118,17 +93,14 @@ public int compare(final Source inO1, final Source inO2) private Source rootSource; /** Index of actual sources. */ - private final Map<String, Source> sourceIndex = new HashMap<String, Source>(); + private Map<String, Source> sourceIndex; - /** Currently selected source filter. */ - private Filter currentFilter; + /** Currently-selected source. */ + private Source currentSource; /** Currently selected show read option. */ private final boolean currentShowRead = false; - /** Currently-highlighted source filter widget. */ - private Widget currentFilterWidget = null; - /** Observer (allow unlinking). 
*/ private final Observer<UnreadNotificationClearedEvent> unreadNotificationClearedObserver = // \n new Observer<UnreadNotificationClearedEvent>() @@ -172,7 +144,7 @@ public void update(final GotNotificationListResponseEvent ev) { eventBus.removeObserver(ev, this); storeReceivedNotifications(ev.getResponse()); - displayNotifications(currentFilter, currentShowRead); + selectSource(currentSource); } }); @@ -235,15 +207,6 @@ public String getTitle() return "Notifications"; } - /** - * {@inheritDoc} - */ - @Override - public String getCssName() - { - return StaticResourceBundle.INSTANCE.coreCss().notifModal(); - } - /** * Reduces the unread count for all applicable sources. * @@ -286,131 +249,17 @@ private void storeReceivedNotifications(final List<InAppNotificationDTO> list) { allNotifications = list; - // TODO: determine counts, build filter UI - - // -- build index of sources by type with unread counts -- - // create the high-level sources - rootSource = new Source(null, null, "All", null); - Source streamSource = new Source(null, null, "Streams", rootSource); - Source appSource = new Source(null, null, "Apps", rootSource); - - // loop through all notifications, building the sources - List<Source> streamSources = new ArrayList<Source>(); - List<Source> appSources = new ArrayList<Source>(); - String currentUserAccountId = Session.getInstance().getCurrentPerson().getAccountId(); - int unread = 0; - Map<EntityType, Map<String, Integer>> sourcesWithCountsByType = new HashMap<EntityType, Map<String, Integer>>(); - for (InAppNotificationDTO item : list) - { - String sourceKey = item.getSourceType() + item.getSourceUniqueId(); - Source source = sourceIndex.get(sourceKey); - if (source == null && item.getSourceType() != null) - { - Source parent; - String name = item.getSourceName(); - List<Source> sourceList = null; - switch (item.getSourceType()) - { - case PERSON: - parent = streamSource; - sourceList = streamSources; - if 
(currentUserAccountId.equals(item.getSourceUniqueId())) - { - name = ""; // sort to beginning - } - break; - case GROUP: - parent = streamSource; - sourceList = streamSources; - break; - case APPLICATION: - parent = appSource; - sourceList = appSources; - break; - default: - parent = null; - break; - } - // if a parent was found, then create the source, else leave the notif for the "all" bin - if (parent != null) - { - source = new Source(item.getSourceType(), item.getSourceUniqueId(), name, parent); - sourceIndex.put(sourceKey, source); - sourceList.add(source); - } - } + SourceListBuilder builder = new SourceListBuilder(list, Session.getInstance().getCurrentPerson() + .getAccountId()); + rootSource = builder.getRootSource(); + sourceIndex = builder.getSourceIndex(); - if (!item.isRead()) - { - unread++; - if (source != null) - { - source.incrementUnreadCount(); - if (source.getParent() != null) - { - source.getParent().incrementUnreadCount(); - } - } - } - } - rootSource.setUnreadCount(unread); - - // -- create source filter links -- - // all - currentFilter = new Filter() + for (Source source : builder.getSourceList()) { - public boolean shouldDisplay(final InAppNotificationDTO inItem) - { - return true; - } - }; - addSourceFilter(rootSource, false, currentFilter); - // streams - if (!streamSources.isEmpty()) - { - // all streams - addSourceFilter(streamSource, false, new Filter() - { - public boolean shouldDisplay(final InAppNotificationDTO inItem) - { - return EntityType.PERSON == inItem.getSourceType() || EntityType.GROUP == inItem.getSourceType(); - } - }); - - // prepare list of stream sources: sort, set "My Stream" - Collections.sort(streamSources, SOURCE_SORTER); - if (streamSources.get(0).getDisplayName().isEmpty()) - { - streamSources.get(0).setDisplayName("My Stream"); - } - - // create each source filter - for (Source source : streamSources) - { - addSourceFilter(source, true, new SpecificSourceFilter(source)); - } + addSourceFilter(source, 
source.getParent() != null && source.getParent() != rootSource); } - // apps - if (!appSources.isEmpty()) - { - // all apps - addSourceFilter(appSource, false, new Filter() - { - public boolean shouldDisplay(final InAppNotificationDTO inItem) - { - return EntityType.APPLICATION == inItem.getSourceType(); - } - }); - - // prepare list of sources: sort - Collections.sort(appSources, SOURCE_SORTER); - // create each source filter - for (Source source : appSources) - { - addSourceFilter(source, true, new SpecificSourceFilter(source)); - } - } + currentSource = rootSource; } /** @@ -420,40 +269,47 @@ public boolean shouldDisplay(final InAppNotificationDTO inItem) * Source data. * @param indent * If the label should be indented. - * @param filter - * The filtering function to use for the source. */ - private void addSourceFilter(final Source source, final boolean indent, final Filter filter) + private void addSourceFilter(final Source source, final boolean indent) { int count = source.getUnreadCount(); String text = count > 0 ? source.getDisplayName() + " (" + count + ")" : source.getDisplayName(); + final Label label = new Label(text); - label.addClickHandler(new ClickHandler() - { - public void onClick(final ClickEvent inEvent) - { - currentFilter = filter; - if (currentFilterWidget != null) - { - currentFilterWidget.removeStyleName(style.sourceFilterSelected()); - } - currentFilterWidget = label; - currentFilterWidget.addStyleName(style.sourceFilterSelected()); - displayNotifications(currentFilter, currentShowRead); - } - }); label.addStyleName(style.sourceFilter()); label.addStyleName(StaticResourceBundle.INSTANCE.coreCss().buttonLabel()); if (indent) { label.addStyleName(style.sourceFilterIndented()); } + label.addClickHandler(new ClickHandler() + { + public void onClick(final ClickEvent inEvent) + { + selectSource(source); + } + }); sourceFiltersPanel.add(label); source.setWidget(label); } + /** + * Updates the display to show a new source. 
+ * + * @param newSource + * New source. + */ + private void selectSource(final Source newSource) + { + currentSource.getWidget().removeStyleName(style.sourceFilterSelected()); + + currentSource = newSource; + currentSource.getWidget().addStyleName(style.sourceFilterSelected()); + displayNotifications(currentSource.getFilter(), currentShowRead); + } + /** * Displays the appropriate subset of notifications. * @@ -462,9 +318,10 @@ public void onClick(final ClickEvent inEvent) * @param showRead * If read notifications should be displayed (unread are always displayed). */ - private void displayNotifications(final Filter filter, final boolean showRead) + private void displayNotifications(final Source.Filter filter, final boolean showRead) { noNotificationsUi.getStyle().setDisplay(Display.NONE); + notificationListScrollPanel.setVisible(false); notificationListPanel.clear(); idsShowing.clear(); @@ -485,53 +342,7 @@ private void displayNotifications(final Filter filter, final boolean showRead) else { notificationListScrollPanel.scrollToTop(); - } - } - - /** - * Filter for displaying notifications. - */ - interface Filter - { - /** - * Determines if a notification should be displayed. - * - * @param item - * Notification. - * @return If notification should be displayed. - */ - boolean shouldDisplay(InAppNotificationDTO item); - } - - /** - * Filter to match only notifications from a specific source. - */ - static class SpecificSourceFilter implements Filter - { - /** Type of the source. */ - EntityType sourceType; - - /** Unique ID of the source. */ - String sourceUniqueId; - - /** - * Constructor. - * - * @param inSource - * The source. 
- */ - public SpecificSourceFilter(final Source inSource) - { - sourceType = inSource.getEntityType(); - sourceUniqueId = inSource.getUniqueId(); - } - - /** - * {@inheritDoc} - */ - public boolean shouldDisplay(final InAppNotificationDTO inItem) - { - return sourceType == inItem.getSourceType() && sourceUniqueId.equals(inItem.getSourceUniqueId()); + notificationListScrollPanel.setVisible(true); } } diff --git a/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/Source.java b/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/Source.java old mode 100755 new mode 100644 index c7648b38d..8cf561ed4 --- a/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/Source.java +++ b/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/Source.java @@ -16,20 +16,36 @@ package org.eurekastreams.web.client.ui.common.notification.dialog; import org.eurekastreams.server.domain.EntityType; +import org.eurekastreams.server.domain.InAppNotificationDTO; -import com.google.gwt.user.client.ui.HasText; +import com.google.gwt.user.client.ui.Label; /** * Identifies a source. */ class Source { - /** The string-based unique identifier of the source. */ - private final String uniqueId; + /** + * Filter for displaying notifications. + */ + public interface Filter + { + /** + * Determines if a notification should be displayed. + * + * @param item + * Notification. + * @return If notification should be displayed. + */ + boolean shouldDisplay(InAppNotificationDTO item); + } /** The type of the source. */ private final EntityType entityType; + /** The string-based unique identifier of the source. */ + private final String uniqueId; + /** The name to display for the source. */ private String displayName; @@ -40,7 +56,10 @@ class Source private final Source parent; /** Widget to display the source. */ - private HasText widget; + private Label widget; + + /** Filter to use with this source. 
*/ + private Filter filter; /** * Constructor. @@ -53,16 +72,20 @@ class Source * The name to display for the source. * @param inParent * Parent "source". + * @param inFilter + * Filter to use with this source. */ public Source(final EntityType inEntityType, final String inUniqueId, final String inDisplayName, - final Source inParent) + final Source inParent, final Filter inFilter) { entityType = inEntityType; uniqueId = inUniqueId; displayName = inDisplayName; parent = inParent; + filter = inFilter; } + /** * increment Unread Count. */ @@ -131,7 +154,7 @@ public Source getParent() /** * @return the widget */ - public HasText getWidget() + public Label getWidget() { return widget; } @@ -140,7 +163,7 @@ public HasText getWidget() * @param inWidget * the widget to set */ - public void setWidget(final HasText inWidget) + public void setWidget(final Label inWidget) { widget = inWidget; } @@ -153,4 +176,21 @@ public void setDisplayName(final String inDisplayName) { displayName = inDisplayName; } + + /** + * @return the filter + */ + public Filter getFilter() + { + return filter; + } + + /** + * @param inFilter + * the filter to set + */ + public void setFilter(final Filter inFilter) + { + filter = inFilter; + } } \ No newline at end of file diff --git a/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceListBuilder.java b/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceListBuilder.java new file mode 100644 index 000000000..b7ef02816 --- /dev/null +++ b/web/src/main/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceListBuilder.java @@ -0,0 +1,236 @@ +/* + * Copyright (c) 2011 Lockheed Martin Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.eurekastreams.web.client.ui.common.notification.dialog; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.eurekastreams.server.domain.EntityType; +import org.eurekastreams.server.domain.InAppNotificationDTO; + +/** + * Extracts the sources from a collection of notifications for use by the notifications dialog. + */ +public class SourceListBuilder +{ + /** + * Filter to match only notifications from a specific source. + */ + static class SpecificSourceFilter implements Source.Filter + { + /** Type of the source. */ + EntityType sourceType; + + /** Unique ID of the source. */ + String sourceUniqueId; + + /** + * Constructor. + * + * @param inSource + * The source. + */ + public SpecificSourceFilter(final Source inSource) + { + sourceType = inSource.getEntityType(); + sourceUniqueId = inSource.getUniqueId(); + } + + /** + * {@inheritDoc} + */ + public boolean shouldDisplay(final InAppNotificationDTO inItem) + { + return sourceType == inItem.getSourceType() && sourceUniqueId.equals(inItem.getSourceUniqueId()); + } + } + + /** For sorting source filters. */ + private static final Comparator<Source> SOURCE_SORTER = new Comparator<Source>() + { + public int compare(final Source inO1, final Source inO2) + { + return inO1.getDisplayName().compareTo(inO2.getDisplayName()); + } + }; + + /** Source representing all notifications. 
*/ + private final Source rootSource; + + /** Index of actual sources. Key is type+uniqueID. */ + private final Map<String, Source> sourceIndex = new HashMap<String, Source>(); + + /** List of sources in display order. */ + private final List<Source> sourceList; + + /** + * Constructor - analyzes and builds lists. + * + * @param list + * Notifications to process. + * @param currentUserAccountId + * Current user's account ID. + */ + public SourceListBuilder(final Collection<InAppNotificationDTO> list, final String currentUserAccountId) + { + // -- build index of sources by type with unread counts -- + + // create the high-level sources + rootSource = new Source(null, null, "All", null, new Source.Filter() + { + public boolean shouldDisplay(final InAppNotificationDTO inItem) + { + return true; + } + }); + Source streamSource = new Source(null, null, "Streams", rootSource, new Source.Filter() + { + public boolean shouldDisplay(final InAppNotificationDTO inItem) + { + return EntityType.PERSON == inItem.getSourceType() || EntityType.GROUP == inItem.getSourceType(); + } + }); + Source appSource = new Source(null, null, "Apps", rootSource, new Source.Filter() + { + public boolean shouldDisplay(final InAppNotificationDTO inItem) + { + return EntityType.APPLICATION == inItem.getSourceType(); + } + }); + + // loop through all notifications, building the sources + List<Source> streamSources = new ArrayList<Source>(); + List<Source> appSources = new ArrayList<Source>(); + int unread = 0; + for (InAppNotificationDTO item : list) + { + String sourceKey = item.getSourceType() + item.getSourceUniqueId(); + Source source = sourceIndex.get(sourceKey); + if (source == null && item.getSourceType() != null) + { + Source parent; + String name = item.getSourceName(); + List<Source> midSourceList = null; + switch (item.getSourceType()) + { + case PERSON: + parent = streamSource; + midSourceList = streamSources; + if (currentUserAccountId.equals(item.getSourceUniqueId())) + { + name = ""; 
// sort to beginning + } + break; + case GROUP: + parent = streamSource; + midSourceList = streamSources; + break; + case APPLICATION: + parent = appSource; + midSourceList = appSources; + break; + default: + parent = null; + break; + } + // if a parent was found, then create the source, else leave the notif for the "all" bin + if (parent != null) + { + source = new Source(item.getSourceType(), item.getSourceUniqueId(), name, parent, null); + source.setFilter(new SourceListBuilder.SpecificSourceFilter(source)); + sourceIndex.put(sourceKey, source); + midSourceList.add(source); + } + } + + if (!item.isRead()) + { + unread++; + if (source != null) + { + source.incrementUnreadCount(); + if (source.getParent() != null) + { + source.getParent().incrementUnreadCount(); + } + } + } + } + rootSource.setUnreadCount(unread); + + // -- build list in display order -- + sourceList = new ArrayList<Source>(sourceIndex.size()); + + // "all" + sourceList.add(rootSource); + + // streams + if (!streamSources.isEmpty()) + { + // all streams + sourceList.add(streamSource); + + // prepare list of stream sources: sort, set name on "My Stream" + Collections.sort(streamSources, SOURCE_SORTER); + if (streamSources.get(0).getDisplayName().isEmpty()) + { + streamSources.get(0).setDisplayName("My Stream"); + } + + sourceList.addAll(streamSources); + } + // apps + if (!appSources.isEmpty()) + { + // all apps + sourceList.add(appSource); + + // prepare list of sources: sort + Collections.sort(appSources, SOURCE_SORTER); + + sourceList.addAll(appSources); + } + } + + /** + * @return the rootSource + */ + public Source getRootSource() + { + return rootSource; + } + + /** + * @return the sourceIndex + */ + public Map<String, Source> getSourceIndex() + { + return sourceIndex; + } + + /** + * @return the sourceList + */ + public List<Source> getSourceList() + { + return sourceList; + } +} diff --git a/web/src/main/java/org/eurekastreams/web/client/ui/pages/master/CoreCss.java 
b/web/src/main/java/org/eurekastreams/web/client/ui/pages/master/CoreCss.java index d659bd341..42c40ae43 100644 --- a/web/src/main/java/org/eurekastreams/web/client/ui/pages/master/CoreCss.java +++ b/web/src/main/java/org/eurekastreams/web/client/ui/pages/master/CoreCss.java @@ -1390,8 +1390,8 @@ public interface CoreCss extends CssResource @ClassName("notif-grid") String notifGrid(); - @ClassName("notif-modal") - String notifModal(); + // @ClassName("notif-modal") + // String notifModal(); // @ClassName("notif-no-notifications") // String notifNoNotifications(); diff --git a/web/src/main/resources/public/style/core.css b/web/src/main/resources/public/style/core.css index ee13e4e8d..29f3a6d42 100644 --- a/web/src/main/resources/public/style/core.css +++ b/web/src/main/resources/public/style/core.css @@ -6407,11 +6407,6 @@ body .user-bar li div.notif-count:hover { cursor: pointer; } -.notif-modal { - width: 700px; - height: 438px; -} - .modal-content-panel { background: #EEEEEE url(style/images/globalNavshadow.png) repeat-x scroll 0 0; diff --git a/web/src/test/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceListBuilderTest.java b/web/src/test/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceListBuilderTest.java new file mode 100644 index 000000000..ee1f6d032 --- /dev/null +++ b/web/src/test/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceListBuilderTest.java @@ -0,0 +1,209 @@ +/* + * Copyright (c) 2011 Lockheed Martin Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.eurekastreams.web.client.ui.common.notification.dialog; + +import static junit.framework.Assert.assertEquals; +import static junit.framework.Assert.assertNotNull; +import static junit.framework.Assert.assertNull; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.eurekastreams.server.domain.EntityType; +import org.eurekastreams.server.domain.InAppNotificationDTO; +import org.eurekastreams.web.client.ui.common.notification.dialog.Source.Filter; +import org.junit.Test; + +/** + * Tests SourceListBuilder. + */ +public class SourceListBuilderTest +{ + /** + * Tests source building. + */ + @Test + public void test() + { + String personId1 = "person1"; + String personId2 = "person2"; + String groupId1 = "group1"; + String groupId2 = "group2"; + + String personName1 = "Zachary Iwouldsortlast"; + String personName2 = "John Doe"; + String groupName1 = "Eureka Streams"; + String groupName2 = "Apple Fans"; + + String app1Id = "app1"; + String app2Id = "app2"; + String app3Id = "app3"; + String app1Name = "Test Application"; + String app2Name = "Another Application"; + String app3Name = "My Application"; + + // -- prepare list of notifications -- + List<InAppNotificationDTO> notifs = new ArrayList<InAppNotificationDTO>(); + notifs.add(makeNotif(EntityType.PERSON, personId1, false, personName1)); + notifs.add(makeNotif(EntityType.PERSON, personId1, false, personName1)); + + notifs.add(makeNotif(EntityType.PERSON, personId2, true, personName2)); + notifs.add(makeNotif(EntityType.PERSON, personId2, false, personName2)); + notifs.add(makeNotif(EntityType.PERSON, personId2, true, personName2)); + + notifs.add(makeNotif(EntityType.GROUP, groupId1, true, groupName1)); + notifs.add(makeNotif(EntityType.GROUP, 
groupId1, true, groupName1)); + + notifs.add(makeNotif(EntityType.GROUP, groupId2, false, groupName2)); + notifs.add(makeNotif(EntityType.GROUP, groupId2, true, groupName2)); + notifs.add(makeNotif(EntityType.GROUP, groupId2, false, groupName2)); + + notifs.add(makeNotif(EntityType.NOTSET, null, false, null)); + notifs.add(makeNotif(EntityType.NOTSET, "", false, "")); + notifs.add(makeNotif(null, null, false, null)); + notifs.add(makeNotif(null, "", false, "")); + + notifs.add(makeNotif(EntityType.NOTSET, null, true, null)); + notifs.add(makeNotif(EntityType.NOTSET, "", true, "")); + notifs.add(makeNotif(null, null, true, null)); + notifs.add(makeNotif(null, "", true, "")); + + notifs.add(makeNotif(EntityType.APPLICATION, app1Id, false, app1Name)); + notifs.add(makeNotif(EntityType.APPLICATION, app1Id, false, app1Name)); + + notifs.add(makeNotif(EntityType.APPLICATION, app2Id, true, app2Name)); + notifs.add(makeNotif(EntityType.APPLICATION, app2Id, true, app2Name)); + + notifs.add(makeNotif(EntityType.APPLICATION, app3Id, false, app3Name)); + notifs.add(makeNotif(EntityType.APPLICATION, app3Id, true, app3Name)); + + // "shuffle" the notifs + int shuffler = 5; + final int size = notifs.size(); + assertTrue("The test needs to use a shuffler value that doesn't divide evenly into the number of notifs.", + size % shuffler != 0); + List<InAppNotificationDTO> notifs2 = new ArrayList<InAppNotificationDTO>(size); + for (int i = 0; i < size; i++) + { + notifs2.add(notifs.get(i*shuffler % size)); + } + + // run! 
+ SourceListBuilder sut = new SourceListBuilder(notifs2, personId1); + + // validate + Source rootSource = sut.getRootSource(); + List<Source> sources = sut.getSourceList(); + + assertEquals(2 * 5, sources.size()); + + assertSource(sources.get(0), null, null, "All", 2 * 6, 4 * 6, notifs2); + assertNull(sources.get(0).getParent()); + assertSame(rootSource, sources.get(0)); + + assertSource(sources.get(1), null, null, "Streams", 5, 5 * 2, notifs2); + assertSame(rootSource, sources.get(1).getParent()); + + assertSource(sources.get(2), EntityType.PERSON, personId1, "My Stream", 0, 2, notifs2); + assertNotNull(sources.get(2).getParent()); + + assertSource(sources.get(3), EntityType.GROUP, groupId2, groupName2, 1, 3, notifs2); + assertNotNull(sources.get(3).getParent()); + + assertSource(sources.get(4), EntityType.GROUP, groupId1, groupName1, 2, 2, notifs2); + assertNotNull(sources.get(4).getParent()); + + assertSource(sources.get(5), EntityType.PERSON, personId2, personName2, 2, 3, notifs2); + assertNotNull(sources.get(5).getParent()); + + assertSource(sources.get(6), null, null, "Apps", 3, 6, notifs2); + assertSame(rootSource, sources.get(6).getParent()); + + assertSource(sources.get(7), EntityType.APPLICATION, app2Id, app2Name, 2, 2, notifs2); + assertNotNull(sources.get(7).getParent()); + + assertSource(sources.get(8), EntityType.APPLICATION, app3Id, app3Name, 1, 2, notifs2); + assertNotNull(sources.get(8).getParent()); + + assertSource(sources.get(9), EntityType.APPLICATION, app1Id, app1Name, 0, 2, notifs2); + assertNotNull(sources.get(9).getParent()); + + assertNotNull(sut.getSourceIndex()); + assertFalse(sut.getSourceIndex().isEmpty()); + } + + /** + * Validates a source is as expected. + * + * @param source + * Source to inspect. + * @param entityType + * Expected entity type. + * @param uniqueId + * Expected unique id. + * @param displayName + * Expected display name. + * @param unreadCount + * Expected unread count. 
+ * @param filterCount + * Expected number of notifs that match the filter. + * @param notifs + * List of notifs. + */ + private void assertSource(final Source source, final EntityType entityType, final String uniqueId, + final String displayName, final int unreadCount, final int filterCount, + final List<InAppNotificationDTO> notifs) + { + assertEquals(entityType, source.getEntityType()); + assertEquals(uniqueId, source.getUniqueId()); + assertEquals(displayName, source.getDisplayName()); + assertEquals(unreadCount, source.getUnreadCount()); + Filter filter = source.getFilter(); + assertNotNull(filter); + int matchCount = 0; + for (InAppNotificationDTO notif : notifs) + { + if (filter.shouldDisplay(notif)) + { + matchCount++; + } + } + assertEquals(filterCount, matchCount); + } + + /** + * Create a notification for test data. + * + * @param entityType + * Type of source entity. + * @param uniqueId + * ID of source entity. + * @param unread + * If unread. + * @param name + * Name of source entity. + * @return Notification. 
+ */ + private InAppNotificationDTO makeNotif(final EntityType entityType, final String uniqueId, final boolean unread, + final String name) + { + return new InAppNotificationDTO(0, null, null, null, null, false, !unread, entityType, uniqueId, name, null, + null); + } +} diff --git a/web/src/test/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceTest.java b/web/src/test/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceTest.java old mode 100755 new mode 100644 index dfab5b8d7..1931b3e36 --- a/web/src/test/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceTest.java +++ b/web/src/test/java/org/eurekastreams/web/client/ui/common/notification/dialog/SourceTest.java @@ -16,15 +16,17 @@ package org.eurekastreams.web.client.ui.common.notification.dialog; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import org.eurekastreams.server.domain.EntityType; +import org.eurekastreams.web.client.ui.common.notification.dialog.Source.Filter; import org.jmock.integration.junit4.JUnit4Mockery; import org.jmock.lib.legacy.ClassImposteriser; import org.junit.Test; import com.google.gwt.junit.GWTMockUtilities; -import com.google.gwt.user.client.ui.HasText; +import com.google.gwt.user.client.ui.Label; /** * Tests Source. @@ -44,7 +46,10 @@ public class SourceTest private final Source parent = context.mock(Source.class); /** Fixture. */ - private final HasText widget = context.mock(HasText.class); + private final Label widget = context.mock(Label.class); + + /** Fixture. */ + private final Filter filter = context.mock(Filter.class); /** * Tests getters/setters. 
@@ -52,12 +57,13 @@ public class SourceTest @Test public void testGettersSetters() { - Source sut = new Source(EntityType.GROUP, "UID", "Display", parent); + Source sut = new Source(EntityType.GROUP, "UID", "Display", parent, filter); assertEquals("UID", sut.getUniqueId()); assertEquals(EntityType.GROUP, sut.getEntityType()); assertEquals("Display", sut.getDisplayName()); assertSame(parent, sut.getParent()); + assertSame(filter, sut.getFilter()); sut.setUnreadCount(9); assertEquals(9, sut.getUnreadCount()); @@ -71,5 +77,10 @@ public void testGettersSetters() sut.setDisplayName("New Name"); assertEquals("New Name", sut.getDisplayName()); + + sut.setFilter(null); + assertNull(sut.getFilter()); + sut.setFilter(filter); + assertSame(filter, sut.getFilter()); } }
bcbeaad00f7482a18942f22579a4dda6b3588522
Vala
vapigen: Support default_value for arguments
a
https://github.com/GNOME/vala/
diff --git a/vapigen/valagidlparser.vala b/vapigen/valagidlparser.vala index e25515755f..b686be2886 100644 --- a/vapigen/valagidlparser.vala +++ b/vapigen/valagidlparser.vala @@ -1759,6 +1759,30 @@ public class Vala.GIdlParser : CodeVisitor { foreach (string type_arg in type_args) { param_type.add_type_argument (get_type_from_string (type_arg)); } + } else if (nv[0] == "default_value") { + var val = eval (nv[1]); + if (val == "null") { + p.default_expression = new NullLiteral (param_type.source_reference); + } else if (val == "true") { + p.default_expression = new BooleanLiteral (true, param_type.source_reference); + } else if (val == "false") { + p.default_expression = new BooleanLiteral (false, param_type.source_reference); + } else { + unowned string endptr; + unowned string val_end = val.offset (val.len ()); + + val.to_long (out endptr); + if ((long)endptr == (long)val_end) { + p.default_expression = new IntegerLiteral (val, param_type.source_reference); + } else { + val.to_double (out endptr); + if ((long)endptr == (long)val_end) { + p.default_expression = new RealLiteral (val, param_type.source_reference); + } else { + p.default_expression = new StringLiteral ("\"%s\"".printf (val), param_type.source_reference); + } + } + } } } }
9cedaa31ffbb5af791ea46548e8d5e75ca9fe738
drools
Fixing test--git-svn-id: https://svn.jboss.org/repos/labs/labs/jbossrules/trunk@20387 c60d74c8-e8f6-0310-9e8f-d4a2fc68ab70-
c
https://github.com/kiegroup/drools
diff --git a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java index ea5368c6539..4ed34a49808 100644 --- a/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java +++ b/drools-compiler/src/test/java/org/drools/integrationtests/MiscTest.java @@ -553,8 +553,8 @@ public void testGeneratedBeans1() throws Exception { "stilton" ); // just documenting toString() result: - assertEquals( "Cheese( type=stilton )", - cheese.toString() ); +// assertEquals( "Cheese( type=stilton )", +// cheese.toString() ); // reading the field attribute, using the method chain assertEquals( "stilton", @@ -594,8 +594,8 @@ public void testGeneratedBeans1() throws Exception { 7 ); // just documenting toString() result: - assertEquals( "Person( age=7, likes=Cheese( type=stilton ) )", - person.toString() ); +// assertEquals( "Person( age=7, likes=Cheese( type=stilton ) )", +// person.toString() ); // inserting fact wm.insert( person );
2e0bf34ee31beda97f21430ab4869f1e842d22e9
restlet-framework-java
Fixed javadocs.--
p
https://github.com/restlet/restlet-framework-java
diff --git a/modules/org.restlet.android/src/org/restlet/Component.java b/modules/org.restlet.android/src/org/restlet/Component.java index 5ef02ce292..1c6e964505 100644 --- a/modules/org.restlet.android/src/org/restlet/Component.java +++ b/modules/org.restlet.android/src/org/restlet/Component.java @@ -336,11 +336,6 @@ public List<VirtualHost> getHosts() { * {@link Context#getServerDispatcher()} method, but the internal router is * easily addressable via an URI scheme and can be fully private to the * current Component.<br> - * <br> - * The second use case is the composition/mash-up of several representations - * via the {@link org.restlet.routing.Transformer} class for example. For - * this you can leverage the XPath's document() function or the XSLT's - * include and import elements with RIAP URIs. * * @return The private internal router. */ diff --git a/modules/org.restlet.android/src/org/restlet/representation/XmlRepresentation.java b/modules/org.restlet.android/src/org/restlet/representation/XmlRepresentation.java index c8f24f7996..c3ec6e7339 100644 --- a/modules/org.restlet.android/src/org/restlet/representation/XmlRepresentation.java +++ b/modules/org.restlet.android/src/org/restlet/representation/XmlRepresentation.java @@ -179,7 +179,7 @@ private Map<String, String> getNamespaces() { * TODO Seems unused. * * @param prefix - * @return + * @return result */ public String getNamespaceURI(String prefix) { return this.namespaces.get(prefix); @@ -189,7 +189,7 @@ public String getNamespaceURI(String prefix) { * TODO Seems unused. * * @param namespaceURI - * @return + * @return result */ public String getPrefix(String namespaceURI) { String result = null; @@ -212,7 +212,7 @@ public String getPrefix(String namespaceURI) { * TODO Seems unused. 
* * @param namespaceURI - * @return + * @return result */ public Iterator<String> getPrefixes(String namespaceURI) { final List<String> result = new ArrayList<String>(); diff --git a/modules/org.restlet.gae/src/org/restlet/ext/net/HttpClientHelper.java b/modules/org.restlet.gae/src/org/restlet/ext/net/HttpClientHelper.java index c981078aa3..292e3baa85 100644 --- a/modules/org.restlet.gae/src/org/restlet/ext/net/HttpClientHelper.java +++ b/modules/org.restlet.gae/src/org/restlet/ext/net/HttpClientHelper.java @@ -87,9 +87,6 @@ * </tr> * </table> * - * It is also possible to specify a hostname verifier for HTTPS connections. See - * the {@link #getHostnameVerifier()} method for details. - * * Note that by default, the {@link HttpURLConnection} class as implemented by * Sun will retry a request if an IO exception is caught, for example due to a * connection reset by the server. This can be annoying, especially because the diff --git a/modules/org.restlet/src/org/restlet/resource/ServerResource.java b/modules/org.restlet/src/org/restlet/resource/ServerResource.java index 3d090dffd8..7ab66ade01 100644 --- a/modules/org.restlet/src/org/restlet/resource/ServerResource.java +++ b/modules/org.restlet/src/org/restlet/resource/ServerResource.java @@ -161,7 +161,7 @@ protected Representation delete() throws ResourceException { * The variant of the response entity. * @return The optional response entity. * @throws ResourceException - * @see {@link ServerResource#get(Variant)} + * @see #get(Variant) * @see <a * href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.7" * >HTTP DELETE method</a> @@ -630,7 +630,7 @@ protected Representation get() throws ResourceException { * @param variant * The variant whose full representation must be returned. * @return The resource's representation. 
- * @see {@link ServerResource#get(Variant)} + * @see #get(Variant) * @throws ResourceException */ protected Representation get(Variant variant) throws ResourceException { @@ -981,7 +981,7 @@ protected Representation head() throws ResourceException { * @param variant * The variant whose full representation must be returned. * @return The resource's representation. - * @see {@link ServerResource#get(Variant)} + * @see #get(Variant) * @throws ResourceException */ protected Representation head(Variant variant) throws ResourceException { @@ -1066,7 +1066,7 @@ protected Representation options() throws ResourceException { * @param variant * The variant of the response entity. * @return The optional response entity. - * @see {@link ServerResource#get(Variant)} + * @see #get(Variant) */ protected Representation options(Variant variant) throws ResourceException { setStatus(Status.CLIENT_ERROR_METHOD_NOT_ALLOWED); @@ -1082,7 +1082,7 @@ protected Representation options(Variant variant) throws ResourceException { * The posted entity. * @return The optional response entity. * @throws ResourceException - * @see {@link ServerResource#get(Variant)} + * @see #get(Variant) * @see <a * href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.5">HTTP * POST method</a> @@ -1144,7 +1144,7 @@ protected Representation put(Representation representation) * The variant of the response entity. * @return The optional result entity. * @throws ResourceException - * @see {@link ServerResource#get(Variant)} + * @see #get(Variant) * @see <a * href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.6" * >HTTP PUT method</a>
2318498c7712a3147bdbd8145138ab8ff91a1772
Delta Spike
removed obsolete TODO for Weld This scenario must not happen. The ClassLoader used to load the Extension is also the one used at shutdown. Even in EAR scenarios with a hierarchic setup we will hopefully get the correct TCCL. The mem will get freed up lastest after the EAR gets undeployed anyway. Sparse re-deployments of portions of an EAR are not guaranteed to work by the EE spec neither.
p
https://github.com/apache/deltaspike
diff --git a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/provider/BeanManagerProvider.java b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/provider/BeanManagerProvider.java index 5cb361fc9..3db6f3a81 100644 --- a/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/provider/BeanManagerProvider.java +++ b/deltaspike/core/api/src/main/java/org/apache/deltaspike/core/api/provider/BeanManagerProvider.java @@ -242,9 +242,6 @@ public void cleanupStoredBeanManagerOnShutdown(@Observes BeforeShutdown beforeSh ClassLoader classLoader = ClassUtils.getClassLoader(null); bmpSingleton.bmInfos.remove(classLoader); - - //X TODO this might not be enough as there might be - //X ClassLoaders used during Weld startup which are not the TCCL... } /**
bf9eba31da7599606e23993949f0d3f9858bf200
Valadoc
libvaladoc: gtkdoc: Add support for important
a
https://github.com/GNOME/vala/
diff --git a/src/libvaladoc/documentation/gtkdoccommentparser.vala b/src/libvaladoc/documentation/gtkdoccommentparser.vala index bc876a7d6e..b60f3ecae5 100644 --- a/src/libvaladoc/documentation/gtkdoccommentparser.vala +++ b/src/libvaladoc/documentation/gtkdoccommentparser.vala @@ -617,6 +617,10 @@ public class Valadoc.Gtkdoc.Parser : Object, ResourceLocator { return container; } + private Note? parse_docbook_important () { + return (Note?) parse_docbook_information_box_template ("important", factory.create_note ()); + } + private Note? parse_docbook_note () { return (Note?) parse_docbook_information_box_template ("note", factory.create_note ()); } @@ -1022,6 +1026,8 @@ public class Valadoc.Gtkdoc.Parser : Object, ResourceLocator { this.append_block_content_not_null (content, parse_docbook_warning ()); } else if (current.type == TokenType.XML_OPEN && current.content == "note") { this.append_block_content_not_null (content, parse_docbook_note ()); + } else if (current.type == TokenType.XML_OPEN && current.content == "important") { + this.append_block_content_not_null (content, parse_docbook_important ()); } else if (current.type == TokenType.XML_OPEN && current.content == "refsect2") { this.append_block_content_not_null_all (content, parse_docbook_refsect2 ()); } else if (current.type == TokenType.GTKDOC_PARAGRAPH) {
8bd2486b519ed6dcdc638843514d9034b7f3c49f
hadoop
HADOOP-6148. Implement a fast
a
https://github.com/apache/hadoop
diff --git a/CHANGES.txt b/CHANGES.txt index 88ca9c8f0d2cd..06cf3033d5d60 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -475,6 +475,9 @@ Trunk (unreleased changes) HADOOP-6142. Update documentation and use of harchives for relative paths added in MAPREDUCE-739. (Mahadev Konar via cdouglas) + HADOOP-6148. Implement a fast, pure Java CRC32 calculator which outperforms + java.util.zip.CRC32. (Todd Lipcon and Scott Carey via szetszwo) + OPTIMIZATIONS HADOOP-5595. NameNode does not need to run a replicator to choose a diff --git a/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java index 72a09bd75f2c4..6f9701e4d729b 100644 --- a/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java +++ b/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java @@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.Progressable; +import org.apache.hadoop.util.PureJavaCrc32; import org.apache.hadoop.util.StringUtils; /**************************************************************** @@ -135,7 +136,7 @@ public ChecksumFSInputChecker(ChecksumFileSystem fs, Path file, int bufferSize) if (!Arrays.equals(version, CHECKSUM_VERSION)) throw new IOException("Not a checksum file: "+sumFile); this.bytesPerSum = sums.readInt(); - set(fs.verifyChecksum, new CRC32(), bytesPerSum, 4); + set(fs.verifyChecksum, new PureJavaCrc32(), bytesPerSum, 4); } catch (FileNotFoundException e) { // quietly ignore set(fs.verifyChecksum, null, 1, 0); } catch (IOException e) { // loudly ignore @@ -330,7 +331,7 @@ public ChecksumFSOutputSummer(ChecksumFileSystem fs, long blockSize, Progressable progress) throws IOException { - super(new CRC32(), fs.getBytesPerSum(), 4); + super(new PureJavaCrc32(), fs.getBytesPerSum(), 4); int bytesPerSum = fs.getBytesPerSum(); this.datas = fs.getRawFileSystem().create(file, overwrite, bufferSize, replication, blockSize, progress); diff --git 
a/src/java/org/apache/hadoop/util/DataChecksum.java b/src/java/org/apache/hadoop/util/DataChecksum.java index 9aa339025b3d3..eb529bc483e64 100644 --- a/src/java/org/apache/hadoop/util/DataChecksum.java +++ b/src/java/org/apache/hadoop/util/DataChecksum.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.util; +package org.apache.hadoop.util; import java.util.zip.Checksum; import java.util.zip.CRC32; @@ -51,7 +51,7 @@ public static DataChecksum newDataChecksum( int type, int bytesPerChecksum ) { return new DataChecksum( CHECKSUM_NULL, new ChecksumNull(), CHECKSUM_NULL_SIZE, bytesPerChecksum ); case CHECKSUM_CRC32 : - return new DataChecksum( CHECKSUM_CRC32, new CRC32(), + return new DataChecksum( CHECKSUM_CRC32, new PureJavaCrc32(), CHECKSUM_CRC32_SIZE, bytesPerChecksum ); default: return null; @@ -205,10 +205,10 @@ public int getBytesPerChecksum() { public int getNumBytesInSum() { return inSum; } - - public static final int SIZE_OF_INTEGER = Integer.SIZE / Byte.SIZE; + + public static final int SIZE_OF_INTEGER = Integer.SIZE / Byte.SIZE; static public int getChecksumHeaderSize() { - return 1 + SIZE_OF_INTEGER; // type byte, bytesPerChecksum int + return 1 + SIZE_OF_INTEGER; // type byte, bytesPerChecksum int } //Checksum Interface. Just a wrapper around member summer. public long getValue() { diff --git a/src/java/org/apache/hadoop/util/PureJavaCrc32.java b/src/java/org/apache/hadoop/util/PureJavaCrc32.java new file mode 100644 index 0000000000000..206f931b2674f --- /dev/null +++ b/src/java/org/apache/hadoop/util/PureJavaCrc32.java @@ -0,0 +1,351 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.util; + +import java.util.zip.Checksum; + +/** + * A pure-java implementation of the CRC32 checksum that uses + * the same polynomial as the built-in native CRC32. + * + * This is to avoid the JNI overhead for certain uses of Checksumming + * where many small pieces of data are checksummed in succession. + * + * The current version is ~10x to 1.8x as fast as Sun's native + * java.util.zip.CRC32 in Java 1.6 + * + * @see java.util.zip.CRC32 + */ +public class PureJavaCrc32 implements Checksum { + + /** the current CRC value, bit-flipped */ + private int crc; + + public PureJavaCrc32() { + reset(); + } + + /** {@inheritDoc} */ + public long getValue() { + return (~crc) & 0xffffffffL; + } + + /** {@inheritDoc} */ + public void reset() { + crc = 0xffffffff; + } + + /** {@inheritDoc} */ + public void update(byte[] b, int off, int len) { + while(len > 3) { + int c0 = crc ^ b[off++]; + int c1 = (crc >>>= 8) ^ b[off++]; + int c2 = (crc >>>= 8) ^ b[off++]; + int c3 = (crc >>>= 8) ^ b[off++]; + crc = T4[c0 & 0xff] ^ T3[c1 & 0xff] ^ T2[c2 & 0xff] ^ T1[c3 & 0xff]; + len -= 4; + } + while(len > 0) { + crc = (crc >>> 8) ^ T1[(crc ^ b[off++]) & 0xff]; + len--; + } + } + + /** {@inheritDoc} */ + final public void update(int b) { + crc = (crc >>> 8) ^ T1[(crc ^ b) & 0xff]; + } + + /** + * Pre-generated lookup tables. 
For the code to generate these tables + * please see HDFS-297. + */ + + /** T1[x] is ~CRC(x) */ + private static final int[] T1 = new int[] { + 0x0, 0x77073096, 0xee0e612c, 0x990951ba, + 0x76dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, + 0xedb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, + 0x9b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, + 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, + 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, + 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, + 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, + 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172, + 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, + 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, + 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59, + 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, + 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, + 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, + 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, + 0x76dc4190, 0x1db7106, 0x98d220bc, 0xefd5102a, + 0x71b18589, 0x6b6b51f, 0x9fbfe4a5, 0xe8b8d433, + 0x7807c9a2, 0xf00f934, 0x9609a88e, 0xe10e9818, + 0x7f6a0dbb, 0x86d3d2d, 0x91646c97, 0xe6635c01, + 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, + 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, + 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, + 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65, + 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, + 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, + 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0, + 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, + 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, + 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, + 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, + 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, + 0xedb88320, 0x9abfb3b6, 0x3b6e20c, 0x74b1d29a, + 0xead54739, 0x9dd277af, 0x4db2615, 0x73dc1683, + 0xe3630b12, 0x94643b84, 0xd6d6a3e, 0x7a6a5aa8, + 0xe40ecf0b, 0x9309ff9d, 0xa00ae27, 0x7d079eb1, + 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, + 0xf762575d, 0x806567cb, 
0x196c3671, 0x6e6b06e7, + 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc, + 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, + 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, + 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b, + 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, + 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, + 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, + 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, + 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, + 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d, + 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x26d930a, + 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x5005713, + 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0xcb61b38, + 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0xbdbdf21, + 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, + 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777, + 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, + 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, + 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, + 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, + 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, + 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, + 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, + 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, + 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94, + 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d + }; + + /** T2[x] is ~CRC(x followed by one 0x00 byte) */ + private static final int[] T2 = new int[] { + 0x0, 0x191b3141, 0x32366282, 0x2b2d53c3, + 0x646cc504, 0x7d77f445, 0x565aa786, 0x4f4196c7, + 0xc8d98a08, 0xd1c2bb49, 0xfaefe88a, 0xe3f4d9cb, + 0xacb54f0c, 0xb5ae7e4d, 0x9e832d8e, 0x87981ccf, + 0x4ac21251, 0x53d92310, 0x78f470d3, 0x61ef4192, + 0x2eaed755, 0x37b5e614, 0x1c98b5d7, 0x5838496, + 0x821b9859, 0x9b00a918, 0xb02dfadb, 0xa936cb9a, + 0xe6775d5d, 0xff6c6c1c, 0xd4413fdf, 0xcd5a0e9e, + 0x958424a2, 0x8c9f15e3, 0xa7b24620, 0xbea97761, + 0xf1e8e1a6, 0xe8f3d0e7, 0xc3de8324, 0xdac5b265, + 0x5d5daeaa, 0x44469feb, 0x6f6bcc28, 0x7670fd69, + 0x39316bae, 0x202a5aef, 
0xb07092c, 0x121c386d, + 0xdf4636f3, 0xc65d07b2, 0xed705471, 0xf46b6530, + 0xbb2af3f7, 0xa231c2b6, 0x891c9175, 0x9007a034, + 0x179fbcfb, 0xe848dba, 0x25a9de79, 0x3cb2ef38, + 0x73f379ff, 0x6ae848be, 0x41c51b7d, 0x58de2a3c, + 0xf0794f05, 0xe9627e44, 0xc24f2d87, 0xdb541cc6, + 0x94158a01, 0x8d0ebb40, 0xa623e883, 0xbf38d9c2, + 0x38a0c50d, 0x21bbf44c, 0xa96a78f, 0x138d96ce, + 0x5ccc0009, 0x45d73148, 0x6efa628b, 0x77e153ca, + 0xbabb5d54, 0xa3a06c15, 0x888d3fd6, 0x91960e97, + 0xded79850, 0xc7cca911, 0xece1fad2, 0xf5facb93, + 0x7262d75c, 0x6b79e61d, 0x4054b5de, 0x594f849f, + 0x160e1258, 0xf152319, 0x243870da, 0x3d23419b, + 0x65fd6ba7, 0x7ce65ae6, 0x57cb0925, 0x4ed03864, + 0x191aea3, 0x188a9fe2, 0x33a7cc21, 0x2abcfd60, + 0xad24e1af, 0xb43fd0ee, 0x9f12832d, 0x8609b26c, + 0xc94824ab, 0xd05315ea, 0xfb7e4629, 0xe2657768, + 0x2f3f79f6, 0x362448b7, 0x1d091b74, 0x4122a35, + 0x4b53bcf2, 0x52488db3, 0x7965de70, 0x607eef31, + 0xe7e6f3fe, 0xfefdc2bf, 0xd5d0917c, 0xcccba03d, + 0x838a36fa, 0x9a9107bb, 0xb1bc5478, 0xa8a76539, + 0x3b83984b, 0x2298a90a, 0x9b5fac9, 0x10aecb88, + 0x5fef5d4f, 0x46f46c0e, 0x6dd93fcd, 0x74c20e8c, + 0xf35a1243, 0xea412302, 0xc16c70c1, 0xd8774180, + 0x9736d747, 0x8e2de606, 0xa500b5c5, 0xbc1b8484, + 0x71418a1a, 0x685abb5b, 0x4377e898, 0x5a6cd9d9, + 0x152d4f1e, 0xc367e5f, 0x271b2d9c, 0x3e001cdd, + 0xb9980012, 0xa0833153, 0x8bae6290, 0x92b553d1, + 0xddf4c516, 0xc4eff457, 0xefc2a794, 0xf6d996d5, + 0xae07bce9, 0xb71c8da8, 0x9c31de6b, 0x852aef2a, + 0xca6b79ed, 0xd37048ac, 0xf85d1b6f, 0xe1462a2e, + 0x66de36e1, 0x7fc507a0, 0x54e85463, 0x4df36522, + 0x2b2f3e5, 0x1ba9c2a4, 0x30849167, 0x299fa026, + 0xe4c5aeb8, 0xfdde9ff9, 0xd6f3cc3a, 0xcfe8fd7b, + 0x80a96bbc, 0x99b25afd, 0xb29f093e, 0xab84387f, + 0x2c1c24b0, 0x350715f1, 0x1e2a4632, 0x7317773, + 0x4870e1b4, 0x516bd0f5, 0x7a468336, 0x635db277, + 0xcbfad74e, 0xd2e1e60f, 0xf9ccb5cc, 0xe0d7848d, + 0xaf96124a, 0xb68d230b, 0x9da070c8, 0x84bb4189, + 0x3235d46, 0x1a386c07, 0x31153fc4, 0x280e0e85, + 0x674f9842, 0x7e54a903, 
0x5579fac0, 0x4c62cb81, + 0x8138c51f, 0x9823f45e, 0xb30ea79d, 0xaa1596dc, + 0xe554001b, 0xfc4f315a, 0xd7626299, 0xce7953d8, + 0x49e14f17, 0x50fa7e56, 0x7bd72d95, 0x62cc1cd4, + 0x2d8d8a13, 0x3496bb52, 0x1fbbe891, 0x6a0d9d0, + 0x5e7ef3ec, 0x4765c2ad, 0x6c48916e, 0x7553a02f, + 0x3a1236e8, 0x230907a9, 0x824546a, 0x113f652b, + 0x96a779e4, 0x8fbc48a5, 0xa4911b66, 0xbd8a2a27, + 0xf2cbbce0, 0xebd08da1, 0xc0fdde62, 0xd9e6ef23, + 0x14bce1bd, 0xda7d0fc, 0x268a833f, 0x3f91b27e, + 0x70d024b9, 0x69cb15f8, 0x42e6463b, 0x5bfd777a, + 0xdc656bb5, 0xc57e5af4, 0xee530937, 0xf7483876, + 0xb809aeb1, 0xa1129ff0, 0x8a3fcc33, 0x9324fd72 + }; + + /** T3[x] is ~CRC(x followed by two 0x00 bytes) */ + private static final int[] T3 = new int[] { + 0x0, 0x1c26a37, 0x384d46e, 0x246be59, + 0x709a8dc, 0x6cbc2eb, 0x48d7cb2, 0x54f1685, + 0xe1351b8, 0xfd13b8f, 0xd9785d6, 0xc55efe1, + 0x91af964, 0x8d89353, 0xa9e2d0a, 0xb5c473d, + 0x1c26a370, 0x1de4c947, 0x1fa2771e, 0x1e601d29, + 0x1b2f0bac, 0x1aed619b, 0x18abdfc2, 0x1969b5f5, + 0x1235f2c8, 0x13f798ff, 0x11b126a6, 0x10734c91, + 0x153c5a14, 0x14fe3023, 0x16b88e7a, 0x177ae44d, + 0x384d46e0, 0x398f2cd7, 0x3bc9928e, 0x3a0bf8b9, + 0x3f44ee3c, 0x3e86840b, 0x3cc03a52, 0x3d025065, + 0x365e1758, 0x379c7d6f, 0x35dac336, 0x3418a901, + 0x3157bf84, 0x3095d5b3, 0x32d36bea, 0x331101dd, + 0x246be590, 0x25a98fa7, 0x27ef31fe, 0x262d5bc9, + 0x23624d4c, 0x22a0277b, 0x20e69922, 0x2124f315, + 0x2a78b428, 0x2bbade1f, 0x29fc6046, 0x283e0a71, + 0x2d711cf4, 0x2cb376c3, 0x2ef5c89a, 0x2f37a2ad, + 0x709a8dc0, 0x7158e7f7, 0x731e59ae, 0x72dc3399, + 0x7793251c, 0x76514f2b, 0x7417f172, 0x75d59b45, + 0x7e89dc78, 0x7f4bb64f, 0x7d0d0816, 0x7ccf6221, + 0x798074a4, 0x78421e93, 0x7a04a0ca, 0x7bc6cafd, + 0x6cbc2eb0, 0x6d7e4487, 0x6f38fade, 0x6efa90e9, + 0x6bb5866c, 0x6a77ec5b, 0x68315202, 0x69f33835, + 0x62af7f08, 0x636d153f, 0x612bab66, 0x60e9c151, + 0x65a6d7d4, 0x6464bde3, 0x662203ba, 0x67e0698d, + 0x48d7cb20, 0x4915a117, 0x4b531f4e, 0x4a917579, + 0x4fde63fc, 0x4e1c09cb, 0x4c5ab792, 
0x4d98dda5, + 0x46c49a98, 0x4706f0af, 0x45404ef6, 0x448224c1, + 0x41cd3244, 0x400f5873, 0x4249e62a, 0x438b8c1d, + 0x54f16850, 0x55330267, 0x5775bc3e, 0x56b7d609, + 0x53f8c08c, 0x523aaabb, 0x507c14e2, 0x51be7ed5, + 0x5ae239e8, 0x5b2053df, 0x5966ed86, 0x58a487b1, + 0x5deb9134, 0x5c29fb03, 0x5e6f455a, 0x5fad2f6d, + 0xe1351b80, 0xe0f771b7, 0xe2b1cfee, 0xe373a5d9, + 0xe63cb35c, 0xe7fed96b, 0xe5b86732, 0xe47a0d05, + 0xef264a38, 0xeee4200f, 0xeca29e56, 0xed60f461, + 0xe82fe2e4, 0xe9ed88d3, 0xebab368a, 0xea695cbd, + 0xfd13b8f0, 0xfcd1d2c7, 0xfe976c9e, 0xff5506a9, + 0xfa1a102c, 0xfbd87a1b, 0xf99ec442, 0xf85cae75, + 0xf300e948, 0xf2c2837f, 0xf0843d26, 0xf1465711, + 0xf4094194, 0xf5cb2ba3, 0xf78d95fa, 0xf64fffcd, + 0xd9785d60, 0xd8ba3757, 0xdafc890e, 0xdb3ee339, + 0xde71f5bc, 0xdfb39f8b, 0xddf521d2, 0xdc374be5, + 0xd76b0cd8, 0xd6a966ef, 0xd4efd8b6, 0xd52db281, + 0xd062a404, 0xd1a0ce33, 0xd3e6706a, 0xd2241a5d, + 0xc55efe10, 0xc49c9427, 0xc6da2a7e, 0xc7184049, + 0xc25756cc, 0xc3953cfb, 0xc1d382a2, 0xc011e895, + 0xcb4dafa8, 0xca8fc59f, 0xc8c97bc6, 0xc90b11f1, + 0xcc440774, 0xcd866d43, 0xcfc0d31a, 0xce02b92d, + 0x91af9640, 0x906dfc77, 0x922b422e, 0x93e92819, + 0x96a63e9c, 0x976454ab, 0x9522eaf2, 0x94e080c5, + 0x9fbcc7f8, 0x9e7eadcf, 0x9c381396, 0x9dfa79a1, + 0x98b56f24, 0x99770513, 0x9b31bb4a, 0x9af3d17d, + 0x8d893530, 0x8c4b5f07, 0x8e0de15e, 0x8fcf8b69, + 0x8a809dec, 0x8b42f7db, 0x89044982, 0x88c623b5, + 0x839a6488, 0x82580ebf, 0x801eb0e6, 0x81dcdad1, + 0x8493cc54, 0x8551a663, 0x8717183a, 0x86d5720d, + 0xa9e2d0a0, 0xa820ba97, 0xaa6604ce, 0xaba46ef9, + 0xaeeb787c, 0xaf29124b, 0xad6fac12, 0xacadc625, + 0xa7f18118, 0xa633eb2f, 0xa4755576, 0xa5b73f41, + 0xa0f829c4, 0xa13a43f3, 0xa37cfdaa, 0xa2be979d, + 0xb5c473d0, 0xb40619e7, 0xb640a7be, 0xb782cd89, + 0xb2cddb0c, 0xb30fb13b, 0xb1490f62, 0xb08b6555, + 0xbbd72268, 0xba15485f, 0xb853f606, 0xb9919c31, + 0xbcde8ab4, 0xbd1ce083, 0xbf5a5eda, 0xbe9834ed + }; + + /** T4[x] is ~CRC(x followed by three 0x00 bytes) */ + private static final 
int[] T4 = new int[] { + 0x0, 0xb8bc6765, 0xaa09c88b, 0x12b5afee, + 0x8f629757, 0x37def032, 0x256b5fdc, 0x9dd738b9, + 0xc5b428ef, 0x7d084f8a, 0x6fbde064, 0xd7018701, + 0x4ad6bfb8, 0xf26ad8dd, 0xe0df7733, 0x58631056, + 0x5019579f, 0xe8a530fa, 0xfa109f14, 0x42acf871, + 0xdf7bc0c8, 0x67c7a7ad, 0x75720843, 0xcdce6f26, + 0x95ad7f70, 0x2d111815, 0x3fa4b7fb, 0x8718d09e, + 0x1acfe827, 0xa2738f42, 0xb0c620ac, 0x87a47c9, + 0xa032af3e, 0x188ec85b, 0xa3b67b5, 0xb28700d0, + 0x2f503869, 0x97ec5f0c, 0x8559f0e2, 0x3de59787, + 0x658687d1, 0xdd3ae0b4, 0xcf8f4f5a, 0x7733283f, + 0xeae41086, 0x525877e3, 0x40edd80d, 0xf851bf68, + 0xf02bf8a1, 0x48979fc4, 0x5a22302a, 0xe29e574f, + 0x7f496ff6, 0xc7f50893, 0xd540a77d, 0x6dfcc018, + 0x359fd04e, 0x8d23b72b, 0x9f9618c5, 0x272a7fa0, + 0xbafd4719, 0x241207c, 0x10f48f92, 0xa848e8f7, + 0x9b14583d, 0x23a83f58, 0x311d90b6, 0x89a1f7d3, + 0x1476cf6a, 0xaccaa80f, 0xbe7f07e1, 0x6c36084, + 0x5ea070d2, 0xe61c17b7, 0xf4a9b859, 0x4c15df3c, + 0xd1c2e785, 0x697e80e0, 0x7bcb2f0e, 0xc377486b, + 0xcb0d0fa2, 0x73b168c7, 0x6104c729, 0xd9b8a04c, + 0x446f98f5, 0xfcd3ff90, 0xee66507e, 0x56da371b, + 0xeb9274d, 0xb6054028, 0xa4b0efc6, 0x1c0c88a3, + 0x81dbb01a, 0x3967d77f, 0x2bd27891, 0x936e1ff4, + 0x3b26f703, 0x839a9066, 0x912f3f88, 0x299358ed, + 0xb4446054, 0xcf80731, 0x1e4da8df, 0xa6f1cfba, + 0xfe92dfec, 0x462eb889, 0x549b1767, 0xec277002, + 0x71f048bb, 0xc94c2fde, 0xdbf98030, 0x6345e755, + 0x6b3fa09c, 0xd383c7f9, 0xc1366817, 0x798a0f72, + 0xe45d37cb, 0x5ce150ae, 0x4e54ff40, 0xf6e89825, + 0xae8b8873, 0x1637ef16, 0x48240f8, 0xbc3e279d, + 0x21e91f24, 0x99557841, 0x8be0d7af, 0x335cb0ca, + 0xed59b63b, 0x55e5d15e, 0x47507eb0, 0xffec19d5, + 0x623b216c, 0xda874609, 0xc832e9e7, 0x708e8e82, + 0x28ed9ed4, 0x9051f9b1, 0x82e4565f, 0x3a58313a, + 0xa78f0983, 0x1f336ee6, 0xd86c108, 0xb53aa66d, + 0xbd40e1a4, 0x5fc86c1, 0x1749292f, 0xaff54e4a, + 0x322276f3, 0x8a9e1196, 0x982bbe78, 0x2097d91d, + 0x78f4c94b, 0xc048ae2e, 0xd2fd01c0, 0x6a4166a5, + 0xf7965e1c, 0x4f2a3979, 0x5d9f9697, 
0xe523f1f2, + 0x4d6b1905, 0xf5d77e60, 0xe762d18e, 0x5fdeb6eb, + 0xc2098e52, 0x7ab5e937, 0x680046d9, 0xd0bc21bc, + 0x88df31ea, 0x3063568f, 0x22d6f961, 0x9a6a9e04, + 0x7bda6bd, 0xbf01c1d8, 0xadb46e36, 0x15080953, + 0x1d724e9a, 0xa5ce29ff, 0xb77b8611, 0xfc7e174, + 0x9210d9cd, 0x2aacbea8, 0x38191146, 0x80a57623, + 0xd8c66675, 0x607a0110, 0x72cfaefe, 0xca73c99b, + 0x57a4f122, 0xef189647, 0xfdad39a9, 0x45115ecc, + 0x764dee06, 0xcef18963, 0xdc44268d, 0x64f841e8, + 0xf92f7951, 0x41931e34, 0x5326b1da, 0xeb9ad6bf, + 0xb3f9c6e9, 0xb45a18c, 0x19f00e62, 0xa14c6907, + 0x3c9b51be, 0x842736db, 0x96929935, 0x2e2efe50, + 0x2654b999, 0x9ee8defc, 0x8c5d7112, 0x34e11677, + 0xa9362ece, 0x118a49ab, 0x33fe645, 0xbb838120, + 0xe3e09176, 0x5b5cf613, 0x49e959fd, 0xf1553e98, + 0x6c820621, 0xd43e6144, 0xc68bceaa, 0x7e37a9cf, + 0xd67f4138, 0x6ec3265d, 0x7c7689b3, 0xc4caeed6, + 0x591dd66f, 0xe1a1b10a, 0xf3141ee4, 0x4ba87981, + 0x13cb69d7, 0xab770eb2, 0xb9c2a15c, 0x17ec639, + 0x9ca9fe80, 0x241599e5, 0x36a0360b, 0x8e1c516e, + 0x866616a7, 0x3eda71c2, 0x2c6fde2c, 0x94d3b949, + 0x90481f0, 0xb1b8e695, 0xa30d497b, 0x1bb12e1e, + 0x43d23e48, 0xfb6e592d, 0xe9dbf6c3, 0x516791a6, + 0xccb0a91f, 0x740cce7a, 0x66b96194, 0xde0506f1 + }; + +} diff --git a/src/test/core/org/apache/hadoop/util/TestPureJavaCrc32.java b/src/test/core/org/apache/hadoop/util/TestPureJavaCrc32.java new file mode 100644 index 0000000000000..715d8f620c9b6 --- /dev/null +++ b/src/test/core/org/apache/hadoop/util/TestPureJavaCrc32.java @@ -0,0 +1,171 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.util; + +import junit.framework.TestCase; +import java.util.zip.CRC32; +import java.util.zip.Checksum; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Random; + +/** + * Unit test to verify that the pure-Java CRC32 algorithm gives + * the same results as the built-in implementation. + */ +public class TestPureJavaCrc32 extends TestCase { + private CRC32 theirs; + private PureJavaCrc32 ours; + + public void setUp() { + theirs = new CRC32(); + ours = new PureJavaCrc32(); + } + + public void testCorrectness() throws Exception { + checkSame(); + + theirs.update(104); + ours.update(104); + checkSame(); + + checkOnBytes(new byte[] {40, 60, 97, -70}, false); + + checkOnBytes("hello world!".getBytes("UTF-8"), false); + + for (int i = 0; i < 10000; i++) { + byte randomBytes[] = new byte[new Random().nextInt(2048)]; + new Random().nextBytes(randomBytes); + checkOnBytes(randomBytes, false); + } + + } + + private void checkOnBytes(byte[] bytes, boolean print) { + theirs.reset(); + ours.reset(); + checkSame(); + + for (int i = 0; i < bytes.length; i++) { + ours.update(bytes[i]); + theirs.update(bytes[i]); + checkSame(); + } + + if (print) { + System.out.println("theirs:\t" + Long.toHexString(theirs.getValue()) + + "\nours:\t" + Long.toHexString(ours.getValue())); + } + + theirs.reset(); + ours.reset(); + + ours.update(bytes, 0, bytes.length); + theirs.update(bytes, 0, bytes.length); + if (print) { + System.out.println("theirs:\t" + Long.toHexString(theirs.getValue()) + + "\nours:\t" + 
Long.toHexString(ours.getValue())); + } + + checkSame(); + + if (bytes.length >= 10) { + ours.update(bytes, 5, 5); + theirs.update(bytes, 5, 5); + checkSame(); + } + } + + private void checkSame() { + assertEquals(theirs.getValue(), ours.getValue()); + } + + /** + * Performance tests to compare performance of the Pure Java implementation + * to the built-in java.util.zip implementation. This can be run from the + * command line with: + * + * java -cp path/to/test/classes:path/to/common/classes \ + * 'org.apache.hadoop.util.TestPureJavaCrc32$PerformanceTest' + * + * The output is in JIRA table format. + */ + public static class PerformanceTest { + public static final int MAX_LEN = 32*1024*1024; // up to 32MB chunks + public static final int BYTES_PER_SIZE = MAX_LEN * 4; + + public static LinkedHashMap<String, Checksum> getImplsToTest() { + LinkedHashMap<String, Checksum> impls = + new LinkedHashMap<String, Checksum>(); + impls.put("BuiltIn", new CRC32()); + impls.put("PureJava", new PureJavaCrc32()); + return impls; + } + + public static void main(String args[]) { + LinkedHashMap<String, Checksum> impls = getImplsToTest(); + + Random rand = new Random(); + byte[] bytes = new byte[MAX_LEN]; + rand.nextBytes(bytes); + + + // Print header + System.out.printf("||num bytes||"); + for (String entry : impls.keySet()) { + System.out.printf(entry + " MB/sec||"); + } + System.out.printf("\n"); + + // Warm up implementations to get jit going. 
+ for (Map.Entry<String, Checksum> entry : impls.entrySet()) { + doBench("warmUp" + entry.getKey(), + entry.getValue(), bytes, 2, false); + doBench("warmUp" + entry.getKey(), + entry.getValue(), bytes, 2101, false); + } + + // Test on a variety of sizes + for (int size = 1; size < MAX_LEN; size *= 2) { + System.out.printf("| %d\t|", size); + + for (Map.Entry<String, Checksum> entry : impls.entrySet()) { + System.gc(); + doBench(entry.getKey(), entry.getValue(), bytes, size, true); + } + System.out.printf("\n"); + } + } + + private static void doBench(String id, Checksum crc, + byte[] bytes, int size, boolean printout) { + long st = System.nanoTime(); + int trials = BYTES_PER_SIZE / size; + for (int i = 0; i < trials; i++) { + crc.update(bytes, 0, size); + } + long et = System.nanoTime(); + + double mbProcessed = trials * size / 1024.0 / 1024.0; + double secsElapsed = (et - st) / 1000000000.0d; + if (printout) { + System.out.printf("%.3f \t|", mbProcessed / secsElapsed); + } + } + } +}
cb51d3c5762e6f2808f9ab358943b3dc85e9bd95
elasticsearch
Sorting on _score in the URI format is reversed
a
https://github.com/elastic/elasticsearch
diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java index 3cd3762651c9a..8ca9d706d4768 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/FieldSortBuilder.java @@ -64,8 +64,8 @@ public FieldSortBuilder(String fieldName) { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(fieldName); - if (order == SortOrder.DESC) { - builder.field("reverse", true); + if (order != null) { + builder.field("order", order.toString()); } if (missing != null) { builder.field("missing", missing); diff --git a/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/SortOrder.java b/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/SortOrder.java index 750cb601123f8..757f0fbc939c5 100644 --- a/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/SortOrder.java +++ b/modules/elasticsearch/src/main/java/org/elasticsearch/search/sort/SortOrder.java @@ -28,9 +28,17 @@ public enum SortOrder { /** * Ascending order. */ - ASC, + ASC { + @Override public String toString() { + return "asc"; + } + }, /** * Descending order. 
*/ - DESC + DESC { + @Override public String toString() { + return "desc"; + } + } } diff --git a/modules/test/integration/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java b/modules/test/integration/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java index 37674b53a3580..8bc68550a8d49 100644 --- a/modules/test/integration/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java +++ b/modules/test/integration/src/test/java/org/elasticsearch/test/integration/search/sort/SimpleSortTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.integration.AbstractNodesTests; +import org.hamcrest.Matchers; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; @@ -112,6 +113,41 @@ protected Client getClient() { } } + @Test public void testScoreSortDirection() throws Exception { + try { + client.admin().indices().prepareDelete("test").execute().actionGet(); + } catch (Exception e) { + // ignore + } + client.admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("number_of_shards", 1)).execute().actionGet(); + client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); + + client.prepareIndex("test", "type", "1").setSource("field", 2).execute().actionGet(); + client.prepareIndex("test", "type", "2").setSource("field", 1).execute().actionGet(); + client.prepareIndex("test", "type", "3").setSource("field", 0).execute().actionGet(); + + client.admin().indices().prepareRefresh().execute().actionGet(); + + SearchResponse searchResponse = client.prepareSearch("test").setQuery(customScoreQuery(matchAllQuery()).script("_source.field")).execute().actionGet(); + assertThat(searchResponse.hits().getAt(0).getId(), equalTo("1")); + 
assertThat(searchResponse.hits().getAt(1).score(), Matchers.lessThan(searchResponse.hits().getAt(0).score())); + assertThat(searchResponse.hits().getAt(1).getId(), equalTo("2")); + assertThat(searchResponse.hits().getAt(2).score(), Matchers.lessThan(searchResponse.hits().getAt(1).score())); + assertThat(searchResponse.hits().getAt(2).getId(), equalTo("3")); + + searchResponse = client.prepareSearch("test").setQuery(customScoreQuery(matchAllQuery()).script("_source.field")).addSort("_score", SortOrder.DESC).execute().actionGet(); + assertThat(searchResponse.hits().getAt(0).getId(), equalTo("1")); + assertThat(searchResponse.hits().getAt(1).score(), Matchers.lessThan(searchResponse.hits().getAt(0).score())); + assertThat(searchResponse.hits().getAt(1).getId(), equalTo("2")); + assertThat(searchResponse.hits().getAt(2).score(), Matchers.lessThan(searchResponse.hits().getAt(1).score())); + assertThat(searchResponse.hits().getAt(2).getId(), equalTo("3")); + + searchResponse = client.prepareSearch("test").setQuery(customScoreQuery(matchAllQuery()).script("_source.field")).addSort("_score", SortOrder.DESC).execute().actionGet(); + assertThat(searchResponse.hits().getAt(2).getId(), equalTo("3")); + assertThat(searchResponse.hits().getAt(1).getId(), equalTo("2")); + assertThat(searchResponse.hits().getAt(0).getId(), equalTo("1")); + } + @Test public void testSimpleSortsSingleShard() throws Exception { testSimpleSorts(1); }
c9b6c3c36c184a03de3b5a1860f336ed834f1f04
hadoop
YARN-1635. Implemented a Leveldb based- ApplicationTimelineStore. Contributed by Billie Rinaldi. svn merge- --ignore-ancestry -c 1565868 ../../trunk/--git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1565869 13f79535-47bb-0310-9956-ffa450edef68-
a
https://github.com/apache/hadoop
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 3dc6476c85f39..80ecc54ab0999 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -778,6 +778,12 @@ <artifactId>grizzly-http-servlet</artifactId> <version>2.1.2</version> </dependency> + + <dependency> + <groupId>org.fusesource.leveldbjni</groupId> + <artifactId>leveldbjni-all</artifactId> + <version>1.8</version> + </dependency> </dependencies> </dependencyManagement> diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 4da20a6fb808a..0359628850607 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -87,6 +87,9 @@ Release 2.4.0 - UNRELEASED YARN-1566. Changed Distributed Shell to retain containers across application attempts. (Jian He via vinodkv) + YARN-1635. Implemented a Leveldb based ApplicationTimelineStore. (Billie + Rinaldi via zjshen) + IMPROVEMENTS YARN-1007. Enhance History Reader interface for Containers. (Mayank Bansal via diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java index 91458e1419f0f..d330eb41dff1d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/apptimeline/ATSPutErrors.java @@ -94,9 +94,21 @@ public void setErrors(List<ATSPutError> errors) { @Public @Unstable public static class ATSPutError { + /** + * Error code returned when no start time can be found when putting an + * entity. This occurs when the entity does not already exist in the + * store and it is put with no start time or events specified. 
+ */ + public static final int NO_START_TIME = 1; + /** + * Error code returned if an IOException is encountered when putting an + * entity. + */ + public static final int IO_EXCEPTION = 2; + private String entityId; private String entityType; - private Integer errorCode; + private int errorCode; /** * Get the entity Id @@ -144,7 +156,7 @@ public void setEntityType(String entityType) { * @return an error code */ @XmlElement(name = "errorcode") - public Integer getErrorCode() { + public int getErrorCode() { return errorCode; } @@ -154,7 +166,7 @@ public Integer getErrorCode() { * @param errorCode * an error code */ - public void setErrorCode(Integer errorCode) { + public void setErrorCode(int errorCode) { this.errorCode = errorCode; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index 5322ccd5de6a6..8c8ad16e8e46e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -1041,6 +1041,10 @@ public class YarnConfiguration extends Configuration { /** ATS store class */ public static final String ATS_STORE = ATS_PREFIX + "store.class"; + /** ATS leveldb path */ + public static final String ATS_LEVELDB_PATH_PROPERTY = + ATS_PREFIX + "leveldb-apptimeline-store.path"; + //////////////////////////////// // Other Configs //////////////////////////////// diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml index c50ea7b7087c5..cc8b12437ea03 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml @@ -1145,7 +1145,13 @@ <property> <description>Store class name for application timeline store</description> <name>yarn.ats.store.class</name> - <value>org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.MemoryApplicationTimelineStore</value> + <value>org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline.LeveldbApplicationTimelineStore</value> + </property> + + <property> + <description>Store file name for leveldb application timeline store</description> + <name>yarn.ats.leveldb-apptimeline-store.path</name> + <value>${yarn.log.dir}/ats</value> </property> <!-- Other configuration --> diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java index f2a6d3ef46131..24d1ce91e626a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/records/apptimeline/TestApplicationTimelineRecords.java @@ -117,14 +117,14 @@ public void testATSPutErrors() { ATSPutError error1 = new ATSPutError(); error1.setEntityId("entity id 1"); error1.setEntityId("entity type 1"); - error1.setErrorCode(1); + error1.setErrorCode(ATSPutError.NO_START_TIME); atsPutErrors.addError(error1); List<ATSPutError> errors = new ArrayList<ATSPutError>(); errors.add(error1); ATSPutError error2 = new ATSPutError(); error2.setEntityId("entity id 2"); error2.setEntityId("entity type 2"); - error2.setErrorCode(2); + error2.setErrorCode(ATSPutError.IO_EXCEPTION); errors.add(error2); atsPutErrors.addErrors(errors); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml index 843afe3260abd..b8f43eca7e694 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml @@ -167,6 +167,25 @@ <artifactId>jersey-test-framework-grizzly2</artifactId> <scope>test</scope> </dependency> + + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-core-asl</artifactId> + </dependency> + <dependency> + <groupId>org.codehaus.jackson</groupId> + <artifactId>jackson-mapper-asl</artifactId> + </dependency> + + <dependency> + <groupId>commons-collections</groupId> + <artifactId>commons-collections</artifactId> + </dependency> + + <dependency> + <groupId>org.fusesource.leveldbjni</groupId> + <artifactId>leveldbjni-all</artifactId> + </dependency> </dependencies> </project> diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java index 97a217dc98a0b..e448ba8bcad9d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineReader.java 
@@ -18,6 +18,7 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; +import java.io.IOException; import java.util.Collection; import java.util.EnumSet; import java.util.Set; @@ -78,13 +79,15 @@ enum Field { * retrieve (see {@link Field}). If the set of fields * contains {@link Field#LAST_EVENT_ONLY} and not * {@link Field#EVENTS}, the most recent event for - * each entity is retrieved. + * each entity is retrieved. If null, retrieves all + * fields. * @return An {@link ATSEntities} object. + * @throws IOException */ ATSEntities getEntities(String entityType, Long limit, Long windowStart, Long windowEnd, NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters, - EnumSet<Field> fieldsToRetrieve); + EnumSet<Field> fieldsToRetrieve) throws IOException; /** * This method retrieves the entity information for a given entity. @@ -95,11 +98,13 @@ ATSEntities getEntities(String entityType, * retrieve (see {@link Field}). If the set of * fields contains {@link Field#LAST_EVENT_ONLY} and * not {@link Field#EVENTS}, the most recent event - * for each entity is retrieved. + * for each entity is retrieved. If null, retrieves + * all fields. * @return An {@link ATSEntity} object. + * @throws IOException */ ATSEntity getEntity(String entity, String entityType, EnumSet<Field> - fieldsToRetrieve); + fieldsToRetrieve) throws IOException; /** * This method retrieves the events for a list of entities all of the same @@ -118,8 +123,9 @@ ATSEntity getEntity(String entity, String entityType, EnumSet<Field> * @param eventTypes Restricts the events returned to the given types. If * null, events of all types will be returned. * @return An {@link ATSEvents} object. 
+ * @throws IOException */ ATSEvents getEntityTimelines(String entityType, SortedSet<String> entityIds, Long limit, Long windowStart, - Long windowEnd, Set<String> eventTypes); + Long windowEnd, Set<String> eventTypes) throws IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java index b7bd0708e43e8..2a16833d98066 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineWriter.java @@ -23,6 +23,8 @@ import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; +import java.io.IOException; + /** * This interface is for storing application timeline information. */ @@ -37,7 +39,8 @@ public interface ApplicationTimelineWriter { * * @param data An {@link ATSEntities} object. * @return An {@link ATSPutErrors} object. 
+ * @throws IOException */ - ATSPutErrors put(ATSEntities data); + ATSPutErrors put(ATSEntities data) throws IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java similarity index 91% rename from hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityId.java rename to hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java index 26431f875693a..d22e616fd1c78 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/EntityIdentifier.java @@ -26,12 +26,12 @@ */ @Private @Unstable -public class EntityId implements Comparable<EntityId> { +public class EntityIdentifier implements Comparable<EntityIdentifier> { private String id; private String type; - public EntityId(String id, String type) { + public EntityIdentifier(String id, String type) { this.id = id; this.type = type; } @@ -53,7 +53,7 @@ public String getType() { } @Override - public int compareTo(EntityId other) { + public int compareTo(EntityIdentifier other) { int c = type.compareTo(other.type); if (c != 0) return c; return id.compareTo(other.id); @@ -78,7 
+78,7 @@ public boolean equals(Object obj) { return false; if (getClass() != obj.getClass()) return false; - EntityId other = (EntityId) obj; + EntityIdentifier other = (EntityIdentifier) obj; if (id == null) { if (other.id != null) return false; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java new file mode 100644 index 0000000000000..38ceb30c7d49a --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/GenericObjectMapper.java @@ -0,0 +1,208 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.io.WritableUtils; +import org.codehaus.jackson.map.ObjectMapper; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +/** + * A utility class providing methods for serializing and deserializing + * objects. The {@link #write(Object)}, {@link #read(byte[])} and {@link + * #write(java.io.DataOutputStream, Object)}, {@link + * #read(java.io.DataInputStream)} methods are used by the + * {@link LeveldbApplicationTimelineStore} to store and retrieve arbitrary + * JSON, while the {@link #writeReverseOrderedLong} and {@link + * #readReverseOrderedLong} methods are used to sort entities in descending + * start time order. + */ [email protected] [email protected] +public class GenericObjectMapper { + private static final byte[] EMPTY_BYTES = new byte[0]; + + private static final byte LONG = 0x1; + private static final byte INTEGER = 0x2; + private static final byte DOUBLE = 0x3; + private static final byte STRING = 0x4; + private static final byte BOOLEAN = 0x5; + private static final byte LIST = 0x6; + private static final byte MAP = 0x7; + + /** + * Serializes an Object into a byte array. Along with {@link #read(byte[]) }, + * can be used to serialize an Object and deserialize it into an Object of + * the same type without needing to specify the Object's type, + * as long as it is one of the JSON-compatible objects Long, Integer, + * Double, String, Boolean, List, or Map. 
The current implementation uses + * ObjectMapper to serialize complex objects (List and Map) while using + * Writable to serialize simpler objects, to produce fewer bytes. + * + * @param o An Object + * @return A byte array representation of the Object + * @throws IOException + */ + public static byte[] write(Object o) throws IOException { + if (o == null) + return EMPTY_BYTES; + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + write(new DataOutputStream(baos), o); + return baos.toByteArray(); + } + + /** + * Serializes an Object and writes it to a DataOutputStream. Along with + * {@link #read(java.io.DataInputStream)}, can be used to serialize an Object + * and deserialize it into an Object of the same type without needing to + * specify the Object's type, as long as it is one of the JSON-compatible + * objects Long, Integer, Double, String, Boolean, List, or Map. The current + * implementation uses ObjectMapper to serialize complex objects (List and + * Map) while using Writable to serialize simpler objects, to produce fewer + * bytes. 
+ * + * @param dos A DataOutputStream + * @param o An Object + * @throws IOException + */ + public static void write(DataOutputStream dos, Object o) + throws IOException { + if (o == null) + return; + if (o instanceof Long) { + dos.write(LONG); + WritableUtils.writeVLong(dos, (Long) o); + } else if(o instanceof Integer) { + dos.write(INTEGER); + WritableUtils.writeVInt(dos, (Integer) o); + } else if(o instanceof Double) { + dos.write(DOUBLE); + dos.writeDouble((Double) o); + } else if (o instanceof String) { + dos.write(STRING); + WritableUtils.writeString(dos, (String) o); + } else if (o instanceof Boolean) { + dos.write(BOOLEAN); + dos.writeBoolean((Boolean) o); + } else if (o instanceof List) { + dos.write(LIST); + ObjectMapper mapper = new ObjectMapper(); + mapper.writeValue(dos, o); + } else if (o instanceof Map) { + dos.write(MAP); + ObjectMapper mapper = new ObjectMapper(); + mapper.writeValue(dos, o); + } else { + throw new IOException("Couldn't serialize object"); + } + } + + /** + * Deserializes an Object from a byte array created with + * {@link #write(Object)}. + * + * @param b A byte array + * @return An Object + * @throws IOException + */ + public static Object read(byte[] b) throws IOException { + if (b == null || b.length == 0) + return null; + ByteArrayInputStream bais = new ByteArrayInputStream(b); + return read(new DataInputStream(bais)); + } + + /** + * Reads an Object from a DataInputStream whose data has been written with + * {@link #write(java.io.DataOutputStream, Object)}. 
+ * + * @param dis A DataInputStream + * @return An Object, null if an unrecognized type + * @throws IOException + */ + public static Object read(DataInputStream dis) throws IOException { + byte code = (byte)dis.read(); + ObjectMapper mapper; + switch (code) { + case LONG: + return WritableUtils.readVLong(dis); + case INTEGER: + return WritableUtils.readVInt(dis); + case DOUBLE: + return dis.readDouble(); + case STRING: + return WritableUtils.readString(dis); + case BOOLEAN: + return dis.readBoolean(); + case LIST: + mapper = new ObjectMapper(); + return mapper.readValue(dis, ArrayList.class); + case MAP: + mapper = new ObjectMapper(); + return mapper.readValue(dis, HashMap.class); + default: + return null; + } + } + + /** + * Converts a long to a 8-byte array so that lexicographic ordering of the + * produced byte arrays sort the longs in descending order. + * + * @param l A long + * @return A byte array + */ + public static byte[] writeReverseOrderedLong(long l) { + byte[] b = new byte[8]; + b[0] = (byte)(0x7f ^ ((l >> 56) & 0xff)); + for (int i = 1; i < 7; i++) + b[i] = (byte)(0xff ^ ((l >> 8*(7-i)) & 0xff)); + b[7] = (byte)(0xff ^ (l & 0xff)); + return b; + } + + /** + * Reads 8 bytes from an array starting at the specified offset and + * converts them to a long. The bytes are assumed to have been created + * with {@link #writeReverseOrderedLong}. 
+ * + * @param b A byte array + * @param offset An offset into the byte array + * @return A long + */ + public static long readReverseOrderedLong(byte[] b, int offset) { + long l = b[offset] & 0xff; + for (int i = 1; i < 8; i++) { + l = l << 8; + l = l | (b[offset+i]&0xff); + } + return l ^ 0x7fffffffffffffffl; + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java new file mode 100644 index 0000000000000..c2e93cab94860 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/LeveldbApplicationTimelineStore.java @@ -0,0 +1,854 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeMap; + +import com.google.common.annotations.VisibleForTesting; +import org.apache.commons.collections.map.LRUMap; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.io.WritableComparator; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvent; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSEvents.ATSEventsOfOneEntity; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.fusesource.leveldbjni.JniDBFactory; +import org.iq80.leveldb.DB; +import org.iq80.leveldb.DBIterator; +import org.iq80.leveldb.Options; +import org.iq80.leveldb.WriteBatch; + +import static org.apache.hadoop.yarn.server.applicationhistoryservice + .apptimeline.GenericObjectMapper.readReverseOrderedLong; +import static org.apache.hadoop.yarn.server.applicationhistoryservice + 
.apptimeline.GenericObjectMapper.writeReverseOrderedLong; + +/** + * An implementation of an application timeline store backed by leveldb. + */ [email protected] [email protected] +public class LeveldbApplicationTimelineStore extends AbstractService + implements ApplicationTimelineStore { + private static final Log LOG = LogFactory + .getLog(LeveldbApplicationTimelineStore.class); + + private static final String FILENAME = "leveldb-apptimeline-store.ldb"; + + private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes(); + private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes(); + private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes(); + + private static final byte[] PRIMARY_FILTER_COLUMN = "f".getBytes(); + private static final byte[] OTHER_INFO_COLUMN = "i".getBytes(); + private static final byte[] RELATED_COLUMN = "r".getBytes(); + private static final byte[] TIME_COLUMN = "t".getBytes(); + + private static final byte[] EMPTY_BYTES = new byte[0]; + + private static final int START_TIME_CACHE_SIZE = 10000; + + @SuppressWarnings("unchecked") + private final Map<EntityIdentifier, Long> startTimeCache = + Collections.synchronizedMap(new LRUMap(START_TIME_CACHE_SIZE)); + + private DB db; + + public LeveldbApplicationTimelineStore() { + super(LeveldbApplicationTimelineStore.class.getName()); + } + + @Override + protected void serviceInit(Configuration conf) throws Exception { + Options options = new Options(); + options.createIfMissing(true); + JniDBFactory factory = new JniDBFactory(); + String path = conf.get(YarnConfiguration.ATS_LEVELDB_PATH_PROPERTY); + File p = new File(path); + if (!p.exists()) + if (!p.mkdirs()) + throw new IOException("Couldn't create directory for leveldb " + + "application timeline store " + path); + LOG.info("Using leveldb path " + path); + db = factory.open(new File(path, FILENAME), options); + super.serviceInit(conf); + } + + @Override + protected void serviceStop() throws Exception { + IOUtils.cleanup(LOG, 
db); + super.serviceStop(); + } + + private static class KeyBuilder { + private static final int MAX_NUMBER_OF_KEY_ELEMENTS = 10; + private byte[][] b; + private boolean[] useSeparator; + private int index; + private int length; + + public KeyBuilder(int size) { + b = new byte[size][]; + useSeparator = new boolean[size]; + index = 0; + length = 0; + } + + public static KeyBuilder newInstance() { + return new KeyBuilder(MAX_NUMBER_OF_KEY_ELEMENTS); + } + + public KeyBuilder add(String s) { + return add(s.getBytes(), true); + } + + public KeyBuilder add(byte[] t) { + return add(t, false); + } + + public KeyBuilder add(byte[] t, boolean sep) { + b[index] = t; + useSeparator[index] = sep; + length += t.length; + if (sep) + length++; + index++; + return this; + } + + public byte[] getBytes() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(length); + for (int i = 0; i < index; i++) { + baos.write(b[i]); + if (i < index-1 && useSeparator[i]) + baos.write(0x0); + } + return baos.toByteArray(); + } + + public byte[] getBytesForLookup() throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(length); + for (int i = 0; i < index; i++) { + baos.write(b[i]); + if (useSeparator[i]) + baos.write(0x0); + } + return baos.toByteArray(); + } + } + + private static class KeyParser { + private final byte[] b; + private int offset; + + public KeyParser(byte[] b, int offset) { + this.b = b; + this.offset = offset; + } + + public String getNextString() throws IOException { + if (offset >= b.length) + throw new IOException( + "tried to read nonexistent string from byte array"); + int i = 0; + while (offset+i < b.length && b[offset+i] != 0x0) + i++; + String s = new String(b, offset, i); + offset = offset + i + 1; + return s; + } + + public long getNextLong() throws IOException { + if (offset+8 >= b.length) + throw new IOException("byte array ran out when trying to read long"); + long l = readReverseOrderedLong(b, offset); + offset += 8; + 
return l; + } + + public int getOffset() { + return offset; + } + } + + @Override + public ATSEntity getEntity(String entity, String entityType, + EnumSet<Field> fields) throws IOException { + DBIterator iterator = null; + try { + byte[] revStartTime = getStartTime(entity, entityType, null, null, null); + if (revStartTime == null) + return null; + byte[] prefix = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType).add(revStartTime).add(entity).getBytesForLookup(); + + iterator = db.iterator(); + iterator.seek(prefix); + + return getEntity(entity, entityType, + readReverseOrderedLong(revStartTime, 0), fields, iterator, prefix, + prefix.length); + } finally { + IOUtils.cleanup(LOG, iterator); + } + } + + /** + * Read entity from a db iterator. If no information is found in the + * specified fields for this entity, return null. + */ + private static ATSEntity getEntity(String entity, String entityType, + Long startTime, EnumSet<Field> fields, DBIterator iterator, + byte[] prefix, int prefixlen) throws IOException { + if (fields == null) + fields = EnumSet.allOf(Field.class); + + ATSEntity atsEntity = new ATSEntity(); + boolean events = false; + boolean lastEvent = false; + if (fields.contains(Field.EVENTS)) { + events = true; + atsEntity.setEvents(new ArrayList<ATSEvent>()); + } else if (fields.contains(Field.LAST_EVENT_ONLY)) { + lastEvent = true; + atsEntity.setEvents(new ArrayList<ATSEvent>()); + } + else { + atsEntity.setEvents(null); + } + boolean relatedEntities = false; + if (fields.contains(Field.RELATED_ENTITIES)) { + relatedEntities = true; + atsEntity.setRelatedEntities(new HashMap<String, List<String>>()); + } else { + atsEntity.setRelatedEntities(null); + } + boolean primaryFilters = false; + if (fields.contains(Field.PRIMARY_FILTERS)) { + primaryFilters = true; + atsEntity.setPrimaryFilters(new HashMap<String, Object>()); + } else { + atsEntity.setPrimaryFilters(null); + } + boolean otherInfo = false; + if 
(fields.contains(Field.OTHER_INFO)) { + otherInfo = true; + atsEntity.setOtherInfo(new HashMap<String, Object>()); + } else { + atsEntity.setOtherInfo(null); + } + + // iterate through the entity's entry, parsing information if it is part + // of a requested field + for (; iterator.hasNext(); iterator.next()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(prefix, prefixlen, key)) + break; + if (key[prefixlen] == PRIMARY_FILTER_COLUMN[0]) { + if (primaryFilters) { + atsEntity.addPrimaryFilter(parseRemainingKey(key, + prefixlen + PRIMARY_FILTER_COLUMN.length), + GenericObjectMapper.read(iterator.peekNext().getValue())); + } + } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) { + if (otherInfo) { + atsEntity.addOtherInfo(parseRemainingKey(key, + prefixlen + OTHER_INFO_COLUMN.length), + GenericObjectMapper.read(iterator.peekNext().getValue())); + } + } else if (key[prefixlen] == RELATED_COLUMN[0]) { + if (relatedEntities) { + addRelatedEntity(atsEntity, key, + prefixlen + RELATED_COLUMN.length); + } + } else if (key[prefixlen] == TIME_COLUMN[0]) { + if (events || (lastEvent && atsEntity.getEvents().size() == 0)) { + ATSEvent event = getEntityEvent(null, key, prefixlen + + TIME_COLUMN.length, iterator.peekNext().getValue()); + if (event != null) { + atsEntity.addEvent(event); + } + } + } else { + LOG.warn(String.format("Found unexpected column for entity %s of " + + "type %s (0x%02x)", entity, entityType, key[prefixlen])); + } + } + + atsEntity.setEntityId(entity); + atsEntity.setEntityType(entityType); + atsEntity.setStartTime(startTime); + + return atsEntity; + } + + @Override + public ATSEvents getEntityTimelines(String entityType, + SortedSet<String> entityIds, Long limit, Long windowStart, + Long windowEnd, Set<String> eventType) throws IOException { + ATSEvents atsEvents = new ATSEvents(); + if (entityIds == null || entityIds.isEmpty()) + return atsEvents; + // create a lexicographically-ordered map from start time to entities + Map<byte[], 
List<EntityIdentifier>> startTimeMap = new TreeMap<byte[], + List<EntityIdentifier>>(new Comparator<byte[]>() { + @Override + public int compare(byte[] o1, byte[] o2) { + return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0, + o2.length); + } + }); + DBIterator iterator = null; + try { + // look up start times for the specified entities + // skip entities with no start time + for (String entity : entityIds) { + byte[] startTime = getStartTime(entity, entityType, null, null, null); + if (startTime != null) { + List<EntityIdentifier> entities = startTimeMap.get(startTime); + if (entities == null) { + entities = new ArrayList<EntityIdentifier>(); + startTimeMap.put(startTime, entities); + } + entities.add(new EntityIdentifier(entity, entityType)); + } + } + for (Entry<byte[], List<EntityIdentifier>> entry : + startTimeMap.entrySet()) { + // look up the events matching the given parameters (limit, + // start time, end time, event types) for entities whose start times + // were found and add the entities to the return list + byte[] revStartTime = entry.getKey(); + for (EntityIdentifier entity : entry.getValue()) { + ATSEventsOfOneEntity atsEntity = new ATSEventsOfOneEntity(); + atsEntity.setEntityId(entity.getId()); + atsEntity.setEntityType(entityType); + atsEvents.addEvent(atsEntity); + KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entityType).add(revStartTime).add(entity.getId()) + .add(TIME_COLUMN); + byte[] prefix = kb.getBytesForLookup(); + if (windowEnd == null) { + windowEnd = Long.MAX_VALUE; + } + byte[] revts = writeReverseOrderedLong(windowEnd); + kb.add(revts); + byte[] first = kb.getBytesForLookup(); + byte[] last = null; + if (windowStart != null) { + last = KeyBuilder.newInstance().add(prefix) + .add(writeReverseOrderedLong(windowStart)).getBytesForLookup(); + } + if (limit == null) { + limit = DEFAULT_LIMIT; + } + iterator = db.iterator(); + for (iterator.seek(first); atsEntity.getEvents().size() < limit && + 
iterator.hasNext(); iterator.next()) { + byte[] key = iterator.peekNext().getKey(); + if (!prefixMatches(prefix, prefix.length, key) || (last != null && + WritableComparator.compareBytes(key, 0, key.length, last, 0, + last.length) > 0)) + break; + ATSEvent event = getEntityEvent(eventType, key, prefix.length, + iterator.peekNext().getValue()); + if (event != null) + atsEntity.addEvent(event); + } + } + } + } finally { + IOUtils.cleanup(LOG, iterator); + } + return atsEvents; + } + + /** + * Returns true if the byte array begins with the specified prefix. + */ + private static boolean prefixMatches(byte[] prefix, int prefixlen, + byte[] b) { + if (b.length < prefixlen) + return false; + return WritableComparator.compareBytes(prefix, 0, prefixlen, b, 0, + prefixlen) == 0; + } + + @Override + public ATSEntities getEntities(String entityType, + Long limit, Long windowStart, Long windowEnd, + NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters, + EnumSet<Field> fields) throws IOException { + if (primaryFilter == null) { + // if no primary filter is specified, prefix the lookup with + // ENTITY_ENTRY_PREFIX + return getEntityByTime(ENTITY_ENTRY_PREFIX, entityType, limit, + windowStart, windowEnd, secondaryFilters, fields); + } else { + // if a primary filter is specified, prefix the lookup with + // INDEXED_ENTRY_PREFIX + primaryFilterName + primaryFilterValue + + // ENTITY_ENTRY_PREFIX + byte[] base = KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) + .add(primaryFilter.getName()) + .add(GenericObjectMapper.write(primaryFilter.getValue()), true) + .add(ENTITY_ENTRY_PREFIX).getBytesForLookup(); + return getEntityByTime(base, entityType, limit, windowStart, windowEnd, + secondaryFilters, fields); + } + } + + /** + * Retrieves a list of entities satisfying given parameters. 
+ * + * @param base A byte array prefix for the lookup + * @param entityType The type of the entity + * @param limit A limit on the number of entities to return + * @param starttime The earliest entity start time to retrieve (exclusive) + * @param endtime The latest entity start time to retrieve (inclusive) + * @param secondaryFilters Filter pairs that the entities should match + * @param fields The set of fields to retrieve + * @return A list of entities + * @throws IOException + */ + private ATSEntities getEntityByTime(byte[] base, + String entityType, Long limit, Long starttime, Long endtime, + Collection<NameValuePair> secondaryFilters, EnumSet<Field> fields) + throws IOException { + DBIterator iterator = null; + try { + KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType); + // only db keys matching the prefix (base + entity type) will be parsed + byte[] prefix = kb.getBytesForLookup(); + if (endtime == null) { + // if end time is null, place no restriction on end time + endtime = Long.MAX_VALUE; + } + // using end time, construct a first key that will be seeked to + byte[] revts = writeReverseOrderedLong(endtime); + kb.add(revts); + byte[] first = kb.getBytesForLookup(); + byte[] last = null; + if (starttime != null) { + // if start time is not null, set a last key that will not be + // iterated past + last = KeyBuilder.newInstance().add(base).add(entityType) + .add(writeReverseOrderedLong(starttime)).getBytesForLookup(); + } + if (limit == null) { + // if limit is not specified, use the default + limit = DEFAULT_LIMIT; + } + + ATSEntities atsEntities = new ATSEntities(); + iterator = db.iterator(); + iterator.seek(first); + // iterate until one of the following conditions is met: limit is + // reached, there are no more keys, the key prefix no longer matches, + // or a start time has been specified and reached/exceeded + while (atsEntities.getEntities().size() < limit && iterator.hasNext()) { + byte[] key = iterator.peekNext().getKey(); + if 
(!prefixMatches(prefix, prefix.length, key) || (last != null && + WritableComparator.compareBytes(key, 0, key.length, last, 0, + last.length) > 0)) + break; + // read the start time and entity from the current key + KeyParser kp = new KeyParser(key, prefix.length); + Long startTime = kp.getNextLong(); + String entity = kp.getNextString(); + // parse the entity that owns this key, iterating over all keys for + // the entity + ATSEntity atsEntity = getEntity(entity, entityType, startTime, + fields, iterator, key, kp.getOffset()); + if (atsEntity == null) + continue; + // determine if the retrieved entity matches the provided secondary + // filters, and if so add it to the list of entities to return + boolean filterPassed = true; + if (secondaryFilters != null) { + for (NameValuePair filter : secondaryFilters) { + Object v = atsEntity.getOtherInfo().get(filter.getName()); + if (v == null) + v = atsEntity.getPrimaryFilters().get(filter.getName()); + if (v == null || !v.equals(filter.getValue())) { + filterPassed = false; + break; + } + } + } + if (filterPassed) + atsEntities.addEntity(atsEntity); + } + return atsEntities; + } finally { + IOUtils.cleanup(LOG, iterator); + } + } + + /** + * Put a single entity. If there is an error, add a PutError to the given + * response. 
+ */ + private void put(ATSEntity atsEntity, ATSPutErrors response) { + WriteBatch writeBatch = null; + try { + writeBatch = db.createWriteBatch(); + List<ATSEvent> events = atsEntity.getEvents(); + // look up the start time for the entity + byte[] revStartTime = getStartTime(atsEntity.getEntityId(), + atsEntity.getEntityType(), atsEntity.getStartTime(), events, + writeBatch); + if (revStartTime == null) { + // if no start time is found, add an error and return + ATSPutError error = new ATSPutError(); + error.setEntityId(atsEntity.getEntityId()); + error.setEntityType(atsEntity.getEntityType()); + error.setErrorCode(ATSPutError.NO_START_TIME); + response.addError(error); + return; + } + Long revStartTimeLong = readReverseOrderedLong(revStartTime, 0); + Map<String, Object> primaryFilters = atsEntity.getPrimaryFilters(); + + // write event entries + if (events != null && !events.isEmpty()) { + for (ATSEvent event : events) { + byte[] revts = writeReverseOrderedLong(event.getTimestamp()); + byte[] key = createEntityEventKey(atsEntity.getEntityId(), + atsEntity.getEntityType(), revStartTime, revts, + event.getEventType()); + byte[] value = GenericObjectMapper.write(event.getEventInfo()); + writeBatch.put(key, value); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); + } + } + + // write related entity entries + Map<String,List<String>> relatedEntities = + atsEntity.getRelatedEntities(); + if (relatedEntities != null && !relatedEntities.isEmpty()) { + for (Entry<String, List<String>> relatedEntityList : + relatedEntities.entrySet()) { + String relatedEntityType = relatedEntityList.getKey(); + for (String relatedEntityId : relatedEntityList.getValue()) { + // look up start time of related entity + byte[] relatedEntityStartTime = getStartTime(relatedEntityId, + relatedEntityType, null, null, writeBatch); + if (relatedEntityStartTime == null) { + // if start time is not found, set start time of the related + // entity to the start time of this entity, 
and write it to the + // db and the cache + relatedEntityStartTime = revStartTime; + writeBatch.put(createStartTimeLookupKey(relatedEntityId, + relatedEntityType), relatedEntityStartTime); + startTimeCache.put(new EntityIdentifier(relatedEntityId, + relatedEntityType), revStartTimeLong); + } + // write reverse entry (related entity -> entity) + byte[] key = createReleatedEntityKey(relatedEntityId, + relatedEntityType, relatedEntityStartTime, + atsEntity.getEntityId(), atsEntity.getEntityType()); + writeBatch.put(key, EMPTY_BYTES); + // TODO: write forward entry (entity -> related entity)? + } + } + } + + // write primary filter entries + if (primaryFilters != null && !primaryFilters.isEmpty()) { + for (Entry<String, Object> primaryFilter : primaryFilters.entrySet()) { + byte[] key = createPrimaryFilterKey(atsEntity.getEntityId(), + atsEntity.getEntityType(), revStartTime, primaryFilter.getKey()); + byte[] value = GenericObjectMapper.write(primaryFilter.getValue()); + writeBatch.put(key, value); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); + } + } + + // write other info entries + Map<String, Object> otherInfo = atsEntity.getOtherInfo(); + if (otherInfo != null && !otherInfo.isEmpty()) { + for (Entry<String, Object> i : otherInfo.entrySet()) { + byte[] key = createOtherInfoKey(atsEntity.getEntityId(), + atsEntity.getEntityType(), revStartTime, i.getKey()); + byte[] value = GenericObjectMapper.write(i.getValue()); + writeBatch.put(key, value); + writePrimaryFilterEntries(writeBatch, primaryFilters, key, value); + } + } + db.write(writeBatch); + } catch (IOException e) { + LOG.error("Error putting entity " + atsEntity.getEntityId() + + " of type " + atsEntity.getEntityType(), e); + ATSPutError error = new ATSPutError(); + error.setEntityId(atsEntity.getEntityId()); + error.setEntityType(atsEntity.getEntityType()); + error.setErrorCode(ATSPutError.IO_EXCEPTION); + response.addError(error); + } finally { + IOUtils.cleanup(LOG, writeBatch); + } + 
} + + /** + * For a given key / value pair that has been written to the db, + * write additional entries to the db for each primary filter. + */ + private static void writePrimaryFilterEntries(WriteBatch writeBatch, + Map<String, Object> primaryFilters, byte[] key, byte[] value) + throws IOException { + if (primaryFilters != null && !primaryFilters.isEmpty()) { + for (Entry<String, Object> p : primaryFilters.entrySet()) { + writeBatch.put(addPrimaryFilterToKey(p.getKey(), p.getValue(), + key), value); + } + } + } + + @Override + public ATSPutErrors put(ATSEntities atsEntities) { + ATSPutErrors response = new ATSPutErrors(); + for (ATSEntity atsEntity : atsEntities.getEntities()) { + put(atsEntity, response); + } + return response; + } + + /** + * Get the unique start time for a given entity as a byte array that sorts + * the timestamps in reverse order (see {@link + * GenericObjectMapper#writeReverseOrderedLong(long)}). + * + * @param entityId The id of the entity + * @param entityType The type of the entity + * @param startTime The start time of the entity, or null + * @param events A list of events for the entity, or null + * @param writeBatch A leveldb write batch, if the method is called by a + * put as opposed to a get + * @return A byte array + * @throws IOException + */ + private byte[] getStartTime(String entityId, String entityType, + Long startTime, List<ATSEvent> events, WriteBatch writeBatch) + throws IOException { + EntityIdentifier entity = new EntityIdentifier(entityId, entityType); + if (startTime == null) { + // start time is not provided, so try to look it up + if (startTimeCache.containsKey(entity)) { + // found the start time in the cache + startTime = startTimeCache.get(entity); + } else { + // try to look up the start time in the db + byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); + byte[] v = db.get(b); + if (v == null) { + // did not find the start time in the db + // if this is a put, try to set it from the provided 
events + if (events == null || writeBatch == null) { + // no events, or not a put, so return null + return null; + } + Long min = Long.MAX_VALUE; + for (ATSEvent e : events) + if (min > e.getTimestamp()) + min = e.getTimestamp(); + startTime = min; + // selected start time as minimum timestamp of provided events + // write start time to db and cache + writeBatch.put(b, writeReverseOrderedLong(startTime)); + startTimeCache.put(entity, startTime); + } else { + // found the start time in the db + startTime = readReverseOrderedLong(v, 0); + if (writeBatch != null) { + // if this is a put, re-add the start time to the cache + startTimeCache.put(entity, startTime); + } + } + } + } else { + // start time is provided + // TODO: verify start time in db as well as cache? + if (startTimeCache.containsKey(entity)) { + // if the start time is already in the cache, + // and it is different from the provided start time, + // use the one from the cache + if (!startTime.equals(startTimeCache.get(entity))) + startTime = startTimeCache.get(entity); + } else if (writeBatch != null) { + // if this is a put, write the provided start time to the db and the + // cache + byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType()); + writeBatch.put(b, writeReverseOrderedLong(startTime)); + startTimeCache.put(entity, startTime); + } + } + return writeReverseOrderedLong(startTime); + } + + /** + * Creates a key for looking up the start time of a given entity, + * of the form START_TIME_LOOKUP_PREFIX + entitytype + entity. + */ + private static byte[] createStartTimeLookupKey(String entity, + String entitytype) throws IOException { + return KeyBuilder.newInstance().add(START_TIME_LOOKUP_PREFIX) + .add(entitytype).add(entity).getBytes(); + } + + /** + * Creates an index entry for the given key of the form + * INDEXED_ENTRY_PREFIX + primaryfiltername + primaryfiltervalue + key. 
+ */ + private static byte[] addPrimaryFilterToKey(String primaryFilterName, + Object primaryFilterValue, byte[] key) throws IOException { + return KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX) + .add(primaryFilterName) + .add(GenericObjectMapper.write(primaryFilterValue), true).add(key) + .getBytes(); + } + + /** + * Creates an event key, serializing ENTITY_ENTRY_PREFIX + entitytype + + * revstarttime + entity + TIME_COLUMN + reveventtimestamp + eventtype. + */ + private static byte[] createEntityEventKey(String entity, String entitytype, + byte[] revStartTime, byte[] reveventtimestamp, String eventtype) + throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX) + .add(entitytype).add(revStartTime).add(entity).add(TIME_COLUMN) + .add(reveventtimestamp).add(eventtype).getBytes(); + } + + /** + * Creates an event object from the given key, offset, and value. If the + * event type is not contained in the specified set of event types, + * returns null. + */ + private static ATSEvent getEntityEvent(Set<String> eventTypes, byte[] key, + int offset, byte[] value) throws IOException { + KeyParser kp = new KeyParser(key, offset); + long ts = kp.getNextLong(); + String tstype = kp.getNextString(); + if (eventTypes == null || eventTypes.contains(tstype)) { + ATSEvent event = new ATSEvent(); + event.setTimestamp(ts); + event.setEventType(tstype); + Object o = GenericObjectMapper.read(value); + if (o == null) { + event.setEventInfo(null); + } else if (o instanceof Map) { + @SuppressWarnings("unchecked") + Map<String, Object> m = (Map<String, Object>) o; + event.setEventInfo(m); + } else { + throw new IOException("Couldn't deserialize event info map"); + } + return event; + } + return null; + } + + /** + * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX + + * entitytype + revstarttime + entity + PRIMARY_FILTER_COLUMN + name. 
+ */ + private static byte[] createPrimaryFilterKey(String entity, + String entitytype, byte[] revStartTime, String name) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) + .add(revStartTime).add(entity).add(PRIMARY_FILTER_COLUMN).add(name) + .getBytes(); + } + + /** + * Creates an other info key, serializing ENTITY_ENTRY_PREFIX + entitytype + + * revstarttime + entity + OTHER_INFO_COLUMN + name. + */ + private static byte[] createOtherInfoKey(String entity, String entitytype, + byte[] revStartTime, String name) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) + .add(revStartTime).add(entity).add(OTHER_INFO_COLUMN).add(name) + .getBytes(); + } + + /** + * Creates a string representation of the byte array from the given offset + * to the end of the array (for parsing other info keys). + */ + private static String parseRemainingKey(byte[] b, int offset) { + return new String(b, offset, b.length - offset); + } + + /** + * Creates a related entity key, serializing ENTITY_ENTRY_PREFIX + + * entitytype + revstarttime + entity + RELATED_COLUMN + relatedentitytype + + * relatedentity. + */ + private static byte[] createReleatedEntityKey(String entity, + String entitytype, byte[] revStartTime, String relatedEntity, + String relatedEntityType) throws IOException { + return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entitytype) + .add(revStartTime).add(entity).add(RELATED_COLUMN) + .add(relatedEntityType).add(relatedEntity).getBytes(); + } + + /** + * Parses the related entity from the given key at the given offset and + * adds it to the given entity. 
+ */ + private static void addRelatedEntity(ATSEntity atsEntity, byte[] key, + int offset) throws IOException { + KeyParser kp = new KeyParser(key, offset); + String type = kp.getNextString(); + String id = kp.getNextString(); + atsEntity.addRelatedEntity(type, id); + } + + /** + * Clears the cache to test reloading start times from leveldb (only for + * testing). + */ + @VisibleForTesting + void clearStartTimeCache() { + startTimeCache.clear(); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java index 45f0a11d764d0..1c8e392cfe289 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/MemoryApplicationTimelineStore.java @@ -53,8 +53,8 @@ public class MemoryApplicationTimelineStore extends AbstractService implements ApplicationTimelineStore { - private Map<EntityId, ATSEntity> entities = - new HashMap<EntityId, ATSEntity>(); + private Map<EntityIdentifier, ATSEntity> entities = + new HashMap<EntityIdentifier, ATSEntity>(); public MemoryApplicationTimelineStore() { super(MemoryApplicationTimelineStore.class.getName()); @@ -125,7 +125,7 @@ public ATSEntity getEntity(String entityId, String entityType, if (fieldsToRetrieve == null) { fieldsToRetrieve = EnumSet.allOf(Field.class); } - ATSEntity entity = entities.get(new 
EntityId(entityId, entityType)); + ATSEntity entity = entities.get(new EntityIdentifier(entityId, entityType)); if (entity == null) { return null; } else { @@ -152,7 +152,7 @@ public ATSEvents getEntityTimelines(String entityType, windowEnd = Long.MAX_VALUE; } for (String entityId : entityIds) { - EntityId entityID = new EntityId(entityId, entityType); + EntityIdentifier entityID = new EntityIdentifier(entityId, entityType); ATSEntity entity = entities.get(entityID); if (entity == null) { continue; @@ -184,8 +184,8 @@ public ATSEvents getEntityTimelines(String entityType, public ATSPutErrors put(ATSEntities data) { ATSPutErrors errors = new ATSPutErrors(); for (ATSEntity entity : data.getEntities()) { - EntityId entityId = - new EntityId(entity.getEntityId(), entity.getEntityType()); + EntityIdentifier entityId = + new EntityIdentifier(entity.getEntityId(), entity.getEntityType()); // store entity info in memory ATSEntity existingEntity = entities.get(entityId); if (existingEntity == null) { @@ -210,7 +210,7 @@ public ATSPutErrors put(ATSEntities data) { ATSPutError error = new ATSPutError(); error.setEntityId(entityId.getId()); error.setEntityType(entityId.getType()); - error.setErrorCode(1); + error.setErrorCode(ATSPutError.NO_START_TIME); errors.addError(error); entities.remove(entityId); continue; @@ -242,12 +242,20 @@ public ATSPutErrors put(ATSEntities data) { continue; } for (String idStr : partRelatedEntities.getValue()) { - EntityId relatedEntityId = - new EntityId(idStr, partRelatedEntities.getKey()); + EntityIdentifier relatedEntityId = + new EntityIdentifier(idStr, partRelatedEntities.getKey()); ATSEntity relatedEntity = entities.get(relatedEntityId); if (relatedEntity != null) { relatedEntity.addRelatedEntity( existingEntity.getEntityType(), existingEntity.getEntityId()); + } else { + relatedEntity = new ATSEntity(); + relatedEntity.setEntityId(relatedEntityId.getId()); + relatedEntity.setEntityType(relatedEntityId.getType()); + 
relatedEntity.setStartTime(existingEntity.getStartTime()); + relatedEntity.addRelatedEntity(existingEntity.getEntityType(), + existingEntity.getEntityId()); + entities.put(relatedEntityId, relatedEntity); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java index 4ea501d89a845..063b67afd07fa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ATSWebServices.java @@ -18,6 +18,7 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.EnumSet; @@ -45,6 +46,8 @@ import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; @@ -64,6 +67,8 @@ //TODO: support XML serialization/deserialization public class ATSWebServices { + private static final Log LOG = LogFactory.getLog(ATSWebServices.class); + private ApplicationTimelineStore store; @Inject @@ -143,6 +148,10 @@ public ATSEntities getEntities( "windowStart, windowEnd or limit is not a numeric value."); } catch 
(IllegalArgumentException e) { throw new BadRequestException("requested invalid field."); + } catch (IOException e) { + LOG.error("Error getting entities", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); } if (entities == null) { return new ATSEntities(); @@ -171,6 +180,10 @@ public ATSEntity getEntity( } catch (IllegalArgumentException e) { throw new BadRequestException( "requested invalid field."); + } catch (IOException e) { + LOG.error("Error getting entity", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); } if (entity == null) { throw new WebApplicationException(Response.Status.NOT_FOUND); @@ -206,6 +219,10 @@ public ATSEvents getEvents( } catch (NumberFormatException e) { throw new BadRequestException( "windowStart, windowEnd or limit is not a numeric value."); + } catch (IOException e) { + LOG.error("Error getting entity timelines", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); } if (events == null) { return new ATSEvents(); @@ -228,7 +245,13 @@ public ATSPutErrors postEntities( if (entities == null) { return new ATSPutErrors(); } - return store.put(entities); + try { + return store.put(entities); + } catch (IOException e) { + LOG.error("Error putting entities", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); + } } private void init(HttpServletResponse response) { @@ -275,7 +298,17 @@ private static EnumSet<Field> parseFieldsStr(String str, String delimiter) { String[] strs = str.split(delimiter); List<Field> fieldList = new ArrayList<Field>(); for (String s : strs) { - fieldList.add(Field.valueOf(s.toUpperCase())); + s = s.trim().toUpperCase(); + if (s.equals("EVENTS")) + fieldList.add(Field.EVENTS); + else if (s.equals("LASTEVENTONLY")) + fieldList.add(Field.LAST_EVENT_ONLY); + else if (s.equals("RELATEDENTITIES")) + fieldList.add(Field.RELATED_ENTITIES); + else if (s.equals("PRIMARYFILTERS")) + 
fieldList.add(Field.PRIMARY_FILTERS); + else if (s.equals("OTHERINFO")) + fieldList.add(Field.OTHER_INFO); } if (fieldList.size() == 0) return null; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java index 5825af192b8c2..9afa5c0234a07 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/ApplicationTimelineStoreTestUtils.java @@ -21,6 +21,8 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import java.io.File; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -71,7 +73,7 @@ public class ApplicationTimelineStoreTestUtils { /** * Load test data into the given store */ - protected void loadTestData() { + protected void loadTestData() throws IOException { ATSEntities atsEntities = new ATSEntities(); Map<String, Object> primaryFilters = new HashMap<String, Object>(); primaryFilters.put("user", "username"); @@ -126,7 +128,7 @@ protected void loadTestData() { response = store.put(atsEntities); assertEquals(0, response.getErrors().size()); atsEntities.setEntities(Collections.singletonList(createEntity(entity1b, - entityType1, 123l, Collections.singletonList(ev2), null, + entityType1, 789l, Collections.singletonList(ev2), 
null, primaryFilters, otherInfo2))); response = store.put(atsEntities); assertEquals(0, response.getErrors().size()); @@ -138,11 +140,11 @@ protected void loadTestData() { ATSPutError error = response.getErrors().get(0); assertEquals("badentityid", error.getEntityId()); assertEquals("badentity", error.getEntityType()); - assertEquals((Integer) 1, error.getErrorCode()); + assertEquals(ATSPutError.NO_START_TIME, error.getErrorCode()); } /** - * Load veification data + * Load verification data */ protected void loadVerificationData() throws Exception { userFilter = new NameValuePair("user", @@ -197,7 +199,7 @@ protected void loadVerificationData() throws Exception { events2.add(ev4); } - public void testGetSingleEntity() { + public void testGetSingleEntity() throws IOException { // test getting entity info verifyEntityInfo(null, null, null, null, null, null, store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class))); @@ -222,6 +224,10 @@ public void testGetSingleEntity() { null, null, null, store.getEntity(entity1, entityType1, EnumSet.of(Field.LAST_EVENT_ONLY))); + verifyEntityInfo(entity1b, entityType1, events1, EMPTY_REL_ENTITIES, + primaryFilters, otherInfo, store.getEntity(entity1b, entityType1, + null)); + verifyEntityInfo(entity1, entityType1, null, null, primaryFilters, null, store.getEntity(entity1, entityType1, EnumSet.of(Field.PRIMARY_FILTERS))); @@ -234,7 +240,7 @@ public void testGetSingleEntity() { EnumSet.of(Field.RELATED_ENTITIES))); } - public void testGetEntities() { + public void testGetEntities() throws IOException { // test getting entities assertEquals("nonzero entities size for nonexistent type", 0, store.getEntities("type_0", null, null, null, null, null, @@ -305,7 +311,7 @@ public void testGetEntities() { primaryFilters, otherInfo, entities.get(1)); } - public void testGetEntitiesWithPrimaryFilters() { + public void testGetEntitiesWithPrimaryFilters() throws IOException { // test using primary filter assertEquals("nonzero entities size for 
primary filter", 0, store.getEntities("type_1", null, null, null, @@ -361,7 +367,7 @@ public void testGetEntitiesWithPrimaryFilters() { primaryFilters, otherInfo, entities.get(1)); } - public void testGetEntitiesWithSecondaryFilters() { + public void testGetEntitiesWithSecondaryFilters() throws IOException { // test using secondary filter List<ATSEntity> entities = store.getEntities("type_1", null, null, null, null, goodTestingFilters, EnumSet.allOf(Field.class)).getEntities(); @@ -388,7 +394,7 @@ public void testGetEntitiesWithSecondaryFilters() { assertEquals(0, entities.size()); } - public void testGetEvents() { + public void testGetEvents() throws IOException { // test getting entity timelines SortedSet<String> sortedSet = new TreeSet<String>(); sortedSet.add(entity1); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java new file mode 100644 index 0000000000000..4bb453a41be4d --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestGenericObjectMapper.java @@ -0,0 +1,89 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.io.WritableComparator; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.junit.Assert.assertEquals; + [email protected] [email protected] +public class TestGenericObjectMapper { + + @Test + public void testEncoding() { + testEncoding(Long.MAX_VALUE); + testEncoding(Long.MIN_VALUE); + testEncoding(0l); + testEncoding(128l); + testEncoding(256l); + testEncoding(512l); + testEncoding(-256l); + } + + private static void testEncoding(long l) { + byte[] b = GenericObjectMapper.writeReverseOrderedLong(l); + assertEquals("error decoding", l, + GenericObjectMapper.readReverseOrderedLong(b, 0)); + byte[] buf = new byte[16]; + System.arraycopy(b, 0, buf, 5, 8); + assertEquals("error decoding at offset", l, + GenericObjectMapper.readReverseOrderedLong(buf, 5)); + if (l > Long.MIN_VALUE) { + byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1); + assertEquals("error preserving ordering", 1, + WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length)); + } + if (l < Long.MAX_VALUE) { + byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1); + assertEquals("error preserving ordering", 1, + WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length)); + } + } + + private static void 
verify(Object o) throws IOException { + assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o))); + } + + @Test + public void testValueTypes() throws IOException { + verify(42l); + verify(42); + verify(1.23); + verify("abc"); + verify(true); + List<String> list = new ArrayList<String>(); + list.add("123"); + list.add("abc"); + verify(list); + Map<String,String> map = new HashMap<String,String>(); + map.put("k1","v1"); + map.put("k2","v2"); + verify(map); + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java new file mode 100644 index 0000000000000..b868049c4fbaa --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestLeveldbApplicationTimelineStore.java @@ -0,0 +1,95 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.yarn.server.applicationhistoryservice.apptimeline; + +import java.io.File; +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntities; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSEntity; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors; +import org.apache.hadoop.yarn.api.records.apptimeline.ATSPutErrors.ATSPutError; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + [email protected] [email protected] +public class TestLeveldbApplicationTimelineStore + extends ApplicationTimelineStoreTestUtils { + private FileContext fsContext; + private File fsPath; + + @Before + public void setup() throws Exception { + fsContext = FileContext.getLocalFSFileContext(); + Configuration conf = new Configuration(); + fsPath = new File("target", this.getClass().getSimpleName() + + "-tmpDir").getAbsoluteFile(); + fsContext.delete(new Path(fsPath.getAbsolutePath()), true); + conf.set(YarnConfiguration.ATS_LEVELDB_PATH_PROPERTY, + fsPath.getAbsolutePath()); + store = new LeveldbApplicationTimelineStore(); + store.init(conf); + store.start(); + loadTestData(); + loadVerificationData(); + } + + @After + public void tearDown() throws Exception { + store.stop(); + fsContext.delete(new Path(fsPath.getAbsolutePath()), true); + } + + @Test + public void testGetSingleEntity() throws IOException { + super.testGetSingleEntity(); + ((LeveldbApplicationTimelineStore)store).clearStartTimeCache(); + super.testGetSingleEntity(); + 
} + + @Test + public void testGetEntities() throws IOException { + super.testGetEntities(); + } + + @Test + public void testGetEntitiesWithPrimaryFilters() throws IOException { + super.testGetEntitiesWithPrimaryFilters(); + } + + @Test + public void testGetEntitiesWithSecondaryFilters() throws IOException { + super.testGetEntitiesWithSecondaryFilters(); + } + + @Test + public void testGetEvents() throws IOException { + super.testGetEvents(); + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java index aa88b74a90100..07a3955bf67ce 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/apptimeline/TestMemoryApplicationTimelineStore.java @@ -23,6 +23,7 @@ import org.junit.Before; import org.junit.Test; +import java.io.IOException; public class TestMemoryApplicationTimelineStore extends ApplicationTimelineStoreTestUtils { @@ -46,27 +47,27 @@ public ApplicationTimelineStore getApplicationTimelineStore() { } @Test - public void testGetSingleEntity() { + public void testGetSingleEntity() throws IOException { super.testGetSingleEntity(); } @Test - public void testGetEntities() { + public void testGetEntities() throws IOException { super.testGetEntities(); } @Test - public void testGetEntitiesWithPrimaryFilters() { + public void 
testGetEntitiesWithPrimaryFilters() throws IOException { super.testGetEntitiesWithPrimaryFilters(); } @Test - public void testGetEntitiesWithSecondaryFilters() { + public void testGetEntitiesWithSecondaryFilters() throws IOException { super.testGetEntitiesWithSecondaryFilters(); } @Test - public void testGetEvents() { + public void testGetEvents() throws IOException { super.testGetEvents(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestATSWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestATSWebServices.java index 1ff73ff35a22a..58a826c9ac033 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestATSWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestATSWebServices.java @@ -156,6 +156,43 @@ public void testGetEntity() throws Exception { Assert.assertEquals(4, entity.getOtherInfo().size()); } + @Test + public void testGetEntityFields1() throws Exception { + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("apptimeline") + .path("type_1").path("id_1").queryParam("fields", "events,otherinfo") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + ATSEntity entity = response.getEntity(ATSEntity.class); + Assert.assertNotNull(entity); + Assert.assertEquals("id_1", entity.getEntityId()); + Assert.assertEquals("type_1", entity.getEntityType()); + Assert.assertEquals(123l, 
entity.getStartTime().longValue()); + Assert.assertEquals(2, entity.getEvents().size()); + Assert.assertEquals(0, entity.getPrimaryFilters().size()); + Assert.assertEquals(4, entity.getOtherInfo().size()); + } + + @Test + public void testGetEntityFields2() throws Exception { + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("apptimeline") + .path("type_1").path("id_1").queryParam("fields", "lasteventonly," + + "primaryfilters,relatedentities") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + ATSEntity entity = response.getEntity(ATSEntity.class); + Assert.assertNotNull(entity); + Assert.assertEquals("id_1", entity.getEntityId()); + Assert.assertEquals("type_1", entity.getEntityType()); + Assert.assertEquals(123l, entity.getStartTime().longValue()); + Assert.assertEquals(1, entity.getEvents().size()); + Assert.assertEquals(2, entity.getPrimaryFilters().size()); + Assert.assertEquals(0, entity.getOtherInfo().size()); + } + @Test public void testGetEvents() throws Exception { WebResource r = resource();
6b3cef06538b2ba3ad1a67b8f0a67473b5596812
restlet-framework-java
- Cleaned code to remove Eclipse 3.4 warnings and- errors.--
p
https://github.com/restlet/restlet-framework-java
diff --git a/modules/com.noelios.restlet.ext.javamail_1.4/src/com/noelios/restlet/ext/javamail/TriggerResource.java b/modules/com.noelios.restlet.ext.javamail_1.4/src/com/noelios/restlet/ext/javamail/TriggerResource.java index 9e1341b7b5..a0ce6e6c87 100644 --- a/modules/com.noelios.restlet.ext.javamail_1.4/src/com/noelios/restlet/ext/javamail/TriggerResource.java +++ b/modules/com.noelios.restlet.ext.javamail_1.4/src/com/noelios/restlet/ext/javamail/TriggerResource.java @@ -340,7 +340,6 @@ protected List<String> getMailIdentifiers() throws ResourceException { * @return The URI of the mail. * @throws ResourceException */ - @SuppressWarnings("unused") protected Reference getMailRef(String identifier) throws ResourceException { Template mailTemplate = new Template(getMailUriTemplate()); Reference result = new Reference(mailTemplate.format(new MailResolver( @@ -435,7 +434,6 @@ protected Status getResponseStatus(List<String> mailsSuccessful, * @return The target challengeResponse object. * @throws ResourceException */ - @SuppressWarnings("unused") protected ChallengeResponse getTargetChallengeResponse( Resolver<String> resolver) throws ResourceException { ChallengeScheme challengeScheme = ChallengeScheme.valueOf(resolver @@ -497,7 +495,6 @@ protected Method getTargetMethod(Resolver<String> resolver) { * @return The target reference. 
* @throws ResourceException */ - @SuppressWarnings("unused") protected Reference getTargetRef(Resolver<String> resolver) throws ResourceException { Template targetTemplate = new Template(getTargetUri()); diff --git a/modules/com.noelios.restlet.ext.servlet_2.5/src/com/noelios/restlet/ext/servlet/ServerServlet.java b/modules/com.noelios.restlet.ext.servlet_2.5/src/com/noelios/restlet/ext/servlet/ServerServlet.java index 2595846004..4558ccf553 100644 --- a/modules/com.noelios.restlet.ext.servlet_2.5/src/com/noelios/restlet/ext/servlet/ServerServlet.java +++ b/modules/com.noelios.restlet.ext.servlet_2.5/src/com/noelios/restlet/ext/servlet/ServerServlet.java @@ -381,7 +381,7 @@ protected Component createComponent() { // Define the list of supported client protocols. String clientProtocolsString = getInitParameter(CLIENTS_KEY, null); - if (component != null && clientProtocolsString != null) { + if (clientProtocolsString != null) { String[] clientProtocols = clientProtocolsString.split(" "); for (String clientProtocol : clientProtocols) { component.getClients().add(Protocol.valueOf(clientProtocol)); @@ -439,6 +439,7 @@ public void destroy() { * * @return The application. */ + @SuppressWarnings("null") public Application getApplication() { Application result = this.application; @@ -485,6 +486,7 @@ protected Class<?> getClass(String className) throws ClassNotFoundException { * * @return The component. */ + @SuppressWarnings("null") public Component getComponent() { Component result = this.component; @@ -547,6 +549,7 @@ public String getInitParameter(String name, String defaultValue) { * The HTTP Servlet request. * @return The HTTP server handling calls. 
*/ + @SuppressWarnings("null") public HttpServerHelper getServer(HttpServletRequest request) { HttpServerHelper result = this.helper; diff --git a/modules/com.noelios.restlet.ext.xdb_11.1/src/com/noelios/restlet/ext/xdb/XdbServletConverter.java b/modules/com.noelios.restlet.ext.xdb_11.1/src/com/noelios/restlet/ext/xdb/XdbServletConverter.java index 52cca2326f..230341a2ed 100644 --- a/modules/com.noelios.restlet.ext.xdb_11.1/src/com/noelios/restlet/ext/xdb/XdbServletConverter.java +++ b/modules/com.noelios.restlet.ext.xdb_11.1/src/com/noelios/restlet/ext/xdb/XdbServletConverter.java @@ -117,7 +117,6 @@ public XdbServletConverter(ServletContext context, Restlet target) { CallableStatement preparedstatement = null; try { conn = XdbServerServlet.getConnection(); - @SuppressWarnings("unused") int endPoint = 1; preparedstatement = conn .prepareCall("{ call dbms_xdb.getListenerEndPoint(1,?,?,?) }"); @@ -152,7 +151,6 @@ public XdbServletConverter(ServletContext context, Restlet target) { * @throws ServletException * @throws IOException */ - @SuppressWarnings("unused") public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { if (getTarget() != null) { diff --git a/modules/com.noelios.restlet.ext.xdb_11.1/src/com/noelios/restlet/ext/xdb/XdbServletWarClientHelper.java b/modules/com.noelios.restlet.ext.xdb_11.1/src/com/noelios/restlet/ext/xdb/XdbServletWarClientHelper.java index 9ab45a63d8..3a13e1f642 100644 --- a/modules/com.noelios.restlet.ext.xdb_11.1/src/com/noelios/restlet/ext/xdb/XdbServletWarClientHelper.java +++ b/modules/com.noelios.restlet.ext.xdb_11.1/src/com/noelios/restlet/ext/xdb/XdbServletWarClientHelper.java @@ -104,7 +104,6 @@ public ServletConfig getConfig() { return this.config; } - @SuppressWarnings("unchecked") @Override public void handle(Request request, Response response) { PreparedStatement stmt = null; diff --git 
a/modules/com.noelios.restlet.test/src/com/noelios/restlet/test/ChunkedEncodingPutTestCase.java b/modules/com.noelios.restlet.test/src/com/noelios/restlet/test/ChunkedEncodingPutTestCase.java index f4690ec515..19375ca3e0 100644 --- a/modules/com.noelios.restlet.test/src/com/noelios/restlet/test/ChunkedEncodingPutTestCase.java +++ b/modules/com.noelios.restlet.test/src/com/noelios/restlet/test/ChunkedEncodingPutTestCase.java @@ -54,7 +54,6 @@ public PutTestResource(Context ctx, Request request, Response response) { } @Override - @SuppressWarnings("unchecked") public void storeRepresentation(Representation entity) { getResponse().setEntity(entity); } diff --git a/modules/com.noelios.restlet.test/src/com/noelios/restlet/test/ChunkedEncodingTestCase.java b/modules/com.noelios.restlet.test/src/com/noelios/restlet/test/ChunkedEncodingTestCase.java index 97efe41682..7ab9f1f5bf 100644 --- a/modules/com.noelios.restlet.test/src/com/noelios/restlet/test/ChunkedEncodingTestCase.java +++ b/modules/com.noelios.restlet.test/src/com/noelios/restlet/test/ChunkedEncodingTestCase.java @@ -75,7 +75,6 @@ public Representation represent(Variant variant) { } @Override - @SuppressWarnings("unchecked") public void storeRepresentation(Representation entity) { checkForChunkedHeader(getRequest()); diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/authentication/AuthenticationHelper.java b/modules/com.noelios.restlet/src/com/noelios/restlet/authentication/AuthenticationHelper.java index 154bf33e3a..98eb09340b 100644 --- a/modules/com.noelios.restlet/src/com/noelios/restlet/authentication/AuthenticationHelper.java +++ b/modules/com.noelios.restlet/src/com/noelios/restlet/authentication/AuthenticationHelper.java @@ -141,7 +141,6 @@ public String format(ChallengeRequest request) { * The current request HTTP headers. * @return The authorization header value. 
*/ - @SuppressWarnings("deprecation") public String format(ChallengeResponse challenge, Request request, Series<Parameter> httpHeaders) { StringBuilder sb = new StringBuilder(); diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/authentication/AuthenticationUtils.java b/modules/com.noelios.restlet/src/com/noelios/restlet/authentication/AuthenticationUtils.java index 250c53a011..2c87ff5c1b 100644 --- a/modules/com.noelios.restlet/src/com/noelios/restlet/authentication/AuthenticationUtils.java +++ b/modules/com.noelios.restlet/src/com/noelios/restlet/authentication/AuthenticationUtils.java @@ -175,7 +175,6 @@ public static String format(ChallengeRequest request) { * The current request HTTP headers. * @return The authorization header value. */ - @SuppressWarnings("deprecation") public static String format(ChallengeResponse challenge, Request request, Series<Parameter> httpHeaders) { String result = null; diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpCall.java b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpCall.java index 7d7535a5cb..6d33821d71 100644 --- a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpCall.java +++ b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpCall.java @@ -315,7 +315,6 @@ public int getServerPort() { * @return The status code. * @throws IOException */ - @SuppressWarnings("unused") public int getStatusCode() throws IOException { return this.statusCode; } diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpServerCall.java b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpServerCall.java index f821adabca..11b484f455 100644 --- a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpServerCall.java +++ b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpServerCall.java @@ -554,7 +554,6 @@ public void writeResponseBody(Representation entity, * The response. 
* @throws IOException */ - @SuppressWarnings("unused") public void writeResponseHead(Response response) throws IOException { // Do nothing by default } diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpServerConverter.java b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpServerConverter.java index 3fa08f0f17..38abdf2eb7 100644 --- a/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpServerConverter.java +++ b/modules/com.noelios.restlet/src/com/noelios/restlet/http/HttpServerConverter.java @@ -213,7 +213,6 @@ public HttpServerConverter(Context context) { * @param response * The response returned. */ - @SuppressWarnings("unchecked") protected void addEntityHeaders(HttpResponse response) { Series<Parameter> responseHeaders = response.getHttpCall() .getResponseHeaders(); diff --git a/modules/com.noelios.restlet/src/com/noelios/restlet/http/StreamServerHelper.java b/modules/com.noelios.restlet/src/com/noelios/restlet/http/StreamServerHelper.java index b823c7c39f..b834576df4 100644 --- a/modules/com.noelios.restlet/src/com/noelios/restlet/http/StreamServerHelper.java +++ b/modules/com.noelios.restlet/src/com/noelios/restlet/http/StreamServerHelper.java @@ -156,7 +156,6 @@ protected ServerSocketChannel createServerSocket() throws IOException { * @return The created socket address. 
* @throws IOException */ - @SuppressWarnings("unused") protected SocketAddress createSocketAddress() throws IOException { if (getHelped().getAddress() == null) { return new InetSocketAddress(getHelped().getPort()); diff --git a/modules/org.restlet.ext.jaxb_2.1/src/org/restlet/ext/jaxb/JaxbRepresentation.java b/modules/org.restlet.ext.jaxb_2.1/src/org/restlet/ext/jaxb/JaxbRepresentation.java index 1a62e3ce2e..6aa803e2a1 100644 --- a/modules/org.restlet.ext.jaxb_2.1/src/org/restlet/ext/jaxb/JaxbRepresentation.java +++ b/modules/org.restlet.ext.jaxb_2.1/src/org/restlet/ext/jaxb/JaxbRepresentation.java @@ -360,7 +360,6 @@ public JaxbRepresentation(MediaType mediaType, T object) { * @throws IOException * If unmarshalling XML fails. */ - @SuppressWarnings("unchecked") public JaxbRepresentation(Representation xmlRepresentation, Class<T> type) { this(xmlRepresentation, type.getPackage().getName(), null); } @@ -381,7 +380,6 @@ public JaxbRepresentation(Representation xmlRepresentation, Class<T> type) { * @throws IOException * If unmarshalling XML fails. */ - @SuppressWarnings("unchecked") public JaxbRepresentation(Representation xmlRepresentation, Class<T> type, ValidationEventHandler validationHandler) { this(xmlRepresentation, type.getPackage().getName(), validationHandler); @@ -401,7 +399,6 @@ public JaxbRepresentation(Representation xmlRepresentation, Class<T> type, * @throws IOException * If unmarshalling XML fails. */ - @SuppressWarnings("unchecked") public JaxbRepresentation(Representation xmlRepresentation, String contextPath) { this(xmlRepresentation, contextPath, null); @@ -423,7 +420,6 @@ public JaxbRepresentation(Representation xmlRepresentation, * @throws IOException * If unmarshalling XML fails. 
*/ - @SuppressWarnings("unchecked") public JaxbRepresentation(Representation xmlRepresentation, String contextPath, ValidationEventHandler validationHandler) { super(xmlRepresentation.getMediaType()); diff --git a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/JaxRsRestlet.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/JaxRsRestlet.java index a1e5212ddf..2d82f16ef2 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/JaxRsRestlet.java +++ b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/JaxRsRestlet.java @@ -173,7 +173,6 @@ public class JaxRsRestlet extends Restlet { /** * Contains the thread localized {@link CallContext}s. */ - @SuppressWarnings("unchecked") private final ThreadLocalizedContext tlContext = new ThreadLocalizedContext(); private volatile ObjectFactory objectFactory; @@ -420,7 +419,6 @@ private boolean addProvider(Class<?> jaxRsProviderClass, } @Override - @SuppressWarnings("unchecked") public void start() throws Exception { for (Provider<?> provider : allProviders) provider.init(tlContext, entityProviders, contextResolvers, diff --git a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/CallContext.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/CallContext.java index 59301678a7..6e7263abdf 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/CallContext.java +++ b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/CallContext.java @@ -263,10 +263,7 @@ public CallContext(Request request, org.restlet.data.Response response, this.readOnly = false; this.request = request; this.response = response; - if (roleChecker != null) - this.roleChecker = roleChecker; - else - this.roleChecker = RoleChecker.REJECT_WITH_ERROR; + this.roleChecker = roleChecker; this.accMediaTypes = SortedMetadata.getForMediaTypes(request .getClientInfo().getAcceptedMediaTypes()); } diff --git 
a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/JaxRsUriBuilder.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/JaxRsUriBuilder.java index 1d88582500..86e0494847 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/JaxRsUriBuilder.java +++ b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/JaxRsUriBuilder.java @@ -203,7 +203,6 @@ public URI build() throws UriBuilderException { * @see javax.ws.rs.core.UriBuilder#build(java.util.Map) */ @Override - @SuppressWarnings("unchecked") public URI build(final Map<String, Object> values) throws IllegalArgumentException, UriBuilderException { Template template = new Template(toStringWithCheck(false)); diff --git a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/ThreadLocalizedContext.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/ThreadLocalizedContext.java index 17ba2024b6..c7ec6df71b 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/ThreadLocalizedContext.java +++ b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/ThreadLocalizedContext.java @@ -140,7 +140,6 @@ public List<String> getAcceptableLanguages() { * @see CallContext#getAcceptableMediaTypes() * @see HttpHeaders#getAcceptableMediaTypes() */ - @SuppressWarnings("deprecation") public List<MediaType> getAcceptableMediaTypes() { return get().getAcceptableMediaTypes(); } diff --git a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/UnmodifiableMultivaluedMap.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/UnmodifiableMultivaluedMap.java index 56adbb37c7..1ef0290922 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/UnmodifiableMultivaluedMap.java +++ 
b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/core/UnmodifiableMultivaluedMap.java @@ -69,13 +69,11 @@ private UnmodifiableMultivaluedMap(MultivaluedMapImpl<K, V> mmap, } @Deprecated - @SuppressWarnings("unused") public void add(K key, V value) { throw throwUnmodifiable(); } @Deprecated - @SuppressWarnings("unused") public void clear() throws UnsupportedOperationException { throw throwUnmodifiable(); } @@ -158,27 +156,23 @@ public Set<K> keySet() { } @Deprecated - @SuppressWarnings("unused") public List<V> put(K key, List<V> value) throws UnsupportedOperationException { throw throwUnmodifiable(); } @Deprecated - @SuppressWarnings("unused") public void putAll(Map<? extends K, ? extends List<V>> t) throws UnsupportedOperationException { throw throwUnmodifiable(); } @Deprecated - @SuppressWarnings("unused") public void putSingle(K key, V value) throws UnsupportedOperationException { throw throwUnmodifiable(); } @Deprecated - @SuppressWarnings("unused") public List<V> remove(Object key) throws UnsupportedOperationException { throw throwUnmodifiable(); } diff --git a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/util/EncodeOrCheck.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/util/EncodeOrCheck.java index 79a4b0d3b6..7970765291 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/util/EncodeOrCheck.java +++ b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/util/EncodeOrCheck.java @@ -271,7 +271,6 @@ else if (c == '%') { * @throws IllegalArgumentException * if encode is false and at least one character is invalid. 
*/ - @SuppressWarnings("unused") public static CharSequence fullMatrix(CharSequence matrix, boolean encode) throws IllegalArgumentException { return fullQueryOrMatrix(matrix, ';', "%20", encode); @@ -286,7 +285,6 @@ public static CharSequence fullMatrix(CharSequence matrix, boolean encode) * @param encode * @return */ - @SuppressWarnings("unused") public static CharSequence fullQuery(CharSequence query, boolean encode) { return fullQueryOrMatrix(query, '&', "+", encode); } diff --git a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/ResourceClass.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/ResourceClass.java index 4ce9227199..4b030e49fe 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/ResourceClass.java +++ b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/ResourceClass.java @@ -150,7 +150,6 @@ protected ResourceClass(Class<?> jaxRsClass, ThreadLocalizedContext tlContext, EntityProviders entityProviders, Collection<ContextResolver<?>> allCtxResolvers, ExtensionBackwardMapping extensionBackwardMapping, Logger logger, - @SuppressWarnings("unused") Logger sameLogger) throws IllegalArgumentException, IllegalPathOnClassException, MissingAnnotationException { super(PathRegExp.createForClass(jaxRsClass)); diff --git a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/provider/ContextResolverCollection.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/provider/ContextResolverCollection.java index 3241a1ea8a..41a48969cf 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/provider/ContextResolverCollection.java +++ b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/provider/ContextResolverCollection.java @@ -40,7 +40,6 @@ public ContextResolverCollection(Collection<ContextResolver<?>> resolvers) { /** * 
@see javax.ws.rs.ext.ContextResolver#getContext(java.lang.Class) */ - @SuppressWarnings("unchecked") public Object getContext(Class<?> type) { for (ContextResolver<?> cr : resolvers) { Object context = cr.getContext(type); diff --git a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyReader.java b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyReader.java index 5c5ba232af..3182189dde 100644 --- a/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyReader.java +++ b/modules/org.restlet.ext.jaxrs_0.8/src/org/restlet/ext/jaxrs/internal/wrappers/provider/MessageBodyReader.java @@ -28,7 +28,6 @@ * @param <T> * the java type to convert to. */ -@SuppressWarnings("unchecked") public interface MessageBodyReader<T> extends javax.ws.rs.ext.MessageBodyReader<T> { /** diff --git a/modules/org.restlet.ext.shell/src/org/restlet/ext/shell/connectors/Shell.java b/modules/org.restlet.ext.shell/src/org/restlet/ext/shell/connectors/Shell.java index 10f38387bf..5d3a777f0e 100644 --- a/modules/org.restlet.ext.shell/src/org/restlet/ext/shell/connectors/Shell.java +++ b/modules/org.restlet.ext.shell/src/org/restlet/ext/shell/connectors/Shell.java @@ -28,7 +28,7 @@ public Shell(ScriptEngine aScriptEngine, String aPrompt) { public void loop() { for (;;) { // update completor - console.setCandidates(new TreeSet(scriptEngine.getBindings(ScriptContext.ENGINE_SCOPE).keySet())); + console.setCandidates(new TreeSet<String>(scriptEngine.getBindings(ScriptContext.ENGINE_SCOPE).keySet())); String line = console.readLine(prompt); diff --git a/modules/org.restlet.ext.shell/src/org/restlet/ext/shell/helpers/ConsoleHelper.java b/modules/org.restlet.ext.shell/src/org/restlet/ext/shell/helpers/ConsoleHelper.java index 86bd941be3..b9930298d6 100644 --- a/modules/org.restlet.ext.shell/src/org/restlet/ext/shell/helpers/ConsoleHelper.java +++ 
b/modules/org.restlet.ext.shell/src/org/restlet/ext/shell/helpers/ConsoleHelper.java @@ -9,13 +9,18 @@ public class ConsoleHelper { - private static final File historyFile = new File(System.getProperty("user.home") + File.separator + ".RESTSHell_history"); + private static final File historyFile = new File(System + .getProperty("user.home") + + File.separator + ".RESTSHell_history"); + private ConsoleReader consoleReader; + private History history; + private SimpleCompletor completor; public ConsoleHelper() { - /// ConsoleHelper + // / ConsoleHelper try { consoleReader = new ConsoleReader(); } catch (IOException e) { @@ -26,14 +31,15 @@ public ConsoleHelper() { try { history = new History(historyFile); } catch (IOException e) { - throw new RuntimeException(String.format("cannot initialize history file %s", historyFile), e); + throw new RuntimeException(String.format( + "cannot initialize history file %s", historyFile), e); } consoleReader.setHistory(history); consoleReader.setUseHistory(true); // Completition - completor = new SimpleCompletor(new String[]{"help", "version"}); + completor = new SimpleCompletor(new String[] { "help", "version" }); consoleReader.addCompletor(completor); } @@ -44,9 +50,9 @@ public String readLine(String aPrompt) { line = consoleReader.readLine(aPrompt); } catch (IOException e) { // do nothing - } finally { - return line; } + + return line; } public String readPassword(String aPrompt) { @@ -56,9 +62,9 @@ public String readPassword(String aPrompt) { password = consoleReader.readLine(aPrompt); } catch (IOException e) { // do nothing - } finally { - return password; } + + return password; } public void writeLine(String line) { diff --git a/modules/org.restlet.gwt/src/org/restlet/gwt/resource/Representation.java b/modules/org.restlet.gwt/src/org/restlet/gwt/resource/Representation.java index 9e26554f17..bc7937e637 100644 --- a/modules/org.restlet.gwt/src/org/restlet/gwt/resource/Representation.java +++ 
b/modules/org.restlet.gwt/src/org/restlet/gwt/resource/Representation.java @@ -1,329 +1,328 @@ -/* - * Copyright 2005-2008 Noelios Consulting. - * - * The contents of this file are subject to the terms of the Common Development - * and Distribution License (the "License"). You may not use this file except in - * compliance with the License. - * - * You can obtain a copy of the license at - * http://www.opensource.org/licenses/cddl1.txt See the License for the specific - * language governing permissions and limitations under the License. - * - * When distributing Covered Code, include this CDDL HEADER in each file and - * include the License file at http://www.opensource.org/licenses/cddl1.txt If - * applicable, add the following below this CDDL HEADER, with the fields - * enclosed by brackets "[]" replaced with your own identifying information: - * Portions Copyright [yyyy] [name of copyright owner] - */ - -package org.restlet.gwt.resource; - -import java.util.Date; - -import org.restlet.gwt.data.MediaType; -import org.restlet.gwt.data.Tag; - -/** - * Current or intended state of a resource. The content of a representation can - * be retrieved several times if there is a stable and accessible source, like a - * local file or a string. When the representation is obtained via a temporary - * source like a network socket, its content can only be retrieved once. The - * "transient" and "available" properties are available to help you figure out - * those aspects at runtime.<br> - * <br> - * For performance purpose, it is essential that a minimal overhead occurs upon - * initialization. The main overhead must only occur during invocation of - * content processing methods (write, getStream, getChannel and toString).<br> - * <br> - * "REST components perform actions on a resource by using a representation to - * capture the current or intended state of that resource and transferring that - * representation between components. 
A representation is a sequence of bytes, - * plus representation metadata to describe those bytes. Other commonly used but - * less precise names for a representation include: document, file, and HTTP - * message entity, instance, or variant." Roy T. Fielding - * - * @see <a - * href="http://roy.gbiv.com/pubs/dissertation/rest_arch_style.htm#sec_5_2_1_2">Source - * dissertation</a> - * @author Jerome Louvel ([email protected]) - */ -public abstract class Representation extends Variant { - /** - * Empty representation with no content. - */ - private static class EmptyRepresentation extends Representation { - - /** - * Constructor. - */ - public EmptyRepresentation() { - setAvailable(false); - setTransient(true); - setSize(0); - } - - @Override - public String getText() { - return null; - } - } - - /** - * Indicates that the size of the representation can't be known in advance. - */ - @SuppressWarnings("hiding") - public static final long UNKNOWN_SIZE = -1L; - - /** - * Returns a new empty representation with no content. - * - * @return A new empty representation. - */ - public static Representation createEmpty() { - return new EmptyRepresentation(); - } - - /** Indicates if the representation's content is available. */ - private volatile boolean available; - - /** Indicates if the representation is downloadable. */ - private volatile boolean downloadable; - - /** - * Indicates the suggested download file name for the representation's - * content. - */ - private volatile String downloadName; - - /** The expiration date. */ - private volatile Date expirationDate; - - /** Indicates if the representation's content is transient. */ - private volatile boolean isTransient; - - /** The modification date. */ - private volatile Date modificationDate; - - /** - * The expected size. Dynamic representations can have any size, but - * sometimes we can know in advance the expected size. 
If this expected size - * is specified by the user, it has a higher priority than any size that can - * be guessed by the representation (like a file size). - */ - private volatile long size; - - /** The tag. */ - private volatile Tag tag; - - /** - * Default constructor. - */ - public Representation() { - this(null); - } - - /** - * Constructor. - * - * @param mediaType - * The media type. - */ - public Representation(MediaType mediaType) { - super(mediaType); - this.available = true; - this.isTransient = false; - this.size = UNKNOWN_SIZE; - this.expirationDate = null; - this.modificationDate = null; - this.tag = null; - } - - /** - * Returns the suggested download file name for this representation. This is - * mainly used to suggest to the client a local name for a downloaded - * representation. - * - * @return The suggested file name for this representation. - */ - public String getDownloadName() { - return this.downloadName; - } - - /** - * Returns the future date when this representation expire. If this - * information is not known, returns null. - * - * @return The expiration date. - */ - public Date getExpirationDate() { - return this.expirationDate; - } - - /** - * Returns the last date when this representation was modified. If this - * information is not known, returns null. - * - * @return The modification date. - */ - public Date getModificationDate() { - return this.modificationDate; - } - - /** - * Returns the size in bytes if known, UNKNOWN_SIZE (-1) otherwise. - * - * @return The size in bytes if known, UNKNOWN_SIZE (-1) otherwise. - */ - public long getSize() { - return this.size; - } - - /** - * Returns the tag. - * - * @return The tag. - */ - public Tag getTag() { - return this.tag; - } - - /** - * Converts the representation to a string value. Be careful when using this - * method as the conversion of large content to a string fully stored in - * memory can result in OutOfMemoryErrors being thrown. 
- * - * @return The representation as a string value. - */ - public abstract String getText(); - - /** - * Indicates if some fresh content is available, without having to actually - * call one of the content manipulation method like getStream() that would - * actually consume it. This is especially useful for transient - * representation whose content can only be accessed once and also when the - * size of the representation is not known in advance. - * - * @return True if some fresh content is available. - */ - public boolean isAvailable() { - return (getSize() != 0) && this.available; - } - - /** - * Indicates if the representation is downloadable which means that it can - * be obtained via a download dialog box. - * - * @return True if the representation's content is downloadable. - */ - public boolean isDownloadable() { - return downloadable; - } - - /** - * Indicates if the representation's content is transient, which means that - * it can be obtained only once. This is often the case with representations - * transmitted via network sockets for example. In such case, if you need to - * read the content several times, you need to cache it first, for example - * into memory or into a file. - * - * @return True if the representation's content is transient. - */ - public boolean isTransient() { - return this.isTransient; - } - - /** - * Releases the representation's content and all associated objects like - * sockets, channels or files. If the representation is transient and hasn't - * been read yet, all the remaining content will be discarded, any open - * socket, channel, file or similar source of content will be immediately - * closed. The representation is also no more available. - */ - public void release() { - this.available = false; - } - - /** - * Indicates if some fresh content is available. - * - * @param available - * True if some fresh content is available. 
- */ - public void setAvailable(boolean available) { - this.available = available; - } - - /** - * Indicates if the representation is downloadable which means that it can - * be obtained via a download dialog box. - * - * @param downloadable - * True if the representation's content is downloadable. - */ - public void setDownloadable(boolean downloadable) { - this.downloadable = downloadable; - } - - /** - * Set the suggested download file name for this representation. - * - * @param fileName - * The suggested file name. - */ - public void setDownloadName(String fileName) { - this.downloadName = fileName; - } - - /** - * Sets the future date when this representation expire. If this information - * is not known, pass null. - * - * @param expirationDate - * The expiration date. - */ - public void setExpirationDate(Date expirationDate) { - this.expirationDate = expirationDate; - } - - /** - * Sets the last date when this representation was modified. If this - * information is not known, pass null. - * - * @param modificationDate - * The modification date. - */ - public void setModificationDate(Date modificationDate) { - this.modificationDate = modificationDate; - } - - /** - * Sets the expected size in bytes if known, -1 otherwise. - * - * @param expectedSize - * The expected size in bytes if known, -1 otherwise. - */ - public void setSize(long expectedSize) { - this.size = expectedSize; - } - - /** - * Sets the tag. - * - * @param tag - * The tag. - */ - public void setTag(Tag tag) { - this.tag = tag; - } - - /** - * Indicates if the representation's content is transient. - * - * @param isTransient - * True if the representation's content is transient. - */ - public void setTransient(boolean isTransient) { - this.isTransient = isTransient; - } - -} +/* + * Copyright 2005-2008 Noelios Consulting. + * + * The contents of this file are subject to the terms of the Common Development + * and Distribution License (the "License"). 
You may not use this file except in + * compliance with the License. + * + * You can obtain a copy of the license at + * http://www.opensource.org/licenses/cddl1.txt See the License for the specific + * language governing permissions and limitations under the License. + * + * When distributing Covered Code, include this CDDL HEADER in each file and + * include the License file at http://www.opensource.org/licenses/cddl1.txt If + * applicable, add the following below this CDDL HEADER, with the fields + * enclosed by brackets "[]" replaced with your own identifying information: + * Portions Copyright [yyyy] [name of copyright owner] + */ + +package org.restlet.gwt.resource; + +import java.util.Date; + +import org.restlet.gwt.data.MediaType; +import org.restlet.gwt.data.Tag; + +/** + * Current or intended state of a resource. The content of a representation can + * be retrieved several times if there is a stable and accessible source, like a + * local file or a string. When the representation is obtained via a temporary + * source like a network socket, its content can only be retrieved once. The + * "transient" and "available" properties are available to help you figure out + * those aspects at runtime.<br> + * <br> + * For performance purpose, it is essential that a minimal overhead occurs upon + * initialization. The main overhead must only occur during invocation of + * content processing methods (write, getStream, getChannel and toString).<br> + * <br> + * "REST components perform actions on a resource by using a representation to + * capture the current or intended state of that resource and transferring that + * representation between components. A representation is a sequence of bytes, + * plus representation metadata to describe those bytes. Other commonly used but + * less precise names for a representation include: document, file, and HTTP + * message entity, instance, or variant." Roy T. 
Fielding + * + * @see <a + * href="http://roy.gbiv.com/pubs/dissertation/rest_arch_style.htm#sec_5_2_1_2">Source + * dissertation</a> + * @author Jerome Louvel ([email protected]) + */ +public abstract class Representation extends Variant { + /** + * Empty representation with no content. + */ + private static class EmptyRepresentation extends Representation { + + /** + * Constructor. + */ + public EmptyRepresentation() { + setAvailable(false); + setTransient(true); + setSize(0); + } + + @Override + public String getText() { + return null; + } + } + + /** + * Indicates that the size of the representation can't be known in advance. + */ + public static final long UNKNOWN_SIZE = -1L; + + /** + * Returns a new empty representation with no content. + * + * @return A new empty representation. + */ + public static Representation createEmpty() { + return new EmptyRepresentation(); + } + + /** Indicates if the representation's content is available. */ + private volatile boolean available; + + /** Indicates if the representation is downloadable. */ + private volatile boolean downloadable; + + /** + * Indicates the suggested download file name for the representation's + * content. + */ + private volatile String downloadName; + + /** The expiration date. */ + private volatile Date expirationDate; + + /** Indicates if the representation's content is transient. */ + private volatile boolean isTransient; + + /** The modification date. */ + private volatile Date modificationDate; + + /** + * The expected size. Dynamic representations can have any size, but + * sometimes we can know in advance the expected size. If this expected size + * is specified by the user, it has a higher priority than any size that can + * be guessed by the representation (like a file size). + */ + private volatile long size; + + /** The tag. */ + private volatile Tag tag; + + /** + * Default constructor. + */ + public Representation() { + this(null); + } + + /** + * Constructor. 
+ * + * @param mediaType + * The media type. + */ + public Representation(MediaType mediaType) { + super(mediaType); + this.available = true; + this.isTransient = false; + this.size = UNKNOWN_SIZE; + this.expirationDate = null; + this.modificationDate = null; + this.tag = null; + } + + /** + * Returns the suggested download file name for this representation. This is + * mainly used to suggest to the client a local name for a downloaded + * representation. + * + * @return The suggested file name for this representation. + */ + public String getDownloadName() { + return this.downloadName; + } + + /** + * Returns the future date when this representation expire. If this + * information is not known, returns null. + * + * @return The expiration date. + */ + public Date getExpirationDate() { + return this.expirationDate; + } + + /** + * Returns the last date when this representation was modified. If this + * information is not known, returns null. + * + * @return The modification date. + */ + public Date getModificationDate() { + return this.modificationDate; + } + + /** + * Returns the size in bytes if known, UNKNOWN_SIZE (-1) otherwise. + * + * @return The size in bytes if known, UNKNOWN_SIZE (-1) otherwise. + */ + public long getSize() { + return this.size; + } + + /** + * Returns the tag. + * + * @return The tag. + */ + public Tag getTag() { + return this.tag; + } + + /** + * Converts the representation to a string value. Be careful when using this + * method as the conversion of large content to a string fully stored in + * memory can result in OutOfMemoryErrors being thrown. + * + * @return The representation as a string value. + */ + public abstract String getText(); + + /** + * Indicates if some fresh content is available, without having to actually + * call one of the content manipulation method like getStream() that would + * actually consume it. 
This is especially useful for transient + * representation whose content can only be accessed once and also when the + * size of the representation is not known in advance. + * + * @return True if some fresh content is available. + */ + public boolean isAvailable() { + return (getSize() != 0) && this.available; + } + + /** + * Indicates if the representation is downloadable which means that it can + * be obtained via a download dialog box. + * + * @return True if the representation's content is downloadable. + */ + public boolean isDownloadable() { + return downloadable; + } + + /** + * Indicates if the representation's content is transient, which means that + * it can be obtained only once. This is often the case with representations + * transmitted via network sockets for example. In such case, if you need to + * read the content several times, you need to cache it first, for example + * into memory or into a file. + * + * @return True if the representation's content is transient. + */ + public boolean isTransient() { + return this.isTransient; + } + + /** + * Releases the representation's content and all associated objects like + * sockets, channels or files. If the representation is transient and hasn't + * been read yet, all the remaining content will be discarded, any open + * socket, channel, file or similar source of content will be immediately + * closed. The representation is also no more available. + */ + public void release() { + this.available = false; + } + + /** + * Indicates if some fresh content is available. + * + * @param available + * True if some fresh content is available. + */ + public void setAvailable(boolean available) { + this.available = available; + } + + /** + * Indicates if the representation is downloadable which means that it can + * be obtained via a download dialog box. + * + * @param downloadable + * True if the representation's content is downloadable. 
+ */ + public void setDownloadable(boolean downloadable) { + this.downloadable = downloadable; + } + + /** + * Set the suggested download file name for this representation. + * + * @param fileName + * The suggested file name. + */ + public void setDownloadName(String fileName) { + this.downloadName = fileName; + } + + /** + * Sets the future date when this representation expire. If this information + * is not known, pass null. + * + * @param expirationDate + * The expiration date. + */ + public void setExpirationDate(Date expirationDate) { + this.expirationDate = expirationDate; + } + + /** + * Sets the last date when this representation was modified. If this + * information is not known, pass null. + * + * @param modificationDate + * The modification date. + */ + public void setModificationDate(Date modificationDate) { + this.modificationDate = modificationDate; + } + + /** + * Sets the expected size in bytes if known, -1 otherwise. + * + * @param expectedSize + * The expected size in bytes if known, -1 otherwise. + */ + public void setSize(long expectedSize) { + this.size = expectedSize; + } + + /** + * Sets the tag. + * + * @param tag + * The tag. + */ + public void setTag(Tag tag) { + this.tag = tag; + } + + /** + * Indicates if the representation's content is transient. + * + * @param isTransient + * True if the representation's content is transient. + */ + public void setTransient(boolean isTransient) { + this.isTransient = isTransient; + } + +} diff --git a/modules/org.restlet.test/src/org/restlet/test/RestletTestSuite.java b/modules/org.restlet.test/src/org/restlet/test/RestletTestSuite.java index eef871275c..cef4a2f1a3 100644 --- a/modules/org.restlet.test/src/org/restlet/test/RestletTestSuite.java +++ b/modules/org.restlet.test/src/org/restlet/test/RestletTestSuite.java @@ -31,7 +31,6 @@ */ public class RestletTestSuite extends TestSuite { /** Constructor. 
*/ - @SuppressWarnings("deprecation") public RestletTestSuite() { addTestSuite(AtomTestCase.class); addTestSuite(ByteUtilsTestCase.class); diff --git a/modules/org.restlet.test/src/org/restlet/test/RiapTestCase.java b/modules/org.restlet.test/src/org/restlet/test/RiapTestCase.java index ead17ad481..38bc72e8c9 100644 --- a/modules/org.restlet.test/src/org/restlet/test/RiapTestCase.java +++ b/modules/org.restlet.test/src/org/restlet/test/RiapTestCase.java @@ -57,7 +57,6 @@ public void testRiap() throws Exception { @Override public Restlet createRoot() { return new Restlet(getContext()) { - @SuppressWarnings("unchecked") @Override public void handle(Request request, Response response) { final String selfBase = "riap://application"; diff --git a/modules/org.restlet.test/src/org/restlet/test/RouteListTestCase.java b/modules/org.restlet.test/src/org/restlet/test/RouteListTestCase.java index 1998e38156..da8218fcac 100644 --- a/modules/org.restlet.test/src/org/restlet/test/RouteListTestCase.java +++ b/modules/org.restlet.test/src/org/restlet/test/RouteListTestCase.java @@ -67,7 +67,6 @@ public void testGetNext() { assertSame(first, list.getNext(null, null, 1f)); } - @SuppressWarnings("null") public void testGetRandom() { RouteList list = new RouteList(); diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/IllegalConstructorResource.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/IllegalConstructorResource.java index d862e56d2b..6e6fcf7ddc 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/IllegalConstructorResource.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/IllegalConstructorResource.java @@ -29,7 +29,6 @@ * @author Stephan Koops * @see IllegalConstructorTest */ -@SuppressWarnings("unused") @Path("IllegalConstructorResource") public class IllegalConstructorResource { diff --git 
a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/PersonsResource.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/PersonsResource.java index b719497ef7..fec245195f 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/PersonsResource.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/resources/PersonsResource.java @@ -63,7 +63,6 @@ public Response addPerson(Person person) { * @param person * @return */ - @SuppressWarnings("unused") private int createPerson(Person person) { return 5; } diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/CarTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/CarTest.java index 872af6616c..e27e739dc0 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/CarTest.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/CarTest.java @@ -51,7 +51,6 @@ public void testDelete() throws Exception { .getStatus()); } - @SuppressWarnings("null") public void testGetCar() throws Exception { String carNumber = "57"; @@ -69,7 +68,6 @@ public void testGetHtmlText() throws Exception { assertEquals(Status.CLIENT_ERROR_NOT_ACCEPTABLE, response.getStatus()); } - @SuppressWarnings("null") public void testGetOffers() throws Exception { Response response = get("offers"); Representation representation = response.getEntity(); diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/InjectionTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/InjectionTest.java index 319d1c9361..cb2462d1ab 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/InjectionTest.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/InjectionTest.java @@ -42,7 +42,6 @@ public class InjectionTest extends JaxRsTestCase { protected ApplicationConfig getAppConfig() { ApplicationConfig 
appConfig = new ApplicationConfig() { @Override - @SuppressWarnings("unchecked") public Set<Class<?>> getResourceClasses() { Set<Class<?>> rrcs = new HashSet<Class<?>>(); rrcs.add(getRootResourceClass()); diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/JaxRsTestCase.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/JaxRsTestCase.java index ff2ef335cd..c580efa189 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/JaxRsTestCase.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/JaxRsTestCase.java @@ -274,7 +274,6 @@ public Response accessServer(Method httpMethod, Class<?> klasse, return accessServer(request); } - @SuppressWarnings("unchecked") public Response accessServer(Method httpMethod, Class<?> klasse, String subPath, MediaType accMediaType) { Collection<MediaType> mediaTypes = null; diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/JsonTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/JsonTest.java index 49211d4437..f91775a2cd 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/JsonTest.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/JsonTest.java @@ -50,7 +50,6 @@ */ public class JsonTest extends JaxRsTestCase { - @SuppressWarnings("unchecked") @Override protected Class<?> getRootResourceClass() { return JsonTestService.class; diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ProviderTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ProviderTest.java index d3a4192281..8a61334383 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ProviderTest.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ProviderTest.java @@ -85,7 +85,6 @@ private Response getAndExpectAlphabet(String subPath) throws IOException { return response; } - 
@SuppressWarnings("unchecked") @Override protected Class<?> getRootResourceClass() { return ProviderTestService.class; @@ -95,7 +94,6 @@ protected Class<?> getRootResourceClass() { * @param subPath * @throws IOException */ - @SuppressWarnings("unused") private void postAndCheckXml(String subPath) throws Exception { Representation send = new DomRepresentation( new StringRepresentation( diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java index 59576e07bd..7fa9b2adde 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/RequestTest.java @@ -182,7 +182,6 @@ public void testDateAndEntityTag4Put() throws Exception { .contains("The entity does not match Entity Tag")); } - @SuppressWarnings("deprecation") public void testGetDateNotModified() throws Exception { Conditions conditions = new Conditions(); conditions.setModifiedSince(AFTER); diff --git a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ResponseBuilderTest.java b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ResponseBuilderTest.java index 4d13c640d1..2506ebb5e0 100644 --- a/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ResponseBuilderTest.java +++ b/modules/org.restlet.test/src/org/restlet/test/jaxrs/services/tests/ResponseBuilderTest.java @@ -33,7 +33,6 @@ public class ResponseBuilderTest extends JaxRsTestCase { @Override - @SuppressWarnings("unchecked") protected Class<?> getRootResourceClass() { return ResponseBuilderService.class; } diff --git a/modules/org.restlet.test/src/org/restlet/test/spring/BeanNameRouterTest.java b/modules/org.restlet.test/src/org/restlet/test/spring/BeanNameRouterTest.java index b59b306532..3c74b1137a 100644 --- 
a/modules/org.restlet.test/src/org/restlet/test/spring/BeanNameRouterTest.java +++ b/modules/org.restlet.test/src/org/restlet/test/spring/BeanNameRouterTest.java @@ -63,7 +63,6 @@ public void testRoutesCreatedForUrlAliases() throws Exception { .contains(FISH_URI)); } - @SuppressWarnings("null") public void testRoutesPointToFindersForBeans() throws Exception { router.postProcessBeanFactory(factory); diff --git a/modules/org.restlet/src/org/restlet/Component.java b/modules/org.restlet/src/org/restlet/Component.java index 820fc1fd93..d807606b7b 100644 --- a/modules/org.restlet/src/org/restlet/Component.java +++ b/modules/org.restlet/src/org/restlet/Component.java @@ -492,7 +492,6 @@ private Route attach(Router router, String targetClassName, * Is this route the default one? * @return the created route, or null. */ - @SuppressWarnings("unchecked") private Route attachWithDescriptor(Router router, String targetDescriptor, String uriPattern, boolean defaultRoute) { Route route = null; diff --git a/modules/org.restlet/src/org/restlet/Guard.java b/modules/org.restlet/src/org/restlet/Guard.java index 447de4290f..00a9b05d9e 100644 --- a/modules/org.restlet/src/org/restlet/Guard.java +++ b/modules/org.restlet/src/org/restlet/Guard.java @@ -215,7 +215,6 @@ public int authenticate(Request request) { * The request to authorize. * @return True if the request is authorized. 
*/ - @SuppressWarnings("unused") public boolean authorize(Request request) { // Authorize everything by default return true; @@ -261,7 +260,6 @@ public void challenge(Response response, boolean stale) { * the identifier's secret * @return true if the secret is valid for the given identifier */ - @SuppressWarnings("unused") public boolean checkSecret(Request request, String identifier, char[] secret) { return checkSecret(identifier, secret); } diff --git a/modules/org.restlet/src/org/restlet/data/ChallengeResponse.java b/modules/org.restlet/src/org/restlet/data/ChallengeResponse.java index 1058a5bc6b..e86c478b4b 100644 --- a/modules/org.restlet/src/org/restlet/data/ChallengeResponse.java +++ b/modules/org.restlet/src/org/restlet/data/ChallengeResponse.java @@ -42,7 +42,6 @@ private final class PrincipalImpl implements Principal, Serializable { /** * Constructor for deserialization. */ - @SuppressWarnings("unused") private PrincipalImpl() { } diff --git a/modules/org.restlet/src/org/restlet/resource/ObjectRepresentation.java b/modules/org.restlet/src/org/restlet/resource/ObjectRepresentation.java index 48f6ef0794..2440f340ec 100644 --- a/modules/org.restlet/src/org/restlet/resource/ObjectRepresentation.java +++ b/modules/org.restlet/src/org/restlet/resource/ObjectRepresentation.java @@ -84,7 +84,6 @@ public ObjectRepresentation(T object) { * @return The represented object. * @throws IOException */ - @SuppressWarnings("unused") public T getObject() throws IOException { return this.object; } diff --git a/modules/org.restlet/src/org/restlet/resource/SaxRepresentation.java b/modules/org.restlet/src/org/restlet/resource/SaxRepresentation.java index fd8a376ba9..ace86b0ecc 100644 --- a/modules/org.restlet/src/org/restlet/resource/SaxRepresentation.java +++ b/modules/org.restlet/src/org/restlet/resource/SaxRepresentation.java @@ -231,7 +231,6 @@ public void write(OutputStream outputStream) throws IOException { * The XML writer to write to. 
* @throws IOException */ - @SuppressWarnings("unused") public void write(XmlWriter writer) throws IOException { // Do nothing by default. } diff --git a/modules/org.restlet/src/org/restlet/service/TunnelService.java b/modules/org.restlet/src/org/restlet/service/TunnelService.java index d2ab9548c4..2b35b6c336 100644 --- a/modules/org.restlet/src/org/restlet/service/TunnelService.java +++ b/modules/org.restlet/src/org/restlet/service/TunnelService.java @@ -223,8 +223,7 @@ public TunnelService(boolean enabled, boolean methodTunnel, * The client to test. * @return True if the request from a given client can be tunnelled. */ - public boolean allowClient(@SuppressWarnings("unused") - ClientInfo client) { + public boolean allowClient(ClientInfo client) { return true; } diff --git a/modules/org.restlet/src/org/restlet/util/ByteUtils.java b/modules/org.restlet/src/org/restlet/util/ByteUtils.java index b427bea602..c55bc523b5 100644 --- a/modules/org.restlet/src/org/restlet/util/ByteUtils.java +++ b/modules/org.restlet/src/org/restlet/util/ByteUtils.java @@ -492,7 +492,6 @@ public static void exhaust(InputStream input) throws IOException { * The input stream to convert. * @return A readable byte channel. */ - @SuppressWarnings("unused") public static ReadableByteChannel getChannel(InputStream inputStream) { return (inputStream != null) ? Channels.newChannel(inputStream) : null; } @@ -504,7 +503,6 @@ public static ReadableByteChannel getChannel(InputStream inputStream) { * The output stream. * @return A writable byte channel. */ - @SuppressWarnings("unused") public static WritableByteChannel getChannel(OutputStream outputStream) { return (outputStream != null) ? Channels.newChannel(outputStream) : null; @@ -608,7 +606,6 @@ public void run() { * The readable byte channel. * @return An input stream based on a given readable byte channel. 
*/ - @SuppressWarnings("unused") public static InputStream getStream(ReadableByteChannel readableChannel) { InputStream result = null; @@ -641,7 +638,6 @@ public static InputStream getStream(Reader reader, CharacterSet characterSet) { * the representation to get the {@link OutputStream} from. * @return A stream with the representation's content. */ - @SuppressWarnings("unused") public static InputStream getStream(final Representation representation) { if (representation == null) { return null; @@ -708,7 +704,6 @@ public static OutputStream getStream(WritableByteChannel writableChannel) { * The writer. * @return the output stream of the writer */ - @SuppressWarnings("unused") public static OutputStream getStream(Writer writer) { return new WriterOutputStream(writer); }
c95eca10efeaa160791b351cd3786418b35f416c
kotlin
Avoid wrapping AssertionError over and over- again
c
https://github.com/JetBrains/kotlin
diff --git a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorDispatcher.java b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorDispatcher.java index 8501ab1904efe..31dc99541657e 100644 --- a/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorDispatcher.java +++ b/compiler/frontend/src/org/jetbrains/kotlin/types/expressions/ExpressionTypingVisitorDispatcher.java @@ -182,18 +182,30 @@ private JetTypeInfo getTypeInfo(@NotNull JetExpression expression, ExpressionTyp } catch (Throwable e) { context.trace.report(Errors.EXCEPTION_FROM_ANALYZER.on(expression, e)); - LOG.error( - "Exception while analyzing expression at " + DiagnosticUtils.atLocation(expression) + ":\n" + expression.getText() + "\n", - e - ); + logOrThrowException(expression, e); return JetTypeInfo.create( ErrorUtils.createErrorType(e.getClass().getSimpleName() + " from analyzer"), context.dataFlowInfo ); } - } + } -////////////////////////////////////////////////////////////////////////////////////////////// + private static void logOrThrowException(@NotNull JetExpression expression, Throwable e) { + try { + // This trows AssertionError in CLI and reports the error in the IDE + LOG.error( + "Exception while analyzing expression at " + DiagnosticUtils.atLocation(expression) + ":\n" + expression.getText() + "\n", + e + ); + } + catch (AssertionError errorFromLogger) { + // If we ended up here, we are in CLI, and the initial exception needs to be rethrown, + // simply throwing AssertionError causes its being wrapped over and over again + throw new KotlinFrontEndException(errorFromLogger.getMessage(), e); + } + } + + ////////////////////////////////////////////////////////////////////////////////////////////// @Override public JetTypeInfo visitFunctionLiteralExpression(@NotNull JetFunctionLiteralExpression expression, ExpressionTypingContext data) {
921a85b1618ebc4a2f92c1c786b6b9b84f1ffb38
Valadoc
libvaladoc: @parameter: check for existence
a
https://github.com/GNOME/vala/
diff --git a/icons/devhelpstyle.css b/icons/devhelpstyle.css index c52052af21..d6c94432b1 100644 --- a/icons/devhelpstyle.css +++ b/icons/devhelpstyle.css @@ -202,6 +202,10 @@ h3.main_title { .main_parameter_table_text, .main_errordomain_table_text, .main_enum_table_text { } +.main_parameter_table_unknown_parameter { + color: GREY; +} + .main_parameter_table_name, .main_errordomain_table_name, .main_enum_table_name { vertical-align: top; text-align: right; diff --git a/icons/style.css b/icons/style.css index c4b0c799be..919fba6a9e 100644 --- a/icons/style.css +++ b/icons/style.css @@ -217,6 +217,10 @@ h3.main_title { .main_parameter_table_text, .main_errordomain_table_text, .main_enum_table_text { } +.main_parameter_table_unknown_parameter { + color: GREY; +} + .main_parameter_table_name, .main_errordomain_table_name, .main_enum_table_name { vertical-align: top; text-align: right; diff --git a/icons/wikistyle.css b/icons/wikistyle.css index a3d8e4b981..d9aab37db4 100644 --- a/icons/wikistyle.css +++ b/icons/wikistyle.css @@ -203,6 +203,10 @@ h3.main_title { .main_parameter_table_text, .main_errordomain_table_text, .main_enum_table_text { } +.main_parameter_table_unknown_parameter { + color: GREY; +} + .main_parameter_table_name, .main_errordomain_table_name, .main_enum_table_name { vertical-align: top; text-align: right; diff --git a/src/libvaladoc/html/htmlrenderer.vala b/src/libvaladoc/html/htmlrenderer.vala index 22353804d8..d0ea807cbd 100755 --- a/src/libvaladoc/html/htmlrenderer.vala +++ b/src/libvaladoc/html/htmlrenderer.vala @@ -127,7 +127,12 @@ public class Valadoc.Html.HtmlRenderer : ContentRenderer { taglets, (taglet) => { var param = taglet as Taglets.Param; - writer.start_tag ("tr"); + string[]? 
unknown_parameter_css = null; + if (param.parameter == null) { + unknown_parameter_css = {"class", "main_parameter_table_unknown_parameter"}; + } + + writer.start_tag ("tr", unknown_parameter_css); writer.start_tag ("td", {"class", "main_parameter_table_name"}).text (param.parameter_name).end_tag ("td"); writer.start_tag ("td"); param.accept_children (this); diff --git a/src/libvaladoc/taglets/tagletparam.vala b/src/libvaladoc/taglets/tagletparam.vala index ed5505ee75..db0d962c35 100755 --- a/src/libvaladoc/taglets/tagletparam.vala +++ b/src/libvaladoc/taglets/tagletparam.vala @@ -27,6 +27,8 @@ using Valadoc.Content; public class Valadoc.Taglets.Param : InlineContent, Taglet, Block { public string parameter_name { internal set; get; } + public Api.Symbol? parameter { private set; get; } + public Rule? get_parser_rule (Rule run_rule) { return Rule.seq ({ Rule.option ({ Rule.many ({ TokenType.SPACE }) }), @@ -37,7 +39,31 @@ public class Valadoc.Taglets.Param : InlineContent, Taglet, Block { public override void check (Api.Tree api_root, Api.Node container, ErrorReporter reporter, Settings settings) { - // TODO check for the existence of such a parameter + // Check for the existence of such a parameter + + this.parameter = null; + + if (parameter_name == "...") { + Gee.List<Api.Node> params = container.get_children_by_type (Api.NodeType.FORMAL_PARAMETER, false); + foreach (Api.Node param in params) { + if (((Api.FormalParameter) param).ellipsis) { + this.parameter = (Api.Symbol) param; + break; + } + } + } else { + Gee.List<Api.Node> params = container.get_children_by_types ({Api.NodeType.FORMAL_PARAMETER, Api.NodeType.TYPE_PARAMETER}, false); + foreach (Api.Node param in params) { + if (param.name == parameter_name) { + this.parameter = (Api.Symbol) param; + break; + } + } + } + + if (this.parameter == null) { + reporter.simple_warning ("%s: Unknown parameter `%s'", container.get_full_name (), parameter_name); + } base.check (api_root, container, reporter, settings); 
}
c2951e9c0fe94dbf2c000183a8721f6606a69ffc
Delta Spike
upgrade Arquillian Drone to 1.1.0.CR3
p
https://github.com/apache/deltaspike
diff --git a/deltaspike/modules/jsf/impl/pom.xml b/deltaspike/modules/jsf/impl/pom.xml index dd78267f1..9421514bf 100644 --- a/deltaspike/modules/jsf/impl/pom.xml +++ b/deltaspike/modules/jsf/impl/pom.xml @@ -52,7 +52,7 @@ <dependency> <groupId>org.jboss.arquillian.extension</groupId> <artifactId>arquillian-drone-api</artifactId> - <version>1.0.0.Final</version> + <version>1.1.0.CR3</version> <scope>test</scope> </dependency> <dependency>
ba7cd6426301af5e66f8537a4f1f1693bf192da1
Vala
glib-2.0: add GLib.IOChannel.win32_socket and win32_messages
a
https://github.com/GNOME/vala/
diff --git a/vapi/glib-2.0.vapi b/vapi/glib-2.0.vapi index 11eff0af02..de1b658f16 100644 --- a/vapi/glib-2.0.vapi +++ b/vapi/glib-2.0.vapi @@ -1723,6 +1723,10 @@ namespace GLib { public int unix_get_fd (); [CCode (cname = "g_io_channel_win32_new_fd")] public IOChannel.win32_new_fd (int fd); + [CCode (cname = "g_io_channel_win32_new_socket")] + public IOChannel.win32_socket (int socket); + [CCode (cname = "g_io_channel_win32_new_messages")] + public IOChannel.win32_messages (size_t hwnd); public void init (); public IOChannel.file (string filename, string mode) throws FileError; public IOStatus read_chars (char[] buf, out size_t bytes_read) throws ConvertError, IOChannelError;
0b37cec28721a379b87dd8a67a0d8cdd629d13c0
spring-framework
Consistent support for JTA 1.1- TransactionSynchronizationRegistry--JtaTransactionManager's configuration options for a TransactionSynchronizationRegistry are now in sync with the options for UserTransaction/TransactionManager. Specifically
a
https://github.com/spring-projects/spring-framework
diff --git a/spring-tx/src/main/java/org/springframework/transaction/jta/JtaTransactionManager.java b/spring-tx/src/main/java/org/springframework/transaction/jta/JtaTransactionManager.java index 5e808b70c4e9..9f136ba1e7e8 100644 --- a/spring-tx/src/main/java/org/springframework/transaction/jta/JtaTransactionManager.java +++ b/spring-tx/src/main/java/org/springframework/transaction/jta/JtaTransactionManager.java @@ -164,9 +164,11 @@ public class JtaTransactionManager extends AbstractPlatformTransactionManager private boolean autodetectTransactionManager = true; + private transient TransactionSynchronizationRegistry transactionSynchronizationRegistry; + private String transactionSynchronizationRegistryName; - private transient TransactionSynchronizationRegistry transactionSynchronizationRegistry; + private boolean autodetectTransactionSynchronizationRegistry = true; private boolean allowCustomIsolationLevels = false; @@ -327,7 +329,7 @@ public void setTransactionManager(TransactionManager transactionManager) { } /** - * Return the JTA TransactionManager that this transaction manager uses. + * Return the JTA TransactionManager that this transaction manager uses, if any. */ public TransactionManager getTransactionManager() { return this.transactionManager; @@ -363,6 +365,28 @@ public void setAutodetectTransactionManager(boolean autodetectTransactionManager this.autodetectTransactionManager = autodetectTransactionManager; } + /** + * Set the JTA 1.1 TransactionSynchronizationRegistry to use as direct reference. + * <p>A TransactionSynchronizationRegistry allows for interposed registration + * of transaction synchronizations, as an alternative to the regular registration + * methods on the JTA TransactionManager API. Also, it is an official part of the + * Java EE 5 platform, in contrast to the JTA TransactionManager itself. 
+ * <p>Note that the TransactionSynchronizationRegistry will be autodetected in JNDI and + * also from the UserTransaction/TransactionManager object if implemented there as well. + * @see #setTransactionSynchronizationRegistryName + * @see #setAutodetectTransactionSynchronizationRegistry + */ + public void setTransactionSynchronizationRegistry(TransactionSynchronizationRegistry transactionSynchronizationRegistry) { + this.transactionSynchronizationRegistry = transactionSynchronizationRegistry; + } + + /** + * Return the JTA 1.1 TransactionSynchronizationRegistry that this transaction manager uses, if any. + */ + public TransactionSynchronizationRegistry getTransactionSynchronizationRegistry() { + return this.transactionSynchronizationRegistry; + } + /** * Set the JNDI name of the JTA 1.1 TransactionSynchronizationRegistry. * <p>Note that the TransactionSynchronizationRegistry will be autodetected @@ -374,6 +398,20 @@ public void setTransactionSynchronizationRegistryName(String transactionSynchron this.transactionSynchronizationRegistryName = transactionSynchronizationRegistryName; } + /** + * Set whether to autodetect a JTA 1.1 TransactionSynchronizationRegistry object + * at its default JDNI location ("java:comp/TransactionSynchronizationRegistry") + * if the UserTransaction has also been obtained from JNDI, and also whether + * to fall back to checking whether the JTA UserTransaction/TransactionManager + * object implements the JTA TransactionSynchronizationRegistry interface too. + * <p>Default is "true", autodetecting the TransactionSynchronizationRegistry + * unless it has been specified explicitly. Can be turned off to delegate + * synchronization registration to the regular JTA TransactionManager API. 
+ */ + public void setAutodetectTransactionSynchronizationRegistry(boolean autodetectTransactionSynchronizationRegistry) { + this.autodetectTransactionSynchronizationRegistry = autodetectTransactionSynchronizationRegistry; + } + /** * Set whether to allow custom isolation levels to be specified. * <p>Default is "false", throwing an exception if a non-default isolation level @@ -404,38 +442,36 @@ public void afterPropertiesSet() throws TransactionSystemException { * @throws TransactionSystemException if initialization failed */ protected void initUserTransactionAndTransactionManager() throws TransactionSystemException { - // Fetch JTA UserTransaction from JNDI, if necessary. if (this.userTransaction == null) { + // Fetch JTA UserTransaction from JNDI, if necessary. if (StringUtils.hasLength(this.userTransactionName)) { this.userTransaction = lookupUserTransaction(this.userTransactionName); this.userTransactionObtainedFromJndi = true; } else { this.userTransaction = retrieveUserTransaction(); + if (this.userTransaction == null && this.autodetectUserTransaction) { + // Autodetect UserTransaction at its default JNDI location. + this.userTransaction = findUserTransaction(); + } } } - // Fetch JTA TransactionManager from JNDI, if necessary. if (this.transactionManager == null) { + // Fetch JTA TransactionManager from JNDI, if necessary. if (StringUtils.hasLength(this.transactionManagerName)) { this.transactionManager = lookupTransactionManager(this.transactionManagerName); } else { this.transactionManager = retrieveTransactionManager(); + if (this.transactionManager == null && this.autodetectTransactionManager) { + // Autodetect UserTransaction object that implements TransactionManager, + // and check fallback JNDI locations otherwise. + this.transactionManager = findTransactionManager(this.userTransaction); + } } } - // Autodetect UserTransaction at its default JNDI location. 
- if (this.userTransaction == null && this.autodetectUserTransaction) { - this.userTransaction = findUserTransaction(); - } - - // Autodetect UserTransaction object that implements TransactionManager, - // and check fallback JNDI locations else. - if (this.transactionManager == null && this.autodetectTransactionManager) { - this.transactionManager = findTransactionManager(this.userTransaction); - } - // If only JTA TransactionManager specified, create UserTransaction handle for it. if (this.userTransaction == null && this.transactionManager != null) { this.userTransaction = buildUserTransaction(this.transactionManager); @@ -477,15 +513,20 @@ protected void checkUserTransactionAndTransactionManager() throws IllegalStateEx * @throws TransactionSystemException if initialization failed */ protected void initTransactionSynchronizationRegistry() { - if (StringUtils.hasLength(this.transactionSynchronizationRegistryName)) { - this.transactionSynchronizationRegistry = - lookupTransactionSynchronizationRegistry(this.transactionSynchronizationRegistryName); - } - else { - this.transactionSynchronizationRegistry = retrieveTransactionSynchronizationRegistry(); - if (this.transactionSynchronizationRegistry == null) { + if (this.transactionSynchronizationRegistry == null) { + // Fetch JTA TransactionSynchronizationRegistry from JNDI, if necessary. 
+ if (StringUtils.hasLength(this.transactionSynchronizationRegistryName)) { this.transactionSynchronizationRegistry = - findTransactionSynchronizationRegistry(this.userTransaction, this.transactionManager); + lookupTransactionSynchronizationRegistry(this.transactionSynchronizationRegistryName); + } + else { + this.transactionSynchronizationRegistry = retrieveTransactionSynchronizationRegistry(); + if (this.transactionSynchronizationRegistry == null && this.autodetectTransactionSynchronizationRegistry) { + // Autodetect in JNDI if applicable, and check UserTransaction/TransactionManager + // object that implements TransactionSynchronizationRegistry otherwise. + this.transactionSynchronizationRegistry = + findTransactionSynchronizationRegistry(this.userTransaction, this.transactionManager); + } } }
85cb78d8788f9f604ba9f644126c3ebf30bdfd7b
hbase
HBASE-8287 TestRegionMergeTransactionOnCluster- failed in trunk build -4010--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1465528 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/hbase
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DispatchMergingRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DispatchMergingRegionHandler.java index 88626f355734..65ff8c4000a5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DispatchMergingRegionHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/DispatchMergingRegionHandler.java @@ -121,10 +121,14 @@ public void process() throws IOException { while (!masterServices.isStopped()) { try { Thread.sleep(20); + // Make sure check RIT first, then get region location, otherwise + // we would make a wrong result if region is online between getting + // region location and checking RIT + boolean isRIT = regionStates.isRegionInTransition(region_b); region_b_location = masterServices.getAssignmentManager() .getRegionStates().getRegionServerOfRegion(region_b); onSameRS = region_a_location.equals(region_b_location); - if (onSameRS || !regionStates.isRegionInTransition(region_b)) { + if (onSameRS || !isRIT) { // Regions are on the same RS, or region_b is not in // RegionInTransition any more break;
208f53a0626a814616470d39d8a7032aac23eb7b
camel
Fixed compiler issue on JDK 1.5--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@1056650 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/camel
diff --git a/camel-core/src/test/java/org/apache/camel/processor/SplitterParallelBigFileTest.java b/camel-core/src/test/java/org/apache/camel/processor/SplitterParallelBigFileTest.java index 08e597a1cc865..ef6404c7f63d2 100644 --- a/camel-core/src/test/java/org/apache/camel/processor/SplitterParallelBigFileTest.java +++ b/camel-core/src/test/java/org/apache/camel/processor/SplitterParallelBigFileTest.java @@ -62,12 +62,12 @@ public void xxxtestSplitParallelBigFile() throws Exception { StopWatch watch = new StopWatch(); NotifyBuilder builder = new NotifyBuilder(context).whenDone(lines + 1).create(); - boolean done = builder.matches(5, TimeUnit.MINUTES); + boolean done = builder.matches(120, TimeUnit.SECONDS); log.info("Took " + TimeUtils.printDuration(watch.stop())); if (!done) { - throw new CamelException("Could not split file in 5 minutes"); + throw new CamelException("Could not split file in 2 minutes"); } // need a little sleep for capturing memory profiling @@ -83,7 +83,7 @@ public void configure() throws Exception { //context.getExecutorServiceStrategy().getDefaultThreadPoolProfile().setMaxPoolSize(10); from("file:target/split") - .split(body().tokenize("\n")).parallelProcessing() + .split(body().tokenize("\n")).streaming().parallelProcessing() .to("log:split?groupSize=1000"); } };
f0e9bf9f4ccaaa8e0b41f28f97fb7b6d15a88363
intellij-community
Make is possible to enhance color schemes from- plugin in non-intellij environment (e.g. upsource)--
a
https://github.com/JetBrains/intellij-community
diff --git a/platform/editor-ui-ex/src/com/intellij/openapi/editor/colors/impl/AbstractColorsScheme.java b/platform/editor-ui-ex/src/com/intellij/openapi/editor/colors/impl/AbstractColorsScheme.java index e00655bef5cb5..93f9f3bf0b4c7 100644 --- a/platform/editor-ui-ex/src/com/intellij/openapi/editor/colors/impl/AbstractColorsScheme.java +++ b/platform/editor-ui-ex/src/com/intellij/openapi/editor/colors/impl/AbstractColorsScheme.java @@ -333,7 +333,7 @@ else if (ATTRIBUTES_ELEMENT.equals(childName)) { initFonts(); } - protected void readAttributes(@NotNull Element childNode) { + public void readAttributes(@NotNull Element childNode) { for (Element e : childNode.getChildren(OPTION_ELEMENT)) { TextAttributesKey name = TextAttributesKey.find(e.getAttributeValue(NAME_ATTR)); TextAttributes attr = new TextAttributes(e.getChild(VALUE_ELEMENT));
5a57b334d860c19332f81e7b84947e452341a18c
camel
CAMEL-2970: JmsProducer supports non blocking- async routing engine for InOut Exchanges (request-reply over JMS).--git-svn-id: https://svn.apache.org/repos/asf/camel/trunk@978995 13f79535-47bb-0310-9956-ffa450edef68-
a
https://github.com/apache/camel
diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java index 5b4520ea27d29..aad18b087e72b 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java @@ -67,7 +67,7 @@ public void onMessage(final Message message) { LOG.trace("onMessage START"); if (LOG.isDebugEnabled()) { - LOG.debug(endpoint + " consumer receiving JMS message: " + message); + LOG.debug(endpoint + " consumer received JMS message: " + message); } RuntimeCamelException rce = null; @@ -82,6 +82,12 @@ public void onMessage(final Message message) { if (LOG.isTraceEnabled()) { LOG.trace("onMessage.process START"); } + + String correlationId = message.getJMSCorrelationID(); + if (correlationId != null) { + LOG.debug("Received Message has JMSCorrelationID [" + correlationId + "]"); + } + processor.process(exchange); if (LOG.isTraceEnabled()) { LOG.trace("onMessage.process END"); @@ -292,19 +298,11 @@ protected void sendReply(String replyDestination, final Message message, final E getTemplate().send(replyDestination, new MessageCreator() { public Message createMessage(Session session) throws JMSException { Message reply = endpoint.getBinding().makeJmsMessage(exchange, out, session, cause); - - if (endpoint.getConfiguration().isUseMessageIDAsCorrelationID()) { - String messageID = exchange.getIn().getHeader("JMSMessageID", String.class); - reply.setJMSCorrelationID(messageID); - } else { - String correlationID = message.getJMSCorrelationID(); - if (correlationID != null) { - reply.setJMSCorrelationID(correlationID); - } - } + final String correlationID = determineCorrelationId(message); + reply.setJMSCorrelationID(correlationID); if (LOG.isDebugEnabled()) { - LOG.debug(endpoint + " sending reply JMS message: " + 
reply); + LOG.debug(endpoint + " sending reply JMS message [correlationId:" + correlationID + "]: " + reply); } return reply; } @@ -318,7 +316,9 @@ protected Object getReplyToDestination(Message message) throws JMSException { try { destination = message.getJMSReplyTo(); } catch (JMSException e) { - LOG.trace("Cannot read JMSReplyTo header. Will ignore this exception.", e); + if (LOG.isDebugEnabled()) { + LOG.debug("Cannot read JMSReplyTo header. Will ignore this exception.", e); + } } } return destination; diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsComponent.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsComponent.java index 527848c35a5b5..bf965c1ecdf73 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsComponent.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsComponent.java @@ -17,19 +17,16 @@ package org.apache.camel.component.jms; import java.util.Map; -import java.util.concurrent.ScheduledExecutorService; import javax.jms.ConnectionFactory; import javax.jms.ExceptionListener; import javax.jms.Session; import org.apache.camel.CamelContext; import org.apache.camel.Endpoint; -import org.apache.camel.component.jms.requestor.Requestor; import org.apache.camel.impl.DefaultComponent; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.spi.HeaderFilterStrategyAware; import org.apache.camel.util.CastUtils; -import org.apache.camel.util.EndpointHelper; import org.apache.camel.util.ObjectHelper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -54,13 +51,10 @@ public class JmsComponent extends DefaultComponent implements ApplicationContextAware, HeaderFilterStrategyAware { private static final transient Log LOG = LogFactory.getLog(JmsComponent.class); - private static final int DEFAULT_THREADPOOL_SIZE = 100; private static final String DEFAULT_QUEUE_BROWSE_STRATEGY = 
"org.apache.camel.component.jms.DefaultQueueBrowseStrategy"; private static final String KEY_FORMAT_STRATEGY_PARAM = "jmsKeyFormatStrategy"; - private ScheduledExecutorService scheduledExecutorService; private JmsConfiguration configuration; private ApplicationContext applicationContext; - private Requestor requestor; private QueueBrowseStrategy queueBrowseStrategy; private boolean attemptedToCreateQueueBrowserStrategy; private HeaderFilterStrategy headerFilterStrategy = new JmsHeaderFilterStrategy(); @@ -341,30 +335,6 @@ public void setDestinationResolver(DestinationResolver destinationResolver) { getConfiguration().setDestinationResolver(destinationResolver); } - public synchronized Requestor getRequestor() throws Exception { - if (requestor == null) { - requestor = new Requestor(getConfiguration(), getScheduledExecutorService()); - requestor.start(); - } - return requestor; - } - - public void setRequestor(Requestor requestor) { - this.requestor = requestor; - } - - public synchronized ScheduledExecutorService getScheduledExecutorService() { - if (scheduledExecutorService == null) { - scheduledExecutorService = getCamelContext().getExecutorServiceStrategy() - .newScheduledThreadPool(this, "JmsComponent", DEFAULT_THREADPOOL_SIZE); - } - return scheduledExecutorService; - } - - public void setScheduledExecutorService(ScheduledExecutorService scheduledExecutorService) { - this.scheduledExecutorService = scheduledExecutorService; - } - public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; } @@ -401,9 +371,6 @@ public void setHeaderFilterStrategy(HeaderFilterStrategy strategy) { @Override protected void doStop() throws Exception { - if (requestor != null) { - requestor.stop(); - } super.doStop(); } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java 
b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java index 0e5a91b81a408..dca332cd99a8f 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsConfiguration.java @@ -124,12 +124,6 @@ public class JmsConfiguration implements Cloneable { private boolean useMessageIDAsCorrelationID; private JmsProviderMetadata providerMetadata = new JmsProviderMetadata(); private JmsOperations metadataJmsOperations; - // defines the component created temporary replyTo destination sharing strategy: - // possible values are: "component", "endpoint", "producer" - // component - a single temp queue is shared among all producers for a given component instance - // endpoint - a single temp queue is shared among all producers for a given endpoint instance - // producer - a single temp queue is created per producer - private String replyToTempDestinationAffinity = REPLYTO_TEMP_DEST_AFFINITY_PER_ENDPOINT; private String replyToDestination; private String replyToDestinationSelectorName; private JmsMessageType jmsMessageType; @@ -157,10 +151,6 @@ public JmsConfiguration copy() { } - public static interface MessageSentCallback { - void sent(Message message); - } - public static class CamelJmsTemplate extends JmsTemplate { private JmsConfiguration config; @@ -220,6 +210,9 @@ private Object doSendToDestination(final Destination destination, try { message = messageCreator.createMessage(session); doSend(producer, message); + if (message != null && callback != null) { + callback.sent(message, destination); + } // Check commit - avoid commit call within a JTA transaction. if (session.getTransacted() && isSessionLocallyTransacted(session)) { // Transacted session created by this template -> commit. 
@@ -228,9 +221,6 @@ private Object doSendToDestination(final Destination destination, } finally { JmsUtils.closeMessageProducer(producer); } - if (message != null && callback != null) { - callback.sent(message); - } return null; } @@ -349,6 +339,9 @@ private Object doSendToDestination(final Destination destination, logger.debug("Sending JMS message to: " + producer.getDestination() + " with message: " + message); } doSend(producer, message); + if (message != null && callback != null) { + callback.sent(message, destination); + } // Check commit - avoid commit call within a JTA transaction. if (session.getTransacted() && isSessionLocallyTransacted(session)) { // Transacted session created by this template -> commit. @@ -357,9 +350,6 @@ private Object doSendToDestination(final Destination destination, } finally { JmsUtils.closeMessageProducer(producer); } - if (message != null && callback != null) { - callback.sent(message); - } return null; } @@ -516,6 +506,7 @@ public AbstractMessageListenerContainer createMessageListenerContainer(JmsEndpoi // Properties // ------------------------------------------------------------------------- + public ConnectionFactory getConnectionFactory() { if (connectionFactory == null) { connectionFactory = createConnectionFactory(); @@ -863,10 +854,12 @@ public void setTransacted(boolean consumerTransacted) { * <p/> * By default this is false as you need to commit the outgoing request before you can consume the input */ + @Deprecated public boolean isTransactedInOut() { return transactedInOut; } + @Deprecated public void setTransactedInOut(boolean transactedInOut) { this.transactedInOut = transactedInOut; } @@ -1231,14 +1224,6 @@ public void setUseMessageIDAsCorrelationID(boolean useMessageIDAsCorrelationID) this.useMessageIDAsCorrelationID = useMessageIDAsCorrelationID; } - public String getReplyToTempDestinationAffinity() { - return replyToTempDestinationAffinity; - } - - public void setReplyToTempDestinationAffinity(String 
replyToTempDestinationAffinity) { - this.replyToTempDestinationAffinity = replyToTempDestinationAffinity; - } - public long getRequestTimeout() { return requestTimeout; } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java index b10e8ffb52c95..60397a75c1d1b 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsEndpoint.java @@ -16,6 +16,10 @@ */ package org.apache.camel.component.jms; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ScheduledExecutorService; import javax.jms.ConnectionFactory; import javax.jms.Destination; @@ -32,12 +36,17 @@ import org.apache.camel.MultipleConsumersSupport; import org.apache.camel.PollingConsumer; import org.apache.camel.Processor; -import org.apache.camel.component.jms.requestor.Requestor; +import org.apache.camel.Service; +import org.apache.camel.component.jms.reply.PersistentQueueReplyManager; +import org.apache.camel.component.jms.reply.ReplyHolder; +import org.apache.camel.component.jms.reply.ReplyManager; +import org.apache.camel.component.jms.reply.TemporaryQueueReplyManager; import org.apache.camel.impl.DefaultEndpoint; import org.apache.camel.impl.DefaultExchange; import org.apache.camel.spi.HeaderFilterStrategy; import org.apache.camel.spi.HeaderFilterStrategyAware; import org.apache.camel.spi.ManagementAware; +import org.apache.camel.util.ServiceHelper; import org.springframework.core.task.TaskExecutor; import org.springframework.jms.core.JmsOperations; import org.springframework.jms.listener.AbstractMessageListenerContainer; @@ -53,7 +62,7 @@ * @version $Revision:520964 $ */ @ManagedResource(description = "Managed JMS Endpoint") -public class JmsEndpoint 
extends DefaultEndpoint implements HeaderFilterStrategyAware, ManagementAware<JmsEndpoint>, MultipleConsumersSupport { +public class JmsEndpoint extends DefaultEndpoint implements HeaderFilterStrategyAware, ManagementAware<JmsEndpoint>, MultipleConsumersSupport, Service { private HeaderFilterStrategy headerFilterStrategy; private boolean pubSubDomain; private JmsBinding binding; @@ -61,8 +70,10 @@ public class JmsEndpoint extends DefaultEndpoint implements HeaderFilterStrategy private Destination destination; private String selector; private JmsConfiguration configuration; - private Requestor requestor; - private ScheduledExecutorService requestorExecutorService; + private final Map<String, ReplyManager> replyToReplyManager = new HashMap<String, ReplyManager>(); + private ReplyManager replyManager; + // scheduled executor to check for timeout (reply not received) + private ScheduledExecutorService replyManagerExecutorService; public JmsEndpoint() { this(null, null); @@ -284,16 +295,29 @@ public boolean isSingleton() { return true; } - public synchronized Requestor getRequestor() throws Exception { - if (requestor == null) { - requestor = new Requestor(getConfiguration(), getRequestorExecutorService()); - requestor.start(); + public synchronized ReplyManager getReplyManager() throws Exception { + if (replyManager == null) { + // use a temporary queue + replyManager = new TemporaryQueueReplyManager(); + replyManager.setEndpoint(this); + replyManager.setScheduledExecutorService(getReplyManagerExecutorService()); + ServiceHelper.startService(replyManager); } - return requestor; - } - - public void setRequestor(Requestor requestor) { - this.requestor = requestor; + return replyManager; + } + + public synchronized ReplyManager getReplyManager(String replyTo) throws Exception { + ReplyManager answer = replyToReplyManager.get(replyTo); + if (answer == null) { + // use a persistent queue + answer = new PersistentQueueReplyManager(); + answer.setEndpoint(this); + 
answer.setScheduledExecutorService(getReplyManagerExecutorService()); + ServiceHelper.startService(answer); + // remember this manager so we can re-use it + replyToReplyManager.put(replyTo, answer); + } + return answer; } public boolean isPubSubDomain() { @@ -343,11 +367,28 @@ protected JmsOperations getMetadataJmsOperations() { return template; } - protected synchronized ScheduledExecutorService getRequestorExecutorService() { - if (requestorExecutorService == null) { - requestorExecutorService = getCamelContext().getExecutorServiceStrategy().newScheduledThreadPool(this, "JmsRequesterTimeoutTask", 1); + protected synchronized ScheduledExecutorService getReplyManagerExecutorService() { + if (replyManagerExecutorService == null) { + replyManagerExecutorService = getCamelContext().getExecutorServiceStrategy().newScheduledThreadPool(this, "JmsReplyManagerTimeoutChecker", 1); + } + return replyManagerExecutorService; + } + + public void start() throws Exception { + } + + public void stop() throws Exception { + if (replyManager != null) { + ServiceHelper.stopService(replyManager); + replyManager = null; + } + + if (!replyToReplyManager.isEmpty()) { + for (ReplyManager replyManager : replyToReplyManager.values()) { + ServiceHelper.stopService(replyManager); + } + replyToReplyManager.clear(); } - return requestorExecutorService; } // Delegated properties from the configuration @@ -459,10 +500,6 @@ public String getReplyToDestinationSelectorName() { return getConfiguration().getReplyToDestinationSelectorName(); } - public String getReplyToTempDestinationAffinity() { - return getConfiguration().getReplyToTempDestinationAffinity(); - } - public long getRequestMapPurgePollTimeMillis() { return getConfiguration().getRequestMapPurgePollTimeMillis(); } @@ -768,10 +805,6 @@ public void setReplyToDestinationSelectorName(String replyToDestinationSelectorN getConfiguration().setReplyToDestinationSelectorName(replyToDestinationSelectorName); } - public void 
setReplyToTempDestinationAffinity(String replyToTempDestinationAffinity) { - getConfiguration().setReplyToTempDestinationAffinity(replyToTempDestinationAffinity); - } - public void setRequestMapPurgePollTimeMillis(long requestMapPurgePollTimeMillis) { getConfiguration().setRequestMapPurgePollTimeMillis(requestMapPurgePollTimeMillis); } @@ -911,4 +944,6 @@ protected String createEndpointUri() { return super.createEndpointUri(); } + + } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsMessageHelper.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsMessageHelper.java index 438c62d561c7e..e39124f77574d 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsMessageHelper.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsMessageHelper.java @@ -125,4 +125,19 @@ public static void setProperty(Message jmsMessage, String name, Object value) th } } + /** + * Sets the correlation id on the JMS message. 
+ * <p/> + * Will ignore exception thrown + * + * @param message the JMS message + * @param correlationId the correlation id + */ + public static void setCorrelationId(Message message, String correlationId) { + try { + message.setJMSCorrelationID(correlationId); + } catch (JMSException e) { + // ignore + } + } } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsProducer.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsProducer.java index d293949a9cc24..bf06deb6dfff4 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsProducer.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsProducer.java @@ -16,11 +16,7 @@ */ package org.apache.camel.component.jms; -import java.util.concurrent.FutureTask; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; - import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.Message; @@ -28,21 +24,19 @@ import org.apache.camel.AsyncCallback; import org.apache.camel.Exchange; -import org.apache.camel.ExchangeTimedOutException; import org.apache.camel.FailedToCreateProducerException; -import org.apache.camel.RuntimeCamelException; import org.apache.camel.RuntimeExchangeException; import org.apache.camel.component.jms.JmsConfiguration.CamelJmsTemplate; import org.apache.camel.component.jms.JmsConfiguration.CamelJmsTemplate102; -import org.apache.camel.component.jms.requestor.DeferredRequestReplyMap; -import org.apache.camel.component.jms.requestor.DeferredRequestReplyMap.DeferredMessageSentCallback; -import org.apache.camel.component.jms.requestor.PersistentReplyToRequestor; -import org.apache.camel.component.jms.requestor.Requestor; +import org.apache.camel.component.jms.reply.ReplyManager; +import org.apache.camel.component.jms.reply.UseMessageIdAsCorrelationIdMessageSentCallback; import 
org.apache.camel.impl.DefaultAsyncProducer; +import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.UuidGenerator; import org.apache.camel.util.ValueHolder; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; + import org.springframework.jms.core.JmsOperations; import org.springframework.jms.core.MessageCreator; @@ -51,97 +45,46 @@ */ public class JmsProducer extends DefaultAsyncProducer { private static final transient Log LOG = LogFactory.getLog(JmsProducer.class); - private RequestorAffinity affinity; private final JmsEndpoint endpoint; + private final AtomicBoolean started = new AtomicBoolean(false); private JmsOperations inOnlyTemplate; private JmsOperations inOutTemplate; private UuidGenerator uuidGenerator; - private DeferredRequestReplyMap deferredRequestReplyMap; - private Requestor requestor; - private AtomicBoolean started = new AtomicBoolean(false); - - private enum RequestorAffinity { - PER_COMPONENT(0), - PER_ENDPOINT(1), - PER_PRODUCER(2); - private int value; - private RequestorAffinity(int value) { - this.value = value; - } - } + private ReplyManager replyManager; public JmsProducer(JmsEndpoint endpoint) { super(endpoint); this.endpoint = endpoint; - JmsConfiguration c = endpoint.getConfiguration(); - affinity = RequestorAffinity.PER_PRODUCER; - if (c.getReplyTo() != null) { - if (c.getReplyToTempDestinationAffinity().equals(JmsConfiguration.REPLYTO_TEMP_DEST_AFFINITY_PER_ENDPOINT)) { - affinity = RequestorAffinity.PER_ENDPOINT; - } else if (c.getReplyToTempDestinationAffinity().equals(JmsConfiguration.REPLYTO_TEMP_DEST_AFFINITY_PER_COMPONENT)) { - affinity = RequestorAffinity.PER_COMPONENT; - } - } - } - - public long getRequestTimeout() { - return endpoint.getConfiguration().getRequestTimeout(); } - protected void doStart() throws Exception { - super.doStart(); - } - - protected void testAndSetRequestor() throws RuntimeCamelException { + protected void initReplyManager() { if (!started.get()) { 
synchronized (this) { if (started.get()) { return; } try { - JmsConfiguration c = endpoint.getConfiguration(); - if (c.getReplyTo() != null) { - requestor = new PersistentReplyToRequestor(endpoint.getConfiguration(), endpoint.getRequestorExecutorService()); - requestor.start(); + if (endpoint.getReplyTo() != null) { + replyManager = endpoint.getReplyManager(endpoint.getReplyTo()); + if (LOG.isInfoEnabled()) { + LOG.info("Using JmsReplyManager: " + replyManager + " to process replies from: " + endpoint.getReplyTo() + + " queue with " + endpoint.getConcurrentConsumers() + " concurrent consumers."); + } } else { - if (affinity == RequestorAffinity.PER_PRODUCER) { - requestor = new Requestor(endpoint.getConfiguration(), endpoint.getRequestorExecutorService()); - requestor.start(); - } else if (affinity == RequestorAffinity.PER_ENDPOINT) { - requestor = endpoint.getRequestor(); - } else if (affinity == RequestorAffinity.PER_COMPONENT) { - requestor = ((JmsComponent)endpoint.getComponent()).getRequestor(); + replyManager = endpoint.getReplyManager(); + if (LOG.isInfoEnabled()) { + LOG.info("Using JmsReplyManager: " + replyManager + " to process replies from temporary queue with " + + endpoint.getConcurrentConsumers() + " concurrent consumers."); } } } catch (Exception e) { throw new FailedToCreateProducerException(endpoint, e); } - deferredRequestReplyMap = requestor.getDeferredRequestReplyMap(this); started.set(true); } } } - protected void testAndUnsetRequestor() throws Exception { - if (started.get()) { - synchronized (this) { - if (!started.get()) { - return; - } - requestor.removeDeferredRequestReplyMap(this); - if (affinity == RequestorAffinity.PER_PRODUCER) { - requestor.stop(); - } - started.set(false); - } - } - } - - protected void doStop() throws Exception { - testAndUnsetRequestor(); - super.doStop(); - } - public boolean process(Exchange exchange, AsyncCallback callback) { if (!endpoint.isDisableReplyTo() && exchange.getPattern().isOutCapable()) { // in out 
requires a bit more work than in only @@ -173,111 +116,76 @@ protected boolean processInOut(final Exchange exchange, final AsyncCallback call destinationName = null; } - testAndSetRequestor(); + initReplyManager(); // note due to JMS transaction semantics we cannot use a single transaction // for sending the request and receiving the response - final Destination replyTo = requestor.getReplyTo(); - + final Destination replyTo = replyManager.getReplyTo(); if (replyTo == null) { throw new RuntimeExchangeException("Failed to resolve replyTo destination", exchange); } + // when using message id as correlation id, we need at first to use a provisional correlation id + // which we then update to the real JMSMessageID when the message has been sent + // this is done with the help of the MessageSentCallback final boolean msgIdAsCorrId = endpoint.getConfiguration().isUseMessageIDAsCorrelationID(); - String correlationId = in.getHeader("JMSCorrelationID", String.class); + final String provisionalCorrelationId = msgIdAsCorrId ? getUuidGenerator().generateUuid() : null; + MessageSentCallback messageSentCallback = null; + if (msgIdAsCorrId) { + messageSentCallback = new UseMessageIdAsCorrelationIdMessageSentCallback(replyManager, provisionalCorrelationId, endpoint.getRequestTimeout()); + } + final ValueHolder<MessageSentCallback> sentCallback = new ValueHolder<MessageSentCallback>(messageSentCallback); - if (correlationId == null && !msgIdAsCorrId) { + final String originalCorrelationId = in.getHeader("JMSCorrelationID", String.class); + if (originalCorrelationId == null && !msgIdAsCorrId) { in.setHeader("JMSCorrelationID", getUuidGenerator().generateUuid()); } - final ValueHolder<FutureTask> futureHolder = new ValueHolder<FutureTask>(); - final DeferredMessageSentCallback jmsCallback = msgIdAsCorrId ? 
deferredRequestReplyMap.createDeferredMessageSentCallback() : null; - MessageCreator messageCreator = new MessageCreator() { public Message createMessage(Session session) throws JMSException { Message message = endpoint.getBinding().makeJmsMessage(exchange, in, session, null); message.setJMSReplyTo(replyTo); - requestor.setReplyToSelectorHeader(in, message); + replyManager.setReplyToSelectorHeader(in, message); - FutureTask future; - future = (!msgIdAsCorrId) - ? requestor.getReceiveFuture(message.getJMSCorrelationID(), endpoint.getConfiguration().getRequestTimeout()) - : requestor.getReceiveFuture(jmsCallback); + String correlationId = determineCorrelationId(message, provisionalCorrelationId); + replyManager.registerReply(replyManager, exchange, callback, originalCorrelationId, correlationId, endpoint.getRequestTimeout()); - futureHolder.set(future); return message; } }; - doSend(true, destinationName, destination, messageCreator, jmsCallback); + doSend(true, destinationName, destination, messageCreator, sentCallback.get()); // after sending then set the OUT message id to the JMSMessageID so its identical setMessageId(exchange); - // now we should routing asynchronously to not block while waiting for the reply - // TODO: - // we need a thread pool to use for continue routing messages, just like a seda consumer - // and we need options to configure it as well so you can indicate how many threads to use - // TODO: Also consider requestTimeout - - // lets wait and return the response - long requestTimeout = endpoint.getConfiguration().getRequestTimeout(); - try { - Message message = null; - try { - if (LOG.isDebugEnabled()) { - LOG.debug("Message sent, now waiting for reply at: " + replyTo.toString()); - } - if (requestTimeout <= 0) { - message = (Message)futureHolder.get().get(); - } else { - message = (Message)futureHolder.get().get(requestTimeout, TimeUnit.MILLISECONDS); - } - } catch (InterruptedException e) { - if (LOG.isDebugEnabled()) { - LOG.debug("Future 
interrupted: " + e, e); - } - } catch (TimeoutException e) { - if (LOG.isDebugEnabled()) { - LOG.debug("Future timed out: " + e, e); - } - } - if (message != null) { - // the response can be an exception - JmsMessage response = new JmsMessage(message, endpoint.getBinding()); - Object body = response.getBody(); - - if (endpoint.isTransferException() && body instanceof Exception) { - if (LOG.isDebugEnabled()) { - LOG.debug("Reply received. Setting reply as an Exception: " + body); - } - // we got an exception back and endpoint was configured to transfer exception - // therefore set response as exception - exchange.setException((Exception) body); - } else { - if (LOG.isDebugEnabled()) { - LOG.debug("Reply received. Setting reply as OUT message: " + body); - } - // regular response - exchange.setOut(response); - } + // continue routing asynchronously (reply will be processed async when its received) + return false; + } - // restore correlation id in case the remote server messed with it - if (correlationId != null) { - message.setJMSCorrelationID(correlationId); - exchange.getOut().setHeader("JMSCorrelationID", correlationId); - } - } else { - // no response, so lets set a timed out exception - exchange.setException(new ExchangeTimedOutException(exchange, requestTimeout)); - } - } catch (Exception e) { - exchange.setException(e); + /** + * Strategy to determine which correlation id to use among <tt>JMSMessageID</tt> and <tt>JMSCorrelationID</tt>. 
+ * + * @param message the JMS message + * @param provisionalCorrelationId an optional provisional correlation id, which is preferred to be used + * @return the correlation id to use + * @throws JMSException can be thrown + */ + protected String determineCorrelationId(Message message, String provisionalCorrelationId) throws JMSException { + if (provisionalCorrelationId != null) { + return provisionalCorrelationId; } - // TODO: should be async - callback.done(true); - return true; + final String messageId = message.getJMSMessageID(); + final String correlationId = message.getJMSCorrelationID(); + if (endpoint.getConfiguration().isUseMessageIDAsCorrelationID()) { + return messageId; + } else if (ObjectHelper.isEmpty(correlationId)) { + // correlation id is empty so fallback to message id + return messageId; + } else { + return correlationId; + } } protected boolean processInOnly(final Exchange exchange, final AsyncCallback callback) { @@ -340,14 +248,14 @@ public Message createMessage(Session session) throws JMSException { /** * Sends the message using the JmsTemplate. 
* - * @param inOut use inOut or inOnly template + * @param inOut use inOut or inOnly template * @param destinationName the destination name * @param destination the destination (if no name provided) - * @param messageCreator the creator to create the javax.jms.Message to send + * @param messageCreator the creator to create the {@link Message} to send * @param callback optional callback for inOut messages */ protected void doSend(boolean inOut, String destinationName, Destination destination, - MessageCreator messageCreator, DeferredMessageSentCallback callback) { + MessageCreator messageCreator, MessageSentCallback callback) { CamelJmsTemplate template = null; CamelJmsTemplate102 template102 = null; @@ -405,7 +313,7 @@ protected void setMessageId(Exchange exchange) { } } catch (JMSException e) { LOG.warn("Unable to retrieve JMSMessageID from outgoing " - + "JMS Message and set it into Camel's MessageId", e); + + "JMS Message and set it into Camel's MessageId", e); } } } @@ -433,9 +341,6 @@ public void setInOutTemplate(JmsOperations inOutTemplate) { } public UuidGenerator getUuidGenerator() { - if (uuidGenerator == null) { - uuidGenerator = UuidGenerator.get(); - } return uuidGenerator; } @@ -443,4 +348,16 @@ public void setUuidGenerator(UuidGenerator uuidGenerator) { this.uuidGenerator = uuidGenerator; } + protected void doStart() throws Exception { + super.doStart(); + if (uuidGenerator == null) { + // use the default generator + uuidGenerator = UuidGenerator.get(); + } + } + + protected void doStop() throws Exception { + super.doStop(); + } + } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/ReplyHandler.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/MessageSentCallback.java similarity index 71% rename from components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/ReplyHandler.java rename to 
components/camel-jms/src/main/java/org/apache/camel/component/jms/MessageSentCallback.java index c40579f3c321d..c2739a2cd16c7 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/ReplyHandler.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/MessageSentCallback.java @@ -14,18 +14,23 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.camel.component.jms.requestor; +package org.apache.camel.component.jms; -import javax.jms.JMSException; +import javax.jms.Destination; import javax.jms.Message; /** + * Callback when a {@link Message} has been sent. + * * @version $Revision$ */ -public interface ReplyHandler { +public interface MessageSentCallback { + /** - * Processes the message, returning true if this is the last method of a lifecycle - * so that the handler can be discarded + * Callback when the message has been sent. + * + * @param message the message + * @param destination the destination */ - boolean handle(Message message) throws JMSException; + void sent(Message message, Destination destination); } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/CorrelationMap.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/CorrelationMap.java new file mode 100644 index 0000000000000..2fa218ae849f7 --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/CorrelationMap.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms.reply; + +import java.util.concurrent.ScheduledExecutorService; + +import org.apache.camel.util.DefaultTimeoutMap; + +/** + * @version $Revision$ + */ +public class CorrelationMap extends DefaultTimeoutMap<String, ReplyHandler> { + + public CorrelationMap(ScheduledExecutorService executor, long requestMapPollTimeMillis) { + super(executor, requestMapPollTimeMillis); + } + + public boolean onEviction(String key, ReplyHandler value) { + // trigger timeout + value.onTimeout(key); + // return true to remove the element + return true; + } + +} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/MessageSelectorProvider.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/MessageSelectorCreator.java similarity index 64% rename from components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/MessageSelectorProvider.java rename to components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/MessageSelectorCreator.java index 873a6524bffa9..cf5f5ed23705b 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/MessageSelectorProvider.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/MessageSelectorCreator.java @@ -14,17 +14,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.camel.component.jms.requestor; +package org.apache.camel.component.jms.reply; import java.util.HashMap; import java.util.Map; -public class MessageSelectorProvider { +/** + * A creator which can build the JMS message selector query string to use + * with a shared persistent reply-to queue, so we can select the correct messages we expect as replies. + */ +public class MessageSelectorCreator { protected Map<String, String> correlationIds; protected boolean dirty = true; protected StringBuilder expression; - public MessageSelectorProvider() { + public MessageSelectorCreator() { correlationIds = new HashMap<String, String>(); } @@ -42,18 +46,27 @@ public synchronized String get() { if (!dirty) { return expression.toString(); } + expression = new StringBuilder("JMSCorrelationID='"); - boolean first = true; - for (Map.Entry<String, String> entry : correlationIds.entrySet()) { - if (!first) { - expression.append(" OR JMSCorrelationID='"); - } - expression.append(entry.getValue()).append("'"); - if (first) { - first = false; + + if (correlationIds.isEmpty()) { + // no id's so use a dummy to select nothing + expression.append("CamelDummyJmsMessageSelector'"); + } else { + boolean first = true; + for (Map.Entry<String, String> entry : correlationIds.entrySet()) { + if (!first) { + expression.append(" OR JMSCorrelationID='"); + } + expression.append(entry.getValue()).append("'"); + if (first) { + first = false; + } } } + dirty = false; return expression.toString(); } -} + +} \ No newline at end of file diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/PersistentQueueReplyHandler.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/PersistentQueueReplyHandler.java new file mode 100644 index 0000000000000..e29fe00a41e4d --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/PersistentQueueReplyHandler.java @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms.reply; + +import javax.jms.Message; + +import org.apache.camel.AsyncCallback; +import org.apache.camel.Exchange; + +/** + * {@link ReplyHandler} to handle processing replies when using persistent queues. 
+ * + * @version $Revision$ + */ +public class PersistentQueueReplyHandler extends TemporaryQueueReplyHandler { + + private MessageSelectorCreator dynamicMessageSelector; + + public PersistentQueueReplyHandler(ReplyManager replyManager, Exchange exchange, AsyncCallback callback, + String originalCorrelationId, long timeout, MessageSelectorCreator dynamicMessageSelector) { + super(replyManager, exchange, callback, originalCorrelationId, timeout); + this.dynamicMessageSelector = dynamicMessageSelector; + } + + @Override + public void onReply(String correlationId, Message reply) { + if (dynamicMessageSelector != null) { + // remove correlation id from message selector + dynamicMessageSelector.removeCorrelationID(correlationId); + } + super.onReply(correlationId, reply); + } + + @Override + public void onTimeout(String correlationId) { + if (dynamicMessageSelector != null) { + // remove correlation id from message selector + dynamicMessageSelector.removeCorrelationID(correlationId); + } + super.onTimeout(correlationId); + } +} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/PersistentQueueReplyManager.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/PersistentQueueReplyManager.java new file mode 100644 index 0000000000000..7f6da3f0e9492 --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/PersistentQueueReplyManager.java @@ -0,0 +1,239 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms.reply; + +import java.math.BigInteger; +import java.util.Random; +import javax.jms.Destination; +import javax.jms.ExceptionListener; +import javax.jms.JMSException; +import javax.jms.Message; +import javax.jms.Session; + +import org.apache.camel.AsyncCallback; +import org.apache.camel.Exchange; +import org.springframework.core.task.TaskExecutor; +import org.springframework.jms.listener.AbstractMessageListenerContainer; +import org.springframework.jms.listener.DefaultMessageListenerContainer; +import org.springframework.jms.support.destination.DestinationResolver; +import org.springframework.transaction.PlatformTransactionManager; + +/** + * A {@link ReplyManager} when using persistent queues. 
+ * + * @version $Revision$ + */ +public class PersistentQueueReplyManager extends ReplyManagerSupport { + + private String replyToSelectorValue; + private MessageSelectorCreator dynamicMessageSelector; + + public String registerReply(ReplyManager replyManager, Exchange exchange, AsyncCallback callback, + String originalCorrelationId, String correlationId, long requestTimeout) { + // add to correlation map + PersistentQueueReplyHandler handler = new PersistentQueueReplyHandler(replyManager, exchange, callback, + originalCorrelationId, requestTimeout, dynamicMessageSelector); + correlation.put(correlationId, handler, requestTimeout); + if (dynamicMessageSelector != null) { + // also remember to keep the dynamic selector updated with the new correlation id + dynamicMessageSelector.addCorrelationID(correlationId); + } + return correlationId; + } + + public void updateCorrelationId(String correlationId, String newCorrelationId, long requestTimeout) { + if (log.isTraceEnabled()) { + log.trace("Updated provisional correlationId [" + correlationId + "] to expected correlationId [" + newCorrelationId + "]"); + } + + ReplyHandler handler = correlation.remove(correlationId); + if (handler == null) { + // should not happen that we can't find the handler + return; + } + + correlation.put(newCorrelationId, handler, requestTimeout); + + // no not arrived early + if (dynamicMessageSelector != null) { + // also remember to keep the dynamic selector updated with the new correlation id + dynamicMessageSelector.addCorrelationID(newCorrelationId); + } + } + + protected void handleReplyMessage(String correlationID, Message message) { + ReplyHandler handler = correlation.get(correlationID); + if (handler == null && endpoint.isUseMessageIDAsCorrelationID()) { + handler = waitForProvisionCorrelationToBeUpdated(correlationID, message); + } + + if (handler != null) { + try { + handler.onReply(correlationID, message); + } finally { + if (dynamicMessageSelector != null) { + // also remember 
to keep the dynamic selector updated with the new correlation id + dynamicMessageSelector.removeCorrelationID(correlationID); + } + correlation.remove(correlationID); + } + } else { + // we could not correlate the received reply message to a matching request and therefore + // we cannot continue routing the unknown message + String text = "Reply received for unknown correlationID [" + correlationID + "] -> " + message; + log.warn(text); + throw new UnknownReplyMessageException(text, message, correlationID); + } + } + + public void setReplyToSelectorHeader(org.apache.camel.Message camelMessage, Message jmsMessage) throws JMSException { + String replyToSelectorName = endpoint.getReplyToDestinationSelectorName(); + if (replyToSelectorName != null && replyToSelectorValue != null) { + camelMessage.setHeader(replyToSelectorName, replyToSelectorValue); + jmsMessage.setStringProperty(replyToSelectorName, replyToSelectorValue); + } + } + + private final class DestinationResolverDelegate implements DestinationResolver { + private DestinationResolver delegate; + private Destination destination; + + public DestinationResolverDelegate(DestinationResolver delegate) { + this.delegate = delegate; + } + + public Destination resolveDestinationName(Session session, String destinationName, + boolean pubSubDomain) throws JMSException { + synchronized (PersistentQueueReplyManager.this) { + try { + // resolve the reply to destination + if (destination == null) { + destination = delegate.resolveDestinationName(session, destinationName, pubSubDomain); + setReplyTo(destination); + } + } finally { + PersistentQueueReplyManager.this.notifyAll(); + } + } + return destination; + } + }; + + private final class PersistentQueueMessageListenerContainer extends DefaultMessageListenerContainer { + + private String fixedMessageSelector; + private MessageSelectorCreator creator; + + private PersistentQueueMessageListenerContainer(String fixedMessageSelector) { + this.fixedMessageSelector = 
fixedMessageSelector; + } + + private PersistentQueueMessageListenerContainer(MessageSelectorCreator creator) { + this.creator = creator; + } + + @Override + public String getMessageSelector() { + String id = null; + if (fixedMessageSelector != null) { + id = fixedMessageSelector; + } else if (creator != null) { + id = creator.get(); + } + if (log.isTraceEnabled()) { + log.trace("Using MessageSelector[" + id + "]"); + } + return id; + } + } + + protected AbstractMessageListenerContainer createListenerContainer() throws Exception { + DefaultMessageListenerContainer answer; + + String replyToSelectorName = endpoint.getReplyToDestinationSelectorName(); + if (replyToSelectorName != null) { + // 24 max char is what IBM WebSphereMQ supports in CorrelationIDs + // use a fixed selector name so we can select the replies which is intended for us + replyToSelectorValue = "ID:" + new BigInteger(24 * 8, new Random()).toString(16); + String fixedMessageSelector = replyToSelectorName + "='" + replyToSelectorValue + "'"; + answer = new PersistentQueueMessageListenerContainer(fixedMessageSelector); + } else { + // use a dynamic message selector which will select the message we want to receive as reply + dynamicMessageSelector = new MessageSelectorCreator(); + answer = new PersistentQueueMessageListenerContainer(dynamicMessageSelector); + } + + answer.setConnectionFactory(endpoint.getListenerConnectionFactory()); + DestinationResolver resolver = endpoint.getDestinationResolver(); + if (resolver == null) { + resolver = answer.getDestinationResolver(); + } + answer.setDestinationResolver(new DestinationResolverDelegate(resolver)); + answer.setDestinationName(endpoint.getReplyTo()); + + answer.setAutoStartup(true); + answer.setMessageListener(this); + answer.setPubSubDomain(false); + answer.setSubscriptionDurable(false); + answer.setConcurrentConsumers(endpoint.getConcurrentConsumers()); + + ExceptionListener exceptionListener = endpoint.getExceptionListener(); + if (exceptionListener 
!= null) { + answer.setExceptionListener(exceptionListener); + } + + answer.setSessionTransacted(endpoint.isTransacted()); + if (endpoint.isTransacted()) { + answer.setSessionAcknowledgeMode(Session.SESSION_TRANSACTED); + } else { + if (endpoint.getAcknowledgementMode() >= 0) { + answer.setSessionAcknowledgeMode(endpoint.getAcknowledgementMode()); + } else if (endpoint.getAcknowledgementModeName() != null) { + answer.setSessionAcknowledgeModeName(endpoint.getAcknowledgementModeName()); + } + } + + answer.setConcurrentConsumers(1); + answer.setCacheLevel(DefaultMessageListenerContainer.CACHE_SESSION); + + if (endpoint.getReceiveTimeout() >= 0) { + answer.setReceiveTimeout(endpoint.getReceiveTimeout()); + } + if (endpoint.getRecoveryInterval() >= 0) { + answer.setRecoveryInterval(endpoint.getRecoveryInterval()); + } + TaskExecutor taskExecutor = endpoint.getTaskExecutor(); + if (taskExecutor != null) { + answer.setTaskExecutor(taskExecutor); + } + PlatformTransactionManager tm = endpoint.getTransactionManager(); + if (tm != null) { + answer.setTransactionManager(tm); + } else if (endpoint.isTransacted()) { + throw new IllegalArgumentException("Property transacted is enabled but a transactionManager was not injected!"); + } + if (endpoint.getTransactionName() != null) { + answer.setTransactionName(endpoint.getTransactionName()); + } + if (endpoint.getTransactionTimeout() >= 0) { + answer.setTransactionTimeout(endpoint.getTransactionTimeout()); + } + + return answer; + } + +} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/FailedToProcessResponse.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyHandler.java similarity index 58% rename from components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/FailedToProcessResponse.java rename to components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyHandler.java index e14ee3f29978e..424dec01088e0 100644 --- 
a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/FailedToProcessResponse.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyHandler.java @@ -14,30 +14,29 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.camel.component.jms.requestor; +package org.apache.camel.component.jms.reply; -import javax.jms.JMSException; import javax.jms.Message; -import org.apache.camel.RuntimeCamelException; - /** - * An exception thrown if a response message from an InOut could not be processed + * Handles a reply. * * @version $Revision$ */ -public class FailedToProcessResponse extends RuntimeCamelException { - private final Message response; +public interface ReplyHandler { - public FailedToProcessResponse(Message response, JMSException e) { - super("Failed to process response: " + e + ". Message: " + response, e); - this.response = response; - } + /** + * The reply message was received + * + * @param correlationId the correlation id + * @param reply the reply message + */ + void onReply(String correlationId, Message reply); /** - * The response message which caused the exception + * The reply message was not received and a timeout triggered + * + * @param correlationId the correlation id */ - public Message getResponse() { - return response; - } + void onTimeout(String correlationId); } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyHolder.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyHolder.java new file mode 100644 index 0000000000000..4386ff6f69092 --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyHolder.java @@ -0,0 +1,100 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms.reply; + +import javax.jms.Message; + +import org.apache.camel.AsyncCallback; +import org.apache.camel.Exchange; + +/** + * Holder which contains the {@link Exchange} and {@link org.apache.camel.AsyncCallback} to be used + * when the reply arrives, so we can set the reply on the {@link Exchange} and continue routing using the callback. 
+ * + * @version $Revision$ + */ +public class ReplyHolder { + + private final Exchange exchange; + private final AsyncCallback callback; + private final Message message; + private final String originalCorrelationId; + private long timeout; + + /** + * Constructor to use when a reply message was received + */ + public ReplyHolder(Exchange exchange, AsyncCallback callback, String originalCorrelationId, Message message) { + this.exchange = exchange; + this.callback = callback; + this.originalCorrelationId = originalCorrelationId; + this.message = message; + } + + /** + * Constructor to use when a timeout occurred + */ + public ReplyHolder(Exchange exchange, AsyncCallback callback, String originalCorrelationId, long timeout) { + this(exchange, callback, originalCorrelationId, null); + this.timeout = timeout; + } + + public Exchange getExchange() { + return exchange; + } + + public AsyncCallback getCallback() { + return callback; + } + + /** + * Gets the original correlation id, if one was set when sending the message. + * <p/> + * Some JMS brokers will mess with the correlation id and send back a different/empty correlation id. + * So we need to remember it so we can restore the correlation id. + */ + public String getOriginalCorrelationId() { + return originalCorrelationId; + } + + /** + * Gets the received message + * + * @return the received message, or <tt>null</tt> if timeout occurred and no message has been received + * @see #isTimeout() + */ + public Message getMessage() { + return message; + } + + /** + * Whether timeout triggered or not. + * <p/> + * A timeout is triggered if <tt>requestTimeout</tt> option has been configured, and a reply message has <b>not</b> been + * received within that time frame. 
+ */ + public boolean isTimeout() { + return message == null; + } + + /** + * The timeout value + */ + public long getRequestTimeout() { + return timeout; + } +} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyManager.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyManager.java new file mode 100644 index 0000000000000..9156ac5d47359 --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyManager.java @@ -0,0 +1,98 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms.reply; + +import java.util.concurrent.ScheduledExecutorService; +import javax.jms.Destination; +import javax.jms.JMSException; +import javax.jms.Message; +import javax.jms.MessageListener; + +import org.apache.camel.AsyncCallback; +import org.apache.camel.Exchange; +import org.apache.camel.component.jms.JmsEndpoint; + +/** + * The {@link ReplyManager} is responsible for handling <a href="http://camel.apache.org/request-reply.html">request-reply</a> + * over JMS. 
+ * + * @version $Revision$ + */ +public interface ReplyManager extends MessageListener { + + /** + * Sets the belonging {@link org.apache.camel.component.jms.JmsEndpoint}. + */ + void setEndpoint(JmsEndpoint endpoint); + + /** + * Sets the reply to queue the manager should listen for replies. + * <p/> + * The queue is either a temporary or a persistent queue. + */ + void setReplyTo(Destination replyTo); + + /** + * Sets the scheduled to use when checking for timeouts (no reply received within a given time period) + */ + void setScheduledExecutorService(ScheduledExecutorService executorService); + + /** + * Gets the reply to queue being used + */ + Destination getReplyTo(); + + /** + * To be used when a persistent reply queue is used with a custom JMS selector is being used. + */ + void setReplyToSelectorHeader(org.apache.camel.Message camelMessage, Message jmsMessage) throws JMSException; + + /** + * Register a reply + * + * @param replyManager the reply manager being used + * @param exchange the exchange + * @param callback the callback + * @param originalCorrelationId an optional original correlation id + * @param correlationId the correlation id to expect being used + * @param requestTimeout an optional timeout + * @return the correlation id used + */ + String registerReply(ReplyManager replyManager, Exchange exchange, AsyncCallback callback, + String originalCorrelationId, String correlationId, long requestTimeout); + + /** + * Updates the correlation id to the new correlation id. + * <p/> + * This is only used when <tt>useMessageIDasCorrelationID</tt> option is used, which means a + * provisional correlation id is first used, then after the message has been sent, the real + * correlation id is known. This allows us then to update the internal mapping to expect the + * real correlation id. 
+ * + * @param correlationId the provisional correlation id + * @param newCorrelationId the real correlation id + * @param requestTimeout an optional timeout + */ + void updateCorrelationId(String correlationId, String newCorrelationId, long requestTimeout); + + /** + * Process the reply + * + * @param holder containing needed data to process the reply and continue routing + */ + void processReply(ReplyHolder holder); +} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyManagerSupport.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyManagerSupport.java new file mode 100644 index 0000000000000..7f542aba6e460 --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/ReplyManagerSupport.java @@ -0,0 +1,217 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.camel.component.jms.reply; + +import java.util.concurrent.ScheduledExecutorService; +import javax.jms.Destination; +import javax.jms.JMSException; +import javax.jms.Message; + +import org.apache.camel.AsyncCallback; +import org.apache.camel.Exchange; +import org.apache.camel.ExchangeTimedOutException; +import org.apache.camel.component.jms.JmsEndpoint; +import org.apache.camel.component.jms.JmsMessage; +import org.apache.camel.component.jms.JmsMessageHelper; +import org.apache.camel.impl.ServiceSupport; +import org.apache.camel.util.ObjectHelper; +import org.apache.camel.util.ServiceHelper; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.jms.listener.AbstractMessageListenerContainer; + +/** + * Base class for {@link ReplyManager} implementations. + * + * @version $Revision$ + */ +public abstract class ReplyManagerSupport extends ServiceSupport implements ReplyManager { + + protected final Log log = LogFactory.getLog(getClass()); + protected ScheduledExecutorService executorService; + protected JmsEndpoint endpoint; + protected Destination replyTo; + protected AbstractMessageListenerContainer listenerContainer; + protected long replyToResolverTimeout = 5000; + protected CorrelationMap correlation; + + public void setScheduledExecutorService(ScheduledExecutorService executorService) { + this.executorService = executorService; + } + + public void setEndpoint(JmsEndpoint endpoint) { + this.endpoint = endpoint; + } + + public void setReplyTo(Destination replyTo) { + if (log.isTraceEnabled()) { + log.trace("ReplyTo destination: " + replyTo); + } + this.replyTo = replyTo; + } + + public Destination getReplyTo() { + synchronized (this) { + try { + // wait for the reply to destination to be resolved + if (replyTo == null) { + wait(replyToResolverTimeout); + } + } catch (Throwable e) { + // ignore + } + } + return replyTo; + } + + public void onMessage(Message message) { + String 
correlationID = null; + try { + correlationID = message.getJMSCorrelationID(); + } catch (JMSException e) { + // ignore + } + if (correlationID == null) { + log.warn("Ignoring message with no correlationID: " + message); + return; + } + + if (log.isDebugEnabled()) { + log.debug("Received reply message with correlationID: " + correlationID + " -> " + message); + } + + // handle the reply message + handleReplyMessage(correlationID, message); + } + + public void processReply(ReplyHolder holder) { + if (holder != null && isRunAllowed()) { + Exchange exchange = holder.getExchange(); + Message message = holder.getMessage(); + + boolean timeout = holder.isTimeout(); + if (timeout) { + // no response, so lets set a timed out exception + exchange.setException(new ExchangeTimedOutException(exchange, holder.getRequestTimeout())); + } else { + JmsMessage response = new JmsMessage(message, endpoint.getBinding()); + Object body = response.getBody(); + + if (endpoint.isTransferException() && body instanceof Exception) { + if (log.isDebugEnabled()) { + log.debug("Reply received. Setting reply as an Exception: " + body); + } + // we got an exception back and endpoint was configured to transfer exception + // therefore set response as exception + exchange.setException((Exception) body); + } else { + if (log.isDebugEnabled()) { + log.debug("Reply received. 
Setting reply as OUT message: " + body); + } + // regular response + exchange.setOut(response); + } + + // restore correlation id in case the remote server messed with it + if (holder.getOriginalCorrelationId() != null) { + JmsMessageHelper.setCorrelationId(message, holder.getOriginalCorrelationId()); + exchange.getOut().setHeader("JMSCorrelationID", holder.getOriginalCorrelationId()); + } + } + + // notify callback + AsyncCallback callback = holder.getCallback(); + callback.done(false); + } + } + + protected abstract void handleReplyMessage(String correlationID, Message message); + + protected abstract AbstractMessageListenerContainer createListenerContainer() throws Exception; + + /** + * <b>IMPORTANT:</b> This logic is only being used due to high performance in-memory only + * testing using InOut over JMS. Its unlikely to happen in a real life situation with communication + * to a remote broker, which always will be slower to send back reply, before Camel had a chance + * to update it's internal correlation map. + */ + protected ReplyHandler waitForProvisionCorrelationToBeUpdated(String correlationID, Message message) { + // race condition, when using messageID as correlationID then we store a provisional correlation id + // at first, which gets updated with the JMSMessageID after the message has been sent. And in the unlikely + // event that the reply comes back really really fast, and the correlation map hasn't yet been updated + // from the provisional id to the JMSMessageID. If so we have to wait a bit and lookup again. + if (log.isWarnEnabled()) { + log.warn("Early reply received with correlationID [" + correlationID + "] -> " + message); + } + + ReplyHandler answer = null; + + // wait up till 5 seconds + boolean done = false; + int counter = 0; + while (!done && counter++ < 50) { + if (log.isTraceEnabled()) { + log.trace("Early reply not found handler at attempt " + counter + ". 
Waiting a bit longer."); + } + try { + Thread.sleep(100); + } catch (InterruptedException e) { + // ignore + } + + // try again + answer = correlation.get(correlationID); + done = answer != null; + + if (answer != null) { + if (log.isTraceEnabled()) { + log.trace("Early reply with correlationID [" + correlationID + "] has been matched after " + + counter + " attempts and can be processed using handler: " + answer); + } + } + } + + return answer; + } + + @Override + protected void doStart() throws Exception { + ObjectHelper.notNull(executorService, "executorService", this); + ObjectHelper.notNull(endpoint, "endpoint", this); + + // purge for timeout every second + correlation = new CorrelationMap(executorService, 1000); + ServiceHelper.startService(correlation); + + // create JMS listener and start it + listenerContainer = createListenerContainer(); + listenerContainer.afterPropertiesSet(); + listenerContainer.start(); + } + + @Override + protected void doStop() throws Exception { + ServiceHelper.stopService(correlation); + + if (listenerContainer != null) { + listenerContainer.stop(); + listenerContainer.destroy(); + listenerContainer = null; + } + } + +} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/TemporaryQueueReplyHandler.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/TemporaryQueueReplyHandler.java new file mode 100644 index 0000000000000..b5c02ed3aac1a --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/TemporaryQueueReplyHandler.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms.reply; + +import javax.jms.Message; + +import org.apache.camel.AsyncCallback; +import org.apache.camel.Exchange; + +/** + * {@link ReplyHandler} to handle processing replies when using temporary queues. + * + * @version $Revision$ + */ +public class TemporaryQueueReplyHandler implements ReplyHandler { + + // task queue to add the holder so we can process the reply + protected final ReplyManager replyManager; + protected final Exchange exchange; + protected final AsyncCallback callback; + // remember the original correlation id, in case the server returns back a reply with a messed up correlation id + protected final String originalCorrelationId; + protected final long timeout; + + public TemporaryQueueReplyHandler(ReplyManager replyManager, Exchange exchange, AsyncCallback callback, + String originalCorrelationId, long timeout) { + this.replyManager = replyManager; + this.exchange = exchange; + this.originalCorrelationId = originalCorrelationId; + this.callback = callback; + this.timeout = timeout; + } + + public void onReply(String correlationId, Message reply) { + // create holder object with the reply and add to task queue so we can process the reply and continue + // route the exchange using the async routing engine + ReplyHolder holder = new ReplyHolder(exchange, callback, originalCorrelationId, reply); + // process reply + replyManager.processReply(holder); + } + + public void onTimeout(String correlationId) { + // create holder object without the reply which means a timeout occurred + ReplyHolder 
holder = new ReplyHolder(exchange, callback, originalCorrelationId, timeout); + // process timeout + replyManager.processReply(holder); + } +} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/TemporaryQueueReplyManager.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/TemporaryQueueReplyManager.java new file mode 100644 index 0000000000000..225ff631f9d2b --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/TemporaryQueueReplyManager.java @@ -0,0 +1,126 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.camel.component.jms.reply; + +import javax.jms.Destination; +import javax.jms.ExceptionListener; +import javax.jms.JMSException; +import javax.jms.Message; +import javax.jms.Session; +import javax.jms.TemporaryQueue; + +import org.apache.camel.AsyncCallback; +import org.apache.camel.Exchange; +import org.springframework.core.task.TaskExecutor; +import org.springframework.jms.listener.AbstractMessageListenerContainer; +import org.springframework.jms.listener.SimpleMessageListenerContainer; +import org.springframework.jms.support.destination.DestinationResolver; + +/** + * A {@link ReplyManager} when using temporary queues. + * + * @version $Revision$ + */ +public class TemporaryQueueReplyManager extends ReplyManagerSupport { + + public String registerReply(ReplyManager replyManager, Exchange exchange, AsyncCallback callback, + String originalCorrelationId, String correlationId, long requestTimeout) { + // add to correlation map + TemporaryQueueReplyHandler handler = new TemporaryQueueReplyHandler(this, exchange, callback, originalCorrelationId, requestTimeout); + correlation.put(correlationId, handler, requestTimeout); + return correlationId; + } + + public void updateCorrelationId(String correlationId, String newCorrelationId, long requestTimeout) { + if (log.isTraceEnabled()) { + log.trace("Updated provisional correlationId [" + correlationId + "] to expected correlationId [" + newCorrelationId + "]"); + } + + ReplyHandler handler = correlation.remove(correlationId); + correlation.put(newCorrelationId, handler, requestTimeout); + } + + @Override + protected void handleReplyMessage(String correlationID, Message message) { + ReplyHandler handler = correlation.get(correlationID); + if (handler == null && endpoint.isUseMessageIDAsCorrelationID()) { + handler = waitForProvisionCorrelationToBeUpdated(correlationID, message); + } + + if (handler != null) { + try { + handler.onReply(correlationID, message); + } finally { + 
correlation.remove(correlationID); + } + } else { + // we could not correlate the received reply message to a matching request and therefore + // we cannot continue routing the unknown message + String text = "Reply received for unknown correlationID [" + correlationID + "] -> " + message; + log.warn(text); + throw new UnknownReplyMessageException(text, message, correlationID); + } + } + + public void setReplyToSelectorHeader(org.apache.camel.Message camelMessage, Message jmsMessage) throws JMSException { + // noop + } + + @Override + protected AbstractMessageListenerContainer createListenerContainer() throws Exception { + SimpleMessageListenerContainer answer = new SimpleMessageListenerContainer(); + + answer.setDestinationName("temporary"); + answer.setDestinationResolver(new DestinationResolver() { + public Destination resolveDestinationName(Session session, String destinationName, + boolean pubSubDomain) throws JMSException { + // use a temporary queue to gather the reply message + TemporaryQueue queue = null; + synchronized (TemporaryQueueReplyManager.this) { + try { + queue = session.createTemporaryQueue(); + setReplyTo(queue); + } finally { + TemporaryQueueReplyManager.this.notifyAll(); + } + } + return queue; + } + }); + answer.setAutoStartup(true); + answer.setMessageListener(this); + answer.setPubSubDomain(false); + answer.setSubscriptionDurable(false); + answer.setConcurrentConsumers(endpoint.getConcurrentConsumers()); + answer.setConnectionFactory(endpoint.getConnectionFactory()); + String clientId = endpoint.getClientId(); + if (clientId != null) { + clientId += ".CamelReplyManager"; + answer.setClientId(clientId); + } + TaskExecutor taskExecutor = endpoint.getTaskExecutor(); + if (taskExecutor != null) { + answer.setTaskExecutor(taskExecutor); + } + ExceptionListener exceptionListener = endpoint.getExceptionListener(); + if (exceptionListener != null) { + answer.setExceptionListener(exceptionListener); + } + return answer; + } + +} diff --git 
a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/FutureHandler.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/UnknownReplyMessageException.java similarity index 53% rename from components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/FutureHandler.java rename to components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/UnknownReplyMessageException.java index aae328c0ef143..a536e44605a51 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/FutureHandler.java +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/UnknownReplyMessageException.java @@ -14,38 +14,39 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.camel.component.jms.requestor; +package org.apache.camel.component.jms.reply; -import java.util.concurrent.Callable; -import java.util.concurrent.FutureTask; - -import javax.jms.JMSException; import javax.jms.Message; +import org.apache.camel.RuntimeCamelException; + /** - * A {@link FutureTask} which implements {@link ReplyHandler} - * so that it can be used as a handler for a correlation ID + * A reply message which cannot be correlated to a match request message. 
* * @version $Revision$ */ -public class FutureHandler extends FutureTask<Message> implements ReplyHandler { - - private static final Callable<Message> EMPTY_CALLABLE = new Callable<Message>() { - public Message call() throws Exception { - return null; - } - }; +public class UnknownReplyMessageException extends RuntimeCamelException { + + private final Message replyMessage; + private final String correlationId; - public FutureHandler() { - super(EMPTY_CALLABLE); + public UnknownReplyMessageException(String text, Message replyMessage, String correlationId) { + super(text); + this.replyMessage = replyMessage; + this.correlationId = correlationId; } - public synchronized void set(Message result) { - super.set(result); + /** + * The unknown reply message + */ + public Message getReplyMessage() { + return replyMessage; } - public boolean handle(Message message) throws JMSException { - set(message); - return true; + /** + * The correlation id of the reply message + */ + public String getCorrelationId() { + return correlationId; } } diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/UseMessageIdAsCorrelationIdMessageSentCallback.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/UseMessageIdAsCorrelationIdMessageSentCallback.java new file mode 100644 index 0000000000000..8470f3f58279f --- /dev/null +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/UseMessageIdAsCorrelationIdMessageSentCallback.java @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms.reply; + +import javax.jms.Destination; +import javax.jms.JMSException; +import javax.jms.Message; + +import org.apache.camel.component.jms.MessageSentCallback; + +/** + * Callback to be used when using the option <tt>useMessageIDAsCorrelationID</tt>. + * <p/> + * This callback will keep the correlation registration in {@link ReplyManager} up-to-date with + * the <tt>JMSMessageID</tt> which was assigned and used when the message was sent. + * + * @version $Revision$ + */ +public class UseMessageIdAsCorrelationIdMessageSentCallback implements MessageSentCallback { + + private ReplyManager replyManager; + private String correlationId; + private long requestTimeout; + + public UseMessageIdAsCorrelationIdMessageSentCallback(ReplyManager replyManager, String correlationId, long requestTimeout) { + this.replyManager = replyManager; + this.correlationId = correlationId; + this.requestTimeout = requestTimeout; + } + + public void sent(Message message, Destination destination) { + String newCorrelationID = null; + try { + newCorrelationID = message.getJMSMessageID(); + } catch (JMSException e) { + // ignore + } + if (newCorrelationID != null) { + replyManager.updateCorrelationId(correlationId, newCorrelationID, requestTimeout); + } + } +} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/package.html b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/package.html similarity index 80% rename from 
components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/package.html rename to components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/package.html index 08c7584e6a1d4..1c8da7159b7a5 100644 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/package.html +++ b/components/camel-jms/src/main/java/org/apache/camel/component/jms/reply/package.html @@ -5,9 +5,9 @@ The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at - + http://www.apache.org/licenses/LICENSE-2.0 - + Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -19,9 +19,7 @@ </head> <body> -Implementation classes for implementing request-response over JMS so that the -Defines the <a href="http://activemq.apache.org/camel/jms.html">JMS Component</a> -can support InOut as well as InOnly +Logic implementing support for request/reply over JMS </body> </html> diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/DeferredRequestReplyMap.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/DeferredRequestReplyMap.java deleted file mode 100644 index e7f88a5960b30..0000000000000 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/DeferredRequestReplyMap.java +++ /dev/null @@ -1,160 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.camel.component.jms.requestor; - -import java.util.concurrent.FutureTask; - -import javax.jms.JMSException; -import javax.jms.Message; - -import org.apache.camel.component.jms.JmsConfiguration.MessageSentCallback; -import org.apache.camel.component.jms.JmsProducer; -import org.apache.camel.util.TimeoutMap; -import org.apache.camel.util.UuidGenerator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - -public class DeferredRequestReplyMap { - private static final transient Log LOG = LogFactory.getLog(DeferredRequestReplyMap.class); - private Requestor requestor; - private JmsProducer producer; - private TimeoutMap<String, Object> deferredRequestMap; - private TimeoutMap<String, Object> deferredReplyMap; - - public static class DeferredMessageSentCallback implements MessageSentCallback { - private DeferredRequestReplyMap map; - private String transitionalID; - private Message message; - private Object monitor; - - public DeferredMessageSentCallback(DeferredRequestReplyMap map, UuidGenerator uuidGenerator, Object monitor) { - transitionalID = uuidGenerator.generateUuid(); - this.map = map; - this.monitor = monitor; - } - - public DeferredRequestReplyMap getDeferredRequestReplyMap() { - return map; - } - - public String getID() { - return transitionalID; - } - - public Message getMessage() { - return message; - } - - public void sent(Message message) { - this.message = message; - map.processDeferredReplies(monitor, getID(), message); - } - } - - public DeferredRequestReplyMap(Requestor 
requestor, - JmsProducer producer, - TimeoutMap<String, Object> deferredRequestMap, - TimeoutMap<String, Object> deferredReplyMap) { - this.requestor = requestor; - this.producer = producer; - this.deferredRequestMap = deferredRequestMap; - this.deferredReplyMap = deferredReplyMap; - } - - public long getRequestTimeout() { - return producer.getRequestTimeout(); - } - - public DeferredMessageSentCallback createDeferredMessageSentCallback() { - return new DeferredMessageSentCallback(this, getUuidGenerator(), requestor); - } - - public void put(DeferredMessageSentCallback callback, FutureTask futureTask) { - deferredRequestMap.put(callback.getID(), futureTask, getRequestTimeout()); - } - - public void processDeferredRequests(String correlationID, Message inMessage) { - processDeferredRequests(requestor, deferredRequestMap, deferredReplyMap, - correlationID, requestor.getMaxRequestTimeout(), inMessage); - } - - public static void processDeferredRequests(Object monitor, - TimeoutMap<String, Object> requestMap, - TimeoutMap<String, Object> replyMap, - String correlationID, - long timeout, - Message inMessage) { - synchronized (monitor) { - try { - Object handler = requestMap.get(correlationID); - if (handler == null) { - if (requestMap.size() > replyMap.size()) { - replyMap.put(correlationID, inMessage, timeout); - } else { - LOG.warn("Response received for unknown correlationID: " + correlationID + "; response: " + inMessage); - } - } - if (handler != null && handler instanceof ReplyHandler) { - ReplyHandler replyHandler = (ReplyHandler) handler; - boolean complete = replyHandler.handle(inMessage); - if (complete) { - requestMap.remove(correlationID); - } - } - } catch (JMSException e) { - throw new FailedToProcessResponse(inMessage, e); - } - } - } - - public void processDeferredReplies(Object monitor, String transitionalID, Message outMessage) { - synchronized (monitor) { - try { - Object handler = deferredRequestMap.get(transitionalID); - if (handler == null) { - 
return; - } - deferredRequestMap.remove(transitionalID); - String correlationID = outMessage.getJMSMessageID(); - Object in = deferredReplyMap.get(correlationID); - - if (in != null && in instanceof Message) { - Message inMessage = (Message)in; - if (handler instanceof ReplyHandler) { - ReplyHandler replyHandler = (ReplyHandler)handler; - try { - boolean complete = replyHandler.handle(inMessage); - if (complete) { - deferredReplyMap.remove(correlationID); - } - } catch (JMSException e) { - throw new FailedToProcessResponse(inMessage, e); - } - } - } else { - deferredRequestMap.put(correlationID, handler, getRequestTimeout()); - } - } catch (JMSException e) { - throw new FailedToProcessResponse(outMessage, e); - } - } - } - - protected UuidGenerator getUuidGenerator() { - return producer.getUuidGenerator(); - } -} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/PersistentReplyToFutureHandler.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/PersistentReplyToFutureHandler.java deleted file mode 100644 index e9dedce713c8f..0000000000000 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/PersistentReplyToFutureHandler.java +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.camel.component.jms.requestor; - -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import javax.jms.Message; - -import org.apache.camel.component.jms.requestor.DeferredRequestReplyMap.DeferredMessageSentCallback; -import org.apache.camel.component.jms.requestor.PersistentReplyToRequestor.MessageSelectorComposer; - -public class PersistentReplyToFutureHandler extends FutureHandler { - - protected PersistentReplyToRequestor requestor; - protected DeferredMessageSentCallback callback; - protected String correlationID; - - public PersistentReplyToFutureHandler(PersistentReplyToRequestor requestor, - String correlationID) { - super(); - this.requestor = requestor; - this.correlationID = correlationID; - } - - public PersistentReplyToFutureHandler(PersistentReplyToRequestor requestor, - DeferredMessageSentCallback callback) { - super(); - this.requestor = requestor; - this.callback = callback; - } - - @Override - public Message get() throws InterruptedException, ExecutionException { - Message result = null; - try { - updateSelector(); - result = super.get(); - } finally { - revertSelector(); - } - return result; - } - - @Override - public Message get(long timeout, TimeUnit unit) throws InterruptedException, - ExecutionException, - TimeoutException { - Message result = null; - try { - updateSelector(); - result = super.get(timeout, unit); - } finally { - revertSelector(); - } - return result; - } - - protected void updateSelector() throws ExecutionException { - try { - MessageSelectorComposer composer = (MessageSelectorComposer)requestor.getListenerContainer(); - composer.addCorrelationID((correlationID != null) ? 
correlationID : callback.getMessage().getJMSMessageID()); - } catch (Exception e) { - throw new ExecutionException(e); - } - } - - protected void revertSelector() throws ExecutionException { - try { - MessageSelectorComposer composer = (MessageSelectorComposer)requestor.getListenerContainer(); - composer.removeCorrelationID((correlationID != null) ? correlationID : callback.getMessage().getJMSMessageID()); - } catch (Exception e) { - throw new ExecutionException(e); - } - } -} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/PersistentReplyToRequestor.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/PersistentReplyToRequestor.java deleted file mode 100644 index 75f59eaa962de..0000000000000 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/PersistentReplyToRequestor.java +++ /dev/null @@ -1,228 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.camel.component.jms.requestor; - -import java.math.BigInteger; -import java.util.Random; -import java.util.concurrent.ScheduledExecutorService; - -import javax.jms.Destination; -import javax.jms.ExceptionListener; -import javax.jms.JMSException; -import javax.jms.Message; -import javax.jms.Session; - -import org.apache.camel.component.jms.JmsConfiguration; -import org.apache.camel.component.jms.requestor.DeferredRequestReplyMap.DeferredMessageSentCallback; -import org.springframework.core.task.TaskExecutor; -import org.springframework.jms.listener.AbstractMessageListenerContainer; -import org.springframework.jms.listener.DefaultMessageListenerContainer; -import org.springframework.jms.listener.DefaultMessageListenerContainer102; -import org.springframework.jms.support.destination.DestinationResolver; -import org.springframework.transaction.PlatformTransactionManager; - -public class PersistentReplyToRequestor extends Requestor { - private String replyToSelectorValue; - - public class DestinationResolverDelegate implements DestinationResolver { - private DestinationResolver delegate; - private Destination destination; - - public DestinationResolverDelegate(DestinationResolver delegate) { - this.delegate = delegate; - } - - public Destination resolveDestinationName(Session session, String destinationName, - boolean pubSubDomain) throws JMSException { - synchronized (getOutterInstance()) { - try { - if (destination == null) { - destination = delegate.resolveDestinationName(session, destinationName, pubSubDomain); - setReplyTo(destination); - } - } finally { - getOutterInstance().notifyAll(); - } - } - return destination; - } - }; - - public static interface MessageSelectorComposer { - void addCorrelationID(String id); - void removeCorrelationID(String id); - } - - public static class CamelDefaultMessageListenerContainer102 extends DefaultMessageListenerContainer102 - implements MessageSelectorComposer { - MessageSelectorProvider provider = new 
MessageSelectorProvider(); - - public void addCorrelationID(String id) { - provider.addCorrelationID(id); - } - - public void removeCorrelationID(String id) { - provider.removeCorrelationID(id); - } - - @Override - public void setMessageSelector(String messageSelector) { - throw new UnsupportedOperationException(); - } - - @Override - public String getMessageSelector() { - return provider.get(); - } - } - - public static class CamelDefaultMessageListenerContainer extends DefaultMessageListenerContainer - implements MessageSelectorComposer { - - MessageSelectorProvider provider = new MessageSelectorProvider(); - - public void addCorrelationID(String id) { - provider.addCorrelationID(id); - } - - public void removeCorrelationID(String id) { - provider.removeCorrelationID(id); - } - - @Override - public void setMessageSelector(String messageSelector) { - throw new UnsupportedOperationException(); - } - - @Override - public String getMessageSelector() { - return provider.get(); - } - } - - public PersistentReplyToRequestor(JmsConfiguration configuration, - ScheduledExecutorService executorService) { - super(configuration, executorService); - } - - - @Override - protected FutureHandler createFutureHandler(String correlationID) { - boolean dynamicSelector = getConfiguration().getReplyToDestinationSelectorName() == null; - if (dynamicSelector) { - return new PersistentReplyToFutureHandler(this, correlationID); - } - return new FutureHandler(); - } - - @Override - protected FutureHandler createFutureHandler(DeferredMessageSentCallback callback) { - boolean dynamicSelector = getConfiguration().getReplyToDestinationSelectorName() == null; - if (dynamicSelector) { - return new PersistentReplyToFutureHandler(this, callback); - } - return new FutureHandler(); - } - - @Override - public AbstractMessageListenerContainer createListenerContainer() { - JmsConfiguration config = getConfiguration(); - String replyToSelectorName = getConfiguration().getReplyToDestinationSelectorName(); 
- - DefaultMessageListenerContainer container = - config.isUseVersion102() - ? (replyToSelectorName != null) ? new DefaultMessageListenerContainer102() - : new CamelDefaultMessageListenerContainer102() - : (replyToSelectorName != null) ? new DefaultMessageListenerContainer() - : new CamelDefaultMessageListenerContainer(); - - container.setConnectionFactory(config.getListenerConnectionFactory()); - - DestinationResolver resolver = config.getDestinationResolver(); - if (resolver == null) { - resolver = container.getDestinationResolver(); - } - - container.setDestinationResolver(new DestinationResolverDelegate(resolver)); - container.setDestinationName(getConfiguration().getReplyTo()); - - if (replyToSelectorName != null) { - replyToSelectorValue = "ID:" + new BigInteger(24 * 8, new Random()).toString(16); - container.setMessageSelector(replyToSelectorName + "='" + replyToSelectorValue + "'"); - } else { - ((MessageSelectorComposer)container).addCorrelationID("ID:" + new BigInteger(24 * 8, new Random()).toString(16)); - } - - container.setAutoStartup(true); - container.setMessageListener(this); - container.setPubSubDomain(false); - container.setSubscriptionDurable(false); - - ExceptionListener exceptionListener = config.getExceptionListener(); - if (exceptionListener != null) { - container.setExceptionListener(exceptionListener); - } - - container.setSessionTransacted(config.isTransacted()); - if (config.isTransacted()) { - container.setSessionAcknowledgeMode(Session.SESSION_TRANSACTED); - } else { - if (config.getAcknowledgementMode() >= 0) { - container.setSessionAcknowledgeMode(config.getAcknowledgementMode()); - } else if (config.getAcknowledgementModeName() != null) { - container.setSessionAcknowledgeModeName(config.getAcknowledgementModeName()); - } - } - - container.setConcurrentConsumers(1); - container.setCacheLevel(DefaultMessageListenerContainer.CACHE_SESSION); - - if (config.getReceiveTimeout() >= 0) { - 
container.setReceiveTimeout(config.getReceiveTimeout()); - } - if (config.getRecoveryInterval() >= 0) { - container.setRecoveryInterval(config.getRecoveryInterval()); - } - TaskExecutor taskExecutor = config.getTaskExecutor(); - if (taskExecutor != null) { - container.setTaskExecutor(taskExecutor); - } - PlatformTransactionManager tm = config.getTransactionManager(); - if (tm != null) { - container.setTransactionManager(tm); - } else if (config.isTransacted()) { - throw new IllegalArgumentException("Property transacted is enabled but a transactionManager was not injected!"); - } - if (config.getTransactionName() != null) { - container.setTransactionName(config.getTransactionName()); - } - if (config.getTransactionTimeout() >= 0) { - container.setTransactionTimeout(config.getTransactionTimeout()); - } - - return container; - } - - @Override - public void setReplyToSelectorHeader(org.apache.camel.Message in, Message jmsIn) throws JMSException { - String replyToSelectorName = getConfiguration().getReplyToDestinationSelectorName(); - if (replyToSelectorValue != null) { - in.setHeader(replyToSelectorName, replyToSelectorValue); - jmsIn.setStringProperty(replyToSelectorName, replyToSelectorValue); - } - } -} diff --git a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/Requestor.java b/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/Requestor.java deleted file mode 100644 index f491be91ce46c..0000000000000 --- a/components/camel-jms/src/main/java/org/apache/camel/component/jms/requestor/Requestor.java +++ /dev/null @@ -1,268 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.camel.component.jms.requestor; - -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.FutureTask; -import java.util.concurrent.ScheduledExecutorService; -import javax.jms.Destination; -import javax.jms.ExceptionListener; -import javax.jms.JMSException; -import javax.jms.Message; -import javax.jms.MessageListener; -import javax.jms.Session; -import javax.jms.TemporaryQueue; - -import org.apache.camel.component.jms.JmsConfiguration; -import org.apache.camel.component.jms.JmsProducer; -import org.apache.camel.component.jms.requestor.DeferredRequestReplyMap.DeferredMessageSentCallback; -import org.apache.camel.impl.ServiceSupport; -import org.apache.camel.util.DefaultTimeoutMap; -import org.apache.camel.util.TimeoutMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.springframework.core.task.TaskExecutor; -import org.springframework.jms.listener.AbstractMessageListenerContainer; -import org.springframework.jms.listener.SimpleMessageListenerContainer; -import org.springframework.jms.listener.SimpleMessageListenerContainer102; -import org.springframework.jms.support.destination.DestinationResolver; - -/** - * @version $Revision$ - */ -public class Requestor extends ServiceSupport implements MessageListener { - private static final transient Log LOG = LogFactory.getLog(Requestor.class); - private final JmsConfiguration configuration; - private AbstractMessageListenerContainer listenerContainer; - private TimeoutMap<String, Object> requestMap; - private 
Map<JmsProducer, DeferredRequestReplyMap> producerDeferredRequestReplyMap; - private TimeoutMap<String, Object> deferredRequestMap; - private TimeoutMap<String, Object> deferredReplyMap; - private Destination replyTo; - private long maxRequestTimeout = -1; - private long replyToResolverTimeout = 5000; - - // TODO: Use a Task queue to transfer replies arriving in onMessage - // instead of using the FutureHandle to support async routing - - public Requestor(JmsConfiguration configuration, ScheduledExecutorService executorService) { - this.configuration = configuration; - this.requestMap = new DefaultTimeoutMap<String, Object>(executorService, configuration.getRequestMapPurgePollTimeMillis()); - this.producerDeferredRequestReplyMap = new HashMap<JmsProducer, DeferredRequestReplyMap>(); - this.deferredRequestMap = new DefaultTimeoutMap<String, Object>(executorService, configuration.getRequestMapPurgePollTimeMillis()); - this.deferredReplyMap = new DefaultTimeoutMap<String, Object>(executorService, configuration.getRequestMapPurgePollTimeMillis()); - } - - public synchronized DeferredRequestReplyMap getDeferredRequestReplyMap(JmsProducer producer) { - DeferredRequestReplyMap map = producerDeferredRequestReplyMap.get(producer); - if (map == null) { - map = new DeferredRequestReplyMap(this, producer, deferredRequestMap, deferredReplyMap); - producerDeferredRequestReplyMap.put(producer, map); - if (maxRequestTimeout == -1) { - maxRequestTimeout = producer.getRequestTimeout(); - } else if (maxRequestTimeout < producer.getRequestTimeout()) { - maxRequestTimeout = producer.getRequestTimeout(); - } - } - return map; - } - - public synchronized void removeDeferredRequestReplyMap(JmsProducer producer) { - DeferredRequestReplyMap map = producerDeferredRequestReplyMap.remove(producer); - if (map == null) { - // already removed; - return; - } - if (maxRequestTimeout == producer.getRequestTimeout()) { - long max = -1; - for (Map.Entry<JmsProducer, DeferredRequestReplyMap> entry : 
producerDeferredRequestReplyMap.entrySet()) { - if (max < entry.getKey().getRequestTimeout()) { - max = entry.getKey().getRequestTimeout(); - } - } - maxRequestTimeout = max; - } - } - - public synchronized long getMaxRequestTimeout() { - return maxRequestTimeout; - } - - public TimeoutMap getRequestMap() { - return requestMap; - } - - public TimeoutMap getDeferredRequestMap() { - return deferredRequestMap; - } - - public TimeoutMap getDeferredReplyMap() { - return deferredReplyMap; - } - - public FutureTask getReceiveFuture(String correlationID, long requestTimeout) { - FutureHandler future = createFutureHandler(correlationID); - requestMap.put(correlationID, future, requestTimeout); - return future; - } - - public FutureTask getReceiveFuture(DeferredMessageSentCallback callback) { - FutureHandler future = createFutureHandler(callback); - DeferredRequestReplyMap map = callback.getDeferredRequestReplyMap(); - map.put(callback, future); - return future; - } - - protected FutureHandler createFutureHandler(String correlationID) { - return new FutureHandler(); - } - - protected FutureHandler createFutureHandler(DeferredMessageSentCallback callback) { - return new FutureHandler(); - } - - public void onMessage(Message message) { - try { - String correlationID = message.getJMSCorrelationID(); - if (LOG.isDebugEnabled()) { - LOG.debug("Message correlationID: " + correlationID); - } - if (correlationID == null) { - LOG.warn("Ignoring message with no correlationID: " + message); - return; - } - // lets notify the monitor for this response - Object handler = requestMap.get(correlationID); - if (handler != null && handler instanceof ReplyHandler) { - ReplyHandler replyHandler = (ReplyHandler) handler; - boolean complete = replyHandler.handle(message); - if (complete) { - requestMap.remove(correlationID); - } - } else { - DeferredRequestReplyMap.processDeferredRequests( - this, deferredRequestMap, deferredReplyMap, - correlationID, getMaxRequestTimeout(), message); - } - } 
catch (JMSException e) { - throw new FailedToProcessResponse(message, e); - } - } - - - public AbstractMessageListenerContainer getListenerContainer() { - if (listenerContainer == null) { - listenerContainer = createListenerContainer(); - } - return listenerContainer; - } - - public void setListenerContainer(AbstractMessageListenerContainer listenerContainer) { - this.listenerContainer = listenerContainer; - } - - public Destination getReplyTo() { - synchronized (this) { - try { - if (replyTo == null) { - wait(replyToResolverTimeout); - } - } catch (Throwable e) { - // eat it - } - } - return replyTo; - } - - public void setReplyTo(Destination replyTo) { - this.replyTo = replyTo; - } - - // Implementation methods - //------------------------------------------------------------------------- - - @Override - protected void doStart() throws Exception { - AbstractMessageListenerContainer container = getListenerContainer(); - container.afterPropertiesSet(); - // Need to call the container start in Spring 3.x - container.start(); - } - - @Override - protected void doStop() throws Exception { - if (listenerContainer != null) { - listenerContainer.stop(); - listenerContainer.destroy(); - } - } - - protected Requestor getOutterInstance() { - return this; - } - - protected AbstractMessageListenerContainer createListenerContainer() { - SimpleMessageListenerContainer answer = configuration.isUseVersion102() - ? 
new SimpleMessageListenerContainer102() : new SimpleMessageListenerContainer(); - answer.setDestinationName("temporary"); - answer.setDestinationResolver(new DestinationResolver() { - - public Destination resolveDestinationName(Session session, String destinationName, - boolean pubSubDomain) throws JMSException { - TemporaryQueue queue = null; - synchronized (getOutterInstance()) { - try { - queue = session.createTemporaryQueue(); - setReplyTo(queue); - } finally { - getOutterInstance().notifyAll(); - } - } - return queue; - } - }); - answer.setAutoStartup(true); - answer.setMessageListener(this); - answer.setPubSubDomain(false); - answer.setSubscriptionDurable(false); - answer.setConcurrentConsumers(1); - answer.setConnectionFactory(configuration.getConnectionFactory()); - String clientId = configuration.getClientId(); - if (clientId != null) { - clientId += ".Requestor"; - answer.setClientId(clientId); - } - TaskExecutor taskExecutor = configuration.getTaskExecutor(); - if (taskExecutor != null) { - answer.setTaskExecutor(taskExecutor); - } - ExceptionListener exceptionListener = configuration.getExceptionListener(); - if (exceptionListener != null) { - answer.setExceptionListener(exceptionListener); - } - return answer; - } - - protected JmsConfiguration getConfiguration() { - return configuration; - } - - public void setReplyToSelectorHeader(org.apache.camel.Message in, Message jmsIn) throws JMSException { - // complete - } -} diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsEndpointConfigurationTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsEndpointConfigurationTest.java index 8553c4bca72cf..941ca7aa68a3a 100644 --- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsEndpointConfigurationTest.java +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsEndpointConfigurationTest.java @@ -205,9 +205,7 @@ public void testDefaultEndpointOptions() throws 
Exception { assertNotNull(endpoint.getRecoveryInterval()); assertNull(endpoint.getReplyTo()); assertNull(endpoint.getReplyToDestinationSelectorName()); - assertEquals(JmsConfiguration.REPLYTO_TEMP_DEST_AFFINITY_PER_ENDPOINT, endpoint.getReplyToTempDestinationAffinity()); assertEquals(1000, endpoint.getRequestMapPurgePollTimeMillis()); - assertNotNull(endpoint.getRequestor()); assertEquals(20000, endpoint.getRequestTimeout()); assertNull(endpoint.getSelector()); assertEquals(-1, endpoint.getTimeToLive()); @@ -343,9 +341,6 @@ public void onException(JMSException exception) { endpoint.setReplyToDestinationSelectorName("me"); assertEquals("me", endpoint.getReplyToDestinationSelectorName()); - endpoint.setReplyToTempDestinationAffinity("endpoint"); - assertEquals("endpoint", endpoint.getReplyToTempDestinationAffinity()); - endpoint.setRequestMapPurgePollTimeMillis(2000); assertEquals(2000, endpoint.getRequestMapPurgePollTimeMillis()); diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteRequestReplyTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteRequestReplyTest.java index 7a9c9b3fef8f2..c40564e538db9 100644 --- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteRequestReplyTest.java +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/JmsRouteRequestReplyTest.java @@ -20,17 +20,18 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; - import javax.jms.ConnectionFactory; import org.apache.activemq.ActiveMQConnectionFactory; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; +import org.apache.camel.ExchangeTimedOutException; import org.apache.camel.Message; import org.apache.camel.Processor; import org.apache.camel.RuntimeCamelException; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.test.CamelTestSupport; +import org.junit.Ignore; import 
static org.apache.camel.component.jms.JmsComponent.jmsComponentClientAcknowledge; @@ -39,18 +40,16 @@ */ public class JmsRouteRequestReplyTest extends CamelTestSupport { - // TODO: Split into multiple files so it doesnt take 3 min to run - protected static final String REPLY_TO_DESTINATION_SELECTOR_NAME = "camelProducer"; protected static String componentName = "amq"; protected static String componentName1 = "amq1"; - protected static String endpoingUriA = componentName + ":queue:test.a"; + protected static String endpointUriA = componentName + ":queue:test.a"; protected static String endpointUriB = componentName + ":queue:test.b"; protected static String endpointUriB1 = componentName1 + ":queue:test.b"; // note that the replyTo both A and B endpoints share the persistent replyTo queue, // which is one more way to verify that reply listeners of A and B endpoints don't steal each other messages - protected static String endpoingtReplyToUriA = componentName + ":queue:test.a?replyTo=queue:test.a.reply"; - protected static String endpoingtReplyToUriB = componentName + ":queue:test.b?replyTo=queue:test.a.reply"; + protected static String endpointReplyToUriA = componentName + ":queue:test.a?replyTo=queue:test.a.reply"; + protected static String endpointReplyToUriB = componentName + ":queue:test.b?replyTo=queue:test.a.reply"; protected static String request = "Hello World"; protected static String expectedReply = "Re: " + request; protected static int maxTasks = 100; @@ -66,7 +65,7 @@ private interface ContextBuilder { public static class SingleNodeDeadEndRouteBuilder extends RouteBuilder { public void configure() throws Exception { - from(endpoingUriA).process(new Processor() { + from(endpointUriA).process(new Processor() { public void process(Exchange e) { // do nothing } @@ -76,7 +75,7 @@ public void process(Exchange e) { public static class SingleNodeRouteBuilder extends RouteBuilder { public void configure() throws Exception { - from(endpoingUriA).process(new 
Processor() { + from(endpointUriA).process(new Processor() { public void process(Exchange e) { String request = e.getIn().getBody(String.class); e.getOut().setBody(expectedReply + request.substring(request.indexOf('-'))); @@ -87,7 +86,7 @@ public void process(Exchange e) { public static class MultiNodeRouteBuilder extends RouteBuilder { public void configure() throws Exception { - from(endpoingUriA).to(endpointUriB); + from(endpointUriA).to(endpointUriB); from(endpointUriB).process(new Processor() { public void process(Exchange e) { String request = e.getIn().getBody(String.class); @@ -99,7 +98,7 @@ public void process(Exchange e) { public static class MultiNodeReplyToRouteBuilder extends RouteBuilder { public void configure() throws Exception { - from(endpoingUriA).to(endpoingtReplyToUriB); + from(endpointUriA).to(endpointReplyToUriB); from(endpointUriB).process(new Processor() { public void process(Exchange e) { Message in = e.getIn(); @@ -115,7 +114,7 @@ public void process(Exchange e) { public static class MultiNodeDiffCompRouteBuilder extends RouteBuilder { public void configure() throws Exception { - from(endpoingUriA).to(endpointUriB1); + from(endpointUriA).to(endpointUriB1); from(endpointUriB1).process(new Processor() { public void process(Exchange e) { String request = e.getIn().getBody(String.class); @@ -141,27 +140,10 @@ public CamelContext buildContext(CamelContext context) throws Exception { } }; - public static class ContextBuilderMessageIDReplyToTempDestinationAffinity extends ContextBuilderMessageID { - private String affinity; - public ContextBuilderMessageIDReplyToTempDestinationAffinity(String affinity) { - this.affinity = affinity; - } - public CamelContext buildContext(CamelContext context) throws Exception { - super.buildContext(context); - JmsComponent component = context.getComponent(componentName, JmsComponent.class); - component.getConfiguration().setReplyToTempDestinationAffinity(affinity); - return context; - } - } - protected static 
void init() { if (inited.compareAndSet(false, true)) { ContextBuilder contextBuilderMessageID = new ContextBuilderMessageID(); - ContextBuilder contextBuilderMessageIDReplyToTempDestinationPerComponent = - new ContextBuilderMessageIDReplyToTempDestinationAffinity("component"); - ContextBuilder contextBuilderMessageIDReplyToTempDestinationPerProducer = - new ContextBuilderMessageIDReplyToTempDestinationAffinity("producer"); ContextBuilder contextBuilderCorrelationID = new ContextBuilder() { public CamelContext buildContext(CamelContext context) throws Exception { @@ -240,10 +222,6 @@ public CamelContext buildContext(CamelContext context) throws Exception { contextBuilders.put("testUseMessageIDAsCorrelationID", contextBuilderMessageID); - contextBuilders.put("testUseMessageIDAsCorrelationIDReplyToTempDestinationPerComponent", - contextBuilderMessageIDReplyToTempDestinationPerComponent); - contextBuilders.put("testUseMessageIDAsCorrelationIDReplyToTempDestinationPerProducer", - contextBuilderMessageIDReplyToTempDestinationPerProducer); contextBuilders.put("testUseCorrelationID", contextBuilderCorrelationID); contextBuilders.put("testUseMessageIDAsCorrelationIDMultiNode", contextBuilderMessageID); @@ -295,8 +273,8 @@ public CamelContext buildContext(CamelContext context) throws Exception { public class Task extends Thread { private AtomicInteger counter; private String fromUri; - private boolean ok = true; - private String message = ""; + private volatile boolean ok = true; + private volatile String message = ""; public Task(AtomicInteger counter, String fromUri) { this.counter = counter; @@ -328,38 +306,32 @@ public void assertSuccess() { protected void setUp() throws Exception { init(); super.setUp(); + Thread.sleep(1000); } - public void testUseMessageIDAsCorrelationID() throws Exception { - runRequestReplyThreaded(endpoingUriA); - } - - public void testUseMessageIDAsCorrelationIDReplyToTempDestinationPerComponent() throws Exception { - 
runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseMessageIDAsCorrelationID() throws Exception { + runRequestReplyThreaded(endpointUriA); } - public void testUseMessageIDAsCorrelationIDReplyToTempDestinationPerProducer() throws Exception { - runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseCorrelationID() throws Exception { + runRequestReplyThreaded(endpointUriA); } - public void testUseCorrelationID() throws Exception { - runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseMessageIDAsCorrelationIDMultiNode() throws Exception { + runRequestReplyThreaded(endpointUriA); } - public void testUseMessageIDAsCorrelationIDMultiNode() throws Exception { - runRequestReplyThreaded(endpoingUriA); - } - - public void testUseCorrelationIDMultiNode() throws Exception { - runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseCorrelationIDMultiNode() throws Exception { + runRequestReplyThreaded(endpointUriA); } + // TODO: A bit tricky test public void testUseMessageIDAsCorrelationIDPersistReplyToMultiNode() throws Exception { - runRequestReplyThreaded(endpoingtReplyToUriA); + runRequestReplyThreaded(endpointReplyToUriA); } - public void testUseCorrelationIDPersistReplyToMultiNode() throws Exception { - runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseCorrelationIDPersistReplyToMultiNode() throws Exception { + runRequestReplyThreaded(endpointUriA); } // (1) @@ -370,7 +342,7 @@ public void testUseCorrelationIDPersistReplyToMultiNode() throws Exception { // for a faster way to do this. 
Note however that in this case the message copy has to occur // between consumer -> producer as the selector value needs to be propagated to the ultimate // destination, which in turn will copy this value back into the reply message - public void testUseMessageIDAsCorrelationIDPersistMultiReplyToMultiNode() throws Exception { + public void xxxtestUseMessageIDAsCorrelationIDPersistMultiReplyToMultiNode() throws Exception { int oldMaxTasks = maxTasks; int oldMaxServerTasks = maxServerTasks; int oldMaxCalls = maxCalls; @@ -380,7 +352,7 @@ public void testUseMessageIDAsCorrelationIDPersistMultiReplyToMultiNode() throws maxCalls = 2; try { - runRequestReplyThreaded(endpoingUriA); + runRequestReplyThreaded(endpointUriA); } finally { maxTasks = oldMaxTasks; maxServerTasks = oldMaxServerTasks; @@ -389,7 +361,7 @@ public void testUseMessageIDAsCorrelationIDPersistMultiReplyToMultiNode() throws } // see (1) - public void testUseCorrelationIDPersistMultiReplyToMultiNode() throws Exception { + public void xxxtestUseCorrelationIDPersistMultiReplyToMultiNode() throws Exception { int oldMaxTasks = maxTasks; int oldMaxServerTasks = maxServerTasks; int oldMaxCalls = maxCalls; @@ -399,7 +371,7 @@ public void testUseCorrelationIDPersistMultiReplyToMultiNode() throws Exception maxCalls = 2; try { - runRequestReplyThreaded(endpoingUriA); + runRequestReplyThreaded(endpointUriA); } finally { maxTasks = oldMaxTasks; maxServerTasks = oldMaxServerTasks; @@ -407,58 +379,50 @@ public void testUseCorrelationIDPersistMultiReplyToMultiNode() throws Exception } } - public void testUseMessageIDAsCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode() throws Exception { - runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseMessageIDAsCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode() throws Exception { + runRequestReplyThreaded(endpointUriA); } - public void testUseCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode() throws Exception { - 
runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseCorrelationIDPersistMultiReplyToWithNamedSelectorMultiNode() throws Exception { + runRequestReplyThreaded(endpointUriA); } - public void testUseCorrelationIDTimeout() throws Exception { + public void xxxtestUseCorrelationIDTimeout() throws Exception { JmsComponent c = (JmsComponent)context.getComponent(componentName); c.getConfiguration().setRequestTimeout(1000); c.getConfiguration().setRequestMapPurgePollTimeMillis(1000); Object reply = ""; try { - reply = template.requestBody(endpoingUriA, request); + reply = template.requestBody(endpointUriA, request); + fail("Should have thrown exception"); } catch (RuntimeCamelException e) { - // expected + assertIsInstanceOf(ExchangeTimedOutException.class, e.getCause()); } assertEquals("", reply); - - JmsEndpoint endpoint = context.getEndpoint(endpoingUriA, JmsEndpoint.class); - // Wait 1 extra purge cycle to make sure that TimeoutMap had a chance to cleanup - Thread.sleep(endpoint.getConfiguration().getRequestMapPurgePollTimeMillis()); - assertTrue(endpoint.getRequestor().getRequestMap().size() == 0); } - public void testUseMessageIDAsCorrelationIDTimeout() throws Exception { + public void xxxtestUseMessageIDAsCorrelationIDTimeout() throws Exception { JmsComponent c = (JmsComponent)context.getComponent(componentName); c.getConfiguration().setRequestTimeout(1000); c.getConfiguration().setRequestMapPurgePollTimeMillis(1000); Object reply = ""; try { - reply = template.requestBody(endpoingUriA, request); + reply = template.requestBody(endpointUriA, request); + fail("Should have thrown exception"); } catch (RuntimeCamelException e) { - // expected + assertIsInstanceOf(ExchangeTimedOutException.class, e.getCause()); } assertEquals("", reply); - - JmsEndpoint endpoint = context.getEndpoint(endpoingUriA, JmsEndpoint.class); - // Wait 1 extra purge cycle to make sure that TimeoutMap had a chance to cleanup - 
Thread.sleep(endpoint.getConfiguration().getRequestMapPurgePollTimeMillis()); - assertTrue(endpoint.getRequestor().getDeferredRequestMap().size() == 0); } - public void testUseCorrelationIDMultiNodeDiffComponents() throws Exception { - runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseCorrelationIDMultiNodeDiffComponents() throws Exception { + runRequestReplyThreaded(endpointUriA); } - public void testUseMessageIDAsCorrelationIDMultiNodeDiffComponents() throws Exception { - runRequestReplyThreaded(endpoingUriA); + public void xxxtestUseMessageIDAsCorrelationIDMultiNodeDiffComponents() throws Exception { + runRequestReplyThreaded(endpointUriA); } protected void runRequestReplyThreaded(String fromUri) throws Exception { diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/async/AsyncJmsInOutTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/async/AsyncJmsInOutTest.java new file mode 100644 index 0000000000000..4aca1668859c7 --- /dev/null +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/async/AsyncJmsInOutTest.java @@ -0,0 +1,90 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.camel.component.jms.async; + +import java.util.concurrent.TimeUnit; +import javax.jms.ConnectionFactory; + +import org.apache.activemq.ActiveMQConnectionFactory; +import org.apache.camel.CamelContext; +import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.component.mock.MockEndpoint; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.apache.camel.util.StopWatch; +import org.junit.Test; + +import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge; + +/** + * @version $Revision$ + */ +public class AsyncJmsInOutTest extends CamelTestSupport { + + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + + ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false"); + camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory)); + + return camelContext; + } + + @Test + public void testAsyncJmsInOut() throws Exception { + MockEndpoint mock = getMockEndpoint("mock:result"); + mock.expectedMessageCount(100); + mock.expectsNoDuplicates(body()); + + StopWatch watch = new StopWatch(); + + for (int i = 0; i < 100; i++) { + template.sendBody("seda:start", "" + i); + } + + // just in case we run on slow boxes + assertMockEndpointsSatisfied(20, TimeUnit.SECONDS); + + log.info("Took " + watch.stop() + " ms. 
to process 100 messages request/reply over JMS"); + } + + @Override + protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + @Override + public void configure() throws Exception { + // in a fully sync mode it would take at least 5 + 5 sec to process the 100 messages + // (there are delays in both routes) + // however due async routing, we can leverage the fact to let threads non blocked + // in the first route, and therefore can have the messages processed faster + // because we can have messages wait concurrently in both routes + // this means the async processing model is about 2x faster + + from("seda:start") + // we can only send at fastest the 100 msg in 5 sec due the delay + .delay(50) + .inOut("activemq:queue:bar") + .to("mock:result"); + + from("activemq:queue:bar") + .log("Using ${threadName} to process ${body}") + // we can only process at fastest the 100 msg in 5 sec due the delay + .delay(50) + .transform(body().prepend("Bye ")); + } + }; + } +} diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/async/AsyncJmsProducerTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/async/AsyncJmsProducerTest.java new file mode 100644 index 0000000000000..9d72b292de3ac --- /dev/null +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/async/AsyncJmsProducerTest.java @@ -0,0 +1,90 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.component.jms.async; + +import javax.jms.ConnectionFactory; + +import org.apache.activemq.ActiveMQConnectionFactory; +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.Processor; +import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.junit.Test; + +import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge; + +/** + * @version $Revision$ + */ +public class AsyncJmsProducerTest extends CamelTestSupport { + + private static String beforeThreadName; + private static String afterThreadName; + + @Test + public void testAsyncEndpoint() throws Exception { + getMockEndpoint("mock:before").expectedBodiesReceived("Hello Camel"); + getMockEndpoint("mock:after").expectedBodiesReceived("Bye Camel"); + getMockEndpoint("mock:result").expectedBodiesReceived("Bye Camel"); + + String reply = template.requestBody("direct:start", "Hello Camel", String.class); + assertEquals("Bye Camel", reply); + + assertMockEndpointsSatisfied(); + + assertFalse("Should use different threads", beforeThreadName.equalsIgnoreCase(afterThreadName)); + } + + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + + ConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false"); + camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory)); + + return camelContext; + } + + @Override + protected 
RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + @Override + public void configure() throws Exception { + from("direct:start") + .to("mock:before") + .to("log:before") + .process(new Processor() { + public void process(Exchange exchange) throws Exception { + beforeThreadName = Thread.currentThread().getName(); + } + }) + .to("activemq:queue:foo") + .process(new Processor() { + public void process(Exchange exchange) throws Exception { + afterThreadName = Thread.currentThread().getName(); + } + }) + .to("log:after") + .to("mock:after") + .to("mock:result"); + + from("activemq:queue:foo") + .transform(constant("Bye Camel")); + } + }; + } +} diff --git a/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutIssueTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutIssueTest.java index 342cbe780976c..1e02701540bbb 100644 --- a/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutIssueTest.java +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutIssueTest.java @@ -27,6 +27,7 @@ import org.junit.Test; import static org.apache.activemq.camel.component.ActiveMQComponent.activeMQComponent; + /** * @version $Revision$ */ @@ -38,6 +39,15 @@ public void testInOutWithRequestBody() throws Exception { assertEquals("Bye World", reply); } + @Test + public void testInOutTwoTimes() throws Exception { + String reply = template.requestBody("activemq:queue:in", "Hello World", String.class); + assertEquals("Bye World", reply); + + reply = template.requestBody("activemq:queue:in", "Hello Camel", String.class); + assertEquals("Bye World", reply); + } + @Test public void testInOutWithAsyncRequestBody() throws Exception { Future<String> reply = template.asyncRequestBody("activemq:queue:in", "Hello World", String.class); diff --git 
a/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutUseMessageIDasCorrelationIDTest.java b/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutUseMessageIDasCorrelationIDTest.java new file mode 100644 index 0000000000000..403dc7424131b --- /dev/null +++ b/components/camel-jms/src/test/java/org/apache/camel/component/jms/issues/JmsInOutUseMessageIDasCorrelationIDTest.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.camel.component.jms.issues; + +import org.apache.camel.CamelContext; +import org.apache.camel.Exchange; +import org.apache.camel.Processor; +import org.apache.camel.builder.RouteBuilder; +import org.apache.camel.test.junit4.CamelTestSupport; +import org.junit.Test; + +import static org.apache.activemq.camel.component.ActiveMQComponent.activeMQComponent; + +/** + * @version $Revision$ + */ +public class JmsInOutUseMessageIDasCorrelationIDTest extends CamelTestSupport { + + @Test + public void testInOutWithMsgIdAsCorrId() throws Exception { + String reply = template.requestBody("activemq:queue:in?useMessageIDAsCorrelationID=true", "Hello World", String.class); + assertEquals("Bye World", reply); + } + + @Test + public void testInOutFixedReplyToAndWithMsgIdAsCorrId() throws Exception { + String reply = template.requestBody("activemq:queue:in?replyTo=bar&useMessageIDAsCorrelationID=true", "Hello World", String.class); + assertEquals("Bye World", reply); + } + + protected CamelContext createCamelContext() throws Exception { + CamelContext camelContext = super.createCamelContext(); + camelContext.addComponent("activemq", activeMQComponent("vm://localhost?broker.persistent=false&broker.useJmx=false")); + return camelContext; + } + + protected RouteBuilder createRouteBuilder() throws Exception { + return new RouteBuilder() { + public void configure() throws Exception { + from("activemq:queue:in?useMessageIDAsCorrelationID=true").process(new Processor() { + public void process(Exchange exchange) throws Exception { + String id = exchange.getIn().getHeader("JMSCorrelationID", String.class); + assertNull("JMSCorrelationID should be null", id); + + exchange.getOut().setBody("Bye World"); + } + }); + } + }; + } + +} \ No newline at end of file diff --git a/components/camel-jms/src/test/resources/log4j.properties b/components/camel-jms/src/test/resources/log4j.properties index bbebcddbaa2d0..1d04ba2af1587 100644 --- 
a/components/camel-jms/src/test/resources/log4j.properties +++ b/components/camel-jms/src/test/resources/log4j.properties @@ -37,4 +37,6 @@ log4j.logger.org.apache.camel.management=WARN log4j.logger.org.apache.camel.impl.DefaultPackageScanClassResolver=WARN #log4j.logger.org.apache.activemq.spring=WARN #log4j.logger.org.apache.camel.component.jms=TRACE +#log4j.logger.org.apache.camel.component.jms.reply.CorrelationMap=DEBUG #log4j.logger.org.apache.camel=DEBUG +#log4j.logger.org.springframework.jms.listener=TRACE diff --git a/components/camel-test/src/main/java/org/apache/camel/test/CamelTestSupport.java b/components/camel-test/src/main/java/org/apache/camel/test/CamelTestSupport.java index a4550d9d0836d..1eac2e525b5d4 100644 --- a/components/camel-test/src/main/java/org/apache/camel/test/CamelTestSupport.java +++ b/components/camel-test/src/main/java/org/apache/camel/test/CamelTestSupport.java @@ -51,9 +51,9 @@ */ public abstract class CamelTestSupport extends TestSupport { - protected CamelContext context; - protected ProducerTemplate template; - protected ConsumerTemplate consumer; + protected volatile CamelContext context; + protected volatile ProducerTemplate template; + protected volatile ConsumerTemplate consumer; private boolean useRouteBuilder = true; private Service camelContextService; diff --git a/components/camel-test/src/main/java/org/apache/camel/test/junit4/CamelTestSupport.java b/components/camel-test/src/main/java/org/apache/camel/test/junit4/CamelTestSupport.java index 775d1c5089e1d..c6811d0b1d15e 100644 --- a/components/camel-test/src/main/java/org/apache/camel/test/junit4/CamelTestSupport.java +++ b/components/camel-test/src/main/java/org/apache/camel/test/junit4/CamelTestSupport.java @@ -53,9 +53,9 @@ */ public abstract class CamelTestSupport extends TestSupport { - protected CamelContext context; - protected ProducerTemplate template; - protected ConsumerTemplate consumer; + protected volatile CamelContext context; + protected volatile 
ProducerTemplate template; + protected volatile ConsumerTemplate consumer; private boolean useRouteBuilder = true; private Service camelContextService;
f75570734bb41a61d66b10922b6fc1ed48119067
hadoop
YARN-2582. Fixed Log CLI and Web UI for showing- aggregated logs of LRS. Contributed Xuan Gong.--(cherry picked from commit e90718fa5a0e7c18592af61534668acebb9db51b)-
c
https://github.com/apache/hadoop
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 92512356d7e15..ccfc1db7d70bc 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -346,6 +346,9 @@ Release 2.6.0 - UNRELEASED YARN-2673. Made timeline client put APIs retry if ConnectException happens. (Li Lu via zjshen) + YARN-2582. Fixed Log CLI and Web UI for showing aggregated logs of LRS. (Xuan + Gong via zjshen) + OPTIMIZATIONS BUG FIXES diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java index eb6169cf36868..0b34a46281baf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java @@ -31,7 +31,6 @@ import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Tool; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -39,8 +38,6 @@ import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; -import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat; -import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils; import org.apache.hadoop.yarn.logaggregation.LogCLIHelpers; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -113,17 +110,16 @@ public int run(String[] args) throws Exception { System.err.println("Invalid ApplicationId specified"); return -1; } - + try { int resultCode = verifyApplicationState(appId); if (resultCode != 0) { - 
System.out.println("Application has not completed." + - " Logs are only available after an application completes"); + System.out.println("Logs are not avaiable right now."); return resultCode; } } catch (Exception e) { - System.err.println("Unable to get ApplicationState." + - " Attempting to fetch logs directly from the filesystem."); + System.err.println("Unable to get ApplicationState." + + " Attempting to fetch logs directly from the filesystem."); } LogCLIHelpers logCliHelper = new LogCLIHelpers(); @@ -141,18 +137,9 @@ public int run(String[] args) throws Exception { printHelpMessage(printOpts); resultCode = -1; } else { - Path remoteRootLogDir = - new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, - YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR)); - AggregatedLogFormat.LogReader reader = - new AggregatedLogFormat.LogReader(getConf(), - LogAggregationUtils.getRemoteNodeLogFileForApp( - remoteRootLogDir, - appId, - appOwner, - ConverterUtils.toNodeId(nodeAddress), - LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf()))); - resultCode = logCliHelper.dumpAContainerLogs(containerIdStr, reader, System.out); + resultCode = + logCliHelper.dumpAContainersLogs(appIdStr, containerIdStr, + nodeAddress, appOwner); } return resultCode; @@ -167,10 +154,10 @@ private int verifyApplicationState(ApplicationId appId) throws IOException, switch (appReport.getYarnApplicationState()) { case NEW: case NEW_SAVING: - case ACCEPTED: case SUBMITTED: - case RUNNING: return -1; + case ACCEPTED: + case RUNNING: case FAILED: case FINISHED: case KILLED: diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java index f02f3358a25f5..132dca245c5d6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java @@ -25,21 +25,38 @@ import static org.mockito.Mockito.mock; import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileWriter; import java.io.IOException; import java.io.PrintStream; import java.io.PrintWriter; +import java.io.Writer; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.junit.Assert; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.yarn.api.records.ApplicationAccessType; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.YarnApplicationState; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat; +import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils; import org.apache.hadoop.yarn.logaggregation.LogCLIHelpers; import org.junit.Before; import org.junit.Test; @@ -138,6 +155,116 @@ public void testHelpMessage() throws Exception { Assert.assertEquals(appReportStr, sysOutStream.toString()); } + @Test (timeout = 15000) + public void testFetchApplictionLogs() throws Exception { + String 
remoteLogRootDir = "target/logs/"; + Configuration configuration = new Configuration(); + configuration.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true); + configuration + .set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogRootDir); + configuration.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); + configuration.set(YarnConfiguration.YARN_ADMIN_ACL, "admin"); + FileSystem fs = FileSystem.get(configuration); + + UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); + ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 1); + ApplicationAttemptId appAttemptId = + ApplicationAttemptIdPBImpl.newInstance(appId, 1); + ContainerId containerId1 = ContainerIdPBImpl.newInstance(appAttemptId, 1); + ContainerId containerId2 = ContainerIdPBImpl.newInstance(appAttemptId, 2); + + NodeId nodeId = NodeId.newInstance("localhost", 1234); + + // create local logs + String rootLogDir = "target/LocalLogs"; + Path rootLogDirPath = new Path(rootLogDir); + if (fs.exists(rootLogDirPath)) { + fs.delete(rootLogDirPath, true); + } + assertTrue(fs.mkdirs(rootLogDirPath)); + + Path appLogsDir = new Path(rootLogDirPath, appId.toString()); + if (fs.exists(appLogsDir)) { + fs.delete(appLogsDir, true); + } + assertTrue(fs.mkdirs(appLogsDir)); + List<String> rootLogDirs = Arrays.asList(rootLogDir); + + // create container logs in localLogDir + createContainerLogInLocalDir(appLogsDir, containerId1, fs); + createContainerLogInLocalDir(appLogsDir, containerId2, fs); + + Path path = + new Path(remoteLogRootDir + ugi.getShortUserName() + + "/logs/application_0_0001"); + if (fs.exists(path)) { + fs.delete(path, true); + } + assertTrue(fs.mkdirs(path)); + // upload container logs into remote directory + uploadContainerLogIntoRemoteDir(ugi, configuration, rootLogDirs, nodeId, + containerId1, path, fs); + uploadContainerLogIntoRemoteDir(ugi, configuration, rootLogDirs, nodeId, + containerId2, path, fs); + + YarnClient mockYarnClient = + 
createMockYarnClient(YarnApplicationState.FINISHED); + LogsCLI cli = new LogsCLIForTest(mockYarnClient); + cli.setConf(configuration); + + int exitCode = cli.run(new String[] { "-applicationId", appId.toString() }); + assertTrue(exitCode == 0); + assertTrue(sysOutStream.toString().contains( + "Hello container_0_0001_01_000001!")); + assertTrue(sysOutStream.toString().contains( + "Hello container_0_0001_01_000002!")); + sysOutStream.reset(); + + exitCode = + cli.run(new String[] { "-applicationId", appId.toString(), + "-nodeAddress", nodeId.toString(), "-containerId", + containerId1.toString() }); + assertTrue(exitCode == 0); + assertTrue(sysOutStream.toString().contains( + "Hello container_0_0001_01_000001!")); + + fs.delete(new Path(remoteLogRootDir), true); + fs.delete(new Path(rootLogDir), true); + } + + private static void createContainerLogInLocalDir(Path appLogsDir, + ContainerId containerId, FileSystem fs) throws Exception { + Path containerLogsDir = new Path(appLogsDir, containerId.toString()); + if (fs.exists(containerLogsDir)) { + fs.delete(containerLogsDir, true); + } + assertTrue(fs.mkdirs(containerLogsDir)); + Writer writer = + new FileWriter(new File(containerLogsDir.toString(), "sysout")); + writer.write("Hello " + containerId + "!"); + writer.close(); + } + + private static void uploadContainerLogIntoRemoteDir(UserGroupInformation ugi, + Configuration configuration, List<String> rootLogDirs, NodeId nodeId, + ContainerId containerId, Path appDir, FileSystem fs) throws Exception { + Path path = + new Path(appDir, LogAggregationUtils.getNodeString(nodeId) + + System.currentTimeMillis()); + AggregatedLogFormat.LogWriter writer = + new AggregatedLogFormat.LogWriter(configuration, path, ugi); + writer.writeApplicationOwner(ugi.getUserName()); + + Map<ApplicationAccessType, String> appAcls = + new HashMap<ApplicationAccessType, String>(); + appAcls.put(ApplicationAccessType.VIEW_APP, ugi.getUserName()); + writer.writeApplicationACLs(appAcls); + 
writer.append(new AggregatedLogFormat.LogKey(containerId), + new AggregatedLogFormat.LogValue(rootLogDirs, containerId, + UserGroupInformation.getCurrentUser().getShortUserName())); + writer.close(); + } + private YarnClient createMockYarnClient(YarnApplicationState appState) throws YarnException, IOException { YarnClient mockClient = mock(YarnClient.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java index fe4983e70b2b1..34c9100cc8ba7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java @@ -110,4 +110,9 @@ public static String getRemoteNodeLogDirSuffix(Configuration conf) { public static String getNodeString(NodeId nodeId) { return nodeId.toString().replace(":", "_"); } + + @VisibleForTesting + public static String getNodeString(String nodeId) { + return nodeId.toString().replace(":", "_"); + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java index 3bafdb35438af..9efdef891d2e5 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java @@ -52,19 +52,47 @@ public int dumpAContainersLogs(String appId, String containerId, YarnConfiguration.NM_REMOTE_APP_LOG_DIR, YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR)); String 
suffix = LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf()); - Path logPath = LogAggregationUtils.getRemoteNodeLogFileForApp( + Path remoteAppLogDir = LogAggregationUtils.getRemoteAppLogDir( remoteRootLogDir, ConverterUtils.toApplicationId(appId), jobOwner, - ConverterUtils.toNodeId(nodeId), suffix); - AggregatedLogFormat.LogReader reader; + suffix); + RemoteIterator<FileStatus> nodeFiles; try { - reader = new AggregatedLogFormat.LogReader(getConf(), logPath); - } catch (FileNotFoundException fnfe) { - System.out.println("Logs not available at " + logPath.toString()); - System.out - .println("Log aggregation has not completed or is not enabled."); + Path qualifiedLogDir = + FileContext.getFileContext(getConf()).makeQualified( + remoteAppLogDir); + nodeFiles = + FileContext.getFileContext(qualifiedLogDir.toUri(), getConf()) + .listStatus(remoteAppLogDir); + } catch (FileNotFoundException fnf) { + logDirNotExist(remoteAppLogDir.toString()); + return -1; + } + boolean foundContainerLogs = false; + while (nodeFiles.hasNext()) { + FileStatus thisNodeFile = nodeFiles.next(); + String fileName = thisNodeFile.getPath().getName(); + if (fileName.contains(LogAggregationUtils.getNodeString(nodeId)) + && !fileName.endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) { + AggregatedLogFormat.LogReader reader = null; + try { + reader = + new AggregatedLogFormat.LogReader(getConf(), + thisNodeFile.getPath()); + if (dumpAContainerLogs(containerId, reader, System.out) > -1) { + foundContainerLogs = true; + } + } finally { + if (reader != null) { + reader.close(); + } + } + } + } + if (!foundContainerLogs) { + containerLogNotFound(containerId); return -1; } - return dumpAContainerLogs(containerId, reader, System.out); + return 0; } @Private @@ -81,8 +109,7 @@ public int dumpAContainerLogs(String containerIdStr, } if (valueStream == null) { - System.out.println("Logs for container " + containerIdStr - + " are not present in this log-file."); + containerLogNotFound(containerIdStr); 
return -1; } @@ -114,42 +141,49 @@ public int dumpAllContainersLogs(ApplicationId appId, String appOwner, nodeFiles = FileContext.getFileContext(qualifiedLogDir.toUri(), getConf()).listStatus(remoteAppLogDir); } catch (FileNotFoundException fnf) { - System.out.println("Logs not available at " + remoteAppLogDir.toString()); - System.out - .println("Log aggregation has not completed or is not enabled."); + logDirNotExist(remoteAppLogDir.toString()); return -1; } + boolean foundAnyLogs = false; while (nodeFiles.hasNext()) { FileStatus thisNodeFile = nodeFiles.next(); - AggregatedLogFormat.LogReader reader = new AggregatedLogFormat.LogReader( - getConf(), new Path(remoteAppLogDir, thisNodeFile.getPath().getName())); - try { + if (!thisNodeFile.getPath().getName() + .endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) { + AggregatedLogFormat.LogReader reader = + new AggregatedLogFormat.LogReader(getConf(), thisNodeFile.getPath()); + try { + + DataInputStream valueStream; + LogKey key = new LogKey(); + valueStream = reader.next(key); - DataInputStream valueStream; - LogKey key = new LogKey(); - valueStream = reader.next(key); - - while (valueStream != null) { - String containerString = "\n\nContainer: " + key + " on " - + thisNodeFile.getPath().getName(); - out.println(containerString); - out.println(StringUtils.repeat("=", containerString.length())); - while (true) { - try { - LogReader.readAContainerLogsForALogType(valueStream, out); - } catch (EOFException eof) { - break; + while (valueStream != null) { + String containerString = + "\n\nContainer: " + key + " on " + thisNodeFile.getPath().getName(); + out.println(containerString); + out.println(StringUtils.repeat("=", containerString.length())); + while (true) { + try { + LogReader.readAContainerLogsForALogType(valueStream, out); + foundAnyLogs = true; + } catch (EOFException eof) { + break; + } } - } - // Next container - key = new LogKey(); - valueStream = reader.next(key); + // Next container + key = new LogKey(); + 
valueStream = reader.next(key); + } + } finally { + reader.close(); } - } finally { - reader.close(); } } + if (! foundAnyLogs) { + emptyLogDir(remoteAppLogDir.toString()); + return -1; + } return 0; } @@ -162,4 +196,18 @@ public void setConf(Configuration conf) { public Configuration getConf() { return this.conf; } + + private static void containerLogNotFound(String containerId) { + System.out.println("Logs for container " + containerId + + " are not present in this log-file."); + } + + private static void logDirNotExist(String remoteAppLogDir) { + System.out.println(remoteAppLogDir + "does not exist."); + System.out.println("Log aggregation has not completed or is not enabled."); + } + + private static void emptyLogDir(String remoteAppLogDir) { + System.out.println(remoteAppLogDir + "does not have any log files."); + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java index 2b83e6941e4f8..16e635994b533 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java @@ -30,7 +30,10 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileContext; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -59,113 +62,127 @@ public class AggregatedLogsBlock extends HtmlBlock { @Override protected void render(Block html) { - 
AggregatedLogFormat.LogReader reader = null; - try { - ContainerId containerId = verifyAndGetContainerId(html); - NodeId nodeId = verifyAndGetNodeId(html); - String appOwner = verifyAndGetAppOwner(html); - LogLimits logLimits = verifyAndGetLogLimits(html); - if (containerId == null || nodeId == null || appOwner == null - || appOwner.isEmpty() || logLimits == null) { - return; - } - - ApplicationId applicationId = containerId.getApplicationAttemptId() - .getApplicationId(); - String logEntity = $(ENTITY_STRING); - if (logEntity == null || logEntity.isEmpty()) { - logEntity = containerId.toString(); - } + ContainerId containerId = verifyAndGetContainerId(html); + NodeId nodeId = verifyAndGetNodeId(html); + String appOwner = verifyAndGetAppOwner(html); + LogLimits logLimits = verifyAndGetLogLimits(html); + if (containerId == null || nodeId == null || appOwner == null + || appOwner.isEmpty() || logLimits == null) { + return; + } - if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, - YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) { - html.h1() - ._("Aggregation is not enabled. Try the nodemanager at " + nodeId) - ._(); - return; - } + ApplicationId applicationId = containerId.getApplicationAttemptId() + .getApplicationId(); + String logEntity = $(ENTITY_STRING); + if (logEntity == null || logEntity.isEmpty()) { + logEntity = containerId.toString(); + } - Path remoteRootLogDir = new Path(conf.get( - YarnConfiguration.NM_REMOTE_APP_LOG_DIR, - YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR)); + if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, + YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) { + html.h1() + ._("Aggregation is not enabled. 
Try the nodemanager at " + nodeId) + ._(); + return; + } - try { - reader = new AggregatedLogFormat.LogReader(conf, - LogAggregationUtils.getRemoteNodeLogFileForApp(remoteRootLogDir, - applicationId, appOwner, nodeId, - LogAggregationUtils.getRemoteNodeLogDirSuffix(conf))); - } catch (FileNotFoundException e) { - // ACLs not available till the log file is opened. - html.h1() - ._("Logs not available for " + logEntity - + ". Aggregation may not be complete, " - + "Check back later or try the nodemanager at " + nodeId)._(); - return; - } catch (IOException e) { - html.h1()._("Error getting logs for " + logEntity)._(); - LOG.error("Error getting logs for " + logEntity, e); - return; - } + Path remoteRootLogDir = new Path(conf.get( + YarnConfiguration.NM_REMOTE_APP_LOG_DIR, + YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR)); + Path remoteAppDir = LogAggregationUtils.getRemoteAppLogDir( + remoteRootLogDir, applicationId, appOwner, + LogAggregationUtils.getRemoteNodeLogDirSuffix(conf)); + RemoteIterator<FileStatus> nodeFiles; + try { + Path qualifiedLogDir = + FileContext.getFileContext(conf).makeQualified( + remoteAppDir); + nodeFiles = + FileContext.getFileContext(qualifiedLogDir.toUri(), conf) + .listStatus(remoteAppDir); + } catch (FileNotFoundException fnf) { + html.h1() + ._("Logs not available for " + logEntity + + ". 
Aggregation may not be complete, " + + "Check back later or try the nodemanager at " + nodeId)._(); + return; + } catch (Exception ex) { + html.h1() + ._("Error getting logs at " + nodeId)._(); + return; + } - String owner = null; - Map<ApplicationAccessType, String> appAcls = null; - try { - owner = reader.getApplicationOwner(); - appAcls = reader.getApplicationAcls(); - } catch (IOException e) { - html.h1()._("Error getting logs for " + logEntity)._(); - LOG.error("Error getting logs for " + logEntity, e); - return; - } - ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf); - aclsManager.addApplication(applicationId, appAcls); + boolean foundLog = false; + String desiredLogType = $(CONTAINER_LOG_TYPE); + try { + while (nodeFiles.hasNext()) { + AggregatedLogFormat.LogReader reader = null; + try { + FileStatus thisNodeFile = nodeFiles.next(); + if (!thisNodeFile.getPath().getName() + .contains(LogAggregationUtils.getNodeString(nodeId)) + || thisNodeFile.getPath().getName() + .endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) { + continue; + } + reader = + new AggregatedLogFormat.LogReader(conf, thisNodeFile.getPath()); + + String owner = null; + Map<ApplicationAccessType, String> appAcls = null; + try { + owner = reader.getApplicationOwner(); + appAcls = reader.getApplicationAcls(); + } catch (IOException e) { + LOG.error("Error getting logs for " + logEntity, e); + continue; + } + ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf); + aclsManager.addApplication(applicationId, appAcls); - String remoteUser = request().getRemoteUser(); - UserGroupInformation callerUGI = null; - if (remoteUser != null) { - callerUGI = UserGroupInformation.createRemoteUser(remoteUser); - } - if (callerUGI != null - && !aclsManager.checkAccess(callerUGI, + String remoteUser = request().getRemoteUser(); + UserGroupInformation callerUGI = null; + if (remoteUser != null) { + callerUGI = UserGroupInformation.createRemoteUser(remoteUser); + } + if (callerUGI 
!= null && !aclsManager.checkAccess(callerUGI, ApplicationAccessType.VIEW_APP, owner, applicationId)) { - html.h1() - ._("User [" + remoteUser - + "] is not authorized to view the logs for " + logEntity)._(); - return; - } + html.h1() + ._("User [" + remoteUser + + "] is not authorized to view the logs for " + logEntity + + " in log file [" + thisNodeFile.getPath().getName() + "]")._(); + LOG.error("User [" + remoteUser + + "] is not authorized to view the logs for " + logEntity); + continue; + } - String desiredLogType = $(CONTAINER_LOG_TYPE); - try { - AggregatedLogFormat.ContainerLogsReader logReader = reader + AggregatedLogFormat.ContainerLogsReader logReader = reader .getContainerLogsReader(containerId); - if (logReader == null) { - html.h1() - ._("Logs not available for " + logEntity - + ". Could be caused by the rentention policy")._(); - return; - } - - boolean foundLog = readContainerLogs(html, logReader, logLimits, - desiredLogType); - - if (!foundLog) { - if (desiredLogType.isEmpty()) { - html.h1("No logs available for container " + containerId.toString()); - } else { - html.h1("Unable to locate '" + desiredLogType - + "' log for container " + containerId.toString()); + if (logReader == null) { + continue; } - return; + + foundLog = readContainerLogs(html, logReader, logLimits, + desiredLogType); + } catch (IOException ex) { + LOG.error("Error getting logs for " + logEntity, ex); + continue; + } finally { + if (reader != null) + reader.close(); } - } catch (IOException e) { - html.h1()._("Error getting logs for " + logEntity)._(); - LOG.error("Error getting logs for " + logEntity, e); - return; } - } finally { - if (reader != null) { - reader.close(); + if (!foundLog) { + if (desiredLogType.isEmpty()) { + html.h1("No logs available for container " + containerId.toString()); + } else { + html.h1("Unable to locate '" + desiredLogType + + "' log for container " + containerId.toString()); + } } + } catch (IOException e) { + html.h1()._("Error getting logs 
for " + logEntity)._(); + LOG.error("Error getting logs for " + logEntity, e); } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java index 502d2dc2b584a..0a17433c44fca 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogsBlock.java @@ -47,7 +47,6 @@ import org.apache.hadoop.yarn.webapp.view.BlockForTest; import org.apache.hadoop.yarn.webapp.view.HtmlBlock; import org.apache.hadoop.yarn.webapp.view.HtmlBlockForTest; -import org.junit.Ignore; import org.junit.Test; import static org.mockito.Mockito.*; @@ -149,10 +148,8 @@ public void testAggregatedLogsBlock() throws Exception { } /** * Log files was deleted. - * TODO: YARN-2582: fix log web ui for Long Running application * @throws Exception */ - @Ignore @Test public void testNoLogs() throws Exception {
14f8d46f2fdfc5994b025ba2b4d19ba685f90b2e
hbase
HBASE-9366 TestHTraceHooks.testTraceCreateTable- errors out sometimes.--git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1523816 13f79535-47bb-0310-9956-ffa450edef68-
c
https://github.com/apache/hbase
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java index faf015cb347b..3595c392e34f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/trace/TestHTraceHooks.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.MediumTests; +import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.cloudera.htrace.Sampler; @@ -71,6 +72,15 @@ public void testTraceCreateTable() throws Exception { tableCreationSpan.close(); } + // Some table creation is async. Need to make sure that everything is full in before + // checking to see if the spans are there. + TEST_UTIL.waitFor(1000, new Waiter.Predicate<Exception>() { + @Override + public boolean evaluate() throws Exception { + return rcvr.getSpans().size() >= 5; + } + }); + Collection<Span> spans = rcvr.getSpans(); TraceTree traceTree = new TraceTree(spans); Collection<Span> roots = traceTree.getRoots();
5ec3d92183e6c7fda25e098c570bfef29ceb0625
arquillian$arquillian-graphene
ARQGRA-229: Support for JavaScript interfaces defined as abstract classes
a
https://github.com/arquillian/arquillian-graphene
diff --git a/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/javascript/JavaScriptPageExtensionTestCase.java b/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/javascript/JavaScriptPageExtensionTestCase.java index 372109683..bd26e300f 100644 --- a/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/javascript/JavaScriptPageExtensionTestCase.java +++ b/graphene-webdriver/graphene-webdriver-ftest/src/test/java/org/jboss/arquillian/graphene/ftest/javascript/JavaScriptPageExtensionTestCase.java @@ -32,6 +32,7 @@ import org.junit.Test; import org.junit.Assert; import org.junit.runner.RunWith; +import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; @@ -77,6 +78,13 @@ public void testWithoutSourceAndWithInterfaceDependencies() { loadPage(); JSInterfaceFactory.create(Document2.class).getTitle(); } + + @Test + public void testAbstractClass() { + loadPage(); + Document3 document = JSInterfaceFactory.create(Document3.class); + Assert.assertEquals(browser.findElement(By.tagName("h1")), document.getHeader()); + } @JavaScript("document") public static interface Document { @@ -91,17 +99,31 @@ public static interface Document { public static interface Document2 { String getTitle(); } + + @JavaScript("document") + public abstract class Document3 { + + public abstract List<WebElement> getElementsByTagName(String tagName); + + public WebElement getHeader() { + List<WebElement> elements = getElementsByTagName("h1"); + if (elements.iterator().hasNext()) { + return elements.iterator().next(); + } + return null; + } + } @JavaScript(value = "Document.helloworld") @Dependency(sources = {"org/jboss/arquillian/graphene/ftest/javascript/hello-world.js"}) - private interface HelloWorld extends InstallableJavaScript { + public interface HelloWorld extends InstallableJavaScript { String hello(); } 
@JavaScript(value = "Document.helloworld2") @Dependency(sources = {"org/jboss/arquillian/graphene/ftest/javascript/hello-world2.js"}, interfaces=HelloWorld.class) - private interface HelloWorld2 { + public interface HelloWorld2 { String hello(); } diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/GrapheneExtension.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/GrapheneExtension.java index fbe6d2c2b..e1f3f60e0 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/GrapheneExtension.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/GrapheneExtension.java @@ -48,5 +48,4 @@ public void register(ExtensionBuilder builder) { /** Page Extensions */ builder.observer(GraphenePageExtensionRegistrar.class); } - } diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/cglib/ClassImposterizer.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/cglib/ClassImposterizer.java new file mode 100644 index 000000000..e2327fee5 --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/cglib/ClassImposterizer.java @@ -0,0 +1,177 @@ +/** + * Thanks to Mockito guys for some modifications. This class has been further modified for use in lambdaj + * and then modified for use in Arquillian Graphene project. + * + * Mockito License for redistributed, modified file. + * +Copyright (c) 2007 Mockito contributors +This program is made available under the terms of the MIT License. + * + * + * jMock License for original distributed file + * +Copyright (c) 2000-2007, jMock.org +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this list of +conditions and the following disclaimer. Redistributions in binary form must reproduce +the above copyright notice, this list of conditions and the following disclaimer in +the documentation and/or other materials provided with the distribution. + +Neither the name of jMock nor the names of its contributors may be used to endorse +or promote products derived from this software without specific prior written +permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT +SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY +WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. 
+*/ +package org.jboss.arquillian.graphene.cglib; + +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import net.sf.cglib.core.DefaultNamingPolicy; +import net.sf.cglib.core.NamingPolicy; +import net.sf.cglib.core.Predicate; +import net.sf.cglib.proxy.Callback; +import net.sf.cglib.proxy.CallbackFilter; +import net.sf.cglib.proxy.Enhancer; +import net.sf.cglib.proxy.Factory; +import net.sf.cglib.proxy.MethodInterceptor; +import net.sf.cglib.proxy.NoOp; + +import org.objenesis.Objenesis; +import org.objenesis.ObjenesisStd; + +/** + * Thanks to jMock guys for this handy class that wraps all the cglib magic. + * In particular it workarounds a cglib limitation by allowing to proxy a class even if the misses a no args constructor. + * + * @author Mario Fusco + * @author Sebastian Jancke + */ +@SuppressWarnings("rawtypes") +public class ClassImposterizer { + + protected ClassImposterizer() {} + + private final Objenesis objenesis = new ObjenesisStd(); + + private static final NamingPolicy DEFAULT_POLICY = new DefaultNamingPolicy() { + /** + * {@inheritDoc} + */ + @Override + protected String getTag() { + return "CGLIB"; + } + }; + + private static final NamingPolicy SIGNED_CLASSES_POLICY = new DefaultNamingPolicy() { + /** + * {@inheritDoc} + */ + @Override + public String getClassName(String prefix, String source, Object key, Predicate names) { + return "codegen." + super.getClassName(prefix, source, key, names); + } + + /** + * {@inheritDoc} + */ + @Override + protected String getTag() { + return "CGLIB"; + } + }; + + private static final CallbackFilter IGNORE_BRIDGE_METHODS = new CallbackFilter() { + public int accept(Method method) { + return method.isBridge() ? 1 : 0; + } + }; + + protected <T> T imposteriseProtected(MethodInterceptor interceptor, Class<?> mockedType, Class<?>... 
ancillaryTypes) { + if (mockedType.isInterface()) { + return imposteriseInterface(interceptor, mockedType, ancillaryTypes); + } else { + return imposteriseClass(interceptor, mockedType, ancillaryTypes); + } + } + + @SuppressWarnings("unchecked") + protected <T> T imposteriseClass(MethodInterceptor interceptor, Class<?> mockedType, Class<?>... ancillaryTypes) { + setConstructorsAccessible(mockedType, true); + Class<?> proxyClass = createProxyClass(mockedType, ancillaryTypes); + return (T) mockedType.cast(createProxy(proxyClass, interceptor)); + } + + protected <T> T imposteriseInterface(MethodInterceptor interceptor, Class<?> mockedInterface, Class<?>... ancillaryTypes) { + + if (!Modifier.isPublic(mockedInterface.getModifiers())) { + throw new IllegalArgumentException("Imposterized interface must be public: " + mockedInterface); + } + + List<Class<?>> list = new ArrayList<Class<?>>(Arrays.asList(ancillaryTypes)); + list.add(mockedInterface); + + Class<?>[] interfaces = list.toArray(new Class<?>[list.size()]); + + return imposteriseClass(interceptor, Object.class, interfaces); + } + + private void setConstructorsAccessible(Class<?> mockedType, boolean accessible) { + for (Constructor<?> constructor : mockedType.getDeclaredConstructors()) { + constructor.setAccessible(accessible); + } + } + + private Class<?> createProxyClass(Class<?> mockedType, Class<?>...interfaces) { + if (mockedType == Object.class) mockedType = ClassWithSuperclassToWorkAroundCglibBug.class; + + Enhancer enhancer = new ClassEnhancer(); + enhancer.setUseFactory(true); + enhancer.setSuperclass(mockedType); + enhancer.setInterfaces(interfaces); + + enhancer.setCallbackTypes(new Class[]{MethodInterceptor.class, NoOp.class}); + enhancer.setCallbackFilter(IGNORE_BRIDGE_METHODS); + enhancer.setNamingPolicy(mockedType.getSigners() != null ? 
SIGNED_CLASSES_POLICY : DEFAULT_POLICY); + + return enhancer.createClass(); + } + + private static class ClassEnhancer extends Enhancer { + /** + * {@inheritDoc} + */ + @Override + protected void filterConstructors(Class sc, List constructors) { } + } + + private Object createProxy(Class<?> proxyClass, Callback callback) { + Factory proxy = (Factory) objenesis.newInstance(proxyClass); + proxy.setCallbacks(new Callback[] {callback, NoOp.INSTANCE}); + return proxy; + } + + /** + * Class With Superclass To WorkAround Cglib Bug + */ + public static class ClassWithSuperclassToWorkAroundCglibBug {} +} \ No newline at end of file diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/condition/element/ElementValueEquals.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/condition/element/ElementValueEquals.java new file mode 100644 index 000000000..5e9552c67 --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/condition/element/ElementValueEquals.java @@ -0,0 +1,45 @@ +/** + * JBoss, Home of Professional Open Source + * Copyright 2012, Red Hat, Inc. and individual contributors + * by the @authors tag. See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * This is free software; you can redistribute it and/or modify it + * under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * This software is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this software; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301 USA, or see the FSF site: http://www.fsf.org. + */ +package org.jboss.arquillian.graphene.condition.element; + +import org.openqa.selenium.WebDriver; +import org.openqa.selenium.WebElement; + +/** + * @author <a href="mailto:[email protected]">Jan Papousek</a> + */ +public class ElementValueEquals extends AbstractElementAndTextBooleanCondition { + + public ElementValueEquals(WebElement element, String text) { + super(element, text); + } + + public ElementValueEquals(WebElement element, String text, boolean negation) { + super(element, text, negation); + } + + @Override + protected Boolean check(WebDriver driver) { + return getElement().getText().equals(getText()); + } + +} diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/ClassImposterizer.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/ClassImposterizer.java new file mode 100644 index 000000000..1084e5d1b --- /dev/null +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/ClassImposterizer.java @@ -0,0 +1,12 @@ +package org.jboss.arquillian.graphene.javascript; + +import net.sf.cglib.proxy.MethodInterceptor; + +class ClassImposterizer extends org.jboss.arquillian.graphene.cglib.ClassImposterizer { + + static final ClassImposterizer INSTANCE = new ClassImposterizer(); + + <T> T imposterise(MethodInterceptor interceptor, Class<T> mockedType, Class<?>... 
ancillaryTypes) { + return INSTANCE.imposteriseProtected(interceptor, mockedType, ancillaryTypes); + } +} diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/JSInterfaceFactory.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/JSInterfaceFactory.java index ade4b8741..4e54f2f0d 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/JSInterfaceFactory.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/JSInterfaceFactory.java @@ -1,6 +1,6 @@ package org.jboss.arquillian.graphene.javascript; -import java.lang.reflect.Proxy; +import java.lang.reflect.Modifier; public class JSInterfaceFactory<T> { @@ -8,8 +8,8 @@ public class JSInterfaceFactory<T> { private JSInterfaceFactory(Class<T> jsInterface) { - if (!jsInterface.isInterface()) { - throw new IllegalArgumentException("interface must be provided"); + if (!jsInterface.isInterface() && !Modifier.isAbstract(jsInterface.getModifiers())) { + throw new IllegalArgumentException("interface or abstract class must be provided :" + jsInterface); } this.handler = new JSInterfaceHandler(new JSTarget(jsInterface)); @@ -23,7 +23,6 @@ public static <T> T create(Class<T> jsInterface) { @SuppressWarnings("unchecked") public T instantiate() { Class<?> jsInterface = handler.getTarget().getInterface(); - return (T) Proxy.newProxyInstance(jsInterface.getClassLoader(), new Class<?>[] { jsInterface }, handler); + return (T) ClassImposterizer.INSTANCE.imposterise(handler, jsInterface); } - } diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/JSInterfaceHandler.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/JSInterfaceHandler.java index 13b30b73f..70eb57735 100644 --- 
a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/JSInterfaceHandler.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/javascript/JSInterfaceHandler.java @@ -1,9 +1,12 @@ package org.jboss.arquillian.graphene.javascript; -import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; +import java.lang.reflect.Modifier; -public class JSInterfaceHandler implements InvocationHandler { +import net.sf.cglib.proxy.MethodInterceptor; +import net.sf.cglib.proxy.MethodProxy; + +public class JSInterfaceHandler implements MethodInterceptor { private JSTarget target; @@ -16,10 +19,14 @@ public JSTarget getTarget() { } @Override - public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { + public Object intercept(Object obj, Method method, Object[] args, MethodProxy methodProxy) throws Throwable { + if (!target.getInterface().isInterface()) { + if (!Modifier.isAbstract(method.getModifiers())) { + return methodProxy.invokeSuper(obj, args); + } + } args = (args != null) ? args : new Object[]{}; JSCall call = new JSCall(new JSMethod(target, method), args); return target.getResolver().execute(call); } - } diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/proxy/ClassImposterizer.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/proxy/ClassImposterizer.java index 6e2e9adc7..aee57d0cc 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/proxy/ClassImposterizer.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/proxy/ClassImposterizer.java @@ -1,144 +1,12 @@ -/** - * Thanks to Mockito guys for some modifications. This class has been further modified for use in lambdaj - * and then modified for use in Arquillian Graphene project. 
- * - * Mockito License for redistributed, modified file. - * -Copyright (c) 2007 Mockito contributors -This program is made available under the terms of the MIT License. - * - * - * jMock License for original distributed file - * -Copyright (c) 2000-2007, jMock.org -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -Redistributions of source code must retain the above copyright notice, this list of -conditions and the following disclaimer. Redistributions in binary form must reproduce -the above copyright notice, this list of conditions and the following disclaimer in -the documentation and/or other materials provided with the distribution. - -Neither the name of jMock nor the names of its contributors may be used to endorse -or promote products derived from this software without specific prior written -permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY -EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES -OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT -SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY -WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH -DAMAGE. 
-*/ package org.jboss.arquillian.graphene.proxy; -import java.lang.reflect.*; -import java.util.*; - -import net.sf.cglib.core.*; -import net.sf.cglib.proxy.*; - -import org.objenesis.*; +import net.sf.cglib.proxy.MethodInterceptor; -/** - * Thanks to jMock guys for this handy class that wraps all the cglib magic. - * In particular it workarounds a cglib limitation by allowing to proxy a class even if the misses a no args constructor. - * - * @author Mario Fusco - * @author Sebastian Jancke - */ -@SuppressWarnings("rawtypes") -final class ClassImposterizer { +class ClassImposterizer extends org.jboss.arquillian.graphene.cglib.ClassImposterizer { static final ClassImposterizer INSTANCE = new ClassImposterizer(); - private ClassImposterizer() {} - - private final Objenesis objenesis = new ObjenesisStd(); - - private static final NamingPolicy DEFAULT_POLICY = new DefaultNamingPolicy() { - /** - * {@inheritDoc} - */ - @Override - protected String getTag() { - return "CGLIB"; - } - }; - - private static final NamingPolicy SIGNED_CLASSES_POLICY = new DefaultNamingPolicy() { - /** - * {@inheritDoc} - */ - @Override - public String getClassName(String prefix, String source, Object key, Predicate names) { - return "codegen." + super.getClassName(prefix, source, key, names); - } - - /** - * {@inheritDoc} - */ - @Override - protected String getTag() { - return "CGLIB"; - } - }; - - private static final CallbackFilter IGNORE_BRIDGE_METHODS = new CallbackFilter() { - public int accept(Method method) { - return method.isBridge() ? 1 : 0; - } - }; - - <T> T imposterise(Callback callback, Class<T> mockedType, Class<?>... ancillaryTypes) { - setConstructorsAccessible(mockedType, true); - Class<?> proxyClass = createProxyClass(mockedType, ancillaryTypes); - return mockedType.cast(createProxy(proxyClass, callback)); + <T> T imposterise(MethodInterceptor interceptor, Class<T> mockedType, Class<?>... 
ancillaryTypes) { + return INSTANCE.imposteriseProtected(interceptor, mockedType, ancillaryTypes); } - - private void setConstructorsAccessible(Class<?> mockedType, boolean accessible) { - for (Constructor<?> constructor : mockedType.getDeclaredConstructors()) { - constructor.setAccessible(accessible); - } - } - - private Class<?> createProxyClass(Class<?> mockedType, Class<?>...interfaces) { - if (mockedType == Object.class) mockedType = ClassWithSuperclassToWorkAroundCglibBug.class; - - Enhancer enhancer = new ClassEnhancer(); - enhancer.setUseFactory(true); - enhancer.setSuperclass(mockedType); - enhancer.setInterfaces(interfaces); - - enhancer.setCallbackTypes(new Class[]{MethodInterceptor.class, NoOp.class}); - enhancer.setCallbackFilter(IGNORE_BRIDGE_METHODS); - enhancer.setNamingPolicy(mockedType.getSigners() != null ? SIGNED_CLASSES_POLICY : DEFAULT_POLICY); - - return enhancer.createClass(); - } - - private static class ClassEnhancer extends Enhancer { - /** - * {@inheritDoc} - */ - @Override - protected void filterConstructors(Class sc, List constructors) { } - } - - private Object createProxy(Class<?> proxyClass, Callback callback) { - Factory proxy = (Factory) objenesis.newInstance(proxyClass); - proxy.setCallbacks(new Callback[] {callback, NoOp.INSTANCE}); - return proxy; - } - - /** - * Class With Superclass To WorkAround Cglib Bug - */ - public static class ClassWithSuperclassToWorkAroundCglibBug {} -} \ No newline at end of file +} diff --git a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/proxy/GrapheneProxy.java b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/proxy/GrapheneProxy.java index 5f337ee05..dcba81cc5 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/proxy/GrapheneProxy.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/main/java/org/jboss/arquillian/graphene/proxy/GrapheneProxy.java @@ -24,7 +24,6 @@ 
import java.lang.reflect.Modifier; import java.lang.reflect.Proxy; -import java.util.Arrays; /** * GrapheneProxy provides methods for wrapping the target of invocation in the diff --git a/graphene-webdriver/graphene-webdriver-impl/src/test/java/org/jboss/arquillian/graphene/proxy/TestClassImposterizer.java b/graphene-webdriver/graphene-webdriver-impl/src/test/java/org/jboss/arquillian/graphene/proxy/TestClassImposterizer.java index 1530bc269..25b764842 100644 --- a/graphene-webdriver/graphene-webdriver-impl/src/test/java/org/jboss/arquillian/graphene/proxy/TestClassImposterizer.java +++ b/graphene-webdriver/graphene-webdriver-impl/src/test/java/org/jboss/arquillian/graphene/proxy/TestClassImposterizer.java @@ -21,7 +21,7 @@ */ package org.jboss.arquillian.graphene.proxy; -import org.jboss.arquillian.graphene.proxy.ClassImposterizer; +import static org.junit.Assert.assertTrue; import net.sf.cglib.proxy.MethodInterceptor; import org.junit.Test; @@ -29,7 +29,6 @@ import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; - /** * @author Lukas Fryc */ @@ -40,10 +39,45 @@ public class TestClassImposterizer { MethodInterceptor interceptor; @Test - public void test() { - ClassImposterizer.INSTANCE.imposterise(interceptor, TestingClass.class, new Class<?>[] {}); + public void testClass() { + Object object = ClassImposterizer.INSTANCE.imposterise(interceptor, TestingClass.class); + assertTrue(object instanceof TestingClass); + } + + @Test + public void testInterface() { + Object object = ClassImposterizer.INSTANCE.imposterise(interceptor, TestingInterface.class); + assertTrue(object instanceof TestingInterface); + } + + @Test + public void testAbstractClass() { + Object object = ClassImposterizer.INSTANCE.imposterise(interceptor, TestingAbstractClass.class); + assertTrue(object instanceof TestingAbstractClass); + } + + @Test + public void testClassAndInterface() { + Object object = ClassImposterizer.INSTANCE.imposterise(interceptor, TestingClass.class, 
TestingInterface.class); + assertTrue(object instanceof TestingClass); + assertTrue(object instanceof TestingInterface); + } + + @Test(expected = IllegalArgumentException.class) + public void testPrivateInterface() { + Object object = ClassImposterizer.INSTANCE.imposterise(interceptor, TestingPrivateInterface.class); + assertTrue(object instanceof TestingPrivateInterface); } public static class TestingClass { } + + public static class TestingAbstractClass { + } + + public static interface TestingInterface { + } + + private static interface TestingPrivateInterface { + } }