Merge pull request #11315 from rmuir/build_cleanup

remove build duplication
Robert Muir committed 10 years ago
commit 46c328ae14

+ 0 - 16
dev-tools/elasticsearch_license_header.txt

@@ -1,16 +0,0 @@
-Licensed to Elasticsearch under one or more contributor
-license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright
-ownership. Elasticsearch licenses this file to you under
-the Apache License, Version 2.0 (the "License"); you may
-not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.

+ 0 - 47
dev-tools/forbidden/all-signatures.txt

@@ -1,47 +0,0 @@
-# Licensed to Elasticsearch under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance  with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on
-# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-@defaultMessage Convert to URI
-java.net.URL#getPath()
-java.net.URL#getFile()
-
-@defaultMessage Use java.nio.file instead of java.io.File API
-java.util.jar.JarFile
-java.util.zip.ZipFile
-java.io.File
-java.io.FileInputStream
-java.io.FileOutputStream
-java.io.PrintStream#<init>(java.lang.String,java.lang.String)
-java.io.PrintWriter#<init>(java.lang.String,java.lang.String)
-java.util.Formatter#<init>(java.lang.String,java.lang.String,java.util.Locale)
-java.io.RandomAccessFile
-java.nio.file.Path#toFile()
-
-@defaultMessage Don't use deprecated lucene apis
-org.apache.lucene.index.DocsEnum
-org.apache.lucene.index.DocsAndPositionsEnum
-org.apache.lucene.queries.TermFilter
-org.apache.lucene.queries.TermsFilter
-org.apache.lucene.search.TermRangeFilter
-org.apache.lucene.search.NumericRangeFilter
-org.apache.lucene.search.PrefixFilter
-
-java.nio.file.Paths @ Use PathUtils.get instead.
-java.nio.file.FileSystems#getDefault() @ use PathUtils.getDefault instead.
-
-@defaultMessage Specify a location for the temp file/directory instead.
-java.nio.file.Files#createTempDirectory(java.lang.String,java.nio.file.attribute.FileAttribute[])
-java.nio.file.Files#createTempFile(java.lang.String,java.lang.String,java.nio.file.attribute.FileAttribute[])
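
These signature files drive the forbidden-apis build check: each entry names a banned class, method, or constructor, and the @defaultMessage (or trailing "@ ..." note) is what a developer sees when the build fails. As a minimal sketch of the first rule above (class and method names here are illustrative, not from this commit), URL#getPath() is rejected in favor of converting to URI first:

    import java.net.URI;
    import java.net.URISyntaxException;
    import java.net.URL;

    class UrlPathExample {
        // url.getPath() is banned above ("Convert to URI"): it returns the raw,
        // still percent-encoded path. Round-tripping through URI decodes it.
        static String pathOf(URL url) throws URISyntaxException {
            return url.toURI().getPath();
        }
    }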

+ 0 - 137
dev-tools/forbidden/core-signatures.txt

@@ -1,137 +0,0 @@
-# Licensed to Elasticsearch under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance  with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on
-# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-@defaultMessage spawns threads with vague names; use a custom thread factory and name threads so that you can tell (by its name) which executor it is associated with
-
-java.util.concurrent.Executors#newFixedThreadPool(int)
-java.util.concurrent.Executors#newSingleThreadExecutor()
-java.util.concurrent.Executors#newCachedThreadPool()
-java.util.concurrent.Executors#newSingleThreadScheduledExecutor()
-java.util.concurrent.Executors#newScheduledThreadPool(int)
-java.util.concurrent.Executors#defaultThreadFactory()
-java.util.concurrent.Executors#privilegedThreadFactory()
-
-java.lang.Character#codePointBefore(char[],int) @ Implicit start offset is error-prone when the char[] is a buffer and the first chars are random chars
-java.lang.Character#codePointAt(char[],int) @ Implicit end offset is error-prone when the char[] is a buffer and the last chars are random chars
-
-@defaultMessage Collections.sort dumps data into an array, sorts the array and reinserts data into the list, one should rather use Lucene's CollectionUtil sort methods which sort in place
-
-java.util.Collections#sort(java.util.List)
-java.util.Collections#sort(java.util.List,java.util.Comparator)
-
-java.io.StringReader#<init>(java.lang.String) @ Use FastStringReader instead
-
-@defaultMessage Reference management is tricky, leave it to SearcherManager
-org.apache.lucene.index.IndexReader#decRef()
-org.apache.lucene.index.IndexReader#incRef()
-org.apache.lucene.index.IndexReader#tryIncRef()
-
-@defaultMessage Pass the precision step from the mappings explicitly instead
-org.apache.lucene.search.NumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
-org.apache.lucene.search.NumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
-org.apache.lucene.search.NumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
-org.apache.lucene.search.NumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
-org.apache.lucene.search.NumericRangeFilter#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
-org.apache.lucene.search.NumericRangeFilter#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
-org.apache.lucene.search.NumericRangeFilter#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
-org.apache.lucene.search.NumericRangeFilter#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
-
-@defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead. 
-java.lang.Object#wait()
-java.lang.Object#wait(long)
-java.lang.Object#wait(long,int)
-java.lang.Object#notify()
-java.lang.Object#notifyAll()
-
-@defaultMessage Beware of the behavior of this method on MIN_VALUE
-java.lang.Math#abs(int)
-java.lang.Math#abs(long)
-
-@defaultMessage Please do not try to stop the world
-java.lang.System#gc()
-
-@defaultMessage Use Long.compare instead we are on Java7
-com.google.common.primitives.Longs#compare(long,long)
-
-@defaultMessage Use Channels.* methods to write to channels. Do not write directly.
-java.nio.channels.WritableByteChannel#write(java.nio.ByteBuffer)
-java.nio.channels.FileChannel#write(java.nio.ByteBuffer, long)
-java.nio.channels.GatheringByteChannel#write(java.nio.ByteBuffer[], int, int)
-java.nio.channels.GatheringByteChannel#write(java.nio.ByteBuffer[])
-java.nio.channels.ReadableByteChannel#read(java.nio.ByteBuffer)
-java.nio.channels.ScatteringByteChannel#read(java.nio.ByteBuffer[])
-java.nio.channels.ScatteringByteChannel#read(java.nio.ByteBuffer[], int, int)
-java.nio.channels.FileChannel#read(java.nio.ByteBuffer, long)
-
-@defaultMessage Use Lucene.parseLenient instead it strips off minor version
-org.apache.lucene.util.Version#parseLeniently(java.lang.String)
-
-@defaultMessage unsafe encoders/decoders have problems in the lzf compress library.  Use variants of encode/decode functions which take Encoder/Decoder.
-com.ning.compress.lzf.impl.UnsafeChunkEncoders#createEncoder(int)
-com.ning.compress.lzf.impl.UnsafeChunkEncoders#createNonAllocatingEncoder(int)
-com.ning.compress.lzf.impl.UnsafeChunkEncoders#createEncoder(int, com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.impl.UnsafeChunkEncoders#createNonAllocatingEncoder(int, com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.impl.UnsafeChunkDecoder#<init>()
-com.ning.compress.lzf.parallel.CompressTask
-com.ning.compress.lzf.util.ChunkEncoderFactory#optimalInstance()
-com.ning.compress.lzf.util.ChunkEncoderFactory#optimalInstance(int)
-com.ning.compress.lzf.util.ChunkEncoderFactory#optimalNonAllocatingInstance(int)
-com.ning.compress.lzf.util.ChunkEncoderFactory#optimalInstance(com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.util.ChunkEncoderFactory#optimalInstance(int, com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.util.ChunkEncoderFactory#optimalNonAllocatingInstance(int, com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.util.ChunkDecoderFactory#optimalInstance()
-com.ning.compress.lzf.util.LZFFileInputStream#<init>(java.io.File)
-com.ning.compress.lzf.util.LZFFileInputStream#<init>(java.io.FileDescriptor)
-com.ning.compress.lzf.util.LZFFileInputStream#<init>(java.lang.String)
-com.ning.compress.lzf.util.LZFFileOutputStream#<init>(java.io.File)
-com.ning.compress.lzf.util.LZFFileOutputStream#<init>(java.io.File, boolean)
-com.ning.compress.lzf.util.LZFFileOutputStream#<init>(java.io.FileDescriptor)
-com.ning.compress.lzf.util.LZFFileOutputStream#<init>(java.lang.String)
-com.ning.compress.lzf.util.LZFFileOutputStream#<init>(java.lang.String, boolean)
-com.ning.compress.lzf.LZFEncoder#encode(byte[])
-com.ning.compress.lzf.LZFEncoder#encode(byte[], int, int)
-com.ning.compress.lzf.LZFEncoder#encode(byte[], int, int, com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.LZFEncoder#appendEncoded(byte[], int, int, byte[], int)
-com.ning.compress.lzf.LZFEncoder#appendEncoded(byte[], int, int, byte[], int, com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.LZFCompressingInputStream#<init>(java.io.InputStream)
-com.ning.compress.lzf.LZFDecoder#fastDecoder()
-com.ning.compress.lzf.LZFDecoder#decode(byte[])
-com.ning.compress.lzf.LZFDecoder#decode(byte[], int, int)
-com.ning.compress.lzf.LZFDecoder#decode(byte[], byte[])
-com.ning.compress.lzf.LZFDecoder#decode(byte[], int, int, byte[])
-com.ning.compress.lzf.LZFInputStream#<init>(java.io.InputStream)
-com.ning.compress.lzf.LZFInputStream#<init>(java.io.InputStream, boolean)
-com.ning.compress.lzf.LZFInputStream#<init>(java.io.InputStream, com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.LZFInputStream#<init>(java.io.InputStream, com.ning.compress.BufferRecycler, boolean)
-com.ning.compress.lzf.LZFOutputStream#<init>(java.io.OutputStream)
-com.ning.compress.lzf.LZFOutputStream#<init>(java.io.OutputStream, com.ning.compress.BufferRecycler)
-com.ning.compress.lzf.LZFUncompressor#<init>(com.ning.compress.DataHandler)
-com.ning.compress.lzf.LZFUncompressor#<init>(com.ning.compress.DataHandler, com.ning.compress.BufferRecycler)
-
-@defaultMessage Spawns a new thread which is solely under lucenes control use ThreadPool#estimatedTimeInMillisCounter instead
-org.apache.lucene.search.TimeLimitingCollector#getGlobalTimerThread()
-org.apache.lucene.search.TimeLimitingCollector#getGlobalCounter()
-
-@defaultMessage Don't interrupt threads use FutureUtils#cancel(Future<T>) instead
-java.util.concurrent.Future#cancel(boolean)
-
-@defaultMessage Don't try reading from paths that are not configured in Environment, resolve from Environment instead
-org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[])
-org.elasticsearch.common.io.PathUtils#get(java.net.URI)
-
-@defaultMessage Use queries, not filters
-org.apache.lucene.search.FilteredQuery#<init>(org.apache.lucene.search.Query, org.apache.lucene.search.Filter)
-org.apache.lucene.search.FilteredQuery#<init>(org.apache.lucene.search.Query, org.apache.lucene.search.Filter, org.apache.lucene.search.FilteredQuery$FilterStrategy)

+ 0 - 20
dev-tools/forbidden/test-signatures.txt

@@ -1,20 +0,0 @@
-# Licensed to Elasticsearch under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance  with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on
-# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-com.carrotsearch.randomizedtesting.RandomizedTest#globalTempDir() @ Use newTempDirPath() instead
-com.carrotsearch.randomizedtesting.annotations.Seed @ Don't commit hardcoded seeds
-
-org.apache.lucene.codecs.Codec#setDefault(org.apache.lucene.codecs.Codec) @ Use the SuppressCodecs("*") annotation instead
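
The last entry has a declarative replacement: rather than mutating the process-wide default codec, a test class opts out of the test framework's random codec rotation with Lucene's annotation. A minimal sketch (test class name illustrative):

    import org.apache.lucene.util.LuceneTestCase;
    import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;

    // Codec.setDefault(...) is banned above because it leaks global state
    // across tests; @SuppressCodecs("*") scopes the exclusion to one class.
    @SuppressCodecs("*")
    public class CodecSensitiveTests extends LuceneTestCase {
        public void testTooSlowUnderRandomCodecs() throws Exception {
            // test body omitted
        }
    }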

+ 0 - 13
dev-tools/license_header_definition.xml

@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<additionalHeaders>
-    <javadoc_style>
-        <firstLine>/*</firstLine>
-        <beforeEachLine> * </beforeEachLine>
-        <endLine> */</endLine>
-        <!--skipLine></skipLine-->
-        <firstLineDetectionPattern>(\s|\t)*/\*.*$</firstLineDetectionPattern>
-        <lastLineDetectionPattern>.*\*/(\s|\t)*$</lastLineDetectionPattern>
-        <allowBlankLines>false</allowBlankLines>
-        <isMultiline>true</isMultiline>
-    </javadoc_style>
-</additionalHeaders>

+ 0 - 20
dev-tools/pmd/custom.xml

@@ -1,20 +0,0 @@
-<?xml version="1.0"?>
-<ruleset name="Custom ruleset"
-    xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
-  <description>
-  Default ruleset for elasticsearch server project
-  </description>
-  <rule ref="rulesets/java/basic.xml"/>
-  <rule ref="rulesets/java/braces.xml"/>
-  <rule ref="rulesets/java/clone.xml"/>
-  <rule ref="rulesets/java/codesize.xml"/>
-  <rule ref="rulesets/java/coupling.xml">
-     <exclude name="LawOfDemeter" />
-  </rule>
-  <rule ref="rulesets/java/design.xml"/>
-  <rule ref="rulesets/java/unnecessary.xml">
-     <exclude name="UselessParentheses" />
-  </rule>
-</ruleset>

+ 46 - 677
pom.xml

File diff suppressed because it is too large.


+ 2 - 0
src/test/java/org/elasticsearch/test/ExternalNode.java

@@ -26,6 +26,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.client.transport.TransportClient;
 import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
@@ -91,6 +92,7 @@ final class ExternalNode implements Closeable {
         return externalNode;
     }
 
+    @SuppressForbidden(reason = "needs java.io.File api to start a process")
     synchronized void startInternal(Client client, Settings settings, String nodeName, String clusterName) throws IOException, InterruptedException {
         if (process != null) {
             throw new IllegalStateException("Already started");
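
This annotation is the escape hatch for the forbidden-apis signatures deleted above: the checker skips any element carrying a registered suppression annotation, and the reason string documents why the ban does not apply. The exemption here is genuine, since ProcessBuilder#directory accepts only java.io.File. A minimal sketch of the pattern (class and method names illustrative):

    import java.io.File;
    import java.io.IOException;
    import org.elasticsearch.common.SuppressForbidden;

    class LauncherExample {
        // java.io.File is banned project-wide, but ProcessBuilder#directory
        // takes only File, so the method is exempted with a documented reason.
        @SuppressForbidden(reason = "ProcessBuilder#directory only accepts java.io.File")
        static Process launch(File workingDir) throws IOException {
            return new ProcessBuilder("bin/elasticsearch").directory(workingDir).start();
        }
    }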

+ 3 - 0
src/test/java/org/elasticsearch/test/disruption/LongGCDisruption.java

@@ -19,6 +19,7 @@
 
 package org.elasticsearch.test.disruption;
 
+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.unit.TimeValue;
 
 import java.util.HashSet;
@@ -67,6 +68,7 @@ public class LongGCDisruption extends SingleNodeDisruption {
         return TimeValue.timeValueMillis(0);
     }
 
+    @SuppressForbidden(reason = "stops/resumes threads intentionally")
     protected boolean stopNodeThreads(String node, Set<Thread> nodeThreads) {
         Set<Thread> allThreadsSet = Thread.getAllStackTraces().keySet();
         boolean stopped = false;
@@ -99,6 +101,7 @@ public class LongGCDisruption extends SingleNodeDisruption {
         return stopped;
     }
 
+    @SuppressForbidden(reason = "stops/resumes threads intentionally")
     protected void resumeThreads(Set<Thread> threads) {
         for (Thread thread : threads) {
             thread.resume();

+ 0 - 9
src/test/resources/log4j.properties

@@ -1,9 +0,0 @@
-es.logger.level=INFO
-log4j.rootLogger=${es.logger.level}, out
-
-log4j.logger.org.apache.http=INFO, out
-log4j.additivity.org.apache.http=false
-
-log4j.appender.out=org.apache.log4j.ConsoleAppender
-log4j.appender.out.layout=org.apache.log4j.PatternLayout
-log4j.appender.out.layout.conversionPattern=[%d{ISO8601}][%-5p][%-25c] %m%n
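
For reference, the deleted appender's conversionPattern ([%d{ISO8601}][%-5p][%-25c] %m%n) renders each event as a bracketed ISO-8601 timestamp, a level padded to five characters, and a logger name padded to twenty-five; a representative line (values illustrative) would be:

    [2015-05-22 14:03:11,123][INFO ][org.elasticsearch.node   ] started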

Some files were not shown because too many files changed in this diff