[jira] [Commented] (NUTCH-2633) Fix deprecation warnings when building Nutch master branch under JDK 10.0.2+13

Previous Topic Next Topic
 
classic Classic list List threaded Threaded
1 message Options
Reply | Threaded
Open this post in threaded view
|

[jira] [Commented] (NUTCH-2633) Fix deprecation warnings when building Nutch master branch under JDK 10.0.2+13

JIRA jira@apache.org

    [ https://issues.apache.org/jira/browse/NUTCH-2633?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16576989#comment-16576989 ]

ASF GitHub Bot commented on NUTCH-2633:
---------------------------------------

lewismc closed pull request #374: NUTCH-2633 Fix deprecation warnings when building Nutch master branch…
URL: https://github.com/apache/nutch/pull/374
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is reproduced
below, since GitHub does not display it once the pull request is merged:

diff --git a/src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java b/src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/crawl/AdaptiveFetchSchedule.java b/src/java/org/apache/nutch/crawl/AdaptiveFetchSchedule.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/crawl/CrawlDatum.java b/src/java/org/apache/nutch/crawl/CrawlDatum.java
index b50d9c92d..b57fc0b6e 100644
--- a/src/java/org/apache/nutch/crawl/CrawlDatum.java
+++ b/src/java/org/apache/nutch/crawl/CrawlDatum.java
@@ -545,7 +545,7 @@ public boolean evaluate(Expression expr, String url) {
       jcontext.set("fetchTime", (long)(getFetchTime()));
       jcontext.set("modifiedTime", (long)(getModifiedTime()));
       jcontext.set("retries", getRetriesSinceFetch());
-      jcontext.set("interval", new Integer(getFetchInterval()));
+      jcontext.set("interval", Integer.valueOf(getFetchInterval()));
       jcontext.set("score", getScore());
       jcontext.set("signature", StringUtil.toHexString(getSignature()));
             
diff --git a/src/java/org/apache/nutch/crawl/CrawlDbMerger.java b/src/java/org/apache/nutch/crawl/CrawlDbMerger.java
index 25562a606..97730a3dd 100644
--- a/src/java/org/apache/nutch/crawl/CrawlDbMerger.java
+++ b/src/java/org/apache/nutch/crawl/CrawlDbMerger.java
@@ -43,7 +43,6 @@
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.nutch.util.LockUtil;
 import org.apache.nutch.util.NutchConfiguration;
 import org.apache.nutch.util.NutchJob;
 import org.apache.nutch.util.TimingUtil;
diff --git a/src/java/org/apache/nutch/crawl/CrawlDbReader.java b/src/java/org/apache/nutch/crawl/CrawlDbReader.java
index ea4c96467..7c4eb1c5e 100644
--- a/src/java/org/apache/nutch/crawl/CrawlDbReader.java
+++ b/src/java/org/apache/nutch/crawl/CrawlDbReader.java
@@ -28,7 +28,6 @@
 import java.util.Arrays;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -43,7 +42,6 @@
 import com.tdunning.math.stats.MergingDigest;
 import com.tdunning.math.stats.TDigest;
 
-import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -64,10 +62,8 @@
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;
-import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.nutch.util.AbstractChecker;
diff --git a/src/java/org/apache/nutch/crawl/DefaultFetchSchedule.java b/src/java/org/apache/nutch/crawl/DefaultFetchSchedule.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/crawl/FetchSchedule.java b/src/java/org/apache/nutch/crawl/FetchSchedule.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/crawl/FetchScheduleFactory.java b/src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
old mode 100755
new mode 100644
index 09a2fd872..e437800cd
--- a/src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
+++ b/src/java/org/apache/nutch/crawl/FetchScheduleFactory.java
@@ -43,7 +43,7 @@ public synchronized static FetchSchedule getFetchSchedule(Configuration conf) {
       try {
         LOG.info("Using FetchSchedule impl: " + clazz);
         Class<?> implClass = Class.forName(clazz);
-        impl = (FetchSchedule) implClass.newInstance();
+        impl = (FetchSchedule) implClass.getConstructor().newInstance();
         impl.setConf(conf);
         objectCache.setObject(clazz, impl);
       } catch (Exception e) {
diff --git a/src/java/org/apache/nutch/crawl/MimeAdaptiveFetchSchedule.java b/src/java/org/apache/nutch/crawl/MimeAdaptiveFetchSchedule.java
index c6c9ce98f..92a4ab9a2 100644
--- a/src/java/org/apache/nutch/crawl/MimeAdaptiveFetchSchedule.java
+++ b/src/java/org/apache/nutch/crawl/MimeAdaptiveFetchSchedule.java
@@ -154,7 +154,7 @@ private void readMimeFile(Reader mimeFile) throws IOException {
         if (splits.length == 3) {
           // Add a lower cased MIME-type and the factor to the map
           mimeMap.put(StringUtils.lowerCase(splits[0]), new AdaptiveRate(
-              new Float(splits[1]), new Float(splits[2])));
+              Float.valueOf(splits[1]), Float.valueOf(splits[2])));
         } else {
           LOG.warn("Invalid configuration line in: " + line);
         }
diff --git a/src/java/org/apache/nutch/crawl/SignatureFactory.java b/src/java/org/apache/nutch/crawl/SignatureFactory.java
index 6832ffc61..82e6709f8 100644
--- a/src/java/org/apache/nutch/crawl/SignatureFactory.java
+++ b/src/java/org/apache/nutch/crawl/SignatureFactory.java
@@ -51,7 +51,7 @@ public synchronized static Signature getSignature(Configuration conf) {
           LOG.info("Using Signature impl: " + clazz);
         }
         Class<?> implClass = Class.forName(clazz);
-        impl = (Signature) implClass.newInstance();
+        impl = (Signature) implClass.getConstructor().newInstance();
         impl.setConf(conf);
         objectCache.setObject(clazz, impl);
       } catch (Exception e) {
diff --git a/src/java/org/apache/nutch/fetcher/Fetcher.java b/src/java/org/apache/nutch/fetcher/Fetcher.java
index c7230ac5f..f6584c560 100644
--- a/src/java/org/apache/nutch/fetcher/Fetcher.java
+++ b/src/java/org/apache/nutch/fetcher/Fetcher.java
@@ -160,7 +160,7 @@ private AtomicInteger getActiveThreads() {
     private void reportStatus(Context context, FetchItemQueues fetchQueues, int pagesLastSec, int bytesLastSec)
         throws IOException {
       StringBuilder status = new StringBuilder();
-      Long elapsed = new Long((System.currentTimeMillis() - start) / 1000);
+      Long elapsed = Long.valueOf((System.currentTimeMillis() - start) / 1000);
 
       float avgPagesSec = (float) pages.get() / elapsed.floatValue();
       long avgBytesSec = (bytes.get() / 128l) / elapsed.longValue();
diff --git a/src/java/org/apache/nutch/hostdb/ReadHostDb.java b/src/java/org/apache/nutch/hostdb/ReadHostDb.java
index 360b11932..a17a90b16 100644
--- a/src/java/org/apache/nutch/hostdb/ReadHostDb.java
+++ b/src/java/org/apache/nutch/hostdb/ReadHostDb.java
@@ -224,8 +224,8 @@ private void getHostDbRecord(Path hostDb, String host) throws Exception {
     if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
       throw new IOException("Incompatible key (" + keyClass.getName() + ")");
       
-    Text key = (Text) keyClass.newInstance();
-    HostDatum value = (HostDatum) valueClass.newInstance();
+    Text key = (Text) keyClass.getConstructor().newInstance();
+    HostDatum value = (HostDatum) valueClass.getConstructor().newInstance();
     
     for (int i = 0; i < readers.length; i++) {
       while (readers[i].next(key, value)) {
diff --git a/src/java/org/apache/nutch/hostdb/ResolverThread.java b/src/java/org/apache/nutch/hostdb/ResolverThread.java
index 53517b8e2..fe662170a 100644
--- a/src/java/org/apache/nutch/hostdb/ResolverThread.java
+++ b/src/java/org/apache/nutch/hostdb/ResolverThread.java
@@ -61,6 +61,7 @@ public void run() {
     // Resolve the host and act appropriatly
     try {
       // Throws an exception if host is not found
+      @SuppressWarnings("unused")
       InetAddress inetAddr = InetAddress.getByName(host);
 
       if (datum.isEmpty()) {
diff --git a/src/java/org/apache/nutch/indexer/CleaningJob.java b/src/java/org/apache/nutch/indexer/CleaningJob.java
index 7a0f70e78..8a77a9d82 100644
--- a/src/java/org/apache/nutch/indexer/CleaningJob.java
+++ b/src/java/org/apache/nutch/indexer/CleaningJob.java
@@ -77,7 +77,9 @@ public void map(Text key, CrawlDatum value,
 
   public static class DeleterReducer extends
       Reducer<ByteWritable, Text, Text, ByteWritable> {
+    @SuppressWarnings("unused")
     private static final int NUM_MAX_DELETE_REQUEST = 1000;
+    @SuppressWarnings("unused")
     private int numDeletes = 0;
     private int totalDeleted = 0;
 
diff --git a/src/java/org/apache/nutch/indexer/IndexWriter.java b/src/java/org/apache/nutch/indexer/IndexWriter.java
index 4413699d5..b33c5070d 100644
--- a/src/java/org/apache/nutch/indexer/IndexWriter.java
+++ b/src/java/org/apache/nutch/indexer/IndexWriter.java
@@ -28,6 +28,9 @@
    */
   final static String X_POINT_ID = IndexWriter.class.getName();
 
+  /**
+   * @deprecated use {@link #open(IndexWriterParams)}} instead.  
+   */
   @Deprecated
   public void open(Configuration conf, String name) throws IOException;
 
diff --git a/src/java/org/apache/nutch/indexer/IndexingFilters.java b/src/java/org/apache/nutch/indexer/IndexingFilters.java
index ca603d4f0..5ebdd7fb8 100644
--- a/src/java/org/apache/nutch/indexer/IndexingFilters.java
+++ b/src/java/org/apache/nutch/indexer/IndexingFilters.java
@@ -17,9 +17,6 @@
 
 package org.apache.nutch.indexer;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.nutch.plugin.PluginRepository;
 import org.apache.nutch.parse.Parse;
 import org.apache.hadoop.conf.Configuration;
@@ -27,16 +24,11 @@
 import org.apache.nutch.crawl.Inlinks;
 import org.apache.hadoop.io.Text;
 
-import java.lang.invoke.MethodHandles;
-
 /** Creates and caches {@link IndexingFilter} implementing plugins. */
 public class IndexingFilters {
 
   public static final String INDEXINGFILTER_ORDER = "indexingfilter.order";
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   private IndexingFilter[] indexingFilters;
 
   public IndexingFilters(Configuration conf) {
diff --git a/src/java/org/apache/nutch/plugin/Extension.java b/src/java/org/apache/nutch/plugin/Extension.java
index 7c074de33..e73b850d6 100644
--- a/src/java/org/apache/nutch/plugin/Extension.java
+++ b/src/java/org/apache/nutch/plugin/Extension.java
@@ -16,6 +16,7 @@
  */
 package org.apache.nutch.plugin;
 
+import java.lang.reflect.InvocationTargetException;
 import java.util.HashMap;
 
 import org.apache.hadoop.conf.Configuration;
@@ -158,8 +159,13 @@ public Object getExtensionInstance() throws PluginRuntimeException {
         // lazy loading of Plugin in case there is no instance of the plugin
         // already.
         pluginRepository.getPluginInstance(getDescriptor());
-        Object object = extensionClazz.newInstance();
-        if (object instanceof Configurable) {
+        Object object = null;
+        try {
+          object = extensionClazz.getConstructor().newInstance();
+        } catch (IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
+          e.printStackTrace();
+        }
+        if (object != null && object instanceof Configurable) {
           ((Configurable) object).setConf(this.conf);
         }
         return object;
diff --git a/src/java/org/apache/nutch/plugin/Plugin.java b/src/java/org/apache/nutch/plugin/Plugin.java
index e78754b8a..8325a5607 100644
--- a/src/java/org/apache/nutch/plugin/Plugin.java
+++ b/src/java/org/apache/nutch/plugin/Plugin.java
@@ -30,7 +30,7 @@
  * instances are used as the point of life cycle managemet of plugin related
  * functionality.
  *
- * The <code>Plugin</code> will be startuped and shutdown by the nutch plugin
+ * The <code>Plugin</code> will be started up and shutdown by the nutch plugin
  * management system.
  *
  * A possible usecase of the <code>Plugin</code> implementation is to create or
@@ -88,6 +88,7 @@ private void setDescriptor(PluginDescriptor descriptor) {
     fDescriptor = descriptor;
   }
 
+  @SuppressWarnings("deprecation")
   protected void finalize() throws Throwable {
     super.finalize();
     shutDown();
diff --git a/src/java/org/apache/nutch/protocol/Content.java b/src/java/org/apache/nutch/protocol/Content.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/protocol/Protocol.java b/src/java/org/apache/nutch/protocol/Protocol.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/protocol/ProtocolException.java b/src/java/org/apache/nutch/protocol/ProtocolException.java
old mode 100755
new mode 100644
diff --git a/src/java/org/apache/nutch/protocol/ProtocolFactory.java b/src/java/org/apache/nutch/protocol/ProtocolFactory.java
index b39155b79..87944a8f0 100644
--- a/src/java/org/apache/nutch/protocol/ProtocolFactory.java
+++ b/src/java/org/apache/nutch/protocol/ProtocolFactory.java
@@ -17,12 +17,9 @@
 
 package org.apache.nutch.protocol;
 
-import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.net.MalformedURLException;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.nutch.plugin.Extension;
 import org.apache.nutch.plugin.ExtensionPoint;
 import org.apache.nutch.plugin.PluginRepository;
@@ -40,9 +37,6 @@
  */
 public class ProtocolFactory {
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   private ExtensionPoint extensionPoint;
 
   private Configuration conf;
diff --git a/src/java/org/apache/nutch/protocol/ProtocolStatus.java b/src/java/org/apache/nutch/protocol/ProtocolStatus.java
index 279140684..46f97305f 100644
--- a/src/java/org/apache/nutch/protocol/ProtocolStatus.java
+++ b/src/java/org/apache/nutch/protocol/ProtocolStatus.java
@@ -101,22 +101,22 @@
 
   private static final HashMap<Integer, String> codeToName = new HashMap<>();
   static {
-    codeToName.put(new Integer(SUCCESS), "success");
-    codeToName.put(new Integer(FAILED), "failed");
-    codeToName.put(new Integer(PROTO_NOT_FOUND), "proto_not_found");
-    codeToName.put(new Integer(GONE), "gone");
-    codeToName.put(new Integer(MOVED), "moved");
-    codeToName.put(new Integer(TEMP_MOVED), "temp_moved");
-    codeToName.put(new Integer(NOTFOUND), "notfound");
-    codeToName.put(new Integer(RETRY), "retry");
-    codeToName.put(new Integer(EXCEPTION), "exception");
-    codeToName.put(new Integer(ACCESS_DENIED), "access_denied");
-    codeToName.put(new Integer(ROBOTS_DENIED), "robots_denied");
-    codeToName.put(new Integer(REDIR_EXCEEDED), "redir_exceeded");
-    codeToName.put(new Integer(NOTFETCHING), "notfetching");
-    codeToName.put(new Integer(NOTMODIFIED), "notmodified");
-    codeToName.put(new Integer(WOULDBLOCK), "wouldblock");
-    codeToName.put(new Integer(BLOCKED), "blocked");
+    codeToName.put(Integer.valueOf(SUCCESS), "success");
+    codeToName.put(Integer.valueOf(FAILED), "failed");
+    codeToName.put(Integer.valueOf(PROTO_NOT_FOUND), "proto_not_found");
+    codeToName.put(Integer.valueOf(GONE), "gone");
+    codeToName.put(Integer.valueOf(MOVED), "moved");
+    codeToName.put(Integer.valueOf(TEMP_MOVED), "temp_moved");
+    codeToName.put(Integer.valueOf(NOTFOUND), "notfound");
+    codeToName.put(Integer.valueOf(RETRY), "retry");
+    codeToName.put(Integer.valueOf(EXCEPTION), "exception");
+    codeToName.put(Integer.valueOf(ACCESS_DENIED), "access_denied");
+    codeToName.put(Integer.valueOf(ROBOTS_DENIED), "robots_denied");
+    codeToName.put(Integer.valueOf(REDIR_EXCEEDED), "redir_exceeded");
+    codeToName.put(Integer.valueOf(NOTFETCHING), "notfetching");
+    codeToName.put(Integer.valueOf(NOTMODIFIED), "notmodified");
+    codeToName.put(Integer.valueOf(WOULDBLOCK), "wouldblock");
+    codeToName.put(Integer.valueOf(BLOCKED), "blocked");
   }
 
   public ProtocolStatus() {
@@ -280,7 +280,7 @@ public boolean equals(Object o) {
 
   public String toString() {
     StringBuffer res = new StringBuffer();
-    res.append(codeToName.get(new Integer(code)) + "(" + code
+    res.append(codeToName.get(Integer.valueOf(code)) + "(" + code
         + "), lastModified=" + lastModified);
     if (args != null) {
       if (args.length == 1) {
diff --git a/src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java b/src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java
index 6d751c0ef..6d75cdefd 100644
--- a/src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java
+++ b/src/java/org/apache/nutch/segment/ContentAsTextInputFormat.java
@@ -69,6 +69,7 @@ public void initialize(InputSplit split, TaskAttemptContext context){
 
     }
 
+    @SuppressWarnings("unused")
     public synchronized boolean next(Text key, Text value)
         throws IOException, InterruptedException {
 
diff --git a/src/java/org/apache/nutch/segment/SegmentReader.java b/src/java/org/apache/nutch/segment/SegmentReader.java
index 2b9943561..c09c7ca9a 100644
--- a/src/java/org/apache/nutch/segment/SegmentReader.java
+++ b/src/java/org/apache/nutch/segment/SegmentReader.java
@@ -421,16 +421,16 @@ public void run() {
     Class<?> valueClass = readers[0].getValueClass();
     if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
       throw new IOException("Incompatible key (" + keyClass.getName() + ")");
-    Writable value = (Writable) valueClass.newInstance();
+    Writable value = (Writable) valueClass.getConstructor().newInstance();
     // we don't know the partitioning schema
     for (int i = 0; i < readers.length; i++) {
       if (readers[i].get(key, value) != null) {
         res.add(value);
-        value = (Writable) valueClass.newInstance();
-        Text aKey = (Text) keyClass.newInstance();
+        value = (Writable) valueClass.getConstructor().newInstance();
+        Text aKey = (Text) keyClass.getConstructor().newInstance();
         while (readers[i].next(aKey, value) && aKey.equals(key)) {
           res.add(value);
-          value = (Writable) valueClass.newInstance();
+          value = (Writable) valueClass.getConstructor().newInstance();
         }
       }
       readers[i].close();
@@ -446,13 +446,13 @@ public void run() {
     Class<?> valueClass = readers[0].getValueClass();
     if (!keyClass.getName().equals("org.apache.hadoop.io.Text"))
       throw new IOException("Incompatible key (" + keyClass.getName() + ")");
-    WritableComparable<?> aKey = (WritableComparable<?>) keyClass.newInstance();
-    Writable value = (Writable) valueClass.newInstance();
+    WritableComparable<?> aKey = (WritableComparable<?>) keyClass.getConstructor().newInstance();
+    Writable value = (Writable) valueClass.getConstructor().newInstance();
     for (int i = 0; i < readers.length; i++) {
       while (readers[i].next(aKey, value)) {
         if (aKey.equals(key)) {
           res.add(value);
-          value = (Writable) valueClass.newInstance();
+          value = (Writable) valueClass.getConstructor().newInstance();
         }
       }
       readers[i].close();
diff --git a/src/java/org/apache/nutch/service/impl/LinkReader.java b/src/java/org/apache/nutch/service/impl/LinkReader.java
index 39e11062f..9d2ffcb15 100644
--- a/src/java/org/apache/nutch/service/impl/LinkReader.java
+++ b/src/java/org/apache/nutch/service/impl/LinkReader.java
@@ -60,7 +60,6 @@ public List read(String path) throws FileNotFoundException {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
@@ -93,7 +92,6 @@ public List head(String path, int nrows) throws FileNotFoundException {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
@@ -128,7 +126,6 @@ public List slice(String path, int start, int end)
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
@@ -154,7 +151,6 @@ public int count(String path) throws FileNotFoundException {
     } catch(FileNotFoundException fne){
       throw new FileNotFoundException();
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
     }
@@ -162,14 +158,14 @@ public int count(String path) throws FileNotFoundException {
   }
 
   private HashMap<String, String> getLinksRow(Writable key, LinkDatum value) {
-    HashMap<String, String> t_row = new HashMap<>();
-    t_row.put("key_url", key.toString());
-    t_row.put("url", value.getUrl());
-    t_row.put("anchor", value.getAnchor());
-    t_row.put("score", String.valueOf(value.getScore()));
-    t_row.put("timestamp", String.valueOf(value.getTimestamp()));
-    t_row.put("linktype", String.valueOf(value.getLinkType()));
-
-    return t_row;
+    HashMap<String, String> tRow = new HashMap<>();
+    tRow.put("key_url", key.toString());
+    tRow.put("url", value.getUrl());
+    tRow.put("anchor", value.getAnchor());
+    tRow.put("score", String.valueOf(value.getScore()));
+    tRow.put("timestamp", String.valueOf(value.getTimestamp()));
+    tRow.put("linktype", String.valueOf(value.getLinkType()));
+
+    return tRow;
   }
 }
diff --git a/src/java/org/apache/nutch/service/impl/NodeReader.java b/src/java/org/apache/nutch/service/impl/NodeReader.java
index 28d6600d1..e52f6a640 100644
--- a/src/java/org/apache/nutch/service/impl/NodeReader.java
+++ b/src/java/org/apache/nutch/service/impl/NodeReader.java
@@ -60,7 +60,6 @@ public List read(String path) throws FileNotFoundException {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file, StringUtils.stringifyException(e));
       throw new WebApplicationException();
@@ -93,7 +92,6 @@ public List head(String path, int nrows) throws FileNotFoundException {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file,
           StringUtils.stringifyException(e));
@@ -129,7 +127,6 @@ public List slice(String path, int start, int end)
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file,
           StringUtils.stringifyException(e));
@@ -159,7 +156,6 @@ public int count(String path) throws FileNotFoundException {
       throw new FileNotFoundException();
 
     }catch (IOException e) {
-      // TODO Auto-generated catch block
       e.printStackTrace();
       LOG.error("Error occurred while reading file {} : ", file,
           StringUtils.stringifyException(e));
@@ -170,14 +166,14 @@ public int count(String path) throws FileNotFoundException {
   }
 
   private HashMap<String, String> getNodeRow(Writable key, Node value) {
-    HashMap<String, String> t_row = new HashMap<>();
-    t_row.put("key_url", key.toString());
-    t_row.put("num_inlinks", String.valueOf(value.getNumInlinks()) );
-    t_row.put("num_outlinks", String.valueOf(value.getNumOutlinks()) );
-    t_row.put("inlink_score", String.valueOf(value.getInlinkScore()));
-    t_row.put("outlink_score", String.valueOf(value.getOutlinkScore()));
-    t_row.put("metadata", value.getMetadata().toString());
-
-    return t_row;
+    HashMap<String, String> tRow = new HashMap<>();
+    tRow.put("key_url", key.toString());
+    tRow.put("num_inlinks", String.valueOf(value.getNumInlinks()) );
+    tRow.put("num_outlinks", String.valueOf(value.getNumOutlinks()) );
+    tRow.put("inlink_score", String.valueOf(value.getInlinkScore()));
+    tRow.put("outlink_score", String.valueOf(value.getOutlinkScore()));
+    tRow.put("metadata", value.getMetadata().toString());
+
+    return tRow;
   }
 }
diff --git a/src/java/org/apache/nutch/service/impl/NutchServerPoolExecutor.java b/src/java/org/apache/nutch/service/impl/NutchServerPoolExecutor.java
index 147b61aed..b3bcb2ee1 100644
--- a/src/java/org/apache/nutch/service/impl/NutchServerPoolExecutor.java
+++ b/src/java/org/apache/nutch/service/impl/NutchServerPoolExecutor.java
@@ -48,6 +48,7 @@ protected void beforeExecute(Thread thread, Runnable runnable) {
       runningWorkers.offer(((JobWorker) runnable));
     }
   }
+  @SuppressWarnings("unlikely-arg-type")
   @Override
   protected void afterExecute(Runnable runnable, Throwable throwable) {
     super.afterExecute(runnable, throwable);
diff --git a/src/java/org/apache/nutch/service/model/response/FetchNodeDbInfo.java b/src/java/org/apache/nutch/service/model/response/FetchNodeDbInfo.java
index 6c76a7df7..bac0924b9 100644
--- a/src/java/org/apache/nutch/service/model/response/FetchNodeDbInfo.java
+++ b/src/java/org/apache/nutch/service/model/response/FetchNodeDbInfo.java
@@ -76,15 +76,19 @@ public ChildNode(String childUrl, String anchorText){
       this.anchorText = anchorText;
     }
     
+    @SuppressWarnings("unused")
     public String getAnchorText() {
       return anchorText;
     }
+    @SuppressWarnings("unused")
     public void setAnchorText(String anchorText) {
       this.anchorText = anchorText;
     }
+    @SuppressWarnings("unused")
     public String getChildUrl() {
       return childUrl;
     }
+    @SuppressWarnings("unused")
     public void setChildUrl(String childUrl) {
       this.childUrl = childUrl;
     }
diff --git a/src/java/org/apache/nutch/service/resources/DbResource.java b/src/java/org/apache/nutch/service/resources/DbResource.java
index aeeb27ba3..67771d488 100644
--- a/src/java/org/apache/nutch/service/resources/DbResource.java
+++ b/src/java/org/apache/nutch/service/resources/DbResource.java
@@ -111,6 +111,7 @@ private Response crawlDbStats(Configuration conf, Map<String, String> args, Stri
 
   @Produces(MediaType.APPLICATION_OCTET_STREAM)
   private Response crawlDbDump(Configuration conf, Map<String, String> args, String crawlId){
+    @SuppressWarnings("resource")
     CrawlDbReader dbr = new CrawlDbReader();
     try{
       return Response.ok(dbr.query(args, conf, "dump", crawlId), MediaType.APPLICATION_OCTET_STREAM).build();
@@ -122,6 +123,7 @@ private Response crawlDbDump(Configuration conf, Map<String, String> args, Strin
 
   @Produces(MediaType.APPLICATION_OCTET_STREAM)
   private Response crawlDbTopN(Configuration conf, Map<String, String> args, String crawlId) {
+    @SuppressWarnings("resource")
     CrawlDbReader dbr = new CrawlDbReader();
     try{
       return Response.ok(dbr.query(args, conf, "topN", crawlId), MediaType.APPLICATION_OCTET_STREAM).build();
@@ -132,6 +134,7 @@ private Response crawlDbTopN(Configuration conf, Map<String, String> args, Strin
   }
 
   private Response crawlDbUrl(Configuration conf, Map<String, String> args, String crawlId){
+    @SuppressWarnings("resource")
     CrawlDbReader dbr = new CrawlDbReader();
     try{
       return Response.ok(dbr.query(args, conf, "url", crawlId)).build();
diff --git a/src/java/org/apache/nutch/tools/Benchmark.java b/src/java/org/apache/nutch/tools/Benchmark.java
old mode 100755
new mode 100644
index 0c82fc37f..c8b4a9412
--- a/src/java/org/apache/nutch/tools/Benchmark.java
+++ b/src/java/org/apache/nutch/tools/Benchmark.java
@@ -195,6 +195,7 @@ public BenchmarkResults benchmark(int seeds, int depth, int threads,
     conf.setInt(Generator.GENERATOR_MAX_COUNT, maxPerHost);
     conf.set(Generator.GENERATOR_COUNT_MODE,
         Generator.GENERATOR_COUNT_VALUE_HOST);
+    @SuppressWarnings("unused")
     Job job = NutchJob.getInstance(getConf());
     FileSystem fs = FileSystem.get(conf);
     Path dir = new Path(getConf().get("hadoop.tmp.dir"), "bench-"
@@ -276,6 +277,7 @@ public BenchmarkResults benchmark(int seeds, int depth, int threads,
       LOG.info("crawl finished: " + dir);
     }
     res.elapsed = System.currentTimeMillis() - res.elapsed;
+    @SuppressWarnings("resource")
     CrawlDbReader dbreader = new CrawlDbReader();
     dbreader.processStatJob(crawlDb.toString(), conf, false);
     return res;
diff --git a/src/java/org/apache/nutch/tools/CommonCrawlDataDumper.java b/src/java/org/apache/nutch/tools/CommonCrawlDataDumper.java
index 80adc05ad..c01305980 100644
--- a/src/java/org/apache/nutch/tools/CommonCrawlDataDumper.java
+++ b/src/java/org/apache/nutch/tools/CommonCrawlDataDumper.java
@@ -283,7 +283,7 @@ public void dump(File outputDir, File segmentRootDir, File linkdb, boolean gzip,
         SequenceFile.Reader reader = new SequenceFile.Reader(nutchConfig,
             SequenceFile.Reader.file(segmentPart));
 
-        Writable key = (Writable) reader.getKeyClass().newInstance();
+        Writable key = (Writable) reader.getKeyClass().getConstructor().newInstance();
 
         Content content = null;
         while (reader.next(key)) {
diff --git a/src/java/org/apache/nutch/tools/CommonCrawlFormatWARC.java b/src/java/org/apache/nutch/tools/CommonCrawlFormatWARC.java
index b79336150..6f89b16f2 100644
--- a/src/java/org/apache/nutch/tools/CommonCrawlFormatWARC.java
+++ b/src/java/org/apache/nutch/tools/CommonCrawlFormatWARC.java
@@ -160,8 +160,6 @@ protected URI writeResponse() throws IOException, ParseException {
     record.setType(WARCConstants.WARCRecordType.response);
     record.setUrl(getUrl());
 
-    String fetchTime;
-
     record.setCreate14DigitDate(DateUtils
         .getLog14Date(Long.parseLong(metadata.get("nutch.fetch.time"))));
     record.setMimetype(WARCConstants.HTTP_RESPONSE_MIMETYPE);
diff --git a/src/java/org/apache/nutch/tools/DmozParser.java b/src/java/org/apache/nutch/tools/DmozParser.java
index 217a15e57..fa7e7d603 100644
--- a/src/java/org/apache/nutch/tools/DmozParser.java
+++ b/src/java/org/apache/nutch/tools/DmozParser.java
@@ -113,10 +113,10 @@ else if (lastBad && c == '<') { // fix mis-matched brackets
    */
   private class RDFProcessor extends DefaultHandler {
     String curURL = null, curSection = null;
-    boolean titlePending = false, descPending = false,
-        insideAdultSection = false;
+    boolean titlePending = false, descPending = false;
     Pattern topicPattern = null;
     StringBuffer title = new StringBuffer(), desc = new StringBuffer();
+    @SuppressWarnings("unused")
     XMLReader reader;
     int subsetDenom;
     int hashSkew;
@@ -258,17 +258,6 @@ public void error(SAXParseException spe) {
       }
     }
 
-    /**
-     * Emit the exception message, with line numbers
-     */
-    public void errorError(SAXParseException spe) {
-      if (LOG.isErrorEnabled()) {
-        LOG.error("Fatal err: " + spe.toString() + ": " + spe.getMessage());
-        LOG.error("Last known line is " + location.getLineNumber()
-            + ", column " + location.getColumnNumber());
-      }
-    }
-
     /**
      * Emit exception warning message
      */
diff --git a/src/java/org/apache/nutch/tools/FileDumper.java b/src/java/org/apache/nutch/tools/FileDumper.java
index fcf2f199b..d09ad74d1 100644
--- a/src/java/org/apache/nutch/tools/FileDumper.java
+++ b/src/java/org/apache/nutch/tools/FileDumper.java
@@ -172,7 +172,7 @@ public void dump(File outputDir, File segmentRootDir, String[] mimeTypes, boolea
 
           SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file));
 
-          Writable key = (Writable) reader.getKeyClass().newInstance();
+          Writable key = (Writable) reader.getKeyClass().getConstructor().newInstance();
           Content content = null;
 
           while (reader.next(key)) {
diff --git a/src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java b/src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java
index 499b24607..7685e5f0a 100644
--- a/src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java
+++ b/src/java/org/apache/nutch/tools/arc/ArcSegmentCreator.java
@@ -344,6 +344,7 @@ public void map(Text key, BytesWritable bytes,
 
           // set the url version into the metadata
           content.getMetadata().set(URL_VERSION, version);
+          @SuppressWarnings("unused")
           ParseStatus pstatus = null;
           pstatus = output(context, segmentName, url, datum, content, status,
               CrawlDatum.STATUS_FETCH_SUCCESS);
diff --git a/src/java/org/apache/nutch/tools/warc/WARCExporter.java b/src/java/org/apache/nutch/tools/warc/WARCExporter.java
index ee9879ad3..a7e08c7fb 100644
--- a/src/java/org/apache/nutch/tools/warc/WARCExporter.java
+++ b/src/java/org/apache/nutch/tools/warc/WARCExporter.java
@@ -259,7 +259,6 @@ public int generateWARC(String output, List<Path> segments) throws IOException{
 
     final Job job = NutchJob.getInstance(getConf());
     job.setJobName("warc-exporter " + output);
-    Configuration conf = job.getConfiguration();
 
     for (final Path segment : segments) {
       LOG.info("warc-exporter: adding segment: {}", segment);
diff --git a/src/java/org/apache/nutch/util/AbstractChecker.java b/src/java/org/apache/nutch/util/AbstractChecker.java
index 8d365ecaf..e0af36da7 100644
--- a/src/java/org/apache/nutch/util/AbstractChecker.java
+++ b/src/java/org/apache/nutch/util/AbstractChecker.java
@@ -94,6 +94,7 @@ protected int processStdin() throws Exception {
     String line;
     while ((line = in.readLine()) != null) {
       StringBuilder output = new StringBuilder();
+      @SuppressWarnings("unused")
       int ret = process(line, output);
       System.out.println(output);
     }
@@ -101,6 +102,7 @@ protected int processStdin() throws Exception {
   }
 
   // Open TCP socket and process input
+  @SuppressWarnings("resource")
   protected void processTCP(int tcpPort) throws Exception {
     ServerSocket server = null;
 
diff --git a/src/java/org/apache/nutch/util/CrawlCompletionStats.java b/src/java/org/apache/nutch/util/CrawlCompletionStats.java
index 116c3113d..4208b5c26 100644
--- a/src/java/org/apache/nutch/util/CrawlCompletionStats.java
+++ b/src/java/org/apache/nutch/util/CrawlCompletionStats.java
@@ -68,28 +68,30 @@
   private static final int MODE_HOST = 1;
   private static final int MODE_DOMAIN = 2;
 
-  private int mode = 0;
-
   public int run(String[] args) throws Exception {
     Option helpOpt = new Option("h", "help", false, "Show this message");
+    @SuppressWarnings("static-access")
     Option inDirs = OptionBuilder
         .withArgName("inputDirs")
         .isRequired()
         .withDescription("Comma separated list of crawl directories (e.g., \"./crawl1,./crawl2\")")
         .hasArgs()
         .create("inputDirs");
+    @SuppressWarnings("static-access")
     Option outDir = OptionBuilder
         .withArgName("outputDir")
         .isRequired()
         .withDescription("Output directory where results should be dumped")
         .hasArgs()
         .create("outputDir");
+    @SuppressWarnings("static-access")
     Option modeOpt = OptionBuilder
         .withArgName("mode")
         .isRequired()
         .withDescription("Set statistics gathering mode (by 'host' or by 'domain')")
         .hasArgs()
         .create("mode");
+    @SuppressWarnings("static-access")
     Option numReducers = OptionBuilder
         .withArgName("numReducers")
         .withDescription("Optional number of reduce jobs to use. Defaults to 1")
diff --git a/src/java/org/apache/nutch/util/EncodingDetector.java b/src/java/org/apache/nutch/util/EncodingDetector.java
index ba3620529..01e65e56b 100644
--- a/src/java/org/apache/nutch/util/EncodingDetector.java
+++ b/src/java/org/apache/nutch/util/EncodingDetector.java
@@ -79,10 +79,12 @@ public EncodingClue(String value, String source, int confidence) {
       this.confidence = confidence;
     }
 
+    @SuppressWarnings("unused")
     public String getSource() {
       return source;
     }
 
+    @SuppressWarnings("unused")
     public String getValue() {
       return value;
     }
@@ -354,6 +356,7 @@ public static void main(String[] args) throws IOException {
         NutchConfiguration.create());
 
     // do everything as bytes; don't want any conversion
+    @SuppressWarnings("resource")
     BufferedInputStream istr = new BufferedInputStream(new FileInputStream(
         args[0]));
     ByteArrayOutputStream ostr = new ByteArrayOutputStream();
diff --git a/src/java/org/apache/nutch/util/GenericWritableConfigurable.java b/src/java/org/apache/nutch/util/GenericWritableConfigurable.java
index 755aad010..086ca9bc0 100644
--- a/src/java/org/apache/nutch/util/GenericWritableConfigurable.java
+++ b/src/java/org/apache/nutch/util/GenericWritableConfigurable.java
@@ -46,7 +46,7 @@ public void readFields(DataInput in) throws IOException {
     byte type = in.readByte();
     Class<?> clazz = getTypes()[type];
     try {
-      set((Writable) clazz.newInstance());
+      set((Writable) clazz.getConstructor().newInstance());
     } catch (Exception e) {
       e.printStackTrace();
       throw new IOException("Cannot initialize the class: " + clazz);
diff --git a/src/java/org/apache/nutch/util/domain/DomainStatistics.java b/src/java/org/apache/nutch/util/domain/DomainStatistics.java
index 1eec59e85..32ba10fc8 100644
--- a/src/java/org/apache/nutch/util/domain/DomainStatistics.java
+++ b/src/java/org/apache/nutch/util/domain/DomainStatistics.java
@@ -63,8 +63,6 @@
   private static final int MODE_SUFFIX = 3;
   private static final int MODE_TLD = 4;
 
-  private int mode = 0;
-
   public int run(String[] args) throws Exception {
     if (args.length < 3) {
       System.err.println("Usage: DomainStatistics inputDirs outDir mode [numOfReducer]");
diff --git a/src/plugin/any23/src/test/org/apache/nutch/any23/TestAny23ParseFilter.java b/src/plugin/any23/src/test/org/apache/nutch/any23/TestAny23ParseFilter.java
index 4271730c3..251dfaf5d 100644
--- a/src/plugin/any23/src/test/org/apache/nutch/any23/TestAny23ParseFilter.java
+++ b/src/plugin/any23/src/test/org/apache/nutch/any23/TestAny23ParseFilter.java
@@ -16,31 +16,18 @@
  */
 package org.apache.nutch.any23;
 
-import java.io.DataInputStream;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-import org.apache.avro.util.Utf8;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
-import org.apache.nutch.indexer.NutchDocument;
-import org.apache.nutch.metadata.Metadata;
-import org.apache.nutch.parse.Outlink;
 import org.apache.nutch.parse.Parse;
-import org.apache.nutch.parse.ParseData;
 import org.apache.nutch.parse.ParseException;
-import org.apache.nutch.parse.ParseImpl;
-import org.apache.nutch.parse.ParseStatus;
 import org.apache.nutch.parse.ParseUtil;
 import org.apache.nutch.parse.ParserNotFound;
 import org.apache.nutch.protocol.Content;
 import org.apache.nutch.protocol.Protocol;
 import org.apache.nutch.protocol.ProtocolFactory;
-import org.apache.nutch.util.MimeUtil;
 import org.apache.nutch.util.NutchConfiguration;
 import org.junit.Assert;
 import org.junit.Before;
diff --git a/src/plugin/creativecommons/src/test/org/creativecommons/nutch/TestCCParseFilter.java b/src/plugin/creativecommons/src/test/org/creativecommons/nutch/TestCCParseFilter.java
old mode 100755
new mode 100644
diff --git a/src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java b/src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java
index f5d4807af..924300919 100644
--- a/src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java
+++ b/src/plugin/feed/src/test/org/apache/nutch/parse/feed/TestFeedParser.java
@@ -17,16 +17,12 @@
 
 package org.apache.nutch.parse.feed;
 
-// JDK imports
-import java.lang.invoke.MethodHandles;
 import java.util.Iterator;
 import java.util.Map;
 
 import org.junit.Assert;
 import org.junit.Test;
-// APACHE imports
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -58,10 +54,6 @@
   // ./src/plugin/feed/build.xml during plugin compilation.
 
   private String[] sampleFiles = { "rsstest.rss" };
-
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   /**
    * Calls the {@link FeedParser} on a sample RSS file and checks that there are
    * 3 {@link ParseResult} entries including the below 2 links:
diff --git a/src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java b/src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java
index 5e1233b35..0e62fea68 100644
--- a/src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java
+++ b/src/plugin/index-basic/src/java/org/apache/nutch/indexer/basic/BasicIndexingFilter.java
@@ -17,9 +17,6 @@
 
 package org.apache.nutch.indexer.basic;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.nutch.metadata.Nutch;
 import org.apache.nutch.parse.Parse;
 
@@ -33,7 +30,6 @@
 import org.apache.nutch.crawl.CrawlDatum;
 import org.apache.nutch.crawl.Inlinks;
 
-import java.lang.invoke.MethodHandles;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Date;
@@ -49,8 +45,6 @@
  * {@code indexer.max.content.length} in nutch-default.xml.
  */
 public class BasicIndexingFilter implements IndexingFilter {
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
 
   private int MAX_TITLE_LENGTH;
   private int MAX_CONTENT_LENGTH;
diff --git a/src/plugin/index-geoip/src/java/org/apache/nutch/indexer/geoip/GeoIPDocumentCreator.java b/src/plugin/index-geoip/src/java/org/apache/nutch/indexer/geoip/GeoIPDocumentCreator.java
index 88d78ef37..38e75b151 100644
--- a/src/plugin/index-geoip/src/java/org/apache/nutch/indexer/geoip/GeoIPDocumentCreator.java
+++ b/src/plugin/index-geoip/src/java/org/apache/nutch/indexer/geoip/GeoIPDocumentCreator.java
@@ -111,8 +111,9 @@ public static NutchDocument createDocFromInsightsService(String serverIp,
     doc.add("isp", traits.getIsp());
     doc.add("org", traits.getOrganization());
     doc.add("userType", traits.getUserType());
+    //for better results, users should upgrade to
+    //https://www.maxmind.com/en/solutions/geoip2-enterprise-product-suite/anonymous-ip-database
     doc.add("isAnonProxy", traits.isAnonymousProxy());
-    doc.add("isSatelliteProv", traits.isSatelliteProvider());
     return doc;
   }
 
diff --git a/src/plugin/index-jexl-filter/src/java/org/apache/nutch/indexer/jexl/JexlIndexingFilter.java b/src/plugin/index-jexl-filter/src/java/org/apache/nutch/indexer/jexl/JexlIndexingFilter.java
index 24284a67b..3fa2294a1 100644
--- a/src/plugin/index-jexl-filter/src/java/org/apache/nutch/indexer/jexl/JexlIndexingFilter.java
+++ b/src/plugin/index-jexl-filter/src/java/org/apache/nutch/indexer/jexl/JexlIndexingFilter.java
@@ -61,7 +61,7 @@ public NutchDocument filter(NutchDocument doc, Parse parse, Text url,
     jcontext.set("fetchTime", (long) (datum.getFetchTime()));
     jcontext.set("modifiedTime", (long) (datum.getModifiedTime()));
     jcontext.set("retries", datum.getRetriesSinceFetch());
-    jcontext.set("interval", new Integer(datum.getFetchInterval()));
+    jcontext.set("interval", Integer.valueOf(datum.getFetchInterval()));
     jcontext.set("score", datum.getScore());
     jcontext.set("signature", StringUtil.toHexString(datum.getSignature()));
     jcontext.set("url", url.toString());
diff --git a/src/plugin/index-links/src/test/org/apache/nutch/indexer/links/TestLinksIndexingFilter.java b/src/plugin/index-links/src/test/org/apache/nutch/indexer/links/TestLinksIndexingFilter.java
index a71f8bac7..0b22a9805 100644
--- a/src/plugin/index-links/src/test/org/apache/nutch/indexer/links/TestLinksIndexingFilter.java
+++ b/src/plugin/index-links/src/test/org/apache/nutch/indexer/links/TestLinksIndexingFilter.java
@@ -36,7 +36,6 @@
 import org.junit.Test;
 
 import java.net.URL;
-import java.util.Iterator;
 
 public class TestLinksIndexingFilter {
 
diff --git a/src/plugin/index-replace/src/java/org/apache/nutch/indexer/replace/ReplaceIndexer.java b/src/plugin/index-replace/src/java/org/apache/nutch/indexer/replace/ReplaceIndexer.java
index cb178b2f1..4066ce05a 100644
--- a/src/plugin/index-replace/src/java/org/apache/nutch/indexer/replace/ReplaceIndexer.java
+++ b/src/plugin/index-replace/src/java/org/apache/nutch/indexer/replace/ReplaceIndexer.java
@@ -201,7 +201,7 @@ private void parseConf(String propertyValue) {
                   continue;
                 }
               }
-              Integer iFlags = (flags > 0) ? new Integer(flags) : null;
+              Integer iFlags = (flags > 0) ? Integer.valueOf(flags) : null;
 
               // Make a FieldReplacer out of these params.
               FieldReplacer fr = new FieldReplacer(fieldName, toFieldName,
diff --git a/src/plugin/indexer-cloudsearch/src/java/org/apache/nutch/indexwriter/cloudsearch/CloudSearchIndexWriter.java b/src/plugin/indexer-cloudsearch/src/java/org/apache/nutch/indexwriter/cloudsearch/CloudSearchIndexWriter.java
index 3973485b0..2d72d9fa8 100644
--- a/src/plugin/indexer-cloudsearch/src/java/org/apache/nutch/indexwriter/cloudsearch/CloudSearchIndexWriter.java
+++ b/src/plugin/indexer-cloudsearch/src/java/org/apache/nutch/indexwriter/cloudsearch/CloudSearchIndexWriter.java
@@ -311,6 +311,7 @@ public void commit() throws IOException {
       batch.setContentLength((long) bb.length);
       batch.setContentType(ContentType.Applicationjson);
       batch.setDocuments(inputStream);
+      @SuppressWarnings("unused")
       UploadDocumentsResult result = client.uploadDocuments(batch);
     } catch (Exception e) {
       LOG.error("Exception while sending batch", e);
diff --git a/src/plugin/indexer-dummy/src/java/org/apache/nutch/indexwriter/dummy/DummyIndexWriter.java b/src/plugin/indexer-dummy/src/java/org/apache/nutch/indexwriter/dummy/DummyIndexWriter.java
index 064d8f627..7a4003674 100644
--- a/src/plugin/indexer-dummy/src/java/org/apache/nutch/indexwriter/dummy/DummyIndexWriter.java
+++ b/src/plugin/indexer-dummy/src/java/org/apache/nutch/indexwriter/dummy/DummyIndexWriter.java
@@ -21,14 +21,10 @@
 import java.io.IOException;
 import java.io.FileWriter;
 import java.io.Writer;
-import java.util.Map;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.nutch.indexer.IndexWriter;
 import org.apache.nutch.indexer.IndexWriterParams;
-import org.apache.nutch.indexer.IndexerMapReduce;
 import org.apache.nutch.indexer.NutchDocument;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/src/plugin/indexer-elastic-rest/src/java/org/apache/nutch/indexwriter/elasticrest/ElasticRestIndexWriter.java b/src/plugin/indexer-elastic-rest/src/java/org/apache/nutch/indexwriter/elasticrest/ElasticRestIndexWriter.java
index 3bd9d41d6..f40f0b8fc 100644
--- a/src/plugin/indexer-elastic-rest/src/java/org/apache/nutch/indexwriter/elasticrest/ElasticRestIndexWriter.java
+++ b/src/plugin/indexer-elastic-rest/src/java/org/apache/nutch/indexwriter/elasticrest/ElasticRestIndexWriter.java
@@ -14,9 +14,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
-//TODO refactor the dependencies out of root ivy file
-
 package org.apache.nutch.indexwriter.elasticrest;
 
 import io.searchbox.client.JestClient;
@@ -54,11 +51,9 @@
 import java.security.NoSuchAlgorithmException;
 import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.LinkedHashSet;
-import java.util.List;
 import java.util.Set;
 import java.util.Date;
 import java.util.concurrent.ExecutionException;
diff --git a/src/plugin/indexer-elastic/src/java/org/apache/nutch/indexwriter/elastic/ElasticIndexWriter.java b/src/plugin/indexer-elastic/src/java/org/apache/nutch/indexwriter/elastic/ElasticIndexWriter.java
index 1540241b0..5fa2def90 100644
--- a/src/plugin/indexer-elastic/src/java/org/apache/nutch/indexwriter/elastic/ElasticIndexWriter.java
+++ b/src/plugin/indexer-elastic/src/java/org/apache/nutch/indexwriter/elastic/ElasticIndexWriter.java
@@ -159,6 +159,7 @@ protected Client makeClient(IndexWriterParams parameters) throws IOException {
 
     // Prefer TransportClient
     if (hosts != null && port > 1) {
+      @SuppressWarnings("resource")
       TransportClient transportClient = new PreBuiltTransportClient(settings);
 
       for (String host : hosts)
diff --git a/src/plugin/indexer-elastic/src/test/org/apache/nutch/indexwriter/elastic/TestElasticIndexWriter.java b/src/plugin/indexer-elastic/src/test/org/apache/nutch/indexwriter/elastic/TestElasticIndexWriter.java
index dc59cd46f..6fb1ab2d3 100644
--- a/src/plugin/indexer-elastic/src/test/org/apache/nutch/indexwriter/elastic/TestElasticIndexWriter.java
+++ b/src/plugin/indexer-elastic/src/test/org/apache/nutch/indexwriter/elastic/TestElasticIndexWriter.java
@@ -136,6 +136,7 @@ public void beforeBulk(long executionId, BulkRequest request) { }
   public void testBulkMaxDocs() throws IOException {
     int numDocs = 10;
     conf.setInt(ElasticConstants.MAX_BULK_DOCS, numDocs);
+    @SuppressWarnings("unused")
     Job job = Job.getInstance(conf);
 
     Map<String, String> parameters = new HashMap<>();
@@ -175,6 +176,7 @@ public void testBulkMaxLength() throws IOException {
     int numDocs = testMaxBulkLength / (key.length() + value.length());
 
     conf.setInt(ElasticConstants.MAX_BULK_LENGTH, testMaxBulkLength);
+    @SuppressWarnings("unused")
     Job job = Job.getInstance(conf);
 
     Map<String, String> parameters = new HashMap<>();
@@ -207,6 +209,7 @@ public void testBackoffPolicy() throws IOException {
     int numDocs = 10;
     conf.setInt(ElasticConstants.MAX_BULK_DOCS, numDocs);
 
+    @SuppressWarnings("unused")
     Job job = Job.getInstance(conf);
 
     Map<String, String> parameters = new HashMap<>();
diff --git a/src/plugin/indexer-rabbit/src/java/org/apache/nutch/indexwriter/rabbit/RabbitDocument.java b/src/plugin/indexer-rabbit/src/java/org/apache/nutch/indexwriter/rabbit/RabbitDocument.java
index dd0c3092d..1f08d42dc 100644
--- a/src/plugin/indexer-rabbit/src/java/org/apache/nutch/indexwriter/rabbit/RabbitDocument.java
+++ b/src/plugin/indexer-rabbit/src/java/org/apache/nutch/indexwriter/rabbit/RabbitDocument.java
@@ -24,6 +24,7 @@
 class RabbitDocument {
   private List<RabbitDocumentField> fields;
 
+  @SuppressWarnings("unused")
   private float documentBoost;
 
   RabbitDocument() {
@@ -49,6 +50,7 @@ void addField(RabbitDocumentField field) {
 
   static class RabbitDocumentField {
     private String key;
+    @SuppressWarnings("unused")
     private float weight;
     private List<Object> values;
 
diff --git a/src/plugin/mimetype-filter/src/java/org/apache/nutch/indexer/filter/MimeTypeIndexingFilter.java b/src/plugin/mimetype-filter/src/java/org/apache/nutch/indexer/filter/MimeTypeIndexingFilter.java
index 99c59a62c..cf92a7df5 100644
--- a/src/plugin/mimetype-filter/src/java/org/apache/nutch/indexer/filter/MimeTypeIndexingFilter.java
+++ b/src/plugin/mimetype-filter/src/java/org/apache/nutch/indexer/filter/MimeTypeIndexingFilter.java
@@ -211,6 +211,7 @@ public Configuration getConf() {
    */
   public static void main(String[] args) throws IOException, IndexingException {
     Option helpOpt = new Option("h", "help", false, "show this help message");
+    @SuppressWarnings("static-access")
     Option rulesOpt = OptionBuilder.withArgName("file").hasArg()
         .withDescription(
             "Rules file to be used in the tests relative to the conf directory")
diff --git a/src/plugin/mimetype-filter/src/test/org/apache/nutch/indexer/filter/MimeTypeIndexingFilterTest.java b/src/plugin/mimetype-filter/src/test/org/apache/nutch/indexer/filter/MimeTypeIndexingFilterTest.java
index bca230f35..4522f992f 100644
--- a/src/plugin/mimetype-filter/src/test/org/apache/nutch/indexer/filter/MimeTypeIndexingFilterTest.java
+++ b/src/plugin/mimetype-filter/src/test/org/apache/nutch/indexer/filter/MimeTypeIndexingFilterTest.java
@@ -45,7 +45,6 @@
   private MimeTypeIndexingFilter filter = new MimeTypeIndexingFilter();
   private String[] MIME_TYPES = { "text/html", "image/png", "application/pdf" };
   private ParseImpl[] parses = new ParseImpl[MIME_TYPES.length];
-  private String sampleDir = System.getProperty("test.data", ".");
 
   @Before
   public void setUp() throws Exception {
diff --git a/src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java b/src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java
index 78cd257e4..0d1d17e96 100644
--- a/src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java
+++ b/src/plugin/parse-html/src/java/org/apache/nutch/parse/html/HtmlParser.java
@@ -344,6 +344,7 @@ public static void main(String[] args) throws Exception {
     String url = "file:" + name;
     File file = new File(name);
     byte[] bytes = new byte[(int) file.length()];
+    @SuppressWarnings("resource")
     DataInputStream in = new DataInputStream(new FileInputStream(file));
     in.readFully(bytes);
     Configuration conf = NutchConfiguration.create();
diff --git a/src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java b/src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java
index 1c7d480cb..81d4485fe 100644
--- a/src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java
+++ b/src/plugin/parse-swf/src/java/org/apache/nutch/parse/swf/SWFParser.java
@@ -246,7 +246,7 @@ public void tagDefineFontInfo(int fontId, String fontName, int flags,
       int[] codes) throws IOException {
     // System.out.println("-defineFontInfo id=" + fontId + ", name=" +
     // fontName);
-    fontCodes.put(new Integer(fontId), codes);
+    fontCodes.put(Integer.valueOf(fontId), codes);
   }
 
   // XXX too much hassle for too little return ... we cannot guess character
@@ -263,7 +263,7 @@ public SWFVectors tagDefineFont2(int id, int flags, String name,
       int numGlyphs, int ascent, int descent, int leading, int[] codes,
       int[] advances, Rect[] bounds, int[] kernCodes1, int[] kernCodes2,
       int[] kernAdjustments) throws IOException {
-    fontCodes.put(new Integer(id), (codes != null) ? codes : new int[0]);
+    fontCodes.put(Integer.valueOf(id), (codes != null) ? codes : new int[0]);
 
     return null;
   }
diff --git a/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/BoilerpipeExtractorRepository.java b/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/BoilerpipeExtractorRepository.java
index a4146b388..8b6108d4c 100644
--- a/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/BoilerpipeExtractorRepository.java
+++ b/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/BoilerpipeExtractorRepository.java
@@ -41,7 +41,7 @@ public static synchronized BoilerpipeExtractor getExtractor(String boilerpipeExt
           Class extractorClass = loader.loadClass(boilerpipeExtractorName);
 
           // Add an instance to the repository
-          extractorRepository.put(boilerpipeExtractorName, (BoilerpipeExtractor)extractorClass.newInstance());
+          extractorRepository.put(boilerpipeExtractorName, (BoilerpipeExtractor)extractorClass.getConstructor().newInstance());
 
         } catch (ClassNotFoundException e) {
           LOG.error("BoilerpipeExtractor " + boilerpipeExtractorName + " not found!");
diff --git a/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java b/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java
index 40d82bcaf..e3469408b 100644
--- a/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java
+++ b/src/plugin/parse-tika/src/java/org/apache/nutch/parse/tika/TikaParser.java
@@ -277,7 +277,7 @@ public void setConf(Configuration conf) {
           throw new RuntimeException("Class " + htmlmapperClassName
               + " does not implement HtmlMapper");
         }
-        HTMLMapper = (HtmlMapper) HTMLMapperClass.newInstance();
+        HTMLMapper = (HtmlMapper) HTMLMapperClass.getConstructor().newInstance();
       } catch (Exception e) {
         LOG.error("Can't generate instance for class " + htmlmapperClassName);
         throw new RuntimeException("Can't generate instance for class "
diff --git a/src/plugin/parse-tika/src/test/org/apache/nutch/parse/tika/TestFeedParser.java b/src/plugin/parse-tika/src/test/org/apache/nutch/parse/tika/TestFeedParser.java
index 2677395d8..3a4d70a64 100644
--- a/src/plugin/parse-tika/src/test/org/apache/nutch/parse/tika/TestFeedParser.java
+++ b/src/plugin/parse-tika/src/test/org/apache/nutch/parse/tika/TestFeedParser.java
@@ -17,12 +17,8 @@
 
 package org.apache.nutch.parse.tika;
 
-import java.lang.invoke.MethodHandles;
-
 import org.junit.Assert;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -54,9 +50,6 @@
 
   private String[] sampleFiles = { "rsstest.rss" };
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   /**
    * <p>
    * The test method: tests out the following 2 asserts:
diff --git a/src/plugin/parsefilter-regex/src/java/org/apache/nutch/parsefilter/regex/RegexParseFilter.java b/src/plugin/parsefilter-regex/src/java/org/apache/nutch/parsefilter/regex/RegexParseFilter.java
index 2209ceb5f..c2661a566 100644
--- a/src/plugin/parsefilter-regex/src/java/org/apache/nutch/parsefilter/regex/RegexParseFilter.java
+++ b/src/plugin/parsefilter-regex/src/java/org/apache/nutch/parsefilter/regex/RegexParseFilter.java
@@ -55,7 +55,6 @@
   private String regexFile = null;
   
   private Configuration conf;
-  private DocumentFragment doc;
   
   private static final Map<String,RegexRule> rules = new HashMap<>();
   
diff --git a/src/plugin/parsefilter-regex/src/test/org/apache/nutch/parsefilter/regex/TestRegexParseFilter.java b/src/plugin/parsefilter-regex/src/test/org/apache/nutch/parsefilter/regex/TestRegexParseFilter.java
index 7bf21c4ee..238d30042 100644
--- a/src/plugin/parsefilter-regex/src/test/org/apache/nutch/parsefilter/regex/TestRegexParseFilter.java
+++ b/src/plugin/parsefilter-regex/src/test/org/apache/nutch/parsefilter/regex/TestRegexParseFilter.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nutch.parsefilter.regex;
 
-import java.net.MalformedURLException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.metadata.Metadata;
 import org.apache.nutch.parse.Parse;
diff --git a/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileError.java b/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileError.java
index 4fef340ef..2019de00c 100644
--- a/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileError.java
+++ b/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileError.java
@@ -22,6 +22,7 @@
  */
 public class FileError extends FileException {
 
+  @SuppressWarnings("unused")
   private int code;
 
   public int getCode(int code) {
diff --git a/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileResponse.java b/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileResponse.java
index 4b6666af7..ce982702c 100644
--- a/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileResponse.java
+++ b/src/plugin/protocol-file/src/java/org/apache/nutch/protocol/file/FileResponse.java
@@ -226,7 +226,7 @@ private void getFileAsHttpResponse(java.io.File f) throws FileException,
     is.close();
 
     // set headers
-    headers.set(Response.CONTENT_LENGTH, new Long(size).toString());
+    headers.set(Response.CONTENT_LENGTH, Long.valueOf(size).toString());
     headers.set(Response.LAST_MODIFIED,
         HttpDateFormat.toString(f.lastModified()));
 
@@ -255,7 +255,7 @@ private void getDirAsHttpResponse(java.io.File f) throws IOException {
 
     // set headers
     headers.set(Response.CONTENT_LENGTH,
-        new Integer(this.content.length).toString());
+        Integer.valueOf(this.content.length).toString());
     headers.set(Response.CONTENT_TYPE, "text/html");
     headers.set(Response.LAST_MODIFIED,
         HttpDateFormat.toString(f.lastModified()));
diff --git a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
index eeba77664..6d21b5043 100644
--- a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
+++ b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/Ftp.java
@@ -182,6 +182,7 @@ protected void finalize() {
   public static void main(String[] args) throws Exception {
     int timeout = Integer.MIN_VALUE;
     int maxContentLength = Integer.MIN_VALUE;
+    @SuppressWarnings("unused")
     String logLevel = "info";
     boolean followTalk = false;
     boolean keepConnection = false;
diff --git a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpError.java b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpError.java
index b63a67ed5..558747a52 100644
--- a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpError.java
+++ b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpError.java
@@ -22,6 +22,7 @@
  */
 public class FtpError extends FtpException {
 
+  @SuppressWarnings("unused")
   private int code;
 
   public int getCode(int code) {
diff --git a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java
index 51bc6bfc9..07adb4c98 100644
--- a/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java
+++ b/src/plugin/protocol-ftp/src/java/org/apache/nutch/protocol/ftp/FtpResponse.java
@@ -320,7 +320,7 @@ private void getFileAsHttpResponse(String path, long lastModified)
 
       FTPFile ftpFile = (FTPFile) list.get(0);
       this.headers.set(Response.CONTENT_LENGTH,
-          new Long(ftpFile.getSize()).toString());
+          Long.valueOf(ftpFile.getSize()).toString());
       this.headers.set(Response.LAST_MODIFIED,
           HttpDateFormat.toString(ftpFile.getTimestamp()));
       // don't retrieve the file if not changed.
@@ -367,7 +367,7 @@ private void getFileAsHttpResponse(String path, long lastModified)
 
       FTPFile ftpFile = (FTPFile) list.get(0);
       this.headers.set(Response.CONTENT_LENGTH,
-          new Long(ftpFile.getSize()).toString());
+          Long.valueOf(ftpFile.getSize()).toString());
       // this.headers.put("content-type", "text/html");
       this.headers.set(Response.LAST_MODIFIED,
           HttpDateFormat.toString(ftpFile.getTimestamp()));
@@ -428,7 +428,7 @@ private void getDirAsHttpResponse(String path, long lastModified)
       ftp.client.retrieveList(null, list, ftp.maxContentLength, ftp.parser);
       this.content = list2html(list, path, "/".equals(path) ? false : true);
       this.headers.set(Response.CONTENT_LENGTH,
-          new Integer(this.content.length).toString());
+          Integer.valueOf(this.content.length).toString());
       this.headers.set(Response.CONTENT_TYPE, "text/html");
       // this.headers.put("Last-Modified", null);
 
@@ -452,7 +452,7 @@ private void getDirAsHttpResponse(String path, long lastModified)
 
       this.content = list2html(list, path, "/".equals(path) ? false : true);
       this.headers.set(Response.CONTENT_LENGTH,
-          new Integer(this.content.length).toString());
+          Integer.valueOf(this.content.length).toString());
       this.headers.set(Response.CONTENT_TYPE, "text/html");
       // this.headers.put("Last-Modified", null);
 
diff --git a/src/plugin/protocol-htmlunit/src/java/org/apache/nutch/protocol/htmlunit/HttpResponse.java b/src/plugin/protocol-htmlunit/src/java/org/apache/nutch/protocol/htmlunit/HttpResponse.java
index 19c00fde5..361b41eb1 100644
--- a/src/plugin/protocol-htmlunit/src/java/org/apache/nutch/protocol/htmlunit/HttpResponse.java
+++ b/src/plugin/protocol-htmlunit/src/java/org/apache/nutch/protocol/htmlunit/HttpResponse.java
@@ -51,7 +51,9 @@
   private Configuration conf;
   private HttpBase http;
   private URL url;
+  @SuppressWarnings("unused")
   private String orig;
+  @SuppressWarnings("unused")
   private String base;
   private byte[] content;
   private int code;
diff --git a/src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/Http.java b/src/plugin/protocol-http/src/java/org/apache/nutch/protocol/http/Http.java
old mode 100755
new mode 100644
diff --git a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java
index 2102f8078..44683cc2c 100644
--- a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java
+++ b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/DummyX509TrustManager.java
@@ -20,7 +20,6 @@
 
 package org.apache.nutch.protocol.httpclient;
 
-import java.lang.invoke.MethodHandles;
 import java.security.KeyStore;
 import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
@@ -30,16 +29,10 @@
 import javax.net.ssl.TrustManagerFactory;
 import javax.net.ssl.TrustManager;
 import javax.net.ssl.X509TrustManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class DummyX509TrustManager implements X509TrustManager {
   private X509TrustManager standardTrustManager = null;
 
-  /** Logger object for this class. */
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   /**
    * Constructor for DummyX509TrustManager.
    */
diff --git a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java
index e02d904c1..c185f9bdc 100644
--- a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java
+++ b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/Http.java
@@ -130,7 +130,7 @@ public Http() {
    */
   public void setConf(Configuration conf) {
     super.setConf(conf);
-    this.conf = conf;
+    Http.conf = conf;
     this.maxThreadsTotal = conf.getInt("fetcher.threads.fetch", 10);
     this.proxyUsername = conf.get("http.proxy.username", "");
     this.proxyPassword = conf.get("http.proxy.password", "");
diff --git a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java
index 35d6bd553..506902d36 100644
--- a/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java
+++ b/src/plugin/protocol-httpclient/src/java/org/apache/nutch/protocol/httpclient/HttpBasicAuthentication.java
@@ -51,6 +51,7 @@
   private static Map<String, HttpBasicAuthentication> authMap = new TreeMap<String, HttpBasicAuthentication>();
 
   private Configuration conf = null;
+  @SuppressWarnings("unused")
   private String challenge = null;
   private ArrayList<String> credentials = null;
   private String realm = null;
diff --git a/src/plugin/protocol-interactiveselenium/src/java/org/apache/nutch/protocol/interactiveselenium/HttpResponse.java b/src/plugin/protocol-interactiveselenium/src/java/org/apache/nutch/protocol/interactiveselenium/HttpResponse.java
index 7f961d9a2..6d91b33b7 100644
--- a/src/plugin/protocol-interactiveselenium/src/java/org/apache/nutch/protocol/interactiveselenium/HttpResponse.java
+++ b/src/plugin/protocol-interactiveselenium/src/java/org/apache/nutch/protocol/interactiveselenium/HttpResponse.java
@@ -22,6 +22,7 @@
 import java.io.OutputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.PushbackInputStream;
+import java.lang.reflect.InvocationTargetException;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.net.URL;
@@ -342,7 +343,11 @@ private void loadSeleniumHandlers() {
     for (int i = 0; i < handlerNames.length; i++) {
         try {
             String classToLoad = this.getClass().getPackage().getName() + ".handlers." + handlerNames[i];
-            handlers[i] = InteractiveSeleniumHandler.class.cast(Class.forName(classToLoad).newInstance());
+            try {
+              handlers[i] = InteractiveSeleniumHandler.class.cast(Class.forName(classToLoad).getConstructor().newInstance());
+            } catch (IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
+              e.printStackTrace();
+            }
             Http.LOG.info("Successfully loaded " + classToLoad);
         } catch (ClassNotFoundException e) {
             Http.LOG.info("Unable to load Handler class for: " + handlerNames[i]);
diff --git a/src/plugin/protocol-okhttp/src/java/org/apache/nutch/protocol/okhttp/OkHttp.java b/src/plugin/protocol-okhttp/src/java/org/apache/nutch/protocol/okhttp/OkHttp.java
old mode 100755
new mode 100644
diff --git a/src/plugin/scoring-orphan/src/java/org/apache/nutch/scoring/orphan/OrphanScoringFilter.java b/src/plugin/scoring-orphan/src/java/org/apache/nutch/scoring/orphan/OrphanScoringFilter.java
index 24cc36625..790025905 100644
--- a/src/plugin/scoring-orphan/src/java/org/apache/nutch/scoring/orphan/OrphanScoringFilter.java
+++ b/src/plugin/scoring-orphan/src/java/org/apache/nutch/scoring/orphan/OrphanScoringFilter.java
@@ -40,7 +40,6 @@
 
   public static Text ORPHAN_KEY_WRITABLE = new Text("_orphan_");
 
-  private Configuration conf;
   private static int DEFAULT_GONE_TIME = 30 * 24 * 60 * 60;
   private static int DEFAULT_ORPHAN_TIME = 40 * 24 * 60 * 60;
 
diff --git a/src/plugin/scoring-similarity/src/java/org/apache/nutch/scoring/similarity/cosine/Model.java b/src/plugin/scoring-similarity/src/java/org/apache/nutch/scoring/similarity/cosine/Model.java
index 05b85da88..b1c56d0ee 100644
--- a/src/plugin/scoring-similarity/src/java/org/apache/nutch/scoring/similarity/cosine/Model.java
+++ b/src/plugin/scoring-similarity/src/java/org/apache/nutch/scoring/similarity/cosine/Model.java
@@ -172,7 +172,6 @@ public static float computeCosineSimilarity(DocVector docVector) {
     String[] ngramStr = conf.getStrings("scoring.similarity.ngrams", "1,1");
     //mingram
     ngramArr[0] = Integer.parseInt(ngramStr[0]);
-    int maxgram;
     if (ngramStr.length > 1) {
       //maxgram
       ngramArr[1] = Integer.parseInt(ngramStr[1]);
diff --git a/src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java b/src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java
index 6bad96443..898d31436 100644
--- a/src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java
+++ b/src/plugin/subcollection/src/java/org/apache/nutch/indexer/subcollection/SubcollectionIndexingFilter.java
@@ -16,15 +16,10 @@
  */
 package org.apache.nutch.indexer.subcollection;
 
-import java.lang.invoke.MethodHandles;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.nutch.parse.Parse;
 import org.apache.nutch.util.NutchConfiguration;
 
@@ -76,12 +71,6 @@ public Configuration getConf() {
    */
   public static String metadataSource = "subcollection";
 
-  /**
-   * Logger
-   */
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   /**
    * "Mark" document to be a part of subcollection
    *
diff --git a/src/plugin/urlfilter-ignoreexempt/src/java/org/apache/nutch/urlfilter/ignoreexempt/ExemptionUrlFilter.java b/src/plugin/urlfilter-ignoreexempt/src/java/org/apache/nutch/urlfilter/ignoreexempt/ExemptionUrlFilter.java
index 07523fe1b..675d85756 100644
--- a/src/plugin/urlfilter-ignoreexempt/src/java/org/apache/nutch/urlfilter/ignoreexempt/ExemptionUrlFilter.java
+++ b/src/plugin/urlfilter-ignoreexempt/src/java/org/apache/nutch/urlfilter/ignoreexempt/ExemptionUrlFilter.java
@@ -20,10 +20,6 @@
 import org.apache.nutch.net.URLExemptionFilter;
 import org.apache.nutch.util.NutchConfiguration;
 import org.apache.nutch.urlfilter.regex.RegexURLFilter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.lang.invoke.MethodHandles;
 import java.io.IOException;
 import java.io.Reader;
 import java.util.regex.Pattern;
@@ -56,11 +52,8 @@
 
   public static final String DB_IGNORE_EXTERNAL_EXEMPTIONS_FILE
       = "db.ignore.external.exemptions.file";
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
 
   private List<Pattern> exemptions;
-  private Configuration conf;
 
   public List<Pattern> getExemptions() {
     return exemptions;
diff --git a/src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java b/src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java
index fcd32556c..3e3b8bcbb 100644
--- a/src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java
+++ b/src/plugin/urlmeta/src/java/org/apache/nutch/indexer/urlmeta/URLMetaIndexingFilter.java
@@ -17,10 +17,6 @@
 
 package org.apache.nutch.indexer.urlmeta;
 
-import java.lang.invoke.MethodHandles;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.nutch.crawl.CrawlDatum;
@@ -69,8 +65,6 @@
  */
 public class URLMetaIndexingFilter implements IndexingFilter {
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
   private static final String CONF_PROPERTY = "urlmeta.tags";
   private static String[] urlMetaTags;
   private Configuration conf;
diff --git a/src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java b/src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java
index 543edfd2d..8c9eface0 100644
--- a/src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java
+++ b/src/plugin/urlmeta/src/java/org/apache/nutch/scoring/urlmeta/URLMetaScoringFilter.java
@@ -17,14 +17,11 @@
 
 package org.apache.nutch.scoring.urlmeta;
 
-import java.lang.invoke.MethodHandles;
 import java.util.Collection;
 import java.util.Map.Entry;
 import java.util.Iterator;
 import java.util.List;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.Text;
@@ -44,8 +41,6 @@
  */
 public class URLMetaScoringFilter extends Configured implements ScoringFilter {
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
   private static final String CONF_PROPERTY = "urlmeta.tags";
   private static String[] urlMetaTags;
   private Configuration conf;
diff --git a/src/plugin/urlnormalizer-ajax/src/java/org/apache/nutch/net/urlnormalizer/ajax/AjaxURLNormalizer.java b/src/plugin/urlnormalizer-ajax/src/java/org/apache/nutch/net/urlnormalizer/ajax/AjaxURLNormalizer.java
index 36794262f..7d1d3f053 100644
--- a/src/plugin/urlnormalizer-ajax/src/java/org/apache/nutch/net/urlnormalizer/ajax/AjaxURLNormalizer.java
+++ b/src/plugin/urlnormalizer-ajax/src/java/org/apache/nutch/net/urlnormalizer/ajax/AjaxURLNormalizer.java
@@ -114,7 +114,6 @@ protected String normalizeHashedFragment(String urlString) throws MalformedURLEx
    * @return String
    */
   protected String normalizeEscapedFragment(String urlString) throws MalformedURLException {
-    int pos = urlString.indexOf(ESCAPED_URL_PART);
     URL u = new URL(urlString);
     StringBuilder sb = new StringBuilder();
 
diff --git a/src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java b/src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java
index 24f2e6211..6a33690b7 100644
--- a/src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java
+++ b/src/plugin/urlnormalizer-basic/src/java/org/apache/nutch/net/urlnormalizer/basic/BasicURLNormalizer.java
@@ -279,7 +279,7 @@ private String unescapePath(String path) {
 
       if (letter < 128 && unescapedCharacters[letter]) {
         // character should be unescaped in URLs
-        sb.append(new Character((char)letter));
+        sb.append(Character.valueOf((char)letter));
       } else {
         // Append the encoded character as uppercase
         sb.append(matcher.group().toUpperCase(Locale.ROOT));
diff --git a/src/plugin/urlnormalizer-protocol/src/test/org/apache/nutch/net/urlnormalizer/protocol/TestProtocolURLNormalizer.java b/src/plugin/urlnormalizer-protocol/src/test/org/apache/nutch/net/urlnormalizer/protocol/TestProtocolURLNormalizer.java
index 8880628e9..22005cee6 100644
--- a/src/plugin/urlnormalizer-protocol/src/test/org/apache/nutch/net/urlnormalizer/protocol/TestProtocolURLNormalizer.java
+++ b/src/plugin/urlnormalizer-protocol/src/test/org/apache/nutch/net/urlnormalizer/protocol/TestProtocolURLNormalizer.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nutch.net.urlnormalizer.protocol;
 
-import java.net.MalformedURLException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizers;
 import org.apache.nutch.util.NutchConfiguration;
diff --git a/src/plugin/urlnormalizer-querystring/src/java/org/apache/nutch/net/urlnormalizer/querystring/QuerystringURLNormalizer.java b/src/plugin/urlnormalizer-querystring/src/java/org/apache/nutch/net/urlnormalizer/querystring/QuerystringURLNormalizer.java
index 04f613792..dbaf4d253 100644
--- a/src/plugin/urlnormalizer-querystring/src/java/org/apache/nutch/net/urlnormalizer/querystring/QuerystringURLNormalizer.java
+++ b/src/plugin/urlnormalizer-querystring/src/java/org/apache/nutch/net/urlnormalizer/querystring/QuerystringURLNormalizer.java
@@ -16,15 +16,12 @@
  */
 package org.apache.nutch.net.urlnormalizer.querystring;
 
-import java.lang.invoke.MethodHandles;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizer;
@@ -39,9 +36,6 @@
 
   private Configuration conf;
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(MethodHandles.lookup().lookupClass());
-
   public QuerystringURLNormalizer() {
   }
 
diff --git a/src/plugin/urlnormalizer-querystring/src/test/org/apache/nutch/net/urlnormalizer/querystring/TestQuerystringURLNormalizer.java b/src/plugin/urlnormalizer-querystring/src/test/org/apache/nutch/net/urlnormalizer/querystring/TestQuerystringURLNormalizer.java
index b85c55dbe..e9a02cd2c 100644
--- a/src/plugin/urlnormalizer-querystring/src/test/org/apache/nutch/net/urlnormalizer/querystring/TestQuerystringURLNormalizer.java
+++ b/src/plugin/urlnormalizer-querystring/src/test/org/apache/nutch/net/urlnormalizer/querystring/TestQuerystringURLNormalizer.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nutch.net.urlnormalizer.querystring;
 
-import java.net.MalformedURLException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizers;
 import org.apache.nutch.util.NutchConfiguration;
diff --git a/src/plugin/urlnormalizer-slash/src/test/org/apache/nutch/net/urlnormalizer/slash/TestSlashURLNormalizer.java b/src/plugin/urlnormalizer-slash/src/test/org/apache/nutch/net/urlnormalizer/slash/TestSlashURLNormalizer.java
index c3585e46e..c5b38974c 100644
--- a/src/plugin/urlnormalizer-slash/src/test/org/apache/nutch/net/urlnormalizer/slash/TestSlashURLNormalizer.java
+++ b/src/plugin/urlnormalizer-slash/src/test/org/apache/nutch/net/urlnormalizer/slash/TestSlashURLNormalizer.java
@@ -16,8 +16,6 @@
  */
 package org.apache.nutch.net.urlnormalizer.slash;
 
-import java.net.MalformedURLException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.nutch.net.URLNormalizers;
 import org.apache.nutch.util.NutchConfiguration;
diff --git a/src/test/org/apache/nutch/crawl/CrawlDbUpdateTestDriver.java b/src/test/org/apache/nutch/crawl/CrawlDbUpdateTestDriver.java
index 583ed29da..74c54d5b4 100644
--- a/src/test/org/apache/nutch/crawl/CrawlDbUpdateTestDriver.java
+++ b/src/test/org/apache/nutch/crawl/CrawlDbUpdateTestDriver.java
@@ -19,7 +19,6 @@
 
 import java.lang.invoke.MethodHandles;
 import java.io.IOException;
-import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -27,25 +26,8 @@
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configuration.IntegerRanges;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.JobID;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.Partitioner;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.mapreduce.Reducer.Context;
-import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.TaskInputOutputContext;
-import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/src/test/org/apache/nutch/crawl/CrawlDbUpdateUtil.java b/src/test/org/apache/nutch/crawl/CrawlDbUpdateUtil.java
index 0ad941ceb..1d1f1e7c9 100644
--- a/src/test/org/apache/nutch/crawl/CrawlDbUpdateUtil.java
+++ b/src/test/org/apache/nutch/crawl/CrawlDbUpdateUtil.java
@@ -34,7 +34,6 @@
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
diff --git a/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java b/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
index f5ad82fd6..7188203d6 100644
--- a/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
+++ b/src/test/org/apache/nutch/crawl/TestCrawlDbMerger.java
@@ -31,7 +31,6 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.MapFile.Writer.Option;
-import org.apache.hadoop.mapreduce.Job;
 import org.apache.nutch.util.NutchConfiguration;
 import org.junit.After;
 import org.junit.Assert;
diff --git a/src/test/org/apache/nutch/segment/TestSegmentMergerCrawlDatums.java b/src/test/org/apache/nutch/segment/TestSegmentMergerCrawlDatums.java
index 93ef9c5ec..db7c067b2 100644
--- a/src/test/org/apache/nutch/segment/TestSegmentMergerCrawlDatums.java
+++ b/src/test/org/apache/nutch/segment/TestSegmentMergerCrawlDatums.java
@@ -74,8 +74,8 @@ public void setUp() throws Exception {
   @Test
   public void testSingleRandomSequence() throws Exception {
     Assert.assertEquals(
-        new Byte(CrawlDatum.STATUS_FETCH_SUCCESS),
-        new Byte(executeSequence(CrawlDatum.STATUS_FETCH_GONE,
+        Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS),
+        Byte.valueOf(executeSequence(CrawlDatum.STATUS_FETCH_GONE,
             CrawlDatum.STATUS_FETCH_SUCCESS, 256, false)));
   }
 
@@ -109,9 +109,9 @@ public void testMostlyRedirects() throws Exception {
     // Merge the segments and get status
     Path mergedSegment = merge(testDir, new Path[] { segment1, segment2,
         segment3, segment4, segment5, segment6, segment7, segment8 });
-    Byte status = new Byte(status = checkMergedSegment(testDir, mergedSegment));
+    Byte status = Byte.valueOf(status = checkMergedSegment(testDir, mergedSegment));
 
-    Assert.assertEquals(new Byte(CrawlDatum.STATUS_FETCH_SUCCESS), status);
+    Assert.assertEquals(Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS), status);
   }
 
   /**
@@ -147,8 +147,8 @@ public void testRandomizedSequences() throws Exception {
   @Test
   public void testRandomTestSequenceWithRedirects() throws Exception {
     Assert.assertEquals(
-        new Byte(CrawlDatum.STATUS_FETCH_SUCCESS),
-        new Byte(executeSequence(CrawlDatum.STATUS_FETCH_GONE,
+        Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS),
+        Byte.valueOf(executeSequence(CrawlDatum.STATUS_FETCH_GONE,
             CrawlDatum.STATUS_FETCH_SUCCESS, 128, true)));
   }
 
@@ -172,9 +172,9 @@ public void testFixedSequence() throws Exception {
     // Merge the segments and get status
     Path mergedSegment = merge(testDir, new Path[] { segment1, segment2,
         segment3 });
-    Byte status = new Byte(status = checkMergedSegment(testDir, mergedSegment));
+    Byte status = Byte.valueOf(status = checkMergedSegment(testDir, mergedSegment));
 
-    Assert.assertEquals(new Byte(CrawlDatum.STATUS_FETCH_SUCCESS), status);
+    Assert.assertEquals(Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS), status);
   }
 
   /**
@@ -192,9 +192,9 @@ public void testRedirFetchInOneSegment() throws Exception {
 
     // Merge the segments and get status
     Path mergedSegment = merge(testDir, new Path[] { segment });
-    Byte status = new Byte(status = checkMergedSegment(testDir, mergedSegment));
+    Byte status = Byte.valueOf(status = checkMergedSegment(testDir, mergedSegment));
 
-    Assert.assertEquals(new Byte(CrawlDatum.STATUS_FETCH_SUCCESS), status);
+    Assert.assertEquals(Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS), status);
   }
 
   /**
@@ -214,9 +214,9 @@ public void testEndsWithRedirect() throws Exception {
 
     // Merge the segments and get status
     Path mergedSegment = merge(testDir, new Path[] { segment1, segment2 });
-    Byte status = new Byte(status = checkMergedSegment(testDir, mergedSegment));
+    Byte status = Byte.valueOf(status = checkMergedSegment(testDir, mergedSegment));
 
-    Assert.assertEquals(new Byte(CrawlDatum.STATUS_FETCH_SUCCESS), status);
+    Assert.assertEquals(Byte.valueOf(CrawlDatum.STATUS_FETCH_SUCCESS), status);
   }
 
   /**
diff --git a/src/test/org/apache/nutch/service/TestNutchServer.java b/src/test/org/apache/nutch/service/TestNutchServer.java
index 021d031c4..4d42f7b80 100644
--- a/src/test/org/apache/nutch/service/TestNutchServer.java
+++ b/src/test/org/apache/nutch/service/TestNutchServer.java
@@ -21,7 +21,6 @@
 import javax.ws.rs.core.Response;
 
 import org.apache.cxf.jaxrs.client.WebClient;
-import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,6 +52,7 @@ public void testNutchServerStartup() {
     else {
       LOG.info("Testing admin endpoint");
       WebClient client = WebClient.create(ENDPOINT_ADDRESS + server.getPort());
+      @SuppressWarnings("unused")
       Response response = client.path("admin").get();
       //Assert.assertTrue(response.readEntity(String.class).contains("startDate"));
       response = client.path("stop").get();
diff --git a/src/test/org/apache/nutch/util/WritableTestUtils.java b/src/test/org/apache/nutch/util/WritableTestUtils.java
index 49bcfa97e..0822603c1 100644
--- a/src/test/org/apache/nutch/util/WritableTestUtils.java
+++ b/src/test/org/apache/nutch/util/WritableTestUtils.java
@@ -44,7 +44,7 @@ public static Writable writeRead(Writable before, Configuration conf)
     DataInputBuffer dib = new DataInputBuffer();
     dib.reset(dob.getData(), dob.getLength());
 
-    Writable after = (Writable) before.getClass().newInstance();
+    Writable after = (Writable) before.getClass().getConstructor().newInstance();
     if (conf != null) {
       ((Configurable) after).setConf(conf);
     }


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[hidden email]


> Fix deprecation warnings when building Nutch master branch under JDK 10.0.2+13
> ------------------------------------------------------------------------------
>
>                 Key: NUTCH-2633
>                 URL: https://issues.apache.org/jira/browse/NUTCH-2633
>             Project: Nutch
>          Issue Type: Improvement
>          Components: build
>    Affects Versions: 1.16
>         Environment: java version "10.0.2" 2018-07-17
> Java(TM) SE Runtime Environment 18.3 (build 10.0.2+13)
> Java HotSpot(TM) 64-Bit Server VM 18.3 (build 10.0.2+13, mixed mode)
> Nutch master 01c5d6ea17d7b60d25d4e65462b2a654f10680c3 (Thu Jul 26 14:55:38 2018 +0200)
>            Reporter: Lewis John McGibbney
>            Assignee: Lewis John McGibbney
>            Priority: Major
>             Fix For: 1.16
>
>
> I just got around to making a dev upgrade to >= JDK 10.
> When building master using environment JDK
> I get several compile-time deprecation warnings, which are reflected in the attached build log.
> Additionally, I get some issues with Ivy... see below
> {code}
> WARNING: An illegal reflective access operation has occurred
> WARNING: Illegal reflective access by org.apache.ivy.util.url.IvyAuthenticator (file:/Users/lmcgibbn/.ant/lib/ivy-2.3.0.jar) to field java.net.Authenticator.theAuthenticator
> WARNING: Please consider reporting this to the maintainers of org.apache.ivy.util.url.IvyAuthenticator
> WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
> WARNING: All illegal access operations will be denied in a future release
> [ivy:resolve] :: problems summary ::
> [ivy:resolve] :::: ERRORS
> [ivy:resolve] unknown resolver null
> [ivy:resolve] unknown resolver null
> [ivy:resolve] unknown resolver null
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)