Remove unnecessary usages of Supplier interface 80/6780/1
author Marek Gradzki <mgradzki@cisco.com>
Fri, 19 May 2017 07:30:58 +0000 (09:30 +0200)
committer Marek Gradzki <mgradzki@cisco.com>
Fri, 19 May 2017 08:31:58 +0000 (10:31 +0200)
Change-Id: I6bc2f99806f81c206dcf31711a01fcebe809288a
Signed-off-by: Marek Gradzki <mgradzki@cisco.com>
infra/it/memory-benchmark/src/main/java/io/fd/honeycomb/benchmark/memory/BenchmarkFilesProvider.java
infra/it/memory-benchmark/src/main/java/io/fd/honeycomb/benchmark/memory/MemoryFootprintBenchmark.java
infra/test-utils/test-tools/src/main/java/io/fd/honeycomb/test/tools/ContainerNodeDataProcessor.java
infra/test-utils/test-tools/src/main/java/io/fd/honeycomb/test/tools/ListNodeDataProcessor.java
infra/test-utils/test-tools/src/main/java/io/fd/honeycomb/test/tools/YangDataProcessor.java

infra/it/memory-benchmark/src/main/java/io/fd/honeycomb/benchmark/memory/BenchmarkFilesProvider.java
index c7717d5..ab27059 100644
@@ -33,7 +33,7 @@ public interface BenchmarkFilesProvider {
 
     default void outputBenchmarkResult(@Nonnull final MemoryInfo benchmarkResult,
                                        @Nonnull final String outputPath,
-                                       @Nonnull final Supplier<Logger> loggerSupplier) {
+                                       @Nonnull final Logger logger) {
         // specifies output file in form specified_name-memory_info_type.csv
         final Path outPath = Paths.get(outputPath + "-" + benchmarkResult.getMemoryInfoTypeName() + ".csv");
         final CSVFormat csvFormat = CSVFormat.RFC4180.withHeader(MemoryInfo.COMMITTED, MemoryInfo.INIT, MemoryInfo.MAX, MemoryInfo.USED);
@@ -42,7 +42,7 @@ public interface BenchmarkFilesProvider {
             // prints values in same order that header is
             csvPrinter.printRecord(benchmarkResult.getCommitted(), benchmarkResult.getInit(), benchmarkResult.getMax(), benchmarkResult.getUsed());
 
-            loggerSupplier.get().info("Creating output file {}", outPath);
+            logger.info("Creating output file {}", outPath);
             // writes output to separate file
             Files.write(Files.createFile(outPath), Collections.singleton(csvPrinter.getOut().toString()));
         } catch (IOException e) {
infra/it/memory-benchmark/src/main/java/io/fd/honeycomb/benchmark/memory/MemoryFootprintBenchmark.java
index 48d8059..a51819a 100644
@@ -84,7 +84,7 @@ public class MemoryFootprintBenchmark implements JMXBeanProvider, BenchmarkFiles
 
         // query memory beans with JMX and output results on output path
         queryMemoryBeans(injector.getInstance(JMXServiceURL.class))
-                .forEach(memoryInfo -> outputBenchmarkResult(memoryInfo, outputPath, () -> LOG));
+                .forEach(memoryInfo -> outputBenchmarkResult(memoryInfo, outputPath, LOG));
         // shutdowns server instance
         injector.getInstance(Server.class).stop();
     }
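
The two hunks above illustrate the pattern applied throughout this commit: a Supplier<Logger> parameter is replaced by a plain Logger. Below is a minimal, self-contained sketch of the before/after call shapes (hypothetical class and method names, not part of the Honeycomb codebase; assumes only slf4j-api on the classpath). The supplier is invoked immediately inside the callee, so wrapping a static logger constant in a lambda defers nothing, and SLF4J's parameterized logging already postpones message formatting until the level is enabled.

import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

final class LoggerParameterSketch {
    private static final Logger LOG = LoggerFactory.getLogger(LoggerParameterSketch.class);

    // Before: caller wraps the constant in a lambda, callee unwraps it right away.
    static void reportLazily(final String path, final Supplier<Logger> loggerSupplier) {
        loggerSupplier.get().info("Creating output file {}", path);
    }

    // After: passing the Logger directly is equivalent and reads more simply.
    static void report(final String path, final Logger logger) {
        logger.info("Creating output file {}", path);
    }

    public static void main(final String[] args) {
        reportLazily("benchmark-heap.csv", () -> LOG); // extra lambda, no laziness gained
        report("benchmark-heap.csv", LOG);             // same behaviour, simpler signature
    }
}

The same reasoning drives the change to the parentSchema helper in YangDataProcessor further below, which now accepts the Logger directly.
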
infra/test-utils/test-tools/src/main/java/io/fd/honeycomb/test/tools/ContainerNodeDataProcessor.java
index b7641e0..4e5893e 100644
 
 package io.fd.honeycomb.test.tools;
 
+import static com.google.common.base.Preconditions.checkState;
+import static io.fd.honeycomb.translate.util.JsonUtils.readContainerEntryJson;
+import static io.fd.honeycomb.translate.util.JsonUtils.readJson;
+
+import java.io.InputStream;
+import javax.annotation.Nonnull;
 import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
 import org.opendaylight.yangtools.yang.binding.Augmentation;
 import org.opendaylight.yangtools.yang.binding.DataObject;
@@ -28,13 +34,6 @@ import org.opendaylight.yangtools.yang.model.api.SchemaNode;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nonnull;
-import java.io.InputStream;
-
-import static com.google.common.base.Preconditions.checkState;
-import static io.fd.honeycomb.translate.util.JsonUtils.readContainerEntryJson;
-import static io.fd.honeycomb.translate.util.JsonUtils.readJson;
-
 final class ContainerNodeDataProcessor extends AbstractYangContextHolder implements YangDataProcessor {
 
     private static final Logger LOG = LoggerFactory.getLogger(ContainerNodeDataProcessor.class);
@@ -51,7 +50,7 @@ final class ContainerNodeDataProcessor extends AbstractYangContextHolder impleme
         checkState(resourceStream != null, "Resource %s not found", resourcePath);
 
         final YangInstanceIdentifier nodeParent = getNodeParent(yangInstanceIdentifier).orElse(null);
-        final SchemaNode parentSchema = parentSchema(schemaContext(), serializer(), nodeParent, () -> LOG);
+        final SchemaNode parentSchema = parentSchema(schemaContext(), serializer(), nodeParent, LOG);
 
         // to be able to process containers in root of model
         if (isRoot(yangInstanceIdentifier)) {
infra/test-utils/test-tools/src/main/java/io/fd/honeycomb/test/tools/ListNodeDataProcessor.java
index c14ea35..dd7efbb 100644
 
 package io.fd.honeycomb.test.tools;
 
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+import static io.fd.honeycomb.translate.util.JsonUtils.readListEntryFromJson;
+
+import java.io.InputStream;
+import javax.annotation.Nonnull;
 import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
 import org.opendaylight.yangtools.yang.binding.DataObject;
 import org.opendaylight.yangtools.yang.binding.Identifiable;
@@ -26,13 +32,6 @@ import org.opendaylight.yangtools.yang.model.api.SchemaNode;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nonnull;
-import java.io.InputStream;
-
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkState;
-import static io.fd.honeycomb.translate.util.JsonUtils.readListEntryFromJson;
-
 /**
  * json --> BA processor for list entry data
  */
@@ -55,7 +54,7 @@ final class ListNodeDataProcessor extends AbstractYangContextHolder implements Y
         final InputStream resourceStream = this.getClass().getResourceAsStream(resourcePath);
         checkState(resourceStream != null, "Resource %s not found", resourcePath);
 
-        final SchemaNode parentSchemaNode = parentSchema(schemaContext(), serializer(), listParent, () -> LOG);
+        final SchemaNode parentSchemaNode = parentSchema(schemaContext(), serializer(), listParent, LOG);
         final MapEntryNode data = readListEntryFromJson(schemaContext(), resourceStream, parentSchemaNode, keyedNodeIdentifier);
 
         return nodeBinding(serializer(), nodeIdentifier, data).getValue();
infra/test-utils/test-tools/src/main/java/io/fd/honeycomb/test/tools/YangDataProcessor.java
index a353e4b..fe46410 100644
 
 package io.fd.honeycomb.test.tools;
 
+import java.util.AbstractMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import org.opendaylight.controller.md.sal.binding.impl.BindingToNormalizedNodeCodec;
 import org.opendaylight.yangtools.sal.binding.generator.impl.BindingSchemaContextUtils;
 import org.opendaylight.yangtools.yang.binding.DataObject;
@@ -28,13 +33,6 @@ import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 import org.opendaylight.yangtools.yang.model.api.SchemaNode;
 import org.slf4j.Logger;
 
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-import java.util.AbstractMap;
-import java.util.Map;
-import java.util.Optional;
-import java.util.function.Supplier;
-
 interface YangDataProcessor {
 
     /**
@@ -68,11 +66,11 @@ interface YangDataProcessor {
     default SchemaNode parentSchema(@Nonnull final SchemaContext schemaContext,
                                     @Nonnull final BindingToNormalizedNodeCodec serializer,
                                     @Nullable final YangInstanceIdentifier parentYangId,
-                                    @Nonnull final Supplier<Logger> logProvider) {
+                                    @Nonnull final Logger logger) {
         // null or root
         if (parentYangId == null || parentYangId.getPathArguments().size() == 0) {
             // no parent == use schema context as root context
-            logProvider.get().info("Parent is null, providing schema context as parent node");
+            logger.info("Parent is null, providing schema context as parent node");
             return schemaContext;
         }
 
@@ -96,7 +94,7 @@ interface YangDataProcessor {
         }
 
         final DataNodeContainer parentNode = dataNodeContainerOptional.get();
-        logProvider.get().info("Parent schema node resolved as {}", parentNode);
+        logger.info("Parent schema node resolved as {}", parentNode);
         return (SchemaNode) parentNode;
     }