HONEYCOMB-288: JMH CSV postprocessing to format accepted by Jenkins plot plugin 85/4385/2
author Marek Gradzki <mgradzki@cisco.com>
Mon, 19 Dec 2016 10:49:03 +0000 (11:49 +0100)
committer Marek Gradzki <mgradzki@cisco.com>
Mon, 19 Dec 2016 11:41:22 +0000 (12:41 +0100)
Generates 3 plots for write operations:
- simple-container.csv,
- list-in-container.csv,
- complex-list-in-container.csv

and one plot for read operations: operational-read.csv

from benchmark.csv.

The Jenkins plot plugin uses only the first record line of a CSV file:
https://github.com/jenkinsci/plot-plugin/blob/master/src/main/webapp/help-csv.html#L15

therefore each generated CSV contains two lines: a header and a single record.
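
For illustration, a write plot such as simple-container.csv uses the benchmarked
persistence/submitFrequency combinations as the header and their scores as the
single record; the combinations and values below are made up:

persistence=true freq=1,persistence=true freq=100,persistence=false freq=1,persistence=false freq=100
3099.56,3421.10,5310.42,5604.77

operational-read.csv uses the benchmarked data set names as the header instead.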

Change-Id: I489c3401673d72a4b3ea3ec00197366118d1ff5f
Signed-off-by: Marek Gradzki <mgradzki@cisco.com>
infra/it/benchmark/pom.xml
infra/it/benchmark/src/main/java/io/fd/honeycomb/benchmark/data/DataBrokerConfigWriteBenchmark.java
infra/it/benchmark/src/main/java/io/fd/honeycomb/benchmark/data/DataBrokerOperReadBenchmark.java
infra/it/benchmark/src/main/java/io/fd/honeycomb/benchmark/format/BenchmarkOutputFormatter.java [new file with mode: 0644]

index 54ed6e1..8f9eabc 100644 (file)
             <artifactId>honeycomb-test-model</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-csv</artifactId>
+            <version>1.4</version>
+        </dependency>
+
     </dependencies>
 
     <build>
                             </arguments>
                         </configuration>
                     </execution>
+                    <execution>
+                        <id>benchmark-post-processing</id>
+                        <phase>test</phase>
+                        <goals>
+                            <goal>java</goal>
+                        </goals>
+                        <configuration>
+                            <mainClass>io.fd.honeycomb.benchmark.format.BenchmarkOutputFormatter</mainClass>
+                            <arguments>
+                                <argument>${project.build.directory}/benchmark.csv</argument>
+                            </arguments>
+                        </configuration>
+                    </execution>
                 </executions>
             </plugin>
         </plugins>
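
Since this execution is bound to the test phase and listed after the benchmark execution,
${project.build.directory}/benchmark.csv should already exist when it runs, and the generated
plot files are written next to it. For a manual rerun from the benchmark module, something like
mvn exec:java -Dexec.mainClass=io.fd.honeycomb.benchmark.format.BenchmarkOutputFormatter -Dexec.args=target/benchmark.csv
should be equivalent, assuming benchmark.csv has already been produced.
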
index 145cd2b..6d76ad2 100644 (file)
@@ -92,17 +92,6 @@ public class DataBrokerConfigWriteBenchmark extends AbstractModule implements Fi
     private String data;
     private DataProvider dataProvider;
 
-    /*
-    * TODO HONEYCOMB-288 Visualization notes:
-    * - visualize as 3 graphs, 1 for each data
-    * - each graph should show 4 lines. for the combinations of parameters: submitFrequency and persistence
-    *   (if that's too much split or reduce submitFrequecy values that are shown in graph)
-    *
-    * TODO data need to be prepared for such visualization. Maybe if each benchmark class exposed a method to prepare
-    * that data from aggregated results... it might be easy
-    * (just maven exec plugin + main that invokes some method on all benchmark classes)
-    */
-
     // Infra modules to load
     private final Module[] modules = new Module[] {
             new io.fd.honeycomb.infra.distro.schema.YangBindingProviderModule(),
index 2fd9fa2..86ded0f 100644 (file)
@@ -85,12 +85,6 @@ public class DataBrokerOperReadBenchmark extends AbstractModule implements FileM
     private String data;
     private DataProvider dataProvider;
 
-    /*
-    * TODO HONEYCOMB-288 Visualization notes:
-    * - visualize as 1 graph
-    * - just 3 lines
-    */
-
     // Infra modules to load
     private final Module[] modules = new Module[] {
             new io.fd.honeycomb.infra.distro.schema.YangBindingProviderModule(),
diff --git a/infra/it/benchmark/src/main/java/io/fd/honeycomb/benchmark/format/BenchmarkOutputFormatter.java b/infra/it/benchmark/src/main/java/io/fd/honeycomb/benchmark/format/BenchmarkOutputFormatter.java
new file mode 100644 (file)
index 0000000..dc6552d
--- /dev/null
@@ -0,0 +1,163 @@
+/*
+ * Copyright (c) 2016 Cisco and/or its affiliates.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.fd.honeycomb.benchmark.format;
+
+import com.google.common.base.Charsets;
+import io.fd.honeycomb.benchmark.util.DataProvider;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVPrinter;
+import org.apache.commons.csv.CSVRecord;
+import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Processes the JMH CSV file into the format accepted by the Jenkins plot plugin (header + one data line per CSV file).
+ */
+public final class BenchmarkOutputFormatter {
+    /*
+    Input format of the JMH CSV file:
+
+    "Benchmark","Mode","Threads","Samples","Score","Score Error (99,9%)","Unit","Param: data","Param: dsType","Param: operation","Param: persistence","Param: submitFrequency"
+    "io.fd.honeycomb.benchmark.data.DataBrokerConfigWriteBenchmark.write","thrpt",1,1,"3099,563546",NaN,"ops/s",simple-container,CONFIGURATION,put,true,1
+    ...
+
+     */
+    private static final int SCORE_POSITION = 4;
+    private static final int DATA_TYPE_POSITION = 7;
+    private static final int DS_TYPE_POSITION = 8;
+    private static final int PERSISTENCE_POSITION = 10;
+    private static final int SUBMIT_FREQUENCY_POSITION = 11;
+
+    private static final Logger LOG = LoggerFactory.getLogger(BenchmarkOutputFormatter.class);
+
+    private BenchmarkOutputFormatter() {
+    }
+
+    /**
+     * Produces 4 CSV files: three write plots (simple-container, list-in-container, complex-list-in-container) and one read plot (operational-read).
+     */
+    public static void main(final String[] args) throws Exception {
+        final File csvFile = new File(args[0]);
+        if (!csvFile.exists()) {
+            throw new FileNotFoundException(args[0]);
+        }
+        final String path = csvFile.getParent();
+        LOG.info("Preparing benchmarking plot data from: {}", args[0]);
+
+        final List<CSVRecord> records;
+        try (final Reader in = new InputStreamReader(new FileInputStream(csvFile), Charsets.UTF_8)) {
+            records = CSVFormat.RFC4180.parse(in).getRecords();
+        }
+        writeStatistics(processSimpleContainer(records), path, "simple-container.csv");
+        writeStatistics(processListContainer(records), path, "list-in-container.csv");
+        writeStatistics(processComplexListContainer(records), path, "complex-list-in-container.csv");
+        writeStatistics(processReadStatistics(records), path, "operational-read.csv");
+        LOG.info("Finished benchmarking plot data preparation");
+    }
+
+    private static boolean isConfiguration(CSVRecord record) {
+        return LogicalDatastoreType.CONFIGURATION.toString().equals(record.get(DS_TYPE_POSITION));
+    }
+
+    private static List<DataEntry> processSimpleContainer(final List<CSVRecord> list) {
+        return list.stream().filter(
+            record -> DataProvider.SIMPLE_CONTAINER.equals(record.get(DATA_TYPE_POSITION)) && isConfiguration(record))
+            .map(DataEntry::parseWriteData).collect(Collectors.toList());
+    }
+
+    private static List<DataEntry> processListContainer(final List<CSVRecord> list) {
+        return list.stream().filter(
+            record -> DataProvider.LIST_IN_CONTAINER.equals(record.get(DATA_TYPE_POSITION)) && isConfiguration(record))
+            .map(DataEntry::parseWriteData)
+            .collect(Collectors.toList());
+    }
+
+    private static List<DataEntry> processComplexListContainer(final List<CSVRecord> list) {
+        return list.stream().filter(
+            record -> DataProvider.COMPLEX_LIST_IN_CONTAINER.equals(record.get(DATA_TYPE_POSITION))
+                && isConfiguration(record))
+            .map(DataEntry::parseWriteData)
+            .collect(Collectors.toList());
+    }
+
+    private static List<DataEntry> processReadStatistics(final List<CSVRecord> list) {
+        return list.stream()
+            .filter(record -> LogicalDatastoreType.OPERATIONAL.toString().equals(record.get(DS_TYPE_POSITION)))
+            .map(DataEntry::parseReadData)
+            .collect(Collectors.toList());
+    }
+
+    private static void writeStatistics(final List<DataEntry> data, final String path, final String fileName)
+        throws IOException {
+        final String absolutePath = path + '/' + fileName;
+        LOG.debug("Writing benchmark statistics to file {}", absolutePath);
+        final List<String> keys = new ArrayList<>();
+        final List<String> scores = new ArrayList<>();
+        data.forEach(entry -> {
+            keys.add(entry.key);
+            scores.add(entry.score);
+        });
+        LOG.debug("header: {}", keys);
+        LOG.debug("values: {}", scores);
+
+        final StringBuilder buffer = new StringBuilder();
+        final CSVPrinter csv = new CSVPrinter(buffer, CSVFormat.RFC4180);
+        csv.printRecord(keys);
+        csv.printRecord(scores);
+        csv.close();
+
+        try (final FileOutputStream out = new FileOutputStream(absolutePath)) {
+            out.write(buffer.toString().getBytes(Charsets.UTF_8));
+            LOG.debug("Statistics written successfully");
+        }
+    }
+
+    private static final class DataEntry {
+        private final String key;
+        private final String score;
+
+        private DataEntry(final String key, final String score) {
+            this.key = key;
+            this.score = score;
+        }
+
+        static DataEntry parseWriteData(final CSVRecord record) {
+            return new DataEntry(
+                "persistence=" + record.get(PERSISTENCE_POSITION) + " freq=" + record.get(SUBMIT_FREQUENCY_POSITION),
+                record.get(SCORE_POSITION)
+            );
+        }
+
+        static DataEntry parseReadData(final CSVRecord record) {
+            return new DataEntry(
+                record.get(DATA_TYPE_POSITION),
+                record.get(SCORE_POSITION)
+            );
+        }
+    }
+}
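
The generated files exist solely for the Jenkins plot plugin, which reads only the first
record line, so each one must contain exactly a header plus one data record. A minimal
sketch of a sanity check for that contract, assuming commons-csv on the classpath; the
class name and the target/operational-read.csv path are illustrative only, not part of
the change above:

package io.fd.honeycomb.benchmark.format;

import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

// Illustrative check: a generated plot file must hold exactly two records,
// the header and the single data record read by the plot plugin.
public final class PlotCsvSanityCheck {
    public static void main(final String[] args) throws Exception {
        try (Reader in = new InputStreamReader(
                new FileInputStream("target/operational-read.csv"), StandardCharsets.UTF_8)) {
            final List<CSVRecord> records = CSVFormat.RFC4180.parse(in).getRecords();
            if (records.size() != 2) {
                throw new IllegalStateException("Expected header + one record, got " + records.size());
            }
            System.out.println("columns: " + records.get(0));
            System.out.println("values:  " + records.get(1));
        }
    }
}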