PAL Trending 54/11454/1
author    Tibor Frank <tifrank@cisco.com>
Thu, 29 Mar 2018 08:48:42 +0000 (10:48 +0200)
committer Tibor Frank <tifrank@cisco.com>
Thu, 29 Mar 2018 08:51:50 +0000 (08:51 +0000)
Change-Id: I6dae970203415dcb6d90217656d4f052350e52df
Signed-off-by: Tibor Frank <tifrank@cisco.com>
(cherry picked from commit c2b4ace3d023b2b58319feaeb942014586fcac11)

25 files changed:
docs/cpta/data/index.rst [new file with mode: 0644]
docs/cpta/index.rst [new file with mode: 0644]
docs/cpta/introduction/index.rst [new file with mode: 0644]
docs/cpta/trending/container_memif.rst [new file with mode: 0644]
docs/cpta/trending/index.rst [new file with mode: 0644]
docs/cpta/trending/ip4.rst [new file with mode: 0644]
docs/cpta/trending/ip4_tunnels.rst [new file with mode: 0644]
docs/cpta/trending/ip6.rst [new file with mode: 0644]
docs/cpta/trending/ipsec.rst [new file with mode: 0644]
docs/cpta/trending/l2.rst [new file with mode: 0644]
docs/cpta/trending/vm_vhost.rst [new file with mode: 0644]
resources/tools/presentation/conf_cpta/conf.py [new file with mode: 0644]
resources/tools/presentation/doc/pal_lld.rst
resources/tools/presentation/generator_CPTA.py [new file with mode: 0644]
resources/tools/presentation/generator_plots.py
resources/tools/presentation/generator_report.py
resources/tools/presentation/input_data_files.py
resources/tools/presentation/input_data_parser.py
resources/tools/presentation/pal.py
resources/tools/presentation/run_cpta.sh [new file with mode: 0755]
resources/tools/presentation/run_report.sh
resources/tools/presentation/specification.yaml
resources/tools/presentation/specification_CPTA.yaml [new file with mode: 0644]
resources/tools/presentation/specification_parser.py
resources/tools/presentation/static_content.py

diff --git a/docs/cpta/data/index.rst b/docs/cpta/data/index.rst
new file mode 100644 (file)
index 0000000..7c47849
--- /dev/null
@@ -0,0 +1,8 @@
+Trending Data
+=============
+
+The data used to generate the trending plots is available in CSV and
+pretty ASCII formats:
+
+    - `csv format <../_static/vpp/cpta-trending.csv>`_,
+    - `pretty ASCII format <../_static/vpp/cpta-trending.txt>`_.
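
The CSV is a simple build-by-test matrix: a "Build Number:" header row followed
by one row per test, holding the measured throughput per build (empty where a
build has no result). A minimal sketch of consuming a downloaded copy with
pandas; the local file name and the packets-per-second unit are assumptions
based on the generator code further below::

    import pandas as pd

    # First column holds the test name; the remaining columns are build numbers.
    df = pd.read_csv("cpta-trending.csv", index_col=0)

    # Convert to numbers and scale to Mpps (the generator divides by 1e6 when
    # rendering the pretty ASCII table, suggesting the raw values are in pps).
    df_mpps = df.apply(pd.to_numeric, errors="coerce") / 1e6

    # Trend of a single test across builds, skipping builds with no result.
    print(df_mpps.iloc[0].dropna())
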
diff --git a/docs/cpta/index.rst b/docs/cpta/index.rst
new file mode 100644 (file)
index 0000000..c9dd9c5
--- /dev/null
@@ -0,0 +1,9 @@
+Continuous Performance Trending and Analysis
+============================================
+
+.. toctree::
+    :numbered:
+
+    introduction/index
+    trending/index
+    data/index
diff --git a/docs/cpta/introduction/index.rst b/docs/cpta/introduction/index.rst
new file mode 100644 (file)
index 0000000..5d31b33
--- /dev/null
@@ -0,0 +1,8 @@
+VPP Performance Trending
+========================
+
+This auto-generated document contains VPP performance trending graphs and data.
+It is generated using CSIT continuous trending test and analysis jobs and is
+updated daily. More detail is available on the
+`CSIT Performance Trending and Analysis <https://wiki.fd.io/view/CSIT/PerformanceTrendingAnalysis>`_
+wiki page.
diff --git a/docs/cpta/trending/container_memif.rst b/docs/cpta/trending/container_memif.rst
new file mode 100644 (file)
index 0000000..6c6251d
--- /dev/null
@@ -0,0 +1,80 @@
+Container memif Connections
+===========================
+
+NIC 10ge2p1x520
+---------------
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 1. Container memif Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 2. Container memif Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 3. Container memif Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 4. Container memif Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 5. Container memif Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 6. Container memif Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+NIC 40ge2p1xl710
+----------------
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-1t1c-xl710-1.html"></iframe>
+
+    <center><i>Figure 1. Container memif Connections, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-1t1c-xl710-5.html"></iframe>
+
+    <center><i>Figure 2. Container memif Connections, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-2t2c-xl710-1.html"></iframe>
+
+    <center><i>Figure 3. Container memif Connections, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-2t2c-xl710-5.html"></iframe>
+
+    <center><i>Figure 4. Container memif Connections, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-4t4c-xl710-1.html"></iframe>
+
+    <center><i>Figure 5. Container memif Connections, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-container-memif-l2-4t4c-xl710-5.html"></iframe>
+
+    <center><i>Figure 6. Container memif Connections, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
diff --git a/docs/cpta/trending/index.rst b/docs/cpta/trending/index.rst
new file mode 100644 (file)
index 0000000..dcc50bf
--- /dev/null
@@ -0,0 +1,12 @@
+Trending Graphs
+===============
+
+.. toctree::
+
+    l2
+    ip4
+    ip4_tunnels
+    ip6
+    vm_vhost
+    container_memif
+    ipsec
diff --git a/docs/cpta/trending/ip4.rst b/docs/cpta/trending/ip4.rst
new file mode 100644 (file)
index 0000000..660d4fd
--- /dev/null
@@ -0,0 +1,123 @@
+IPv4 Routed-Forwarding
+======================
+
+NIC 10ge2p1x520
+---------------
+
+IPv4 Routed-Forwarding - Base and Scale
+```````````````````````````````````````
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 1. IPv4 Routed-Forwarding - Base and Scale, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 2. IPv4 Routed-Forwarding - Base and Scale, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 3. IPv4 Routed-Forwarding - Base and Scale, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 4. IPv4 Routed-Forwarding - Base and Scale, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 5. IPv4 Routed-Forwarding - Base and Scale, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 6. IPv4 Routed-Forwarding - Base and Scale, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+
+IPv4 Routed-Forwarding - Features
+`````````````````````````````````
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-feature-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 1. IPv4 Routed-Forwarding - Features, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-feature-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 2. IPv4 Routed-Forwarding - Features, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-feature-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 3. IPv4 Routed-Forwarding - Features, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-feature-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 4. IPv4 Routed-Forwarding - Features, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-feature-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 5. IPv4 Routed-Forwarding - Features, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-feature-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 6. IPv4 Routed-Forwarding - Features, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+NIC 40ge2p1xl710
+----------------
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-1t1c-xl710-1.html"></iframe>
+
+    <center><i>Figure 1. IPv4 Routed-Forwarding, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-1t1c-xl710-5.html"></iframe>
+
+    <center><i>Figure 2. IPv4 Routed-Forwarding, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-2t2c-xl710-1.html"></iframe>
+
+    <center><i>Figure 3. IPv4 Routed-Forwarding, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-2t2c-xl710-5.html"></iframe>
+
+    <center><i>Figure 4. IPv4 Routed-Forwarding, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-4t4c-xl710-1.html"></iframe>
+
+    <center><i>Figure 5. IPv4 Routed-Forwarding, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-4t4c-xl710-5.html"></iframe>
+
+    <center><i>Figure 6. IPv4 Routed-Forwarding, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
diff --git a/docs/cpta/trending/ip4_tunnels.rst b/docs/cpta/trending/ip4_tunnels.rst
new file mode 100644 (file)
index 0000000..e7050de
--- /dev/null
@@ -0,0 +1,38 @@
+IPv4 Overlay Tunnels
+====================
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-tunnels-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 1. IPv4 Overlay Tunnels - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-tunnels-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 2. IPv4 Overlay Tunnels - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-tunnels-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 3. IPv4 Overlay Tunnels - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-tunnels-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 4. IPv4 Overlay Tunnels - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-tunnels-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 5. IPv4 Overlay Tunnels - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip4-tunnels-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 6. IPv4 Overlay Tunnels - Weekly trend.</i></center><br><br>
diff --git a/docs/cpta/trending/ip6.rst b/docs/cpta/trending/ip6.rst
new file mode 100644 (file)
index 0000000..1dfbf58
--- /dev/null
@@ -0,0 +1,80 @@
+IPv6 Routed-Forwarding
+======================
+
+NIC 10ge2p1x520
+---------------
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 1. IPv6 Routed-Forwarding, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 2. IPv6 Routed-Forwarding, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 3. IPv6 Routed-Forwarding, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 4. IPv6 Routed-Forwarding, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 5. IPv6 Routed-Forwarding, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 6. IPv6 Routed-Forwarding, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+NIC 40ge2p1xl710
+----------------
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-1t1c-xl710-1.html"></iframe>
+
+    <center><i>Figure 1. IPv6 Routed-Forwarding, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-1t1c-xl710-5.html"></iframe>
+
+    <center><i>Figure 2. IPv6 Routed-Forwarding, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-2t2c-xl710-1.html"></iframe>
+
+    <center><i>Figure 3. IPv6 Routed-Forwarding, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-2t2c-xl710-5.html"></iframe>
+
+    <center><i>Figure 4. IPv6 Routed-Forwarding, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-4t4c-xl710-1.html"></iframe>
+
+    <center><i>Figure 5. IPv6 Routed-Forwarding, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ip6-4t4c-xl710-5.html"></iframe>
+
+    <center><i>Figure 6. IPv6 Routed-Forwarding, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
diff --git a/docs/cpta/trending/ipsec.rst b/docs/cpta/trending/ipsec.rst
new file mode 100644 (file)
index 0000000..1607348
--- /dev/null
@@ -0,0 +1,38 @@
+IPSec Crypto HW: IP4 Routed-Forwarding
+======================================
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ipsec-1t1c-xl710-1.html"></iframe>
+
+    <center><i>Figure 1. IPSec Crypto HW: IP4 Routed-Forwarding - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ipsec-1t1c-xl710-5.html"></iframe>
+
+    <center><i>Figure 2. IPSec Crypto HW: IP4 Routed-Forwarding - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ipsec-2t2c-xl710-1.html"></iframe>
+
+    <center><i>Figure 3. IPSec Crypto HW: IP4 Routed-Forwarding - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ipsec-2t2c-xl710-5.html"></iframe>
+
+    <center><i>Figure 4. IPSec Crypto HW: IP4 Routed-Forwarding - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ipsec-4t4c-xl710-1.html"></iframe>
+
+    <center><i>Figure 5. IPSec Crypto HW: IP4 Routed-Forwarding - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-ipsec-4t4c-xl710-5.html"></iframe>
+
+    <center><i>Figure 6. IPSec Crypto HW: IP4 Routed-Forwarding - Weekly trend.</i></center><br><br>
diff --git a/docs/cpta/trending/l2.rst b/docs/cpta/trending/l2.rst
new file mode 100644 (file)
index 0000000..7237d43
--- /dev/null
@@ -0,0 +1,122 @@
+L2 Ethernet Switching
+=====================
+
+NIC 10ge2p1x520
+---------------
+
+L2 Ethernet Switching - Base and Scale
+``````````````````````````````````````
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 1. L2 Ethernet Switching - Base and Scale,  NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 2. L2 Ethernet Switching - Base and Scale,  NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 3. L2 Ethernet Switching - Base and Scale,  NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 4. L2 Ethernet Switching - Base and Scale,  NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 5. L2 Ethernet Switching - Base and Scale,  NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 6. L2 Ethernet Switching - Base and Scale,  NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+L2 Ethernet Switching - Features
+````````````````````````````````
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-feature-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 1. L2 Ethernet Switching - Features,  NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-feature-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 2. L2 Ethernet Switching - Features,  NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-feature-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 3. L2 Ethernet Switching - Features,  NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-feature-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 4. L2 Ethernet Switching - Features,  NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-feature-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 5. L2 Ethernet Switching - Features,  NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-feature-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 6. L2 Ethernet Switching - Features,  NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+NIC 40ge2p1xl710
+----------------
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-1t1c-xl710-1.html"></iframe>
+
+    <center><i>Figure 1. L2 Ethernet Switching,  NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-1t1c-xl710-5.html"></iframe>
+
+    <center><i>Figure 2. L2 Ethernet Switching,  NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-2t2c-xl710-1.html"></iframe>
+
+    <center><i>Figure 3. L2 Ethernet Switching,  NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-2t2c-xl710-5.html"></iframe>
+
+    <center><i>Figure 4. L2 Ethernet Switching,  NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-4t4c-xl710-1.html"></iframe>
+
+    <center><i>Figure 5. L2 Ethernet Switching,  NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-l2-4t4c-xl710-5.html"></iframe>
+
+    <center><i>Figure 6. L2 Ethernet Switching,  NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
diff --git a/docs/cpta/trending/vm_vhost.rst b/docs/cpta/trending/vm_vhost.rst
new file mode 100644 (file)
index 0000000..2a49ab8
--- /dev/null
@@ -0,0 +1,116 @@
+VM vhost Connections
+====================
+
+NIC 10ge2p1x520
+---------------
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-ethip4-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 1. VM vhost Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-ethip4-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 2. VM vhost Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-ethip4-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 3. VM vhost Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-ethip4-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 4. VM vhost Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-ethip4-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 5. VM vhost Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-ethip4-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 6. VM vhost Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-1t1c-x520-1.html"></iframe>
+
+    <center><i>Figure 7. VM vhost Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-1t1c-x520-5.html"></iframe>
+
+    <center><i>Figure 8. VM vhost Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-2t2c-x520-1.html"></iframe>
+
+    <center><i>Figure 9. VM vhost Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-2t2c-x520-5.html"></iframe>
+
+    <center><i>Figure 10. VM vhost Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-4t4c-x520-1.html"></iframe>
+
+    <center><i>Figure 11. VM vhost Connections, NIC 10ge2p1x520 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-4t4c-x520-5.html"></iframe>
+
+    <center><i>Figure 12. VM vhost Connections, NIC 10ge2p1x520 - Weekly trend.</i></center><br><br>
+
+NIC 40ge2p1xl710
+----------------
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-1t1c-xl710-1.html"></iframe>
+
+    <center><i>Figure 1. VM vhost Connections, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-1t1c-xl710-5.html"></iframe>
+
+    <center><i>Figure 2. VM vhost Connections, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-2t2c-xl710-1.html"></iframe>
+
+    <center><i>Figure 3. VM vhost Connections, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-2t2c-xl710-5.html"></iframe>
+
+    <center><i>Figure 4. VM vhost Connections, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-4t4c-xl710-1.html"></iframe>
+
+    <center><i>Figure 5. VM vhost Connections, NIC 40ge2p1xl710 - Daily trend.</i></center><br><br>
+
+.. raw:: html
+
+    <iframe width="1100" height="800" frameborder="0" scrolling="no" src="../_static/vpp/cpta-vm-vhost-eth-4t4c-xl710-5.html"></iframe>
+
+    <center><i>Figure 6. VM vhost Connections, NIC 40ge2p1xl710 - Weekly trend.</i></center><br><br>
diff --git a/resources/tools/presentation/conf_cpta/conf.py b/resources/tools/presentation/conf_cpta/conf.py
new file mode 100644 (file)
index 0000000..9b6e5f3
--- /dev/null
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+#
+# CSIT report documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, as shown here.
+#
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = ['sphinxcontrib.programoutput',
+              'sphinx.ext.ifconfig']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffixes as a list of strings:
+#
+source_suffix = ['.rst', '.md']
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'FD.io CSIT'
+copyright = u'2018, FD.io'
+author = u'FD.io CSIT'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+#version = u''
+# The full version, including alpha/beta/rc tags.
+#release = u''
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = 'en'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# These patterns also affect html_static_path and html_extra_path
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+#
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#
+# html_theme_options = {}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_theme_path = ['env/lib/python2.7/site-packages/sphinx_rtd_theme']
+
+# html_static_path = ['_build/_static']
+html_static_path = ['../_tmp/src/_static']
+
+html_context = {
+    'css_files': [
+        '_static/theme_overrides.css',  # overrides for wide tables in RTD theme
+        ],
+    }
diff --git a/resources/tools/presentation/doc/pal_lld.rst b/resources/tools/presentation/doc/pal_lld.rst
index 7ca3ad4..81c2547 100644 (file)
@@ -1368,6 +1368,113 @@ of an element is required, only a new algorithm needs to be implemented
 and integrated.
 
 
+Continuous Performance Measurements and Trending
+------------------------------------------------
+
+Performance analysis and trending execution sequence:
+`````````````````````````````````````````````````````
+
+CSIT PA runs performance analysis, change detection and trending using the
+specified trend analysis metrics over a rolling window of the last <N> sets of
+historical measurement data. PA is defined as follows:
+
+    #. PA job triggers:
+
+        #. By the PT job at its completion.
+        #. Manually from the Jenkins UI.
+
+    #. Download and parse archived historical data and the new data:
+
+        #. New data from the latest PT job is evaluated against the rolling
+           window of <N> sets of historical data.
+        #. Download RF output.xml files and compressed archived data.
+        #. Parse out the data, filtering the test cases listed in the PA
+           specification (part of the CSIT PAL specification file).
+
+    #. Calculate trend metrics for the rolling window of <N> sets of historical data:
+
+        #. Calculate quartiles Q1, Q2, Q3.
+        #. Trim outliers using IQR.
+        #. Calculate TMA (Trimmed Moving Average) and TMSD (Trimmed Moving Standard Deviation).
+        #. Calculate normal trending range per test case based on TMA and TMSD.
+
+    #. Evaluate new test data against trend metrics:
+
+        #. If within the range of (TMA +/- 3*TMSD) => Result = Pass,
+           Reason = Normal.
+        #. If below the range => Result = Fail, Reason = Regression.
+        #. If above the range => Result = Pass, Reason = Progression.
+
+    #. Generate and publish results:
+
+        #. Relay evaluation result to job result.
+        #. Generate a new set of trend analysis summary graphs and drill-down
+           graphs.
+
+            #. Summary graphs to include measured values with Normal,
+               Progression and Regression markers. MM (Moving Median) shown in
+               the background if possible.
+            #. Drill-down graphs to include MM, TMA and TMSD.
+
+        #. Publish trend analysis graphs in HTML format on
+           https://docs.fd.io/csit/master/trending/.
+
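
The following is a minimal sketch of the evaluation described in steps 3 and 4,
assuming the historical samples are throughput values. The 1.5 * IQR trimming
factor and the single fixed window are illustrative simplifications of the
rolling logic implemented in generator_CPTA.py below::

    import pandas as pd

    def evaluate_sample(history, new_sample, window=10):
        """Classify new_sample against the trend of the last <window> samples."""
        data = pd.Series(history[-window:])

        # Trim outliers using quartiles Q1, Q3 and the inter-quartile range.
        q1, q3 = data.quantile(0.25), data.quantile(0.75)
        iqr = q3 - q1
        trimmed = data[(data >= q1 - 1.5 * iqr) & (data <= q3 + 1.5 * iqr)]

        # Trimmed moving average (TMA) and standard deviation (TMSD).
        tma, tmsd = trimmed.mean(), trimmed.std()

        # Normal trending range is TMA +/- 3 * TMSD.
        if new_sample < tma - 3 * tmsd:
            return "FAIL", "Regression"
        if new_sample > tma + 3 * tmsd:
            return "PASS", "Progression"
        return "PASS", "Normal"

    # A drop well below the ~9.1 Mpps trend is flagged as a regression.
    print(evaluate_sample([9.1e6, 9.2e6, 9.0e6, 2.0e6, 9.3e6], 5.0e6))
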
+
+Parameters to specify:
+``````````````````````
+
+- job to be monitored - the Jenkins job whose results are used as input data
+  for this test;
+- builds used for trending plot(s) - specified by a list of build numbers or by
+  a range of builds defined by the first and the last build number;
+- list of plots to generate:
+
+  - plot title;
+  - output file name;
+  - data for plots;
+  - tests to be displayed in the plot, defined by a filter;
+  - list of parameters to extract from the data;
+  - periods (daily = 1, weekly = 5, monthly = 30);
+  - plot layout.
+
+*Example:*
+
+::
+
+    -
+      type: "cpta"
+      title: "Continuous Performance Trending and Analysis"
+      algorithm: "cpta"
+      output-file-type: ".html"
+      output-file: "{DIR[STATIC,VPP]}/cpta"
+      data: "plot-performance-trending"
+      plots:
+        - title: "VPP 1T1C L2 64B Packet Throughput - {period} Trending"
+          output-file-name: "l2-1t1c-x520"
+          data: "plot-performance-trending"
+          filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+          parameters:
+          - "result"
+    #      - "name"
+          periods:
+          - 1
+          - 5
+          - 30
+          layout: "plot-cpta"
+
+        - title: "VPP 2T2C L2 64B Packet Throughput - {period} Trending"
+          output-file-name: "l2-2t2c-x520"
+          data: "plot-performance-trending"
+          filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '2T2C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+          parameters:
+          - "result"
+    #      - "name"
+          periods:
+          - 1
+          - 5
+          - 30
+          layout: "plot-cpta"
+
 API
 ---
 
diff --git a/resources/tools/presentation/generator_CPTA.py b/resources/tools/presentation/generator_CPTA.py
new file mode 100644 (file)
index 0000000..a1921fa
--- /dev/null
@@ -0,0 +1,479 @@
+# Copyright (c) 2018 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generation of Continuous Performance Trending and Analysis.
+"""
+
+import datetime
+import logging
+import csv
+import prettytable
+import plotly.offline as ploff
+import plotly.graph_objs as plgo
+import plotly.exceptions as plerr
+import numpy as np
+import pandas as pd
+
+from collections import OrderedDict
+from utils import find_outliers, archive_input_data, execute_command
+
+
+# Command to build the html format of the report
+HTML_BUILDER = 'sphinx-build -v -c conf_cpta -a ' \
+               '-b html -E ' \
+               '-t html ' \
+               '-D version="Generated on {date}" ' \
+               '{working_dir} ' \
+               '{build_dir}/'
+
+# .css file for the html format of the report
+THEME_OVERRIDES = """/* override table width restrictions */
+.wy-nav-content {
+    max-width: 1200px !important;
+}
+"""
+
+COLORS = ["SkyBlue", "Olive", "Purple", "Coral", "Indigo", "Pink",
+          "Chocolate", "Brown", "Magenta", "Cyan", "Orange", "Black",
+          "Violet", "Blue", "Yellow"]
+
+
+def generate_cpta(spec, data):
+    """Generate all formats and versions of the Continuous Performance Trending
+    and Analysis.
+
+    :param spec: Specification read from the specification file.
+    :param data: Full data set.
+    :type spec: Specification
+    :type data: InputData
+    """
+
+    logging.info("Generating the Continuous Performance Trending and Analysis "
+                 "...")
+
+    ret_code = _generate_all_charts(spec, data)
+
+    cmd = HTML_BUILDER.format(
+        date=datetime.date.today().strftime('%d-%b-%Y'),
+        working_dir=spec.environment["paths"]["DIR[WORKING,SRC]"],
+        build_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
+    execute_command(cmd)
+
+    with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE]"], "w") as \
+            css_file:
+        css_file.write(THEME_OVERRIDES)
+
+    with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE2]"], "w") as \
+            css_file:
+        css_file.write(THEME_OVERRIDES)
+
+    archive_input_data(spec)
+
+    logging.info("Done.")
+
+    return ret_code
+
+
+def _select_data(in_data, period, fill_missing=False, use_first=False):
+    """Select the data from the full data set. The selection is done by picking
+    the samples depending on the period: period = 1: all samples, period = 2:
+    every second sample, period = 3: every third sample, and so on.
+
+    :param in_data: Full set of data.
+    :param period: Sampling period.
+    :param fill_missing: If the chosen sample is missing in the full set, its
+    nearest neighbour is used.
+    :param use_first: Use the first sample even though it is not chosen.
+    :type in_data: OrderedDict
+    :type period: int
+    :type fill_missing: bool
+    :type use_first: bool
+    :returns: Reduced data.
+    :rtype: OrderedDict
+    """
+
+    first_idx = min(in_data.keys())
+    last_idx = max(in_data.keys())
+
+    idx = last_idx
+    data_dict = dict()
+    if use_first:
+        data_dict[first_idx] = in_data[first_idx]
+    while idx >= first_idx:
+        data = in_data.get(idx, None)
+        if data is None:
+            if fill_missing:
+                threshold = int(round(idx - period / 2)) + 1 - period % 2
+                idx_low = first_idx if threshold < first_idx else threshold
+                threshold = int(round(idx + period / 2))
+                idx_high = last_idx if threshold > last_idx else threshold
+
+                flag_l = True
+                flag_h = True
+                idx_lst = list()
+                inc = 1
+                while flag_l or flag_h:
+                    if idx + inc > idx_high:
+                        flag_h = False
+                    else:
+                        idx_lst.append(idx + inc)
+                    if idx - inc < idx_low:
+                        flag_l = False
+                    else:
+                        idx_lst.append(idx - inc)
+                    inc += 1
+
+                for i in idx_lst:
+                    if i in in_data.keys():
+                        data_dict[i] = in_data[i]
+                        break
+        else:
+            data_dict[idx] = data
+        idx -= period
+
+    return OrderedDict(sorted(data_dict.items(), key=lambda t: t[0]))
+
+
+def _evaluate_results(in_data, trimmed_data, window=10):
+    """Evaluates if the sample value is regress, normal or progress compared to
+    previous data within the window.
+    We use the intervals defined as:
+    - regress: less than median - 3 * stdev
+    - normal: between median - 3 * stdev and median + 3 * stdev
+    - progress: more than median + 3 * stdev
+
+    :param in_data: Full data set.
+    :param trimmed_data: Full data set without the outliers.
+    :param window: Window size used to calculate moving median and moving stdev.
+    :type in_data: pandas.Series
+    :type trimmed_data: pandas.Series
+    :type window: int
+    :returns: Evaluated results.
+    :rtype: list
+    """
+
+    if len(in_data) > 2:
+        win_size = in_data.size if in_data.size < window else window
+        results = [0.0, ] * win_size
+        median = in_data.rolling(window=win_size).median()
+        stdev_t = trimmed_data.rolling(window=win_size, min_periods=2).std()
+        m_vals = median.values
+        s_vals = stdev_t.values
+        d_vals = in_data.values
+        for day in range(win_size, in_data.size):
+            if np.isnan(m_vals[day - 1]) or np.isnan(s_vals[day - 1]):
+                results.append(0.0)
+            elif d_vals[day] < (m_vals[day - 1] - 3 * s_vals[day - 1]):
+                results.append(0.33)
+            elif (m_vals[day - 1] - 3 * s_vals[day - 1]) <= d_vals[day] <= \
+                    (m_vals[day - 1] + 3 * s_vals[day - 1]):
+                results.append(0.66)
+            else:
+                results.append(1.0)
+    else:
+        results = [0.0, ]
+        try:
+            median = np.median(in_data)
+            stdev = np.std(in_data)
+            if in_data.values[-1] < (median - 3 * stdev):
+                results.append(0.33)
+            elif (median - 3 * stdev) <= in_data.values[-1] <= (
+                    median + 3 * stdev):
+                results.append(0.66)
+            else:
+                results.append(1.0)
+        except TypeError:
+            results.append(None)
+    return results
+
+
+def _generate_trending_traces(in_data, period, moving_win_size=10,
+                              fill_missing=True, use_first=False,
+                              show_moving_median=True, name="", color=""):
+    """Generate the trending traces:
+     - samples,
+     - moving median (trending plot),
+     - outliers, regressions, progressions.
+
+    :param in_data: Full data set.
+    :param period: Sampling period.
+    :param moving_win_size: Window size.
+    :param fill_missing: If the chosen sample is missing in the full set, its
+    nearest neighbour is used.
+    :param use_first: Use the first sample even though it is not chosen.
+    :param show_moving_median: Show moving median (trending plot).
+    :param name: Name of the plot
+    :param color: Name of the color for the plot.
+    :type in_data: OrderedDict
+    :type period: int
+    :type moving_win_size: int
+    :type fill_missing: bool
+    :type use_first: bool
+    :type show_moving_median: bool
+    :type name: str
+    :type color: str
+    :returns: Generated traces (list) and the evaluated result (float).
+    :rtype: tuple(traces, result)
+    """
+
+    if period > 1:
+        in_data = _select_data(in_data, period,
+                               fill_missing=fill_missing,
+                               use_first=use_first)
+
+    data_x = [key for key in in_data.keys()]
+    data_y = [val for val in in_data.values()]
+    data_pd = pd.Series(data_y, index=data_x)
+
+    t_data, outliers = find_outliers(data_pd)
+
+    results = _evaluate_results(data_pd, t_data, window=moving_win_size)
+
+    anomalies = pd.Series()
+    anomalies_res = list()
+    for idx, item in enumerate(in_data.items()):
+        item_pd = pd.Series([item[1], ], index=[item[0], ])
+        if item[0] in outliers.keys():
+            anomalies = anomalies.append(item_pd)
+            anomalies_res.append(0.0)
+        elif results[idx] in (0.33, 1.0):
+            anomalies = anomalies.append(item_pd)
+            anomalies_res.append(results[idx])
+    anomalies_res.extend([0.0, 0.33, 0.66, 1.0])
+
+    # Create traces
+    color_scale = [[0.00, "grey"],
+                   [0.25, "grey"],
+                   [0.25, "red"],
+                   [0.50, "red"],
+                   [0.50, "white"],
+                   [0.75, "white"],
+                   [0.75, "green"],
+                   [1.00, "green"]]
+
+    trace_samples = plgo.Scatter(
+        x=data_x,
+        y=data_y,
+        mode='markers',
+        line={
+            "width": 1
+        },
+        name="{name}-thput".format(name=name),
+        marker={
+            "size": 5,
+            "color": color,
+            "symbol": "circle",
+        },
+    )
+    traces = [trace_samples, ]
+
+    trace_anomalies = plgo.Scatter(
+        x=anomalies.keys(),
+        y=anomalies.values,
+        mode='markers',
+        hoverinfo="none",
+        showlegend=False,
+        legendgroup=name,
+        name="{name}: outliers".format(name=name),
+        marker={
+            "size": 15,
+            "symbol": "circle-open",
+            "color": anomalies_res,
+            "colorscale": color_scale,
+            "showscale": True,
+            "line": {
+                "width": 2
+            },
+            "colorbar": {
+                "y": 0.5,
+                "len": 0.8,
+                "title": "Circles Marking Data Classification",
+                "titleside": 'right',
+                "titlefont": {
+                    "size": 14
+                },
+                "tickmode": 'array',
+                "tickvals": [0.125, 0.375, 0.625, 0.875],
+                "ticktext": ["Outlier", "Regression", "Normal", "Progression"],
+                "ticks": "",
+                "ticklen": 0,
+                "tickangle": -90,
+                "thickness": 10
+            }
+        }
+    )
+    traces.append(trace_anomalies)
+
+    if show_moving_median:
+        data_mean_y = pd.Series(data_y).rolling(
+            window=moving_win_size, min_periods=2).median()
+        trace_median = plgo.Scatter(
+            x=data_x,
+            y=data_mean_y,
+            mode='lines',
+            line={
+                "shape": "spline",
+                "width": 1,
+                "color": color,
+            },
+            name='{name}-trend'.format(name=name)
+        )
+        traces.append(trace_median)
+
+    return traces, results[-1]
+
+
+def _generate_chart(traces, layout, file_name):
+    """Generates the whole chart using pre-generated traces.
+
+    :param traces: Traces for the chart.
+    :param layout: Layout of the chart.
+    :param file_name: File name for the generated chart.
+    :type traces: list
+    :type layout: dict
+    :type file_name: str
+    """
+
+    # Create plot
+    logging.info("    Writing the file '{0}' ...".format(file_name))
+    plpl = plgo.Figure(data=traces, layout=layout)
+    try:
+        ploff.plot(plpl, show_link=False, auto_open=False, filename=file_name)
+    except plerr.PlotlyEmptyDataError:
+        logging.warning(" No data for the plot. Skipped.")
+
+
+def _generate_all_charts(spec, input_data):
+    """Generate all charts specified in the specification file.
+
+    :param spec: Specification.
+    :param input_data: Full data set.
+    :type spec: Specification
+    :type input_data: InputData
+    """
+
+    csv_table = list()
+    # Create the header:
+    builds = spec.cpta["data"].values()[0]
+    builds_lst = [str(build) for build in range(builds[0], builds[-1] + 1)]
+    header = "Build Number:," + ",".join(builds_lst) + '\n'
+    csv_table.append(header)
+
+    results = list()
+    for chart in spec.cpta["plots"]:
+        logging.info("  Generating the chart '{0}' ...".
+                     format(chart.get("title", "")))
+
+        # Transform the data
+        data = input_data.filter_data(chart, continue_on_error=True)
+        if data is None:
+            logging.error("No data.")
+            return
+
+        chart_data = dict()
+        for job in data:
+            for idx, build in job.items():
+                for test_name, test in build.items():
+                    if chart_data.get(test_name, None) is None:
+                        chart_data[test_name] = OrderedDict()
+                    try:
+                        chart_data[test_name][int(idx)] = \
+                            test["result"]["throughput"]
+                    except (KeyError, TypeError):
+                        pass
+
+        # Add items to the csv table:
+        for tst_name, tst_data in chart_data.items():
+            tst_lst = list()
+            for build in builds_lst:
+                item = tst_data.get(int(build), '')
+                tst_lst.append(str(item) if item else '')
+            csv_table.append("{0},".format(tst_name) + ",".join(tst_lst) + '\n')
+
+        for period in chart["periods"]:
+            # Generate traces:
+            traces = list()
+            win_size = 10 if period == 1 else 5 if period < 20 else 3
+            idx = 0
+            for test_name, test_data in chart_data.items():
+                if not test_data:
+                    logging.warning("No data for the test '{0}'".
+                                    format(test_name))
+                    continue
+                test_name = test_name.split('.')[-1]
+                trace, result = _generate_trending_traces(
+                    test_data,
+                    period=period,
+                    moving_win_size=win_size,
+                    fill_missing=True,
+                    use_first=False,
+                    name='-'.join(test_name.split('-')[3:-1]),
+                    color=COLORS[idx])
+                traces.extend(trace)
+                results.append(result)
+                idx += 1
+
+            # Generate the chart:
+            period_name = "Daily" if period == 1 else \
+                "Weekly" if period < 20 else "Monthly"
+            chart["layout"]["title"] = chart["title"].format(period=period_name)
+            _generate_chart(traces,
+                            chart["layout"],
+                            file_name="{0}-{1}-{2}{3}".format(
+                                spec.cpta["output-file"],
+                                chart["output-file-name"],
+                                period,
+                                spec.cpta["output-file-type"]))
+
+        logging.info("  Done.")
+
+    # Write the tables:
+    file_name = spec.cpta["output-file"] + "-trending"
+    with open("{0}.csv".format(file_name), 'w') as file_handler:
+        file_handler.writelines(csv_table)
+
+    txt_table = None
+    with open("{0}.csv".format(file_name), 'rb') as csv_file:
+        csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
+        header = True
+        for row in csv_content:
+            if txt_table is None:
+                txt_table = prettytable.PrettyTable(row)
+                header = False
+            else:
+                if not header:
+                    for idx, item in enumerate(row):
+                        try:
+                            row[idx] = str(round(float(item) / 1000000, 2))
+                        except ValueError:
+                            pass
+                txt_table.add_row(row)
+        txt_table.align["Build Number:"] = "l"
+    with open("{0}.txt".format(file_name), "w") as txt_file:
+        txt_file.write(str(txt_table))
+
+    # Evaluate result:
+    result = "PASS"
+    for item in results:
+        if item is None:
+            result = "FAIL"
+            break
+        if item == 0.66 and result == "PASS":
+            result = "PASS"
+        elif item == 0.33 or item == 0.0:
+            result = "FAIL"
+
+    logging.info("Partial results: {0}".format(results))
+    logging.info("Result: {0}".format(result))
+
+    return result
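
For orientation, a small usage sketch of the period sampling implemented by
_select_data above, with made-up build numbers and throughput values::

    from collections import OrderedDict

    # Hypothetical throughput samples keyed by build number; build 4 is missing.
    samples = OrderedDict(
        (b, 9.0e6 + b * 1.0e4) for b in (1, 2, 3, 5, 6, 7, 8, 9))

    # Weekly view: keep every 5th sample counted back from the newest build,
    # falling back to the nearest available neighbour of a missing build.
    weekly = _select_data(samples, period=5, fill_missing=True)
    print(list(weekly.keys()))  # [5, 9]; missing build 4 is replaced by build 5.
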
diff --git a/resources/tools/presentation/generator_plots.py b/resources/tools/presentation/generator_plots.py
index ac77b3d..b7fd420 100644 (file)
@@ -19,6 +19,7 @@ import logging
 import pandas as pd
 import plotly.offline as ploff
 import plotly.graph_objs as plgo
+
 from plotly.exceptions import PlotlyError
 
 from utils import mean
@@ -371,7 +372,6 @@ def plot_http_server_performance_box(plot, input_data):
                                y=df[col],
                                name=name,
                                **plot["traces"]))
-
     try:
         # Create plot
         plpl = plgo.Figure(data=traces, layout=plot["layout"])
diff --git a/resources/tools/presentation/generator_report.py b/resources/tools/presentation/generator_report.py
index cf8a8d1..6819f35 100644 (file)
 """Report generation.
 """
 
-import subprocess
 import logging
 import datetime
 
-from os import makedirs, environ
-from os.path import isdir
-from shutil import copy, Error, make_archive
+from shutil import make_archive
 
-from utils import get_files
-from errors import PresentationError
+from utils import get_files, execute_command, archive_input_data
 
 
 # .css file for the html format of the report
@@ -82,7 +78,7 @@ def generate_report(release, spec):
         "pdf": generate_pdf_report
     }
 
-    for report_format, versions in spec.output.items():
+    for report_format, versions in spec.output["format"].items():
         report[report_format](release, spec, versions)
 
     archive_input_data(spec)
@@ -110,7 +106,7 @@ def generate_html_report(release, spec, versions):
         date=datetime.date.today().strftime('%d-%b-%Y'),
         working_dir=spec.environment["paths"]["DIR[WORKING,SRC]"],
         build_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
-    _execute_command(cmd)
+    execute_command(cmd)
 
     with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE]"], "w") as \
             css_file:
@@ -146,7 +142,7 @@ def generate_pdf_report(release, spec, versions):
     for plot in plots:
         file_name = "{0}".format(plot.rsplit(".", 1)[0])
         cmd = convert_plots.format(html=plot, pdf=file_name)
-        _execute_command(cmd)
+        execute_command(cmd)
 
     # Generate the LaTeX documentation
     build_dir = spec.environment["paths"]["DIR[BUILD,LATEX]"]
@@ -155,7 +151,7 @@ def generate_pdf_report(release, spec, versions):
         date=datetime.date.today().strftime('%d-%b-%Y'),
         working_dir=spec.environment["paths"]["DIR[WORKING,SRC]"],
         build_dir=build_dir)
-    _execute_command(cmd)
+    execute_command(cmd)
 
     # Build pdf documentation
     archive_dir = spec.environment["paths"]["DIR[STATIC,ARCH]"]
@@ -174,7 +170,7 @@ def generate_pdf_report(release, spec, versions):
     ]
 
     for cmd in cmds:
-        _execute_command(cmd)
+        execute_command(cmd)
 
     logging.info("  Done.")
 
@@ -193,64 +189,3 @@ def archive_report(spec):
                  base_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
 
     logging.info("  Done.")
-
-
-def archive_input_data(spec):
-    """Archive the report.
-
-    :param spec: Specification read from the specification file.
-    :type spec: Specification
-    :raises PresentationError: If it is not possible to archive the input data.
-    """
-
-    logging.info("    Archiving the input data files ...")
-
-    if spec.is_debug:
-        extension = spec.debug["input-format"]
-    else:
-        extension = spec.input["file-format"]
-    data_files = get_files(spec.environment["paths"]["DIR[WORKING,DATA]"],
-                           extension=extension)
-    dst = spec.environment["paths"]["DIR[STATIC,ARCH]"]
-    logging.info("      Destination: {0}".format(dst))
-
-    try:
-        if not isdir(dst):
-            makedirs(dst)
-
-        for data_file in data_files:
-            logging.info("      Copying the file: {0} ...".format(data_file))
-            copy(data_file, dst)
-
-    except (Error, OSError) as err:
-        raise PresentationError("Not possible to archive the input data.",
-                                str(err))
-
-    logging.info("    Done.")
-
-
-def _execute_command(cmd):
-    """Execute the command in a subprocess and log the stdout and stderr.
-
-    :param cmd: Command to execute.
-    :type cmd: str
-    :returns: Return code of the executed command.
-    :rtype: int
-    """
-
-    env = environ.copy()
-    proc = subprocess.Popen(
-        [cmd],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        shell=True,
-        env=env)
-
-    stdout, stderr = proc.communicate()
-
-    logging.info(stdout)
-    logging.info(stderr)
-
-    if proc.returncode != 0:
-        logging.error("    Command execution failed.")
-    return proc.returncode
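
Both removed helpers now come from utils (see the updated import above). Assuming the shared execute_command keeps the behaviour of the _execute_command deleted here, its shape is roughly:

    import logging
    import subprocess
    from os import environ

    def execute_command(cmd):
        """Run a shell command in a subprocess and log stdout and stderr.

        A sketch based on the _execute_command removed above; the actual
        helper in utils.py may differ in details.
        """
        proc = subprocess.Popen([cmd], stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE, shell=True,
                                env=environ.copy())
        stdout, stderr = proc.communicate()
        logging.info(stdout)
        logging.info(stderr)
        if proc.returncode != 0:
            logging.error("    Command execution failed.")
        return proc.returncode
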
index 7dae834..7e19478 100644 (file)
@@ -16,13 +16,14 @@ Download all data.
 """
 
 import re
-
+import gzip
 import logging
 
 from os import rename, remove
 from os.path import join, getsize
 from shutil import move
 from zipfile import ZipFile, is_zipfile, BadZipfile
+
 from httplib import responses
 from requests import get, codes, RequestException, Timeout, TooManyRedirects, \
     HTTPError, ConnectionError
@@ -51,7 +52,13 @@ def download_data_files(spec):
     for job, builds in spec.builds.items():
         for build in builds:
             if job.startswith("csit-"):
-                url = spec.environment["urls"]["URL[JENKINS,CSIT]"]
+                if spec.input["file-name"].endswith(".zip"):
+                    url = spec.environment["urls"]["URL[JENKINS,CSIT]"]
+                elif spec.input["file-name"].endswith(".gz"):
+                    url = spec.environment["urls"]["URL[NEXUS,LOG]"]
+                else:
+                    logging.error("Not supported file format.")
+                    continue
             elif job.startswith("hc2vpp-"):
                 url = spec.environment["urls"]["URL[JENKINS,HC]"]
             else:
@@ -106,25 +113,37 @@ def download_data_files(spec):
                         file_handle.write(chunk)
                 file_handle.close()
 
-                expected_length = None
-                try:
-                    expected_length = int(response.headers["Content-Length"])
-                    logging.debug("  Expected file size: {0}B".
-                                  format(expected_length))
-                except KeyError:
-                    logging.debug("  No information about expected size.")
-
-                real_length = getsize(new_name)
-                logging.debug("  Downloaded size: {0}B".format(real_length))
-
-                if expected_length:
-                    if real_length == expected_length:
+                if spec.input["file-name"].endswith(".zip"):
+                    expected_length = None
+                    try:
+                        expected_length = int(response.
+                                              headers["Content-Length"])
+                        logging.debug("  Expected file size: {0}B".
+                                      format(expected_length))
+                    except KeyError:
+                        logging.debug("  No information about expected size.")
+
+                    real_length = getsize(new_name)
+                    logging.debug("  Downloaded size: {0}B".format(real_length))
+
+                    if expected_length:
+                        if real_length == expected_length:
+                            status = "downloaded"
+                            logging.info("{0}: {1}".format(code,
+                                                           responses[code]))
+                        else:
+                            logging.error("The file size differs from the "
+                                          "expected size.")
+                    else:
                         status = "downloaded"
                         logging.info("{0}: {1}".format(code, responses[code]))
-                    else:
-                        logging.error("The file size differs from the expected "
-                                      "size.")
-                else:
+
+                elif spec.input["file-name"].endswith(".gz"):
+                    rename(new_name, new_name[:-7])
+                    with open(new_name[:-7], 'r') as xml_file:
+                        with gzip.open(new_name, 'wb') as gz_file:
+                            gz_file.write(xml_file.read())
+                    new_name = new_name[:-7]
                     status = "downloaded"
                     logging.info("{0}: {1}".format(code, responses[code]))
 
@@ -185,29 +204,30 @@ def unzip_files(spec):
                 directory = spec.environment["paths"]["DIR[WORKING,DATA]"]
                 file_name = join(build["file-name"])
 
-                if build["status"] == "downloaded" and is_zipfile(file_name):
+                if build["status"] == "downloaded":
                     logging.info("Unziping: '{0}' from '{1}'.".
                                  format(data_file, file_name))
                     new_name = "{0}{1}{2}".format(file_name.rsplit('.')[-2],
                                                   SEPARATOR,
                                                   data_file.split("/")[-1])
                     try:
-                        with ZipFile(file_name, 'r') as zip_file:
-                            zip_file.extract(data_file, directory)
-                        logging.info("Moving {0} to {1} ...".
-                                     format(join(directory, data_file),
-                                            directory))
-                        move(join(directory, data_file), directory)
-                        logging.info("Renaming the file '{0}' to '{1}'".
-                                     format(join(directory,
-                                                 data_file.split("/")[-1]),
-                                            new_name))
-                        rename(join(directory, data_file.split("/")[-1]),
-                               new_name)
+                        if is_zipfile(file_name):
+                            with ZipFile(file_name, 'r') as zip_file:
+                                zip_file.extract(data_file, directory)
+                            logging.info("Moving {0} to {1} ...".
+                                         format(join(directory, data_file),
+                                                directory))
+                            move(join(directory, data_file), directory)
+                            logging.info("Renaming the file '{0}' to '{1}'".
+                                         format(join(directory,
+                                                     data_file.split("/")[-1]),
+                                                new_name))
+                            rename(join(directory, data_file.split("/")[-1]),
+                                   new_name)
+                            spec.set_input_file_name(job, build["build"],
+                                                     new_name)
                         status = "unzipped"
                         spec.set_input_state(job, build["build"], status)
-                        spec.set_input_file_name(job, build["build"],
-                                                   new_name)
                     except (BadZipfile, RuntimeError) as err:
                         logging.error("Failed to unzip the file '{0}': {1}.".
                                       format(file_name, str(err)))
@@ -216,8 +236,7 @@ def unzip_files(spec):
                                       format(data_file, str(err)))
                     finally:
                         if status == "failed":
-                            spec.set_input_file_name(job, build["build"],
-                                                       None)
+                            spec.set_input_file_name(job, build["build"], None)
                 else:
                     raise PresentationError("The file '{0}' does not exist or "
                                             "it is not a zip file".
index e1763b9..87d822f 100644 (file)
@@ -1,4 +1,4 @@
-# Copyright (c) 2017 Cisco and/or its affiliates.
+# Copyright (c) 2018 Cisco and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -23,6 +23,7 @@ import pandas as pd
 import logging
 
 from robot.api import ExecutionResult, ResultVisitor
+from robot import errors
 from collections import OrderedDict
 from string import replace
 
@@ -173,6 +174,9 @@ class ExecutionChecker(ResultVisitor):
 
     REGEX_TCP = re.compile(r'Total\s(rps|cps|throughput):\s([0-9]*).*$')
 
+    REGEX_MRR = re.compile(r'MaxReceivedRate_Results\s\[pkts/(\d*)sec\]:\s'
+                           r'tx\s(\d*),\srx\s(\d*)')
+
     def __init__(self, **metadata):
         """Initialisation.
 
@@ -219,7 +223,7 @@ class ExecutionChecker(ResultVisitor):
         self.parse_msg = {
             "setup-version": self._get_version,
             "teardown-vat-history": self._get_vat_history,
-            "teardown-show-runtime": self._get_show_run
+            "test-show-runtime": self._get_show_run
         }
 
     @property
@@ -372,11 +376,11 @@ class ExecutionChecker(ResultVisitor):
 
         self._data["suites"][suite.longname.lower().replace('"', "'").
             replace(" ", "_")] = {
-            "name": suite.name.lower(),
-            "doc": doc_str,
-            "parent": parent_name,
-            "level": len(suite.longname.split("."))
-        }
+                "name": suite.name.lower(),
+                "doc": doc_str,
+                "parent": parent_name,
+                "level": len(suite.longname.split("."))
+            }
 
         suite.keywords.visit(self)
 
@@ -415,17 +419,20 @@ class ExecutionChecker(ResultVisitor):
         test_result["tags"] = tags
         doc_str = test.doc.replace('"', "'").replace('\n', ' '). \
             replace('\r', '').replace('[', ' |br| [')
-        test_result["doc"] =  replace(doc_str, ' |br| [', '[', maxreplace=1)
+        test_result["doc"] = replace(doc_str, ' |br| [', '[', maxreplace=1)
         test_result["msg"] = test.message.replace('\n', ' |br| '). \
             replace('\r', '').replace('"', "'")
-        if test.status == "PASS" and ("NDRPDRDISC" in tags or "TCP" in tags):
-
+        if test.status == "PASS" and ("NDRPDRDISC" in tags or
+                                      "TCP" in tags or
+                                      "MRR" in tags):
             if "NDRDISC" in tags:
                 test_type = "NDR"
             elif "PDRDISC" in tags:
                 test_type = "PDR"
-            elif "TCP" in tags:  # Change to wrk?
+            elif "TCP" in tags:
                 test_type = "TCP"
+            elif "MRR" in tags:
+                test_type = "MRR"
             else:
                 return
 
@@ -458,6 +465,15 @@ class ExecutionChecker(ResultVisitor):
                 test_result["result"] = dict()
                 test_result["result"]["value"] = int(groups.group(2))
                 test_result["result"]["unit"] = groups.group(1)
+            elif test_type in ("MRR", ):
+                groups = re.search(self.REGEX_MRR, test.message)
+                test_result["result"] = dict()
+                test_result["result"]["duration"] = int(groups.group(1))
+                test_result["result"]["tx"] = int(groups.group(2))
+                test_result["result"]["rx"] = int(groups.group(3))
+                test_result["result"]["throughput"] = int(
+                    test_result["result"]["rx"] /
+                    test_result["result"]["duration"])
         else:
             test_result["status"] = test.status
 
@@ -496,6 +512,9 @@ class ExecutionChecker(ResultVisitor):
             elif keyword.type == "teardown":
                 self._lookup_kw_nr = 0
                 self.visit_teardown_kw(keyword)
+            else:
+                self._lookup_kw_nr = 0
+                self.visit_test_kw(keyword)
         except AttributeError:
             pass
 
@@ -508,6 +527,42 @@ class ExecutionChecker(ResultVisitor):
         """
         pass
 
+    def visit_test_kw(self, test_kw):
+        """Implements traversing through the test keyword and its child
+        keywords.
+
+        :param test_kw: Keyword to process.
+        :type test_kw: Keyword
+        :returns: Nothing.
+        """
+        for keyword in test_kw.keywords:
+            if self.start_test_kw(keyword) is not False:
+                self.visit_test_kw(keyword)
+                self.end_test_kw(keyword)
+
+    def start_test_kw(self, test_kw):
+        """Called when test keyword starts. Default implementation does
+        nothing.
+
+        :param test_kw: Keyword to process.
+        :type test_kw: Keyword
+        :returns: Nothing.
+        """
+        if test_kw.name.count("Show Runtime Counters On All Duts"):
+            self._lookup_kw_nr += 1
+            self._show_run_lookup_nr = 0
+            self._msg_type = "test-show-runtime"
+            test_kw.messages.visit(self)
+
+    def end_test_kw(self, test_kw):
+        """Called when keyword ends. Default implementation does nothing.
+
+        :param test_kw: Keyword to process.
+        :type test_kw: Keyword
+        :returns: Nothing.
+        """
+        pass
+
     def visit_setup_kw(self, setup_kw):
         """Implements traversing through the teardown keyword and its child
         keywords.
@@ -568,12 +623,6 @@ class ExecutionChecker(ResultVisitor):
         if teardown_kw.name.count("Show Vat History On All Duts"):
             self._vat_history_lookup_nr = 0
             self._msg_type = "teardown-vat-history"
-        elif teardown_kw.name.count("Show Statistics On All Duts"):
-            self._lookup_kw_nr += 1
-            self._show_run_lookup_nr = 0
-            self._msg_type = "teardown-show-runtime"
-
-        if self._msg_type:
             teardown_kw.messages.visit(self)
 
     def end_teardown_kw(self, teardown_kw):
@@ -710,7 +759,12 @@ class InputData(object):
         """
 
         with open(build["file-name"], 'r') as data_file:
-            result = ExecutionResult(data_file)
+            try:
+                result = ExecutionResult(data_file)
+            except errors.DataError as err:
+                logging.error("Error occurred while parsing output.xml: {0}".
+                              format(err))
+                return None
         checker = ExecutionChecker(job=job, build=build)
         result.visit(checker)
 
@@ -736,6 +790,11 @@ class InputData(object):
                 logging.info("    Processing the file '{0}'".
                              format(build["file-name"]))
                 data = InputData._parse_tests(job, build)
+                if data is None:
+                    logging.error("Input data file from the job '{job}', build "
+                                  "'{build}' is damaged. Skipped.".
+                                  format(job=job, build=build["build"]))
+                    continue
 
                 build_data = pd.Series({
                     "metadata": pd.Series(data["metadata"].values(),
@@ -793,7 +852,8 @@ class InputData(object):
             index += 1
             tag_filter = tag_filter[:index] + " in tags" + tag_filter[index:]
 
-    def filter_data(self, element, params=None, data_set="tests"):
+    def filter_data(self, element, params=None, data_set="tests",
+                    continue_on_error=False):
         """Filter required data from the given jobs and builds.
 
         The output data structure is:
@@ -818,15 +878,18 @@ class InputData(object):
         all parameters are included.
         :param data_set: The set of data to be filtered: tests, suites,
         metadata.
+        :param continue_on_error: Continue if there is an error while reading
+        the data. The item will be empty then.
         :type element: pandas.Series
         :type params: list
         :type data_set: str
+        :type continue_on_error: bool
         :returns: Filtered data.
         :rtype pandas.Series
         """
 
         logging.info("    Creating the data set for the {0} '{1}'.".
-                     format(element["type"], element.get("title", "")))
+                     format(element.get("type", ""), element.get("title", "")))
 
         try:
             if element["filter"] in ("all", "template"):
@@ -847,8 +910,15 @@ class InputData(object):
                 data[job] = pd.Series()
                 for build in builds:
                     data[job][str(build)] = pd.Series()
-                    for test_ID, test_data in \
-                            self.data[job][str(build)][data_set].iteritems():
+                    try:
+                        data_iter = self.data[job][str(build)][data_set].\
+                            iteritems()
+                    except KeyError:
+                        if continue_on_error:
+                            continue
+                        else:
+                            return None
+                    for test_ID, test_data in data_iter:
                         if eval(cond, {"tags": test_data.get("tags", "")}):
                             data[job][str(build)][test_ID] = pd.Series()
                             if params is None:
@@ -866,7 +936,7 @@ class InputData(object):
 
         except (KeyError, IndexError, ValueError) as err:
             logging.error("   Missing mandatory parameter in the element "
-                          "specification.", err)
+                          "specification: {0}".format(err))
             return None
         except AttributeError:
             return None
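
The filter strings from the specification are turned into Python expressions by appending " in tags" to every quoted tag and are then evaluated per test with eval(), as visible in the hunk above. A condensed sketch of the idea, using re.sub instead of the parser's own character-by-character insertion:

    import re

    def to_condition(tag_filter):
        """Rewrite "'MRR' and '64B'" into "'MRR' in tags and '64B' in tags"."""
        return re.sub(r"('[^']+')", r"\1 in tags", tag_filter)

    cond = to_condition("'NIC_Intel-X520-DA2' and 'MRR' and '64B'")
    tags = ["NIC_Intel-X520-DA2", "MRR", "64B", "1T1C", "IP4FWD"]
    print(eval(cond, {"tags": tags}))   # True
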
index 6d613e3..98642c8 100644 (file)
@@ -1,4 +1,4 @@
-# Copyright (c) 2017 Cisco and/or its affiliates.
+# Copyright (c) 2018 Cisco and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -28,6 +28,9 @@ from generator_plots import generate_plots
 from generator_files import generate_files
 from static_content import prepare_static_content
 from generator_report import generate_report
+from generator_CPTA import generate_cpta
+
+from pprint import pprint
 
 
 def parse_args():
@@ -81,8 +84,9 @@ def main():
         spec.read_specification()
     except PresentationError:
         logging.critical("Finished with error.")
-        sys.exit(1)
+        return 1
 
+    ret_code = 0
     try:
         env = Environment(spec.environment, args.force)
         env.set_environment()
@@ -101,22 +105,31 @@ def main():
         generate_tables(spec, data)
         generate_plots(spec, data)
         generate_files(spec, data)
-        generate_report(args.release, spec)
 
-        logging.info("Successfully finished.")
+        if spec.output["output"] == "report":
+            generate_report(args.release, spec)
+            logging.info("Successfully finished.")
+        elif spec.output["output"] == "CPTA":
+            sys.stdout.write(generate_cpta(spec, data))
+            logging.info("Successfully finished.")
+        else:
+            logging.critical("The output '{0}' is not supported.".
+                             format(spec.output["output"]))
+            ret_code = 1
 
     except (KeyError, ValueError, PresentationError) as err:
         logging.info("Finished with an error.")
         logging.critical(str(err))
+        ret_code = 1
     except Exception as err:
         logging.info("Finished with an unexpected error.")
         logging.critical(str(err))
-
+        ret_code = 1
     finally:
         if spec is not None and not spec.is_debug:
             clean_environment(spec.environment)
-        sys.exit(1)
+        return ret_code
 
 
 if __name__ == '__main__':
-    main()
+    sys.exit(main())
diff --git a/resources/tools/presentation/run_cpta.sh b/resources/tools/presentation/run_cpta.sh
new file mode 100755 (executable)
index 0000000..954f17d
--- /dev/null
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -x
+
+# set default values in config array
+typeset -A DIR
+
+DIR[WORKING]=_tmp
+
+# Install system dependencies
+sudo apt-get -y update
+sudo apt-get -y install libxml2 libxml2-dev libxslt-dev build-essential \
+    zlib1g-dev unzip
+
+# Create working directories
+mkdir ${DIR[WORKING]}
+
+# Create virtual environment
+virtualenv ${DIR[WORKING]}/env
+. ${DIR[WORKING]}/env/bin/activate
+
+# Install python dependencies:
+pip install -r requirements.txt
+
+export PYTHONPATH=`pwd`
+
+STATUS=$(python pal.py \
+    --specification specification_CPTA.yaml \
+    --logging INFO \
+    --force)
+RETURN_STATUS=$?
+
+echo ${STATUS}
+exit ${RETURN_STATUS}
index 34d6c5d..d294640 100755 (executable)
@@ -23,10 +23,6 @@ then
     sudo sed -i.bak 's/^\(main_memory\s=\s\).*/\110000000/' /usr/share/texlive/texmf-dist/web2c/texmf.cnf
 fi
 
-# Clean-up when finished
-trap 'rm -rf ${DIR[WORKING]}; exit' EXIT
-trap 'rm -rf ${DIR[WORKING]}; exit' ERR
-
 # Create working directories
 mkdir ${DIR[WORKING]}
 
@@ -44,3 +40,6 @@ python pal.py \
     --release ${RELEASE} \
     --logging INFO \
     --force
+
+RETURN_STATUS=$(echo $?)
+exit ${RETURN_STATUS}
index ddf5363..da4443d 100644 (file)
 
 -
   type: "output"
+  output: "report"
   format:
     html:
     - full
diff --git a/resources/tools/presentation/specification_CPTA.yaml b/resources/tools/presentation/specification_CPTA.yaml
new file mode 100644 (file)
index 0000000..96a803f
--- /dev/null
@@ -0,0 +1,774 @@
+# Copyright (c) 2018 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is the specification of parameters for "Continuous Performance Trending
+# and Analysis" feature provided by PAL.
+
+-
+  type: "environment"
+  configuration:
+    # Debug mode:
+    # - Skip:
+    #   - Download of input data files
+    # - Do:
+    #   - Read data from given zip / xml files
+    #   - Set the configuration as it is done in normal mode
+    # If the section "type: debug" is missing, CFG[DEBUG] is set to 0.
+    CFG[DEBUG]: 0
+
+  paths:
+    # Top level directories:
+    ## Working directory
+    DIR[WORKING]: "_tmp"
+    ## Build directories
+    DIR[BUILD,HTML]: "_build"
+    ## Static .rst files
+    DIR[RST]: "../../../docs/cpta"
+
+    # Static html content
+    DIR[STATIC]: "{DIR[BUILD,HTML]}/_static"
+    DIR[STATIC,VPP]: "{DIR[STATIC]}/vpp"
+    # DIR[STATIC,DPDK]: "{DIR[STATIC]}/dpdk"
+    DIR[STATIC,ARCH]: "{DIR[STATIC]}/archive"
+
+    # Working directories
+    ## Input data files (.zip, .xml)
+    DIR[WORKING,DATA]: "{DIR[WORKING]}/data"
+    ## Static source files from git
+    DIR[WORKING,SRC]: "{DIR[WORKING]}/src"
+    DIR[WORKING,SRC,STATIC]: "{DIR[WORKING,SRC]}/_static"
+
+    # .css patch file
+    DIR[CSS_PATCH_FILE]: "{DIR[STATIC]}/theme_overrides.css"
+    DIR[CSS_PATCH_FILE2]: "{DIR[WORKING,SRC,STATIC]}/theme_overrides.css"
+
+  urls:
+    URL[JENKINS,CSIT]: "https://jenkins.fd.io/view/csit/job"
+    URL[NEXUS,LOG]: "https://logs.fd.io/production/vex-yul-rot-jenkins-1"
+    URL[NEXUS]: "https://docs.fd.io/csit"
+    DIR[NEXUS]: "report/_static/archive"
+
+  make-dirs:
+  # List the directories which are created while preparing the environment.
+  # All directories MUST be defined in "paths" section.
+  - "DIR[WORKING,DATA]"
+  - "DIR[WORKING,SRC,STATIC]"
+  - "DIR[BUILD,HTML]"
+  - "DIR[STATIC,VPP]"
+  - "DIR[STATIC,ARCH]"
+  build-dirs:
+  # List the directories where the results (build) is stored.
+  # All directories MUST be defined in "paths" section.
+  - "DIR[BUILD,HTML]"
+
+-
+  type: "configuration"
+
+  data-sets:
+    plot-performance-trending:
+      csit-vpp-perf-check-1801:
+        start: 1
+        end: "lastCompletedBuild" # "lastSuccessfulBuild"  # take all from the 'start'
+
+  plot-layouts:
+    plot-cpta:
+      title: ""
+      autosize: False
+      showlegend: True
+      width: 1100
+      height: 800
+      yaxis:
+        showticklabels: True
+        title: "Throughput [Mpps]"
+        hoverformat: ".4s"
+        range: []
+        gridcolor: "rgb(238, 238, 238)"
+        linecolor: "rgb(238, 238, 238)"
+        showline: True
+        zeroline: False
+        tickcolor: "rgb(238, 238, 238)"
+        linewidth: 1
+        showgrid: True
+      xaxis:
+        showticklabels: True
+        title: "VPP Performance Trending Job ID"
+        autorange: True
+        showgrid: True
+        gridcolor: "rgb(238, 238, 238)"
+        linecolor: "rgb(238, 238, 238)"
+        fixedrange: False
+        zeroline: False
+        tickcolor: "rgb(238, 238, 238)"
+        showline: True
+        linewidth: 1
+        autotick: True
+      margin:
+        r: 20
+        b: 50
+        t: 50
+        l: 70
+      legend:
+        orientation: "h"
+        traceorder: "normal"
+#        tracegroupgap: 10
+#        bordercolor: "rgb(238, 238, 238)"
+#        borderwidth: 1
+      hoverlabel:
+        namelength: -1
+
+-
+  type: "debug"
+  general:
+    input-format: "xml"  # zip or xml
+    extract: "robot-plugin/output.xml"  # Only for zip
+  builds:
+    # The files must be in the directory DIR[WORKING,DATA]
+    csit-vpp-perf-mrr-daily-master:
+    -
+      build: 1
+      file: "{DIR[WORKING,DATA]}/output_mrr_1.xml"
+    -
+      build: 2
+      file: "{DIR[WORKING,DATA]}/output_mrr_2.xml"
+    -
+      build: 3
+      file: "{DIR[WORKING,DATA]}/output_mrr_3.xml"
+    -
+      build: 4
+      file: "{DIR[WORKING,DATA]}/output_mrr_4.xml"
+    -
+      build: 5
+      file: "{DIR[WORKING,DATA]}/output_mrr_5.xml"
+    -
+      build: 6
+      file: "{DIR[WORKING,DATA]}/output_mrr_5.xml"
+    -
+      build: 7
+      file: "{DIR[WORKING,DATA]}/output_mrr_5.xml"
+    -
+      build: 8
+      file: "{DIR[WORKING,DATA]}/output_mrr_5.xml"
+    -
+      build: 9
+      file: "{DIR[WORKING,DATA]}/output_mrr_5.xml"
+    -
+      build: 10
+      file: "{DIR[WORKING,DATA]}/output_mrr_5.xml"
+    -
+      build: 11
+      file: "{DIR[WORKING,DATA]}/output_mrr_5.xml"
+    -
+      build: 12
+      file: "{DIR[WORKING,DATA]}/output_mrr_5.xml"
+
+-
+  type: "static"
+  src-path: "{DIR[RST]}"
+  dst-path: "{DIR[WORKING,SRC]}"
+
+-
+  type: "input"  # Ignored in debug mode
+  general:
+#    file-name: "output.xml.log.gz"
+#    file-format: ".gz"
+#    download-path: "{job}/{build}/archives/{filename}"
+#    extract: "output.xml"
+    file-name: "robot-plugin.zip"
+    file-format: ".zip"
+    download-path: "{job}/{build}/robot/report/*zip*/{filename}"
+    extract: "robot-plugin/output.xml"
+  builds:
+    csit-vpp-perf-check-1801:
+      start: 1
+      end: "lastCompletedBuild"  # take all from the 'start'
+
+-
+  type: "output"
+  output:
+#   "report"
+    "CPTA"  # Continuous Performance Trending and Analysis
+  format:
+    html:
+    - full
+    pdf:
+    - minimal
+
+################################################################################
+###                                 C P T A                                  ###
+################################################################################
+
+# Plots VPP Continuous Performance Trending and Analysis
+-
+  type: "cpta"
+  title: "Continuous Performance Trending and Analysis"
+  algorithm: "cpta"
+  output-file-type: ".html"
+  output-file: "{DIR[STATIC,VPP]}/cpta"
+  data: "plot-performance-trending"
+  plots:
+
+# L2
+
+    - title: "VPP 1T1C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '2T2C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '4T4C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 1T1C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-feature-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'FEATURE' and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-feature-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'FEATURE' and '2T2C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-feature-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'FEATURE' and '4T4C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 1T1C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-1t1c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '1T1C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-2t2c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '2T2C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C L2 64B Packet Throughput - {period} Trending"
+      output-file-name: "l2-4t4c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '4T4C' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST' and not 'MEMIF'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+# IPv4
+
+    - title: "VPP 1T1C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '1T1C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '2T2C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and ('BASE' or 'SCALE') and '4T4C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 1T1C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-feature-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'FEATURE' and '1T1C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-feature-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'FEATURE' and '2T2C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-feature-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'FEATURE' and '4T4C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 1T1C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-1t1c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and ('BASE' or 'SCALE' or 'FEATURE') and '1T1C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-2t2c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and ('BASE' or 'SCALE' or 'FEATURE') and '2T2C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C IPv4 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-4t4c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and ('BASE' or 'SCALE' or 'FEATURE') and '4T4C' and 'IP4FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+# IPv4 Tunnels
+
+    - title: "VPP 1T1C IPv4 Tunnels 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-tunnels-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'ENCAP' and 'MRR' and '1T1C' and ('VXLAN' or 'VXLANGPE' or 'LISP' or 'LISPGPE' or 'GRE') and not 'VHOST' and not 'IPSECHW'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C IPv4 Tunnels 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-tunnels-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'ENCAP' and 'MRR' and '2T2C' and ('VXLAN' or 'VXLANGPE' or 'LISP' or 'LISPGPE' or 'GRE') and not 'VHOST' and not 'IPSECHW'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C IPv4 Tunnels 64B Packet Throughput - {period} Trending"
+      output-file-name: "ip4-tunnels-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'ENCAP' and 'MRR' and '4T4C' and ('VXLAN' or 'VXLANGPE' or 'LISP' or 'LISPGPE' or 'GRE') and not 'VHOST' and not 'IPSECHW'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+# IPv6
+
+    - title: "VPP 1T1C IPv6 78B Packet Throughput - {period} Trending"
+      output-file-name: "ip6-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '78B' and ('BASE' or 'SCALE' or 'FEATURE') and '1T1C' and 'IP6FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C IPv6 78B Packet Throughput - {period} Trending"
+      output-file-name: "ip6-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '78B' and ('BASE' or 'SCALE' or 'FEATURE') and '2T2C' and 'IP6FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C IPv6 78B Packet Throughput - {period} Trending"
+      output-file-name: "ip6-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '78B' and ('BASE' or 'SCALE' or 'FEATURE') and '4T4C' and 'IP6FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 1T1C IPv6 78B Packet Throughput - {period} Trending"
+      output-file-name: "ip6-1t1c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '78B' and ('BASE' or 'SCALE' or 'FEATURE') and '1T1C' and 'IP6FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C IPv6 78B Packet Throughput - {period} Trending"
+      output-file-name: "ip6-2t2c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '78B' and ('BASE' or 'SCALE' or 'FEATURE') and '2T2C' and 'IP6FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C IPv6 78B Packet Throughput - {period} Trending"
+      output-file-name: "ip6-4t4c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '78B' and ('BASE' or 'SCALE' or 'FEATURE') and '4T4C' and 'IP6FWD' and not 'IPSEC' and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+# Container memif
+
+    - title: "VPP 1T1C L2 Container memif 64B Packet Throughput - {period} Trending"
+      output-file-name: "container-memif-l2-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'BASE' and '1T1C' and 'MEMIF' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C L2 Container memif 64B Packet Throughput - {period} Trending"
+      output-file-name: "container-memif-l2-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'BASE' and '2T2C' and 'MEMIF' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C L2 Container memif 64B Packet Throughput - {period} Trending"
+      output-file-name: "container-memif-l2-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and 'MRR' and '64B' and 'BASE' and '4T4C' and 'MEMIF' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 1T1C L2 Container memif 64B Packet Throughput - {period} Trending"
+      output-file-name: "container-memif-l2-1t1c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and 'BASE' and '1T1C' and 'MEMIF' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C L2 Container memif 64B Packet Throughput - {period} Trending"
+      output-file-name: "container-memif-l2-2t2c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and 'BASE' and '2T2C' and 'MEMIF' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C L2 Container memif 64B Packet Throughput - {period} Trending"
+      output-file-name: "container-memif-l2-4t4c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and 'MRR' and '64B' and 'BASE' and '4T4C' and 'MEMIF' and ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+# VM vhost
+
+    - title: "VPP 1T1C VM vhost ethip4 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-ethip4-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'MRR' and '1T1C' and 'VHOST' and not ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD')"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C VM vhost ethip4 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-ethip4-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'MRR' and '2T2C' and 'VHOST' and not ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD')"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C VM vhost ethip4 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-ethip4-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'MRR' and '4T4C' and 'VHOST' and not ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD')"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 1T1C VM vhost eth 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-eth-1t1c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'MRR' and '1T1C' and 'VHOST' and not 'VXLAN' and not 'IP4FWD' and not 'DOT1Q' and not '2VM'"
+
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C VM vhost eth 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-eth-2t2c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'MRR' and '2T2C' and 'VHOST' and not 'VXLAN' and not 'IP4FWD' and not 'DOT1Q' and not '2VM'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C VM vhost eth 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-eth-4t4c-x520"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-X520-DA2' and '64B' and 'MRR' and '4T4C' and 'VHOST' and not 'VXLAN' and not 'IP4FWD' and not 'DOT1Q' and not '2VM'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 1T1C VM vhost eth 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-eth-1t1c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and '64B' and 'MRR' and '1T1C' and 'VHOST' and not 'VXLAN' and not 'IP4FWD' and not 'DOT1Q' and not '2VM'"
+
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C VM vhost eth 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-eth-2t2c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and '64B' and 'MRR' and '2T2C' and 'VHOST' and not 'VXLAN' and not 'IP4FWD' and not 'DOT1Q' and not '2VM'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C VM vhost eth 64B Packet Throughput - {period} Trending"
+      output-file-name: "vm-vhost-eth-4t4c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and '64B' and 'MRR' and '4T4C' and 'VHOST' and not 'VXLAN' and not 'IP4FWD' and not 'DOT1Q' and not '2VM'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+# IPSec
+
+    - title: "VPP 1T1C IPSec 64B Packet Throughput - {period} Trending"
+      output-file-name: "ipsec-1t1c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and '64B' and 'IP4FWD' and 'MRR' and '1T1C' and 'IPSECHW' and ('IPSECTRAN' or 'IPSECTUN') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 2T2C IPSec 64B Packet Throughput - {period} Trending"
+      output-file-name: "ipsec-2t2c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and '64B' and 'IP4FWD' and 'MRR' and '2T2C' and 'IPSECHW' and ('IPSECTRAN' or 'IPSECTUN') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
+
+    - title: "VPP 4T4C IPSec 64B Packet Throughput - {period} Trending"
+      output-file-name: "ipsec-4t4c-xl710"
+      data: "plot-performance-trending"
+      filter: "'NIC_Intel-XL710' and '64B' and 'IP4FWD' and 'MRR' and '4T4C' and 'IPSECHW' and ('IPSECTRAN' or 'IPSECTUN') and not 'VHOST'"
+      parameters:
+      - "result"
+      periods:
+      - 1
+      - 5
+      - 30
+      layout: "plot-cpta"
index 501f9f1..207507e 100644 (file)
@@ -22,6 +22,8 @@ from yaml import load, YAMLError
 from pprint import pformat
 
 from errors import PresentationError
+from utils import get_last_successful_build_number
+from utils import get_last_completed_build_number
 
 
 class Specification(object):
@@ -53,7 +55,8 @@ class Specification(object):
                                "output": dict(),
                                "tables": list(),
                                "plots": list(),
-                               "files": list()}
+                               "files": list(),
+                               "cpta": dict()}
 
     @property
     def specification(self):
@@ -173,6 +176,17 @@ class Specification(object):
         """
         return self._specification["files"]
 
+    @property
+    def cpta(self):
+        """Getter - Continuous Performance Trending and Analysis to be
+        generated.
+
+        :returns: Specification of the Continuous Performance Trending and
+        Analysis to be generated.
+        :rtype: dict
+        """
+        return self._specification["cpta"]
+
     def set_input_state(self, job, build_nr, state):
         """Set the state of input
 
@@ -217,6 +231,44 @@ class Specification(object):
             raise PresentationError("Job '{}' and build '{}' is not defined in "
                                     "specification file.".format(job, build_nr))
 
+    def _get_build_number(self, job, build_type):
+        """Get the number of the job defined by its name:
+         - lastSuccessfulBuild
+         - lastCompletedBuild
+
+        :param job: Job name.
+        :param build_type: Build type:
+         - lastSuccessfulBuild
+         - lastCompletedBuild
+        :type job" str
+        :raises PresentationError: If it is not possible to get the build
+        number.
+        :returns: The build number.
+        :rtype: int
+        """
+
+        # defined as a range <start, end>
+        if build_type == "lastSuccessfulBuild":
+            # defined as a range <start, lastSuccessfulBuild>
+            ret_code, build_nr, _ = get_last_successful_build_number(
+                self.environment["urls"]["URL[JENKINS,CSIT]"], job)
+        elif build_type == "lastCompletedBuild":
+            # defined as a range <start, lastCompletedBuild>
+            ret_code, build_nr, _ = get_last_completed_build_number(
+                self.environment["urls"]["URL[JENKINS,CSIT]"], job)
+        else:
+            raise PresentationError("Not supported build type: '{0}'".
+                                    format(build_type))
+        if ret_code != 0:
+            raise PresentationError("Not possible to get the number of the "
+                                    "build number.")
+        try:
+            build_nr = int(build_nr)
+            return build_nr
+        except ValueError as err:
+            raise PresentationError("Not possible to get the number of the "
+                                    "build number.\nReason: {0}".format(err))
+
     def _get_type_index(self, item_type):
         """Get index of item type (environment, input, output, ...) in
         specification YAML file.
@@ -354,9 +406,23 @@ class Specification(object):
 
         try:
             self._specification["configuration"] = self._cfg_yaml[idx]
+
         except KeyError:
             raise PresentationError("No configuration defined.")
 
+        # Data sets: Replace ranges by lists
+        for set_name, data_set in self.configuration["data-sets"].items():
+            for job, builds in data_set.items():
+                if builds:
+                    if isinstance(builds, dict):
+                        build_nr = builds.get("end", None)
+                        try:
+                            build_nr = int(build_nr)
+                        except ValueError:
+                            # defined as a range <start, build_type>
+                            build_nr = self._get_build_number(job, build_nr)
+                        builds = [x for x in range(builds["start"], build_nr+1)]
+                        self.configuration["data-sets"][set_name][job] = builds
         logging.info("Done.")
 
     def _parse_debug(self):
@@ -412,12 +478,22 @@ class Specification(object):
             for key, value in self._cfg_yaml[idx]["general"].items():
                 self._specification["input"][key] = value
             self._specification["input"]["builds"] = dict()
+
             for job, builds in self._cfg_yaml[idx]["builds"].items():
                 if builds:
+                    if isinstance(builds, dict):
+                        build_nr = builds.get("end", None)
+                        try:
+                            build_nr = int(build_nr)
+                        except ValueError:
+                            # defined as a range <start, build_type>
+                            build_nr = self._get_build_number(job, build_nr)
+                        builds = [x for x in range(builds["start"], build_nr+1)]
                     self._specification["input"]["builds"][job] = list()
                     for build in builds:
-                        self._specification["input"]["builds"][job].\
+                        self._specification["input"]["builds"][job]. \
                             append({"build": build, "status": None})
+
                 else:
                     logging.warning("No build is defined for the job '{}'. "
                                     "Trying to continue without it.".
@@ -440,8 +516,8 @@ class Specification(object):
             raise PresentationError("No output defined.")
 
         try:
-            self._specification["output"] = self._cfg_yaml[idx]["format"]
-        except KeyError:
+            self._specification["output"] = self._cfg_yaml[idx]
+        except (KeyError, IndexError):
             raise PresentationError("No output defined.")
 
         logging.info("Done.")
@@ -535,6 +611,35 @@ class Specification(object):
                 self._specification["files"].append(element)
                 count += 1
 
+            elif element["type"] == "cpta":
+                logging.info("  {:3d} Processing Continuous Performance "
+                             "Trending and Analysis ...".format(count))
+
+                for plot in element["plots"]:
+                    # Add layout to the plots:
+                    layout = plot.get("layout", None)
+                    if layout is not None:
+                        try:
+                            plot["layout"] = \
+                                self.configuration["plot-layouts"][layout]
+                        except KeyError:
+                            raise PresentationError(
+                                "Layout {0} is not defined in the "
+                                "configuration section.".format(layout))
+                    # Add data sets:
+                    if isinstance(plot.get("data", None), str):
+                        data_set = plot["data"]
+                        try:
+                            plot["data"] = \
+                                self.configuration["data-sets"][data_set]
+                        except KeyError:
+                            raise PresentationError(
+                                "Data set {0} is not defined in "
+                                "the configuration section.".
+                                format(data_set))
+                self._specification["cpta"] = element
+                count += 1
+
         logging.info("Done.")
 
     def read_specification(self):
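
The build ranges in the specification ("start" plus an "end" that is either a number or a keyword such as "lastCompletedBuild") are expanded into plain lists of build numbers, as added above. A condensed sketch of that expansion, with the Jenkins lookup stubbed out because the utils helpers are not part of this change:

    def expand_builds(builds, last_completed=17):
        """Expand {"start": s, "end": e} into a list of build numbers.

        The keyword case is stubbed with the last_completed argument; the
        real code resolves it by querying Jenkins through utils helpers.
        """
        try:
            end_nr = int(builds["end"])
        except ValueError:
            end_nr = last_completed
        return [nr for nr in range(builds["start"], end_nr + 1)]

    print(expand_builds({"start": 1, "end": "lastCompletedBuild"}))
    # [1, 2, 3, ..., 17]
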
index fe2d072..a02330c 100644 (file)
@@ -34,8 +34,11 @@ def prepare_static_content(spec):
     content.
     """
 
-    src = spec.static["src-path"]
-    dst = spec.static["dst-path"]
+    src = spec.static.get("src-path", None)
+    dst = spec.static.get("dst-path", None)
+    if src is None or dst is None:
+        logging.warning("No static content specified, skipping")
+        return
 
     # Copy all the static content to the build directory:
     logging.info("Copying the static content ...")