diff mbox series

[meta-oe] bonnie++: New recipe for version 2.0

Message ID 8701b8118a57fad2921655c82cea20dcbf92c12d.1703772150.git.joerg.sommer@navimatix.de
State Accepted
Headers show
Series [meta-oe] bonnie++: New recipe for version 2.0 | expand

Commit Message

Jörg Sommer Dec. 28, 2023, 2:02 p.m. UTC
From: Jörg Sommer <joerg.sommer@navimatix.de>

Newer versions of bonnie get published on
<https://doc.coker.com.au/projects/bonnie/>. Unfortunately, the new version
doesn't compile with g++ 11, which requires *fix-csv2html-data.patch*, and
configure fails due to cross compilation, which gets fixed
with *fix-configure-lfs.patch*.

Signed-off-by: Jörg Sommer <joerg.sommer@navimatix.de>
---
 .../bonnie/bonnie++/fix-configure-lfs.patch   |  39 ++++
 .../bonnie/bonnie++/fix-csv2html-data.patch   | 183 ++++++++++++++++++
 .../bonnie/bonnie++_2.00a.bb                  |  33 ++++
 3 files changed, 255 insertions(+)
 create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
 create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
 create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb

Comments

Khem Raj Dec. 28, 2023, 6:15 p.m. UTC | #1
On Thu, Dec 28, 2023 at 6:03 AM Jörg Sommer via lists.openembedded.org
<joerg.sommer=navimatix.de@lists.openembedded.org> wrote:
>
> From: Jörg Sommer <joerg.sommer@navimatix.de>
>
> Newer versions of bonnie get published on
> <https://doc.coker.com.au/projects/bonnie/>. Unfortunately, the new version
> doesn't compile with g++ 11 which requires *fix-csv2html-data.patch* and
> configure fails due to cross compilation which gets fixed
> with *fix-configure-lfs.patch*
>
> Signed-off-by: Jörg Sommer <joerg.sommer@navimatix.de>
> ---
>  .../bonnie/bonnie++/fix-configure-lfs.patch   |  39 ++++
>  .../bonnie/bonnie++/fix-csv2html-data.patch   | 183 ++++++++++++++++++
>  .../bonnie/bonnie++_2.00a.bb                  |  33 ++++
>  3 files changed, 255 insertions(+)
>  create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
>  create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
>  create mode 100644 meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
>
> diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
> new file mode 100644
> index 000000000..af20acdcd
> --- /dev/null
> +++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
> @@ -0,0 +1,39 @@
> +Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=4ffece51791ba75ddca2e664cdce726cc40c92d3]
> +
> +diff --git i/configure.in w/configure.in
> +index 080e40c..f2a2bbe 100644
> +--- i/configure.in
> ++++ w/configure.in
> +@@ -82,8 +82,15 @@ void * thread_func(void * param) { return NULL; }
> +   , thread_ldflags="-lpthread"
> +   , thread_ldflags="-pthread")
> +
> +-AC_SUBST(large_file)
> +-AC_TRY_RUN([#ifndef _LARGEFILE64_SOURCE
> ++AC_ARG_ENABLE(lfs,
> ++      [  --disable-lfs  disable large file support],
> ++      LFS_CHOICE=$enableval, LFS_CHOICE=check)
> ++
> ++if test "$LFS_CHOICE" = yes; then
> ++   bonniepp_cv_large_file=yes
> ++elif test "$LFS_CHOICE" = check; then
> ++   AC_CACHE_CHECK([whether to enable -D_LARGEFILE64_SOURCE], bonniepp_cv_large_file,
> ++      AC_TRY_RUN([#ifndef _LARGEFILE64_SOURCE
> + #define _LARGEFILE64_SOURCE
> + #endif
> + #include <stdio.h>
> +@@ -118,8 +125,12 @@ int main () {
> +   }
> +   close(fd);
> +   return 0;
> +-}], large_file="yes")
> +-if [[ -n "$large_file" ]]; then
> ++}], bonniepp_cv_large_file="yes"))
> ++fi
> ++
> ++AC_SUBST(large_file)
> ++
> ++if [[ -n "$bonniepp_cv_large_file" ]]; then
> +    large_file="#define _LARGEFILE64_SOURCE"
> + fi
> +
> diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
> new file mode 100644
> index 000000000..4b37b8d65
> --- /dev/null
> +++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
> @@ -0,0 +1,183 @@
> +commit 7e9433a56f22426b11cbc9bd80e0debca67c893b
> +Author: Jörg Sommer <joerg.sommer@navimatix.de>
> +Date:   Mon Jun 26 12:38:30 2023 +0200
> +
> +    csv2html: Explicitly reference data in top level
> +
> +    With g++ 11 *data* became ambiguous with [std::data][1]. Therefore it's
> +    needed to explicitly address the variable from the top level scope.
> +
> +    [1] https://en.cppreference.com/w/cpp/iterator/data
> +
> +Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=fb13a71d56dab8aaa39233fcaaedfb0ba4ad647d]
> +
> +diff --git a/bon_csv2html.cpp b/bon_csv2html.cpp
> +index e9d9c50..652e330 100644
> +--- a/bon_csv2html.cpp
> ++++ b/bon_csv2html.cpp
> +@@ -87,8 +87,8 @@ int main(int argc, char **argv)
> +     read_in(buf);
> +   }
> +
> +-  props = new PPCCHAR[data.size()];
> +-  for(i = 0; i < data.size(); i++)
> ++  props = new PPCCHAR[::data.size()];
> ++  for(i = 0; i < ::data.size(); i++)
> +   {
> +     props[i] = new PCCHAR[MAX_ITEMS];
> +     props[i][0] = NULL;
> +@@ -109,7 +109,7 @@ int main(int argc, char **argv)
> +   }
> +   calc_vals();
> +   int mid_width = header();
> +-  for(i = 0; i < data.size(); i++)
> ++  for(i = 0; i < ::data.size(); i++)
> +   {
> + // First print the average speed line
> +     printf("<tr>");
> +@@ -171,23 +171,23 @@ int compar(const void *a, const void *b)
> +
> + void calc_vals()
> + {
> +-  ITEM *arr = new ITEM[data.size()];
> ++  ITEM *arr = new ITEM[::data.size()];
> +   for(unsigned int column_ind = 0; column_ind < MAX_ITEMS; column_ind++)
> +   {
> +     switch(vals[column_ind])
> +     {
> +     case eNoCols:
> +     {
> +-      for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
> ++      for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
> +       {
> +         if(column_ind == COL_CONCURRENCY)
> +         {
> +-          if(data[row_ind][column_ind] && strcmp("1", data[row_ind][column_ind]))
> ++          if(::data[row_ind][column_ind] && strcmp("1", ::data[row_ind][column_ind]))
> +             col_used[column_ind] = true;
> +         }
> +         else
> +         {
> +-          if(data[row_ind][column_ind] && strlen(data[row_ind][column_ind]))
> ++          if(::data[row_ind][column_ind] && strlen(::data[row_ind][column_ind]))
> +             col_used[column_ind] = true;
> +         }
> +       }
> +@@ -195,22 +195,22 @@ void calc_vals()
> +     break;
> +     case eCPU:
> +     {
> +-      for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
> ++      for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
> +       {
> +         double work, cpu;
> +         arr[row_ind].val = 0.0;
> +-        if(data[row_ind].size() > column_ind
> +-         && sscanf(data[row_ind][column_ind - 1], "%lf", &work) == 1
> +-         && sscanf(data[row_ind][column_ind], "%lf", &cpu) == 1)
> ++        if(::data[row_ind].size() > column_ind
> ++         && sscanf(::data[row_ind][column_ind - 1], "%lf", &work) == 1
> ++         && sscanf(::data[row_ind][column_ind], "%lf", &cpu) == 1)
> +         {
> +           arr[row_ind].val = cpu / work;
> +         }
> +         arr[row_ind].pos = row_ind;
> +       }
> +-      qsort(arr, data.size(), sizeof(ITEM), compar);
> ++      qsort(arr, ::data.size(), sizeof(ITEM), compar);
> +       int col_count = -1;
> +       double min_col = -1.0, max_col = -1.0;
> +-      for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++      for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> +       {
> +         // if item is different from previous or if the first row
> +         // (sort_ind == 0) then increment col count
> +@@ -239,7 +239,7 @@ void calc_vals()
> +           min_col /= mult;
> +         }
> +         double range_col = max_col - min_col;
> +-        for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++        for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> +         {
> +           if(arr[sort_ind].col_ind > -1)
> +           {
> +@@ -250,7 +250,7 @@ void calc_vals()
> +       }
> +       else
> +       {
> +-        for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++        for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> +         {
> +           if(vals[column_ind] == eLatency)
> +           {
> +@@ -263,25 +263,25 @@ void calc_vals()
> +     case eSpeed:
> +     case eLatency:
> +     {
> +-      for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
> ++      for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
> +       {
> +         arr[row_ind].val = 0.0;
> +-        if(data[row_ind].size() <= column_ind
> +-         || sscanf(data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
> ++        if(::data[row_ind].size() <= column_ind
> ++         || sscanf(::data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
> +           arr[row_ind].val = 0.0;
> +         if(vals[column_ind] == eLatency && arr[row_ind].val != 0.0)
> +         {
> +-          if(strstr(data[row_ind][column_ind], "ms"))
> ++          if(strstr(::data[row_ind][column_ind], "ms"))
> +             arr[row_ind].val *= 1000.0;
> +-          else if(!strstr(data[row_ind][column_ind], "us"))
> ++          else if(!strstr(::data[row_ind][column_ind], "us"))
> +             arr[row_ind].val *= 1000000.0; // is !us && !ms then secs!
> +         }
> +         arr[row_ind].pos = row_ind;
> +       }
> +-      qsort(arr, data.size(), sizeof(ITEM), compar);
> ++      qsort(arr, ::data.size(), sizeof(ITEM), compar);
> +       int col_count = -1;
> +       double min_col = -1.0, max_col = -1.0;
> +-      for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++      for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> +       {
> +         // if item is different from previous or if the first row
> +         // (sort_ind == 0) then increment col count
> +@@ -310,7 +310,7 @@ void calc_vals()
> +           min_col /= mult;
> +         }
> +         double range_col = max_col - min_col;
> +-        for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++        for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> +         {
> +           if(arr[sort_ind].col_ind > -1)
> +           {
> +@@ -332,7 +332,7 @@ void calc_vals()
> +       }
> +       else
> +       {
> +-        for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
> ++        for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
> +         {
> +           if(vals[column_ind] == eLatency)
> +           {
> +@@ -481,16 +481,16 @@ void read_in(CPCCHAR buf)
> +     free((void *)arr[0]);
> +     return;
> +   }
> +-  data.push_back(arr);
> ++  ::data.push_back(arr);
> + }
> +
> + void print_item(int num, int item, CPCCHAR extra)
> + {
> +   PCCHAR line_data;
> +   char buf[1024];
> +-  if(int(data[num].size()) > item)
> ++  if(int(::data[num].size()) > item)
> +   {
> +-    line_data = data[num][item];
> ++    line_data = ::data[num][item];
> +     switch(item)
> +     {
> +     case COL_PUT_BLOCK:
> diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb b/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
> new file mode 100644
> index 000000000..f31fd09fc
> --- /dev/null
> +++ b/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
> @@ -0,0 +1,33 @@
> +SUMMARY = "Tests large file IO and creation/deletion of small files"
> +HOMEPAGE = "https://doc.coker.com.au/projects/bonnie/"
> +SECTION = "benchmark/tests"
> +LICENSE = "GPL-2.0-only"
> +LIC_FILES_CHKSUM = "file://copyright.txt;md5=cd4dde95a6b9d122f0a9150ae9cc3ee0"
> +
> +SRC_URI = "http://www.coker.com.au/bonnie++/${BPN}-${PV}.tgz \
> +    file://fix-configure-lfs.patch \
> +    file://fix-csv2html-data.patch \
> +"
> +SRC_URI[md5sum] = "3a16b3a91b1d38b6f5561e197f81d870"

Drop setting md5sum, it's not used anymore.

> +SRC_URI[sha256sum] = "a8d33bbd81bc7eb559ce5bf6e584b9b53faea39ccfb4ae92e58f27257e468f0e"
> +
> +SCRIPTS = "bon_csv2html bon_csv2txt"
> +EXES = "bonnie++ zcav"
> +
> +TARGET_CC_ARCH += "${LDFLAGS}"
> +# force lfs to skip configure's check, because we are cross-building
> +EXTRA_OECONF:append="--enable-lfs"

Either there should be a space before --enable-lfs or better make it a
PACKAGECONFIG

> +
> +do_install () {
> +    install -d ${D}/${bindir}
> +    install -d ${D}/${sbindir}
> +    install -m 0755 ${EXES} ${D}/${sbindir}
> +    install -m 0755 ${SCRIPTS} ${D}/${bindir}
> +}
> +
> +PACKAGES =+ "bonnie-scripts"
> +
> +FILES:${PN} = "${sbindir}"
> +FILES:bonnie-scripts = "${bindir}"
> +
> +RDEPENDS:bonnie-scripts += "perl"
> --
> 2.34.1
>
>
> -=-=-=-=-=-=-=-=-=-=-=-
> Links: You receive all messages sent to this group.
> View/Reply Online (#107874): https://lists.openembedded.org/g/openembedded-devel/message/107874
> Mute This Topic: https://lists.openembedded.org/mt/103400703/1997914
> Group Owner: openembedded-devel+owner@lists.openembedded.org
> Unsubscribe: https://lists.openembedded.org/g/openembedded-devel/unsub [raj.khem@gmail.com]
> -=-=-=-=-=-=-=-=-=-=-=-
>
diff mbox series

Patch

diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
new file mode 100644
index 000000000..af20acdcd
--- /dev/null
+++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-configure-lfs.patch
@@ -0,0 +1,39 @@ 
+Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=4ffece51791ba75ddca2e664cdce726cc40c92d3]
+
+diff --git i/configure.in w/configure.in
+index 080e40c..f2a2bbe 100644
+--- i/configure.in
++++ w/configure.in
+@@ -82,8 +82,15 @@ void * thread_func(void * param) { return NULL; }
+   , thread_ldflags="-lpthread"
+   , thread_ldflags="-pthread")
+ 
+-AC_SUBST(large_file)
+-AC_TRY_RUN([#ifndef _LARGEFILE64_SOURCE
++AC_ARG_ENABLE(lfs,
++      [  --disable-lfs  disable large file support],
++      LFS_CHOICE=$enableval, LFS_CHOICE=check)
++
++if test "$LFS_CHOICE" = yes; then
++   bonniepp_cv_large_file=yes
++elif test "$LFS_CHOICE" = check; then
++   AC_CACHE_CHECK([whether to enable -D_LARGEFILE64_SOURCE], bonniepp_cv_large_file,
++      AC_TRY_RUN([#ifndef _LARGEFILE64_SOURCE
+ #define _LARGEFILE64_SOURCE
+ #endif
+ #include <stdio.h>
+@@ -118,8 +125,12 @@ int main () {
+   }
+   close(fd);
+   return 0;
+-}], large_file="yes")
+-if [[ -n "$large_file" ]]; then
++}], bonniepp_cv_large_file="yes"))
++fi
++
++AC_SUBST(large_file)
++
++if [[ -n "$bonniepp_cv_large_file" ]]; then
+    large_file="#define _LARGEFILE64_SOURCE"
+ fi
+ 
diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
new file mode 100644
index 000000000..4b37b8d65
--- /dev/null
+++ b/meta-oe/recipes-benchmark/bonnie/bonnie++/fix-csv2html-data.patch
@@ -0,0 +1,183 @@ 
+commit 7e9433a56f22426b11cbc9bd80e0debca67c893b
+Author: Jörg Sommer <joerg.sommer@navimatix.de>
+Date:   Mon Jun 26 12:38:30 2023 +0200
+
+    csv2html: Explicitly reference data in top level
+    
+    With g++ 11 *data* became ambiguous with [std::data][1]. Therefore it's
+    needed to explicitly address the variable from the top level scope.
+    
+    [1] https://en.cppreference.com/w/cpp/iterator/data
+
+Upstream-Status: Submitted [https://salsa.debian.org/etbe/bonnie/-/merge_requests/3/diffs?commit_id=fb13a71d56dab8aaa39233fcaaedfb0ba4ad647d]
+
+diff --git a/bon_csv2html.cpp b/bon_csv2html.cpp
+index e9d9c50..652e330 100644
+--- a/bon_csv2html.cpp
++++ b/bon_csv2html.cpp
+@@ -87,8 +87,8 @@ int main(int argc, char **argv)
+     read_in(buf);
+   }
+ 
+-  props = new PPCCHAR[data.size()];
+-  for(i = 0; i < data.size(); i++)
++  props = new PPCCHAR[::data.size()];
++  for(i = 0; i < ::data.size(); i++)
+   {
+     props[i] = new PCCHAR[MAX_ITEMS];
+     props[i][0] = NULL;
+@@ -109,7 +109,7 @@ int main(int argc, char **argv)
+   }
+   calc_vals();
+   int mid_width = header();
+-  for(i = 0; i < data.size(); i++)
++  for(i = 0; i < ::data.size(); i++)
+   {
+ // First print the average speed line
+     printf("<tr>");
+@@ -171,23 +171,23 @@ int compar(const void *a, const void *b)
+ 
+ void calc_vals()
+ {
+-  ITEM *arr = new ITEM[data.size()];
++  ITEM *arr = new ITEM[::data.size()];
+   for(unsigned int column_ind = 0; column_ind < MAX_ITEMS; column_ind++)
+   {
+     switch(vals[column_ind])
+     {
+     case eNoCols:
+     {
+-      for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
++      for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
+       {
+         if(column_ind == COL_CONCURRENCY)
+         {
+-          if(data[row_ind][column_ind] && strcmp("1", data[row_ind][column_ind]))
++          if(::data[row_ind][column_ind] && strcmp("1", ::data[row_ind][column_ind]))
+             col_used[column_ind] = true;
+         }
+         else
+         {
+-          if(data[row_ind][column_ind] && strlen(data[row_ind][column_ind]))
++          if(::data[row_ind][column_ind] && strlen(::data[row_ind][column_ind]))
+             col_used[column_ind] = true;
+         }
+       }
+@@ -195,22 +195,22 @@ void calc_vals()
+     break;
+     case eCPU:
+     {
+-      for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
++      for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
+       {
+         double work, cpu;
+         arr[row_ind].val = 0.0;
+-        if(data[row_ind].size() > column_ind
+-         && sscanf(data[row_ind][column_ind - 1], "%lf", &work) == 1
+-         && sscanf(data[row_ind][column_ind], "%lf", &cpu) == 1)
++        if(::data[row_ind].size() > column_ind
++         && sscanf(::data[row_ind][column_ind - 1], "%lf", &work) == 1
++         && sscanf(::data[row_ind][column_ind], "%lf", &cpu) == 1)
+         {
+           arr[row_ind].val = cpu / work;
+         }
+         arr[row_ind].pos = row_ind;
+       }
+-      qsort(arr, data.size(), sizeof(ITEM), compar);
++      qsort(arr, ::data.size(), sizeof(ITEM), compar);
+       int col_count = -1;
+       double min_col = -1.0, max_col = -1.0;
+-      for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++      for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+       {
+         // if item is different from previous or if the first row
+         // (sort_ind == 0) then increment col count
+@@ -239,7 +239,7 @@ void calc_vals()
+           min_col /= mult;
+         }
+         double range_col = max_col - min_col;
+-        for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++        for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+         {
+           if(arr[sort_ind].col_ind > -1)
+           {
+@@ -250,7 +250,7 @@ void calc_vals()
+       }
+       else
+       {
+-        for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++        for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+         {
+           if(vals[column_ind] == eLatency)
+           {
+@@ -263,25 +263,25 @@ void calc_vals()
+     case eSpeed:
+     case eLatency:
+     {
+-      for(unsigned int row_ind = 0; row_ind < data.size(); row_ind++)
++      for(unsigned int row_ind = 0; row_ind < ::data.size(); row_ind++)
+       {
+         arr[row_ind].val = 0.0;
+-        if(data[row_ind].size() <= column_ind
+-         || sscanf(data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
++        if(::data[row_ind].size() <= column_ind
++         || sscanf(::data[row_ind][column_ind], "%lf", &arr[row_ind].val) == 0)
+           arr[row_ind].val = 0.0;
+         if(vals[column_ind] == eLatency && arr[row_ind].val != 0.0)
+         {
+-          if(strstr(data[row_ind][column_ind], "ms"))
++          if(strstr(::data[row_ind][column_ind], "ms"))
+             arr[row_ind].val *= 1000.0;
+-          else if(!strstr(data[row_ind][column_ind], "us"))
++          else if(!strstr(::data[row_ind][column_ind], "us"))
+             arr[row_ind].val *= 1000000.0; // is !us && !ms then secs!
+         }
+         arr[row_ind].pos = row_ind;
+       }
+-      qsort(arr, data.size(), sizeof(ITEM), compar);
++      qsort(arr, ::data.size(), sizeof(ITEM), compar);
+       int col_count = -1;
+       double min_col = -1.0, max_col = -1.0;
+-      for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++      for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+       {
+         // if item is different from previous or if the first row
+         // (sort_ind == 0) then increment col count
+@@ -310,7 +310,7 @@ void calc_vals()
+           min_col /= mult;
+         }
+         double range_col = max_col - min_col;
+-        for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++        for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+         {
+           if(arr[sort_ind].col_ind > -1)
+           {
+@@ -332,7 +332,7 @@ void calc_vals()
+       }
+       else
+       {
+-        for(unsigned int sort_ind = 0; sort_ind < data.size(); sort_ind++)
++        for(unsigned int sort_ind = 0; sort_ind < ::data.size(); sort_ind++)
+         {
+           if(vals[column_ind] == eLatency)
+           {
+@@ -481,16 +481,16 @@ void read_in(CPCCHAR buf)
+     free((void *)arr[0]);
+     return;
+   }
+-  data.push_back(arr);
++  ::data.push_back(arr);
+ }
+ 
+ void print_item(int num, int item, CPCCHAR extra)
+ {
+   PCCHAR line_data;
+   char buf[1024];
+-  if(int(data[num].size()) > item)
++  if(int(::data[num].size()) > item)
+   {
+-    line_data = data[num][item];
++    line_data = ::data[num][item];
+     switch(item)
+     {
+     case COL_PUT_BLOCK:
diff --git a/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb b/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
new file mode 100644
index 000000000..f31fd09fc
--- /dev/null
+++ b/meta-oe/recipes-benchmark/bonnie/bonnie++_2.00a.bb
@@ -0,0 +1,33 @@ 
+SUMMARY = "Tests large file IO and creation/deletion of small files"
+HOMEPAGE = "https://doc.coker.com.au/projects/bonnie/"
+SECTION = "benchmark/tests"
+LICENSE = "GPL-2.0-only"
+LIC_FILES_CHKSUM = "file://copyright.txt;md5=cd4dde95a6b9d122f0a9150ae9cc3ee0"
+
+SRC_URI = "http://www.coker.com.au/bonnie++/${BPN}-${PV}.tgz \
+    file://fix-configure-lfs.patch \
+    file://fix-csv2html-data.patch \
+"
+SRC_URI[md5sum] = "3a16b3a91b1d38b6f5561e197f81d870"
+SRC_URI[sha256sum] = "a8d33bbd81bc7eb559ce5bf6e584b9b53faea39ccfb4ae92e58f27257e468f0e"
+
+SCRIPTS = "bon_csv2html bon_csv2txt"
+EXES = "bonnie++ zcav"
+
+TARGET_CC_ARCH += "${LDFLAGS}"
+# force lfs to skip configure's check, because we are cross-building
+EXTRA_OECONF:append="--enable-lfs"
+
+do_install () {
+    install -d ${D}/${bindir}
+    install -d ${D}/${sbindir}
+    install -m 0755 ${EXES} ${D}/${sbindir}
+    install -m 0755 ${SCRIPTS} ${D}/${bindir}
+}
+
+PACKAGES =+ "bonnie-scripts"
+
+FILES:${PN} = "${sbindir}"
+FILES:bonnie-scripts = "${bindir}"
+
+RDEPENDS:bonnie-scripts += "perl"