Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
B
benchmark
Project
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Chen Yisong
benchmark
Commits
2d088a9f
Commit
2d088a9f
authored
Jun 02, 2016
by
Dominic Hamon
Browse files
Options
Browse Files
Download
Plain Diff
Merge branch 'ismaelJimenez-added_lambdas'
parents
84cd50b8
e4981431
Hide whitespace changes
Inline
Side-by-side
Showing
11 changed files
with
447 additions
and
223 deletions
+447
-223
README.md
README.md
+8
-0
benchmark_api.h
include/benchmark/benchmark_api.h
+23
-14
reporter.h
include/benchmark/reporter.h
+12
-11
benchmark.cc
src/benchmark.cc
+18
-3
complexity.cc
src/complexity.cc
+58
-72
complexity.h
src/complexity.h
+0
-6
console_reporter.cc
src/console_reporter.cc
+14
-14
csv_reporter.cc
src/csv_reporter.cc
+8
-5
json_reporter.cc
src/json_reporter.cc
+51
-33
complexity_test.cc
test/complexity_test.cc
+254
-64
reporter_output_test.cc
test/reporter_output_test.cc
+1
-1
No files found.
README.md
View file @
2d088a9f
...
...
@@ -142,6 +142,14 @@ BENCHMARK(BM_StringCompare)
->
RangeMultiplier
(
2
)
->
Range
(
1
<<
10
,
1
<<
18
)
->
Complexity
();
```
The following code will specify asymptotic complexity with a lambda function,
that might be used to customize high-order term calculation.
```
c++
BENCHMARK
(
BM_StringCompare
)
->
RangeMultiplier
(
2
)
->
Range
(
1
<<
10
,
1
<<
18
)
->
Complexity
([](
int
n
)
->
double
{
return
n
;
});
```
### Templated benchmarks
Templated benchmarks work the same way: This example produces and consumes
messages of size
`sizeof(v)`
`range_x`
times. It also outputs throughput in the
...
...
include/benchmark/benchmark_api.h
View file @
2d088a9f
...
...
@@ -247,9 +247,14 @@ enum BigO {
oNCubed
,
oLogN
,
oNLogN
,
oAuto
oAuto
,
oLambda
};
// BigOFunc is passed to a benchmark in order to specify the asymptotic
// computational complexity for the benchmark.
typedef
double
(
BigOFunc
)(
int
);
// State is passed to a running Benchmark and contains state for the
// benchmark to use.
class
State
{
...
...
@@ -257,24 +262,24 @@ public:
State
(
size_t
max_iters
,
bool
has_x
,
int
x
,
bool
has_y
,
int
y
,
int
thread_i
,
int
n_threads
);
// Returns true if
f
the benchmark should continue through another iteration.
// Returns true if the benchmark should continue through another iteration.
// NOTE: A benchmark may not return from the test until KeepRunning() has
// returned false.
bool
KeepRunning
()
{
if
(
BENCHMARK_BUILTIN_EXPECT
(
!
started_
,
false
))
{
assert
(
!
finished_
);
started_
=
true
;
ResumeTiming
();
assert
(
!
finished_
);
started_
=
true
;
ResumeTiming
();
}
bool
const
res
=
total_iterations_
++
<
max_iterations
;
if
(
BENCHMARK_BUILTIN_EXPECT
(
!
res
,
false
))
{
assert
(
started_
&&
(
!
finished_
||
error_occurred_
));
if
(
!
error_occurred_
)
{
PauseTiming
();
}
// Total iterations now is one greater than max iterations. Fix this.
total_iterations_
=
max_iterations
;
finished_
=
true
;
assert
(
started_
&&
(
!
finished_
||
error_occurred_
));
if
(
!
error_occurred_
)
{
PauseTiming
();
}
// Total iterations now is one greater than max iterations. Fix this.
total_iterations_
=
max_iterations
;
finished_
=
true
;
}
return
res
;
}
...
...
@@ -358,7 +363,7 @@ public:
// family benchmark, then current benchmark will be part of the computation and complexity_n will
// represent the length of N.
BENCHMARK_ALWAYS_INLINE
void
SetComplexityN
(
size_
t
complexity_n
)
{
void
SetComplexityN
(
in
t
complexity_n
)
{
complexity_n_
=
complexity_n
;
}
...
...
@@ -439,7 +444,7 @@ private:
size_t
bytes_processed_
;
size_t
items_processed_
;
size_
t
complexity_n_
;
in
t
complexity_n_
;
public
:
// FIXME: Make this private somehow.
...
...
@@ -538,6 +543,10 @@ public:
// the asymptotic computational complexity will be shown on the output.
Benchmark
*
Complexity
(
BigO
complexity
=
benchmark
::
oAuto
);
// Set the asymptotic computational complexity for the benchmark. If called
// the asymptotic computational complexity will be shown on the output.
Benchmark
*
Complexity
(
BigOFunc
*
complexity
);
// Support for running multiple copies of the same benchmark concurrently
// in multiple threads. This may be useful when measuring the scaling
// of some piece of code.
...
...
include/benchmark/reporter.h
View file @
2d088a9f
...
...
@@ -20,7 +20,7 @@
#include <utility>
#include <vector>
#include "benchmark_api.h" // For forward declaration of BenchmarkReporter
#include "benchmark_api.h"
// For forward declaration of BenchmarkReporter
namespace
benchmark
{
...
...
@@ -85,7 +85,8 @@ class BenchmarkReporter {
double
max_heapbytes_used
;
// Keep track of arguments to compute asymptotic complexity
BigO
complexity
;
BigO
complexity
;
BigOFunc
*
complexity_lambda
;
int
complexity_n
;
// Inform print function whether the current run is a complexity report
...
...
@@ -147,7 +148,7 @@ class BenchmarkReporter {
// REQUIRES: 'out' is non-null.
static
void
PrintBasicContext
(
std
::
ostream
*
out
,
Context
const
&
context
);
private
:
private
:
std
::
ostream
*
output_stream_
;
std
::
ostream
*
error_stream_
;
};
...
...
@@ -159,31 +160,31 @@ class ConsoleReporter : public BenchmarkReporter {
virtual
bool
ReportContext
(
const
Context
&
context
);
virtual
void
ReportRuns
(
const
std
::
vector
<
Run
>&
reports
);
protected
:
protected
:
virtual
void
PrintRunData
(
const
Run
&
report
);
size_t
name_field_width_
;
};
class
JSONReporter
:
public
BenchmarkReporter
{
public
:
public
:
JSONReporter
()
:
first_report_
(
true
)
{}
virtual
bool
ReportContext
(
const
Context
&
context
);
virtual
void
ReportRuns
(
const
std
::
vector
<
Run
>&
reports
);
virtual
void
Finalize
();
private
:
private
:
void
PrintRunData
(
const
Run
&
report
);
bool
first_report_
;
};
class
CSVReporter
:
public
BenchmarkReporter
{
public
:
public
:
virtual
bool
ReportContext
(
const
Context
&
context
);
virtual
void
ReportRuns
(
const
std
::
vector
<
Run
>&
reports
);
private
:
private
:
void
PrintRunData
(
const
Run
&
report
);
};
...
...
@@ -200,7 +201,7 @@ inline const char* GetTimeUnitString(TimeUnit unit) {
}
inline
double
GetTimeUnitMultiplier
(
TimeUnit
unit
)
{
switch
(
unit
)
{
switch
(
unit
)
{
case
kMillisecond
:
return
1e3
;
case
kMicrosecond
:
...
...
@@ -211,5 +212,5 @@ inline double GetTimeUnitMultiplier(TimeUnit unit) {
}
}
}
// end namespace benchmark
#endif // BENCHMARK_REPORTER_H_
}
// end namespace benchmark
#endif
// BENCHMARK_REPORTER_H_
src/benchmark.cc
View file @
2d088a9f
...
...
@@ -130,7 +130,7 @@ struct ThreadStats {
ThreadStats
()
:
bytes_processed
(
0
),
items_processed
(
0
),
complexity_n
(
0
)
{}
int64_t
bytes_processed
;
int64_t
items_processed
;
int
complexity_n
;
int
complexity_n
;
};
// Timer management class
...
...
@@ -287,7 +287,7 @@ class TimerManager {
};
phase_condition_
.
wait
(
ml
.
native_handle
(),
cb
);
if
(
phase_number_
>
phase_number_cp
)
return
false
;
return
false
;
// else (running_threads_ == entered_) and we are the last thread.
}
// Last thread has reached the barrier
...
...
@@ -317,6 +317,7 @@ struct Benchmark::Instance {
bool
use_real_time
;
bool
use_manual_time
;
BigO
complexity
;
BigOFunc
*
complexity_lambda
;
bool
last_benchmark_instance
;
int
repetitions
;
double
min_time
;
...
...
@@ -362,6 +363,7 @@ public:
void
UseRealTime
();
void
UseManualTime
();
void
Complexity
(
BigO
complexity
);
void
ComplexityLambda
(
BigOFunc
*
complexity
);
void
Threads
(
int
t
);
void
ThreadRange
(
int
min_threads
,
int
max_threads
);
void
ThreadPerCpu
();
...
...
@@ -382,6 +384,7 @@ private:
bool
use_real_time_
;
bool
use_manual_time_
;
BigO
complexity_
;
BigOFunc
*
complexity_lambda_
;
std
::
vector
<
int
>
thread_counts_
;
BenchmarkImp
&
operator
=
(
BenchmarkImp
const
&
);
...
...
@@ -446,6 +449,7 @@ bool BenchmarkFamilies::FindBenchmarks(
instance
.
use_real_time
=
family
->
use_real_time_
;
instance
.
use_manual_time
=
family
->
use_manual_time_
;
instance
.
complexity
=
family
->
complexity_
;
instance
.
complexity_lambda
=
family
->
complexity_lambda_
;
instance
.
threads
=
num_threads
;
instance
.
multithreaded
=
!
(
family
->
thread_counts_
.
empty
());
...
...
@@ -573,6 +577,10 @@ void BenchmarkImp::Complexity(BigO complexity){
complexity_
=
complexity
;
}
void
BenchmarkImp
::
ComplexityLambda
(
BigOFunc
*
complexity
)
{
complexity_lambda_
=
complexity
;
}
void
BenchmarkImp
::
Threads
(
int
t
)
{
CHECK_GT
(
t
,
0
);
thread_counts_
.
push_back
(
t
);
...
...
@@ -697,6 +705,12 @@ Benchmark* Benchmark::Complexity(BigO complexity) {
return
this
;
}
Benchmark
*
Benchmark
::
Complexity
(
BigOFunc
*
complexity
)
{
imp_
->
Complexity
(
oLambda
);
imp_
->
ComplexityLambda
(
complexity
);
return
this
;
}
Benchmark
*
Benchmark
::
Threads
(
int
t
)
{
imp_
->
Threads
(
t
);
return
this
;
...
...
@@ -855,6 +869,7 @@ void RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
report
.
items_per_second
=
items_per_second
;
report
.
complexity_n
=
total
.
complexity_n
;
report
.
complexity
=
b
.
complexity
;
report
.
complexity_lambda
=
b
.
complexity_lambda
;
if
(
report
.
complexity
!=
oNone
)
complexity_reports
.
push_back
(
report
);
}
...
...
@@ -884,7 +899,7 @@ void RunBenchmark(const benchmark::internal::Benchmark::Instance& b,
}
std
::
vector
<
BenchmarkReporter
::
Run
>
additional_run_stats
=
ComputeStats
(
reports
);
reports
.
insert
(
reports
.
end
(),
additional_run_stats
.
begin
(),
additional_run_stats
.
end
());
additional_run_stats
.
end
());
if
((
b
.
complexity
!=
oNone
)
&&
b
.
last_benchmark_instance
)
{
additional_run_stats
=
ComputeBigO
(
complexity_reports
);
...
...
src/complexity.cc
View file @
2d088a9f
...
...
@@ -17,31 +17,30 @@
#include "benchmark/benchmark_api.h"
#include "complexity.h"
#include <algorithm>
#include <cmath>
#include "check.h"
#include "complexity.h"
#include "stat.h"
#include <cmath>
#include <algorithm>
#include <functional>
namespace
benchmark
{
// Internal function to calculate the different scalability forms
std
::
function
<
double
(
int
)
>
FittingCurve
(
BigO
complexity
)
{
BigOFunc
*
FittingCurve
(
BigO
complexity
)
{
switch
(
complexity
)
{
case
oN
:
return
[](
int
n
)
{
return
n
;
};
return
[](
int
n
)
->
double
{
return
n
;
};
case
oNSquared
:
return
[](
int
n
)
{
return
n
*
n
;
};
return
[](
int
n
)
->
double
{
return
n
*
n
;
};
case
oNCubed
:
return
[](
int
n
)
{
return
n
*
n
*
n
;
};
return
[](
int
n
)
->
double
{
return
n
*
n
*
n
;
};
case
oLogN
:
return
[](
int
n
)
{
return
log2
(
n
);
};
return
[](
int
n
)
{
return
log2
(
n
);
};
case
oNLogN
:
return
[](
int
n
)
{
return
n
*
log2
(
n
);
};
return
[](
int
n
)
{
return
n
*
log2
(
n
);
};
case
o1
:
default
:
return
[](
int
)
{
return
1
;
};
return
[](
int
)
{
return
1.0
;
};
}
}
...
...
@@ -49,24 +48,24 @@ std::function<double(int)> FittingCurve(BigO complexity) {
std
::
string
GetBigOString
(
BigO
complexity
)
{
switch
(
complexity
)
{
case
oN
:
return
"
*
N"
;
return
"N"
;
case
oNSquared
:
return
"
* N**
2"
;
return
"
N^
2"
;
case
oNCubed
:
return
"
* N**
3"
;
return
"
N^
3"
;
case
oLogN
:
return
"
*
lgN"
;
return
"lgN"
;
case
oNLogN
:
return
"
*
NlgN"
;
return
"NlgN"
;
case
o1
:
return
"
* 1
"
;
return
"
(1)
"
;
default
:
return
""
;
return
"
f(N)
"
;
}
}
// Find the coefficient for the high-order term in the running time, by
// minimizing the sum of squares of relative error, for the fitting curve
// Find the coefficient for the high-order term in the running time, by
// minimizing the sum of squares of relative error, for the fitting curve
// given by the lambda expresion.
// - n : Vector containing the size of the benchmark tests.
// - time : Vector containing the times for the benchmark tests.
...
...
@@ -75,21 +74,9 @@ std::string GetBigOString(BigO complexity) {
// For a deeper explanation on the algorithm logic, look the README file at
// http://github.com/ismaelJimenez/Minimal-Cpp-Least-Squared-Fit
// This interface is currently not used from the oustide, but it has been
// provided for future upgrades. If in the future it is not needed to support
// Cxx03, then all the calculations could be upgraded to use lambdas because
// they are more powerful and provide a cleaner inferface than enumerators,
// but complete implementation with lambdas will not work for Cxx03
// (e.g. lack of std::function).
// In case lambdas are implemented, the interface would be like :
// -> Complexity([](int n) {return n;};)
// and any arbitrary and valid equation would be allowed, but the option to
// calculate the best fit to the most common scalability curves will still
// be kept.
LeastSq
CalculateLeastSq
(
const
std
::
vector
<
int
>&
n
,
const
std
::
vector
<
double
>&
time
,
std
::
function
<
double
(
int
)
>
fitting_curve
)
{
LeastSq
MinimalLeastSq
(
const
std
::
vector
<
int
>&
n
,
const
std
::
vector
<
double
>&
time
,
BigOFunc
*
fitting_curve
)
{
double
sigma_gn
=
0.0
;
double
sigma_gn_squared
=
0.0
;
double
sigma_time
=
0.0
;
...
...
@@ -105,6 +92,7 @@ LeastSq CalculateLeastSq(const std::vector<int>& n,
}
LeastSq
result
;
result
.
complexity
=
oLambda
;
// Calculate complexity.
result
.
coef
=
sigma_time_gn
/
sigma_gn_squared
;
...
...
@@ -134,29 +122,29 @@ LeastSq MinimalLeastSq(const std::vector<int>& n,
const
std
::
vector
<
double
>&
time
,
const
BigO
complexity
)
{
CHECK_EQ
(
n
.
size
(),
time
.
size
());
CHECK_GE
(
n
.
size
(),
2
);
// Do not compute fitting curve is less than two benchmark runs are given
CHECK_GE
(
n
.
size
(),
2
);
// Do not compute fitting curve is less than two
// benchmark runs are given
CHECK_NE
(
complexity
,
oNone
);
LeastSq
best_fit
;
if
(
complexity
==
oAuto
)
{
std
::
vector
<
BigO
>
fit_curves
=
{
oLogN
,
oN
,
oNLogN
,
oNSquared
,
oNCubed
};
if
(
complexity
==
oAuto
)
{
std
::
vector
<
BigO
>
fit_curves
=
{
oLogN
,
oN
,
oNLogN
,
oNSquared
,
oNCubed
};
// Take o1 as default best fitting curve
best_fit
=
Calculate
LeastSq
(
n
,
time
,
FittingCurve
(
o1
));
best_fit
=
Minimal
LeastSq
(
n
,
time
,
FittingCurve
(
o1
));
best_fit
.
complexity
=
o1
;
// Compute all possible fitting curves and stick to the best one
for
(
const
auto
&
fit
:
fit_curves
)
{
LeastSq
current_fit
=
Calculate
LeastSq
(
n
,
time
,
FittingCurve
(
fit
));
LeastSq
current_fit
=
Minimal
LeastSq
(
n
,
time
,
FittingCurve
(
fit
));
if
(
current_fit
.
rms
<
best_fit
.
rms
)
{
best_fit
=
current_fit
;
best_fit
.
complexity
=
fit
;
}
}
}
else
{
best_fit
=
Calculate
LeastSq
(
n
,
time
,
FittingCurve
(
complexity
));
best_fit
=
Minimal
LeastSq
(
n
,
time
,
FittingCurve
(
complexity
));
best_fit
.
complexity
=
complexity
;
}
...
...
@@ -164,14 +152,13 @@ LeastSq MinimalLeastSq(const std::vector<int>& n,
}
std
::
vector
<
BenchmarkReporter
::
Run
>
ComputeStats
(
const
std
::
vector
<
BenchmarkReporter
::
Run
>&
reports
)
{
const
std
::
vector
<
BenchmarkReporter
::
Run
>&
reports
)
{
typedef
BenchmarkReporter
::
Run
Run
;
std
::
vector
<
Run
>
results
;
auto
error_count
=
std
::
count_if
(
reports
.
begin
(),
reports
.
end
(),
[](
Run
const
&
run
)
{
return
run
.
error_occurred
;
});
auto
error_count
=
std
::
count_if
(
reports
.
begin
(),
reports
.
end
(),
[](
Run
const
&
run
)
{
return
run
.
error_occurred
;
});
if
(
reports
.
size
()
-
error_count
<
2
)
{
// We don't report aggregated data if there was a single run.
...
...
@@ -190,12 +177,11 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
for
(
Run
const
&
run
:
reports
)
{
CHECK_EQ
(
reports
[
0
].
benchmark_name
,
run
.
benchmark_name
);
CHECK_EQ
(
run_iterations
,
run
.
iterations
);
if
(
run
.
error_occurred
)
continue
;
if
(
run
.
error_occurred
)
continue
;
real_accumulated_time_stat
+=
Stat1_d
(
run
.
real_accumulated_time
/
run
.
iterations
,
run
.
iterations
);
Stat1_d
(
run
.
real_accumulated_time
/
run
.
iterations
,
run
.
iterations
);
cpu_accumulated_time_stat
+=
Stat1_d
(
run
.
cpu_accumulated_time
/
run
.
iterations
,
run
.
iterations
);
Stat1_d
(
run
.
cpu_accumulated_time
/
run
.
iterations
,
run
.
iterations
);
items_per_second_stat
+=
Stat1_d
(
run
.
items_per_second
,
run
.
iterations
);
bytes_per_second_stat
+=
Stat1_d
(
run
.
bytes_per_second
,
run
.
iterations
);
}
...
...
@@ -204,10 +190,10 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
Run
mean_data
;
mean_data
.
benchmark_name
=
reports
[
0
].
benchmark_name
+
"_mean"
;
mean_data
.
iterations
=
run_iterations
;
mean_data
.
real_accumulated_time
=
real_accumulated_time_stat
.
Mean
()
*
run_iterations
;
mean_data
.
cpu_accumulated_time
=
cpu_accumulated_time_stat
.
Mean
()
*
run_iterations
;
mean_data
.
real_accumulated_time
=
real_accumulated_time_stat
.
Mean
()
*
run_iterations
;
mean_data
.
cpu_accumulated_time
=
cpu_accumulated_time_stat
.
Mean
()
*
run_iterations
;
mean_data
.
bytes_per_second
=
bytes_per_second_stat
.
Mean
();
mean_data
.
items_per_second
=
items_per_second_stat
.
Mean
();
...
...
@@ -224,10 +210,8 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
stddev_data
.
benchmark_name
=
reports
[
0
].
benchmark_name
+
"_stddev"
;
stddev_data
.
report_label
=
mean_data
.
report_label
;
stddev_data
.
iterations
=
0
;
stddev_data
.
real_accumulated_time
=
real_accumulated_time_stat
.
StdDev
();
stddev_data
.
cpu_accumulated_time
=
cpu_accumulated_time_stat
.
StdDev
();
stddev_data
.
real_accumulated_time
=
real_accumulated_time_stat
.
StdDev
();
stddev_data
.
cpu_accumulated_time
=
cpu_accumulated_time_stat
.
StdDev
();
stddev_data
.
bytes_per_second
=
bytes_per_second_stat
.
StdDev
();
stddev_data
.
items_per_second
=
items_per_second_stat
.
StdDev
();
...
...
@@ -237,8 +221,7 @@ std::vector<BenchmarkReporter::Run> ComputeStats(
}
std
::
vector
<
BenchmarkReporter
::
Run
>
ComputeBigO
(
const
std
::
vector
<
BenchmarkReporter
::
Run
>&
reports
)
{
const
std
::
vector
<
BenchmarkReporter
::
Run
>&
reports
)
{
typedef
BenchmarkReporter
::
Run
Run
;
std
::
vector
<
Run
>
results
;
...
...
@@ -252,19 +235,22 @@ std::vector<BenchmarkReporter::Run> ComputeBigO(
// Populate the accumulators.
for
(
const
Run
&
run
:
reports
)
{
n
.
push_back
(
run
.
complexity_n
);
real_time
.
push_back
(
run
.
real_accumulated_time
/
run
.
iterations
);
cpu_time
.
push_back
(
run
.
cpu_accumulated_time
/
run
.
iterations
);
real_time
.
push_back
(
run
.
real_accumulated_time
/
run
.
iterations
);
cpu_time
.
push_back
(
run
.
cpu_accumulated_time
/
run
.
iterations
);
}
LeastSq
result_cpu
=
MinimalLeastSq
(
n
,
cpu_time
,
reports
[
0
].
complexity
);
LeastSq
result_cpu
;
LeastSq
result_real
;
// result_cpu.complexity is passed as parameter to result_real because in case
// reports[0].complexity is oAuto, the noise on the measured data could make
// the best fit function of Cpu and Real differ. In order to solve this, we
// take the best fitting function for the Cpu, and apply it to Real data.
LeastSq
result_real
=
MinimalLeastSq
(
n
,
real_time
,
result_cpu
.
complexity
);
std
::
string
benchmark_name
=
reports
[
0
].
benchmark_name
.
substr
(
0
,
reports
[
0
].
benchmark_name
.
find
(
'/'
));
if
(
reports
[
0
].
complexity
==
oLambda
)
{
result_cpu
=
MinimalLeastSq
(
n
,
cpu_time
,
reports
[
0
].
complexity_lambda
);
result_real
=
MinimalLeastSq
(
n
,
real_time
,
reports
[
0
].
complexity_lambda
);
}
else
{
result_cpu
=
MinimalLeastSq
(
n
,
cpu_time
,
reports
[
0
].
complexity
);
result_real
=
MinimalLeastSq
(
n
,
real_time
,
result_cpu
.
complexity
);
}
std
::
string
benchmark_name
=
reports
[
0
].
benchmark_name
.
substr
(
0
,
reports
[
0
].
benchmark_name
.
find
(
'/'
));
// Get the data from the accumulator to BenchmarkReporter::Run's.
Run
big_o
;
...
...
src/complexity.h
View file @
2d088a9f
...
...
@@ -60,11 +60,5 @@ struct LeastSq {
// Function to return an string for the calculated complexity
std
::
string
GetBigOString
(
BigO
complexity
);
// Find the coefficient for the high-order term in the running time, by
// minimizing the sum of squares of relative error.
LeastSq
MinimalLeastSq
(
const
std
::
vector
<
int
>&
n
,
const
std
::
vector
<
double
>&
time
,
const
BigO
complexity
=
oAuto
);
}
// end namespace benchmark
#endif // COMPLEXITY_H_
src/console_reporter.cc
View file @
2d088a9f
...
...
@@ -15,9 +15,9 @@
#include "benchmark/reporter.h"
#include "complexity.h"
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <algorithm>
#include <iostream>
#include <string>
#include <tuple>
...
...
@@ -62,8 +62,8 @@ void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
void
ConsoleReporter
::
PrintRunData
(
const
Run
&
result
)
{
auto
&
Out
=
GetOutputStream
();
auto
name_color
=
(
result
.
report_big_o
||
result
.
report_rms
)
?
COLOR_BLUE
:
COLOR_GREEN
;
auto
name_color
=
(
result
.
report_big_o
||
result
.
report_rms
)
?
COLOR_BLUE
:
COLOR_GREEN
;
ColorPrintf
(
Out
,
name_color
,
"%-*s "
,
name_field_width_
,
result
.
benchmark_name
.
c_str
());
...
...
@@ -84,25 +84,25 @@ void ConsoleReporter::PrintRunData(const Run& result) {
if
(
result
.
items_per_second
>
0
)
{
items
=
StrCat
(
" "
,
HumanReadableNumber
(
result
.
items_per_second
),
" items/s"
);
}
}
const
double
real_time
=
result
.
GetAdjustedRealTime
();
const
double
cpu_time
=
result
.
GetAdjustedCPUTime
();
if
(
result
.
report_big_o
)
{
std
::
string
big_o
=
result
.
report_big_o
?
GetBigOString
(
result
.
complexity
)
:
""
;
ColorPrintf
(
Out
,
COLOR_YELLOW
,
"%10.
4f %s %10.4f %s "
,
real_time
,
big_o
.
c_str
(),
cpu_time
,
big_o
.
c_str
());
}
else
if
(
result
.
report_rms
)
{
ColorPrintf
(
Out
,
COLOR_YELLOW
,
"%10.0f %% %10.0f %% "
,
real_time
*
100
,
cpu_time
*
100
);
if
(
result
.
report_big_o
)
{
std
::
string
big_o
=
GetBigOString
(
result
.
complexity
)
;
ColorPrintf
(
Out
,
COLOR_YELLOW
,
"%10.
2f %s %10.2f %s "
,
real_time
,
big_o
.
c_str
(),
cpu_time
,
big_o
.
c_str
());
}
else
if
(
result
.
report_rms
)
{
ColorPrintf
(
Out
,
COLOR_YELLOW
,
"%10.0f %% %10.0f %% "
,
real_time
*
100
,
cpu_time
*
100
);
}
else
{
const
char
*
timeLabel
=
GetTimeUnitString
(
result
.
time_unit
);
ColorPrintf
(
Out
,
COLOR_YELLOW
,
"%10.0f %s %10.0f %s "
,
real_time
,
timeLabel
,
cpu_time
,
timeLabel
);
ColorPrintf
(
Out
,
COLOR_YELLOW
,
"%10.0f %s %10.0f %s "
,
real_time
,
timeLabel
,
cpu_time
,
timeLabel
);
}
if
(
!
result
.
report_big_o
&&
!
result
.
report_rms
)
{
if
(
!
result
.
report_big_o
&&
!
result
.
report_rms
)
{
ColorPrintf
(
Out
,
COLOR_CYAN
,
"%10lld"
,
result
.
iterations
);
}
...
...
src/csv_reporter.cc
View file @
2d088a9f
...
...
@@ -13,9 +13,10 @@
// limitations under the License.
#include "benchmark/reporter.h"
#include "complexity.h"
#include <cstdint>
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <string>
#include <tuple>
...
...
@@ -79,7 +80,7 @@ void CSVReporter::PrintRunData(const Run & run) {
}
// Do not print iteration on bigO and RMS report
if
(
!
run
.
report_big_o
&&
!
run
.
report_rms
)
{
if
(
!
run
.
report_big_o
&&
!
run
.
report_rms
)
{
Out
<<
run
.
iterations
;
}
Out
<<
","
;
...
...
@@ -87,8 +88,10 @@ void CSVReporter::PrintRunData(const Run & run) {
Out
<<
run
.
GetAdjustedRealTime
()
<<
","
;
Out
<<
run
.
GetAdjustedCPUTime
()
<<
","
;
// Do not print timeLabel on RMS report
if
(
!
run
.
report_rms
)
{
// Do not print timeLabel on bigO and RMS report
if
(
run
.
report_big_o
)
{
Out
<<
GetBigOString
(
run
.
complexity
);
}
else
if
(
!
run
.
report_rms
)
{
Out
<<
GetTimeUnitString
(
run
.
time_unit
);
}
Out
<<
","
;
...
...
@@ -108,7 +111,7 @@ void CSVReporter::PrintRunData(const Run & run) {
ReplaceAll
(
&
label
,
"
\"
"
,
"
\"\"
"
);
Out
<<
"
\"
"
<<
label
<<
"
\"
"
;
}
Out
<<
",,"
;
// for error_occurred and error_message
Out
<<
",,"
;
// for error_occurred and error_message
Out
<<
'\n'
;
}
...
...
src/json_reporter.cc
View file @
2d088a9f
...
...
@@ -13,9 +13,10 @@
// limitations under the License.
#include "benchmark/reporter.h"
#include "complexity.h"
#include <cstdint>
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <string>
#include <tuple>
...
...
@@ -99,24 +100,24 @@ void JSONReporter::ReportRuns(std::vector<Run> const& reports) {
first_report_
=
false
;
for
(
auto
it
=
reports
.
begin
();
it
!=
reports
.
end
();
++
it
)
{
out
<<
indent
<<
"{
\n
"
;
PrintRunData
(
*
it
);
out
<<
indent
<<
'}'
;
auto
it_cp
=
it
;
if
(
++
it_cp
!=
reports
.
end
())
{
out
<<
",
\n
"
;
}
out
<<
indent
<<
"{
\n
"
;
PrintRunData
(
*
it
);
out
<<
indent
<<
'}'
;
auto
it_cp
=
it
;
if
(
++
it_cp
!=
reports
.
end
())
{
out
<<
",
\n
"
;
}
}
}
void
JSONReporter
::
Finalize
()
{
// Close the list of benchmarks and the top level object.
GetOutputStream
()
<<
"
\n
]
\n
}
\n
"
;
// Close the list of benchmarks and the top level object.
GetOutputStream
()
<<
"
\n
]
\n
}
\n
"
;
}
void
JSONReporter
::
PrintRunData
(
Run
const
&
run
)
{
std
::
string
indent
(
6
,
' '
);
std
::
ostream
&
out
=
GetOutputStream
();
std
::
string
indent
(
6
,
' '
);
std
::
ostream
&
out
=
GetOutputStream
();
out
<<
indent
<<
FormatKV
(
"name"
,
run
.
benchmark_name
)
<<
",
\n
"
;
...
...
@@ -128,33 +129,50 @@ void JSONReporter::PrintRunData(Run const& run) {
<<
FormatKV
(
"error_message"
,
run
.
error_message
)
<<
",
\n
"
;
}
if
(
!
run
.
report_big_o
&&
!
run
.
report_rms
)
{
if
(
!
run
.
report_big_o
&&
!
run
.
report_rms
)
{
out
<<
indent
<<
FormatKV
(
"iterations"
,
run
.
iterations
)
<<
",
\n
"
;
}
out
<<
indent
<<
FormatKV
(
"real_time"
,
RoundDouble
(
run
.
GetAdjustedRealTime
()))
<<
",
\n
"
;
out
<<
indent
<<
FormatKV
(
"cpu_time"
,
RoundDouble
(
run
.
GetAdjustedCPUTime
()));
out
<<
",
\n
"
<<
indent
<<
FormatKV
(
"time_unit"
,
GetTimeUnitString
(
run
.
time_unit
));
}
else
if
(
run
.
report_big_o
)
{
out
<<
indent
<<
FormatKV
(
"
real_time"
,
RoundDouble
(
run
.
GetAdjustedReal
Time
()))
<<
FormatKV
(
"
cpu_coefficient"
,
RoundDouble
(
run
.
GetAdjustedCPU
Time
()))
<<
",
\n
"
;
out
<<
indent
<<
FormatKV
(
"cpu_time"
,
RoundDouble
(
run
.
GetAdjustedCPUTime
()));
if
(
!
run
.
report_rms
)
{
out
<<
",
\n
"
<<
indent
<<
FormatKV
(
"real_coefficient"
,
RoundDouble
(
run
.
GetAdjustedRealTime
()))
<<
",
\n
"
;
out
<<
indent
<<
FormatKV
(
"big_o"
,
GetBigOString
(
run
.
complexity
))
<<
",
\n
"
;
out
<<
indent
<<
FormatKV
(
"time_unit"
,
GetTimeUnitString
(
run
.
time_unit
));
}
if
(
run
.
bytes_per_second
>
0.0
)
{
out
<<
",
\n
"
<<
indent
<<
FormatKV
(
"bytes_per_second"
,
RoundDouble
(
run
.
bytes_per_second
));
}
if
(
run
.
items_per_second
>
0.0
)
{
out
<<
",
\n
"
<<
indent
<<
FormatKV
(
"items_per_second"
,
RoundDouble
(
run
.
items_per_second
));
}
if
(
!
run
.
report_label
.
empty
())
{
out
<<
",
\n
"
<<
indent
<<
FormatKV
(
"label"
,
run
.
report_label
);
}
out
<<
'\n'
;
}
else
if
(
run
.
report_rms
)
{
out
<<
indent
<<
FormatKV
(
"rms"
,
RoundDouble
(
run
.
GetAdjustedCPUTime
()
*
100
))
<<
'%'
;
}
if
(
run
.
bytes_per_second
>
0.0
)
{
out
<<
",
\n
"
<<
indent
<<
FormatKV
(
"bytes_per_second"
,
RoundDouble
(
run
.
bytes_per_second
));
}
if
(
run
.
items_per_second
>
0.0
)
{
out
<<
",
\n
"
<<
indent
<<
FormatKV
(
"items_per_second"
,
RoundDouble
(
run
.
items_per_second
));
}
if
(
!
run
.
report_label
.
empty
())
{
out
<<
",
\n
"
<<
indent
<<
FormatKV
(
"label"
,
run
.
report_label
);
}
out
<<
'\n'
;
}
}
// end namespace benchmark
}
// end namespace benchmark
test/complexity_test.cc
View file @
2d088a9f
#include "benchmark/benchmark_api.h"
#include <cstdlib>
#include <string>
#undef NDEBUG
#include "benchmark/benchmark.h"
#include "../src/check.h" // NOTE: check.h is for internal use only!
#include "../src/re.h" // NOTE: re.h is for internal use only
#include <cassert>
#include <cstring>
#include <iostream>
#include <sstream>
#include <vector>
#include <
map
>
#include <
utility
>
#include <algorithm>
std
::
vector
<
int
>
ConstructRandomVector
(
int
size
)
{
std
::
vector
<
int
>
v
;
v
.
reserve
(
size
);
for
(
int
i
=
0
;
i
<
size
;
++
i
)
{
v
.
push_back
(
rand
()
%
size
);
namespace
{
// ========================================================================= //
// -------------------------- Testing Case --------------------------------- //
// ========================================================================= //
enum
MatchRules
{
MR_Default
,
// Skip non-matching lines until a match is found.
MR_Next
// Match must occur on the next line.
};
struct
TestCase
{
std
::
string
regex
;
int
match_rule
;
TestCase
(
std
::
string
re
,
int
rule
=
MR_Default
)
:
regex
(
re
),
match_rule
(
rule
)
{}
void
Check
(
std
::
stringstream
&
remaining_output
)
const
{
benchmark
::
Regex
r
;
std
::
string
err_str
;
r
.
Init
(
regex
,
&
err_str
);
CHECK
(
err_str
.
empty
())
<<
"Could not construct regex
\"
"
<<
regex
<<
"
\"
"
<<
" got Error: "
<<
err_str
;
std
::
string
line
;
while
(
remaining_output
.
eof
()
==
false
)
{
CHECK
(
remaining_output
.
good
());
std
::
getline
(
remaining_output
,
line
);
if
(
r
.
Match
(
line
))
return
;
CHECK
(
match_rule
!=
MR_Next
)
<<
"Expected line
\"
"
<<
line
<<
"
\"
to match regex
\"
"
<<
regex
<<
"
\"
"
;
}
CHECK
(
remaining_output
.
eof
()
==
false
)
<<
"End of output reached before match for regex
\"
"
<<
regex
<<
"
\"
was found"
;
}
return
v
;
}
};
std
::
map
<
int
,
int
>
ConstructRandomMap
(
int
size
)
{
std
::
map
<
int
,
int
>
m
;
for
(
int
i
=
0
;
i
<
size
;
++
i
)
{
m
.
insert
(
std
::
make_pair
(
rand
()
%
size
,
rand
()
%
size
));
std
::
vector
<
TestCase
>
ConsoleOutputTests
;
std
::
vector
<
TestCase
>
JSONOutputTests
;
std
::
vector
<
TestCase
>
CSVOutputTests
;
// ========================================================================= //
// -------------------------- Test Helpers --------------------------------- //
// ========================================================================= //
class
TestReporter
:
public
benchmark
::
BenchmarkReporter
{
public
:
TestReporter
(
std
::
vector
<
benchmark
::
BenchmarkReporter
*>
reps
)
:
reporters_
(
reps
)
{}
virtual
bool
ReportContext
(
const
Context
&
context
)
{
bool
last_ret
=
false
;
bool
first
=
true
;
for
(
auto
rep
:
reporters_
)
{
bool
new_ret
=
rep
->
ReportContext
(
context
);
CHECK
(
first
||
new_ret
==
last_ret
)
<<
"Reports return different values for ReportContext"
;
first
=
false
;
last_ret
=
new_ret
;
}
return
last_ret
;
}
virtual
void
ReportRuns
(
const
std
::
vector
<
Run
>&
report
)
{
for
(
auto
rep
:
reporters_
)
rep
->
ReportRuns
(
report
);
}
virtual
void
Finalize
()
{
for
(
auto
rep
:
reporters_
)
rep
->
Finalize
();
}
return
m
;
private
:
std
::
vector
<
benchmark
::
BenchmarkReporter
*>
reporters_
;
};
#define CONCAT2(x, y) x##y
#define CONCAT(x, y) CONCAT2(x, y)
#define ADD_CASES(...) \
int CONCAT(dummy, __LINE__) = AddCases(__VA_ARGS__)
int
AddCases
(
std
::
vector
<
TestCase
>*
out
,
std
::
initializer_list
<
TestCase
>
const
&
v
)
{
for
(
auto
const
&
TC
:
v
)
out
->
push_back
(
TC
);
return
0
;
}
template
<
class
First
>
std
::
string
join
(
First
f
)
{
return
f
;
}
template
<
class
First
,
class
...
Args
>
std
::
string
join
(
First
f
,
Args
&&
...
args
)
{
return
std
::
string
(
std
::
move
(
f
))
+
"[ ]+"
+
join
(
std
::
forward
<
Args
>
(
args
)...);
}
std
::
string
dec_re
=
"[0-9]+
\\
.[0-9]+"
;
#define ADD_COMPLEXITY_CASES(...) \
int CONCAT(dummy, __LINE__) = AddComplexityTest(__VA_ARGS__)
int
AddComplexityTest
(
std
::
vector
<
TestCase
>*
console_out
,
std
::
vector
<
TestCase
>*
json_out
,
std
::
vector
<
TestCase
>*
csv_out
,
std
::
string
big_o_test_name
,
std
::
string
rms_test_name
,
std
::
string
big_o
)
{
std
::
string
big_o_str
=
dec_re
+
" "
+
big_o
;
AddCases
(
console_out
,
{
{
join
(
"^"
+
big_o_test_name
+
""
,
big_o_str
,
big_o_str
)
+
"[ ]*$"
},
{
join
(
"^"
+
rms_test_name
+
""
,
"[0-9]+ %"
,
"[0-9]+ %"
)
+
"[ ]*$"
}
});
AddCases
(
json_out
,
{
{
"
\"
name
\"
:
\"
"
+
big_o_test_name
+
"
\"
,$"
},
{
"
\"
cpu_coefficient
\"
: [0-9]+,$"
,
MR_Next
},
{
"
\"
real_coefficient
\"
: [0-9]{1,5},$"
,
MR_Next
},
{
"
\"
big_o
\"
:
\"
"
+
big_o
+
"
\"
,$"
,
MR_Next
},
{
"
\"
time_unit
\"
:
\"
ns
\"
$"
,
MR_Next
},
{
"}"
,
MR_Next
},
{
"
\"
name
\"
:
\"
"
+
rms_test_name
+
"
\"
,$"
},
{
"
\"
rms
\"
: [0-9]+%$"
,
MR_Next
},
{
"}"
,
MR_Next
}
});
AddCases
(
csv_out
,
{
{
"^
\"
"
+
big_o_test_name
+
"
\"
,,"
+
dec_re
+
","
+
dec_re
+
","
+
big_o
+
",,,,,$"
},
{
"^
\"
"
+
rms_test_name
+
"
\"
,,"
+
dec_re
+
","
+
dec_re
+
",,,,,,$"
}
});
return
0
;
}
}
// end namespace
// ========================================================================= //
// --------------------------- Testing BigO O(1) --------------------------- //
// ========================================================================= //
// Benchmark with an empty timed loop; used below to exercise O(1)
// complexity reporting (enum, lambda, and auto-fitted variants).
void BM_Complexity_O1(benchmark::State& state) {
  while (state.KeepRunning()) {
  }
  // Report the problem size so the complexity fit has an N to work with.
  state.SetComplexityN(state.range_x());
}
// Register BM_Complexity_O1 three ways: explicit O(1) enum, user-provided
// lambda (reported as "f(N)"), and automatic best-fit deduction.
BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity(benchmark::o1);
BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity([](int) { return 1.0; });
BENCHMARK(BM_Complexity_O1)->Range(1, 1 << 18)->Complexity();
// Expected report row names/patterns for the O(1) benchmark family.
const char* big_o_1_test_name = "BM_Complexity_O1_BigO";
const char* rms_o_1_test_name = "BM_Complexity_O1_RMS";
// Auto-fitted O(1) prints its coefficient in parentheses: "(<n>)".
const char* enum_auto_big_o_1 = "\\([0-9]+\\)";
// A lambda-specified complexity is always printed as "f(N)".
const char* lambda_big_o_1 = "f\\(N\\)";
static
void
BM_Complexity_O_N
(
benchmark
::
State
&
state
)
{
// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_1_test_name, rms_o_1_test_name, enum_auto_big_o_1);

// Add lambda tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_1_test_name, rms_o_1_test_name, lambda_big_o_1);
// ========================================================================= //
// --------------------------- Testing BigO O(N) --------------------------- //
// ========================================================================= //
// Builds a vector of `size` pseudo-random ints, each drawn from [0, size)
// via rand() (deterministic for a given srand() seed).
std::vector<int> ConstructRandomVector(int size) {
  std::vector<int> result;
  result.reserve(size);
  for (int remaining = size; remaining > 0; --remaining) {
    result.push_back(rand() % size);
  }
  return result;
}
void
BM_Complexity_O_N
(
benchmark
::
State
&
state
)
{
auto
v
=
ConstructRandomVector
(
state
.
range_x
());
const
int
item_not_in_vector
=
state
.
range_x
()
*
2
;
// Test worst case scenario (item not in vector)
while
(
state
.
KeepRunning
())
{
...
...
@@ -40,50 +192,25 @@ static void BM_Complexity_O_N(benchmark::State& state) {
state
.
SetComplexityN
(
state
.
range_x
());
}
// Register BM_Complexity_O_N three ways: explicit O(N) enum, user-provided
// lambda (reported as "f(N)"), and automatic best-fit deduction.
BENCHMARK(BM_Complexity_O_N)->RangeMultiplier(2)->Range(1 << 10, 1 << 16)->Complexity(benchmark::oN);
BENCHMARK(BM_Complexity_O_N)->RangeMultiplier(2)->Range(1 << 10, 1 << 16)->Complexity([](int n) -> double { return n; });
BENCHMARK(BM_Complexity_O_N)->RangeMultiplier(2)->Range(1 << 10, 1 << 16)->Complexity();
// O(N^2) workload: writes every (c1, c2) pair of two N-character strings.
static void BM_Complexity_O_N_Squared(benchmark::State& state) {
  std::string s1(state.range_x(), '-');
  std::string s2(state.range_x(), '-');
  state.SetComplexityN(state.range_x());
  while (state.KeepRunning())
    for (char& c1 : s1) {
      for (char& c2 : s2) {
        // DoNotOptimize keeps the stores from being optimized away.
        benchmark::DoNotOptimize(c1 = 'a');
        benchmark::DoNotOptimize(c2 = 'b');
      }
    }
}
// Small range: the quadratic body is expensive per iteration.
BENCHMARK(BM_Complexity_O_N_Squared)->Range(1, 1 << 8)->Complexity(benchmark::oNSquared);
// Expected report row names/patterns for the O(N) benchmark family.
const char* big_o_n_test_name = "BM_Complexity_O_N_BigO";
const char* rms_o_n_test_name = "BM_Complexity_O_N_RMS";
const char* enum_auto_big_o_n = "N";
// A lambda-specified complexity is always printed as "f(N)".
const char* lambda_big_o_n = "f\\(N\\)";
// O(N^3) workload: writes every (c1, c2, c3) triple of three N-character
// strings.
static void BM_Complexity_O_N_Cubed(benchmark::State& state) {
  std::string s1(state.range_x(), '-');
  std::string s2(state.range_x(), '-');
  std::string s3(state.range_x(), '-');
  state.SetComplexityN(state.range_x());
  while (state.KeepRunning())
    for (char& c1 : s1) {
      for (char& c2 : s2) {
        for (char& c3 : s3) {
          // DoNotOptimize keeps the stores from being optimized away.
          benchmark::DoNotOptimize(c1 = 'a');
          benchmark::DoNotOptimize(c2 = 'b');
          benchmark::DoNotOptimize(c3 = 'c');
        }
      }
    }
}
// Tiny dense range: the cubic body is very expensive per iteration.
BENCHMARK(BM_Complexity_O_N_Cubed)->DenseRange(1, 8)->Complexity(benchmark::oNCubed);

// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_test_name, rms_o_n_test_name, enum_auto_big_o_n);
// O(log N) workload: a failed lookup in an N-element map built by
// ConstructRandomMap (defined elsewhere in this file).
static void BM_Complexity_O_log_N(benchmark::State& state) {
  auto m = ConstructRandomMap(state.range_x());
  // Test worst case scenario (item not in map).  Renamed from
  // `item_not_in_vector`: this benchmark searches a map, not a vector.
  // NOTE(review): assumes range_x()*2 is absent from the map — confirm
  // against ConstructRandomMap's key distribution.
  const int item_not_in_map = state.range_x() * 2;
  while (state.KeepRunning()) {
    benchmark::DoNotOptimize(m.find(item_not_in_map));
  }
  state.SetComplexityN(state.range_x());
}
BENCHMARK(BM_Complexity_O_log_N)->RangeMultiplier(2)->Range(1 << 10, 1 << 16)->Complexity(benchmark::oLogN);

// Add lambda tests (O(N) family; lambdas are reported as "f(N)").
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_test_name, rms_o_n_test_name, lambda_big_o_n);
// ========================================================================= //
// ------------------------- Testing BigO O(N*lgN) ------------------------- //
// ========================================================================= //
static
void
BM_Complexity_O_N_log_N
(
benchmark
::
State
&
state
)
{
auto
v
=
ConstructRandomVector
(
state
.
range_x
());
...
...
@@ -93,14 +220,77 @@ static void BM_Complexity_O_N_log_N(benchmark::State& state) {
state
.
SetComplexityN
(
state
.
range_x
());
}
// Register BM_Complexity_O_N_log_N three ways: explicit O(N*lgN) enum,
// user-provided lambda (reported as "f(N)"), and automatic best-fit.
BENCHMARK(BM_Complexity_O_N_log_N)->RangeMultiplier(2)->Range(1 << 10, 1 << 16)->Complexity(benchmark::oNLogN);
BENCHMARK(BM_Complexity_O_N_log_N)->RangeMultiplier(2)->Range(1 << 10, 1 << 16)->Complexity([](int n) { return n * log2(n); });
BENCHMARK(BM_Complexity_O_N_log_N)->RangeMultiplier(2)->Range(1 << 10, 1 << 16)->Complexity();
// Test benchmark with no range and check no complexity is calculated.
void
BM_Extreme_Cases
(
benchmark
::
State
&
state
)
{
while
(
state
.
KeepRunning
())
{
// Expected report row names/patterns for the O(N*lgN) benchmark family.
const char* big_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_BigO";
const char* rms_o_n_lg_n_test_name = "BM_Complexity_O_N_log_N_RMS";
const char* enum_auto_big_o_n_lg_n = "NlgN";
// A lambda-specified complexity is always printed as "f(N)".
const char* lambda_big_o_n_lg_n = "f\\(N\\)";

// Add enum tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name,
                     enum_auto_big_o_n_lg_n);

// Add lambda tests
ADD_COMPLEXITY_CASES(&ConsoleOutputTests, &JSONOutputTests, &CSVOutputTests,
                     big_o_n_lg_n_test_name, rms_o_n_lg_n_test_name,
                     lambda_big_o_n_lg_n);
// ========================================================================= //
// --------------------------- TEST CASES END ------------------------------ //
// ========================================================================= //
// Runs all registered benchmarks through the three real reporters and then
// checks each reporter's captured output against its expected TestCase list.
int main(int argc, char* argv[]) {
  // Add --color_print=false to argv since we don't want to match color codes.
  // NOTE(review): new_argv has a fixed capacity of 64 slots; assumes
  // argc < 64 — confirm this is acceptable for a test binary.
  char new_arg[64];
  char* new_argv[64];
  std::copy(argv, argv + argc, new_argv);
  new_argv[argc++] = std::strcpy(new_arg, "--color_print=false");
  benchmark::Initialize(&argc, new_argv);

  benchmark::ConsoleReporter CR;
  benchmark::JSONReporter JR;
  benchmark::CSVReporter CSVR;

  // Pairs each reporter with its expected-output list and redirects its
  // output/error streams into in-memory buffers for later matching.
  struct ReporterTest {
    const char* name;
    std::vector<TestCase>& output_cases;
    benchmark::BenchmarkReporter& reporter;
    std::stringstream out_stream;
    std::stringstream err_stream;

    ReporterTest(const char* n, std::vector<TestCase>& out_tc,
                 benchmark::BenchmarkReporter& br)
        : name(n), output_cases(out_tc), reporter(br) {
      reporter.SetOutputStream(&out_stream);
      reporter.SetErrorStream(&err_stream);
    }
  } TestCases[] = {
      {"ConsoleReporter", ConsoleOutputTests, CR},
      {"JSONReporter", JSONOutputTests, JR},
      {"CSVReporter", CSVOutputTests, CSVR}
  };

  // Create the test reporter and run the benchmarks.
  std::cout << "Running benchmarks...\n";
  TestReporter test_rep({&CR, &JR, &CSVR});
  benchmark::RunSpecifiedBenchmarks(&test_rep);

  // Echo each reporter's captured output, then match it against the
  // expected patterns; TestCase::Check aborts/reports on mismatch.
  for (auto& rep_test : TestCases) {
    std::string msg = std::string("\nTesting ") + rep_test.name + " Output\n";
    std::string banner(msg.size() - 1, '-');
    std::cout << banner << msg << banner << "\n";

    std::cerr << rep_test.err_stream.str();
    std::cout << rep_test.out_stream.str();

    for (const auto& TC : rep_test.output_cases) TC.Check(rep_test.out_stream);

    std::cout << "\n";
  }
  return 0;
}
// Extreme cases: a complexity request without a range, and a single-arg run
// with auto-fitting; no complexity should be computed for either.
BENCHMARK(BM_Extreme_Cases)->Complexity(benchmark::oNLogN);
BENCHMARK(BM_Extreme_Cases)->Arg(42)->Complexity();
BENCHMARK_MAIN
()
test/reporter_output_test.cc
View file @
2d088a9f
...
...
@@ -189,7 +189,7 @@ void BM_Complexity_O1(benchmark::State& state) {
}
BENCHMARK
(
BM_Complexity_O1
)
->
Range
(
1
,
1
<<
18
)
->
Complexity
(
benchmark
::
o1
);
std
::
string
bigOStr
=
"[0-9]+
\\
.[0-9]+
\\
* [0-9]+
"
;
std
::
string
bigOStr
=
"[0-9]+
\\
.[0-9]+
\\
([0-9]+
\\
)
"
;
ADD_CASES
(
&
ConsoleOutputTests
,
{
{
join
(
"^BM_Complexity_O1_BigO"
,
bigOStr
,
bigOStr
)
+
"[ ]*$"
},
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment