Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
G
googletest
Project
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Chen Yisong
googletest
Commits
15cbe5f7
Commit
15cbe5f7
authored
Jul 25, 2008
by
shiqian
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Adds --gtest_print_time for printing the elapsed time of tests.
parent
dbc56bf8
Show whitespace changes
Inline
Side-by-side
Showing
6 changed files
with
280 additions
and
7 deletions
+280
-7
gtest-internal-inl.h
src/gtest-internal-inl.h
+6
-1
gtest.cc
src/gtest.cc
+33
-1
gtest_output_test.py
test/gtest_output_test.py
+15
-5
gtest_output_test_golden_lin.txt
test/gtest_output_test_golden_lin.txt
+70
-0
gtest_output_test_golden_win.txt
test/gtest_output_test_golden_win.txt
+61
-0
gtest_unittest.cc
test/gtest_unittest.cc
+95
-0
No files found.
src/gtest-internal-inl.h
View file @
15cbe5f7
...
...
@@ -70,6 +70,7 @@ GTEST_DECLARE_string(color);
GTEST_DECLARE_string
(
filter
);
GTEST_DECLARE_bool
(
list_tests
);
GTEST_DECLARE_string
(
output
);
GTEST_DECLARE_bool
(
print_time
);
GTEST_DECLARE_int32
(
repeat
);
GTEST_DECLARE_int32
(
stack_trace_depth
);
GTEST_DECLARE_bool
(
show_internal_stack_frames
);
...
...
@@ -79,10 +80,11 @@ namespace internal {
// Names of the flags (needed for parsing Google Test flags).
const
char
kBreakOnFailureFlag
[]
=
"break_on_failure"
;
const
char
kCatchExceptionsFlag
[]
=
"catch_exceptions"
;
const
char
kColorFlag
[]
=
"color"
;
const
char
kFilterFlag
[]
=
"filter"
;
const
char
kListTestsFlag
[]
=
"list_tests"
;
const
char
kOutputFlag
[]
=
"output"
;
const
char
k
ColorFlag
[]
=
"color
"
;
const
char
k
PrintTimeFlag
[]
=
"print_time
"
;
const
char
kRepeatFlag
[]
=
"repeat"
;
// This class saves the values of all Google Test flags in its c'tor, and
...
...
@@ -99,6 +101,7 @@ class GTestFlagSaver {
internal_run_death_test_
=
GTEST_FLAG
(
internal_run_death_test
);
list_tests_
=
GTEST_FLAG
(
list_tests
);
output_
=
GTEST_FLAG
(
output
);
print_time_
=
GTEST_FLAG
(
print_time
);
repeat_
=
GTEST_FLAG
(
repeat
);
}
...
...
@@ -112,6 +115,7 @@ class GTestFlagSaver {
GTEST_FLAG
(
internal_run_death_test
)
=
internal_run_death_test_
;
GTEST_FLAG
(
list_tests
)
=
list_tests_
;
GTEST_FLAG
(
output
)
=
output_
;
GTEST_FLAG
(
print_time
)
=
print_time_
;
GTEST_FLAG
(
repeat
)
=
repeat_
;
}
private
:
...
...
@@ -124,6 +128,7 @@ class GTestFlagSaver {
String
internal_run_death_test_
;
bool
list_tests_
;
String
output_
;
bool
print_time_
;
bool
pretty_
;
internal
::
Int32
repeat_
;
}
GTEST_ATTRIBUTE_UNUSED
;
...
...
src/gtest.cc
View file @
15cbe5f7
...
...
@@ -169,6 +169,12 @@ GTEST_DEFINE_string(
"executable's name and, if necessary, made unique by adding "
"digits."
);
GTEST_DEFINE_bool
(
print_time
,
internal
::
BoolFromGTestEnv
(
"print_time"
,
false
),
"True iff "
GTEST_NAME
" should display elapsed time in text output."
);
GTEST_DEFINE_int32
(
repeat
,
internal
::
Int32FromGTestEnv
(
"repeat"
,
1
),
...
...
@@ -2303,6 +2309,7 @@ class PrettyUnitTestResultPrinter : public UnitTestEventListenerInterface {
virtual
void
OnUnitTestStart
(
const
UnitTest
*
unit_test
);
virtual
void
OnGlobalSetUpStart
(
const
UnitTest
*
);
virtual
void
OnTestCaseStart
(
const
TestCase
*
test_case
);
virtual
void
OnTestCaseEnd
(
const
TestCase
*
test_case
);
virtual
void
OnTestStart
(
const
TestInfo
*
test_info
);
virtual
void
OnNewTestPartResult
(
const
TestPartResult
*
result
);
virtual
void
OnTestEnd
(
const
TestInfo
*
test_info
);
...
...
@@ -2349,6 +2356,20 @@ void PrettyUnitTestResultPrinter::OnTestCaseStart(
fflush
(
stdout
);
}
// Called after all tests in a test case have finished running.  Prints the
// closing "[----------]" summary line for the test case, including its total
// elapsed time.  This summary is only emitted when the --gtest_print_time
// flag is enabled, since it exists solely to report timing information.
void PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase* test_case) {
  // Nothing to report unless the user asked for elapsed times.
  if (!GTEST_FLAG(print_time)) return;

  test_case_name_ = test_case->name();
  // e.g. "3 tests" or "1 test" — pluralized by FormatCountableNoun.
  const internal::String counts =
      FormatCountableNoun(test_case->test_to_run_count(), "test", "tests");
  ColoredPrintf(COLOR_GREEN, "[----------] ");
  printf("%s from %s (%s ms total)\n\n",
         counts.c_str(),
         test_case_name_.c_str(),
         internal::StreamableToString(test_case->elapsed_time()).c_str());
  fflush(stdout);
}
void
PrettyUnitTestResultPrinter
::
OnTestStart
(
const
TestInfo
*
test_info
)
{
ColoredPrintf
(
COLOR_GREEN
,
"[ RUN ] "
);
PrintTestName
(
test_case_name_
.
c_str
(),
test_info
->
name
());
...
...
@@ -2363,7 +2384,12 @@ void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo * test_info) {
ColoredPrintf
(
COLOR_RED
,
"[ FAILED ] "
);
}
PrintTestName
(
test_case_name_
.
c_str
(),
test_info
->
name
());
if
(
GTEST_FLAG
(
print_time
))
{
printf
(
" (%s ms)
\n
"
,
internal
::
StreamableToString
(
test_info
->
result
()
->
elapsed_time
()).
c_str
());
}
else
{
printf
(
"
\n
"
);
}
fflush
(
stdout
);
}
...
...
@@ -2420,9 +2446,14 @@ void PrettyUnitTestResultPrinter::OnUnitTestEnd(
const
internal
::
UnitTestImpl
*
const
impl
=
unit_test
->
impl
();
ColoredPrintf
(
COLOR_GREEN
,
"[==========] "
);
printf
(
"%s from %s ran.
\n
"
,
printf
(
"%s from %s ran."
,
FormatTestCount
(
impl
->
test_to_run_count
()).
c_str
(),
FormatTestCaseCount
(
impl
->
test_case_to_run_count
()).
c_str
());
if
(
GTEST_FLAG
(
print_time
))
{
printf
(
" (%s ms total)"
,
internal
::
StreamableToString
(
impl
->
elapsed_time
()).
c_str
());
}
printf
(
"
\n
"
);
ColoredPrintf
(
COLOR_GREEN
,
"[ PASSED ] "
);
printf
(
"%s.
\n
"
,
FormatTestCount
(
impl
->
successful_test_count
()).
c_str
());
...
...
@@ -3505,6 +3536,7 @@ void InitGoogleTestImpl(int* argc, CharType** argv) {
&
GTEST_FLAG
(
internal_run_death_test
))
||
ParseBoolFlag
(
arg
,
kListTestsFlag
,
&
GTEST_FLAG
(
list_tests
))
||
ParseStringFlag
(
arg
,
kOutputFlag
,
&
GTEST_FLAG
(
output
))
||
ParseBoolFlag
(
arg
,
kPrintTimeFlag
,
&
GTEST_FLAG
(
print_time
))
||
ParseInt32Flag
(
arg
,
kRepeatFlag
,
&
GTEST_FLAG
(
repeat
))
)
{
// Yes. Shift the remainder of the argv list left by one. Note
...
...
test/gtest_output_test.py
View file @
15cbe5f7
...
...
@@ -58,8 +58,10 @@ else:
PROGRAM
=
'gtest_output_test_'
GOLDEN_NAME
=
'gtest_output_test_golden_lin.txt'
COMMAND
=
os
.
path
.
join
(
gtest_test_utils
.
GetBuildDir
(),
PROGRAM
)
+
' --gtest_color=yes'
PROGRAM_PATH
=
os
.
path
.
join
(
gtest_test_utils
.
GetBuildDir
(),
PROGRAM
)
COMMAND_WITH_COLOR
=
PROGRAM_PATH
+
' --gtest_color=yes'
COMMAND_WITH_TIME
=
(
PROGRAM_PATH
+
' --gtest_print_time '
+
'--gtest_filter="FatalFailureTest.*:LoggingTest.*"'
)
GOLDEN_PATH
=
os
.
path
.
join
(
gtest_test_utils
.
GetSourceDir
(),
GOLDEN_NAME
)
...
...
@@ -94,12 +96,19 @@ def RemoveStackTraces(output):
'Stack trace: (omitted)
\n\n
'
,
output
)
def
RemoveTime
(
output
):
"""Removes all time information from a Google Test program's output."""
return
re
.
sub
(
r'\(\d+ ms'
,
'(? ms'
,
output
)
def
NormalizeOutput
(
output
):
"""Normalizes output (the output of gtest_output_test_.exe)."""
output
=
ToUnixLineEnding
(
output
)
output
=
RemoveLocations
(
output
)
output
=
RemoveStackTraces
(
output
)
output
=
RemoveTime
(
output
)
return
output
...
...
@@ -165,8 +174,8 @@ def GetCommandOutput(cmd):
class
GTestOutputTest
(
unittest
.
TestCase
):
def
testOutput
(
self
):
output
=
GetCommandOutput
(
COMMAND
)
output
=
(
GetCommandOutput
(
COMMAND_WITH_COLOR
)
+
GetCommandOutput
(
COMMAND_WITH_TIME
))
golden_file
=
open
(
GOLDEN_PATH
,
'rb'
)
golden
=
golden_file
.
read
()
golden_file
.
close
()
...
...
@@ -176,7 +185,8 @@ class GTestOutputTest(unittest.TestCase):
if
__name__
==
'__main__'
:
if
sys
.
argv
[
1
:]
==
[
GENGOLDEN_FLAG
]:
output
=
GetCommandOutput
(
COMMAND
)
output
=
(
GetCommandOutput
(
COMMAND_WITH_COLOR
)
+
GetCommandOutput
(
COMMAND_WITH_TIME
))
golden_file
=
open
(
GOLDEN_PATH
,
'wb'
)
golden_file
.
write
(
output
)
golden_file
.
close
()
...
...
test/gtest_output_test_golden_lin.txt
View file @
15cbe5f7
...
...
@@ -381,3 +381,73 @@ Expected fatal failure.
[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenStatementReturns
26 FAILED TESTS
The non-test part of the code is expected to have 2 failures.
gtest_output_test_.cc:#: Failure
Value of: false
Actual: false
Expected: true
gtest_output_test_.cc:#: Failure
Value of: 3
Expected: 2
Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
[==========] Running 4 tests from 2 test cases.
[----------] Global test environment set-up.
FooEnvironment::SetUp() called.
BarEnvironment::SetUp() called.
[----------] 3 tests from FatalFailureTest
[ RUN ] FatalFailureTest.FatalFailureInSubroutine
(expecting a failure that x should be 1)
gtest_output_test_.cc:#: Failure
Value of: x
Actual: 2
Expected: 1
[ FAILED ] FatalFailureTest.FatalFailureInSubroutine (? ms)
[ RUN ] FatalFailureTest.FatalFailureInNestedSubroutine
(expecting a failure that x should be 1)
gtest_output_test_.cc:#: Failure
Value of: x
Actual: 2
Expected: 1
[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
[ RUN ] FatalFailureTest.NonfatalFailureInSubroutine
(expecting a failure on false)
gtest_output_test_.cc:#: Failure
Value of: false
Actual: false
Expected: true
[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
[----------] 3 tests from FatalFailureTest (? ms total)
[----------] 1 test from LoggingTest
[ RUN ] LoggingTest.InterleavingLoggingAndAssertions
(expecting 2 failures on (3) >= (a[i]))
i == 0
i == 1
gtest_output_test_.cc:#: Failure
Expected: (3) >= (a[i]), actual: 3 vs 9
i == 2
i == 3
gtest_output_test_.cc:#: Failure
Expected: (3) >= (a[i]), actual: 3 vs 6
[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
[----------] 1 test from LoggingTest (? ms total)
[----------] Global test environment tear-down
BarEnvironment::TearDown() called.
gtest_output_test_.cc:#: Failure
Failed
Expected non-fatal failure.
FooEnvironment::TearDown() called.
gtest_output_test_.cc:#: Failure
Failed
Expected fatal failure.
[==========] 4 tests from 2 test cases ran. (? ms total)
[ PASSED ] 0 tests.
[ FAILED ] 4 tests, listed below:
[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine
[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine
[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions
4 FAILED TESTS
test/gtest_output_test_golden_win.txt
View file @
15cbe5f7
...
...
@@ -358,3 +358,64 @@ Expected fatal failure.
[ FAILED ] ExpectFatalFailureTest.FailsWhenStatementReturns
29 FAILED TESTS
The non-test part of the code is expected to have 2 failures.
gtest_output_test_.cc:#: error: Value of: false
Actual: false
Expected: true
gtest_output_test_.cc:#: error: Value of: 3
Expected: 2
Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
[==========] Running 4 tests from 2 test cases.
[----------] Global test environment set-up.
FooEnvironment::SetUp() called.
BarEnvironment::SetUp() called.
[----------] 3 tests from FatalFailureTest
[ RUN ] FatalFailureTest.FatalFailureInSubroutine
(expecting a failure that x should be 1)
gtest_output_test_.cc:#: error: Value of: x
Actual: 2
Expected: 1
[ FAILED ] FatalFailureTest.FatalFailureInSubroutine (? ms)
[ RUN ] FatalFailureTest.FatalFailureInNestedSubroutine
(expecting a failure that x should be 1)
gtest_output_test_.cc:#: error: Value of: x
Actual: 2
Expected: 1
[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
[ RUN ] FatalFailureTest.NonfatalFailureInSubroutine
(expecting a failure on false)
gtest_output_test_.cc:#: error: Value of: false
Actual: false
Expected: true
[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
[----------] 3 tests from FatalFailureTest (? ms total)
[----------] 1 test from LoggingTest
[ RUN ] LoggingTest.InterleavingLoggingAndAssertions
(expecting 2 failures on (3) >= (a[i]))
i == 0
i == 1
gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 9
i == 2
i == 3
gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 6
[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
[----------] 1 test from LoggingTest (? ms total)
[----------] Global test environment tear-down
BarEnvironment::TearDown() called.
gtest_output_test_.cc:#: error: Failed
Expected non-fatal failure.
FooEnvironment::TearDown() called.
gtest_output_test_.cc:#: error: Failed
Expected fatal failure.
[==========] 4 tests from 2 test cases ran. (? ms total)
[ PASSED ] 0 tests.
[ FAILED ] 4 tests, listed below:
[ FAILED ] FatalFailureTest.FatalFailureInSubroutine
[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine
[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine
[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions
4 FAILED TESTS
test/gtest_unittest.cc
View file @
15cbe5f7
...
...
@@ -808,6 +808,7 @@ class GTestFlagSaverTest : public testing::Test {
testing
::
GTEST_FLAG
(
filter
)
=
""
;
testing
::
GTEST_FLAG
(
list_tests
)
=
false
;
testing
::
GTEST_FLAG
(
output
)
=
""
;
testing
::
GTEST_FLAG
(
print_time
)
=
false
;
testing
::
GTEST_FLAG
(
repeat
)
=
1
;
}
...
...
@@ -827,6 +828,7 @@ class GTestFlagSaverTest : public testing::Test {
EXPECT_STREQ
(
""
,
testing
::
GTEST_FLAG
(
filter
).
c_str
());
EXPECT_FALSE
(
testing
::
GTEST_FLAG
(
list_tests
));
EXPECT_STREQ
(
""
,
testing
::
GTEST_FLAG
(
output
).
c_str
());
EXPECT_FALSE
(
testing
::
GTEST_FLAG
(
print_time
));
EXPECT_EQ
(
1
,
testing
::
GTEST_FLAG
(
repeat
));
testing
::
GTEST_FLAG
(
break_on_failure
)
=
true
;
...
...
@@ -835,6 +837,7 @@ class GTestFlagSaverTest : public testing::Test {
testing
::
GTEST_FLAG
(
filter
)
=
"abc"
;
testing
::
GTEST_FLAG
(
list_tests
)
=
true
;
testing
::
GTEST_FLAG
(
output
)
=
"xml:foo.xml"
;
testing
::
GTEST_FLAG
(
print_time
)
=
true
;
testing
::
GTEST_FLAG
(
repeat
)
=
100
;
}
private
:
...
...
@@ -3471,6 +3474,7 @@ struct Flags {
filter
(
""
),
list_tests
(
false
),
output
(
""
),
print_time
(
false
),
repeat
(
1
)
{}
// Factory methods.
...
...
@@ -3515,6 +3519,14 @@ struct Flags {
return
flags
;
}
// Creates a Flags struct where the gtest_print_time flag has the given
// value.
static
Flags
PrintTime
(
bool
print_time
)
{
Flags
flags
;
flags
.
print_time
=
print_time
;
return
flags
;
}
// Creates a Flags struct where the gtest_repeat flag has the given
// value.
static
Flags
Repeat
(
Int32
repeat
)
{
...
...
@@ -3529,6 +3541,7 @@ struct Flags {
const
char
*
filter
;
bool
list_tests
;
const
char
*
output
;
bool
print_time
;
Int32
repeat
;
};
...
...
@@ -3542,6 +3555,7 @@ class InitGoogleTestTest : public testing::Test {
GTEST_FLAG
(
filter
)
=
""
;
GTEST_FLAG
(
list_tests
)
=
false
;
GTEST_FLAG
(
output
)
=
""
;
GTEST_FLAG
(
print_time
)
=
false
;
GTEST_FLAG
(
repeat
)
=
1
;
}
...
...
@@ -3563,6 +3577,7 @@ class InitGoogleTestTest : public testing::Test {
EXPECT_STREQ
(
expected
.
filter
,
GTEST_FLAG
(
filter
).
c_str
());
EXPECT_EQ
(
expected
.
list_tests
,
GTEST_FLAG
(
list_tests
));
EXPECT_STREQ
(
expected
.
output
,
GTEST_FLAG
(
output
).
c_str
());
EXPECT_EQ
(
expected
.
print_time
,
GTEST_FLAG
(
print_time
));
EXPECT_EQ
(
expected
.
repeat
,
GTEST_FLAG
(
repeat
));
}
...
...
@@ -3950,6 +3965,86 @@ TEST_F(InitGoogleTestTest, OutputXmlDirectory) {
TEST_PARSING_FLAGS
(
argv
,
argv2
,
Flags
::
Output
(
"xml:directory/path/"
));
}
// Tests having a --gtest_print_time flag
TEST_F
(
InitGoogleTestTest
,
PrintTimeFlag
)
{
const
char
*
argv
[]
=
{
"foo.exe"
,
"--gtest_print_time"
,
NULL
};
const
char
*
argv2
[]
=
{
"foo.exe"
,
NULL
};
TEST_PARSING_FLAGS
(
argv
,
argv2
,
Flags
::
PrintTime
(
true
));
}
// Tests having a --gtest_print_time flag with a "true" value
TEST_F
(
InitGoogleTestTest
,
PrintTimeTrue
)
{
const
char
*
argv
[]
=
{
"foo.exe"
,
"--gtest_print_time=1"
,
NULL
};
const
char
*
argv2
[]
=
{
"foo.exe"
,
NULL
};
TEST_PARSING_FLAGS
(
argv
,
argv2
,
Flags
::
PrintTime
(
true
));
}
// Tests having a --gtest_print_time flag with a "false" value
TEST_F
(
InitGoogleTestTest
,
PrintTimeFalse
)
{
const
char
*
argv
[]
=
{
"foo.exe"
,
"--gtest_print_time=0"
,
NULL
};
const
char
*
argv2
[]
=
{
"foo.exe"
,
NULL
};
TEST_PARSING_FLAGS
(
argv
,
argv2
,
Flags
::
PrintTime
(
false
));
}
// Tests parsing --gtest_print_time=f.
TEST_F
(
InitGoogleTestTest
,
PrintTimeFalse_f
)
{
const
char
*
argv
[]
=
{
"foo.exe"
,
"--gtest_print_time=f"
,
NULL
};
const
char
*
argv2
[]
=
{
"foo.exe"
,
NULL
};
TEST_PARSING_FLAGS
(
argv
,
argv2
,
Flags
::
PrintTime
(
false
));
}
// Tests parsing --gtest_print_time=F.
TEST_F
(
InitGoogleTestTest
,
PrintTimeFalse_F
)
{
const
char
*
argv
[]
=
{
"foo.exe"
,
"--gtest_print_time=F"
,
NULL
};
const
char
*
argv2
[]
=
{
"foo.exe"
,
NULL
};
TEST_PARSING_FLAGS
(
argv
,
argv2
,
Flags
::
PrintTime
(
false
));
}
// Tests parsing --gtest_repeat=number
TEST_F
(
InitGoogleTestTest
,
Repeat
)
{
const
char
*
argv
[]
=
{
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment