/*
* Copyright © 2012 Linaro Limited
*
* This file is part of the glmark2 OpenGL (ES) 2.0 benchmark.
*
* glmark2 is free software: you can redistribute it and/or modify it under the
* terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* glmark2 is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* glmark2. If not, see <http://www.gnu.org/licenses/>.
*
* Authors:
* Tom Gall <tom.gall@linaro.org>
*/
#include <fstream>
#include "test-collection.h"
#include "default-tests.h"
#include "options.h"
#include "log.h"
#include "util.h"
/*
 * Destructor: releases every Benchmark owned by the collection.
 * The collection holds raw owning pointers in tests_, so the project
 * helper deletes each element and empties the vector.
 */
TestCollection::~TestCollection()
{
Util::dispose_pointer_vector(tests_);
}
/*
 * Appends one Benchmark to the collection for every description
 * string in @tests. The collection takes ownership of the created
 * Benchmark objects (released in the destructor).
 */
void
TestCollection::add(const std::vector<std::string> &tests)
{
typedef std::vector<std::string>::const_iterator DescIter;

for (DescIter desc = tests.begin(); desc != tests.end(); ++desc)
tests_.push_back(new Benchmark(*desc));
}
/*
 * Fills the collection according to the parsed command-line options:
 * first an optional annotation entry, then benchmarks given directly
 * on the command line, then benchmarks listed in files, and finally
 * the stock default set if nothing named an actual scene.
 */
void
TestCollection::populate_from_options()
{
if (Options::annotate) {
/* A single option-only entry that enables FPS/title display. */
std::vector<std::string> annotation_descs(1, ":show-fps=true:title=#info#");
add(annotation_descs);
}

if (!Options::benchmarks.empty())
add(Options::benchmarks);

if (!Options::benchmark_files.empty())
add_benchmarks_from_files();

/* Fall back to the built-in benchmark list when no normal scene
 * was requested by the user. */
if (!benchmarks_contain_normal_scenes())
add(DefaultTests::get());
}
bool
TestCollection::needs_decoration()
{
for (std::vector<Benchmark *>::const_iterator test_iter = tests_.begin();
test_iter != tests_.end();
test_iter++)
{
const Benchmark *bench = *test_iter;
if (bench->needs_decoration())
return true;
}
return false;
}
/*
 * Reads each file named in Options::benchmark_files and adds one
 * Benchmark per non-empty line. Files that cannot be opened are
 * reported via Log::error and skipped.
 */
void
TestCollection::add_benchmarks_from_files()
{
typedef std::vector<std::string>::const_iterator FileIter;

for (FileIter file = Options::benchmark_files.begin();
file != Options::benchmark_files.end();
++file)
{
std::ifstream input(file->c_str());
if (input.fail()) {
Log::error("Cannot open benchmark file %s\n",
file->c_str());
continue;
}

std::string description;
while (std::getline(input, description)) {
if (!description.empty())
tests_.push_back(new Benchmark(description));
}
}
}
bool
TestCollection::benchmarks_contain_normal_scenes()
{
for (std::vector<Benchmark *>::const_iterator test_iter = tests_.begin();
test_iter != tests_.end();
test_iter++)
{
const Benchmark *bench = *test_iter;
if (!bench->scene().name().empty())
return true;
}
return false;
}
/*
 * Reports the results of the benchmark run.
 * NOTE(review): currently an intentional no-op placeholder -- result
 * reporting is presumably handled elsewhere; confirm before relying
 * on this hook.
 */
void
TestCollection::report_results()
{
}