=== added file 'README.android'
@@ -0,0 +1,23 @@
+The Android version of glmark2 can accept "command line" arguments from either
+an extra intent key or a file. If arguments are specified in an intent key, the
+file is disregarded.
+
+Arguments from an extra intent key
+----------------------------------
+
+The 'args' extra intent key is used to specify arguments. For example:
+
+am start -a android.intent.action.MAIN \
+ -n org.linaro.glmark2/org.linaro.glmark2.Glmark2Activity \
+ -e args '-b :duration=2 -b texture -f /path/file --debug'
+
+Arguments from a file
+---------------------
+
+If the 'args' intent key is not defined, the contents of the file
+'/data/glmark2/args' (if present) are used as command line arguments. The
+arguments can be placed on a single line or split across multiple lines. For example:
+
+-b :duration=2 -b texture
+-f /path/file
+--debug
=== modified file 'android/src/org/linaro/glmark2/Glmark2SurfaceView.java'
@@ -168,11 +168,12 @@
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
- nativeInit(mView.getActivity().getAssets());
+ String args = mView.getActivity().getIntent().getStringExtra("args");
+ nativeInit(mView.getActivity().getAssets(), args);
}
private Glmark2SurfaceView mView;
- private static native void nativeInit(AssetManager assetManager);
+ private static native void nativeInit(AssetManager assetManager, String args);
private static native void nativeResize(int w, int h);
private static native boolean nativeRender();
private static native void nativeDone();
=== modified file 'src/android.cpp'
@@ -24,6 +24,8 @@
#include <assert.h>
#include <jni.h>
#include <vector>
+#include <string>
+#include <fstream>
#include "canvas-android.h"
#include "benchmark.h"
#include "options.h"
@@ -33,45 +35,136 @@
#include "main-loop.h"
static Canvas *g_canvas;
-static std::vector<Benchmark *> g_benchmarks;
static MainLoop *g_loop;
class MainLoopAndroid : public MainLoop
{
public:
- MainLoopAndroid(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
- MainLoop(canvas, benchmarks) {}
-
- virtual void after_scene_setup() {}
-
- virtual void before_scene_teardown()
- {
- Log::info("%s FPS: %u", scene_->info_string().c_str(),
- scene_->average_fps());
- }
-};
-
-static void
-add_default_benchmarks(std::vector<Benchmark *> &benchmarks)
-{
- const std::vector<std::string> &default_benchmarks = DefaultBenchmarks::get();
-
- for (std::vector<std::string>::const_iterator iter = default_benchmarks.begin();
- iter != default_benchmarks.end();
- iter++)
- {
- benchmarks.push_back(new Benchmark(*iter));
- }
+ MainLoopAndroid(Canvas &canvas) :
+ MainLoop(canvas) {}
+
+ virtual void after_scene_setup() {}
+
+ virtual void before_scene_teardown()
+ {
+ Log::info("%s FPS: %u", scene_->info_string().c_str(),
+ scene_->average_fps());
+ }
+};
+
+class MainLoopDecorationAndroid : public MainLoopDecoration
+{
+public:
+ MainLoopDecorationAndroid(Canvas &canvas) :
+ MainLoopDecoration(canvas) {}
+
+ virtual void after_scene_setup() {}
+
+ virtual void before_scene_teardown()
+ {
+ Log::info("%s FPS: %u", scene_->info_string().c_str(),
+ scene_->average_fps());
+ }
+};
+
+/**
+ * Converts a std::vector of argument strings to an (argc, argv) pair.
+ */
+static void
+arg_vector_to_argv(const std::vector<std::string> &arguments, int &argc, char **&argv)
+{
+ argc = arguments.size() + 1;
+ argv = new char* [argc];
+ argv[0] = strdup("glmark2");
+
+ for (unsigned int i = 0; i < arguments.size(); i++)
+ argv[i + 1] = strdup(arguments[i].c_str());
+}
+
+/**
+ * Populates the command line arguments from the arguments file.
+ *
+ * @param argc the number of arguments
+ * @param argv the argument array
+ */
+static void
+get_args_from_file(const std::string &arguments_file, int &argc, char **&argv)
+{
+ std::vector<std::string> arguments;
+ std::ifstream ifs(arguments_file.c_str());
+
+ if (!ifs.fail()) {
+ std::string line;
+ while (getline(ifs, line)) {
+ if (!line.empty())
+ Util::split(line, ' ', arguments);
+ }
+ }
+
+ arg_vector_to_argv(arguments, argc, argv);
+}
+
+/**
+ * Populates the command line arguments from the supplied argument string.
+ *
+ * @param argc the number of arguments
+ * @param argv the argument array
+ */
+static void
+get_args_from_string(const std::string &args_str, int &argc, char **&argv)
+{
+ std::vector<std::string> arguments;
+ Util::split(args_str, ' ', arguments);
+
+ arg_vector_to_argv(arguments, argc, argv);
+}
+
+/**
+ * Releases the command line arguments.
+ *
+ * @param argc the number of arguments
+ * @param argv the argument array
+ */
+static void
+release_args(int argc, char **argv)
+{
+ for (int i = 0; i < argc; i++)
+ free(argv[i]);
+
+ delete[] argv;
}
void
Java_org_linaro_glmark2_Glmark2Renderer_nativeInit(JNIEnv* env, jclass clazz,
- jobject asset_manager)
+ jobject asset_manager,
+ jstring args)
{
static_cast<void>(clazz);
-
+ static const std::string arguments_file("/data/glmark2/args");
+ int argc = 0;
+ char **argv = 0;
+
+ /* Load arguments from argument string or arguments file and parse them */
+ if (args) {
+ if (env->GetStringUTFLength(args) > 0) {
+ const char *args_c_str = env->GetStringUTFChars(args, 0);
+ if (args_c_str) {
+ get_args_from_string(std::string(args_c_str), argc, argv);
+ env->ReleaseStringUTFChars(args, args_c_str);
+ }
+ }
+ }
+ else {
+ get_args_from_file(arguments_file, argc, argv);
+ }
+
+ Options::parse_args(argc, argv);
+ release_args(argc, argv);
+
+ /* Force reuse of EGL/GL context */
Options::reuse_context = true;
- Log::init("glmark2", false);
+
+ Log::init("glmark2", Options::show_debug);
Util::android_set_asset_manager(AAssetManager_fromJava(env, asset_manager));
g_canvas = new CanvasAndroid(100, 100);
@@ -93,8 +186,12 @@
Benchmark::register_scene(*new SceneDesktop(*g_canvas));
Benchmark::register_scene(*new SceneBuffer(*g_canvas));
- add_default_benchmarks(g_benchmarks);
- g_loop = new MainLoopAndroid(*g_canvas, g_benchmarks);
+ if (Options::show_fps)
+ g_loop = new MainLoopDecorationAndroid(*g_canvas);
+ else
+ g_loop = new MainLoopAndroid(*g_canvas);
+
+ g_loop->add_benchmarks();
}
void
@@ -135,7 +232,7 @@
static JNINativeMethod glmark2_native_methods[] = {
{
"nativeInit",
- "(Landroid/content/res/AssetManager;)V",
+ "(Landroid/content/res/AssetManager;Ljava/lang/String;)V",
reinterpret_cast<void*>(Java_org_linaro_glmark2_Glmark2Renderer_nativeInit)
},
{
=== modified file 'src/main-loop.cpp'
@@ -23,20 +23,27 @@
#include "main-loop.h"
#include "util.h"
#include "log.h"
+#include "default-benchmarks.h"
#include <string>
#include <sstream>
+#include <fstream>
/************
* MainLoop *
************/
-MainLoop::MainLoop(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
- canvas_(canvas), benchmarks_(benchmarks)
+MainLoop::MainLoop(Canvas &canvas) :
+ canvas_(canvas)
{
reset();
}
+MainLoop::~MainLoop()
+{
+ Util::dispose_pointer_vector(benchmarks_);
+}
+
void
MainLoop::reset()
{
@@ -46,6 +53,28 @@
bench_iter_ = benchmarks_.begin();
}
+void
+MainLoop::add_benchmarks()
+{
+ if (!Options::benchmarks.empty())
+ add_custom_benchmarks();
+
+ if (!Options::benchmark_files.empty())
+ add_custom_benchmarks_from_files();
+
+ if (!benchmarks_contain_normal_scenes())
+ add_default_benchmarks();
+
+ bench_iter_ = benchmarks_.begin();
+}
+
+void
+MainLoop::add_benchmarks(const std::vector<Benchmark *> &benchmarks)
+{
+ benchmarks_.insert(benchmarks_.end(), benchmarks.begin(), benchmarks.end());
+ bench_iter_ = benchmarks_.begin();
+}
+
unsigned int
MainLoop::score()
{
@@ -136,12 +165,77 @@
Log::info(format.c_str(), scene_->average_fps());
}
+void
+MainLoop::add_default_benchmarks()
+{
+ const std::vector<std::string> &default_benchmarks = DefaultBenchmarks::get();
+
+ for (std::vector<std::string>::const_iterator iter = default_benchmarks.begin();
+ iter != default_benchmarks.end();
+ iter++)
+ {
+ benchmarks_.push_back(new Benchmark(*iter));
+ }
+}
+
+void
+MainLoop::add_custom_benchmarks()
+{
+ for (std::vector<std::string>::const_iterator iter = Options::benchmarks.begin();
+ iter != Options::benchmarks.end();
+ iter++)
+ {
+ benchmarks_.push_back(new Benchmark(*iter));
+ }
+}
+
+void
+MainLoop::add_custom_benchmarks_from_files()
+{
+ for (std::vector<std::string>::const_iterator iter = Options::benchmark_files.begin();
+ iter != Options::benchmark_files.end();
+ iter++)
+ {
+ std::ifstream ifs(iter->c_str());
+
+ if (!ifs.fail()) {
+ std::string line;
+
+ while (getline(ifs, line)) {
+ if (!line.empty())
+ benchmarks_.push_back(new Benchmark(line));
+ }
+ }
+ else {
+ Log::error("Cannot open benchmark file %s\n",
+ iter->c_str());
+ }
+
+ }
+}
+
+bool
+MainLoop::benchmarks_contain_normal_scenes()
+{
+ for (std::vector<Benchmark *>::const_iterator bench_iter = benchmarks_.begin();
+ bench_iter != benchmarks_.end();
+ bench_iter++)
+ {
+ const Benchmark *bench = *bench_iter;
+ if (!bench->scene().name().empty())
+ return true;
+ }
+
+ return false;
+}
+
+
/**********************
* MainLoopDecoration *
**********************/
-MainLoopDecoration::MainLoopDecoration(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
- MainLoop(canvas, benchmarks), fps_renderer_(0), last_fps_(0)
+MainLoopDecoration::MainLoopDecoration(Canvas &canvas) :
+ MainLoop(canvas), fps_renderer_(0), last_fps_(0)
{
}
@@ -194,8 +288,8 @@
* MainLoopValidation *
**********************/
-MainLoopValidation::MainLoopValidation(Canvas &canvas, const std::vector<Benchmark *> &benchmarks) :
- MainLoop(canvas, benchmarks)
+MainLoopValidation::MainLoopValidation(Canvas &canvas) :
+ MainLoop(canvas)
{
}
=== modified file 'src/main-loop.h'
@@ -33,9 +33,9 @@
class MainLoop
{
public:
- MainLoop(Canvas &canvas, const std::vector<Benchmark *> &benchmarks);
+ MainLoop(Canvas &canvas);
- virtual ~MainLoop() {}
+ virtual ~MainLoop();
/**
* Resets the main loop.
@@ -45,6 +45,19 @@
*/
void reset();
+ /**
+ * Adds benchmarks.
+ *
+ * This method takes into account benchmark related command line options
+ * to decide which benchmarks to add.
+ */
+ void add_benchmarks();
+
+ /**
+ * Adds user defined benchmarks.
+ */
+ void add_benchmarks(const std::vector<Benchmark *> &benchmarks);
+
/**
* Gets the current total benchmarking score.
*/
@@ -80,11 +93,17 @@
protected:
Canvas &canvas_;
Scene *scene_;
- const std::vector<Benchmark *> &benchmarks_;
+ std::vector<Benchmark *> benchmarks_;
unsigned int score_;
unsigned int benchmarks_run_;
std::vector<Benchmark *>::const_iterator bench_iter_;
+
+private:
+ void add_default_benchmarks();
+ void add_custom_benchmarks();
+ void add_custom_benchmarks_from_files();
+ bool benchmarks_contain_normal_scenes();
};
/**
@@ -93,7 +112,7 @@
class MainLoopDecoration : public MainLoop
{
public:
- MainLoopDecoration(Canvas &canvas, const std::vector<Benchmark *> &benchmarks);
+ MainLoopDecoration(Canvas &canvas);
virtual ~MainLoopDecoration();
virtual void draw();
@@ -112,7 +131,7 @@
class MainLoopValidation : public MainLoop
{
public:
- MainLoopValidation(Canvas &canvas, const std::vector<Benchmark *> &benchmarks);
+ MainLoopValidation(Canvas &canvas);
virtual void draw();
virtual void before_scene_teardown();
=== modified file 'src/main.cpp'
@@ -46,55 +46,6 @@
using std::string;
void
-add_default_benchmarks(vector<Benchmark *> &benchmarks)
-{
- const vector<string> &default_benchmarks = DefaultBenchmarks::get();
-
- for (vector<string>::const_iterator iter = default_benchmarks.begin();
- iter != default_benchmarks.end();
- iter++)
- {
- benchmarks.push_back(new Benchmark(*iter));
- }
-}
-
-void
-add_custom_benchmarks(vector<Benchmark *> &benchmarks)
-{
- for (vector<string>::const_iterator iter = Options::benchmarks.begin();
- iter != Options::benchmarks.end();
- iter++)
- {
- benchmarks.push_back(new Benchmark(*iter));
- }
-}
-
-void
-add_custom_benchmarks_from_files(vector<Benchmark *> &benchmarks)
-{
- for (vector<string>::const_iterator iter = Options::benchmark_files.begin();
- iter != Options::benchmark_files.end();
- iter++)
- {
- std::ifstream ifs(iter->c_str());
-
- if (!ifs.fail()) {
- std::string line;
-
- while (getline(ifs, line)) {
- if (!line.empty())
- benchmarks.push_back(new Benchmark(line));
- }
- }
- else {
- Log::error("Cannot open benchmark file %s\n",
- iter->c_str());
- }
-
- }
-}
-
-void
add_and_register_scenes(vector<Scene*>& scenes, Canvas& canvas)
{
scenes.push_back(new SceneDefaultOptions(canvas));
@@ -118,21 +69,6 @@
}
}
-static bool
-benchmarks_contain_normal_scenes(vector<Benchmark *> &benchmarks)
-{
- for (vector<Benchmark *>::const_iterator bench_iter = benchmarks.begin();
- bench_iter != benchmarks.end();
- bench_iter++)
- {
- const Benchmark *bench = *bench_iter;
- if (!bench->scene().name().empty())
- return true;
- }
-
- return false;
-}
-
static void
list_scenes()
{
@@ -173,12 +109,13 @@
}
void
-do_benchmark(Canvas &canvas, vector<Benchmark *> &benchmarks)
+do_benchmark(Canvas &canvas)
{
- MainLoop loop_normal(canvas, benchmarks);
- MainLoopDecoration loop_decoration(canvas, benchmarks);
+ MainLoop loop_normal(canvas);
+ MainLoopDecoration loop_decoration(canvas);
MainLoop &loop(Options::show_fps ? loop_decoration : loop_normal);
+ loop.add_benchmarks();
while (loop.step());
@@ -188,9 +125,10 @@
}
void
-do_validation(Canvas &canvas, vector<Benchmark *> &benchmarks)
+do_validation(Canvas &canvas)
{
- MainLoopValidation loop(canvas, benchmarks);
+ MainLoopValidation loop(canvas);
+ loop.add_benchmarks();
while (loop.step());
}
@@ -241,17 +179,6 @@
return 1;
}
- // Add the benchmarks to run
- vector<Benchmark *> benchmarks;
-
- if (!Options::benchmarks.empty())
- add_custom_benchmarks(benchmarks);
- else if (!Options::benchmark_files.empty())
- add_custom_benchmarks_from_files(benchmarks);
-
- if (!benchmarks_contain_normal_scenes(benchmarks))
- add_default_benchmarks(benchmarks);
-
Log::info("=======================================================\n");
Log::info(" glmark2 %s\n", GLMARK_VERSION);
Log::info("=======================================================\n");
@@ -261,11 +188,10 @@
canvas.visible(true);
if (Options::validate)
- do_validation(canvas, benchmarks);
+ do_validation(canvas);
else
- do_benchmark(canvas, benchmarks);
+ do_benchmark(canvas);
- Util::dispose_pointer_vector(benchmarks);
Util::dispose_pointer_vector(scenes);
return 0;