Introduce CSV reporter
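
Add a CSVReporter, selectable with --benchmark_format=csv, and move the
existing ConsoleReporter implementation out of reporter.cc into its own
console_reporter.cc translation unit.

A minimal sketch of exercising the new format (bench.cc and BM_StringCopy are
hypothetical names, and the usual BENCHMARK()/BENCHMARK_MAIN() registration
macros are assumed; none of this is part of the change itself):

    // bench.cc -- toy benchmark used only to illustrate the CSV output.
    #include <string>
    #include "benchmark/benchmark.h"

    static void BM_StringCopy(benchmark::State& state) {
      std::string src = "hello";
      while (state.KeepRunning()) {
        std::string copy(src);  // the work being measured
      }
    }
    BENCHMARK(BM_StringCopy);

    BENCHMARK_MAIN();

    // $ ./bench --benchmark_format=csv
    // name,iterations,real_time,cpu_time,bytes_per_second,items_per_second,label
    // BM_StringCopy,<iterations>,<real_time_ns>,<cpu_time_ns>,,,

Note that benchmark names and labels are written unquoted, so a name
containing a comma would break its CSV row.
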
diff --git a/include/benchmark/reporter.h b/include/benchmark/reporter.h
index 6aebf0c..bb4ccdf 100644
--- a/include/benchmark/reporter.h
+++ b/include/benchmark/reporter.h
@@ -109,5 +109,14 @@
   bool first_report_;
 };
 
+class CSVReporter : public BenchmarkReporter {
+public:
+  virtual bool ReportContext(const Context& context);
+  virtual void ReportRuns(const std::vector<Run>& reports);
+
+private:
+  void PrintRunData(const Run& report);
+};
+
 } // end namespace benchmark
 #endif // BENCHMARK_REPORTER_H_
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index 17fb42a..40cd9ff 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -2,8 +2,9 @@
 include_directories(${PROJECT_SOURCE_DIR}/src)
 
 # Define the source files
-set(SOURCE_FILES "benchmark.cc" "colorprint.cc" "commandlineflags.cc" "log.cc"
-                 "json_reporter.cc" "reporter.cc" "sleep.cc" "string_util.cc"
+set(SOURCE_FILES "benchmark.cc" "colorprint.cc" "commandlineflags.cc"
+                 "console_reporter.cc" "csv_reporter.cc" "json_reporter.cc"
+                 "log.cc" "reporter.cc" "sleep.cc" "string_util.cc"
                  "sysinfo.cc" "walltime.cc")
 # Determine the correct regular expression engine to use
 if(HAVE_STD_REGEX)
diff --git a/src/benchmark.cc b/src/benchmark.cc
index 01ef559..507fe51 100644
--- a/src/benchmark.cc
+++ b/src/benchmark.cc
@@ -58,7 +58,7 @@
 
 DEFINE_string(benchmark_format, "tabular",
               "The format to use for console output. Valid values are "
-              "'tabular' or 'json'.");
+              "'tabular', 'json', or 'csv'.");
 
 DEFINE_bool(color_print, true, "Enables colorized logging.");
 
@@ -804,9 +804,10 @@
   typedef std::unique_ptr<BenchmarkReporter> PtrType;
   if (FLAGS_benchmark_format == "tabular") {
     return PtrType(new ConsoleReporter);
-  }
-  else if (FLAGS_benchmark_format == "json") {
+  } else if (FLAGS_benchmark_format == "json") {
     return PtrType(new JSONReporter);
+  } else if (FLAGS_benchmark_format == "csv") {
+    return PtrType(new CSVReporter);
   } else {
     std::cerr << "Unexpected format: '" << FLAGS_benchmark_format << "'\n";
     std::exit(1);
@@ -871,7 +872,8 @@
     }
   }
   if (FLAGS_benchmark_format != "tabular" &&
-      FLAGS_benchmark_format != "json") {
+      FLAGS_benchmark_format != "json" &&
+      FLAGS_benchmark_format != "csv") {
     PrintUsageAndExit();
   }
 }
diff --git a/src/console_reporter.cc b/src/console_reporter.cc
new file mode 100644
index 0000000..7a99dfb
--- /dev/null
+++ b/src/console_reporter.cc
@@ -0,0 +1,118 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/reporter.h"
+
+#include <cstdint>
+#include <iostream>
+#include <string>
+#include <vector>
+
+#include "check.h"
+#include "colorprint.h"
+#include "string_util.h"
+#include "walltime.h"
+
+namespace benchmark {
+
+bool ConsoleReporter::ReportContext(const Context& context) {
+  name_field_width_ = context.name_field_width;
+
+  std::cerr << "Run on (" << context.num_cpus << " X " << context.mhz_per_cpu
+            << " MHz CPU " << ((context.num_cpus > 1) ? "s" : "") << "\n";
+
+  std::cerr << LocalDateTimeString() << "\n";
+
+  if (context.cpu_scaling_enabled) {
+    std::cerr << "***WARNING*** CPU scaling is enabled, the benchmark "
+                 "real time measurements may be noisy and will incure extra "
+                 "overhead.\n";
+  }
+
+#ifndef NDEBUG
+  std::cerr << "Build Type: DEBUG\n";
+#endif
+
+  int output_width =
+      fprintf(stdout,
+              "%-*s %10s %10s %10s\n",
+              static_cast<int>(name_field_width_),
+              "Benchmark",
+              "Time(ns)", "CPU(ns)",
+              "Iterations");
+  std::cout << std::string(output_width - 1, '-') << "\n";
+
+  return true;
+}
+
+void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
+  if (reports.empty()) {
+    return;
+  }
+
+  for (Run const& run : reports) {
+    CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
+    PrintRunData(run);
+  }
+
+  if (reports.size() < 2) {
+    // We don't report aggregated data if there was a single run.
+    return;
+  }
+
+  Run mean_data;
+  Run stddev_data;
+  BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data);
+
+  // Output using PrintRun.
+  PrintRunData(mean_data);
+  PrintRunData(stddev_data);
+}
+
+void ConsoleReporter::PrintRunData(const Run& result) {
+  // Format bytes per second
+  std::string rate;
+  if (result.bytes_per_second > 0) {
+    rate = StrCat(" ", HumanReadableNumber(result.bytes_per_second), "B/s");
+  }
+
+  // Format items per second
+  std::string items;
+  if (result.items_per_second > 0) {
+    items = StrCat(" ", HumanReadableNumber(result.items_per_second),
+                   " items/s");
+  }
+
+  double const multiplier = 1e9; // nano second multiplier
+  ColorPrintf(COLOR_GREEN, "%-*s ",
+              name_field_width_, result.benchmark_name.c_str());
+  if (result.iterations == 0) {
+    ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ",
+                result.real_accumulated_time * multiplier,
+                result.cpu_accumulated_time * multiplier);
+  } else {
+    ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ",
+                (result.real_accumulated_time * multiplier) /
+                    (static_cast<double>(result.iterations)),
+                (result.cpu_accumulated_time * multiplier) /
+                    (static_cast<double>(result.iterations)));
+  }
+  ColorPrintf(COLOR_CYAN, "%10lld", result.iterations);
+  ColorPrintf(COLOR_DEFAULT, "%*s %*s %s\n",
+              13, rate.c_str(),
+              18, items.c_str(),
+              result.report_label.c_str());
+}
+
+}  // end namespace benchmark
diff --git a/src/csv_reporter.cc b/src/csv_reporter.cc
new file mode 100644
index 0000000..ed0f4fa
--- /dev/null
+++ b/src/csv_reporter.cc
@@ -0,0 +1,93 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "benchmark/reporter.h"
+
+#include <cstdint>
+#include <iostream>
+#include <string>
+#include <vector>
+
+#include "string_util.h"
+#include "walltime.h"
+
+namespace benchmark {
+
+bool CSVReporter::ReportContext(const Context& context) {
+  std::cerr << "Run on (" << context.num_cpus << " X " << context.mhz_per_cpu
+            << " MHz CPU " << ((context.num_cpus > 1) ? "s" : "") << "\n";
+
+  std::cerr << LocalDateTimeString() << "\n";
+
+  if (context.cpu_scaling_enabled) {
+    std::cerr << "***WARNING*** CPU scaling is enabled, the benchmark "
+                 "real time measurements may be noisy and will incure extra "
+                 "overhead.\n";
+  }
+
+#ifndef NDEBUG
+  std::cerr << "Build Type: DEBUG\n";
+#endif
+  std::cout << "name,iterations,real_time,cpu_time,bytes_per_second,"
+               "items_per_second,label\n";
+  return true;
+}
+
+void CSVReporter::ReportRuns(std::vector<Run> const& reports) {
+  if (reports.empty()) {
+    return;
+  }
+
+  std::vector<Run> reports_cp = reports;
+  if (reports.size() >= 2) {
+    Run mean_data;
+    Run stddev_data;
+    BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data);
+    reports_cp.push_back(mean_data);
+    reports_cp.push_back(stddev_data);
+  }
+  for (auto it = reports_cp.begin(); it != reports_cp.end(); ++it) {
+    PrintRunData(*it);
+  }
+}
+
+void CSVReporter::PrintRunData(Run const& run) {
+  double const multiplier = 1e9;  // nano second multiplier
+  double cpu_time = run.cpu_accumulated_time * multiplier;
+  double real_time = run.real_accumulated_time * multiplier;
+  if (run.iterations != 0) {
+    real_time = real_time / static_cast<double>(run.iterations);
+    cpu_time = cpu_time / static_cast<double>(run.iterations);
+  }
+
+  std::cout << run.benchmark_name << ",";
+  std::cout << run.iterations << ",";
+  std::cout << real_time << ",";
+  std::cout << cpu_time << ",";
+
+  if (run.bytes_per_second > 0.0) {
+    std::cout << run.bytes_per_second;
+  }
+  std::cout << ",";
+  if (run.items_per_second > 0.0) {
+    std::cout << run.items_per_second;
+  }
+  std::cout << ",";
+  if (!run.report_label.empty()) {
+    std::cout << run.report_label;
+  }
+  std::cout << '\n';
+}
+
+}  // end namespace benchmark
diff --git a/src/reporter.cc b/src/reporter.cc
index d701ca8..5d6e722 100644
--- a/src/reporter.cc
+++ b/src/reporter.cc
@@ -14,17 +14,11 @@
 
 #include "benchmark/reporter.h"
 
-#include <cstdio>
 #include <cstdlib>
-#include <iostream>
-#include <string>
 #include <vector>
 
 #include "check.h"
-#include "colorprint.h"
 #include "stat.h"
-#include "string_util.h"
-#include "walltime.h"
 
 namespace benchmark {
 
@@ -89,98 +83,4 @@
 BenchmarkReporter::~BenchmarkReporter() {
 }
 
-bool ConsoleReporter::ReportContext(const Context& context) {
-  name_field_width_ = context.name_field_width;
-
-  fprintf(stdout,
-          "Run on (%d X %0.0f MHz CPU%s)\n",
-          context.num_cpus,
-          context.mhz_per_cpu,
-          (context.num_cpus > 1) ? "s" : "");
-
-  std::string walltime_str = LocalDateTimeString();
-  fprintf(stdout, "%s\n", walltime_str.c_str());
-
-  if (context.cpu_scaling_enabled) {
-    fprintf(stdout, "***WARNING*** CPU scaling is enabled, the benchmark "
-                    "real time measurements may be noisy and will incure extra "
-                    "overhead.\n");
-  }
-
-#ifndef NDEBUG
-  fprintf(stdout, "Build Type: DEBUG\n");
-#endif
-
-  int output_width =
-      fprintf(stdout,
-              "%-*s %10s %10s %10s\n",
-              static_cast<int>(name_field_width_),
-              "Benchmark",
-              "Time(ns)", "CPU(ns)",
-              "Iterations");
-  fprintf(stdout, "%s\n", std::string(output_width - 1, '-').c_str());
-
-  return true;
-}
-
-void ConsoleReporter::ReportRuns(const std::vector<Run>& reports) {
-  if (reports.empty()) {
-    return;
-  }
-
-  for (Run const& run : reports) {
-    CHECK_EQ(reports[0].benchmark_name, run.benchmark_name);
-    PrintRunData(run);
-  }
-
-  if (reports.size() < 2) {
-    // We don't report aggregated data if there was a single run.
-    return;
-  }
-
-  Run mean_data;
-  Run stddev_data;
-  BenchmarkReporter::ComputeStats(reports, &mean_data, &stddev_data);
-
-  // Output using PrintRun.
-  PrintRunData(mean_data);
-  PrintRunData(stddev_data);
-  fprintf(stdout, "\n");
-}
-
-void ConsoleReporter::PrintRunData(const Run& result) {
-  // Format bytes per second
-  std::string rate;
-  if (result.bytes_per_second > 0) {
-    rate = StrCat(" ", HumanReadableNumber(result.bytes_per_second), "B/s");
-  }
-
-  // Format items per second
-  std::string items;
-  if (result.items_per_second > 0) {
-    items = StrCat(" ", HumanReadableNumber(result.items_per_second),
-                   " items/s");
-  }
-
-  double const multiplier = 1e9; // nano second multiplier
-  ColorPrintf(COLOR_GREEN, "%-*s ",
-              name_field_width_, result.benchmark_name.c_str());
-  if (result.iterations == 0) {
-    ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ",
-                result.real_accumulated_time * multiplier,
-                result.cpu_accumulated_time * multiplier);
-  } else {
-    ColorPrintf(COLOR_YELLOW, "%10.0f %10.0f ",
-                (result.real_accumulated_time * multiplier) /
-                    (static_cast<double>(result.iterations)),
-                (result.cpu_accumulated_time * multiplier) /
-                    (static_cast<double>(result.iterations)));
-  }
-  ColorPrintf(COLOR_CYAN, "%10lld", result.iterations);
-  ColorPrintf(COLOR_DEFAULT, "%*s %*s %s\n",
-              13, rate.c_str(),
-              18, items.c_str(),
-              result.report_label.c_str());
-}
-
 } // end namespace benchmark