#!/usr/bin/env python3

# The MoveIt Motion Planning Framework for ROS 2.
# moveit_benchmark_statistics.py

# Author: Mark Moll, Ioan Sucan, Luis G. Torres
39 from sys import argv, exit
40 from os.path import basename, splitext
41 import sqlite3
42 import datetime
43 import matplotlib
44 
45 matplotlib.use("pdf")
46 from matplotlib import __version__ as matplotlibversion
47 from matplotlib.backends.backend_pdf import PdfPages
48 import matplotlib.pyplot as plt
49 import numpy as np
50 from math import floor
51 from optparse import OptionParser, OptionGroup
52 
53 
54 # Given a text line, split it into tokens (by space) and return the token
55 # at the desired index. Additionally, test that some expected tokens exist.
56 # Return None if they do not.
def readLogValue(filevar, desired_token_index, expected_tokens):
    """Split the next line of *filevar* into whitespace-separated tokens and
    return the token at *desired_token_index*.

    *expected_tokens* maps token indices to required values.  If any
    expectation is not met -- including the line being too short to hold the
    expected token -- the read is undone (the file position is restored) and
    None is returned.
    """
    start_pos = filevar.tell()
    tokens = filevar.readline().split()
    for token_index, expected in expected_tokens.items():
        # BUG FIX: the original indexed tokens[token_index] unconditionally,
        # raising IndexError on short/empty lines instead of undoing the read.
        if token_index >= len(tokens) or token_index < -len(tokens):
            filevar.seek(start_pos)
            return None
        if tokens[token_index] != expected:
            # undo the read, if we failed to parse.
            filevar.seek(start_pos)
            return None
    return tokens[desired_token_index]
66 
67 
def readOptionalLogValue(filevar, desired_token_index, expected_tokens=None):
    """Like readLogValue(), for values that may legitimately be absent.

    Returns None when the expectations are not met.  The default for
    *expected_tokens* is None (resolved to an empty dict) rather than a
    mutable `{}` default argument.
    """
    if expected_tokens is None:
        expected_tokens = {}
    return readLogValue(filevar, desired_token_index, expected_tokens)
70 
71 
def readRequiredLogValue(name, filevar, desired_token_index, expected_tokens=None):
    """Like readLogValue(), but the value is mandatory.

    Raises Exception mentioning *name* when the value cannot be read.
    """
    if expected_tokens is None:
        expected_tokens = {}
    result = readLogValue(filevar, desired_token_index, expected_tokens)
    if result is None:
        raise Exception("Unable to read " + name)
    return result
77 
78 
def ensurePrefix(line, prefix):
    """Return *line* unchanged; raise if it does not begin with *prefix*."""
    if line.startswith(prefix):
        return line
    raise Exception("Expected prefix " + prefix + " was not found")
83 
84 
def readOptionalMultilineValue(filevar):
    """Read an optional multi-line value delimited by "<<<|" and "|>>>" lines.

    Returns the text between the delimiters.  If the next line is not the
    opening delimiter, the file position is restored and None is returned.
    Raises Exception if EOF is hit before the closing delimiter.
    """
    start_pos = filevar.tell()
    line = filevar.readline()
    if not line.startswith("<<<|"):
        filevar.seek(start_pos)
        return None
    value = ""
    line = filevar.readline()
    while not line.startswith("|>>>"):
        value = value + line
        line = filevar.readline()
        # BUG FIX: readline() returns "" at EOF, never None, so the original
        # `line == None` test could not fire and a missing terminator looped
        # forever.
        if not line:
            raise Exception("Expected token |>>> missing")
    return value
99 
100 
def readRequiredMultilineValue(filevar):
    """Read a mandatory multi-line value delimited by "<<<|" and "|>>>" lines.

    Unlike readOptionalMultilineValue(), a missing opening delimiter raises
    (via ensurePrefix) instead of returning None.  Raises Exception if EOF is
    hit before the closing delimiter.
    """
    ensurePrefix(filevar.readline(), "<<<|")
    value = ""
    line = filevar.readline()
    while not line.startswith("|>>>"):
        value = value + line
        line = filevar.readline()
        # BUG FIX: readline() returns "" at EOF, never None, so the original
        # `line == None` test was dead code and the loop never terminated on
        # truncated input.
        if not line:
            raise Exception("Expected token |>>> missing")
    return value
111 
112 
def readBenchmarkLog(dbname, filenames):
    """Parse benchmark log files and store the parsed data in a sqlite3 database."""

    def isInvalidValue(value):
        # Empty fields and non-finite numbers are stored as NULL.
        return len(value) == 0 or value in ["nan", "-nan", "inf", "-inf"]

    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute("PRAGMA FOREIGN_KEYS = ON")

    # create all tables if they don't already exist
    c.executescript(
        """CREATE TABLE IF NOT EXISTS experiments
        (id INTEGER PRIMARY KEY ON CONFLICT REPLACE AUTOINCREMENT, name VARCHAR(512),
        totaltime REAL, timelimit REAL, memorylimit REAL, runcount INTEGER,
        version VARCHAR(128), hostname VARCHAR(1024), cpuinfo TEXT,
        date DATETIME, seed INTEGER, setup TEXT);
        CREATE TABLE IF NOT EXISTS plannerConfigs
        (id INTEGER PRIMARY KEY AUTOINCREMENT,
        name VARCHAR(512) NOT NULL, settings TEXT);
        CREATE TABLE IF NOT EXISTS enums
        (name VARCHAR(512), value INTEGER, description TEXT,
        PRIMARY KEY (name, value));
        CREATE TABLE IF NOT EXISTS runs
        (id INTEGER PRIMARY KEY AUTOINCREMENT, experimentid INTEGER, plannerid INTEGER,
        FOREIGN KEY (experimentid) REFERENCES experiments(id) ON DELETE CASCADE,
        FOREIGN KEY (plannerid) REFERENCES plannerConfigs(id) ON DELETE CASCADE);
        CREATE TABLE IF NOT EXISTS progress
        (runid INTEGER, time REAL, PRIMARY KEY (runid, time),
        FOREIGN KEY (runid) REFERENCES runs(id) ON DELETE CASCADE)"""
    )

    # add placeholder entry for all_experiments
    allExperimentsName = "all_experiments"
    allExperimentsValues = {
        "totaltime": 0.0,
        "timelimit": 0.0,
        "memorylimit": 0.0,
        "runcount": 0,
        "version": "0.0.0",
        "hostname": "",
        "cpuinfo": "",
        "date": 0,
        "seed": 0,
        "setup": "",
    }
    addAllExperiments = len(filenames) > 0
    if addAllExperiments:
        c.execute(
            "INSERT INTO experiments VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
            (None, allExperimentsName) + tuple(allExperimentsValues.values()),
        )
        allExperimentsId = c.lastrowid

    for fileIndex, filename in enumerate(filenames):
        print("Processing " + filename)
        logfile = open(filename, "r")
        start_pos = logfile.tell()
        libname = readOptionalLogValue(logfile, 0, {1: "version"})
        if libname is None:
            libname = "OMPL"
        logfile.seek(start_pos)
        version = readOptionalLogValue(logfile, -1, {1: "version"})
        if version is None:
            # set the version number to make Planner Arena happy
            version = "0.0.0"
        version = " ".join([libname, version])
        expname = readRequiredLogValue(
            "experiment name", logfile, -1, {0: "Experiment"}
        )
        hostname = readRequiredLogValue("hostname", logfile, -1, {0: "Running"})
        date = " ".join(ensurePrefix(logfile.readline(), "Starting").split()[2:])
        expsetup = readRequiredMultilineValue(logfile)
        cpuinfo = readOptionalMultilineValue(logfile)
        rseed = int(
            readRequiredLogValue("random seed", logfile, 0, {-2: "random", -1: "seed"})
        )
        timelimit = float(
            readRequiredLogValue(
                "time limit", logfile, 0, {-3: "seconds", -2: "per", -1: "run"}
            )
        )
        memorylimit = float(
            readRequiredLogValue(
                "memory limit", logfile, 0, {-3: "MB", -2: "per", -1: "run"}
            )
        )
        nrrunsOrNone = readOptionalLogValue(
            logfile, 0, {-3: "runs", -2: "per", -1: "planner"}
        )
        nrruns = -1
        if nrrunsOrNone is not None:
            nrruns = int(nrrunsOrNone)
            allExperimentsValues["runcount"] += nrruns
        totaltime = float(
            readRequiredLogValue(
                "total time", logfile, 0, {-3: "collect", -2: "the", -1: "data"}
            )
        )
        # fill in fields of all_experiments
        allExperimentsValues["totaltime"] += totaltime
        # BUG FIX: these maxima were computed against totaltime instead of the
        # per-file memory/time limits (copy-paste error).
        allExperimentsValues["memorylimit"] = max(
            allExperimentsValues["memorylimit"], memorylimit
        )
        allExperimentsValues["timelimit"] = max(
            allExperimentsValues["timelimit"], timelimit
        )
        # copy the fields of the first file to all_experiments so that they are not empty
        if fileIndex == 0:
            allExperimentsValues["version"] = version
            allExperimentsValues["date"] = date
            allExperimentsValues["setup"] = expsetup
            allExperimentsValues["hostname"] = hostname
            allExperimentsValues["cpuinfo"] = cpuinfo
        numEnums = 0
        numEnumsOrNone = readOptionalLogValue(logfile, 0, {-2: "enum"})
        if numEnumsOrNone is not None:
            numEnums = int(numEnumsOrNone)
        for _ in range(numEnums):
            enum = logfile.readline()[:-1].split("|")
            # parameterized query instead of string interpolation
            c.execute("SELECT * FROM enums WHERE name IS ?", (enum[0],))
            if c.fetchone() is None:
                for j in range(len(enum) - 1):
                    c.execute(
                        "INSERT INTO enums VALUES (?,?,?)", (enum[0], j, enum[j + 1])
                    )
        c.execute(
            "INSERT INTO experiments VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
            (
                None,
                expname,
                totaltime,
                timelimit,
                memorylimit,
                nrruns,
                version,
                hostname,
                cpuinfo,
                date,
                rseed,
                expsetup,
            ),
        )
        experimentId = c.lastrowid
        numPlanners = int(
            readRequiredLogValue("planner count", logfile, 0, {-1: "planners"})
        )
        for _ in range(numPlanners):
            plannerName = logfile.readline()[:-1]
            print("Parsing data for " + plannerName)

            # read common data for planner
            numCommon = int(logfile.readline().split()[0])
            settings = ""
            for _ in range(numCommon):
                settings = settings + logfile.readline() + ";"

            # find planner id
            c.execute(
                "SELECT id FROM plannerConfigs WHERE (name=? AND settings=?)",
                (plannerName, settings),
            )
            p = c.fetchone()
            if p is None:
                c.execute(
                    "INSERT INTO plannerConfigs VALUES (?,?,?)",
                    (None, plannerName, settings),
                )
                plannerId = c.lastrowid
            else:
                plannerId = p[0]

            # get current column names
            c.execute("PRAGMA table_info(runs)")
            columnNames = [col[1] for col in c.fetchall()]

            # read properties and add columns as necessary
            numProperties = int(logfile.readline().split()[0])
            propertyNames = ["experimentid", "plannerid"]
            for _ in range(numProperties):
                field = logfile.readline().split()
                propertyType = field[-1]
                propertyName = "_".join(field[:-1])
                if propertyName not in columnNames:
                    # ALTER TABLE cannot take bound parameters; the identifiers
                    # come straight from the (trusted) benchmark log file.
                    c.execute(
                        "ALTER TABLE runs ADD %s %s" % (propertyName, propertyType)
                    )
                propertyNames.append(propertyName)
            # read measurements
            insertFmtStr = (
                "INSERT INTO runs ("
                + ",".join(propertyNames)
                + ") VALUES ("
                + ",".join("?" * len(propertyNames))
                + ")"
            )
            numRuns = int(logfile.readline().split()[0])
            runIds = []
            for _ in range(numRuns):
                runValues = [
                    None if isInvalidValue(x) else x
                    for x in logfile.readline().split("; ")[:-1]
                ]
                values = tuple([experimentId, plannerId] + runValues)
                c.execute(insertFmtStr, values)
                # extract primary key of each run row so we can reference them
                # in the planner progress data table if needed
                runIds.append(c.lastrowid)
                # add all run data to all_experiments
                if addAllExperiments:
                    values = tuple([allExperimentsId, plannerId] + runValues)
                    c.execute(insertFmtStr, values)

            nextLine = logfile.readline().strip()

            # read planner progress data if it's supplied
            if nextLine != ".":
                # get current column names
                c.execute("PRAGMA table_info(progress)")
                columnNames = [col[1] for col in c.fetchall()]

                # read progress properties and add columns as necessary
                numProgressProperties = int(nextLine.split()[0])
                progressPropertyNames = ["runid"]
                for _ in range(numProgressProperties):
                    field = logfile.readline().split()
                    progressPropertyType = field[-1]
                    progressPropertyName = "_".join(field[:-1])
                    if progressPropertyName not in columnNames:
                        c.execute(
                            "ALTER TABLE progress ADD %s %s"
                            % (progressPropertyName, progressPropertyType)
                        )
                    progressPropertyNames.append(progressPropertyName)
                # read progress measurements
                insertFmtStr = (
                    "INSERT INTO progress ("
                    + ",".join(progressPropertyNames)
                    + ") VALUES ("
                    + ",".join("?" * len(progressPropertyNames))
                    + ")"
                )
                numRuns = int(logfile.readline().split()[0])
                for j in range(numRuns):
                    dataSeries = logfile.readline().split(";")[:-1]
                    for dataSample in dataSeries:
                        values = tuple(
                            [runIds[j]]
                            + [
                                None if isInvalidValue(x) else x
                                for x in dataSample.split(",")[:-1]
                            ]
                        )
                        try:
                            c.execute(insertFmtStr, values)
                        except sqlite3.IntegrityError:
                            # best effort: duplicate (runid, time) samples are
                            # simply skipped
                            print(
                                "Ignoring duplicate progress data. Consider increasing ompl::tools::Benchmark::Request::timeBetweenUpdates."
                            )

                # consume the terminating "." line after the progress data
                logfile.readline()
        logfile.close()

    if addAllExperiments:
        # BUG FIX: the original built this UPDATE by string concatenation and
        # was missing a space before WHERE, producing invalid SQL; it also
        # embedded unescaped values.  Use bound parameters instead.
        assignments = ", ".join("%s=?" % key for key in allExperimentsValues)
        c.execute(
            "UPDATE experiments SET %s WHERE id=?" % assignments,
            tuple(allExperimentsValues.values()) + (allExperimentsId,),
        )
    conn.commit()
    c.close()
394 
395 
def plotAttribute(cur, planners, attribute, typename):
    """Create a plot for a particular attribute. It will include data for
    all planners that have data for this attribute.

    ENUM attributes become stacked percentage bar charts, BOOLEAN attributes
    become percentage bar charts, everything else becomes a box plot.
    NOTE(review): attribute/planner id values are interpolated into SQL; they
    come from the database schema itself, not from untrusted input.
    """
    labels = []
    measurements = []
    nanCounts = []
    if typename == "ENUM":
        cur.execute('SELECT description FROM enums where name IS "%s"' % attribute)
        descriptions = [t[0] for t in cur.fetchall()]
        numValues = len(descriptions)
    for planner in planners:
        cur.execute(
            "SELECT %s FROM runs WHERE plannerid = %s AND %s IS NOT NULL"
            % (attribute, planner[0], attribute)
        )
        measurement = [t[0] for t in cur.fetchall() if t[0] is not None]
        if len(measurement) > 0:
            cur.execute(
                "SELECT count(*) FROM runs WHERE plannerid = %s AND %s IS NULL"
                % (planner[0], attribute)
            )
            nanCounts.append(cur.fetchone()[0])
            labels.append(planner[1])
            if typename == "ENUM":
                # convert enum counts to percentages per planner
                scale = 100.0 / len(measurement)
                measurements.append(
                    [measurement.count(i) * scale for i in range(numValues)]
                )
            else:
                measurements.append(measurement)

    if len(measurements) == 0:
        print('Skipping "%s": no available measurements' % attribute)
        return

    plt.clf()
    ax = plt.gca()
    if typename == "ENUM":
        width = 0.5
        measurements = np.transpose(np.vstack(measurements))
        colsum = np.sum(measurements, axis=1)
        rows = np.where(colsum != 0)[0]
        heights = np.zeros((1, measurements.shape[1]))
        ind = range(measurements.shape[1])
        # (removed unused local `legend_labels` from the original)
        for i in rows:
            plt.bar(
                ind,
                measurements[i],
                width,
                bottom=heights[0],
                color=matplotlib.cm.hot(int(floor(i * 256 / numValues))),
                label=descriptions[i],
            )
            heights = heights + measurements[i]
        xtickNames = plt.xticks(
            [x + width / 2.0 for x in ind], labels, rotation=30, fontsize=8, ha="right"
        )
        ax.set_ylabel(attribute.replace("_", " ") + " (%)")
        box = ax.get_position()
        # make room on the right for the legend
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
        props = matplotlib.font_manager.FontProperties()
        props.set_size("small")
        ax.legend(loc="center left", bbox_to_anchor=(1, 0.5), prop=props)
    elif typename == "BOOLEAN":
        width = 0.5
        measurementsPercentage = [sum(m) * 100.0 / len(m) for m in measurements]
        ind = range(len(measurements))
        plt.bar(ind, measurementsPercentage, width)

        xtickNames = plt.xticks(
            [x + width / 2.0 for x in ind], labels, rotation=30, fontsize=8, ha="right"
        )
        ax.set_ylabel(attribute.replace("_", " ") + " (%)")
        plt.subplots_adjust(
            bottom=0.3
        )  # Squish the plot into the upper 2/3 of the page. Leave room for labels
    else:
        if int(matplotlibversion.split(".")[0]) < 1:
            plt.boxplot(measurements, notch=0, sym="k+", vert=1, whis=1.5)
        else:
            plt.boxplot(
                measurements, notch=0, sym="k+", vert=1, whis=1.5, bootstrap=1000
            )
        ax.set_ylabel(attribute.replace("_", " "))

        # xtickNames = plt.xticks(labels, rotation=30, fontsize=10)
        # plt.subplots_adjust(bottom=0.3) # Squish the plot into the upper 2/3 of the page. Leave room for labels

        xtickNames = plt.setp(ax, xticklabels=labels)
        plt.setp(xtickNames, rotation=30, fontsize=8, ha="right")
        for (
            tick
        ) in ax.xaxis.get_major_ticks():  # shrink the font size of the x tick labels
            tick.label.set_fontsize(8)
        plt.subplots_adjust(
            bottom=0.3
        )  # Squish the plot into the upper 2/3 of the page. Leave room for labels
        ax.set_xlabel("Motion planning algorithm", fontsize=12)
    ax.yaxis.grid(True, linestyle="-", which="major", color="lightgrey", alpha=0.5)
    if max(nanCounts) > 0:
        # annotate each planner with the number of runs that had no value
        maxy = max([max(y) for y in measurements])
        for i in range(len(labels)):
            x = i + width / 2 if typename == "BOOLEAN" else i + 1
            ax.text(x, 0.95 * maxy, str(nanCounts[i]) + " nan")
    plt.show()
511 
512 
def plotProgressAttribute(cur, planners, attribute):
    """Plot data for a single planner progress attribute. Will create an
    average time-plot with error bars of the attribute over all runs for
    each planner."""

    import numpy.ma as ma

    plt.clf()
    ax = plt.gca()
    ax.set_xlabel("time (s)")
    ax.set_ylabel(attribute.replace("_", " "))
    plannerNames = []
    for planner in planners:
        cur.execute(
            """SELECT count(progress.%s) FROM progress INNER JOIN runs
            ON progress.runid = runs.id AND runs.plannerid=%s
            AND progress.%s IS NOT NULL"""
            % (attribute, planner[0], attribute)
        )
        if cur.fetchone()[0] > 0:
            plannerNames.append(planner[1])
            cur.execute(
                """SELECT DISTINCT progress.runid FROM progress INNER JOIN runs
                WHERE progress.runid=runs.id AND runs.plannerid=?""",
                (planner[0],),
            )
            runids = [t[0] for t in cur.fetchall()]
            timeTable = []
            dataTable = []
            for r in runids:
                # Select data for given run
                cur.execute(
                    "SELECT time, %s FROM progress WHERE runid = %s ORDER BY time"
                    % (attribute, r)
                )
                (time, data) = zip(*(cur.fetchall()))
                timeTable.append(time)
                dataTable.append(data)
            # It's conceivable that the sampling process may have
            # generated more samples for one run than another; in this
            # case, truncate all data series to length of shortest
            # one.
            fewestSamples = min(len(time[:]) for time in timeTable)
            times = np.array(timeTable[0][:fewestSamples])
            dataArrays = np.array([data[:fewestSamples] for data in dataTable])
            # mask out NULL samples so they don't poison the statistics
            filteredData = ma.masked_array(
                dataArrays, np.equal(dataArrays, None), dtype=float
            )

            means = np.mean(filteredData, axis=0)
            stddevs = np.std(filteredData, axis=0, ddof=1)

            # plot average with error bars
            plt.errorbar(
                times, means, yerr=2 * stddevs, errorevery=max(1, len(times) // 20)
            )
    ax.legend(plannerNames)
    if len(plannerNames) > 0:
        plt.show()
    else:
        plt.clf()
574 
575 
def plotStatistics(dbname, fname):
    """Create a PDF file with box plots for all attributes."""
    print("Generating plots...")
    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute("PRAGMA FOREIGN_KEYS = ON")
    c.execute("SELECT id, name FROM plannerConfigs")
    planners = [
        (t[0], t[1].replace("geometric_", "").replace("control_", ""))
        for t in c.fetchall()
    ]
    c.execute("PRAGMA table_info(runs)")
    # skip the id/experimentid/plannerid columns
    colInfo = c.fetchall()[3:]

    pp = PdfPages(fname)
    for col in colInfo:
        if col[2] in ("BOOLEAN", "ENUM", "INTEGER", "REAL"):
            plotAttribute(c, planners, col[1], col[2])
            pp.savefig(plt.gcf())

    c.execute("PRAGMA table_info(progress)")
    # skip the runid/time columns
    colInfo = c.fetchall()[2:]
    for col in colInfo:
        plotProgressAttribute(c, planners, col[1])
        pp.savefig(plt.gcf())
    plt.clf()

    # final page: a textual summary of each experiment
    pagey = 0.9
    pagex = 0.06
    c.execute("""SELECT id, name, timelimit, memorylimit FROM experiments""")
    experiments = c.fetchall()
    for experiment in experiments:
        c.execute(
            """SELECT count(*) FROM runs WHERE runs.experimentid = %d
            GROUP BY runs.plannerid"""
            % experiment[0]
        )
        numRuns = [run[0] for run in c.fetchall()]
        # BUG FIX: the original joined int counts with ",".join(numRuns)
        # (TypeError) and then formatted the result with %d (TypeError again
        # when counts differed between planners).
        if len(set(numRuns)) == 1:
            numRunsDescr = str(numRuns[0])
        else:
            numRunsDescr = ",".join(str(r) for r in numRuns)

        plt.figtext(pagex, pagey, 'Experiment "%s"' % experiment[1])
        plt.figtext(pagex, pagey - 0.05, "Number of averaged runs: %s" % numRunsDescr)
        plt.figtext(
            pagex, pagey - 0.10, "Time limit per run: %g seconds" % experiment[2]
        )
        plt.figtext(pagex, pagey - 0.15, "Memory limit per run: %g MB" % experiment[3])
        pagey -= 0.22
    plt.show()
    pp.savefig(plt.gcf())
    pp.close()
631 
632 
def saveAsMysql(dbname, mysqldump):
    """Convert the sqlite3 database *dbname* into a MySQL dump file *mysqldump*."""
    # See http://stackoverflow.com/questions/1067060/perl-to-python
    import re

    print("Saving as MySQL dump file...")

    conn = sqlite3.connect(dbname)
    with open(mysqldump, "w") as dumpfile:
        # make sure all tables are dropped in an order that keeps foreign keys valid
        c = conn.cursor()
        c.execute("SELECT name FROM sqlite_master WHERE type='table'")
        table_names = [str(t[0]) for t in c.fetchall()]
        c.close()
        last = ["experiments", "planner_configs"]
        for table in table_names:
            if table.startswith("sqlite"):
                continue
            if table not in last:
                dumpfile.write("DROP TABLE IF EXISTS `%s`;\n" % table)
        for table in last:
            if table in table_names:
                dumpfile.write("DROP TABLE IF EXISTS `%s`;\n" % table)

        for line in conn.iterdump():
            # skip statements that have no MySQL equivalent / are unwanted
            process = False
            for nope in (
                "BEGIN TRANSACTION",
                "COMMIT",
                "sqlite_sequence",
                "CREATE UNIQUE INDEX",
                "CREATE VIEW",
            ):
                if nope in line:
                    break
            else:
                process = True
            if not process:
                continue
            line = re.sub(r"[\n\r\t ]+", " ", line)
            m = re.search("CREATE TABLE ([a-zA-Z0-9_]*)(.*)", line)
            if m:
                name, sub = m.groups()
                sub = sub.replace('"', "`")
                line = """CREATE TABLE IF NOT EXISTS %(name)s%(sub)s"""
                line = line % dict(name=name, sub=sub)
                # make sure we use an engine that supports foreign keys
                line = line.rstrip("\n\t ;") + " ENGINE = InnoDB;\n"
            else:
                m = re.search('INSERT INTO "([a-zA-Z0-9_]*)"(.*)', line)
                if m:
                    line = "INSERT INTO %s%s\n" % m.groups()
                    line = line.replace('"', r"\"")
                    line = line.replace('"', "'")

            # translate sqlite's 't'/'f' boolean literals to 1/0
            line = re.sub(r"([^'])'t'(.)", "\\1THIS_IS_TRUE\\2", line)
            line = line.replace("THIS_IS_TRUE", "1")
            line = re.sub(r"([^'])'f'(.)", "\\1THIS_IS_FALSE\\2", line)
            line = line.replace("THIS_IS_FALSE", "0")
            line = line.replace("AUTOINCREMENT", "AUTO_INCREMENT")
            dumpfile.write(line)
695 
696 
def computeViews(dbname):
    """(Re)create the bestPlannerConfigsPerExperiment and bestPlannerConfigs
    views, which rank planner configurations by average solve rate and average
    total time."""
    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute("PRAGMA FOREIGN_KEYS = ON")
    c.execute("PRAGMA table_info(runs)")
    # kinodynamic paths cannot be simplified (or least not easily),
    # so simplification_time may not exist as a database column
    if "simplification_time" in [col[1] for col in c.fetchall()]:
        s0 = """SELECT plannerid, plannerConfigs.name AS plannerName, experimentid, solved, time + simplification_time AS total_time
            FROM plannerConfigs INNER JOIN experiments INNER JOIN runs
            ON plannerConfigs.id=runs.plannerid AND experiments.id=runs.experimentid"""
    else:
        s0 = """SELECT plannerid, plannerConfigs.name AS plannerName, experimentid, solved, time AS total_time
            FROM plannerConfigs INNER JOIN experiments INNER JOIN runs
            ON plannerConfigs.id=runs.plannerid AND experiments.id=runs.experimentid"""
    s1 = (
        """SELECT plannerid, plannerName, experimentid, AVG(solved) AS avg_solved, AVG(total_time) AS avg_total_time
        FROM (%s) GROUP BY plannerid, experimentid"""
        % s0
    )
    s2 = (
        """SELECT plannerid, experimentid, MIN(avg_solved) AS avg_solved, avg_total_time
        FROM (%s) GROUP BY plannerName, experimentid ORDER BY avg_solved DESC, avg_total_time ASC"""
        % s1
    )
    c.execute("DROP VIEW IF EXISTS bestPlannerConfigsPerExperiment")
    c.execute("CREATE VIEW IF NOT EXISTS bestPlannerConfigsPerExperiment AS %s" % s2)

    s1 = (
        """SELECT plannerid, plannerName, AVG(solved) AS avg_solved, AVG(total_time) AS avg_total_time
        FROM (%s) GROUP BY plannerid"""
        % s0
    )
    s2 = (
        """SELECT plannerid, MIN(avg_solved) AS avg_solved, avg_total_time
        FROM (%s) GROUP BY plannerName ORDER BY avg_solved DESC, avg_total_time ASC"""
        % s1
    )
    c.execute("DROP VIEW IF EXISTS bestPlannerConfigs")
    c.execute("CREATE VIEW IF NOT EXISTS bestPlannerConfigs AS %s" % s2)

    conn.commit()
    c.close()
740 
741 
if __name__ == "__main__":
    # Command-line entry point: parse benchmark logs into a sqlite database
    # and optionally export views / plots / a MySQL dump.
    usage = """%prog [options] [<benchmark.log> ...]"""
    parser = OptionParser("A script to parse benchmarking results.\n" + usage)
    parser.add_option(
        "-d",
        "--database",
        dest="dbname",
        default="benchmark.db",
        help="Filename of benchmark database [default: %default]",
    )
    parser.add_option(
        "-v",
        "--view",
        action="store_true",
        dest="view",
        default=False,
        help="Compute the views for best planner configurations",
    )
    parser.add_option(
        "-p",
        "--plot",
        dest="plot",
        default=None,
        help="Create a PDF of plots with the filename provided",
    )
    parser.add_option(
        "-m",
        "--mysql",
        dest="mysqldb",
        default=None,
        help="Save SQLite3 database as a MySQL dump file",
    )
    (options, args) = parser.parse_args()

    if len(args) == 0:
        # parser.error() prints the message and exits
        parser.error("No arguments were provided. Please provide full path of log file")

    if len(args) > 0:
        readBenchmarkLog(options.dbname, args)
        # If we update the database, we recompute the views as well
        options.view = True

    if options.view:
        computeViews(options.dbname)

    # TODO(sjahr): This is currently broken and needs to be fixed
    # if options.plot:
    #     plotStatistics(options.dbname, options.plot)

    if options.mysqldb:
        saveAsMysql(options.dbname, options.mysqldb)
# --- documentation-index residue (not executable code) ---
# def plotAttribute(cur, planners, attribute, typename)
# def readLogValue(filevar, desired_token_index, expected_tokens)
# def readOptionalLogValue(filevar, desired_token_index, expected_tokens={})
# def plotProgressAttribute(cur, planners, attribute)
# def readRequiredLogValue(name, filevar, desired_token_index, expected_tokens={})
# CostFn sum(const std::vector< CostFn > &cost_functions)   [C++ signature from another file]
# void print(PropagationDistanceField &pdf, int numX, int numY, int numZ)   [C++ signature from another file]