# moveit2 — The MoveIt Motion Planning Framework for ROS 2.
# moveit_benchmark_statistics.py
# (Converted from a documentation-page header to comments so the file parses.)
1 #!/usr/bin/env python
2 
3 
36 
37 # Author: Mark Moll, Ioan Sucan, Luis G. Torres
38 
39 from sys import argv, exit
40 from os.path import basename, splitext
41 import sqlite3
42 import datetime
43 import matplotlib
44 
45 matplotlib.use("pdf")
46 from matplotlib import __version__ as matplotlibversion
47 from matplotlib.backends.backend_pdf import PdfPages
48 import matplotlib.pyplot as plt
49 import numpy as np
50 from math import floor
51 from optparse import OptionParser, OptionGroup
52 
53 # Given a text line, split it into tokens (by space) and return the token
54 # at the desired index. Additionally, test that some expected tokens exist.
55 # Return None if they do not.
def readLogValue(filevar, desired_token_index, expected_tokens):
    """Read one line from *filevar*, split it into whitespace tokens, and
    return the token at *desired_token_index*.

    *expected_tokens* maps token indices to required values.  If any
    expectation fails — including the line being too short to contain an
    expected or desired token — the read is undone (file position restored)
    and None is returned, per the documented contract above.
    """
    start_pos = filevar.tell()
    tokens = filevar.readline().split()
    for token_index, expected in expected_tokens.items():
        try:
            token = tokens[token_index]
        except IndexError:
            # Line shorter than expected: undo the read and signal failure
            # instead of letting IndexError escape.
            filevar.seek(start_pos)
            return None
        if token != expected:
            # undo the read, if we failed to parse.
            filevar.seek(start_pos)
            return None
    try:
        return tokens[desired_token_index]
    except IndexError:
        filevar.seek(start_pos)
        return None
65 
66 
def readOptionalLogValue(filevar, desired_token_index, expected_tokens=None):
    """Read an optional value from the log; return None when the line does
    not match *expected_tokens* (the file position is restored by
    readLogValue() in that case).

    The default for *expected_tokens* is now None instead of a mutable {}
    literal; behavior for callers is unchanged.
    """
    return readLogValue(filevar, desired_token_index, expected_tokens or {})
69 
70 
def readRequiredLogValue(name, filevar, desired_token_index, expected_tokens=None):
    """Read a value that must be present in the log.

    Raises an Exception mentioning *name* when the value cannot be parsed.
    """
    result = readLogValue(filevar, desired_token_index, expected_tokens or {})
    if result is None:  # identity test; was `== None`
        raise Exception("Unable to read " + name)
    return result
76 
77 
def ensurePrefix(line, prefix):
    """Check that *line* starts with *prefix* and hand the line back.

    Raises an Exception when the prefix is absent.
    """
    if line.startswith(prefix):
        return line
    raise Exception("Expected prefix " + prefix + " was not found")
82 
83 
def readOptionalMultilineValue(filevar):
    """Read a multi-line value delimited by "<<<|" and "|>>>" lines.

    Returns the text between the delimiters.  When the next line is not the
    opening delimiter, the file position is restored and None is returned.
    (The `def` line was lost in the documentation extraction of this file
    and is restored here.)
    """
    start_pos = filevar.tell()
    line = filevar.readline()
    if not line.startswith("<<<|"):
        filevar.seek(start_pos)
        return None
    value = ""
    line = filevar.readline()
    while not line.startswith("|>>>"):
        value = value + line
        line = filevar.readline()
        if not line:
            # readline() returns "" at EOF (never None), so the original
            # `line == None` test could never fire and the loop would spin
            # forever on a truncated file.
            raise Exception("Expected token |>>> missing")
    return value
98 
99 
def readRequiredMultilineValue(filevar):
    """Read a required multi-line value delimited by "<<<|" and "|>>>" lines.

    Unlike readOptionalMultilineValue(), the opening delimiter must be
    present; ensurePrefix() raises otherwise.  (The `def` line was lost in
    the documentation extraction of this file and is restored here.)
    """
    ensurePrefix(filevar.readline(), "<<<|")
    value = ""
    line = filevar.readline()
    while not line.startswith("|>>>"):
        value = value + line
        line = filevar.readline()
        if not line:
            # readline() yields "" at EOF, never None; the original
            # None-test was dead code and a truncated file looped forever.
            raise Exception("Expected token |>>> missing")
    return value
110 
111 
def readBenchmarkLog(dbname, filenames):
    """Parse benchmark log files and store the parsed data in a sqlite3 database.

    dbname    -- filename of the sqlite3 database (created if necessary)
    filenames -- list of benchmark log files to parse

    Besides one experiments row per log file, an aggregate "all_experiments"
    row accumulates runs across all parsed files.  Three wrapped
    `readRequiredLogValue(` call lines lost in the documentation extraction
    (time limit, memory limit, total time) are restored here.
    """

    def isInvalidValue(value):
        # Empty fields and non-finite numbers are stored as NULL.
        return len(value) == 0 or value in ["nan", "-nan", "inf", "-inf"]

    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute("PRAGMA FOREIGN_KEYS = ON")

    # create all tables if they don't already exist
    c.executescript(
        """CREATE TABLE IF NOT EXISTS experiments
        (id INTEGER PRIMARY KEY ON CONFLICT REPLACE AUTOINCREMENT, name VARCHAR(512),
        totaltime REAL, timelimit REAL, memorylimit REAL, runcount INTEGER,
        version VARCHAR(128), hostname VARCHAR(1024), cpuinfo TEXT,
        date DATETIME, seed INTEGER, setup TEXT);
        CREATE TABLE IF NOT EXISTS plannerConfigs
        (id INTEGER PRIMARY KEY AUTOINCREMENT,
        name VARCHAR(512) NOT NULL, settings TEXT);
        CREATE TABLE IF NOT EXISTS enums
        (name VARCHAR(512), value INTEGER, description TEXT,
        PRIMARY KEY (name, value));
        CREATE TABLE IF NOT EXISTS runs
        (id INTEGER PRIMARY KEY AUTOINCREMENT, experimentid INTEGER, plannerid INTEGER,
        FOREIGN KEY (experimentid) REFERENCES experiments(id) ON DELETE CASCADE,
        FOREIGN KEY (plannerid) REFERENCES plannerConfigs(id) ON DELETE CASCADE);
        CREATE TABLE IF NOT EXISTS progress
        (runid INTEGER, time REAL, PRIMARY KEY (runid, time),
        FOREIGN KEY (runid) REFERENCES runs(id) ON DELETE CASCADE)"""
    )

    # add placeholder entry for all_experiments
    allExperimentsName = "all_experiments"
    allExperimentsValues = {
        "totaltime": 0.0,
        "timelimit": 0.0,
        "memorylimit": 0.0,
        "runcount": 0,
        "version": "0.0.0",
        "hostname": "",
        "cpuinfo": "",
        "date": 0,
        "seed": 0,
        "setup": "",
    }
    addAllExperiments = len(filenames) > 0
    if addAllExperiments:
        c.execute(
            "INSERT INTO experiments VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
            (None, allExperimentsName) + tuple(allExperimentsValues.values()),
        )
        allExperimentsId = c.lastrowid

    for i, filename in enumerate(filenames):
        print("Processing " + filename)
        logfile = open(filename, "r")
        start_pos = logfile.tell()
        # The library name line is optional; older logs start directly with
        # the version line.
        libname = readOptionalLogValue(logfile, 0, {1: "version"})
        if libname is None:
            libname = "OMPL"
        logfile.seek(start_pos)
        version = readOptionalLogValue(logfile, -1, {1: "version"})
        if version is None:
            # set the version number to make Planner Arena happy
            version = "0.0.0"
        version = " ".join([libname, version])
        expname = readRequiredLogValue(
            "experiment name", logfile, -1, {0: "Experiment"}
        )
        hostname = readRequiredLogValue("hostname", logfile, -1, {0: "Running"})
        date = " ".join(ensurePrefix(logfile.readline(), "Starting").split()[2:])
        expsetup = readRequiredMultilineValue(logfile)
        cpuinfo = readOptionalMultilineValue(logfile)
        rseed = int(
            readRequiredLogValue("random seed", logfile, 0, {-2: "random", -1: "seed"})
        )
        timelimit = float(
            readRequiredLogValue(
                "time limit", logfile, 0, {-3: "seconds", -2: "per", -1: "run"}
            )
        )
        memorylimit = float(
            readRequiredLogValue(
                "memory limit", logfile, 0, {-3: "MB", -2: "per", -1: "run"}
            )
        )
        nrrunsOrNone = readOptionalLogValue(
            logfile, 0, {-3: "runs", -2: "per", -1: "planner"}
        )
        nrruns = -1
        if nrrunsOrNone is not None:
            nrruns = int(nrrunsOrNone)
            allExperimentsValues["runcount"] += nrruns
        totaltime = float(
            readRequiredLogValue(
                "total time", logfile, 0, {-3: "collect", -2: "the", -1: "data"}
            )
        )
        # fill in fields of all_experiments
        allExperimentsValues["totaltime"] += totaltime
        # Track the largest limits seen across all files.  (Both maxima
        # previously compared against totaltime — a copy-and-paste error.)
        allExperimentsValues["memorylimit"] = max(
            allExperimentsValues["memorylimit"], memorylimit
        )
        allExperimentsValues["timelimit"] = max(
            allExperimentsValues["timelimit"], timelimit
        )
        # copy the fields of the first file to all_experiments so that they are not empty
        if i == 0:
            allExperimentsValues["version"] = version
            allExperimentsValues["date"] = date
            allExperimentsValues["setup"] = expsetup
            allExperimentsValues["hostname"] = hostname
            allExperimentsValues["cpuinfo"] = cpuinfo
        numEnums = 0
        numEnumsOrNone = readOptionalLogValue(logfile, 0, {-2: "enum"})
        if numEnumsOrNone is not None:
            numEnums = int(numEnumsOrNone)
        # `_` instead of `i`: don't shadow the enclosing file index
        for _ in range(numEnums):
            enum = logfile.readline()[:-1].split("|")
            # parameterized query instead of string interpolation
            c.execute("SELECT * FROM enums WHERE name = ?", (enum[0],))
            if c.fetchone() is None:
                for j in range(len(enum) - 1):
                    c.execute(
                        "INSERT INTO enums VALUES (?,?,?)", (enum[0], j, enum[j + 1])
                    )
        c.execute(
            "INSERT INTO experiments VALUES (?,?,?,?,?,?,?,?,?,?,?,?)",
            (
                None,
                expname,
                totaltime,
                timelimit,
                memorylimit,
                nrruns,
                version,
                hostname,
                cpuinfo,
                date,
                rseed,
                expsetup,
            ),
        )
        experimentId = c.lastrowid
        numPlanners = int(
            readRequiredLogValue("planner count", logfile, 0, {-1: "planners"})
        )
        for _ in range(numPlanners):
            plannerName = logfile.readline()[:-1]
            print("Parsing data for " + plannerName)

            # read common data for planner
            numCommon = int(logfile.readline().split()[0])
            settings = ""
            for j in range(numCommon):
                settings = settings + logfile.readline() + ";"

            # find planner id
            c.execute(
                "SELECT id FROM plannerConfigs WHERE (name=? AND settings=?)",
                (
                    plannerName,
                    settings,
                ),
            )
            p = c.fetchone()
            if p is None:
                c.execute(
                    "INSERT INTO plannerConfigs VALUES (?,?,?)",
                    (
                        None,
                        plannerName,
                        settings,
                    ),
                )
                plannerId = c.lastrowid
            else:
                plannerId = p[0]

            # get current column names
            c.execute("PRAGMA table_info(runs)")
            columnNames = [col[1] for col in c.fetchall()]

            # read properties and add columns as necessary
            numProperties = int(logfile.readline().split()[0])
            propertyNames = ["experimentid", "plannerid"]
            for j in range(numProperties):
                field = logfile.readline().split()
                propertyType = field[-1]
                propertyName = "_".join(field[:-1])
                if propertyName not in columnNames:
                    c.execute(
                        "ALTER TABLE runs ADD %s %s" % (propertyName, propertyType)
                    )
                propertyNames.append(propertyName)
            # read measurements
            insertFmtStr = (
                "INSERT INTO runs ("
                + ",".join(propertyNames)
                + ") VALUES ("
                + ",".join("?" * len(propertyNames))
                + ")"
            )
            numRuns = int(logfile.readline().split()[0])
            runIds = []
            for j in range(numRuns):
                runValues = [
                    None if isInvalidValue(x) else x
                    for x in logfile.readline().split("; ")[:-1]
                ]
                values = tuple([experimentId, plannerId] + runValues)
                c.execute(insertFmtStr, values)
                # extract primary key of each run row so we can reference them
                # in the planner progress data table if needed
                runIds.append(c.lastrowid)
                # add all run data to all_experiments
                if addAllExperiments:
                    values = tuple([allExperimentsId, plannerId] + runValues)
                    c.execute(insertFmtStr, values)

            nextLine = logfile.readline().strip()

            # read planner progress data if it's supplied
            if nextLine != ".":
                # get current column names
                c.execute("PRAGMA table_info(progress)")
                columnNames = [col[1] for col in c.fetchall()]

                # read progress properties and add columns as necessary
                numProgressProperties = int(nextLine.split()[0])
                progressPropertyNames = ["runid"]
                for _ in range(numProgressProperties):
                    field = logfile.readline().split()
                    progressPropertyType = field[-1]
                    progressPropertyName = "_".join(field[:-1])
                    if progressPropertyName not in columnNames:
                        c.execute(
                            "ALTER TABLE progress ADD %s %s"
                            % (progressPropertyName, progressPropertyType)
                        )
                    progressPropertyNames.append(progressPropertyName)
                # read progress measurements
                insertFmtStr = (
                    "INSERT INTO progress ("
                    + ",".join(progressPropertyNames)
                    + ") VALUES ("
                    + ",".join("?" * len(progressPropertyNames))
                    + ")"
                )
                numRuns = int(logfile.readline().split()[0])
                for j in range(numRuns):
                    dataSeries = logfile.readline().split(";")[:-1]
                    for dataSample in dataSeries:
                        values = tuple(
                            [runIds[j]]
                            + [
                                None if isInvalidValue(x) else x
                                for x in dataSample.split(",")[:-1]
                            ]
                        )
                        try:
                            c.execute(insertFmtStr, values)
                        except sqlite3.IntegrityError:
                            print(
                                "Ignoring duplicate progress data. Consider increasing ompl::tools::Benchmark::Request::timeBetweenUpdates."
                            )

                logfile.readline()
        logfile.close()

    if addAllExperiments:
        updateString = "UPDATE experiments SET"
        for i, (key, val) in enumerate(allExperimentsValues.items()):
            if i > 0:
                updateString += ","
            updateString += " " + str(key) + "='" + str(val) + "'"
        # The leading space before WHERE is required: without it the last
        # value and the keyword ran together, producing invalid SQL.
        updateString += " WHERE id='" + str(allExperimentsId) + "'"
        c.execute(updateString)
    conn.commit()
    c.close()
393 
394 
def plotAttribute(cur, planners, attribute, typename):
    """Create a plot for a particular attribute. It will include data for
    all planners that have data for this attribute.

    cur       -- database cursor
    planners  -- list of (plannerid, plannername) tuples
    attribute -- name of the run attribute (a column of the runs table)
    typename  -- SQL type of the attribute (ENUM/BOOLEAN or numeric)

    The `ax.text` annotation call (lost in the documentation extraction of
    this file) is restored; the unused `legend_labels` local was removed.
    """
    labels = []
    measurements = []
    nanCounts = []
    if typename == "ENUM":
        cur.execute('SELECT description FROM enums where name IS "%s"' % attribute)
        descriptions = [t[0] for t in cur.fetchall()]
        numValues = len(descriptions)
    for planner in planners:
        cur.execute(
            "SELECT %s FROM runs WHERE plannerid = %s AND %s IS NOT NULL"
            % (attribute, planner[0], attribute)
        )
        measurement = [t[0] for t in cur.fetchall() if t[0] is not None]
        if len(measurement) > 0:
            # count runs where the attribute is missing (NULL)
            cur.execute(
                "SELECT count(*) FROM runs WHERE plannerid = %s AND %s IS NULL"
                % (planner[0], attribute)
            )
            nanCounts.append(cur.fetchone()[0])
            labels.append(planner[1])
            if typename == "ENUM":
                # store the percentage of runs falling into each enum value
                scale = 100.0 / len(measurement)
                measurements.append(
                    [measurement.count(i) * scale for i in range(numValues)]
                )
            else:
                measurements.append(measurement)

    if len(measurements) == 0:
        print('Skipping "%s": no available measurements' % attribute)
        return

    plt.clf()
    ax = plt.gca()
    if typename == "ENUM":
        # stacked bar chart of enum-value percentages per planner
        width = 0.5
        measurements = np.transpose(np.vstack(measurements))
        colsum = np.sum(measurements, axis=1)
        rows = np.where(colsum != 0)[0]
        heights = np.zeros((1, measurements.shape[1]))
        ind = range(measurements.shape[1])
        for i in rows:
            plt.bar(
                ind,
                measurements[i],
                width,
                bottom=heights[0],
                color=matplotlib.cm.hot(int(floor(i * 256 / numValues))),
                label=descriptions[i],
            )
            heights = heights + measurements[i]
        xtickNames = plt.xticks(
            [x + width / 2.0 for x in ind], labels, rotation=30, fontsize=8, ha="right"
        )
        ax.set_ylabel(attribute.replace("_", " ") + " (%)")
        box = ax.get_position()
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
        props = matplotlib.font_manager.FontProperties()
        props.set_size("small")
        ax.legend(loc="center left", bbox_to_anchor=(1, 0.5), prop=props)
    elif typename == "BOOLEAN":
        # bar chart of the percentage of true values per planner
        width = 0.5
        measurementsPercentage = [sum(m) * 100.0 / len(m) for m in measurements]
        ind = range(len(measurements))
        plt.bar(ind, measurementsPercentage, width)

        xtickNames = plt.xticks(
            [x + width / 2.0 for x in ind], labels, rotation=30, fontsize=8, ha="right"
        )
        ax.set_ylabel(attribute.replace("_", " ") + " (%)")
        plt.subplots_adjust(
            bottom=0.3
        )  # Squish the plot into the upper 2/3 of the page. Leave room for labels
    else:
        # numeric attribute: box plot per planner
        if int(matplotlibversion.split(".")[0]) < 1:
            plt.boxplot(measurements, notch=0, sym="k+", vert=1, whis=1.5)
        else:
            plt.boxplot(
                measurements, notch=0, sym="k+", vert=1, whis=1.5, bootstrap=1000
            )
        ax.set_ylabel(attribute.replace("_", " "))

        xtickNames = plt.setp(ax, xticklabels=labels)
        plt.setp(xtickNames, rotation=30, fontsize=8, ha="right")
        for tick in ax.xaxis.get_major_ticks():
            # shrink the font size of the x tick labels
            tick.label.set_fontsize(8)
        plt.subplots_adjust(
            bottom=0.3
        )  # Squish the plot into the upper 2/3 of the page. Leave room for labels
        ax.set_xlabel("Motion planning algorithm", fontsize=12)
    ax.yaxis.grid(True, linestyle="-", which="major", color="lightgrey", alpha=0.5)
    if max(nanCounts) > 0:
        # annotate each planner with its number of missing measurements
        maxy = max([max(y) for y in measurements])
        for i in range(len(labels)):
            x = i + width / 2 if typename == "BOOLEAN" else i + 1
            ax.text(
                x,
                maxy - 0.05 * maxy,
                str(nanCounts[i]) + " missing",
                horizontalalignment="center",
                size="small",
            )
    plt.show()
509 
510 
def plotProgressAttribute(cur, planners, attribute):
    """Plot data for a single planner progress attribute. Will create an
    average time-plot with error bars of the attribute over all runs for
    each planner."""

    import numpy.ma as ma

    plt.clf()
    ax = plt.gca()
    ax.set_xlabel("time (s)")
    ax.set_ylabel(attribute.replace("_", " "))
    plannerNames = []
    for planner in planners:
        # Skip planners that never recorded this progress attribute.
        cur.execute(
            """SELECT count(progress.%s) FROM progress INNER JOIN runs
            ON progress.runid = runs.id AND runs.plannerid=%s
            AND progress.%s IS NOT NULL"""
            % (attribute, planner[0], attribute)
        )
        if cur.fetchone()[0] > 0:
            plannerNames.append(planner[1])
            # All run ids of this planner that have progress data.
            cur.execute(
                """SELECT DISTINCT progress.runid FROM progress INNER JOIN runs
                WHERE progress.runid=runs.id AND runs.plannerid=?""",
                (planner[0],),
            )
            runids = [t[0] for t in cur.fetchall()]
            timeTable = []
            dataTable = []
            for r in runids:
                # Select data for given run
                cur.execute(
                    "SELECT time, %s FROM progress WHERE runid = %s ORDER BY time"
                    % (attribute, r)
                )
                (time, data) = zip(*(cur.fetchall()))
                timeTable.append(time)
                dataTable.append(data)
            # It's conceivable that the sampling process may have
            # generated more samples for one run than another; in this
            # case, truncate all data series to length of shortest
            # one.
            fewestSamples = min(len(time[:]) for time in timeTable)
            times = np.array(timeTable[0][:fewestSamples])
            dataArrays = np.array([data[:fewestSamples] for data in dataTable])
            # Mask out missing (None/NULL) samples so means/stddevs ignore
            # them.  NOTE(review): np.equal against None relies on
            # elementwise comparison of an object array — verify on the
            # numpy version in use.
            filteredData = ma.masked_array(
                dataArrays, np.equal(dataArrays, None), dtype=float
            )

            means = np.mean(filteredData, axis=0)
            stddevs = np.std(filteredData, axis=0, ddof=1)

            # plot average with error bars
            plt.errorbar(
                times, means, yerr=2 * stddevs, errorevery=max(1, len(times) // 20)
            )
    ax.legend(plannerNames)
    if len(plannerNames) > 0:
        plt.show()
    else:
        # nothing was plotted; clear the (empty) axes instead of showing them
        plt.clf()
572 
573 
def plotStatistics(dbname, fname):
    """Create a PDF file with box plots for all attributes.

    dbname -- filename of the sqlite3 benchmark database
    fname  -- filename of the PDF to write
    """
    print("Generating plots...")
    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute("PRAGMA FOREIGN_KEYS = ON")
    c.execute("SELECT id, name FROM plannerConfigs")
    # strip the geometric_/control_ namespace prefixes for nicer labels
    planners = [
        (t[0], t[1].replace("geometric_", "").replace("control_", ""))
        for t in c.fetchall()
    ]
    c.execute("PRAGMA table_info(runs)")
    # skip the id/experimentid/plannerid bookkeeping columns
    colInfo = c.fetchall()[3:]

    pp = PdfPages(fname)
    for col in colInfo:
        if (
            col[2] == "BOOLEAN"
            or col[2] == "ENUM"
            or col[2] == "INTEGER"
            or col[2] == "REAL"
        ):
            plotAttribute(c, planners, col[1], col[2])
            pp.savefig(plt.gcf())

    c.execute("PRAGMA table_info(progress)")
    colInfo = c.fetchall()[2:]
    for col in colInfo:
        plotProgressAttribute(c, planners, col[1])
        pp.savefig(plt.gcf())
    plt.clf()

    # final page: textual summary of each experiment
    pagey = 0.9
    pagex = 0.06
    c.execute("""SELECT id, name, timelimit, memorylimit FROM experiments""")
    experiments = c.fetchall()
    for experiment in experiments:
        c.execute(
            """SELECT count(*) FROM runs WHERE runs.experimentid = %d
            GROUP BY runs.plannerid"""
            % experiment[0]
        )
        numRuns = [run[0] for run in c.fetchall()]
        # Show one number when every planner had the same run count,
        # otherwise a comma-separated list.  The counts are ints, so they
        # must be stringified before joining (the original joined raw ints
        # and then formatted the result with %d, raising TypeError whenever
        # planners had differing run counts).
        numRuns = numRuns[0] if len(set(numRuns)) == 1 else ",".join(map(str, numRuns))

        plt.figtext(pagex, pagey, 'Experiment "%s"' % experiment[1])
        plt.figtext(pagex, pagey - 0.05, "Number of averaged runs: %s" % numRuns)
        plt.figtext(
            pagex, pagey - 0.10, "Time limit per run: %g seconds" % experiment[2]
        )
        plt.figtext(pagex, pagey - 0.15, "Memory limit per run: %g MB" % experiment[3])
        pagey -= 0.22
    plt.show()
    pp.savefig(plt.gcf())
    pp.close()
629 
630 
def saveAsMysql(dbname, mysqldump):
    """Dump the sqlite3 database *dbname* as a MySQL-compatible SQL script
    written to the file named by *mysqldump*."""
    # See http://stackoverflow.com/questions/1067060/perl-to-python
    import re

    print("Saving as MySQL dump file...")

    conn = sqlite3.connect(dbname)
    mysqldump = open(mysqldump, "w")

    # make sure all tables are dropped in an order that keeps foreign keys valid
    c = conn.cursor()
    c.execute("SELECT name FROM sqlite_master WHERE type='table'")
    table_names = [str(t[0]) for t in c.fetchall()]
    c.close()
    # these referenced tables must be dropped last
    last = ["experiments", "planner_configs"]
    for table in table_names:
        if table.startswith("sqlite"):
            continue
        if not table in last:
            mysqldump.write("DROP TABLE IF EXISTS `%s`;\n" % table)
    for table in last:
        if table in table_names:
            mysqldump.write("DROP TABLE IF EXISTS `%s`;\n" % table)

    for line in conn.iterdump():
        # for/else: the else-branch runs only when no unwanted marker was
        # found (i.e. the loop finished without `break`), so `process`
        # stays False for statements we want to drop from the dump.
        process = False
        for nope in (
            "BEGIN TRANSACTION",
            "COMMIT",
            "sqlite_sequence",
            "CREATE UNIQUE INDEX",
            "CREATE VIEW",
        ):
            if nope in line:
                break
        else:
            process = True
        if not process:
            continue
        # collapse all whitespace runs to single spaces
        line = re.sub(r"[\n\r\t ]+", " ", line)
        m = re.search("CREATE TABLE ([a-zA-Z0-9_]*)(.*)", line)
        if m:
            name, sub = m.groups()
            # MySQL quotes identifiers with backticks
            sub = sub.replace('"', "`")
            line = """CREATE TABLE IF NOT EXISTS %(name)s%(sub)s"""
            line = line % dict(name=name, sub=sub)
            # make sure we use an engine that supports foreign keys
            line = line.rstrip("\n\t ;") + " ENGINE = InnoDB;\n"
        else:
            m = re.search('INSERT INTO "([a-zA-Z0-9_]*)"(.*)', line)
            if m:
                line = "INSERT INTO %s%s\n" % m.groups()
                # NOTE(review): the first replace turns `"` into `\"`, and
                # the second then rewrites the quote inside that escape, so
                # the net effect is `"` -> `\'` — looks intended for MySQL
                # string quoting, but verify against the consumer.
                line = line.replace('"', r"\"")
                line = line.replace('"', "'")

        # translate sqlite's 't'/'f' boolean literals to 1/0 without
        # touching quoted occurrences inside longer strings
        line = re.sub(r"([^'])'t'(.)", "\\1THIS_IS_TRUE\\2", line)
        line = line.replace("THIS_IS_TRUE", "1")
        line = re.sub(r"([^'])'f'(.)", "\\1THIS_IS_FALSE\\2", line)
        line = line.replace("THIS_IS_FALSE", "0")
        line = line.replace("AUTOINCREMENT", "AUTO_INCREMENT")
        mysqldump.write(line)
    mysqldump.close()
693 
694 
def computeViews(dbname):
    """(Re)create the database views that rank planner configurations by
    average success rate and average total time, both per experiment
    (bestPlannerConfigsPerExperiment) and overall (bestPlannerConfigs)."""
    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute("PRAGMA FOREIGN_KEYS = ON")
    c.execute("PRAGMA table_info(runs)")
    # kinodynamic paths cannot be simplified (or least not easily),
    # so simplification_time may not exist as a database column
    runColumns = [col[1] for col in c.fetchall()]
    if "simplification_time" in runColumns:
        baseQuery = """SELECT plannerid, plannerConfigs.name AS plannerName, experimentid, solved, time + simplification_time AS total_time
    FROM plannerConfigs INNER JOIN experiments INNER JOIN runs
    ON plannerConfigs.id=runs.plannerid AND experiments.id=runs.experimentid"""
    else:
        baseQuery = """SELECT plannerid, plannerConfigs.name AS plannerName, experimentid, solved, time AS total_time
    FROM plannerConfigs INNER JOIN experiments INNER JOIN runs
    ON plannerConfigs.id=runs.plannerid AND experiments.id=runs.experimentid"""

    # Per-experiment: average each config, then pick the best config per
    # planner name within each experiment.
    perConfig = (
        """SELECT plannerid, plannerName, experimentid, AVG(solved) AS avg_solved, AVG(total_time) AS avg_total_time
    FROM (%s) GROUP BY plannerid, experimentid"""
        % baseQuery
    )
    bestPerExperiment = (
        """SELECT plannerid, experimentid, MIN(avg_solved) AS avg_solved, avg_total_time
    FROM (%s) GROUP BY plannerName, experimentid ORDER BY avg_solved DESC, avg_total_time ASC"""
        % perConfig
    )
    c.execute("DROP VIEW IF EXISTS bestPlannerConfigsPerExperiment")
    c.execute(
        "CREATE VIEW IF NOT EXISTS bestPlannerConfigsPerExperiment AS %s"
        % bestPerExperiment
    )

    # Overall: the same aggregation without the experiment grouping.
    perConfigOverall = (
        """SELECT plannerid, plannerName, AVG(solved) AS avg_solved, AVG(total_time) AS avg_total_time
    FROM (%s) GROUP BY plannerid"""
        % baseQuery
    )
    bestOverall = (
        """SELECT plannerid, MIN(avg_solved) AS avg_solved, avg_total_time
    FROM (%s) GROUP BY plannerName ORDER BY avg_solved DESC, avg_total_time ASC"""
        % perConfigOverall
    )
    c.execute("DROP VIEW IF EXISTS bestPlannerConfigs")
    c.execute("CREATE VIEW IF NOT EXISTS bestPlannerConfigs AS %s" % bestOverall)

    conn.commit()
    c.close()
738 
739 
if __name__ == "__main__":
    usage = """%prog [options] [<benchmark.log> ...]"""
    parser = OptionParser("A script to parse benchmarking results.\n" + usage)
    # Register the command-line switches from a table of (flags, keywords).
    for optionFlags, optionKwargs in (
        (
            ("-d", "--database"),
            dict(
                dest="dbname",
                default="benchmark.db",
                help="Filename of benchmark database [default: %default]",
            ),
        ),
        (
            ("-v", "--view"),
            dict(
                action="store_true",
                dest="view",
                default=False,
                help="Compute the views for best planner configurations",
            ),
        ),
        (
            ("-p", "--plot"),
            dict(
                dest="plot",
                default=None,
                help="Create a PDF of plots with the filename provided",
            ),
        ),
        (
            ("-m", "--mysql"),
            dict(
                dest="mysqldb",
                default=None,
                help="Save SQLite3 database as a MySQL dump file",
            ),
        ),
    ):
        parser.add_option(*optionFlags, **optionKwargs)
    options, args = parser.parse_args()

    # at least one log file is required (parser.error exits the program)
    if not args:
        parser.error("No arguments were provided. Please provide full path of log file")

    if args:
        readBenchmarkLog(options.dbname, args)
        # If we update the database, we recompute the views as well
        options.view = True

    if options.view:
        computeViews(options.dbname)

    if options.plot:
        plotStatistics(options.dbname, options.plot)

    if options.mysqldb:
        saveAsMysql(options.dbname, options.mysqldb)
# Index of definitions in this file (documentation-page residue, kept as comments):
# - plotAttribute(cur, planners, attribute, typename)
# - readLogValue(filevar, desired_token_index, expected_tokens)
# - readOptionalLogValue(filevar, desired_token_index, expected_tokens={})
# - plotProgressAttribute(cur, planners, attribute)
# - readRequiredLogValue(name, filevar, desired_token_index, expected_tokens={})
# - (stray C++ signature from the documentation index, not part of this file:
#    void print(PropagationDistanceField &pdf, int numX, int numY, int numZ))