Switch to python3 and from slycot to scipy

It turns out we need the python3 build of matplotlib to make scipy work
well enough to place the poles correctly for our systems.  Rather than
doing it piecemeal, do it all at once.

This also includes a python opencv upgrade to support the new python,
and a matplotlib upgrade.

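For reference, a minimal sketch of the scipy-based pole placement this
switch enables.  The plant matrices and pole locations below are made-up
placeholders for illustration, not values from any of our controllers:

    import numpy as np
    import scipy.signal

    # Hypothetical discrete-time double integrator, purely illustrative.
    A = np.array([[1.0, 0.005],
                  [0.0, 1.0]])
    B = np.array([[0.0],
                  [0.005]])

    # scipy.signal.place_poles takes over the pole placement slycot used
    # to provide; the closed-loop dynamics become A - B * K.
    K = scipy.signal.place_poles(A, B, [0.90, 0.91]).gain_matrix
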
Change-Id: Ic7517b5ebbfdca9cc90ae6a61d86b474f2f21b29
diff --git a/frc971/analysis/BUILD b/frc971/analysis/BUILD
index 45e947b..2901463 100644
--- a/frc971/analysis/BUILD
+++ b/frc971/analysis/BUILD
@@ -6,22 +6,6 @@
 load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library", "flatbuffer_ts_library")
 load("//aos:config.bzl", "aos_config")
 
-py_binary(
-    name = "plot_action",
-    srcs = [
-        "logentry.py",
-        "logreader.py",
-        "plot_action.py",
-        "plotter.py",
-    ],
-    legacy_create_init = False,
-    target_compatible_with = ["@platforms//os:linux"],
-    deps = [
-        ":python_init",
-        "@matplotlib_repo//:matplotlib2.7",
-    ],
-)
-
 py_library(
     name = "python_init",
     srcs = ["__init__.py"],
@@ -41,7 +25,7 @@
         "//aos/events:simulated_event_loop",
         "//aos/events/logging:logger",
         "@com_github_google_glog//:glog",
-        "@python_repo//:python3.5_lib",
+        "@python_repo//:python3.7_lib",
     ],
 )
 
diff --git a/frc971/analysis/logentry.py b/frc971/analysis/logentry.py
deleted file mode 100644
index 08aa469..0000000
--- a/frc971/analysis/logentry.py
+++ /dev/null
@@ -1,275 +0,0 @@
-#!/usr/bin/python
-
-import re
-
-"""
-A regular expression to match the envelope part of the log entry.
-Parsing of the JSON msg is handled elsewhere.
-"""
-LOG_RE = re.compile("""
-  (.*?)              # 1 name
-  \((\d+)\)          # 2 pid
-  \((\d+)\)          # 3 message_index
-  :\s
-  (\w+?)             # 4 level
-  \s+at\s+
-  (\d+\.\d+)s        # 5 time
-  :\s
-  ([A-Za-z0-9_./-]+) # 6 filename
-  :\s
-  (\d+)              # 7 linenumber
-  :\s
-  (.*)               # 8 msg
-  """, re.VERBOSE)
-
-class LogEntry:
-  """
-  This class provides a way to parse log entries.
-  The header portion of the log entry is parsed eagerly.
-  The structured portion of a log entry is parsed on demand.
-  """
-
-  def __init__(self, line):
-    """Populates a LogEntry from a line."""
-    self.line = line
-    m = LOG_RE.match(line)
-    if m is None:
-        print("LOG_RE failed on", line)
-        return
-    self.name = m.group(1)
-    self.pid_index = int(m.group(2))
-    self.msg_index = int(m.group(3))
-    self.level = m.group(4)
-    self.time = float(m.group(5))
-    self.filename = m.group(6)
-    self.linenumber = m.group(7)
-    self.msg = m.group(8)
-    self.struct_name = None
-
-  def __str__(self):
-    """Formats the data cleanly."""
-    return '%s(%d)(%d): %s at %fs: %s: %s: %s' % (
-        self.name, self.pid_index, self.msg_index, self.level, self.time, self.filename, self.linenumber, self.msg)
-
-  def ParseStruct(self):
-    """Parses the message as a structure.
-
-    Returns:
-      struct_name, struct_type, json dict.
-    """
-    if self.struct_name:
-        # We've already parsed the structural part. Return the cached result
-        return (self.struct_name, self.struct_type, self.struct_json)
-
-    struct_name_index = self.msg.find(':')
-    struct_name = self.msg[0:struct_name_index]
-
-    struct_body = self.msg[struct_name_index+2:]
-    tokens = []
-    this_token = ''
-    # For the various deliminators, append what we have found so far to the
-    # list and the token.
-    for char in struct_body:
-      if char == '{':
-        if this_token:
-          tokens.append(this_token)
-          this_token = ''
-        tokens.append('{')
-      elif char == '}':
-        if this_token:
-          tokens.append(this_token)
-          this_token = ''
-        tokens.append('}')
-      elif char == '[':
-        if this_token:
-          tokens.append(this_token)
-          this_token = ''
-        tokens.append('[')
-      elif char == ']':
-        if this_token:
-          tokens.append(this_token)
-          this_token = ''
-        tokens.append(']')
-      elif char == ':':
-        if this_token:
-          tokens.append(this_token)
-          this_token = ''
-        tokens.append(':')
-      elif char == ',':
-        if this_token:
-          tokens.append(this_token)
-          this_token = ''
-        tokens.append(',')
-      elif char == ' ':
-        if this_token:
-          tokens.append(this_token)
-          this_token = ''
-      else:
-        this_token += char
-    if this_token:
-      tokens.append(this_token)
-
-    struct_type = tokens[0]
-    json = dict()
-    # Now that we have tokens, parse them.
-    self.JsonizeTokens(json, tokens, 1)
-
-    # Cache the result to avoid having to reparse.
-    self.struct_name = struct_name
-    self.struct_type = struct_type
-    self.struct_json = json
-
-    return (struct_name, struct_type, json)
-
-  def JsonizeTokens(self, json, tokens, token_index):
-    """Creates a json-like dictionary from the provided tokens.
-
-    Args:
-      json: dict, The dict to stick the elements in.
-      tokens: list of strings, The list with all the tokens in it.
-      token_index: int, Where to start in the token list.
-
-    Returns:
-      int, The last token used.
-    """
-    # Check that the message starts with a {
-    if tokens[token_index] != '{':
-      print(tokens)
-      print('Expected { at beginning, found', tokens[token_index])
-      return None
-
-    # Eat the {
-    token_index += 1
-
-    # States and state variable for parsing elements.
-    STATE_INIT = 'init'
-    STATE_HAS_NAME = 'name'
-    STATE_HAS_COLON = 'colon'
-    STATE_EXPECTING_SUBMSG = 'submsg'
-    STATE_EXPECTING_COMMA = 'comma'
-    parser_state = STATE_INIT
-
-    while token_index < len(tokens):
-      if tokens[token_index] == '}':
-        # Finish if there is a }
-        return token_index + 1
-      elif tokens[token_index] == '{':
-        if parser_state != STATE_EXPECTING_SUBMSG:
-          print(tokens)
-          print(parser_state)
-          print('Bad input, was not expecting {')
-          return None
-        # Found a submessage, parse it.
-        sub_json = dict()
-        token_index = self.JsonizeTokens(sub_json, tokens, token_index)
-        json[token_name] = sub_json
-        parser_state = STATE_EXPECTING_COMMA
-      else:
-        if parser_state == STATE_INIT:
-          # This token is the name.
-          token_name = tokens[token_index]
-          parser_state = STATE_HAS_NAME
-        elif parser_state == STATE_HAS_NAME:
-          if tokens[token_index] != ':':
-            print(tokens)
-            print(parser_state)
-            print('Bad input, found', tokens[token_index], 'expected :')
-            return None
-          # After a name, comes a :
-          parser_state = STATE_HAS_COLON
-        elif parser_state == STATE_HAS_COLON:
-          # After the colon, figure out what is next.
-          if tokens[token_index] == '[':
-            # Found a sub-array!
-            sub_array = []
-            token_index = self.__JsonizeTokenArray(sub_array, tokens, token_index)
-            json[token_name] = sub_array
-            parser_state = STATE_EXPECTING_COMMA
-          elif tokens[token_index + 1] == '{':
-            # Found a sub-message, trigger parsing it.
-            parser_state = STATE_EXPECTING_SUBMSG
-          else:
-            # This is just an element, move on.
-            json[token_name] = tokens[token_index]
-            parser_state = STATE_EXPECTING_COMMA
-        elif parser_state == STATE_EXPECTING_COMMA:
-          # Complain if there isn't a comma here.
-          if tokens[token_index] != ',':
-            print(tokens)
-            print(parser_state)
-            print('Bad input, found', tokens[token_index], 'expected ,')
-            return None
-          parser_state = STATE_INIT
-        else:
-          print('Bad parser state')
-          return None
-        token_index += 1
-
-    print('Unexpected end')
-    return None
-
-  def __JsonizeTokenArray(self, sub_array, tokens, token_index):
-    """Parses an array from the provided tokens.
-
-    Args:
-      sub_array: list, The list to stick the elements in.
-      tokens: list of strings, The list with all the tokens in it.
-      token_index: int, Where to start in the token list.
-
-    Returns:
-      int, The last token used.
-    """
-    # Make sure the data starts with a '['
-    if tokens[token_index] != '[':
-      print(tokens)
-      print('Expected [ at beginning, found', tokens[token_index + 1])
-      return None
-
-    # Eat the '['
-    token_index += 1
-
-    # Loop through the tokens.
-    while token_index < len(tokens):
-      if tokens[token_index + 1] == ',':
-        # Next item is a comma, so we should just add the element.
-        sub_array.append(tokens[token_index])
-        token_index += 2
-      elif tokens[token_index + 1] == ']':
-        # Next item is a ']', so we should just add the element and finish.
-        sub_array.append(tokens[token_index])
-        token_index += 1
-        return token_index
-      else:
-        # Otherwise, it must be a sub-message.
-        sub_json = dict()
-        token_index = self.JsonizeTokens(sub_json, tokens, token_index + 1)
-        sub_array.append(sub_json)
-        if tokens[token_index] == ',':
-          # Handle there either being another data element.
-          token_index += 1
-        elif tokens[token_index] == ']':
-          # Handle the end of the array.
-          return token_index
-        else:
-          print('Unexpected ', tokens[token_index])
-          return None
-
-    print('Unexpected end')
-    return None
-
-
-if __name__ == '__main__':
-  def ParseLine(line):
-    return LogEntry(line)
-
-  print('motor_writer(2240)(07421): DEBUG   at 0000000819.99620s: ../../frc971/output/motor_writer.cc: 105: sending: .aos.controls.OutputCheck{pwm_value:221, pulse_length:2.233333}')
-  line = ParseLine('motor_writer(2240)(07421): DEBUG   at 0000000819.99620s: ../../frc971/output/motor_writer.cc: 105: sending: .aos.controls.OutputCheck{pwm_value:221, pulse_length:2.233333}')
-  if '.aos.controls.OutputCheck' in line.msg:
-    print(line)
-    print(line.ParseStruct())
-
-  line = ParseLine('claw(2263)(19404): DEBUG   at 0000000820.00000s: ../../aos/controls/control_loop-tmpl.h: 104: position: .frc971.control_loops.ClawGroup.Position{top:.frc971.control_loops.HalfClawPosition{position:1.672153, front:.frc971.HallEffectStruct{current:f, posedge_count:0, negedge_count:52}, calibration:.frc971.HallEffectStruct{current:f, posedge_count:6, negedge_count:13}, back:.frc971.HallEffectStruct{current:f, posedge_count:0, negedge_count:62}, posedge_value:0.642681, negedge_value:0.922207}, bottom:.frc971.control_loops.HalfClawPosition{position:1.353539, front:.frc971.HallEffectStruct{current:f, posedge_count:2, negedge_count:150}, calibration:.frc971.HallEffectStruct{current:f, posedge_count:8, negedge_count:18}, back:.frc971.HallEffectStruct{current:f, posedge_count:0, negedge_count:6}, posedge_value:0.434514, negedge_value:0.759491}}')
-  print(line.ParseStruct())
-
-  line = ParseLine('joystick_proxy(2255)(39560): DEBUG   at 0000000820.00730s: ../../aos/prime/input/joystick_input.cc: 61: sending: .aos.RobotState{joysticks:[.aos.Joystick{buttons:0, axis:[0.000000, 1.000000, 1.000000, 0.000000]}, .aos.Joystick{buttons:0, axis:[-0.401575, 1.000000, -1.007874, 0.000000]}, .aos.Joystick{buttons:0, axis:[0.007874, 0.000000, 1.000000, -1.007874]}, .aos.Joystick{buttons:0, axis:[0.000000, 0.000000, 0.000000, 0.000000]}], test_mode:f, fms_attached:f, enabled:T, autonomous:f, team_id:971, fake:f}')
-  print(line.ParseStruct())
diff --git a/frc971/analysis/logreader.py b/frc971/analysis/logreader.py
deleted file mode 100644
index 7449569..0000000
--- a/frc971/analysis/logreader.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/python
-
-import collections
-from frc971.analysis.logentry import LogEntry
-
-class Dataset(object):
-  def __init__(self):
-    self.time = []
-    self.data = []
-
-  def Add(self, time, data):
-    self.time.append(time)
-    self.data.append(data)
-
-class CollectingLogReader(object):
-  """
-  Reads log files and collected requested data.
-  """
-  def __init__(self):
-    self.signal = collections.OrderedDict()
-
-  def Add(self, binary, struct_instance_name, *data_search_path):
-    """
-    Specifies a specific piece of data to collect
-
-    Args:
-      binary: str, The name of the executable that generated the log.
-      struct_instance_name: str, The name of the struct instance whose data
-                            contents should be collected.
-      data_search_path: [str], The path into the struct of the exact piece of
-                        data to collect.
-
-    Returns:
-      None
-    """
-    self.signal[(binary, struct_instance_name, data_search_path)] = Dataset()
-
-  def HandleFile(self, f):
-    """
-    Parses the specified log file.
-
-    Args:
-      f: str, The filename of the log whose data to parse.
-
-    Returns:
-      None
-    """
-    with open(f, 'r') as fd:
-      for line in fd:
-        try:
-            self.HandleLine(line)
-        except Exception as ex:
-            # It's common for the last line of the file to be malformed.
-            print("Ignoring malformed log entry: ", line, ex)
-
-  def HandleLine(self, line):
-    """
-    Parses a line from a log file and adds the data to the plot data.
-
-    Args:
-      line: str, The line from the log file to parse
-
-    Returns:
-      None
-    """
-    pline = LogEntry(line)
-
-    for key in self.signal:
-      value = self.signal[key]
-      binary = key[0]
-      struct_instance_name = key[1]
-      data_search_path = key[2]
-      boolean_multiplier = False
-      multiplier = 1.0
-
-      # If the plot definition line ends with a "-b X" where X is a number then
-      # that number gets drawn when the value is True. Zero gets drawn when the
-      # value is False.
-      if len(data_search_path) >= 2 and data_search_path[-2] == '-b':
-        multiplier = float(data_search_path[-1])
-        boolean_multiplier = True
-        data_search_path = data_search_path[:-2]
-
-      if len(data_search_path) >= 2 and data_search_path[-2] == '-m':
-        multiplier = float(data_search_path[-1])
-        data_search_path = data_search_path[:-2]
-
-      # Make sure that we're looking at the right binary structure instance.
-      if binary == pline.name:
-        if pline.msg.startswith(struct_instance_name + ': '):
-          # Traverse the structure as specified in `data_search_path`.
-          # This lets the user access very deeply nested structures.
-          _, _, data = pline.ParseStruct()
-          for path in data_search_path:
-            data = data[path]
-
-          if boolean_multiplier:
-            if data == 'T':
-              value.Add(pline.time, multiplier)
-            else:
-              value.Add(pline.time, 0)
-          else:
-            value.Add(pline.time, float(data) * multiplier)
diff --git a/frc971/analysis/plot_action.py b/frc971/analysis/plot_action.py
deleted file mode 100755
index d157065..0000000
--- a/frc971/analysis/plot_action.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/python
-
-import sys
-import numpy
-from frc971.analysis.plotter import Plotter
-import argparse
-
-def ReadPlotDefinitions(filename):
-  """
-  Read a file with plotting definitions.
-
-  A plotting definition is a single line that defines what data to search for
-  in order to plot it. The following in a file would duplicate the default
-  behaviour:
-
-    fridge goal height
-    fridge goal angle
-    fridge goal velocity
-    fridge goal angular_velocity
-    fridge output left_arm
-    fridge output right_arm
-    fridge output left_elevator
-    fridge output right_elevator
-
-  Lines are ignored if they start with a hash mark (i.e. '#').
-
-  Lines that end with a "-b X" where X is a number then it designates that line
-  as plotting a boolean value. X is the value plotted when the boolean is true.
-  When the boolean is false then the values is plotted as zero. For example,
-  the following boolean value is drawn to toggle between 2.0 and 0 when the
-  boolean is True and False, respectively:
-
-    fridge status zeroed -b 2.0
-
-  Args:
-    filename: The name of the file to read the definitions from.
-
-  Returns:
-    [[str]]: The definitions in the specified file.
-  """
-  defs = []
-  with open(filename) as fd:
-    for line in fd:
-      raw_defs = line.split()
-
-      # Only add to the list of definitions if the line's not empty and it
-      # doesn't start with a hash.
-      if raw_defs and not raw_defs[0].startswith('#'):
-        defs.append(raw_defs)
-
-  return defs
-
-
-def maybeint(x):
-  try:
-    return int(x)
-  except ValueError:
-    return x
-
-
-def main():
-  # Parse all command line arguments.
-  arg_parser = argparse.ArgumentParser(description='Log Plotter')
-  arg_parser.add_argument('log_file', metavar='LOG_FILE', type=str, \
-      help='The file from which to read logs and plot.')
-  arg_parser.add_argument('--plot-defs', '-p', action='store', type=str, \
-      help='Read the items to plot from this file.')
-  arg_parser.add_argument('--no-binary', '-n', action='store_true', \
-      help='Don\'t print the binary name in the legend.')
-
-  args = arg_parser.parse_args(sys.argv[1:])
-
-  p = Plotter()
-
-  # If the user defines the list of data to plot in a file, read it from there.
-  if args.plot_defs:
-    defs = ReadPlotDefinitions(args.plot_defs)
-    for definition in defs:
-      mapped_definitions = map(maybeint, definition[2:])
-      p.Add(definition[0], definition[1], *mapped_definitions)
-
-  # Otherwise use a pre-defined set of data to plot.
-  else:
-    p.Add('fridge', 'goal', 'height')
-    p.Add('fridge', 'goal', 'angle')
-    p.Add('fridge', 'goal', 'velocity')
-    p.Add('fridge', 'goal', 'angular_velocity')
-
-    p.Add('fridge', 'output', 'left_arm')
-    p.Add('fridge', 'output', 'right_arm')
-    p.Add('fridge', 'output', 'left_elevator')
-    p.Add('fridge', 'output', 'right_elevator')
-
-  p.PlotFile(args.log_file, args.no_binary)
-
-if __name__ == '__main__':
-  main()
diff --git a/frc971/analysis/plotter.py b/frc971/analysis/plotter.py
deleted file mode 100755
index 6d23587..0000000
--- a/frc971/analysis/plotter.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/python
-
-from frc971.analysis.logreader import CollectingLogReader
-import matplotlib
-from matplotlib import pylab
-from matplotlib.font_manager import FontProperties
-
-class Plotter(CollectingLogReader):
-  """
-  A CollectingLogReader that plots collected data.
-  """
-
-  def PlotFile(self, f, no_binary_in_legend=False):
-    """
-    Parses and plots all the data.
-
-    Args:
-      f: str, The filename of the log whose data to parse and plot.
-
-    Returns:
-      None
-    """
-    self.HandleFile(f)
-    self.Plot(no_binary_in_legend)
-
-  def Plot(self, no_binary_in_legend):
-    """
-    Plots all the data after it's parsed.
-
-    This should only be called after `HandleFile` has been called so that there
-    is actual data to plot.
-    """
-    for key in self.signal:
-      value = self.signal[key]
-
-      # Create a legend label using the binary name (optional), the structure
-      # name and the data search path.
-      label = key[1] + '.' + '.'.join(str(x) for x in key[2])
-      if not no_binary_in_legend:
-        label = key[0] + ' ' + label
-
-      pylab.plot(value.time, value.data, label=label)
-
-    # Set legend font size to small and move it to the top center.
-    fontP = FontProperties()
-    fontP.set_size('small')
-    pylab.legend(bbox_to_anchor=(0.2, 1.10), prop=fontP)
-
-    pylab.show()
-