messageboard-2020-05-31-1356.py











                            <----SKIPPED LINES---->




  global VERSION_ARDUINO

  def MakeCopy(python_prefix):
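    """Copies the live .py file into the version repository, naming the copy
    with the file's last-modified timestamp, and returns the versioned name."""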
    file_extension = '.py'

    live_name = python_prefix + file_extension
    live_path = os.path.join(CODE_REPOSITORY, live_name)

    epoch = os.path.getmtime(live_path)
    last_modified_suffix = EpochDisplayTime(epoch, format_string='-%Y-%m-%d-%H%M')
    version_name = python_prefix + last_modified_suffix + file_extension
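    # e.g. 'messageboard' last modified at 2020-05-31 13:56 yields
    # 'messageboard-2020-05-31-1356.py'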
    version_path = os.path.join(VERSION_REPOSITORY, version_name)

    shutil.copyfile(live_path, version_path)
    return version_name

  VERSION_MESSAGEBOARD = MakeCopy('messageboard')
  VERSION_ARDUINO = MakeCopy('arduino')









def main():
  """Traffic cop between incoming radio flight messages, configuration, and messageboard.

  This is the main logic, checking for new flights, augmenting the radio signal with
  additional web-scraped data, and generating messages in a form presentable to the
  messageboard.
  """
  RemoveFile(LOGFILE_LOCK)
  VersionControl()

  # Since this clears log files, it should run before we start logging
  if '-s' in sys.argv:
    global SIMULATION_COUNTER
    SimulationSetup()

  last_heartbeat_time = HeartbeatRestart()


  # This flag slows down simulation time around a flight, which is great for debugging the Arduinos
  simulation_slowdown = bool('-f' in sys.argv)

  # Redirect any errors to a log file instead of the screen, and add a datestamp
  if not SIMULATION:
    sys.stderr = open(STDERR_FILE, 'a')
    Log('', STDERR_FILE)


  Log('Starting up process %d' % os.getpid())
  already_running_ids = FindRunningParents()
  if already_running_ids:
    for pid in already_running_ids:
      Log('Sending termination signal to %d' % pid)
      os.kill(pid, signal.SIGTERM)



  SetPinMode()

  configuration = ReadAndParseSettings(CONFIG_FILE)
  startup_time = time.time()
  json_desc_dict = {}


  flights = UnpickleObjectFromFile(PICKLE_FLIGHTS, True, max_days=MAX_INSIGHT_HORIZON_DAYS)
  # Clear the loaded flights of any cached data, identified by keys with a specific
  # suffix, since code fixes may have changed the values of some of those cached elements
  for flight in flights:
    for key in list(flight.keys()):
      if key.endswith(CACHED_ELEMENT_PREFIX):
        flight.pop(key)


  # If we're displaying just a single insight message, we want it to be as unique
  # as possible; this dict holds a count of the different types of messages
  # displayed so far
  insight_message_distribution = {}

  # Bootstrap the flight-insight distribution from the list of insights on each
  # flight (i.e., flight['insight_types'] for a given flight might look like
  # [1, 2, 7, 9], or [], indicating which insights were identified); this then
  # transforms those lists into {0: 25, 1: 18, ...}, summing across all flights.
  missing_insights = []
  for flight in flights:
    if 'insight_types' not in flight:
      missing_insights.append(
          '%s on %s' % (DisplayFlightNumber(flight), DisplayTime(flight, '%x %X')))
    distribution = flight.get('insight_types', [])
    for key in distribution:
      insight_message_distribution[key] = (
          insight_message_distribution.get(key, 0) + 1)
  if missing_insights:
    Log('Flights missing insight distributions: %s' % ';'.join(missing_insights))


  remote, servo, to_remote_q, to_servo_q, to_main_q, shutdown = InitArduinos(configuration)

  # used in simulation to print the hour of simulation once per simulated hour
  prev_simulated_hour = ''

  persistent_nearby_aircraft = {} # key = flight number; value = last seen epoch
  persistent_path = {}
  histogram = {}

  # Next up to print is index 0; this is a list of tuples:
  #   element 1: a flag indicating the type of message
  #   element 2: the message itself
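  # Example (purely illustrative): [(message_type_flag, 'text of the message'), ...]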
  message_queue = []
  next_message_time = time.time()

  # We repeat the loop every x seconds; this ensures that if the processing time is long,
  # we don't wait another x seconds after processing completes
  next_loop_time = time.time() + LOOP_DELAY_SECONDS

  # These files are read only if the version on disk has been modified more recently
  # than the last time it was read
  last_dump_json_timestamp = 0


  WaitUntilKillComplete(already_running_ids)




  Log('Finishing initialization of %d; starting radio polling loop' % os.getpid())
  while (not SIMULATION or SIMULATION_COUNTER < len(DUMP_JSONS)) and not SHUTDOWN_SIGNAL:

    last_heartbeat_time = Heartbeat(last_heartbeat_time)

    new_configuration = ReadAndParseSettings(CONFIG_FILE)
    CheckForNewFilterCriteria(configuration, new_configuration, message_queue, flights)
    configuration = new_configuration

    ResetLogs(configuration)  # clear the logs if requested

    # If this is a SIMULATION, process every diff dump. But if it isn't a simulation,
    # only read and process the next dump if the last-modified timestamp indicates
    # the file has been updated since it was last read.
    tmp_timestamp = 0
    if not SIMULATION:
      dump_json_exists = os.path.exists(DUMP_JSON_FILE)
      if dump_json_exists:
        tmp_timestamp = os.path.getmtime(DUMP_JSON_FILE)




                            <----SKIPPED LINES---->
















                            <----SKIPPED LINES---->




  global VERSION_ARDUINO

  def MakeCopy(python_prefix):
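    """Copies the live .py file into the version repository, naming the copy
    with the file's last-modified timestamp, and returns the versioned name."""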
    file_extension = '.py'

    live_name = python_prefix + file_extension
    live_path = os.path.join(CODE_REPOSITORY, live_name)

    epoch = os.path.getmtime(live_path)
    last_modified_suffix = EpochDisplayTime(epoch, format_string='-%Y-%m-%d-%H%M')
    version_name = python_prefix + last_modified_suffix + file_extension
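    # e.g. 'messageboard' last modified at 2020-05-31 13:56 yields
    # 'messageboard-2020-05-31-1356.py'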
    version_path = os.path.join(VERSION_REPOSITORY, version_name)

    shutil.copyfile(live_path, version_path)
    return version_name

  VERSION_MESSAGEBOARD = MakeCopy('messageboard')
  VERSION_ARDUINO = MakeCopy('arduino')


def LogInitTimes(init_times):
  """Logs the elapsed time between each pair of consecutive initialization readings."""
  msg_fragments = []
  for n in range(len(init_times) - 1):
    msg_fragments.append('%.2fs to get from reading %d to reading %d' % (
        init_times[n + 1] - init_times[n], n, n + 1))
  Log('; '.join(msg_fragments))


def main():
  """Traffic cop between incoming radio flight messages, configuration, and messageboard.

  This is the main logic, checking for new flights, augmenting the radio signal with
  additional web-scraped data, and generating messages in a form presentable to the
  messageboard.
  """
  RemoveFile(LOGFILE_LOCK)
  VersionControl()

  # Since this clears log files, it should run before we start logging
  if '-s' in sys.argv:
    global SIMULATION_COUNTER
    SimulationSetup()

  last_heartbeat_time = HeartbeatRestart()
  init_timing = [time.time()]  # time 0
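  # Subsequent readings are appended after each chunk of initialization below;
  # LogInitTimes reports the elapsed time between consecutive readings.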

  # This flag slows down simulation time around a flight, which is great for debugging the Arduinos
  simulation_slowdown = bool('-f' in sys.argv)

  # Redirect any errors to a log file instead of the screen, and add a datestamp
  if not SIMULATION:
    sys.stderr = open(STDERR_FILE, 'a')
    Log('', STDERR_FILE)

  init_timing.append(time.time())  # time 1
  Log('Starting up process %d' % os.getpid())
  already_running_ids = FindRunningParents()
  if already_running_ids:
    for pid in already_running_ids:
      Log('Sending termination signal to %d' % pid)
      os.kill(pid, signal.SIGTERM)
  init_timing.append(time.time())  # time 2

  
  SetPinMode()

  configuration = ReadAndParseSettings(CONFIG_FILE)
  startup_time = time.time()
  json_desc_dict = {}

  init_timing.append(time.time())  # time 3
  flights = UnpickleObjectFromFile(PICKLE_FLIGHTS, True, max_days=MAX_INSIGHT_HORIZON_DAYS)
  # Clear the loaded flights of any cached data, identified by keys with a specific
  # suffix, since code fixes may have changed the values of some of those cached elements
  for flight in flights:
    for key in list(flight.keys()):
      if key.endswith(CACHED_ELEMENT_PREFIX):
        flight.pop(key)
  init_timing.append(time.time())  # time 4

  # If we're displaying just a single insight message, we want it to be as unique
  # as possible; this dict holds a count of the different types of messages
  # displayed so far
  insight_message_distribution = {}

  # Bootstrap the flight-insight distribution from the list of insights on each
  # flight (i.e., flight['insight_types'] for a given flight might look like
  # [1, 2, 7, 9], or [], indicating which insights were identified); this then
  # transforms those lists into {0: 25, 1: 18, ...}, summing across all flights.
  missing_insights = []
  for flight in flights:
    if 'insight_types' not in flight:
      missing_insights.append(
          '%s on %s' % (DisplayFlightNumber(flight), DisplayTime(flight, '%x %X')))
    distribution = flight.get('insight_types', [])
    for key in distribution:
      insight_message_distribution[key] = (
          insight_message_distribution.get(key, 0) + 1)
  if missing_insights:
    Log('Flights missing insight distributions: %s' % ';'.join(missing_insights))
  init_timing.append(time.time())  # time 5

  remote, servo, to_remote_q, to_servo_q, to_main_q, shutdown = InitArduinos(configuration)

  # used in simulation to print the hour of simulation once per simulated hour
  prev_simulated_hour = ''

  persistent_nearby_aircraft = {} # key = flight number; value = last seen epoch
  persistent_path = {}
  histogram = {}

  # Next up to print is index 0; this is a list of tuples:
  #   element 1: a flag indicating the type of message
  #   element 2: the message itself
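  # Example (purely illustrative): [(message_type_flag, 'text of the message'), ...]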
  message_queue = []
  next_message_time = time.time()

  # We repeat the loop every x seconds; this ensures that if the processing time is long,
  # we don't wait another x seconds after processing completes
  next_loop_time = time.time() + LOOP_DELAY_SECONDS

  # These files are read only if the version on disk has been modified more recently
  # than the last time it was read
  last_dump_json_timestamp = 0

  init_timing.append(time.time())  # time 6
  WaitUntilKillComplete(already_running_ids)
  init_timing.append(time.time())  # time 7

  LogInitTimes(init_timing)

  Log('Finishing initialization of %d; starting radio polling loop' % os.getpid())
  while (not SIMULATION or SIMULATION_COUNTER < len(DUMP_JSONS)) and not SHUTDOWN_SIGNAL:

    last_heartbeat_time = Heartbeat(last_heartbeat_time)

    new_configuration = ReadAndParseSettings(CONFIG_FILE)
    CheckForNewFilterCriteria(configuration, new_configuration, message_queue, flights)
    configuration = new_configuration

    ResetLogs(configuration)  # clear the logs if requested

    # If this is a SIMULATION, process every diff dump. But if it isn't a simulation,
    # only read and process the next dump if the last-modified timestamp indicates
    # the file has been updated since it was last read.
    tmp_timestamp = 0
    if not SIMULATION:
      dump_json_exists = os.path.exists(DUMP_JSON_FILE)
      if dump_json_exists:
        tmp_timestamp = os.path.getmtime(DUMP_JSON_FILE)




                            <----SKIPPED LINES---->