messageboard-2020-05-31-1557.py
                            <----SKIPPED LINES---->




AIRCRAFT_LENGTH['Boeing 737-800 (twin-jet)'] = 39.47
AIRCRAFT_LENGTH['Boeing 737-900 (twin-jet)'] = 42.11
AIRCRAFT_LENGTH['Boeing 747-400 (quad-jet)'] = 70.66
AIRCRAFT_LENGTH['Boeing 747-8 (quad-jet)'] = 76.25
AIRCRAFT_LENGTH['Boeing 757-200 (twin-jet)'] = 47.3
AIRCRAFT_LENGTH['Boeing 757-300 (twin-jet)'] = 54.4
AIRCRAFT_LENGTH['Boeing 767-200 (twin-jet)'] = 48.51
AIRCRAFT_LENGTH['Boeing 767-300 (twin-jet)'] = 54.94
AIRCRAFT_LENGTH['Boeing 777 (twin-jet)'] = (63.73 + 73.86) / 2
AIRCRAFT_LENGTH['Boeing 777-200 (twin-jet)'] = 63.73
AIRCRAFT_LENGTH['Boeing 777-200LR/F (twin-jet)'] = 63.73
AIRCRAFT_LENGTH['Boeing 777-300ER (twin-jet)'] = 73.86
AIRCRAFT_LENGTH['Boeing 787-10 (twin-jet)'] = 68.28
AIRCRAFT_LENGTH['Boeing 787-8 (twin-jet)'] = 56.72
AIRCRAFT_LENGTH['Boeing 787-9 (twin-jet)'] = 62.81
AIRCRAFT_LENGTH['Canadair Regional Jet CRJ-200 (twin-jet)'] = 26.77
AIRCRAFT_LENGTH['Canadair Regional Jet CRJ-700 (twin-jet)'] = 32.3
AIRCRAFT_LENGTH['Canadair Regional Jet CRJ-900 (twin-jet)'] = 36.2
AIRCRAFT_LENGTH['Canadair Challenger 350 (twin-jet)'] = 20.9
AIRCRAFT_LENGTH['Bombardier Challenger 300 (twin-jet)'] = 20.92

AIRCRAFT_LENGTH['Embraer 170/175 (twin-jet)'] = (29.90 + 31.68) / 2
AIRCRAFT_LENGTH['Embraer Phenom 300 (twin-jet)'] = 15.9
AIRCRAFT_LENGTH['EMBRAER 175 (long wing) (twin-jet)'] = 31.68
AIRCRAFT_LENGTH['Embraer ERJ-135 (twin-jet)'] = 26.33
AIRCRAFT_LENGTH['Cessna Caravan (single-turboprop)'] = 11.46
AIRCRAFT_LENGTH['Cessna Citation CJ2+ (twin-jet)'] = 14.53

AIRCRAFT_LENGTH['Cessna Citation II (twin-jet)'] = 14.54
AIRCRAFT_LENGTH['Cessna Citation Latitude (twin-jet)'] = 18.97
AIRCRAFT_LENGTH['Cessna Citation Sovereign (twin-jet)'] = 19.35
AIRCRAFT_LENGTH['Cessna Citation V (twin-jet)'] = 14.91
AIRCRAFT_LENGTH['Cessna Citation X (twin-jet)'] = 22.04
AIRCRAFT_LENGTH['Cessna Skyhawk (piston-single)'] = 8.28
AIRCRAFT_LENGTH['Cessna Skylane (piston-single)'] = 8.84
AIRCRAFT_LENGTH['Cessna T206 Turbo Stationair (piston-single)'] = 8.61
AIRCRAFT_LENGTH['Beechcraft Bonanza (33) (piston-single)'] = 7.65
AIRCRAFT_LENGTH['Beechcraft Super King Air 200 (twin-turboprop)'] = 13.31
AIRCRAFT_LENGTH['Beechcraft Super King Air 350 (twin-turboprop)'] = 14.22
AIRCRAFT_LENGTH['Beechcraft King Air 90 (twin-turboprop)'] = 10.82
AIRCRAFT_LENGTH['Learjet 45 (twin-jet)'] = 17.68
AIRCRAFT_LENGTH['Pilatus PC-12 (single-turboprop)'] = 14.4
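

# Minimal lookup sketch (an assumption about how this table is consumed, not a call made
# in this file): fall back to a rough default length when the model string reported by
# the radio is missing from the table. The 30.0m default is hypothetical.
def _AircraftLengthOrDefault(model, default_meters=30.0):
  return AIRCRAFT_LENGTH.get(model, default_meters)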


def Log(message, file=None, rolling=None):
  """Write a message to a logfile along with a timestamp.

  Args:
    message: string message to write
    file: name (and, if needed, path) of the file to write to
    rolling: name of a file that will keep only the last n lines of file
  """
  # can't define as a default parameter because LOGFILE name is potentially
  # modified based on SIMULATION flag
  if not file:
    file = LOGFILE

  # special case: for the main logfile, we always keep a rolling log
  if not rolling and file == LOGFILE:
    rolling = ROLLING_LOGFILE

  #if file == LOGFILE:
  #  lock = filelock.FileLock(LOGFILE_LOCK)
  #  lock.acquire()

  try:
    with open(file, 'a') as f:
      # by excluding the timestamp, file diffs become easier between runs
      if not SIMULATION or file == LOGFILE:
        f.write('='*80+'\n')
        f.write(str(datetime.datetime.now(TZ))+'\n')
        f.write('\n')
      f.write(str(message)+'\n')
  except IOError:
    Log('Unable to append to ' + file)

  if rolling:
    existing_log_lines = ReadFile(file).splitlines()
    with open(rolling, 'w') as f:
      f.write('\n'.join(existing_log_lines[-1000:]))

  #if file == LOGFILE:
  #  lock.release()










def MaintainRollingWebLog(message, max_count, filename=None):
  """Maintains a rolling text file of at most max_count printed messages.

  Newest data is at the top and oldest at the end, with at most max_count messages,
  where each message is delimited by a special fixed string.

  Args:
    message: text message to prepend to the file.
    max_count: maximum number of messages to keep in the file; the max_count+1st message
      is deleted.
    filename: the file to update.
  """
  # can't define as a default parameter because ROLLING_MESSAGE_FILE name is potentially
  # modified based on SIMULATION flag
  if not filename:
    filename = ROLLING_MESSAGE_FILE
  rolling_log_header = '='*(SPLITFLAP_CHARS_PER_LINE + 2)
  existing_file = ReadFile(filename)




                            <----SKIPPED LINES---->




    newly_nearby_flight_identifiers = UpdateAircraftList(
        persistent_nearby_aircraft, current_nearby_aircraft, now)

    if newly_nearby_flight_identifiers:

      if len(newly_nearby_flight_identifiers) > 1:
        newly_nearby_flight_identifiers_str = ', '.join(newly_nearby_flight_identifiers)
        newly_nearby_flight_details_str = '\n'.join(
            [str(current_nearby_aircraft[f]) for f in newly_nearby_flight_identifiers])
        Log('Multiple newly-nearby flights: %s\n%s' % (
            newly_nearby_flight_identifiers_str, newly_nearby_flight_details_str))
      flight_identifier = newly_nearby_flight_identifiers[0]

      flight_aware_json = {}
      if SIMULATION:
        json_times = [j[1] for j in FA_JSONS]
        if json_time in json_times:
          flight_aware_json = FA_JSONS[json_times.index(json_time)][0]
      elif flight_identifier[0]:
        flight_number = flight_identifier[0]
        flight_aware_json = GetFlightAwareJson(flight_number)
        if flight_aware_json:
          UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, False)
        else:
          failure_message = 'No json from Flightaware for flight: %s' % flight_number

          Log(failure_message)
          UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, True, failure_message)

      flight_details = {}
      if flight_aware_json:
        flight_details = ParseFlightAwareJson(flight_aware_json)

      if not SIMULATION and log_jsons:
        PickleObjectToFile((flight_aware_json, now), PICKLE_FA_JSON_FILE, True)

      # Augment FlightAware details with radio / radio-derived details
      flight_details.update(current_nearby_aircraft[flight_identifier])

      # Augment with the past location data; the [1] is because
      # persistent_path[key] is actually a 2-tuple, whose first element is the
      # most recent time seen and whose second element is the actual path. We
      # no longer need the most recent time seen here.
      flight_details['persistent_path'] = persistent_path[flight_identifier][1]





                            <----SKIPPED LINES---->




    Dictionary with attributes about radio range, number of flights seen, etc.
  """
  json_desc_dict = {}
  json_desc_dict['now'] = parsed['now']

  aircraft = [a for a in parsed['aircraft'] if a['seen'] < PERSISTENCE_SECONDS]
  json_desc_dict['radio_range_flights'] = len(aircraft)

  aircraft_with_pos = [a for a in aircraft if 'lat' in a and 'lon' in a]
  current_distances = [HaversineDistanceMeters(
      HOME, (a['lat'], a['lon'])) for a in aircraft_with_pos]
  current_distances = [
      d * FEET_IN_METER / FEET_IN_MILE for d in current_distances if d is not None]
  if current_distances:
    json_desc_dict['radio_range_miles'] = max(current_distances)

  return json_desc_dict


def MergedIdentifier(proposed_id, existing_ids):























  flight_number, squawk = proposed_id

  def CheckPartialMatch(value, position):
    if value is not None:
      return [e for e in existing_ids if e[position] == value and e != proposed_id]
    return []

  matches = CheckPartialMatch(flight_number, 0)
  matches.extend(CheckPartialMatch(squawk, 1))

  if not matches:
    return proposed_id, None

  if not flight_number and matches:
    # arbitrarily choose alpha-first non-null flight_number
    matching_flight_numbers = [m[0] for m in matches if m[0] is not None]
    if matching_flight_numbers:
      flight_number = sorted(matching_flight_numbers)[0]
  if not squawk and matches:
    # arbitrarily choose alpha-first non-null squawk
    matching_squawks = [m[1] for m in matches if m[1] is not None]
    if matching_squawks:
      squawk = sorted(matching_squawks)[0]
  id_to_use = (flight_number, squawk)

  verbose = False
  if verbose:
    message_parts = []
    if proposed_id != id_to_use:
      message_parts.append('Proposed id %s replaced with %s' % (proposed_id, id_to_use))
    if id_to_use in matches:
      matches.remove(id_to_use)
    if matches:
      message_parts.append('%s should be merged with %s' % (matches, id_to_use))
    message = '; '.join(message_parts)
    Log(message)

  return id_to_use, matches


def MergePersistentPath(id_to_use, ids_to_merge, persistent_path):
  verbose = False























  path = []
  timestamps = []

  if id_to_use in persistent_path and id_to_use not in ids_to_merge:
    ids_to_merge.append(id_to_use)

  if verbose:
    print('ids_to_merge: %s' % ids_to_merge)

  for i in ids_to_merge:
    if verbose:
      print('path for %s: %s' % (i, persistent_path[i]))
      print('')
    timestamps.append(persistent_path[i][0])
    path.extend(persistent_path[i][1])
    persistent_path.pop(i)
  persistent_path[id_to_use] = (max(timestamps), sorted(path, key=lambda p: p['now']))
  if verbose:
    print('new path path for %s: %s' % (id_to_use, persistent_path[id_to_use]))
    print('='*80)
    print('')

  return persistent_path


def ParseDumpJson(dump_json, persistent_path):
  """Identifies all airplanes within given distance of home from the dump1090 file.

  Since the dump1090 json will have messages from all flights that the antenna has picked
  up, we want to keep only flights that are within a relevant distance to us, and also to
  extract from the full set of data in the json to just the relevant fields for additional
  analysis.

  While most flights have both a squawk and a flight number, enough are missing one only
  for it to appear later to want to use a 2-tuple of both as an identifier, merging
  flights if they share a common non-null flight number and/or squawk, as the persistent
  identifier across time.

  Args:
    dump_json: The text representation of the json message from dump1090-mutability
    persistent_path: dictionary where keys are flight numbers, and the values are a




                            <----SKIPPED LINES---->





  # if the flight was last seen too far in the past, remove the track info
  for f in list(persistent_path.keys()):
    (last_seen, current_path) = persistent_path[f]
    if last_seen < now - PERSISTENCE_SECONDS:
      persistent_path.pop(f)

  return (nearby_aircraft, now, json_desc_dict, persistent_path)


def GetFlightAwareJson(flight_number):
  """Scrapes the text json message from FlightAware for a given flight number.

  Given a flight number, loads the corresponding FlightAware webpage for that flight and
  extracts the relevant script that contains all the flight details from that page.

  Args:
    flight_number: text flight number (e.g.: SWA1234)

  Returns:
    Text representation of the json message from FlightAware.
  """
  url = 'https://flightaware.com/live/flight/' + flight_number
  try:
    response = requests.get(url)
  except requests.exceptions.RequestException as e:
    Log('Unable to query FA for URL due to %s: %s' % (e, url))
    return ''

  soup = bs4.BeautifulSoup(response.text, 'html.parser')
  scripts = soup.find_all('script')
  flight_script = None
  for script in scripts:
    if "trackpollBootstrap" in str(script):
      flight_script = str(script)
      break
  if not flight_script:
    Log('Unable to find trackpollBootstrap script in page: ' + response.text)
    return ''

  first_open_curly_brace = flight_script.find('{')
  last_close_curly_brace = flight_script.rfind('}')
  flight_json = flight_script[first_open_curly_brace:last_close_curly_brace+1]
  return flight_json


def Unidecode(s):
  """Convert a special unicode characters to closest ASCII representation."""
  if s is not None:
    s = unidecode.unidecode(s)
  return s


def ParseFlightAwareJson(flight_json):
  """Strips relevant data about the flight from FlightAware feed.

  The FlightAware json has hundreds of fields about a flight, only a fraction of which
  are relevant to extract. Note that some of the fields are inconsistently populated
  (i.e.: scheduled and actual times for departure and take-off).

  Args:
    flight_json: Text representation of the FlightAware json about a single flight.

  Returns:




                            <----SKIPPED LINES---->




    msg = pin[2]
  if RASPBERRY_PI:
    RPi.GPIO.output(pin[0], value)
    if value:
      pin_setting = 'HIGH'
      relay_light_value = 'OFF'
    else:
      pin_setting = 'LOW'
      relay_light_value = 'ON'
    msg += '; RPi GPIO pin %d set to %s; relay light #%d should now be %s' % (
        pin[0], pin_setting, pin[3], relay_light_value)

  if pin_values[pin[0]] != value:
    if VERBOSE:
      Log(msg)  # log
    pin_values[pin[0]] = value  # update cache
    UpdateDashboard(value, subsystem=pin, failure_message=failure_message)


def UpdateDashboard(value, subsystem=0, failure_message=''):

















  versions = (VERSION_MESSAGEBOARD, VERSION_ARDUINO)
  if subsystem:
    subsystem = subsystem[0]
  PickleObjectToFile(
      (time.time(), subsystem, value, versions, failure_message),
      PICKLE_DASHBOARD, True)


def RemoveFile(file):
  """Removes a file if it exists, returning a boolean indicating if it had existed."""
  if os.path.exists(file):
    os.remove(file)
    return True
  return False


def ConfirmNewFlight(flight, flights):
  """Replaces last-seen flight with new flight if otherwise identical but for identifiers.

  Flights are identified by the radio over time by a tuple of identifiers: flight_number




                            <----SKIPPED LINES---->




            DisplayTime(last_flight, '%x'), DisplayTime(flight, '%x')))

  Log(message)

  args = (PICKLE_FLIGHTS, not SIMULATION, max_days)
  saved_flights = UnpickleObjectFromFile(*args)[:-1]
  files_to_overwrite = UnpickleObjectFromFile(*args, filenames=True)

  for file in files_to_overwrite:
    os.remove(file)
  for f in saved_flights:
    # we would like to use verify=True, but that's too slow without further optimizing the
    # verification step for a loop of data
    PickleObjectToFile(
        f, PICKLE_FLIGHTS, True, timestamp=f['now'], verify=False)

  return False


def HeartbeatRestart():

  if SIMULATION:
    return 0
  UpdateDashboard(True)  # Indicates that this wasn't running a moment before, ...
  UpdateDashboard(False)  # ... and now it is running!
  return time.time()


def Heartbeat(last_heartbeat_time):

  if SIMULATION:
    return last_heartbeat_time
  now = time.time()
  if now - last_heartbeat_time > HEARTBEAT_SECONDS:
    UpdateDashboard(False)
    last_heartbeat_time = now
  return last_heartbeat_time


def VersionControl():
  global VERSION_MESSAGEBOARD
  global VERSION_ARDUINO





  def MakeCopy(python_prefix):
    file_extension = '.py'

    live_name = python_prefix + '.py'
    live_path = os.path.join(CODE_REPOSITORY, live_name)

    epoch = os.path.getmtime(live_path)
    last_modified_suffix = EpochDisplayTime(epoch, format_string='-%Y-%m-%d-%H%M')
    version_name = python_prefix + last_modified_suffix + file_extension
    version_path = os.path.join(VERSION_REPOSITORY, version_name)


    shutil.copyfile(live_path, version_path)
    return version_name



  VERSION_MESSAGEBOARD = MakeCopy('messageboard')
  VERSION_ARDUINO = MakeCopy('arduino')


def LogInitTimes(init_times):
  msg = ''
  for n, t in enumerate(init_times[:-1]):
    msg += '%.2fs to get from reading %d to reading %s' % (init_times[n + 1] - init_times[n], n, n + 1)
  Log(msg)


def main():
  """Traffic cop between incoming radio flight messages, configuration, and messageboard.

  This is the main logic, checking for new flights, augmenting the radio signal with
  additional web-scraped data, and generating messages in a form presentable to the
  messageboard.
  """
  RemoveFile(LOGFILE_LOCK)
  VersionControl()

  # Since this clears log files, it should occur before we start logging
  if '-s' in sys.argv:
    global SIMULATION_COUNTER
    SimulationSetup()

  last_heartbeat_time = HeartbeatRestart()
  init_timing = [time.time()]  # time 0

  # This flag slows down simulation time around a flight, great for debugging the arduinos
  simulation_slowdown = bool('-f' in sys.argv)

  # Redirect any errors to a log file instead of the screen, and add a datestamp
  if not SIMULATION:
    sys.stderr = open(STDERR_FILE, 'a')
    Log('', STDERR_FILE)

  init_timing.append(time.time())  # time 1
  Log('Starting up process %d' % os.getpid())
  already_running_ids = FindRunningParents()
  if already_running_ids:
    for pid in already_running_ids:
      Log('Sending termination signal to %d' % pid)
      os.kill(pid, signal.SIGTERM)
  init_timing.append(time.time())  # time 2
  
  SetPinMode()

  configuration = ReadAndParseSettings(CONFIG_FILE)
  startup_time = time.time()
  json_desc_dict = {}

  init_timing.append(time.time())  # time 3
  flights = UnpickleObjectFromFile(PICKLE_FLIGHTS, True, max_days=MAX_INSIGHT_HORIZON_DAYS)
  # Clear the loaded flights of any cached data, identified by keys with a specific
  # suffix, since code fixes may change the values for some of those cached elements
  for flight in flights:
    for key in list(flight.keys()):
      if key.endswith(CACHED_ELEMENT_PREFIX):
        flight.pop(key)
  init_timing.append(time.time())  # time 4

  # If we're displaying just a single insight message, we want it to be something
  # unique, to the extent possible; this dict holds a count of the different types of
  # messages displayed so far
  insight_message_distribution = {}




                            <----SKIPPED LINES---->




  histogram = {}

  # Next up to print is index 0; this is a list of tuples:
  # tuple element#1: flag indicating the type of message that this is
  # tuple element#2: the message itself
  message_queue = []
  next_message_time = time.time()

  # We repeat the loop every x seconds; this ensures that if the processing time is long,
  # we don't wait another x seconds after processing completes
  next_loop_time = time.time() + LOOP_DELAY_SECONDS

  # These files are read only if the version on disk has been modified more recently
  # than the last time they were read
  last_dump_json_timestamp = 0

  init_timing.append(time.time())  # time 6
  WaitUntilKillComplete(already_running_ids)
  init_timing.append(time.time())  # time 7

  LogInitTimes(init_timing)

  Log('Finishing initialization of %d; starting radio polling loop' % os.getpid())
  while (not SIMULATION or SIMULATION_COUNTER < len(DUMP_JSONS)) and not SHUTDOWN_SIGNAL:

    last_heartbeat_time = Heartbeat(last_heartbeat_time)

    new_configuration = ReadAndParseSettings(CONFIG_FILE)
    CheckForNewFilterCriteria(configuration, new_configuration, message_queue, flights)
    configuration = new_configuration

    ResetLogs(configuration)  # clear the logs if requested

    # if this is a SIMULATION, then process every diff dump. But if it isn't a simulation,
    # then only read & do related processing for the next dump if the last-modified
    # timestamp indicates the file has been updated since it was last read.
    tmp_timestamp = 0
    if not SIMULATION:
      dump_json_exists = os.path.exists(DUMP_JSON_FILE)
      if dump_json_exists:
        tmp_timestamp = os.path.getmtime(DUMP_JSON_FILE)




                            <----SKIPPED LINES---->





                            <----SKIPPED LINES---->




AIRCRAFT_LENGTH['Boeing 737-800 (twin-jet)'] = 39.47
AIRCRAFT_LENGTH['Boeing 737-900 (twin-jet)'] = 42.11
AIRCRAFT_LENGTH['Boeing 747-400 (quad-jet)'] = 70.66
AIRCRAFT_LENGTH['Boeing 747-8 (quad-jet)'] = 76.25
AIRCRAFT_LENGTH['Boeing 757-200 (twin-jet)'] = 47.3
AIRCRAFT_LENGTH['Boeing 757-300 (twin-jet)'] = 54.4
AIRCRAFT_LENGTH['Boeing 767-200 (twin-jet)'] = 48.51
AIRCRAFT_LENGTH['Boeing 767-300 (twin-jet)'] = 54.94
AIRCRAFT_LENGTH['Boeing 777 (twin-jet)'] = (63.73 + 73.86) / 2
AIRCRAFT_LENGTH['Boeing 777-200 (twin-jet)'] = 63.73
AIRCRAFT_LENGTH['Boeing 777-200LR/F (twin-jet)'] = 63.73
AIRCRAFT_LENGTH['Boeing 777-300ER (twin-jet)'] = 73.86
AIRCRAFT_LENGTH['Boeing 787-10 (twin-jet)'] = 68.28
AIRCRAFT_LENGTH['Boeing 787-8 (twin-jet)'] = 56.72
AIRCRAFT_LENGTH['Boeing 787-9 (twin-jet)'] = 62.81
AIRCRAFT_LENGTH['Canadair Regional Jet CRJ-200 (twin-jet)'] = 26.77
AIRCRAFT_LENGTH['Canadair Regional Jet CRJ-700 (twin-jet)'] = 32.3
AIRCRAFT_LENGTH['Canadair Regional Jet CRJ-900 (twin-jet)'] = 36.2
AIRCRAFT_LENGTH['Canadair Challenger 350 (twin-jet)'] = 20.9
AIRCRAFT_LENGTH['Bombardier Challenger 300 (twin-jet)'] = 20.92
AIRCRAFT_LENGTH['Bombardier Global Express (twin-jet)'] = (29.5 + 30.3) / 2
AIRCRAFT_LENGTH['Embraer 170/175 (twin-jet)'] = (29.90 + 31.68) / 2
AIRCRAFT_LENGTH['Embraer Phenom 300 (twin-jet)'] = 15.9
AIRCRAFT_LENGTH['EMBRAER 175 (long wing) (twin-jet)'] = 31.68
AIRCRAFT_LENGTH['Embraer ERJ-135 (twin-jet)'] = 26.33
AIRCRAFT_LENGTH['Cessna Caravan (single-turboprop)'] = 11.46
AIRCRAFT_LENGTH['Cessna Citation CJ2+ (twin-jet)'] = 14.53
AIRCRAFT_LENGTH['Cessna Citation CJ3 (twin-jet)'] = 15.59
AIRCRAFT_LENGTH['Cessna Citation II (twin-jet)'] = 14.54
AIRCRAFT_LENGTH['Cessna Citation Latitude (twin-jet)'] = 18.97
AIRCRAFT_LENGTH['Cessna Citation Sovereign (twin-jet)'] = 19.35
AIRCRAFT_LENGTH['Cessna Citation V (twin-jet)'] = 14.91
AIRCRAFT_LENGTH['Cessna Citation X (twin-jet)'] = 22.04
AIRCRAFT_LENGTH['Cessna Skyhawk (piston-single)'] = 8.28
AIRCRAFT_LENGTH['Cessna Skylane (piston-single)'] = 8.84
AIRCRAFT_LENGTH['Cessna T206 Turbo Stationair (piston-single)'] = 8.61
AIRCRAFT_LENGTH['Beechcraft Bonanza (33) (piston-single)'] = 7.65
AIRCRAFT_LENGTH['Beechcraft Super King Air 200 (twin-turboprop)'] = 13.31
AIRCRAFT_LENGTH['Beechcraft Super King Air 350 (twin-turboprop)'] = 14.22
AIRCRAFT_LENGTH['Beechcraft King Air 90 (twin-turboprop)'] = 10.82
AIRCRAFT_LENGTH['Learjet 45 (twin-jet)'] = 17.68
AIRCRAFT_LENGTH['Pilatus PC-12 (single-turboprop)'] = 14.4


def Log(message, file=None, rolling=None):
  """Write a message to a logfile along with a timestamp.

  Args:
    message: string message to write
    file: name (and, if needed, path) of the file to write to
    rolling: name of a file that will keep only the last n lines of file
  """
  # can't define as a default parameter because LOGFILE name is potentially
  # modified based on SIMULATION flag
  if not file:
    file = LOGFILE

  # special case: for the main logfile, we always keep a rolling log
  if not rolling and file == LOGFILE:
    rolling = ROLLING_LOGFILE

  if file == LOGFILE:
    lock = filelock.FileLock(LOGFILE_LOCK)
    lock.acquire()

  try:
    with open(file, 'a') as f:
      # by excluding the timestamp, file diffs become easier between runs
      if not SIMULATION or file == LOGFILE:
        f.write('='*80+'\n')
        f.write(str(datetime.datetime.now(TZ))+'\n')
        f.write('\n')
      f.write(str(message)+'\n')
  except IOError:
    Log('Unable to append to ' + file)

  if rolling:
    existing_log_lines = ReadFile(file).splitlines()
    with open(rolling, 'w') as f:
      f.write('\n'.join(existing_log_lines[-1000:]))

  if file == LOGFILE:
    lock.release()
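

# Illustrative sketch only (not called anywhere in this file): write a message to a
# secondary logfile while keeping a trimmed rolling copy alongside it. The file names
# used here are hypothetical.
def _DemoLog():
  Log('radio restarted', file='/tmp/demo.log', rolling='/tmp/demo-rolling.log')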


def LogTimes(times):
  """Logs elapsed time messages from a list of epochs."""
  msg = ''
  for n, t in enumerate(times[:-1]):
    msg += '%.2fs to get from reading %d to reading %s\n' % (times[n + 1] - t, n, n + 1)
  Log(msg)


def MaintainRollingWebLog(message, max_count, filename=None):
  """Maintains a rolling text file of at most max_count printed messages.

  Newest data is at the top and oldest at the end, with at most max_count messages,
  where each message is delimited by a special fixed string.

  Args:
    message: text message to prepend to the file.
    max_count: maximum number of messages to keep in the file; the max_count+1st message
      is deleted.
    filename: the file to update.
  """
  # can't define as a default parameter because ROLLING_MESSAGE_FILE name is potentially
  # modified based on SIMULATION flag
  if not filename:
    filename = ROLLING_MESSAGE_FILE
  rolling_log_header = '='*(SPLITFLAP_CHARS_PER_LINE + 2)
  existing_file = ReadFile(filename)




                            <----SKIPPED LINES---->




    newly_nearby_flight_identifiers = UpdateAircraftList(
        persistent_nearby_aircraft, current_nearby_aircraft, now)

    if newly_nearby_flight_identifiers:

      if len(newly_nearby_flight_identifiers) > 1:
        newly_nearby_flight_identifiers_str = ', '.join(newly_nearby_flight_identifiers)
        newly_nearby_flight_details_str = '\n'.join(
            [str(current_nearby_aircraft[f]) for f in newly_nearby_flight_identifiers])
        Log('Multiple newly-nearby flights: %s\n%s' % (
            newly_nearby_flight_identifiers_str, newly_nearby_flight_details_str))
      flight_identifier = newly_nearby_flight_identifiers[0]

      flight_aware_json = {}
      if SIMULATION:
        json_times = [j[1] for j in FA_JSONS]
        if json_time in json_times:
          flight_aware_json = FA_JSONS[json_times.index(json_time)][0]
      elif flight_identifier[0]:
        flight_number = flight_identifier[0]
        flight_aware_json, error_message = GetFlightAwareJson(flight_number)
        if flight_aware_json:
          UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, False)
        else:
          failure_message = 'No json from Flightaware for flight %s: %s' % (
              flight_number, error_message[:500])
          Log(failure_message)
          UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, True, failure_message)

      flight_details = {}
      if flight_aware_json:
        flight_details = ParseFlightAwareJson(flight_aware_json)

      if not SIMULATION and log_jsons:
        PickleObjectToFile((flight_aware_json, now), PICKLE_FA_JSON_FILE, True)

      # Augment FlightAware details with radio / radio-derived details
      flight_details.update(current_nearby_aircraft[flight_identifier])

      # Augment with the past location data; the [1] is because
      # persistent_path[key] is actually a 2-tuple, whose first element is the
      # most recent time seen and whose second element is the actual path. We
      # no longer need the most recent time seen here.
      flight_details['persistent_path'] = persistent_path[flight_identifier][1]





                            <----SKIPPED LINES---->




    Dictionary with attributes about radio range, number of flights seen, etc.
  """
  json_desc_dict = {}
  json_desc_dict['now'] = parsed['now']

  aircraft = [a for a in parsed['aircraft'] if a['seen'] < PERSISTENCE_SECONDS]
  json_desc_dict['radio_range_flights'] = len(aircraft)

  aircraft_with_pos = [a for a in aircraft if 'lat' in a and 'lon' in a]
  current_distances = [HaversineDistanceMeters(
      HOME, (a['lat'], a['lon'])) for a in aircraft_with_pos]
  current_distances = [
      d * FEET_IN_METER / FEET_IN_MILE for d in current_distances if d is not None]
  if current_distances:
    json_desc_dict['radio_range_miles'] = max(current_distances)

  return json_desc_dict


def MergedIdentifier(proposed_id, existing_ids):
  """Identifies what identifier to use for a flight.

  While most flights have both a squawk and a flight number, enough are missing one
  (which may only appear later) that we use a 2-tuple of the two as the persistent
  identifier across time, merging flights if they share a common non-null flight number
  and/or squawk.

  Additionally, in very limited circumstances, a squawk may change mid-flight; in that
  case, the alphabetically-first squawk is used.

  This function identifies which identifier to use, and which - if any - should be merged
  into that one identifier from a group of existing identifiers.

  Args:
    proposed_id: The 2-tuple of (flight_number, squawk) of the identified flight.
    existing_ids: An iterable of existing 2-tuple identifiers, some (or none) of which
      may overlap with this flight.

  Returns:
    2-tuple:
      - the 2-tuple suggested identifier to use
      - a potentially empty list of ids to merge with the suggested identifier
  """
  flight_number, squawk = proposed_id

  def CheckPartialMatch(value, position):
    if value is not None:
      return [e for e in existing_ids if e[position] == value and e != proposed_id]
    return []

  matches = CheckPartialMatch(flight_number, 0)
  matches.extend(CheckPartialMatch(squawk, 1))

  if not matches:
    return proposed_id, []

  if not flight_number and matches:
    # arbitrarily choose alpha-first non-null flight_number
    matching_flight_numbers = [m[0] for m in matches if m[0] is not None]
    if matching_flight_numbers:
      flight_number = sorted(matching_flight_numbers)[0]
  if not squawk and matches:
    # arbitrarily choose alpha-first non-null squawk
    matching_squawks = [m[1] for m in matches if m[1] is not None]
    if matching_squawks:
      squawk = sorted(matching_squawks)[0]
  id_to_use = (flight_number, squawk)













  return id_to_use, matches
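

# Worked example (hypothetical identifiers, for illustration only): a flight first seen
# with only a flight number and separately with only a squawk is merged once a message
# carrying both identifiers arrives.
def _DemoMergedIdentifier():
  existing_ids = [('SWA1234', None), (None, '7421')]
  id_to_use, ids_to_merge = MergedIdentifier(('SWA1234', '7421'), existing_ids)
  # id_to_use == ('SWA1234', '7421')
  # ids_to_merge == [('SWA1234', None), (None, '7421')]
  return id_to_use, ids_to_merge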


def MergePersistentPath(id_to_use, ids_to_merge, persistent_path):
  """Merges the persistent paths from multiple flights into a single flight.

  Since the identifiers of a flight for which we have already recorded some in-flight
  path history may change over time, this function combines all the persistent path
  details and merges them into a single flight. For instance, we may have only a squawk
  for a few seconds, which then changes mid-flight to another squawk, and then a few
  seconds later we receive a radio signal with both the (new) squawk and flight number -
  thus we have three records to merge into one.

  This function merges all the persistent paths - which are 2-tuples of the most
  recent timestamp and a list of dictionaries - into one integrated persistent path.

  Args:
    id_to_use: The 2-tuple of (flight_number, squawk) of the final id we want the
      flight to have.
    ids_to_merge: an iterable of the ids to merge with the final id_to_use.
    persistent_path: the dictionary of existing persistent paths including at least
      ids_to_merge as keys, potentially also id_to_use, and perhaps additional flights
      as well.

  Returns:
    The merged persistent path dictionary, which includes id_to_use as a key and from
    which the ids_to_merge keys have been removed.
  """
  path = []
  timestamps = []

  if id_to_use in persistent_path and id_to_use not in ids_to_merge:
    ids_to_merge.append(id_to_use)




  for i in ids_to_merge:



    timestamps.append(persistent_path[i][0])
    path.extend(persistent_path[i][1])
    persistent_path.pop(i)
  persistent_path[id_to_use] = (max(timestamps), sorted(path, key=lambda p: p['now']))





  return persistent_path
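

# Worked example (hypothetical data, for illustration only): the two partial identifiers
# from the MergedIdentifier() example above each carry part of the path; after merging,
# a single key holds the combined, time-sorted path.
def _DemoMergePersistentPath():
  persistent_path = {
      ('SWA1234', None): (100, [{'now': 100, 'lat': 37.7, 'lon': -122.4}]),
      (None, '7421'): (110, [{'now': 110, 'lat': 37.8, 'lon': -122.5}])}
  merged = MergePersistentPath(
      ('SWA1234', '7421'), [('SWA1234', None), (None, '7421')], persistent_path)
  # merged has the single key ('SWA1234', '7421') whose value is
  # (110, [the two path dicts, sorted by their 'now' field])
  return merged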


def ParseDumpJson(dump_json, persistent_path):
  """Identifies all airplanes within given distance of home from the dump1090 file.

  Since the dump1090 json will have messages from all flights that the antenna has picked
  up, we want to keep only flights that are within a relevant distance to us, and also to
  extract from the full set of data in the json to just the relevant fields for additional
  analysis.

  While most flights have both a squawk and a flight number, enough are missing one only
  for it to appear later to want to use a 2-tuple of both as an identifier, merging
  flights if they share a common non-null flight number and/or squawk, as the persistent
  identifier across time.

  Args:
    dump_json: The text representation of the json message from dump1090-mutability
    persistent_path: dictionary where keys are flight numbers, and the values are a




                            <----SKIPPED LINES---->





  # if the flight was last seen too far in the past, remove the track info
  for f in list(persistent_path.keys()):
    (last_seen, current_path) = persistent_path[f]
    if last_seen < now - PERSISTENCE_SECONDS:
      persistent_path.pop(f)

  return (nearby_aircraft, now, json_desc_dict, persistent_path)


def GetFlightAwareJson(flight_number):
  """Scrapes the text json message from FlightAware for a given flight number.

  Given a flight number, loads the corresponding FlightAware webpage for that flight and
  extracts the relevant script that contains all the flight details from that page.

  Args:
    flight_number: text flight number (e.g.: SWA1234)

  Returns:
    2-tuple:
     - Text representation of the json message from FlightAware.
     - Text string of error message, if any.
  """
  url = 'https://flightaware.com/live/flight/' + flight_number
  try:
    response = requests.get(url)
  except requests.exceptions.RequestException as e:
    error_msg = 'Unable to query FA for URL due to %s: %s' % (e, url)
    Log(error_msg)
    return '', error_msg
  soup = bs4.BeautifulSoup(response.text, 'html.parser')
  scripts = soup.find_all('script')
  flight_script = None
  for script in scripts:
    if "trackpollBootstrap" in str(script):
      flight_script = str(script)
      break
  if not flight_script:
    error_msg = 'Unable to find trackpollBootstrap script in page: ' + response.text
    Log(error_msg)
    return '', error_msg
  first_open_curly_brace = flight_script.find('{')
  last_close_curly_brace = flight_script.rfind('}')
  flight_json = flight_script[first_open_curly_brace:last_close_curly_brace+1]
  return flight_json, ''
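

# Hedged usage sketch (not called anywhere; 'SWA1234' is a hypothetical flight number):
# fetch the raw FlightAware json text and, only if the fetch worked, hand it to
# ParseFlightAwareJson() the same way the main polling loop does.
def _DemoGetFlightAwareJson(flight_number='SWA1234'):
  flight_json, error_message = GetFlightAwareJson(flight_number)
  if not flight_json:
    Log('Demo lookup failed for %s: %s' % (flight_number, error_message[:200]))
    return {}
  return ParseFlightAwareJson(flight_json)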


def Unidecode(s):
  """Convert a special unicode characters to closest ASCII representation."""
  if s is not None:
    s = unidecode.unidecode(s)
  return s
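

# Small illustration: special characters are reduced to their closest ASCII
# equivalents, and None passes through unchanged.
def _DemoUnidecode():
  assert Unidecode('Zürich') == 'Zurich'
  assert Unidecode(None) is None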


def ParseFlightAwareJson(flight_json):
  """Strips relevant data about the flight from FlightAware feed.

  The FlightAware json has hundreds of fields about a flight, only a fraction of which
  are relevant to extract. Note that some of the fields are inconsistently populated
  (i.e.: scheduled and actual times for departure and take-off).

  Args:
    flight_json: Text representation of the FlightAware json about a single flight.

  Returns:




                            <----SKIPPED LINES---->




    msg = pin[2]
  if RASPBERRY_PI:
    RPi.GPIO.output(pin[0], value)
    if value:
      pin_setting = 'HIGH'
      relay_light_value = 'OFF'
    else:
      pin_setting = 'LOW'
      relay_light_value = 'ON'
    msg += '; RPi GPIO pin %d set to %s; relay light #%d should now be %s' % (
        pin[0], pin_setting, pin[3], relay_light_value)

  if pin_values[pin[0]] != value:
    if VERBOSE:
      Log(msg)  # log
    pin_values[pin[0]] = value  # update cache
    UpdateDashboard(value, subsystem=pin, failure_message=failure_message)


def UpdateDashboard(value, subsystem=0, failure_message=''):
  """Writes to disk a tuple with status details about a particular system.

  The independent monitoring.py module allows us to see in one place the status of all
  the subsystems and of the overall system; it does that monitoring based on these
  tuples of data.

  Args:
    value: Boolean indicating whether a failure has occurred (True) or system is nominal
      (False).
    subsystem: A tuple describing the system; though that description may have multiple
      attributes, the 0th element is the numeric identifier of that system.  monitoring.py
      depends on other attributes of that tuple being present as well.  Since the
      overall system does not have a tuple defined for it, it gets a default identifier
      of 0.
    failure_message: an (optional) message describing why the system / subsystem is
      being disabled or failing.
  """
  versions = (VERSION_MESSAGEBOARD, VERSION_ARDUINO)
  if subsystem:
    subsystem = subsystem[0]
  PickleObjectToFile(
      (time.time(), subsystem, value, versions, failure_message),
      PICKLE_DASHBOARD, True)
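

# Illustrative call (not made anywhere; the message is hypothetical): record an
# overall-system failure for monitoring.py to pick up. With no subsystem tuple
# supplied, the default identifier 0 denotes the overall system.
def _DemoUpdateDashboard():
  UpdateDashboard(True, failure_message='demo: simulated overall-system failure')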


def RemoveFile(file):
  """Removes a file if it exists, returning a boolean indicating if it had existed."""
  if os.path.exists(file):
    os.remove(file)
    return True
  return False


def ConfirmNewFlight(flight, flights):
  """Replaces last-seen flight with new flight if otherwise identical but for identifiers.

  Flights are identified by the radio over time by a tuple of identifiers: flight_number




                            <----SKIPPED LINES---->




            DisplayTime(last_flight, '%x'), DisplayTime(flight, '%x')))

  Log(message)

  args = (PICKLE_FLIGHTS, not SIMULATION, max_days)
  saved_flights = UnpickleObjectFromFile(*args)[:-1]
  files_to_overwrite = UnpickleObjectFromFile(*args, filenames=True)

  for file in files_to_overwrite:
    os.remove(file)
  for f in saved_flights:
    # we would like to use verify=True, but that's too slow without further optimizing the
    # verification step for a loop of data
    PickleObjectToFile(
        f, PICKLE_FLIGHTS, True, timestamp=f['now'], verify=False)

  return False


def HeartbeatRestart():
  """Logs a system down / system up pair of heartbeats as system is first starting."""
  if SIMULATION:
    return 0
  UpdateDashboard(True)  # Indicates that this wasn't running a moment before, ...
  UpdateDashboard(False)  # ... and now it is running!
  return time.time()


def Heartbeat(last_heartbeat_time):
  """Logs a system up pair of heartbeat."""
  if SIMULATION:
    return last_heartbeat_time
  now = time.time()
  if now - last_heartbeat_time > HEARTBEAT_SECONDS:
    UpdateDashboard(False)
    last_heartbeat_time = now
  return last_heartbeat_time
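

# Minimal usage sketch mirroring main() below: emit the restart pair once, then refresh
# the heartbeat on every pass through a long-running loop.
def _DemoHeartbeatLoop(passes=3):
  last_heartbeat_time = HeartbeatRestart()
  for _ in range(passes):
    last_heartbeat_time = Heartbeat(last_heartbeat_time)
    time.sleep(LOOP_DELAY_SECONDS)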


def VersionControl():
  """Copies the current instances of messageboard.py and arduino.py into a repository.


  To aid debugging, we want to keep past versions of the code easily accessible, and
  linked to the errors that have been logged. This function copies the python code
  into a version control directory after adding in a date / time stamp to the file name.
  """
  def MakeCopy(python_prefix):
    file_extension = '.py'

    live_name = python_prefix + '.py'
    live_path = os.path.join(CODE_REPOSITORY, live_name)

    epoch = os.path.getmtime(live_path)
    last_modified_suffix = EpochDisplayTime(epoch, format_string='-%Y-%m-%d-%H%M')
    version_name = python_prefix + last_modified_suffix + file_extension
    version_path = os.path.join(VERSION_REPOSITORY, version_name)

    if not os.path.exists(version_path):
      shutil.copyfile(live_path, version_path)
    return version_name

  global VERSION_MESSAGEBOARD
  global VERSION_ARDUINO
  VERSION_MESSAGEBOARD = MakeCopy('messageboard')
  VERSION_ARDUINO = MakeCopy('arduino')









def main():
  """Traffic cop between incoming radio flight messages, configuration, and messageboard.

  This is the main logic, checking for new flights, augmenting the radio signal with
  additional web-scraped data, and generating messages in a form presentable to the
  messageboard.
  """
  RemoveFile(LOGFILE_LOCK)
  VersionControl()

  # Since this clears log files, it should occur before we start logging
  if '-s' in sys.argv:
    global SIMULATION_COUNTER
    SimulationSetup()

  last_heartbeat_time = HeartbeatRestart()
  init_timing = [time.time()]  # time 0

  # This flag slows down simulation time around a flight, great for debugging the arduinos
  simulation_slowdown = bool('-f' in sys.argv)

  # Redirect any errors to a log file instead of the screen, and add a datestamp
  if not SIMULATION:
    sys.stderr = open(STDERR_FILE, 'a')
    Log('', STDERR_FILE)

  init_timing.append(time.time())  # time 1
  Log('Starting up process %d' % os.getpid())
  already_running_ids = FindRunningParents()
  if already_running_ids:
    for pid in already_running_ids:
      Log('Sending termination signal to %d' % pid)
      os.kill(pid, signal.SIGTERM)
  init_timing.append(time.time())  # time 2

  SetPinMode()

  configuration = ReadAndParseSettings(CONFIG_FILE)
  startup_time = time.time()
  json_desc_dict = {}

  init_timing.append(time.time())  # time 3
  flights = UnpickleObjectFromFile(PICKLE_FLIGHTS, True, max_days=MAX_INSIGHT_HORIZON_DAYS)
  # Clear the loaded flights of any cached data, identified by keys with a specific
  # suffix, since code fixes may change the values for some of those cached elements
  for flight in flights:
    for key in list(flight.keys()):
      if key.endswith(CACHED_ELEMENT_PREFIX):
        flight.pop(key)
  init_timing.append(time.time())  # time 4

  # If we're displaying just a single insight message, we want it to be something
  # unique, to the extent possible; this dict holds a count of the different types of
  # messages displayed so far
  insight_message_distribution = {}




                            <----SKIPPED LINES---->




  histogram = {}

  # Next up to print is index 0; this is a list of tuples:
  # tuple element#1: flag indicating the type of message that this is
  # tuple element#2: the message itself
  message_queue = []
  next_message_time = time.time()

  # We repeat the loop every x seconds; this ensures that if the processing time is long,
  # we don't wait another x seconds after processing completes
  next_loop_time = time.time() + LOOP_DELAY_SECONDS

  # These files are read only if the version on disk has been modified more recently
  # than the last time they were read
  last_dump_json_timestamp = 0

  init_timing.append(time.time())  # time 6
  WaitUntilKillComplete(already_running_ids)
  init_timing.append(time.time())  # time 7

  LogTimes(init_timing)

  Log('Finishing initialization of %d; starting radio polling loop' % os.getpid())
  while (not SIMULATION or SIMULATION_COUNTER < len(DUMP_JSONS)) and not SHUTDOWN_SIGNAL:

    last_heartbeat_time = Heartbeat(last_heartbeat_time)

    new_configuration = ReadAndParseSettings(CONFIG_FILE)
    CheckForNewFilterCriteria(configuration, new_configuration, message_queue, flights)
    configuration = new_configuration

    ResetLogs(configuration)  # clear the logs if requested

    # if this is a SIMULATION, then process every diff dump. But if it isn't a simulation,
    # then only read & do related processing for the next dump if the last-modified
    # timestamp indicates the file has been updated since it was last read.
    tmp_timestamp = 0
    if not SIMULATION:
      dump_json_exists = os.path.exists(DUMP_JSON_FILE)
      if dump_json_exists:
        tmp_timestamp = os.path.getmtime(DUMP_JSON_FILE)




                            <----SKIPPED LINES---->