01234567890123456789012345678901234567890123456789012345678901234567890123456789
7576777879808182838485868788899091929394 9596979899100101102103 104105106107108109110111112113114115116117118119120121122123 120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244 15871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612 1613161416151616161716181619162016211622162316241625162616271628162916301631163216331634 58255826582758285829583058315832583358345835583658375838583958405841584258435844 58455846 58475848584958505851585258535854585558565857585858595860586158625863586458655866 58765877587858795880588158825883588458855886588758885889589058915892589358945895 5896 58975898589959005901590259035904590559065907590859095910591159125913591459155916 |
<----SKIPPED LINES---->
# Radius of interest around HOME: 5000 divided by FEET_IN_METER, i.e.
# presumably 5000 feet expressed in meters - TODO confirm units against the
# FEET_IN_METER definition earlier in the file.
MIN_METERS = 5000/FEET_IN_METER
# planes not seen within MIN_METERS in PERSISTENCE_SECONDS seconds will be
# dropped from the nearby list
PERSISTENCE_SECONDS = 300
TRUNCATE = 50  # max number of keys to include in a histogram image file
# number of seconds to pause between each radio poll / command processing loop
LOOP_DELAY_SECONDS = 1
# number of seconds to wait between recording heartbeats to the status file
HEARTBEAT_SECONDS = 10
# version control directory
CODE_REPOSITORY = ''
VERSION_REPOSITORY = 'versions/'
VERSION_WEBSITE_PATH = VERSION_REPOSITORY
# Version identifiers; None here, presumably populated at runtime by version
# management logic elsewhere in the file - confirm at usage sites.
VERSION_MESSAGEBOARD = None
VERSION_ARDUINO = None
# histogram logic truncates to exactly 30 days of hours
MAX_INSIGHT_HORIZON_DAYS = 31
# This file is where the radio drops its json file
DUMP_JSON_FILE = '/run/readsb/aircraft.json'
# At the time a flight is first identified as being of interest (in that
# it falls within MIN_METERS meters of HOME), it - and core attributes
# derived from FlightAware, if any - is appended to the end of this pickle
# file. However, since this file is cached in working memory, flights older
# than 30 days are flushed from this periodically.
PICKLE_FLIGHTS = 'pickle/flights.pk'
# This allows us to identify the full history (including what was last sent
# to the splitflap display) in a programmatic fashion. While it may be
# interesting in its own right, its real use is to handle the "replay"
# button, so we know to enable it if what is displayed is the last flight.
PICKLE_SCREENS = 'pickle/screens.pk'
# Status data about messageboard systems - are they running, etc. Specifically,
# has tuples of data (timestamp, system_id, status), where system_id is either
# the pin id of GPIO, or a 0 to indicate overall system, and status is boolean
PICKLE_SYSTEM_DASHBOARD = 'pickle/dashboard.pk'
# Flight-centric status data - what time was the flight first detected as
# in range, does it pass the display criteria, was a json available for it and
# when, was the local or web api used to communicate with the vestaboard
# and how much later, etc.
PICKLE_FLIGHT_DASHBOARD = 'pickle/flight_status.pk'
# Name prefix identifying cached elements - purpose inferred from the name;
# confirm against usage sites elsewhere in the file.
CACHED_ELEMENT_PREFIX = 'cached_'
<----SKIPPED LINES---->
"""Returns text from the given file name if available, empty string if not.
Args:
filename: string of the filename to open, potentially also including the
full path.
log_exception: boolean indicating whether to log an exception if file not
found.
Returns:
Return text string of file contents.
"""
try:
with open(filename, 'r') as content_file:
file_contents = content_file.read()
except IOError:
if log_exception:
Log('Unable to read '+filename)
return ''
return file_contents
# because reading is ~25x more expensive than getmtime, we will only read &
# parse if the getmtime is more recent than last call for this file. So this
# dict stores a 2-tuple, the last time read & the resulting parsed return
# value, keyed by the filename passed to ReadAndParseSettings
CACHED_FILES = {}
def ReadAndParseSettings(filename):
  """Reads filename and parses the resulting key-value pairs into a dict.

  Results are cached in CACHED_FILES, keyed by filename, and re-parsed only
  when the file's mtime is newer than the cached read time.
  """
  global CACHED_FILES
  if not os.path.exists(filename):
    # File does not - or at least no longer - exists; drop any stale cache
    # entry and report no settings.
    CACHED_FILES.pop(filename, None)
    return {}
  cached_time, cached_settings = CACHED_FILES.get(filename, (0, {}))
  mtime = os.path.getmtime(filename)
  if mtime <= cached_time:
    return cached_settings
  # File changed since last read: re-read, re-parse, and refresh the cache.
  parsed = ParseSettings(ReadFile(filename))
  CACHED_FILES[filename] = (mtime, parsed)
  return parsed
def BuildSettings(d):
<----SKIPPED LINES---->
newly_nearby_flight_identifiers_str = ', '.join(
['%s/%s ' % (*i,) for i in newly_nearby_flight_identifiers])
newly_nearby_flight_details_str = '\n'.join([
str(current_nearby_aircraft[f])
for f in newly_nearby_flight_identifiers])
Log('Multiple newly-nearby flights: %s\n%s' % (
newly_nearby_flight_identifiers_str,
newly_nearby_flight_details_str))
flight_identifier = newly_nearby_flight_identifiers[0]
flight_aware_json = {}
if SIMULATION:
json_times = [j[1] for j in FA_JSONS]
if json_time in json_times:
flight_aware_json = FA_JSONS[json_times.index(json_time)][0]
flight_aware_timestamp = time.time() # "real" timestamp unavailable
elif flight_identifier[0]:
flight_number = flight_identifier[0]
flight_aware_json, error_message, status, flight_aware_timestamp = (
GetFlightAwareJson(flight_number))
if flight_aware_json:
UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, False)
else:
failure_message = 'No json from Flightaware for flight %s: %s' % (
flight_number, error_message[:500])
Log(failure_message)
UpdateStatusLight(
GPIO_ERROR_FLIGHT_AWARE_CONNECTION, True, failure_message)
flight_details = {}
if flight_aware_json:
flight_details = ParseFlightAwareJson(flight_aware_json)
elif flight_identifier[0]: # if there's a flight number but no json
flight_details, derived_attr_msg = FindAttributesFromSimilarFlights(
flight_identifier[0], flights)
error_message = '%s; %s' % (error_message, derived_attr_msg)
if not SIMULATION and log_jsons:
PickleObjectToFile((flight_aware_json, now), PICKLE_FA_JSON_FILE, True)
# Augment FlightAware details with radio / radio-derived details
flight_details.update(current_nearby_aircraft[flight_identifier])
# Augment with the past location data; the [1] is because recall that
# persistent_path[key] is actually a 2-tuple, the first element being
# the most recent time seen, and the second element being the actual
# path. But we do not need to keep around the most recent time seen any
# more.
<----SKIPPED LINES---->
Returns:
Boolean - True if different (and processing needed), False if identical
"""
if SIMULATION_COUNTER == 0:
return True
(this_json, unused_now) = DUMP_JSONS[SIMULATION_COUNTER]
(last_json, unused_now) = DUMP_JSONS[SIMULATION_COUNTER - 1]
return this_json != last_json
def CheckRebootNeeded(
startup_time, message_queue, json_desc_dict, configuration):
"""Reboot based on duration instance has been running.
Reboot needed in one of the following situations:
- All quiet: if running for over 24 hours and all is quiet (message queue
empty, no planes in radio, and backup not currently in process).
- Mostly quiet: if running for over 36 hours and message queue is empty and
it's 4a.
- Reboot requested via html form.
Also checks if reset requested via html form.
"""
reboot = False
global SHUTDOWN_SIGNAL
running_hours = (time.time() - startup_time) / SECONDS_IN_HOUR
restart_days = configuration.get('restart_days', 1)
min_hours = restart_days * HOURS_IN_DAY
if (
running_hours >= min_hours and
not message_queue and
not json_desc_dict.get('radio_range_flights') and
# script /home/pi/splitflap/backup.sh creates temp file in this
# directory; after it is copied to the NAS, it is deleted
not os.listdir('/media/backup')):
msg = ('All quiet reboot triggered based on %d days (%d hours); '
'actual runtime: %.2f hours' %
(restart_days, min_hours, running_hours))
SHUTDOWN_SIGNAL = msg
Log(msg)
<----SKIPPED LINES---->
msg = ('Early morning reboot triggered based on %.1f (%d hours); '
'actual runtime: %.2f hours' %
(restart_days, min_hours, running_hours))
SHUTDOWN_SIGNAL = msg
Log(msg)
reboot = True
if 'soft_reboot' in configuration:
msg = 'Soft reboot requested via web form'
SHUTDOWN_SIGNAL = msg
Log(msg)
reboot = True
RemoveSetting(configuration, 'soft_reboot')
if 'end_process' in configuration:
msg = 'Process end requested via web form'
SHUTDOWN_SIGNAL = msg
Log(msg)
RemoveSetting(configuration, 'end_process')
return reboot
def InterruptRebootFromButton():
  """Flags the main loop to reboot once it completes the current iteration.

  Triggered only by a physical button press.
  """
  global SHUTDOWN_SIGNAL, REBOOT_SIGNAL
  message = 'Soft reboot requested by button push'
  SHUTDOWN_SIGNAL = message
  REBOOT_SIGNAL = True
  # Drive the reset pin low to acknowledge that the request was received.
  RPi.GPIO.output(GPIO_SOFT_RESET[1], False)
  Log(message)
def InterruptShutdownFromSignal(signalNumber, unused_frame):
"""Sets flag so main loop will terminate when it completes the iteration.
<----SKIPPED LINES---->
|
01234567890123456789012345678901234567890123456789012345678901234567890123456789
75767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136 12131214121512161217121812191220122112221223122412251226122712281229123012311232 12331234123512361237123812391240124112421243124412451246124712481249125012511252 1595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643 58345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880 589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021 |
<----SKIPPED LINES---->
# Radius of interest around HOME: 5000 divided by FEET_IN_METER, i.e.
# presumably 5000 feet expressed in meters - TODO confirm units against the
# FEET_IN_METER definition earlier in the file.
MIN_METERS = 5000/FEET_IN_METER
# planes not seen within MIN_METERS in PERSISTENCE_SECONDS seconds will be
# dropped from the nearby list
PERSISTENCE_SECONDS = 300
TRUNCATE = 50  # max number of keys to include in a histogram image file
# number of seconds to pause between each radio poll / command processing loop
LOOP_DELAY_SECONDS = 1
# number of seconds to wait between recording heartbeats to the status file
HEARTBEAT_SECONDS = 10
# version control directory
CODE_REPOSITORY = ''
VERSION_REPOSITORY = 'versions/'
VERSION_WEBSITE_PATH = VERSION_REPOSITORY
# Version identifiers; None here, presumably populated at runtime by version
# management logic elsewhere in the file - confirm at usage sites.
VERSION_MESSAGEBOARD = None
VERSION_ARDUINO = None
# histogram logic truncates to exactly 30 days of hours
MAX_INSIGHT_HORIZON_DAYS = 31
# because reading is ~25x more expensive than getmtime, we will only read &
# parse if the getmtime is more recent than last call for this file. So this
# dict stores a 2-tuple, the last time read & the resulting parsed return
# value, with the key being the desired file name
CACHED_FILES = {}
# This file is where the radio drops its json file
DUMP_JSON_FILE = '/run/readsb/aircraft.json'
# At the time a flight is first identified as being of interest (in that
# it falls within MIN_METERS meters of HOME), it - and core attributes
# derived from FlightAware, if any - is appended to the end of this pickle
# file. However, since this file is cached in working memory, flights older
# than 30 days are flushed from this periodically.
PICKLE_FLIGHTS = 'pickle/flights.pk'
# Network status pickle file; this is used by messageboard.py to check if
# the network on the RPi might possibly be hanging.
NETWORK_PICKLE_FILE = 'network_monitor.pk'
# On an actual Raspberry Pi the pickle lives under the network_monitor
# support directory; elsewhere (e.g. simulation on a dev box) the bare
# relative filename above is used.
if RASPBERRY_PI:
  SUPPORT_PATH = '/home/pi/network_monitor'
  NETWORK_PICKLE_FILE = os.path.join(SUPPORT_PATH, NETWORK_PICKLE_FILE)
# This allows us to identify the full history (including what was last sent
# to the splitflap display) in a programmatic fashion. While it may be
# interesting in its own right, its real use is to handle the "replay"
# button, so we know to enable it if what is displayed is the last flight.
PICKLE_SCREENS = 'pickle/screens.pk'
# Status data about messageboard systems - are they running, etc. Specifically,
# has tuples of data (timestamp, system_id, status), where system_id is either
# the pin id of GPIO, or a 0 to indicate overall system, and status is boolean
PICKLE_SYSTEM_DASHBOARD = 'pickle/dashboard.pk'
# Flight-centric status data - what time was the flight first detected as
# in range, does it pass the display criteria, was a json available for it and
# when, was the local or web api used to communicate with the vestaboard
# and how much later, etc.
PICKLE_FLIGHT_DASHBOARD = 'pickle/flight_status.pk'
# Name prefix identifying cached elements - purpose inferred from the name;
# confirm against usage sites elsewhere in the file.
CACHED_ELEMENT_PREFIX = 'cached_'
<----SKIPPED LINES---->
"""Returns text from the given file name if available, empty string if not.
Args:
filename: string of the filename to open, potentially also including the
full path.
log_exception: boolean indicating whether to log an exception if file not
found.
Returns:
Return text string of file contents.
"""
try:
with open(filename, 'r') as content_file:
file_contents = content_file.read()
except IOError:
if log_exception:
Log('Unable to read '+filename)
return ''
return file_contents
def ReadAndParseSettings(filename):
  """Reads filename and parses the resulting key-value pairs into a dict.

  Results are cached in CACHED_FILES, keyed by filename, and re-parsed only
  when the file's mtime is newer than the cached read time.
  """
  global CACHED_FILES
  if not os.path.exists(filename):
    # File does not - or at least no longer - exists; drop any stale cache
    # entry and report no settings.
    CACHED_FILES.pop(filename, None)
    return {}
  cached_time, cached_settings = CACHED_FILES.get(filename, (0, {}))
  mtime = os.path.getmtime(filename)
  if mtime <= cached_time:
    return cached_settings
  # File changed since last read: re-read, re-parse, and refresh the cache.
  parsed = ParseSettings(ReadFile(filename))
  CACHED_FILES[filename] = (mtime, parsed)
  return parsed
def BuildSettings(d):
<----SKIPPED LINES---->
newly_nearby_flight_identifiers_str = ', '.join(
['%s/%s ' % (*i,) for i in newly_nearby_flight_identifiers])
newly_nearby_flight_details_str = '\n'.join([
str(current_nearby_aircraft[f])
for f in newly_nearby_flight_identifiers])
Log('Multiple newly-nearby flights: %s\n%s' % (
newly_nearby_flight_identifiers_str,
newly_nearby_flight_details_str))
flight_identifier = newly_nearby_flight_identifiers[0]
flight_aware_json = {}
if SIMULATION:
json_times = [j[1] for j in FA_JSONS]
if json_time in json_times:
flight_aware_json = FA_JSONS[json_times.index(json_time)][0]
flight_aware_timestamp = time.time() # "real" timestamp unavailable
elif flight_identifier[0]:
flight_number = flight_identifier[0]
flight_aware_json, error_message, status, flight_aware_timestamp = (
GetFlightAwareJson(flight_number))
if flight_aware_json: # Definitive success: we have a JSON
UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, False)
else: # Perhaps a failure, or just a warning due to a too-soon query
failure_message = 'No json from Flightaware for flight %s: %s' % (
flight_number, error_message[:500])
Log(failure_message)
if status == 'FAILURE': # Only update dashboard if definitive failure
UpdateStatusLight(
GPIO_ERROR_FLIGHT_AWARE_CONNECTION, True, failure_message)
flight_details = {}
if flight_aware_json:
flight_details = ParseFlightAwareJson(flight_aware_json)
elif flight_identifier[0]: # if there's a flight number but no json
flight_details, derived_attr_msg = FindAttributesFromSimilarFlights(
flight_identifier[0], flights)
error_message = '%s; %s' % (error_message, derived_attr_msg)
if not SIMULATION and log_jsons:
PickleObjectToFile((flight_aware_json, now), PICKLE_FA_JSON_FILE, True)
# Augment FlightAware details with radio / radio-derived details
flight_details.update(current_nearby_aircraft[flight_identifier])
# Augment with the past location data; the [1] is because recall that
# persistent_path[key] is actually a 2-tuple, the first element being
# the most recent time seen, and the second element being the actual
# path. But we do not need to keep around the most recent time seen any
# more.
<----SKIPPED LINES---->
Returns:
Boolean - True if different (and processing needed), False if identical
"""
if SIMULATION_COUNTER == 0:
return True
(this_json, unused_now) = DUMP_JSONS[SIMULATION_COUNTER]
(last_json, unused_now) = DUMP_JSONS[SIMULATION_COUNTER - 1]
return this_json != last_json
def CheckRebootNeeded(
startup_time, message_queue, json_desc_dict, configuration):
"""Reboot based on duration instance has been running.
Reboot needed in one of the following situations:
- All quiet: if running for over 24 hours and all is quiet (message queue
empty, no planes in radio, and backup not currently in process).
- Mostly quiet: if running for over 36 hours and message queue is empty and
it's 4a.
- Reboot requested via html form.
- Persistent network problems failure that may be specific to the RPi
Returns:
Boolean indicating if a reboot is needed (True); otherwise, False. Also
logs a message to the main log about why a reboot may be needed, and updates
the global SHUTDOWN_SIGNAL with the same message sent to the log so that
it can also be displayed on the html dashboard.
"""
reboot = False
global SHUTDOWN_SIGNAL
running_hours = (time.time() - startup_time) / SECONDS_IN_HOUR
restart_days = configuration.get('restart_days', 1)
min_hours = restart_days * HOURS_IN_DAY
if (
running_hours >= min_hours and
not message_queue and
not json_desc_dict.get('radio_range_flights') and
# script /home/pi/splitflap/backup.sh creates temp file in this
# directory; after it is copied to the NAS, it is deleted
not os.listdir('/media/backup')):
msg = ('All quiet reboot triggered based on %d days (%d hours); '
'actual runtime: %.2f hours' %
(restart_days, min_hours, running_hours))
SHUTDOWN_SIGNAL = msg
Log(msg)
<----SKIPPED LINES---->
msg = ('Early morning reboot triggered based on %.1f (%d hours); '
'actual runtime: %.2f hours' %
(restart_days, min_hours, running_hours))
SHUTDOWN_SIGNAL = msg
Log(msg)
reboot = True
if 'soft_reboot' in configuration:
msg = 'Soft reboot requested via web form'
SHUTDOWN_SIGNAL = msg
Log(msg)
reboot = True
RemoveSetting(configuration, 'soft_reboot')
if 'end_process' in configuration:
msg = 'Process end requested via web form'
SHUTDOWN_SIGNAL = msg
Log(msg)
RemoveSetting(configuration, 'end_process')
# Periodically, the RPi seems to lose the network connection even though
# the router is up with a strong signal; when this happens, it does not
# regain the signal until the RPi is restarted. This will be detected
# by the network status being down for at least 30 minutes, as determined
# by the .pk file capturing the network status (generated by the
# network_monitor.py script).
#
# Specifically, if the most recent three values of the .pk are all 0s, and
# we have been up and running for at least 30 minutes, restart.
minimum_uptime_minutes = 30 # 30 minutes
number_of_intervals = 3
run_time_minutes = (time.time() - startup_time) / SECONDS_IN_MINUTE
if run_time_minutes > minimum_uptime_minutes:
results = MostRecentNetworkStatuses(number_of_intervals)
(
network_status_list, last_day, last_interval,
first_day, first_interval) = results
# Sum the list, handling strings in the list as if they were 0s
sum_network_status_list = sum(
[x if isinstance(x, int) else 0 for x in network_status_list])
if network_status_list and not sum_network_status_list: # all zeros
msg = (
'Running for %d yet no network for %d intervals (index %d of day '
'%s to index %d of day %s); rebooting in attempt to re-establish '
'network connectivity' % (
run_time_minutes, number_of_intervals,
first_day, first_interval, last_day, last_interval))
SHUTDOWN_SIGNAL = msg
Log(msg)
reboot = True
return reboot
def MostRecentNetworkStatuses(number_of_intervals):
  """Returns a list of the most recent number of network statuses.

  The network status is managed by network_monitor.py, which, every few
  minutes, updates the .pk file with the current network status. The data
  structure is a dictionary keyed by day names (i.e.: 12-30-2022), each
  holding a list of 0s & 1s indicating network down / up respectively, for
  consecutive time intervals.

  Because the .pk is updated infrequently (i.e.: only once every 10 min),
  we can cache it and re-read it only when it changes.

  If there are not enough intervals to provide the number_of_intervals
  requested, as many as are present will be provided.

  Args:
    number_of_intervals: integer of time periods desired.

  Returns:
    A 5-tuple:
    - List of the most recent number_of_intervals from the network status
      pickle file, most recent interval first.
    - String representing the most recent day in the list
    - Index of the interval from that most recent day
    - String representing the earliest day in the list
    - Index of the interval from that earliest day
    If the list is empty (pickle file missing, or present but holding no
    days yet), then the strings in positions 2 & 4 of the return tuple will
    be the empty string, and the indices in positions 3 & 5 will be -1.
  """
  global CACHED_FILES
  filepath = NETWORK_PICKLE_FILE
  (last_read_time, network_status) = CACHED_FILES.get(filepath, (0, {}))
  if not os.path.exists(filepath):
    return [], '', -1, '', -1
  last_modified = os.path.getmtime(filepath)
  if last_modified > last_read_time:
    # File changed since we last read it; re-read & refresh the cache.
    network_status = UnpickleObjectFromFile(filepath, False)[0]
    CACHED_FILES[filepath] = (last_modified, network_status)
  # Now we've read in the network status, or picked it up from cache
  day_names = sorted(network_status)
  if not day_names:
    # BUG FIX: an existing-but-empty pickle previously raised IndexError on
    # day_names[-1]; treat it the same as a missing file.
    return [], '', -1, '', -1
  last_day = day_names[-1]
  # NOTE(review): this is the count of intervals recorded for last_day,
  # i.e. one past the last 0-based index - confirm callers expect that.
  last_interval = len(network_status[last_day])
  relevant_status = []
  remaining_to_add = number_of_intervals
  # Walk backwards from the most recent day, accumulating intervals in
  # most-recent-first order until we have enough (or run out of days).
  while len(relevant_status) < number_of_intervals and day_names:
    day_pointer = day_names.pop()
    relevant_status.extend(reversed(network_status[day_pointer]))
    first_interval = max(
        len(network_status[day_pointer]) - remaining_to_add, 0)
    remaining_to_add -= len(network_status[day_pointer])
    first_day = day_pointer
  return (
      relevant_status[:number_of_intervals],
      last_day, last_interval,
      first_day, first_interval)
def InterruptRebootFromButton():
  """Flags the main loop to reboot once it completes the current iteration.

  Triggered only by a physical button press.
  """
  global SHUTDOWN_SIGNAL, REBOOT_SIGNAL
  message = 'Soft reboot requested by button push'
  SHUTDOWN_SIGNAL = message
  REBOOT_SIGNAL = True
  # Drive the reset pin low to acknowledge that the request was received.
  RPi.GPIO.output(GPIO_SOFT_RESET[1], False)
  Log(message)
def InterruptShutdownFromSignal(signalNumber, unused_frame):
"""Sets flag so main loop will terminate when it completes the iteration.
<----SKIPPED LINES---->
|