messageboard-2020-06-17-0722.py



                            <----SKIPPED LINES---->




REBOOT_SIGNAL = False

SIMULATION = False
SIMULATION_COUNTER = 0
SIMULATION_PREFIX = 'SIM_'
PICKLE_DUMP_JSON_FILE = 'pickle/dump_json.pk'
PICKLE_FA_JSON_FILE = 'pickle/fa_json.pk'
DUMP_JSONS = None  # loaded only if in simulation mode
FA_JSONS = None  # loaded only if in simulation mode

HOME_LAT = 37.64406
HOME_LON = -122.43463
HOME = (HOME_LAT, HOME_LON) # lat / lon tuple of antenna
HOME_ALT = 29  #altitude in meters
RADIUS = 6371.0e3  # radius of earth in meters

FEET_IN_METER = 3.28084
FEET_IN_MILE = 5280
METERS_PER_SECOND_IN_KNOTS = 0.514444

MIN_METERS = 5000/FEET_IN_METER # only planes within this distance will be detailed

# planes not seen within MIN_METERS in PERSISTENCE_SECONDS seconds will be dropped from
# the nearby list
PERSISTENCE_SECONDS = 300
TRUNCATE = 50  # max number of keys to include in a histogram image file
# number of seconds to pause between each radio poll / command processing loop
LOOP_DELAY_SECONDS = 1

# number of seconds to wait between recording heartbeats to the status file
HEARTBEAT_SECONDS = 10

# version control directory
CODE_REPOSITORY = ''
VERSION_REPOSITORY = 'versions/'
VERSION_WEBSITE_PATH = VERSION_REPOSITORY
VERSION_MESSAGEBOARD = None
VERSION_ARDUINO = None

MAX_INSIGHT_HORIZON_DAYS = 31  # histogram logic truncates to exactly 30 days of hours


# This file is where the radio drops its json file
DUMP_JSON_FILE = '/run/readsb/aircraft.json'

# At the time a flight is first identified as being of interest (in that it falls
# within MIN_METERS meters of HOME), it - and core attributes derived from FlightAware,
# if any - is appended to the end of this pickle file. However, since this file is
# cached in working memory, flights older than 30 days are flushed from this periodically.

PICKLE_FLIGHTS = 'pickle/flights.pk'

# This allows us to identify the full history (including what was last sent to the
# splitflap display) in a programmatic fashion. While it may be interesting in its own
# right, its real use is to handle the "replay" button, so we know to enable it if what
# is displayed is the last flight.
PICKLE_SCREENS = 'pickle/screens.pk'

# Status data about messageboard - is it running, etc.  Specifically, has tuples
# of data (timestamp, system_id, status), where system_id is either the pin id of GPIO,
# or a 0 to indicate overall system, and status is boolean
PICKLE_DASHBOARD = 'pickle/dashboard.pk'

CACHED_ELEMENT_PREFIX = 'cached_'

# This web-exposed file is used for non-error messages that might highlight data or
# code logic worth looking into. It is only cleared out manually.
LOGFILE = 'log.txt'
# Identical to the LOGFILE, except it includes just the most recent n lines. Newest
# lines are at the end.
ROLLING_LOGFILE = 'rolling_log.txt'

ROLLING_LOG_SIZE = 1000  # default number of lines; may be overridden by settings file


# Users can trigger .png histograms analogous to the text ones from the web interface;
# this is the folder (within WEBSERVER_PATH) where those files are placed

WEBSERVER_IMAGE_RELATIVE_FOLDER = 'images/'
# Multiple histograms can be generated, i.e. for airline, aircraft, day of week, etc.
# The output files are named by the prefix & suffix, i.e.: prefix + type + . + suffix,
# as in histogram_aircraft.png. These names match up to the names expected by the html
# page that displays the images. Also, note that the suffix is interpreted by matplotlib
# to identify the image format to create.
HISTOGRAM_IMAGE_PREFIX = 'histogram_'
HISTOGRAM_IMAGE_SUFFIX = 'png'
HISTOGRAM_IMAGE_HTML = 'histograms.html'

# This file indicates a pending request for histograms - either png, text-based, or
# both; once it is processed, this file is deleted. The contents are concatenated key-value
# pairs, histogram=all;histogram_history=24h; etc.

HISTOGRAM_CONFIG_FILE = 'secure/histogram.txt'
HISTOGRAM_BOOLEANS = ('histogram_data_summary',)  # single-element tuple
# This contains concatenated key-value configuration attributes in a similar format
# to the HISTOGRAM_CONFIG_FILE that are exposed to the user via the web interface or,
# for a subset of them, through the Arduino interface. They are polled at every iteration
# so that the most current value is always leveraged by the running software.

CONFIG_FILE = 'secure/settings.txt'

CONFIG_BOOLEANS = ('setting_screen_enabled', 'next_flight', 'reset_logs', 'log_jsons')
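# Illustrative settings.txt contents (keys taken from examples elsewhere in this file;
# values are hypothetical): setting_screen_enabled=1;distance=1426;altitude=32559;insights=all;
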
# A few key settings for the messageboard are its sensitivity to displaying flights -
# though it logs all flights within range, it may not be desirable to display all
# flights to the user. Two key parameters are the maximum altitude, and the furthest
# away we anticipate the flight being at its closest point to HOME. As those two
# parameters are manipulated in the settings, a histogram is displayed with one or
# potentially two series, showing the present and potentially prior-set distribution
# of flights, by hour throughout the day, over the last seven days, normalized to
# flights per day. This allows those parameters to be fine-tuned in a useful way.
# This file is the location, on the webserver, of that image, which needs to be in
# alignment with the html page that displays it.
HOURLY_IMAGE_FILE = 'hours.png'

# This is all messages that have been sent to the board since the last time the
# file was manually cleared. Newest messages are at the bottom. It is visible at the
# webserver.
ALL_MESSAGE_FILE = 'all_messages.txt'  #enumeration of all messages sent to board

# This shows the most recent n messages sent to the board. Newest messages are at the
# top for easier viewing of "what did I miss".
ROLLING_MESSAGE_FILE = 'rolling_messages.txt'

STDERR_FILE = 'stderr.txt'
BACKUP_FILE = 'backup.txt'
SERVICE_VERIFICATION_FILE = 'service-verification.txt'
UPTIMES_FILE = 'uptimes.html'

FLAG_MSG_FLIGHT = 1  # basic flight details
FLAG_MSG_INSIGHT = 2  # random tidbit about a flight
FLAG_MSG_HISTOGRAM = 3  # histogram message
FLAG_MSG_CLEAR = 4  # a blank message to clear the screen
FLAG_MSG_PERSONAL = 5  # user-entered message to display for some duration of time


FLAG_INSIGHT_LAST_SEEN = 0
FLAG_INSIGHT_DIFF_AIRCRAFT = 1
FLAG_INSIGHT_NTH_FLIGHT = 2
FLAG_INSIGHT_GROUNDSPEED = 3
FLAG_INSIGHT_ALTITUDE = 4
FLAG_INSIGHT_VERTRATE = 5
FLAG_INSIGHT_FIRST_DEST = 6
FLAG_INSIGHT_FIRST_ORIGIN = 7
FLAG_INSIGHT_FIRST_AIRLINE = 8
FLAG_INSIGHT_FIRST_AIRCRAFT = 9
FLAG_INSIGHT_LONGEST_DELAY = 10
FLAG_INSIGHT_FLIGHT_DELAY_FREQUENCY = 11
FLAG_INSIGHT_FLIGHT_DELAY_TIME = 12
FLAG_INSIGHT_AIRLINE_DELAY_FREQUENCY = 13
FLAG_INSIGHT_AIRLINE_DELAY_TIME = 14
FLAG_INSIGHT_DESTINATION_DELAY_FREQUENCY = 15
FLAG_INSIGHT_DESTINATION_DELAY_TIME = 16
FLAG_INSIGHT_HOUR_DELAY_FREQUENCY = 17
FLAG_INSIGHT_HOUR_DELAY_TIME = 18
FLAG_INSIGHT_DATE_DELAY_FREQUENCY = 19
FLAG_INSIGHT_DATE_DELAY_TIME = 20
INSIGHT_TYPES = 21

TEMP_FAN_TURN_ON_CELSIUS = 65
TEMP_FAN_TURN_OFF_CELSIUS = 55

# GPIO relay connections
# format: (GPIO pin, true message, false message, relay number, description, initial_state)

GPIO_ERROR_VESTABOARD_CONNECTION = (
    22,
    'ERROR: Vestaboard unavailable',
    'SUCCESS: Vestaboard available',
    1, 'Vestaboard connected', False)
GPIO_ERROR_FLIGHT_AWARE_CONNECTION = (
    23,
    'ERROR: FlightAware not available',
    'SUCCESS: FlightAware available',
    2, 'FlightAware connected', False)
GPIO_ERROR_ARDUINO_SERVO_CONNECTION = (
    24,
    'ERROR: Servos not running or lost connection',
    'SUCCESS: Handshake with servo Arduino received',
    3, 'Hemisphere connected', True)
GPIO_ERROR_ARDUINO_REMOTE_CONNECTION = (
    25,
    'ERROR: Remote not running or lost connection',
    'SUCCESS: Handshake with remote Arduino received',
    4, 'Remote connected', True)
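
# A minimal sketch of how one of these tuples can be unpacked (the names here are for
# illustration only; the consuming code lives elsewhere in this file):
#   (pin, true_msg, false_msg, relay, description, initial_state) = (
#       GPIO_ERROR_VESTABOARD_CONNECTION)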




                            <----SKIPPED LINES---->




if RASPBERRY_PI:
  PICKLE_FLIGHTS = MESSAGEBOARD_PATH + PICKLE_FLIGHTS
  PICKLE_DASHBOARD = MESSAGEBOARD_PATH + PICKLE_DASHBOARD
  LOGFILE = MESSAGEBOARD_PATH + LOGFILE
  PICKLE_DUMP_JSON_FILE = MESSAGEBOARD_PATH + PICKLE_DUMP_JSON_FILE
  PICKLE_FA_JSON_FILE = MESSAGEBOARD_PATH + PICKLE_FA_JSON_FILE
  PICKLE_SCREENS = MESSAGEBOARD_PATH + PICKLE_SCREENS
  CODE_REPOSITORY = MESSAGEBOARD_PATH

  HISTOGRAM_CONFIG_FILE = WEBSERVER_PATH + HISTOGRAM_CONFIG_FILE
  CONFIG_FILE = WEBSERVER_PATH + CONFIG_FILE
  ROLLING_MESSAGE_FILE = WEBSERVER_PATH + ROLLING_MESSAGE_FILE
  ALL_MESSAGE_FILE = WEBSERVER_PATH + ALL_MESSAGE_FILE
  ROLLING_LOGFILE = WEBSERVER_PATH + ROLLING_LOGFILE
  STDERR_FILE = WEBSERVER_PATH + STDERR_FILE
  BACKUP_FILE = WEBSERVER_PATH + BACKUP_FILE
  SERVICE_VERIFICATION_FILE = WEBSERVER_PATH + SERVICE_VERIFICATION_FILE
  UPTIMES_FILE = WEBSERVER_PATH + UPTIMES_FILE

  HISTOGRAM_IMAGE_HTML = WEBSERVER_PATH + HISTOGRAM_IMAGE_HTML

  HOURLY_IMAGE_FILE = WEBSERVER_PATH + WEBSERVER_IMAGE_RELATIVE_FOLDER + HOURLY_IMAGE_FILE
  VERSION_REPOSITORY = WEBSERVER_PATH + VERSION_REPOSITORY



TIMEZONE = 'US/Pacific' # timezone of display
TZ = pytz.timezone(TIMEZONE)

KNOWN_AIRPORTS = ('SJC', 'SFO', 'OAK')  # iata codes that we don't need to expand


SPLITFLAP_CHARS_PER_LINE = 22
SPLITFLAP_LINE_COUNT = 6

DIRECTIONS_4 = ['N', 'E', 'S', 'W']
DIRECTIONS_8 = ['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW']
DIRECTIONS_16 = ['N', 'NNE', 'NE', 'ENE', 'E', 'ESE', 'SE', 'SSE',
                 'S', 'SSW', 'SW', 'WSW', 'W', 'WNW', 'NW', 'NNW']

HOURS = ['12a', ' 1a', ' 2a', ' 3a', ' 4a', ' 5a', ' 6a', ' 7a',
         ' 8a', ' 9a', '10a', '11a', '12p', ' 1p', ' 2p', ' 3p',
         ' 4p', ' 5p', ' 6p', ' 7p', ' 8p', ' 9p', '10p', '11p']

SECONDS_IN_MINUTE = 60
MINUTES_IN_HOUR = 60
HOURS_IN_DAY = 24
SECONDS_IN_HOUR = SECONDS_IN_MINUTE * MINUTES_IN_HOUR
MINUTES_IN_DAY = MINUTES_IN_HOUR * HOURS_IN_DAY
SECONDS_IN_DAY = SECONDS_IN_HOUR * HOURS_IN_DAY

# Units confirmed here:
# www.adsbexchange.com/forum/threads/units-in-the-dump1090-json-file.630617/#post-639541
CLIMB_RATE_UNITS = 'fpm'
# Speed units from the tracker are knots, based on dump1090's track.c:
# https://github.com/SDRplay/dump1090/blob/master/track.c
SPEED_UNITS = 'kn'
DISTANCE_UNITS = 'ft'  # altitude

# For displaying histograms
# If a key is not present, how should it be displayed in histograms?
KEY_NOT_PRESENT_STRING = 'Unknown'
OTHER_STRING = 'Other'
# What key strings should be listed last in sequence?
SORT_AT_END_STRINGS = [OTHER_STRING, KEY_NOT_PRESENT_STRING]
# What is the sorted sequence of keys for days of week?
DAYS_OF_WEEK = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']

AIRCRAFT_LENGTH = {} # in meters
AIRCRAFT_LENGTH['Airbus A220-100 (twin-jet)'] = 35
AIRCRAFT_LENGTH['Airbus A300F4-600 (twin-jet)'] = 54.08
AIRCRAFT_LENGTH['Airbus A319 (twin-jet)'] = 33.84
AIRCRAFT_LENGTH['Airbus A320 (twin-jet)'] = 37.57




                            <----SKIPPED LINES---->




AIRCRAFT_LENGTH['Cessna Citation Sovereign (twin-jet)'] = 19.35
AIRCRAFT_LENGTH['Cessna Citation V (twin-jet)'] = 14.91
AIRCRAFT_LENGTH['Cessna Citation X (twin-jet)'] = 22.04
AIRCRAFT_LENGTH['Cessna Citation Mustang (twin-jet)'] = 12.37
AIRCRAFT_LENGTH['Cessna Skyhawk (piston-single)'] = 8.28
AIRCRAFT_LENGTH['Cessna Skylane (piston-single)'] = 8.84
AIRCRAFT_LENGTH['Cessna T206 Turbo Stationair (piston-single)'] = 8.61
AIRCRAFT_LENGTH['Beechcraft Bonanza (33) (piston-single)'] = 7.65
AIRCRAFT_LENGTH['Beechcraft Super King Air 200 (twin-turboprop)'] = 13.31
AIRCRAFT_LENGTH['Beechcraft Super King Air 350 (twin-turboprop)'] = 14.22
AIRCRAFT_LENGTH['Beechcraft King Air 90 (twin-turboprop)'] = 10.82
AIRCRAFT_LENGTH['Learjet 45 (twin-jet)'] = 17.68
AIRCRAFT_LENGTH['Pilatus PC-12 (single-turboprop)'] = 14.4


def Log(message, file=None, rolling=None):
  """Write a message to a logfile along with a timestamp.

  Args:
    message: string message to write
    file: string representing file name and, if needed, path to the file to write to
    rolling: name of file that will keep only the last n lines of file
  """
  # can't define as a default parameter because LOGFILE name is potentially
  # modified based on SIMULATION flag
  if not file:
    file = LOGFILE

  # special case: for the main logfile, we always keep a rolling log
  if not rolling and file == LOGFILE:
    rolling = ROLLING_LOGFILE

  try:
    with open(file, 'a') as f:
      # by excluding the timestamp, file diffs become easier between runs
      if not SIMULATION or file == LOGFILE:
        f.write('='*80+'\n')
        f.write(str(datetime.datetime.now(TZ))+'\n')
        f.write('\n')
      f.write(str(message)+'\n')
  except IOError:
    Log('Unable to append to ' + file)

  if rolling:
    Tail(file, rolling, lines_to_keep=ROLLING_LOG_SIZE)


def Tail(in_name, rolling_name, max_line_length=100, lines_to_keep=1000):
  """Fast pythonic implementation of tail -n.

  Args:
    in_name: name of file for which we want the tail
    rolling_name: name of file to write out
    max_line_length: since this uses seek to find the block of text near the end
      that contains at least lines_to_keep lines, we need to estimate the max line
      length over that block of text.  We can afford to be a little conservative
      here.
    lines_to_keep: how many lines to keep in the rolling file.

  Returns:
    Integer number of lines actually kept.
  """
  with open(in_name, 'r') as f:
    f.seek(0, os.SEEK_END)
    f_length = f.tell()

    bytes_to_read = min(max_line_length * lines_to_keep, f_length)
    f.seek(f_length - bytes_to_read)
    end_text = f.read()
    lines = end_text.split('\n')
    # perhaps the file was smaller than lines_to_keep lines, or many lines were
    # longer than max_line_length; in that case, the resulting text block will
    # potentially be smaller than lines_to_keep
    lines_to_keep = min(lines_to_keep, len(lines))

  with open(rolling_name, 'w') as f:
    f.write('\n'.join(lines[-lines_to_keep:]))
  return lines_to_keep




                            <----SKIPPED LINES---->





def LogTimes(times, threshold=0, title=''):
  """Logs elapsed time messages from a list of tuples of epochs and identifiers."""
  total_time = times[-1][0] - times[0][0]
  if threshold and total_time < threshold:
    return
  msg = 'Code timing\n'
  if title:
    msg = '%s\n' % title
  msg += 'Total time: %.2fs\n' % total_time
  for n, t in enumerate(times[:-1]):
    msg += '%.2fs to get from reading %s to reading %s\n' % (
        times[n + 1][0] - t[0], t[1], times[n + 1][1])
  Log(msg)
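
# Illustrative call (timestamps and labels are hypothetical):
#   LogTimes([(t0, 'loop start'), (t1, 'json parsed'), (t2, 'message sent')],
#            threshold=5, title='Main loop')
# logs the total elapsed time plus the time between each consecutive reading.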


def MaintainRollingWebLog(message, max_count, filename=None):
  """Maintains a rolling text file of at most max_count printed messages.

  Newest data at top and oldest data at the end, of at most max_count messages,
  where the delimiter between each message is identified by a special fixed string.


  Args:
    message: text message to prepend to the file.
    max_count: maximum number of messages to keep in the file; the max_count+1st message
      is deleted.
    filename: the file to update.
  """
  # can't define as a default parameter because ROLLING_MESSAGE_FILE name is potentially
  # modified based on SIMULATION flag
  if not filename:
    filename = ROLLING_MESSAGE_FILE
  rolling_log_header = '='*(SPLITFLAP_CHARS_PER_LINE + 2)
  existing_file = ReadFile(filename)
  log_message_count = existing_file.count(rolling_log_header)
  if log_message_count >= max_count:
    message_start_list = [i for i in range(0, len(existing_file))
                          if existing_file[i:].startswith(rolling_log_header)]
    existing_file_to_keep = existing_file[:message_start_list[max_count - 1]]
  else:
    existing_file_to_keep = existing_file

  t = datetime.datetime.now(TZ).strftime('%m/%d/%Y, %H:%M:%S')
  new_message = (
      '\n'.join([rolling_log_header, t, '', message])
      + '\n' + existing_file_to_keep)
  try:
    with open(filename, 'w') as f:
      f.write(new_message)
  except IOError:
    Log('Unable to maintain rolling log at ' + filename)


def UtcToLocalTimeDifference(timezone=TIMEZONE):
  """Calculates number of seconds between UTC and given timezone.

  Returns number of seconds between UTC and given timezone; if no timezone given, uses
  TIMEZONE defined in global variable.

  Args:
    timezone: string representing a valid pytz timezone in pytz.all_timezones.

  Returns:
    Integer number of seconds.
  """
  utcnow = pytz.timezone('utc').localize(datetime.datetime.utcnow())
  home_time = utcnow.astimezone(pytz.timezone(timezone)).replace(tzinfo=None)
  system_time = utcnow.astimezone(tzlocal.get_localzone()).replace(tzinfo=None)

  offset = dateutil.relativedelta.relativedelta(home_time, system_time)
  offset_seconds = offset.hours * SECONDS_IN_HOUR
  return offset_seconds
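
# For example - assuming the system clock runs in UTC and TIMEZONE is 'US/Pacific'
# during daylight saving time - UtcToLocalTimeDifference() returns -7 * 3600 = -25200.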


def IntersectionForTwoPaths(pos1, bearing1, pos2, bearing2):
  """Identifies the lat/lon intersection point given two starting points and two bearings.

  Math provided at: http://www.movable-type.co.uk/scripts/latlong.html in the section:
  "Intersection of two paths given start points and bearings"; returns the latitude and
  longitude for the intersection.

  Args:
    pos1: a 2-tuple defining (lat, lon) in decimal degrees
    bearing1: bearing of pos1
    pos2: a 2-tuple defining (lat, lon) in decimal degrees
    bearing2: bearing of pos2

  Returns:
    Point of intersection as a 2-tuple defining (lat, lon) in decimal degrees
  """
  sin = math.sin
  asin = math.asin
  cos = math.cos
  acos = math.acos
  atan2 = math.atan2
  sqrt = math.sqrt
  radians = math.radians
  degrees = math.degrees
  pi = math.pi

  (lat1, lon1) = pos1
  (lat2, lon2) = pos2
  phi1 = radians(lat1)
  lambda1 = radians(lon1)
  theta1 = radians(bearing1)
  phi2 = radians(lat2)
  lambda2 = radians(lon2)
  theta2 = radians(bearing2)

  delta12 = 2*asin(
      sqrt(sin((phi2-phi1)/2)**2+cos(phi1)*cos(phi2)*sin((lambda2-lambda1)/2)**2))
  thetaa = acos((sin(phi2) - sin(phi1)*cos(delta12)) / (sin(delta12)*cos(phi1)))
  thetab = acos((sin(phi1) - sin(phi2)*cos(delta12)) / (sin(delta12)*cos(phi2)))
  if sin(lambda2-lambda1) > 0:
    theta12 = thetaa
    theta21 = 2*pi - thetab
  else:
    theta12 = 2*pi - thetaa
    theta21 = thetab
  alpha1 = theta1 - theta12
  alpha2 = theta21 - theta2
  alpha3 = acos(-cos(alpha1)*cos(alpha2)+sin(alpha1)*sin(alpha2)*cos(delta12))
  delta13 = atan2(
      sin(delta12)*sin(alpha1)*sin(alpha2),
      cos(alpha2)+cos(alpha1)*cos(alpha3))
  phi3 = asin(sin(phi1)*cos(delta13)+cos(phi1)*sin(delta13)*cos(theta1))

  dlambda13 = atan2(sin(theta1)*sin(delta13)*cos(phi1), cos(delta13)-sin(phi1)*sin(phi3))
  lambda3 = lambda1 + dlambda13
  intersection = (degrees(phi3), degrees(lambda3))
  return intersection


def ConvertBearingToCompassDirection(bearing, length=3, pad=False):
  """Converts a bearing (in degrees) to a compass dir of 1, 2, or 3 chars (N, NW, NNW).

  Args:
    bearing: degrees to be converted
    length: if 1, 2, or 3, converts to one of 4, 8, or 16 headings:
      - 1: N, S, E, W
      - 2: NE, SW, etc. also valid
      - 3: NNW, ESE, etc. also valid
    pad: boolean indicating whether the direction should be right-justified to length
      characters

  Returns:
    String representation of the compass heading.
  """
  if not isinstance(bearing, numbers.Number):
    return bearing

  divisions = 2**(length+1)  # i.e.: 4, 8, or 16
  division_size = 360 / divisions  # i.e.: 90, 45, or 22.5
  bearing_number = round(bearing / division_size)

  if length == 1:
    directions = DIRECTIONS_4
  elif length == 2:
    directions = DIRECTIONS_8
  else:
    directions = DIRECTIONS_16

  direction = directions[bearing_number%divisions]
  if pad:




                            <----SKIPPED LINES---->




  if False in is_numeric:
    return None

  lat1, lon1, lat2, lon2 = [math.radians(x) for x in (*pos1, *pos2)]
  hav = (math.sin((lat2 - lat1) / 2.0)**2
         + math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2.0)**2)
  distance = 2 * RADIUS * math.asin(math.sqrt(hav))

  # Note: though pyproj has this, having trouble installing on rpi
  #az12, az21, distance = g.inv(lon1, lat1, lon2, lat2)

  return distance
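
# Rough sanity check (coordinates approximate): HaversineDistanceMeters(HOME,
# (37.6213, -122.3790)) - roughly the antenna-to-SFO distance - should come out on the
# order of 5,500 meters.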


def SpeedInMeters(speed_in_knots):
  """Converts speed in knots to speed in meters per second."""
  return speed_in_knots * METERS_PER_SECOND_IN_KNOTS


def MetersTraveled(speed_in_knots, seconds):
  """Converts speed in knots to distance traveled in meters given an elapsed seconds."""
  return SpeedInMeters(speed_in_knots) * seconds


def ClosestKnownLocation(flight, seconds):
  """Using the path in the flight, returns the most recent location observations.

  Flights in the flight dictionary have their path maintained over all the time
  that the radio continues to observe the flight. This function identifies the closest in
  time observation in the path, given number of seconds after the canonical time
  (or before, if sec is negative).

  Args:
    flight: Flight dictionary of interest.
    seconds: Number of seconds after the canonical time of the flight (i.e.: now).


  Returns:
    Tuple:
    - Dictionary of location attributes including the following keys: speed, lat, lon,
      track, altitude, vertrate, now (which is a timestamp reflecting when these
      observations were made)
    - seconds in the past (as compared to the seconds requested) that this observation
      was made. That is, if a location at seconds=10 was requested, if the closest found
      location attributes were at time of 8 seconds, then this would be +2. Since the
      closest time is found, this can also be negative. Or alternatively, this can be
      thought of as the number of seconds still to project the movement for, where
      positive is the future.
  """
  now = flight['now']
  if 'persistent_path' not in flight:
    location = {
        'speed': flight.get('speed'),
        'lat': flight.get('lat'),
        'lon': flight.get('lon'),
        'track': flight.get('track'),
        'altitude': flight.get('altitude'),
        'vertrate': flight.get('vertrate'),
        'now': now}
    return (location, seconds)

  path = flight['persistent_path']
  path_timestamps = [p['now'] for p in path]
  absolute_deltas = [abs(seconds - (t - now)) for t in path_timestamps]

  min_delta = min(absolute_deltas)
  index = absolute_deltas.index(min_delta)
  closest_now_to_request = path[index]['now']
  closest_observation = {
      'speed': path[index].get('speed'),
      'lat': path[index].get('lat'),
      'lon': path[index].get('lon'),
      'track': path[index].get('track'),
      'altitude': path[index].get('altitude'),
      'vertrate': path[index].get('vertrate'),
      'now': closest_now_to_request}
  # i.e.: suppose:
  #       now = 15000
  #       closest_to_now = 15008
  #       request seconds was for 10
  # So there's still 2 more seconds to elapse until the flight is here
  time_delta_from_request = seconds - (closest_now_to_request - now)
  return (closest_observation, time_delta_from_request)



def FlightAnglesSecondsElapsed(flight, seconds, key_suffix='', canonical_loc=False):
  """Returns angular position of flight given a certain amount of time elapsing from sight.

  As time elapses after the flight was first observed, it will be in a new position. That
  new position is based on the most up-to-date location details observed, as it may have
  been seen more recently than the original location details. Then, based on those most
  recent location details, we can estimate its new location at any given time by
  projecting the bearing, speed, etc. out in time.


  Args:
    flight: Flight dictionary of interest.
    seconds: Number of seconds after the canonical time of the flight (i.e.: now).

    key_suffix: Appended to the keys that are returned in the return dictionary.
    canonical_loc: Boolean indicating whether we should only examine the location details
      stored at seconds=0 in the path, which would be identical to that stored in the
      base dictionary itself. This provides access to the "original" reported location
      details in the same format as the updated or more current values, primarily so
      that comparisons can be easily made between calculations that might fall back to
      the original values vs. the updated values.

  Returns:
    Dictionary of location attributes including the following keys: azimuth_degrees;
    altitude_degrees; ground_distance_feet; crow_distance_feet; lat; lon.

  """
  seconds_ahead_to_find_loc = seconds
  if canonical_loc:
    seconds_ahead_to_find_loc = 0

  (location, time_to_project) = ClosestKnownLocation(flight, seconds_ahead_to_find_loc)

  if not all([isinstance(x, numbers.Number) for x in (
      location.get('speed'),
      location.get('lat'),
      location.get('lon'),
      location.get('track'),
      location.get('altitude'))]):
    return {}

  if canonical_loc:
    time_to_project = seconds

  meters_traveled = MetersTraveled(location['speed'], time_to_project)
  new_position = TrajectoryLatLon(
      (location['lat'], location['lon']), meters_traveled, location['track'])

  angles = Angles(HOME, HOME_ALT, new_position, location['altitude'] / FEET_IN_METER)

  d = {}
  for key in angles:
    d[key + key_suffix] = angles[key]
  d['lat' + key_suffix] = location['lat']
  d['lon' + key_suffix] = location['lon']

  return d


def Angles(pos1, altitude1, pos2, altitude2):
  """Calculates the angular position of pos 2 from pos 1.

  Calculates the azimuth and the angular altitude to see point 2 from point 1, as well
  as two distance metrics: the "ground distance" and the "crow distance". Ground distance
  is measured between the points where plumb lines from the two positions meet sea level;
  crow distance also takes into account the difference in altitude or elevation, and is
  the distance a bird would have to fly to reach the second point from the first.


  Args:
    pos1: a 2-tuple of lat-lon for the first point (i.e.: HOME), in degrees.
    altitude1: height above sea level of pos1, in meters
    pos2: a 2-tuple of lat-lon for the second point (i.e.: the plane), in degrees.
    altitude2: height above sea level of pos2, in meters

  Returns:
    Dictionary of location attributes including the following keys: azimuth_degrees;
    altitude_degrees; ground_distance_feet; crow_distance_feet.
  """
  sin = math.sin
  cos = math.cos
  atan2 = math.atan2
  atan = math.atan
  sqrt = math.sqrt
  radians = math.radians
  degrees = math.degrees

  if not all([isinstance(x, numbers.Number) for x in (
      *pos1, altitude1, *pos2, altitude2)]):
    return None

  distance = HaversineDistanceMeters(pos1, pos2)  # from home to plumb line of plane


  lat1, lon1, lat2, lon2 = [radians(x) for x in (*pos1, *pos2)]
  d_lon = lon2 - lon1
  # azimuth calc from https://www.omnicalculator.com/other/azimuth


  az = atan2((sin(d_lon)*cos(lat2)), (cos(lat1)*sin(lat2)-sin(lat1)*cos(lat2)*cos(d_lon)))
  az_degrees = degrees(az)
  altitude = altitude2 - altitude1
  alt = atan(altitude / distance)
  alt_degrees = degrees(alt)
  crow_distance = sqrt(altitude**2 + distance**2)  # from home to the plane

  return {'azimuth_degrees': az_degrees, 'altitude_degrees': alt_degrees,
          'ground_distance_feet': distance, 'crow_distance_feet': crow_distance}


def TrajectoryLatLon(pos, distance, track):
  """Calculates lat/lon a plane will be given its starting point and direction / speed.

  Args:
    pos: a 2-tuple of lat-lon for the flight, in degrees.
    distance: the distance, in meters, the flight is traveling from its current lat/lon.

    track: the track or bearing of the plane, in degrees.

  Returns:
    Updated lat/lon for the given trajectory.
  """
  #distance in meters
  #track in degrees
  sin = math.sin
  cos = math.cos
  atan2 = math.atan2
  asin = math.asin
  radians = math.radians
  degrees = math.degrees

  track = radians(track)
  lat1 = radians(pos[0])
  lon1 = radians(pos[1])

  d_div_R = distance/RADIUS
  lat2 = asin(sin(lat1)*cos(d_div_R) + cos(lat1)*sin(d_div_R)*cos(track))
  lon2 = lon1 + atan2(sin(track)*sin(d_div_R)*cos(lat1), cos(d_div_R)-sin(lat1)*sin(lat2))



  lat2_degrees = degrees(lat2)
  lon2_degrees = degrees(lon2)
  return (lat2_degrees, lon2_degrees)
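
# Quick sanity check: heading due north (track=0) for one nautical mile (1852 meters)
# should increase latitude by roughly one arcminute (~0.0167 degrees) and leave the
# longitude essentially unchanged.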


def MinMetersToHome(pos, bearing):
  """Identifies the minimum distance between a given trajectory and HOME.

  Given a trajectory (bearing and lat/lon position), finds the minimum distance (in
  meters) that that trajectory will come to another point.

  Args:
    pos: a 2-tuple defining (lat, lon) in decimal degrees
    bearing: the bearing, or heading, of the trajectory, in degrees

  Returns:
    Minimum distance in meters.
  """
  is_numeric = [isinstance(x, numbers.Number) for x in (*pos, bearing)]
  if False in is_numeric:
    return None

  # To find the minimum distance, we must first find the point at which the minimum
  # distance will occur, which in turn is accomplished by finding the intersection
  # between that trajectory and a trajectory orthogonal (+90 degrees, or -90 degrees)
  # to it but intersecting HOME.
  potential_intersection1 = IntersectionForTwoPaths(pos, bearing, HOME, bearing + 90)

  potential_intersection2 = IntersectionForTwoPaths(pos, bearing, HOME, bearing - 90)

  potential_distance1 = HaversineDistanceMeters(potential_intersection1, HOME)
  potential_distance2 = HaversineDistanceMeters(potential_intersection2, HOME)

  # Since one of those two potential intersection points (i.e.: +90 or -90 degrees) will
  # create a spurious result, and given the strong locality to HOME that is expected
  # from the initial position, the "correct" result is identified by simply taking the
  # minimum distance of the two candidates.
  return min(potential_distance1, potential_distance2)
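
# Geometric sanity check: a flight currently due east of HOME and heading due north
# (bearing 0) should have its minimum distance come out approximately equal to its
# current east-west offset from HOME.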


def SecondsToHhMm(seconds, colon=False):
  """Converts integer number of seconds to xhym string (i.e.: 7h17m) or to 7:17.

  Args:
    seconds: number of seconds
    colon: controls format; if False, format is 7h17m; if True, format is 7:17.

  Returns:
    String representation of hours and minutes.
  """
  if seconds is None:
    return KEY_NOT_PRESENT_STRING[:3]
  minutes = int(abs(seconds) / SECONDS_IN_MINUTE)
  if minutes >= MINUTES_IN_HOUR:
    hours = int(minutes / MINUTES_IN_HOUR)
    minutes = minutes % MINUTES_IN_HOUR
    if colon:




                            <----SKIPPED LINES---->




def HourString(flight):
  """Formats now on flight into a a 3-digit string like '12a' or ' 1p'."""
  time_string = DisplayTime(flight)
  if time_string:
    hour_string = time_string[11:13]
    hour_0_23 = int(hour_string)
    is_pm = int(hour_0_23/12) == 1
    hour_number = hour_0_23 % 12
    if hour_number == 0:
      hour_number = 12
    out_string = str(hour_number).rjust(2)
    if is_pm:
      out_string += 'p'
    else:
      out_string += 'a'
  else:
    out_string = KEY_NOT_PRESENT_STRING
  return out_string


def MinuteOfDay(ts=None):
  """Returns integer minute of day (0..1439) for the given timestamp or for now."""
  # Note: the default is None rather than time.time() because default arguments are
  # evaluated only once, at function definition time.
  if ts is None:
    ts = time.time()
  dt = datetime.datetime.fromtimestamp(ts, TZ)
  minute_of_day = dt.hour * MINUTES_IN_HOUR + dt.minute
  return minute_of_day
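
# For example, a timestamp falling at 1:30pm in TZ yields 13 * 60 + 30 = 810.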


def HoursSinceMidnight(timezone=TIMEZONE):
  """Returns the float number of hours elapsed since midnight in the given timezone."""
  tz = pytz.timezone(timezone)
  now = datetime.datetime.now(tz)
  seconds_since_midnight = (
      now - now.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()
  hours = seconds_since_midnight / SECONDS_IN_HOUR
  return hours


def HoursSinceFlight(now, then):
  """Returns the number of hours between a timestamp and a flight.

  Args:
    now: timezone-aware datetime representation of timestamp
    then: epoch (float)

  Returns:
    Number of hours between now and then (i.e.: now - then; a positive return value
    means now occurred after then).
  """
  then = datetime.datetime.fromtimestamp(then, TZ)
  delta = now - then
  delta_hours = delta.days * HOURS_IN_DAY + delta.seconds / SECONDS_IN_HOUR
  return delta_hours


def DataHistoryHours(flights):
  """Calculates the number of hours between the earliest & last flight in data.

  flights: List of all flights in sequential order, so that the first in list is earliest
      in time.

  Returns:
    Return time difference in hours between the first flight and last flight.
  """
  min_time = flights[0]['now']
  max_time = flights[-1]['now']
  delta_hours = (max_time - min_time) / SECONDS_IN_HOUR
  return round(delta_hours)


def ReadFile(filename, log_exception=False):
  """Returns text from the given file name if available, empty string if not available.

  Args:
    filename: string of the filename to open, potentially also including the full path.
    log_exception: boolean indicating whether to log an exception if file not found.

  Returns:
    Text string of file contents.
  """
  try:
    with open(filename, 'r') as content_file:
      file_contents = content_file.read()
  except IOError:
    if log_exception:
      Log('Unable to read '+filename)
    return ''
  return file_contents

# Because reading is ~25x more expensive than getmtime, we only read & parse if the
# getmtime is more recent than the last call for this file. So this dict stores, per
# file, a tuple of the last modification time read and the resulting parsed settings.
CACHED_FILES = {}
def ReadAndParseSettings(filename):
  """Reads given filename and then parses the resulting key-value pairs into a dict."""
  global CACHED_FILES
  (last_read_time, settings) = CACHED_FILES.get(filename, (0, {}))
  if os.path.exists(filename):
    last_modified = os.path.getmtime(filename)
    if last_modified > last_read_time:
      setting_str = ReadFile(filename)
      settings = ParseSettings(setting_str)
      CACHED_FILES[filename] = (last_modified, settings)
    return settings

  # The file does not exist - or at least no longer exists - so remove it from the cache
  if filename in CACHED_FILES:
    CACHED_FILES.pop(filename)

  return {}
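
# Illustrative cache entry (mtime and values are hypothetical):
#   CACHED_FILES['secure/settings.txt'] = (1592400000.0, {'distance': 1426})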


def BuildSettings(d):
  """Converts a dict to a string of form key1=value1;...;keyn=valuen; keys alpha sorted."""
  kv_pairs = []
  for key in sorted(list(d.keys())):
    kv_pairs.append('%s=%s' % (key, d[key]))
  s = ';'.join(kv_pairs)
  if s:  # add terminating semicolon
    s += ';'
  return s


def ParseSettings(settings):
  """Parse delimited string of settings in file to a dict of key value pairs.

  Parses a string like 'distance=1426;altitude=32559;on=23;off=24;delay=15;insights=all;'
  into key value pairs.

  Args:
    settings: semicolon-delimited sequence of equal-sign delimited key-value pairs, i.e.:
      key1=value1;key2=value2;....;keyn=valuen.

  Returns:
    Dict of key value pairs contained in the setting file; empty dict if file not
    available or if delimiters missing.
  """
  settings_dict = {}
  for setting in settings.split(';'):
    if '=' in setting:
      kv_list = setting.split('=')
      k = kv_list[0]
      v = kv_list[1]
      if v.isdigit():
        v = int(v)
      else:
        try:
          v = float(v)
        except ValueError:
          pass
      settings_dict[k] = v

  return settings_dict
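
# Illustrative sketch (not called anywhere): ParseSettings and BuildSettings are rough
# inverses of each other; the setting values below are hypothetical.
def _ExampleSettingsRoundTrip():
  settings = ParseSettings('distance=1426;altitude=32559;on=23;')
  assert settings == {'distance': 1426, 'altitude': 32559, 'on': 23}
  # BuildSettings emits the keys alpha-sorted with a terminating semicolon.
  assert BuildSettings(settings) == 'altitude=32559;distance=1426;on=23;'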


def RemoveSetting(configuration, setting):
  """Removes the named setting from the configuration file."""
  configuration.pop(setting)
  configuration = BuildSettings(configuration)
  WriteFile(CONFIG_FILE, configuration)
  return configuration


def WriteFile(filename, text, log_exception=False):
  """Writes the text to the file, returning boolean indicating success.

  Args:
    filename: string of the filename to open, potentially also including the full path.

    text: the text to write
    log_exception: boolean indicating whether to log an exception if file not found.


  Returns:
    Boolean indicating whether the write was successful.
  """
  try:
    with open(filename, 'w') as content_file:
      content_file.write(text)
  except IOError:
    if log_exception:
      Log('Unable to write to '+filename)
    return False
  return True


def PrependFileName(full_path, prefix):
  """Converts /dir/file.png to /dir/prefixfile.png."""
  directory, file_name = os.path.split(full_path)
  file_name = prefix+file_name
  return os.path.join(directory, file_name)



def UnpickleObjectFromFile(full_path, date_segmentation, max_days=None, filenames=False):
  """Load a repository of pickled data into memory.

  Args:
    full_path: name (potentially including path) of the pickled file
    date_segmentation: If true, searches for all files that have a yyyy-mm-dd date
      prefix prepended to the file name specified in the full path, and loads them in
      sequence for unpickling; if false, uses the full_path as is and loads just that
      single file.
    max_days: Integer that, if specified, indicates maximum number of days of files to
      load back in; otherwise, loads all.  That is, at most max_days files will be read.

    filenames: If true, rather than returning the list of data, returns a list of the
      filenames that would have been read.

  Returns:
    Return a list - either of the data, or of all the file names that would have been
    read.
  """
  if date_segmentation:
    directory, file = os.path.split(full_path)

    d = '[0-9]'
    sep = '-'
    date_format = d*4 + sep + d*2 + sep + d*2  # yyyy-mm-dd
    exp = date_format + sep + file
    pattern = re.compile(exp)
    files = os.listdir(directory)

    if max_days:  # no need to read any files older than x days
      earliest_date = EpochDisplayTime(
          time.time() - (max_days - 1) * SECONDS_IN_DAY, '%Y-%m-%d')
      files = [f for f in files if f[:10] >= earliest_date]


    files = sorted([os.path.join(directory, f) for f in files if pattern.match(f)])
  else:
    if os.path.exists(full_path):
      files = [full_path]
    else:
      return []

  data = []

  if filenames:
    return files

  for file in files:
    try:
      with open(file, 'rb') as f:
        while True:
          data.append(pickle.load(f))
    except (EOFError, pickle.UnpicklingError):
      pass
    except UnicodeDecodeError as e:
      Log('Process %s reading file %s gave error %s' % (
          psutil.Process(os.getpid()).name(), file, e))

  return data


cached_object_count = {}

def PickleObjectToFile(data, full_path, date_segmentation, timestamp=None, verify=False):
  """Append one pickled flight to the end of binary file.

  Args:
    data: data to pickle
    full_path: name (potentially including path) of the pickled file
    date_segmentation: boolean indicating whether the date string yyyy-mm-dd should be
      prepended to the file name in full_path based on the current date, so that
      pickled files are segmented by date.
    timestamp: if date_segmentation is True, this is used rather than system time
      to generate the file name.
    verify: boolean indicating if we should verify that the pickled file object count
      increments by one, rewriting entire pickle file if it doesn't. Note that since
      this requires reading the entire pickle file and unpickling, it should only be
      done for small files / objects.

  Returns:
    Name of file to which the data was pickled if successful; None if failed.
  """
  global cached_object_count
  if not timestamp:
    timestamp = time.time()
  date_suffix = EpochDisplayTime(timestamp, '%Y-%m-%d-')
  if date_segmentation:
    full_path = PrependFileName(full_path, date_suffix)

  if full_path not in cached_object_count:
    cached_object_count[full_path] = len(UnpickleObjectFromFile(full_path, False))

  if not os.path.exists(full_path):  # Another method may delete the file
    cached_object_count[full_path] = 0

  try:
    with open(full_path, 'ab') as f:
      f.write(pickle.dumps(data))

  except IOError:
    Log('Unable to append pickle ' + full_path)
    return None

  if verify:
    # file object count should now be one more; if it isn't, the file is corrupted, and
    # rather than continue writing to a corrupted pickle file, we should fix it so we
    # don't lose too much data
    pickled_data = UnpickleObjectFromFile(full_path, False)
    cached_count = cached_object_count[full_path]
    if len(pickled_data) == cached_count + 1:
      cached_object_count[full_path] = cached_count + 1
    else:
      tmp_file_name = full_path + '.tmp'
      try:
        with open(tmp_file_name, 'ab') as f:
          for d in pickled_data:  # rewrite the old data that was retained
            f.write(pickle.dumps(d))
          f.write(pickle.dumps(data))  # new data
      except IOError:
        Log('Unable to append pickle %s in verify step; left tmp file as-is' %
            tmp_file_name)
        return None
      shutil.move(tmp_file_name, full_path)
      cached_object_count[full_path] = len(pickled_data) + 1
      Log('Re-pickled %s: after writing %s, expected len %d to increment, '
          'but it did not; after repickling (and adding the new data), new length = %d' % (

              full_path, data, cached_count, cached_object_count[full_path]))

  return full_path
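
# Illustrative sketch (not called anywhere): each call to PickleObjectToFile appends one
# pickled object, and UnpickleObjectFromFile returns them in order. The path and data
# below are hypothetical.
def _ExamplePickleRoundTrip():
  path = '/tmp/example_flights.pk'
  PickleObjectToFile({'flight_number': 'SWA1234'}, path, date_segmentation=False)
  PickleObjectToFile({'flight_number': 'UAL300'}, path, date_segmentation=False)
  return UnpickleObjectFromFile(path, date_segmentation=False)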



def UpdateAircraftList(persistent_nearby_aircraft, current_nearby_aircraft, now):
  """Identifies newly seen aircraft and removes aircraft that haven't been seen recently.

  Updates persistent_nearby_aircraft as follows: flights that have been last seen more
  than PERSISTENCE_SECONDS seconds ago are removed; new flights in current_nearby_aircraft
  are added. Also identifies newly-seen aircraft and updates the last-seen timestamp of
  flights that have been seen again.

  Args:
    persistent_nearby_aircraft: dictionary where keys are flight number / squawk tuples,
      and the values are the time the flight was last seen.
    current_nearby_aircraft: dictionary where keys are flight numbers / squawk tuples,
      and the values are themselves dictionaries with key-value pairs about that
      flight, with at least one of the kv-pairs being the time the flight was seen.

    now: the timestamp of the flights in the current_nearby_aircraft.

  Returns:
    A list of newly-nearby flight identifiers (i.e.: 2-tuple of flight number / squawk).

  """
  newly_nearby_flight_identifiers = []
  for flight_identifier in current_nearby_aircraft:
    flight_number = flight_identifier[0]
    # Only add it to the list once we've received a flight number
    if flight_identifier not in persistent_nearby_aircraft and flight_number:
      newly_nearby_flight_identifiers.append(flight_identifier)
    persistent_nearby_aircraft[flight_identifier] = now

  flights_to_delete = []
  for flight_identifier in persistent_nearby_aircraft:
    if (flight_identifier not in current_nearby_aircraft
        and (now - persistent_nearby_aircraft[flight_identifier]) > PERSISTENCE_SECONDS):

      flights_to_delete.append(flight_identifier)
  for flight_identifier in flights_to_delete:
    del persistent_nearby_aircraft[flight_identifier]
  return newly_nearby_flight_identifiers
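
# Illustrative sketch (not called anywhere): a newly-seen flight is reported once, then
# drops out of persistent_nearby_aircraft after PERSISTENCE_SECONDS with no further
# radio reports. Identifiers and timestamps are hypothetical.
def _ExampleUpdateAircraftList():
  persistent = {}
  current = {('SWA1234', '2200'): {'now': 1000.0}}
  new_ids = UpdateAircraftList(persistent, current, 1000.0)
  assert new_ids == [('SWA1234', '2200')]
  UpdateAircraftList(persistent, {}, 1000.0 + PERSISTENCE_SECONDS + 1)
  assert not persistent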


def ScanForNewFlights(persistent_nearby_aircraft, persistent_path, log_jsons):
  """Determines if there are any new aircraft in the radio message.

  The radio continuously dumps new json messages to the Raspberry Pi with all the
  flights currently observed. This function picks up the latest radio json, and for
  any new nearby flights - there should generally be at most one new flight on each
  pass through - gets additional flight data from FlightAware and augments the flight
  definition with the relevant fields to keep.


  Args:
    persistent_nearby_aircraft: dictionary where keys are flight numbers, and the values
      are the time the flight was last seen.
    persistent_path: dictionary where keys are flight numbers, and the values are a
      sequential list of the location-attributes in the json file; allows for tracking
      the flight path over time.
    log_jsons: boolean indicating whether we should pickle the JSONs.

  Returns:
    A tuple:
    - updated persistent_nearby_aircraft
    - (possibly empty) dictionary of flight attributes of the new flight upon its
      first observation.
    - the time of the radio observation if present; otherwise the current system time
    - a dictionary of attributes about the dump itself (i.e.: # of flights; furthest
      observed flight, etc.)
    - persistent_path, a data structure containing past details of a flight's location
      as described in ParseDumpJson
  """
  flight_details = {}
  now = time.time()
  if SIMULATION:
    (dump_json, json_time) = DUMP_JSONS[SIMULATION_COUNTER]
  else:
    dump_json = ReadFile(DUMP_JSON_FILE, log_exception=True)

  json_desc_dict = {}
  current_nearby_aircraft = {}
  if dump_json:

    (current_nearby_aircraft, now, json_desc_dict, persistent_path) = ParseDumpJson(
        dump_json, persistent_path)

    if not SIMULATION and log_jsons:
      PickleObjectToFile((dump_json, now), PICKLE_DUMP_JSON_FILE, True)

    newly_nearby_flight_identifiers = UpdateAircraftList(
        persistent_nearby_aircraft, current_nearby_aircraft, now)

    if newly_nearby_flight_identifiers:

      if len(newly_nearby_flight_identifiers) > 1:
        newly_nearby_flight_identifiers_str = ', '.join(
            str(f) for f in newly_nearby_flight_identifiers)

        newly_nearby_flight_details_str = '\n'.join(

            [str(current_nearby_aircraft[f]) for f in newly_nearby_flight_identifiers])
        Log('Multiple newly-nearby flights: %s\n%s' % (
            newly_nearby_flight_identifiers_str, newly_nearby_flight_details_str))

      flight_identifier = newly_nearby_flight_identifiers[0]

      flight_aware_json = {}
      if SIMULATION:
        json_times = [j[1] for j in FA_JSONS]
        if json_time in json_times:
          flight_aware_json = FA_JSONS[json_times.index(json_time)][0]
      elif flight_identifier[0]:
        flight_number = flight_identifier[0]
        flight_aware_json, error_message = GetFlightAwareJson(flight_number)
        if flight_aware_json:
          UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, False)
        else:
          failure_message = 'No json from Flightaware for flight %s: %s' % (
              flight_number, error_message[:500])
          Log(failure_message)

          UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, True, failure_message)

      flight_details = {}
      if flight_aware_json:
        flight_details = ParseFlightAwareJson(flight_aware_json)

      if not SIMULATION and log_jsons:
        PickleObjectToFile((flight_aware_json, now), PICKLE_FA_JSON_FILE, True)

      # Augment FlightAware details with radio / radio-derived details
      flight_details.update(current_nearby_aircraft[flight_identifier])

      # Augment with the past location data; the [1] is because recall that
      # persistent_path[key] is actually a 2-tuple, the first element being
      # the most recent time seen, and the second element being the actual
      # path. But we do not need to keep around the most recent time seen any
      # more.
      flight_details['persistent_path'] = persistent_path[flight_identifier][1]

  return (
      persistent_nearby_aircraft,
      flight_details,
      now,
      json_desc_dict,
      persistent_path)


def DescribeDumpJson(parsed):
  """Generates a dictionary with descriptive attributes about the dump json file.

  Args:
    parsed: The parsed json file.

  Returns:
    Dictionary with attributes about radio range, number of flights seen, etc.
  """
  json_desc_dict = {}
  json_desc_dict['now'] = parsed['now']

  aircraft = [a for a in parsed['aircraft'] if a['seen'] < PERSISTENCE_SECONDS]
  json_desc_dict['radio_range_flights'] = len(aircraft)

  aircraft_with_pos = [a for a in aircraft if 'lat' in a and 'lon' in a]
  current_distances = [HaversineDistanceMeters(
      HOME, (a['lat'], a['lon'])) for a in aircraft_with_pos]
  current_distances = [
      d * FEET_IN_METER / FEET_IN_MILE for d in current_distances if d is not None]
  if current_distances:
    json_desc_dict['radio_range_miles'] = max(current_distances)

  return json_desc_dict


def SameFlight(f1, f2):
  """True if these two flights are likely the same flight, False otherwise."""
  if f1['flight_number'] == f2['flight_number']:
    return True
  if f1['squawk'] == f2['squawk']:
    return True
  return False


def MergedIdentifier(proposed_id, existing_ids):
  """Identifies what identifier to use for a flight.

  While most flights have both a squawk and a flight number, enough are missing one
  (only for it to appear later) that a 2-tuple of both is used as the persistent
  identifier across time, merging flights that share a common non-null flight number
  and/or squawk.

  Additionally, in very limited circumstances, a squawk may change mid-flight; in that
  case, the alphabetically-first squawk is used.

  This function identifies which identifier to use, and which - if any - should be merged
  into that one identifier from a group of existing identifiers.

  Args:
    proposed_id: The 2-tuple of (flight_number, squawk) of the identified flight.

    existing_ids: An iterable of existing 2-tuple identifiers, some (or none) of which
      may overlap with this flight.

  Returns:
    2-tuple:
      - the 2-tuple suggested identifier to use
      - a potentially empty list of ids to merge with the suggested identifier
  """
  flight_number, squawk = proposed_id

  def CheckPartialMatch(value, position):
    if value is not None:

      return [e for e in existing_ids if e[position] == value and e != proposed_id]
    return []

  matches = CheckPartialMatch(flight_number, 0)
  matches.extend(CheckPartialMatch(squawk, 1))

  if not matches:
    return proposed_id, []

  if not flight_number and matches:
    # arbitrarily choose alpha-first non-null flight_number
    matching_flight_numbers = [m[0] for m in matches if m[0] is not None]
    if matching_flight_numbers:
      flight_number = sorted(matching_flight_numbers)[0]
  if not squawk and matches:
    # arbitrarily choose alpha-first non-null squawk
    matching_squawks = [m[1] for m in matches if m[1] is not None]
    if matching_squawks:
      squawk = sorted(matching_squawks)[0]
  id_to_use = (flight_number, squawk)

  return id_to_use, matches
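
# Illustrative sketch (not called anywhere): a flight first tracked with only a squawk is
# later reported with a flight number too, so the earlier identifier gets merged in.
def _ExampleMergedIdentifier():
  id_to_use, ids_to_merge = MergedIdentifier(('SWA1234', '2200'), [(None, '2200')])
  assert id_to_use == ('SWA1234', '2200')
  assert ids_to_merge == [(None, '2200')]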


def MergePersistentPath(id_to_use, ids_to_merge, persistent_path):
  """Merges the persistent paths from multiple flights into a single flight.

  Since the identifiers may change over time of a flight for which we have already
  recorded some in-flight path history, this function allows us to combine all the
  persistent path details and merge it into a single flight. For instance, we may have
  only a squawk for a few seconds, which then changes mid-flight to another squawk,
  and then a few seconds later, we receive a radio signal with both the (new) squawk and
  flight number - thus we have three records to merge to one.


  This function merges all the persistent paths - which are 2-tuples of the most
  recent timestamp and a list of dictionaries - into one integrated persistent path.


  Args:
    id_to_use: The 2-tuple of (flight_number, squawk) of the final id we want the
      flight to have.
    ids_to_merge: an iterable of the ids to merge with the final id_to_use.
    persistent_path: the dictionary of existing persistent paths including at least
      ids_to_merge as keys, potentially also id_to_use, and perhaps additional flights
      as well.

  Returns:
    The merged persistent path, that includes id_to_use as one key, and removed
    ids_to_merge.
  """
  path = []
  timestamps = []

  if id_to_use in persistent_path and id_to_use not in ids_to_merge:
    ids_to_merge.append(id_to_use)

  for i in ids_to_merge:
    timestamps.append(persistent_path[i][0])
    path.extend(persistent_path[i][1])
    persistent_path.pop(i)

  persistent_path[id_to_use] = (max(timestamps), sorted(path, key=lambda p: p['now']))

  return persistent_path


def ParseDumpJson(dump_json, persistent_path):
  """Identifies all airplanes within given distance of home from the dump1090 file.

  Since the dump1090 json will have messages from all flights that the antenna has picked
  up, we want to keep only flights that are within a relevant distance to us, and to
  extract from the full set of data in the json just the relevant fields for additional
  analysis.

  While most flights have both a squawk and a flight number, enough are missing one
  (only for it to appear later) that a 2-tuple of both is used as the persistent
  identifier across time, merging flights that share a common non-null flight number
  and/or squawk.

  Args:
    dump_json: The text representation of the json message from dump1090-mutability

    persistent_path: dictionary where keys are flight numbers, and the values are a
      sequential list of the location-attributes in the json file; allows for tracking
      the flight path over time.

  Returns:
    Return tuple:
    - dictionary of all nearby planes, where keys are (flight_number, squawk) 2-tuples
      (i.e.: ('SWA7543', '2244')), and the value is itself a dictionary of attributes.
    - time stamp in the json file.
    - dictionary of attributes about the radio range
    - persistent dictionary of the track of recent flights, where keys are the same
      2-tuple identifiers and the value is a tuple, the first element being when the
      flight was last seen by this radio, and the second a list of dictionaries with
      past location info from the radio, i.e.: d[flight] = (timestamp, [{}, {}, {}])

  """
  parsed = json.loads(dump_json)
  now = parsed['now']
  nearby_aircraft = {}

  # Build dictionary summarizing characteristics of the dump_json itself
  json_desc_dict = DescribeDumpJson(parsed)

  for aircraft in parsed['aircraft']:
    simplified_aircraft = {}

    simplified_aircraft['now'] = now

    # flight_number
    flight_number = aircraft.get('flight')
    if flight_number:
      flight_number = flight_number.strip()

    # squawk
    squawk = aircraft.get('squawk')
    if squawk:
      squawk = squawk.strip()

    identifier = (flight_number, squawk)

    # merge any duplicate flights: since the id for nearby_aircraft & persistent_path is
    # the 2-tuple (flight_number, squawk), it's possible for a flight to add or drop
    # one of those two elements over time as the radio signal comes in / falls out.
    # Let's keep the identifier as the non-null values as soon as one is seen.
    id_to_use, ids_to_merge = MergedIdentifier(identifier, persistent_path.keys())



    # Now we need to rename any flight paths with that partial identifier to have
    # the correct new merged_identifier
    if ids_to_merge:
      persistent_path = MergePersistentPath(id_to_use, ids_to_merge, persistent_path)


    if 'lat' in aircraft and 'lon' in aircraft:
      lat = aircraft['lat']
      lon = aircraft['lon']
      if isinstance(lat, numbers.Number) and isinstance(lon, numbers.Number):

        simplified_aircraft['lat'] = lat
        simplified_aircraft['lon'] = lon

        altitude = aircraft.get('altitude', aircraft.get('alt_baro'))
        if isinstance(altitude, numbers.Number):
          simplified_aircraft['altitude'] = altitude

        speed = aircraft.get('speed', aircraft.get('gs'))
        if speed is not None:
          simplified_aircraft['speed'] = speed

        vert_rate = aircraft.get('vert_rate', aircraft.get('baro_rate'))
        if vert_rate is not None:
          simplified_aircraft['vert_rate'] = vert_rate

        track = aircraft.get('track')
        if isinstance(track, numbers.Number):
          min_meters = MinMetersToHome((lat, lon), track)
          simplified_aircraft['track'] = track
          simplified_aircraft['min_feet'] = min_meters * FEET_IN_METER

          # TODO: describe why we want to base this off haversine distance (i.e.:
          # the actual distance from home) vs. MinMetersToHome (i.e.: forecasted min
          # distance from home); it seems like the latter would give us more time
          # to respond? - maybe because there might be other closer flights even though
          # a far away flight might look like it's going to come nearby?

          haversine_distance_meters = HaversineDistanceMeters(HOME, (lat, lon))
          simplified_aircraft['distance'] = haversine_distance_meters
          if haversine_distance_meters < MIN_METERS:
            nearby_aircraft[id_to_use] = simplified_aircraft
            if flight_number:
              nearby_aircraft[id_to_use]['flight_number'] = flight_number
            if squawk:
              nearby_aircraft[id_to_use]['squawk'] = squawk
            # aircraft classification:
            # https://github.com/wiedehopf/adsb-wiki/wiki/ADS-B-aircraft-categories


            category = aircraft.get('category')
            if category is not None:
              nearby_aircraft[id_to_use]['category'] = category

        # Keep all that track info - once we start reporting on a nearby flight, it will
        # become part of the flight's persistent record. Also, because we build a list of
        # tracks for each flight and later point the flight dictionary at that list, we
        # only need to keep updating this list to keep the dictionary up to date (i.e.:
        # we don't need to directly touch the flights dictionary in main).
        (last_seen, current_path) = persistent_path.get(id_to_use, (None, []))
        if (  # flight position has been updated with this radio signal
            not current_path or
            simplified_aircraft.get('lat') != current_path[-1].get('lat') or
            simplified_aircraft.get('lon') != current_path[-1].get('lon')):
          current_path.append(simplified_aircraft)
        persistent_path[id_to_use] = (now, current_path)

  # if the flight was last seen too far in the past, remove the track info
  for f in list(persistent_path.keys()):
    (last_seen, current_path) = persistent_path[f]
    if last_seen < now - PERSISTENCE_SECONDS:
      persistent_path.pop(f)

  return (nearby_aircraft, now, json_desc_dict, persistent_path)
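
# Illustrative sketch (not called anywhere): a minimal dump1090-style message and how it
# flows through ParseDumpJson. The aircraft fields below are hypothetical; whether the
# flight lands in nearby_aircraft depends on MIN_METERS and the distance from HOME.
def _ExampleParseDumpJson():
  dump_json = json.dumps({
      'now': 1592400000.0,
      'aircraft': [{
          'flight': 'SWA1234 ', 'squawk': '2200', 'seen': 0.5,
          'lat': 37.0, 'lon': -122.0, 'altitude': 5000,
          'speed': 210, 'vert_rate': -640, 'track': 270}]})
  nearby_aircraft, now, json_desc_dict, persistent_path = ParseDumpJson(dump_json, {})
  # persistent_path is keyed by the ('SWA1234', '2200') 2-tuple and retains the location
  # history for PERSISTENCE_SECONDS.
  return nearby_aircraft, now, json_desc_dict, persistent_path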


def GetFlightAwareJson(flight_number):
  """Scrapes the text json message from FlightAware for a given flight number.

  Given a flight number, loads the corresponding FlightAware webpage for that flight and
  extracts the relevant script that contains all the flight details from that page.


  Args:
    flight_number: text flight number (i.e.: SWA1234)

  Returns:
    Two tuple:
     - Text representation of the json message from FlightAware.
     - Text string of error message, if any
  """
  url = 'https://flightaware.com/live/flight/' + flight_number
  try:
    response = requests.get(url)
  except requests.exceptions.RequestException as e:
    error_msg = 'Unable to query FA for URL due to %s: %s' % (e, url)
    Log(error_msg)
    return '', error_msg
  soup = bs4.BeautifulSoup(response.text, 'html.parser')
  scripts = soup.find_all('script')
  flight_script = None
  for script in scripts:
    if "trackpollBootstrap" in str(script):
      flight_script = str(script)
      break
  if not flight_script:

    error_msg = 'Unable to find trackpollBootstrap script in page: ' + response.text
    Log(error_msg)
    return '', error_msg
  first_open_curly_brace = flight_script.find('{')
  last_close_curly_brace = flight_script.rfind('}')
  flight_json = flight_script[first_open_curly_brace:last_close_curly_brace+1]
  return flight_json, ''


def Unidecode(s):
  """Convert a special unicode characters to closest ASCII representation."""
  if s is not None:
    s = unidecode.unidecode(s)
  return s


def ParseFlightAwareJson(flight_json):
  """Strips relevant data about the flight from FlightAware feed.

  The FlightAware json has hundreds of fields about a flight, only a fraction of which
  are relevant to extract. Note that some of the fields are inconsistently populated
  (i.e.: scheduled and actual times for departure and take-off).


  Args:
    flight_json: Text representation of the FlightAware json about a single flight.


  Returns:
    Dictionary of flight attributes extracted from the FlightAware json.
  """
  flight = {}
  parsed_json = json.loads(flight_json)

  fa_flight_number = list(parsed_json['flights'].keys())[0]
  parsed_flight_details = parsed_json['flights'][fa_flight_number]
  flight['fa_flight_number'] = fa_flight_number

  origin = parsed_flight_details.get('origin')
  if origin:
    flight['origin_friendly'] = origin.get('friendlyLocation')
    flight['origin_iata'] = origin.get('iata')

  destination = parsed_flight_details.get('destination')
  if destination:
    flight['destination_friendly'] = destination.get('friendlyLocation')
    flight['destination_iata'] = destination.get('iata')




                            <----SKIPPED LINES---->




    flight['estimated_landing_time'] = landing_time.get('estimated')

  airline = parsed_flight_details.get('airline')
  if airline:
    flight['airline_call_sign'] = Unidecode(airline.get('callsign'))
    flight['airline_short_name'] = Unidecode(airline.get('shortName'))
    flight['airline_full_name'] = Unidecode(airline.get('fullName'))

  if len(parsed_json['flights'].keys()) > 1:
    Log('There are multiple flights in the FlightAware json: %s' % parsed_json)

  return flight


def EpochDisplayTime(epoch, format_string='%Y-%m-%d %H:%M:%S.%f%z'):
  """Converts epoch in seconds to formatted time string."""
  return datetime.datetime.fromtimestamp(epoch, TZ).strftime(format_string)


def DisplayTime(flight, format_string='%Y-%m-%d %H:%M:%S.%f%z'):
  """Converts flight 'now' to formatted time string, caching results on flight."""
  cached_key = CACHED_ELEMENT_PREFIX + 'now-' + format_string
  cached_time = flight.get(cached_key)
  if cached_time:
    return cached_time

  epoch_display_time = EpochDisplayTime(flight['now'], format_string)
  flight[cached_key] = epoch_display_time
  return epoch_display_time


def DisplayAirline(flight):
  """Augments flight details with display-ready airline attributes.

  Args:
    flight: dictionary with key-value attributes about the flight.

  Returns:
    String identifying either the airline, or Unknown if not available.
  """
  airline = flight.get('airline_short_name', flight.get('airline_full_name'))




                            <----SKIPPED LINES---->




    aircraft = aircraft.replace('Regional Jet ', '')
    aircraft = aircraft[:SPLITFLAP_CHARS_PER_LINE]
  else:
    aircraft = ''
  return aircraft


def DisplayFlightNumber(flight):
  """Generate a displayable string for flight number, falling back to SQUAWK."""
  squawk = flight.get('squawk', '')
  flight_number = flight.get('flight_number')
  identifier = flight_number
  if not identifier and squawk:
    identifier = 'SQK ' + str(squawk)
  if not identifier:
    identifier = KEY_NOT_PRESENT_STRING
  return identifier
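
# Illustrative spot checks (not called anywhere): the flight number is preferred, then
# the squawk, then the not-present placeholder.
def _ExampleDisplayFlightNumber():
  assert DisplayFlightNumber({'flight_number': 'SWA1234'}) == 'SWA1234'
  assert DisplayFlightNumber({'squawk': '2200'}) == 'SQK 2200'
  assert DisplayFlightNumber({}) == KEY_NOT_PRESENT_STRING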


def DisplayAirportCodeIata(flight, key):
  """Returns key if it is present and not evaluating to False; 'Unknown' otherwise."""
  airport_code = flight.get(key)
  if not airport_code:
    airport_code = KEY_NOT_PRESENT_STRING
  return airport_code


def DisplayOriginIata(flight):
  """Generates displayable string for origin airport code."""
  return DisplayAirportCodeIata(flight, 'origin_iata')


def DisplayDestinationIata(flight):
  """Generates displayable string for destination airport code."""
  return DisplayAirportCodeIata(flight, 'destination_iata')


def DisplayAirportCodeFriendly(flight, iata_key, friendly_key):
  """Generates displayable longer name of airport including city if available."""
  airport = flight.get(iata_key)
  if not airport:
    return KEY_NOT_PRESENT_STRING
  if airport in KNOWN_AIRPORTS:
    return airport
  airport += ' ' + flight.get(friendly_key, '').split(',')[0]
  return airport


def DisplayOriginFriendly(flight):
  """Generates displayable longer name of origin airport including city if available."""
  return DisplayAirportCodeFriendly(flight, 'origin_iata', 'origin_friendly')


def DisplayDestinationFriendly(flight):
  """Generates displayable longer name of dest airport including city if available."""
  return DisplayAirportCodeFriendly(flight, 'destination_iata', 'destination_friendly')



def DisplayOriginDestinationPair(flight):
  """Generates displayble origin-destination airport code mindful of screen width.

  If the origin or destination is among a few key airports where the IATA code is
  well-known, then we can display only that code. Otherwise, we'll want to display
  both the code and a longer description of the airport. But we need to be mindful of
  the overall length of the display. So, for instance, these might be produced as
  valid origin-destination pairs:
  SFO-CLT Charlotte       <- Known origin
  Charlotte CLT-SFO       <- Known destination
  Charl CLT-SAN San Diego <- Neither origin nor destination known

  Args:
    flight: dictionary with key-value attributes about the flight.

  Returns:
    String as described.
  """
  origin_iata = DisplayOriginIata(flight)
  destination_iata = DisplayDestinationIata(flight)

  origin_friendly = DisplayOriginFriendly(flight)
  destination_friendly = DisplayDestinationFriendly(flight)

  max_pair_length = SPLITFLAP_CHARS_PER_LINE - len('-')
  if (
      origin_iata not in KNOWN_AIRPORTS and
      destination_iata not in KNOWN_AIRPORTS and




                            <----SKIPPED LINES---->




      origin_length = max_pair_length - destination_length
    elif len(destination_friendly) > max_destination_length:
      origin_length = len(origin_friendly)
      destination_length = max_pair_length - origin_length
    else:
      origin_length = max_origin_length
      destination_length = max_destination_length
  elif origin_iata in KNOWN_AIRPORTS and destination_iata not in KNOWN_AIRPORTS:
    origin_length = len(origin_iata)
    destination_length = max_pair_length - origin_length
  elif destination_iata in KNOWN_AIRPORTS and origin_iata not in KNOWN_AIRPORTS:
    destination_length = len(destination_iata)
    origin_length = max_pair_length - destination_length
  elif destination_iata == origin_iata:
    origin_length = len(origin_iata)
    destination_length = max_pair_length - origin_length
  else:
    destination_length = len(destination_iata)
    origin_length = len(origin_iata)

  if origin_iata == KEY_NOT_PRESENT_STRING and destination_iata == KEY_NOT_PRESENT_STRING:

    origin_destination_pair = KEY_NOT_PRESENT_STRING
  else:
    origin_destination_pair = (
        '%s-%s' %
        (origin_friendly[:origin_length], destination_friendly[:destination_length]))

  return origin_destination_pair


def DisplayDepartureTimes(flight):
  """Generates displayable fields about the flight times including details about the delay.

  Attempts to first find matching "pairs" of flight departure time details (departure vs.
  takeoff) in the belief that aligned nomenclature in the source data reflects an
  aligned concept of time where a flight delay can be best calculated.  Without a
  matching pair (or if no departure time information is provided at all), a delay
  cannot be calculated.

  Args:
    flight: dictionary with key-value attributes about the flight.

  Returns:
    Dictionary with the following keys:
    - departure_timestamp: taken from one of potentially four timestamps indicating
      departure
    - departure_time_text: departure time formatted to HH:MM string
    - calculable_delay: boolean indicating whether sufficient data available to calc delay

    - delay_seconds: integer number of seconds of delay
    - delay_text: text of the format "7H16M early", where the descriptor early or late is
      abbreviated if needed to stay within the display width
  """
  cached_key = CACHED_ELEMENT_PREFIX + 'departure_times'
  cached_value = flight.get(cached_key)
  if cached_value:
    return cached_value

  actual_departure = flight.get('actual_departure_time')
  scheduled_departure = flight.get('scheduled_departure_time')
  actual_takeoff_time = flight.get('actual_takeoff_time')
  scheduled_takeoff_time = flight.get('scheduled_takeoff_time')
  calculable_delay = False

  scheduled = None
  delay_seconds = None
  delay_text = ''

  if actual_departure and scheduled_departure:
    actual = actual_departure
    scheduled = scheduled_departure
    departure_label = 'Dep'




                            <----SKIPPED LINES---->




    flight: dictionary with key-value attributes about the flight.

  Returns:
    Seconds, if the remaining time is calculable; None otherwise.
  """
  arrival = flight.get('estimated_arrival_time')
  if not arrival:
    arrival = flight.get('estimated_landing_time')
  if not arrival:
    arrival = flight.get('scheduled_arrival_time')
  if not arrival:
    arrival = flight.get('scheduled_landing_time')

  if arrival:
    # Remaining time runs from the observation time to the (future) arrival time.
    remaining_seconds = arrival - flight['now']
  else:
    remaining_seconds = None
  return remaining_seconds



def FlightMeetsDisplayCriteria(flight, configuration, display_all_hours=False, log=False):
  """Returns boolean indicating whether the screen is currently accepting new flight data.

  Based on the configuration file, determines whether the flight data should be displayed.
  Specifically, the configuration:
  - may include 'enabled' indicating whether screen should be driven at all
  - should include 'on' & 'off' parameters indicating minute (from midnight) of operation

  - should include altitude & distance parameters indicating max values of interest


  Args:
    flight: dictionary of flight attributes.
    configuration: dictionary of configuration attributes.
    display_all_hours: a boolean indicating whether we should ignore whether the
      screen is turned off (either via the enabling, or via the hour settings)
    log: optional boolean indicating whether a flight that fails the criteria should be
      logged with the reason

  Returns:
    Boolean as described.
  """
  flight_altitude = flight.get('altitude', float('inf'))
  config_max_altitude = configuration['setting_max_altitude']

  flight_meets_criteria = True
  if flight_altitude > config_max_altitude:
    flight_meets_criteria = False
    if log:
      Log(
          '%s not displayed because it fails altitude criteria - flight altitude: '
          '%.0f; required altitude: %.0f' % (

              DisplayFlightNumber(flight), flight_altitude, config_max_altitude))
  else:
    flight_distance = flight.get('min_feet', float('inf'))
    config_max_distance = configuration['setting_max_distance']
    if flight_distance > config_max_distance:
      flight_meets_criteria = False
      if log:
        Log(
            '%s not displayed because it fails distance criteria - flight distance: '
            '%.0f; required distance: %.0f' % (
                DisplayFlightNumber(flight), flight_distance, config_max_distance))


  if not display_all_hours and flight_meets_criteria:
    flight_timestamp = flight['now']
    minute_of_day = MinuteOfDay(flight_timestamp)
    if minute_of_day <= configuration['setting_on_time']:
      flight_meets_criteria = False
      if log:
        Log(
            '%s not displayed because it occurs too early - minute_of_day: '
            '%d; setting_on_time: %d' % (
                DisplayFlightNumber(flight), minute_of_day,
                configuration['setting_on_time']))
    elif minute_of_day > configuration['setting_off_time'] + 1:
      flight_meets_criteria = False
      if log:
        Log(
            '%s not displayed because it occurs too late - minute_of_day: '
            '%d; setting_off_time: %d' % (
                DisplayFlightNumber(flight), minute_of_day,
                configuration['setting_off_time']))
    elif configuration.get('setting_screen_enabled', 'off') == 'off':
      flight_meets_criteria = False
      if log:
        Log(
            '%s not displayed because screen disabled' % DisplayFlightNumber(flight))


  return flight_meets_criteria
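
# Illustrative sketch (not called anywhere): a low, nearby flight passes the altitude and
# distance filters; the settings and flight values below are hypothetical, and
# display_all_hours=True skips the time-of-day and screen-enabled checks.
def _ExampleFlightMeetsDisplayCriteria():
  configuration = {'setting_max_altitude': 20000, 'setting_max_distance': 15000}
  flight = {'flight_number': 'SWA1234', 'altitude': 5000, 'min_feet': 4200}
  assert FlightMeetsDisplayCriteria(flight, configuration, display_all_hours=True)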


def IdentifyFlightDisplayed(flights, configuration, display_all_hours=False):
  """Finds the most recent flight in flights that meet the display criteria.

  Args:
    flights: list of flight dictionaries.
    configuration: dictionary of settings.
    display_all_hours: boolean indicating whether we should ignore the time constraints
      (i.e.: whether the screen is enabled, and its turn-on or turn-off times) in
      identifying the most recent flight. That is, if False, this will only return a
      flight that would have been displayed in ordinary usage; if True, it returns a
      flight irrespective of the time it would be displayed.

  Returns:
    The index into flights of the most recent flight meeting the criteria; None if
    no flight qualifies.
  """
  for n in range(len(flights)-1, -1, -1):  # traverse the flights in reverse
    if FlightMeetsDisplayCriteria(
        flights[n], configuration, display_all_hours=display_all_hours):
      return n
  return None


def CreateMessageAboutFlight(flight):
  """Creates a message to describe interesting attributes about a single flight.

  Generates a multi-line description of a flight. A typical message might look like:

  UAL300 - UNITED        <- Flight number and airline
  BOEING 777-200 (TWIN)  <- Aircraft type
  SFO-HNL HONOLULU       <- Origin & destination
  DEP 02:08 ER REM 5:14  <- Time details: departure time; early / late / ontime; remaining
  185MPH 301DEG D:117FT  <- Trajectory details: speed; bearing; forecast min dist to HOME
  1975FT (+2368FPM)      <- Altitude details: current altitude & rate of ascent / descent

  However, not all of these details are always present, so some may be listed as unknown,
  or entire lines may be left out.

  Args:
    flight: dictionary of flight attributes.

  Returns:
    List of printable strings, one per display line.
  """
  lines = []

  # LINE1: UAL1425 - UNITED
  #        ======================
  flight_number = DisplayFlightNumber(flight)
  second_element = DisplayAirline(flight)

  if second_element == KEY_NOT_PRESENT_STRING:
    second_element = flight.get('owner', KEY_NOT_PRESENT_STRING)
    if second_element is None:
      second_element = KEY_NOT_PRESENT_STRING

  if flight_number == KEY_NOT_PRESENT_STRING and second_element == KEY_NOT_PRESENT_STRING:

    line = 'Unknown Flight'
  else:
    line = (flight_number + ' - ' + second_element)[:SPLITFLAP_CHARS_PER_LINE]
  lines.append(line)

  # LINE2: Boeing 737-800 (twin-jet)
  #        ======================
  aircraft_type = DisplayAircraft(flight)
  if aircraft_type:
    lines.append(aircraft_type)

  # LINE3: SFO-CLT Charlotte
  #        Charlotte CLT-SFO
  #        ======================
  origin_destination_pair = DisplayOriginDestinationPair(flight)
  if origin_destination_pair:
    lines.append(origin_destination_pair)

  # LINE4: DEP 02:08 ER REM 5:14
  #        Dep: Unknown
  #        ======================
  departure_time_details = DisplayDepartureTimes(flight)
  line_elements = []
  if departure_time_details:

    if departure_time_details.get('departure_time_text'):
      line_elements.append(departure_time_details['departure_time_text'])

    if departure_time_details.get('delay_text'):
      line_elements.append(departure_time_details['delay_text'])

    remaining_seconds = DisplaySecondsRemaining(flight)
    if remaining_seconds is not None:

      line_elements.append('Rem ' + SecondsToHhMm(remaining_seconds, colon=True))

  if line_elements:
    lines.append(EvenlySpace(line_elements))

  # LINE5: 123mph 297deg D:1383ft
  #        ======================
  speed = flight.get('speed')
  heading = flight.get('track')
  min_feet = flight.get('min_feet')

  line_elements = []
  if speed is not None:
    line_elements.append(str(round(speed)) + SPEED_UNITS)
  if heading is not None:
    line_elements.append(str(round(heading)) + u'\u00b0')  # degrees deg unicode
  if min_feet is not None:
    line_elements.append('D:' + str(round(min_feet)) + DISTANCE_UNITS)
  if line_elements:
    lines.append(EvenlySpace(line_elements))





                            <----SKIPPED LINES---->




  if vert_rate:
    line_elements.append('%+d%s' % (vert_rate, CLIMB_RATE_UNITS))
  if line_elements:
    lines.append(EvenlySpace(line_elements))

  return lines


def EvenlySpace(l):
  """Converts list to string with equal space between each element in list."""
  if not l:
    return ''
  if len(l) == 1:
    return l[0]
  extra_space = SPLITFLAP_CHARS_PER_LINE - sum([len(str(s)) for s in l])
  last_gap = round(extra_space / (len(l) - 1))
  return EvenlySpace([*l[:-2], str(l[-2]) + ' '*last_gap + str(l[-1])])
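
# Illustrative sketch (not called anywhere): the recursion pads between elements until
# the line is exactly SPLITFLAP_CHARS_PER_LINE wide (assuming the content fits).
def _ExampleEvenlySpace():
  line = EvenlySpace(['185MPH', '301', 'D:117FT'])
  assert len(line) == SPLITFLAP_CHARS_PER_LINE
  return line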


def RemoveParentheticals(s):
  """Removes all instances of () - and the text contained within - from a string."""
  if not s:
    return s
  if '(' in s and ')' in s:
    open_paren = s.find('(')
    close_paren = s.find(')')
  else:
    return s
  if close_paren < open_paren:
    return s
  s = s.replace(s[open_paren:close_paren+1], '').strip().replace('  ', ' ')
  return RemoveParentheticals(s)
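
# Illustrative spot checks (not called anywhere): parentheticals are stripped
# recursively, and the leftover double spaces are collapsed.
def _ExampleRemoveParentheticals():
  assert RemoveParentheticals('Boeing 737-800 (twin-jet)') == 'Boeing 737-800'
  assert RemoveParentheticals('A (b) C (d)') == 'A C'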


def Ordinal(n):
  """Converts integer n to an ordinal string - i.e.: 2 -> 2nd; 5 -> 5th."""
  return '%d%s' % (n, 'tsnrhtdd'[(math.floor(n/10)%10 != 1)*(n%10 < 4)*n%10::4])
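
# Illustrative spot checks (not called anywhere) for the string-slicing trick above.
def _ExampleOrdinal():
  assert Ordinal(1) == '1st' and Ordinal(2) == '2nd' and Ordinal(3) == '3rd'
  assert Ordinal(11) == '11th' and Ordinal(23) == '23rd' and Ordinal(112) == '112th'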


def Screenify(lines, splitflap):
  """Transforms a list of lines to a single text string either for printing or sending.

  Given a list of lines that is a fully-formed message to send to the splitflap display,
  this function transforms the list of strings to a single string that is an
  easier-to-read and more faithful representation of how the message will be displayed.
  The transformations are to add blank lines to the message to make a consistent number
  of lines, and to add a border to the sides & top / bottom of the message.


  Args:
    lines: list of strings that comprise the message
    splitflap: boolean, True if directed for splitflap display; false if directed to screen


  Returns:
    String - which includes embedded new line characters, borders, etc. as described
    above, that can be printed to screen as the message.
  """
  divider = '+' + '-'*SPLITFLAP_CHARS_PER_LINE + '+'
  border_character = '|'
  append_character = '\n'

  if splitflap:
    border_character = ''
    append_character = ''

  for unused_n in range(SPLITFLAP_LINE_COUNT-len(lines)):
    lines.append('')
  lines = [
      border_character + line.ljust(SPLITFLAP_CHARS_PER_LINE).upper() + border_character


      for line in lines]

  if not splitflap:
    lines.insert(0, divider)
    lines.append(divider)

  return append_character.join(lines)
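
# Illustrative sketch (not called anywhere): the same message rendered for the console
# (padded, upper-cased, with borders) vs. for the splitflap display (no borders, no
# newlines). The message lines are hypothetical.
def _ExampleScreenify():
  lines = ['SWA1234 - Southwest', 'SFO-SAN San Diego']
  return Screenify(list(lines), splitflap=False), Screenify(list(lines), splitflap=True)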


def FlightInsightLastSeen(flights, days_ago=2):
  """Generates string indicating when flight was last seen.

  Generates text of the following form.
  - KAL214 was last seen 2d0h ago

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    days_ago: the minimum time difference for which a message should be generated -
      i.e.: many flights are daily, and so we are not necessarily interested to see
      about every daily flight that it was seen yesterday. However, more infrequent
      flights might be of interest.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = DisplayFlightNumber(this_flight)
  this_timestamp = flights[-1]['now']

  last_seen = [f for f in flights[:-1] if DisplayFlightNumber(f) == this_flight_number]
  if last_seen and 'flight_number' in this_flight:
    last_timestamp = last_seen[-1]['now']
    if this_timestamp - last_timestamp > days_ago*SECONDS_IN_DAY:
      message = '%s was last seen %s ago' % (
          this_flight_number, SecondsToDdHh(this_timestamp - last_timestamp))
  return message


def FlightInsightDifferentAircraft(flights, percent_size_difference=0.1):
  """Generates string indicating changes in aircraft for the most recent flight.

  Generates text of the following form for the "focus" flight in the data.
  - Last time ASA1964 was seen on Mar 16, it was with a much larger plane (Airbus A320
    (twin-jet) @ 123ft vs. Airbus A319 (twin-jet) @ 111ft)
  - Last time ASA743 was seen on Mar 19, it was with a different type of airplane
    (Boeing 737-900 (twin-jet) vs. Boeing 737-800 (twin-jet))

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    percent_size_difference: the minimum size (i.e.: length) difference for the insight
      to warrant including the size details.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = DisplayFlightNumber(this_flight)
  last_seen = [f for f in flights[:-1] if DisplayFlightNumber(f) == this_flight_number]



  # Last time this same flight flew a materially different type of aircraft
  if last_seen and 'flight_number' in this_flight:
    last_flight = last_seen[-1]

    last_aircraft = last_flight.get('aircraft_type_friendly')
    last_aircraft_length = AIRCRAFT_LENGTH.get(last_aircraft, 0)

    this_aircraft = this_flight.get('aircraft_type_friendly')
    this_aircraft_length = AIRCRAFT_LENGTH.get(this_aircraft, 0)

    this_likely_commercial_flight = (
        this_flight.get('origin_iata') and this_flight.get('destination_iata'))
    if this_likely_commercial_flight and this_aircraft and not this_aircraft_length:

      Log('%s used in a flight with defined origin & destination but is missing '
          'length details' % this_aircraft, file=LOGFILE)

    likely_same_commercial_flight = (
        last_flight.get('origin_iata') == this_flight.get('origin_iata') and

        last_flight.get('destination_iata') == this_flight.get('destination_iata') and

        last_flight.get('airline_call_sign') == this_flight.get('airline_call_sign'))

    this_aircraft_bigger = False
    last_aircraft_bigger = False
    if (likely_same_commercial_flight and
        this_aircraft_length > last_aircraft_length * (1 + percent_size_difference)):

      this_aircraft_bigger = True
      comparative_text = 'larger'
    elif (likely_same_commercial_flight and
          last_aircraft_length > this_aircraft_length * (1 + percent_size_difference)):

      last_aircraft_bigger = True
      comparative_text = 'smaller'

    last_flight_time_string = DisplayTime(last_flight, '%b %-d')
    if this_aircraft and last_aircraft:
      if this_aircraft_bigger or last_aircraft_bigger:
        message = ('%s used a %s plane today compared with last, on %s '
                   '(%s @ %dft vs. %s @ %dft)' % (
                       this_flight_number, comparative_text, last_flight_time_string,

                       RemoveParentheticals(this_aircraft),
                       this_aircraft_length*FEET_IN_METER,
                       RemoveParentheticals(last_aircraft),
                       last_aircraft_length*FEET_IN_METER))
      elif last_aircraft and this_aircraft and last_aircraft != this_aircraft:
        message = (
            '%s used a different aircraft today compared with last, on %s (%s vs. %s)' % (

                this_flight_number, last_flight_time_string, this_aircraft, last_aircraft))


  return message


def FlightInsightNthFlight(flights, hours=1, min_multiple_flights=2):
  """Generates string about seeing many flights to the same destination in a short period.

  Generates text of the following form for the "focus" flight in the data.
  - ASA1337 was the 4th flight to PHX in the last 53 minutes, served by Alaska Airlines,
    American Airlines, Southwest and United
  - SWA3102 was the 2nd flight to SAN in the last 25 minutes, both with Southwest


  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    hours: the time horizon over which to look for flights with the same destination.

    min_multiple_flights: the minimum number of flights to that same destination to
      warrant generating an insight.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = this_flight.get('flight_number', 'This')
  this_destination = this_flight.get('destination_iata', '')
  this_airline = DisplayAirline(this_flight)
  if not this_airline:
    this_airline = KEY_NOT_PRESENT_STRING # in case airline was stored as, say, ''

  this_timestamp = this_flight['now']
  if this_destination and this_destination not in ['SFO', 'LAX']:
    similar_flights = [f for f in flights[:-1] if
                       this_timestamp - f['now'] < SECONDS_IN_HOUR*hours and
                       this_destination == f.get('destination_iata', '')]
    similar_flights_count = len(similar_flights) + 1  # +1 for this_flight
    similar_flights_airlines = list({DisplayAirline(f) for f in similar_flights})


    same_airline = [this_airline] == similar_flights_airlines

    if similar_flights_count >= min_multiple_flights:
      n_minutes = (
          (this_flight['now'] - similar_flights[0]['now'])
          / SECONDS_IN_MINUTE)
      message = ('%s was the %s flight to %s in the last %d minutes' % (
          this_flight_number, Ordinal(similar_flights_count),
          this_destination, n_minutes))
      if same_airline and similar_flights_count == 2:
        message += ', both with %s' % this_airline
      elif same_airline:
        message += ', all with %s' % this_airline
      else:
        similar_flights_airlines.append(this_airline)
        similar_flights_airlines.sort()
        message += ', served by %s and %s' % (
            ', '.join(similar_flights_airlines[:-1]),
            similar_flights_airlines[-1])

  return message


def FlightInsightSuperlativeAttribute(
    flights,
    key,
    label,
    units,
    absolute_list,
    insight_min=True,
    insight_max=True,
    hours=HOURS_IN_DAY):
  """Generates string about a numeric attribute of the flight being an extreme value.

  Generates text of the following form for the "focus" flight in the data.
  - N5286C has the slowest groundspeed (113mph vs. 163mph) in last 24 hours
  - CKS828 has the highest altitude (40000ft vs. 16575ft) in last 24 hours

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    key: the key of the attribute of interest - i.e.: 'speed'.
    label: the human-readable string that should be displayed in the message - i.e.:
      'groundspeed'.
    units: the string units that should be used to label the value of the key - i.e.:
      'MPH'.
    absolute_list: a 2-tuple of strings that is used to label the min and the max - i.e.:
      ('lowest', 'highest'), or ('slowest', 'fastest').
    insight_min: boolean indicating whether to generate an insight about the min value.

    insight_max: boolean indicating whether to generate an insight about the max value.

    hours: the time horizon over which to look for superlative flights.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = this_flight.get('flight_number', 'The last flight')
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  included_seconds = last_timestamp - first_timestamp

  if included_seconds > SECONDS_IN_HOUR * hours:
    relevant_flights = [
        f for f in flights[:-1]
        if last_timestamp - f['now'] < SECONDS_IN_HOUR * hours]
    # Guard against an empty list: min() / max() of an empty sequence raises ValueError.
    other_values = [
        f.get(key) for f in relevant_flights if isinstance(f.get(key), numbers.Number)]

    this_value = this_flight.get(key)

    if this_value and other_values:
      value_min = min(other_values)
      value_max = max(other_values)

      superlative = True
      if (
          isinstance(this_value, numbers.Number) and
          isinstance(value_max, numbers.Number) and
          this_value > value_max and
          insight_max):
        absolute_string = absolute_list[1]
        other_value = value_max
      elif (
          isinstance(this_value, numbers.Number) and
          isinstance(value_min, numbers.Number) and
          this_value < value_min and
          insight_min):
        absolute_string = absolute_list[0]
        other_value = value_min
      else:
        superlative = False

      if superlative:
        message = '%s has the %s %s (%d%s vs. %d%s) in last %d hours' % (
            this_flight_number, absolute_string, label,
            this_value, units, other_value, units, hours)

  return message
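
# Illustrative sketch (not called anywhere): how a groundspeed superlative insight might
# be requested; these argument values are hypothetical, not necessarily the ones used
# elsewhere in this program.
def _ExampleFlightInsightFastestSlowest(flights):
  return FlightInsightSuperlativeAttribute(
      flights, 'speed', 'groundspeed', 'MPH', ('slowest', 'fastest'))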


def FlightInsightNextFlight(flights, configuration):
  """Generates string about estimated wait until next flight.

  Generates text of the following form for the "focus" flight in the data.
  - Last flight at 2:53a; avg wait is 1h58m & median is 42m, but could be as long as
    8h43m, based on last 20 days

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    configuration: dictionary of settings.

  Returns:
    Printable string message; if no message because not enough history, then an
    empty string.
  """
  msg = ''
  if not flights:
    return msg

  # Find the minute of day of this flight, then the minutes of day of prior flights,
  # considering only flights that:
  # -- have not been seen in the last 12 hours
  # -- occur at or after this flight's minute of day (wrapping past midnight)
  this_flight = flights[-1]
  this_hour = int(DisplayTime(this_flight, '%-H'))
  this_minute = int(DisplayTime(this_flight, '%-M'))
  this_date = DisplayTime(this_flight, '%x')

  # Flights that we've already seen in the last few hours we do not expect to see
  # again for another few hours, so let's exclude them from the calculation
  exclude_flights_hours = 12
  flight_numbers_seen_in_last_n_hours = [
      f['flight_number'] for f in flights
      if f['now'] > this_flight['now'] - exclude_flights_hours*SECONDS_IN_HOUR
      and 'flight_number' in f]
  still_to_come_flights = [
      f for f in flights[:-1]
      if f.get('flight_number') not in flight_numbers_seen_in_last_n_hours
      and this_date != DisplayTime(f, '%x')]

  # exclude flights that would be filtered out by altitude or distance
  still_to_come_flights = [
      f for f in still_to_come_flights if FlightMeetsDisplayCriteria(f, configuration)]

  # exclude flights more than 30 days in the past
  now = time.time()
  still_to_come_flights = [
      f for f in still_to_come_flights
      if now - f['now'] < MAX_INSIGHT_HORIZON_DAYS * SECONDS_IN_DAY]

  minimum_minutes_next_flight = {}  # min minutes to next flight by day
  for flight in still_to_come_flights:
    date = DisplayTime(flight, '%x')
    hour = int(DisplayTime(flight, '%-H'))
    minutes = int(DisplayTime(flight, '%-M'))
    minutes_after = (hour - this_hour) * MINUTES_IN_HOUR + (minutes - this_minute)

    if minutes_after < 0:
      minutes_after += MINUTES_IN_DAY
    minimum_minutes_next_flight[date] = min(
        minimum_minutes_next_flight.get(date, minutes_after), minutes_after)

  minutes = list(minimum_minutes_next_flight.values())
  if len(minutes) > 1:  # at least one (potentially partial) prior day of history

    average_seconds = (sum(minutes) / len(minutes)) * SECONDS_IN_MINUTE
    max_seconds = max(minutes) * SECONDS_IN_MINUTE

    median_seconds = statistics.median(minutes) * SECONDS_IN_MINUTE
    minimum_percent_diff = 0.5
    median_different = (
        median_seconds > average_seconds * (1 + minimum_percent_diff) or
        average_seconds > median_seconds * (1 + minimum_percent_diff))
    median_text = ''
    if median_different:
      median_text = ' & median is %s' % SecondsToHhMm(median_seconds)

    msg = ('Last flight at %s; avg wait is %s%s, but could '
           'be as long as %s, based on last %d days' % (
               DisplayTime(this_flight, '%-I:%M%p'), SecondsToHhMm(average_seconds),
               median_text, SecondsToHhMm(max_seconds), len(minutes)))

  return msg

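
# Illustrative sketch of the wrap-around arithmetic used above (hypothetical helper,
# not called anywhere in this module): the gap from 23:50 on one day to 00:20 the next
# is 30 minutes, not -1410.
def _ExampleMinutesUntilNext(this_hour, this_minute, other_hour, other_minute):
  minutes_after = (
      (other_hour - this_hour) * MINUTES_IN_HOUR + (other_minute - this_minute))
  if minutes_after < 0:
    minutes_after += MINUTES_IN_DAY
  return minutes_after  # _ExampleMinutesUntilNext(23, 50, 0, 20) == 30
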

def CheckForNewFilterCriteria(prev, new, message_queue, flights):
  """If filter criteria changed, generate new image and perhaps new message."""
  if (new.get('setting_max_distance') != prev.get('setting_max_distance') or
      new.get('setting_max_altitude') != prev.get('setting_max_altitude')):
    FlightCriteriaHistogramPng(
        flights,
        new['setting_max_distance'],
        new['setting_max_altitude'],
        7,
        last_max_distance_feet=prev.get('setting_max_distance'),
        last_max_altitude_feet=prev.get('setting_max_altitude'))

  if (new.get('setting_max_distance') != prev.get('setting_max_distance') or
      new.get('setting_max_altitude') != prev.get('setting_max_altitude') or
      new.get('setting_off_time') != prev.get('setting_off_time') or
      new.get('setting_on_time') != prev.get('setting_on_time')):
    if new.get('next_flight', 'off') == 'on':
      next_flight_message = FlightInsightNextFlight(flights, new)
      if next_flight_message:
        message_queue.append((FLAG_MSG_INSIGHT, next_flight_message))


def PercentileScore(scores, value):
  """Returns the percentile that a particular value is in a list of numbers.

  Roughly inverts numpy.percentile. That is, where numpy.percentile(scores_list,
  percentile) returns the value of the list that sits at that percentile,
  PercentileScore(scores_list, value) yields back approximately that percentile.

  If the value matches identical elements in the list, this function takes the average
  position of those identical values to compute a percentile. Thus, for some lists
  (e.g.: where there are lots of flights that have a 0 second delay, or a 100% delay
  frequency), you may not get a percentile of 0 or 100 even with values equal to the
  min or max element in the list.

  Args:
    scores: the list of numbers, including value.
    value: the value for which we want to determine the percentile.

  Returns:
    Returns an integer percentile in the range [0, 100] inclusive.
  """
  count_values_below_score = len([1 for s in scores if s < value])
  # -1 is because value is already in scores
  count_values_at_score = len([1 for s in scores if s == value]) - 1

  percentile = (count_values_below_score + count_values_at_score / 2) / len(scores)
  return round(percentile*100)

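
# Illustrative sketch (hypothetical scores, not called anywhere in this module): with
# tied values, the percentile reflects the average position of the ties, so neither
# the min nor the max of the list necessarily maps to 0 or 100.
def _ExamplePercentileScore():
  scores = [0, 0, 0, 10, 20]
  low = PercentileScore(scores, 0)    # 20: the two other zeros average out above 0%
  high = PercentileScore(scores, 20)  # 80: the value itself is excluded from the count
  return (low, high)
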

def FlightInsightGroupPercentile(
    flights,
    group_function,
    value_function,
    value_string_function,
    group_label,
    value_label,
    filter_function=lambda this, other: True,
    min_days=1,
    lookback_days=MAX_INSIGHT_HORIZON_DAYS,
    min_this_group_size=0,
    min_comparison_group_size=0,
    min_group_qty=0,
    percentile_low=float('-inf'),
    percentile_high=float('inf')):
  """Generates a string about extreme values of groups of flights.

  Generates text of the following form for the "focus" flight in the data.
  - flight SIA31 (n=7) has a delay frequency in the 95th %tile, with 100% of flights
    delayed an average of 6m over the last 4d1h
  - flight UAL300 (n=5) has a delay time in the 1st %tile, with an average delay of 0m
    over the last 4d5h

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    group_function: function that, when called with a flight, returns the grouping key.
      That is, for example, group_function(flight) = 'B739'
    value_function: function that, when called with a list of flights, returns the
      value to be used for the comparison to identify min / max. Typically, the count,
      but could also be a sum, standard deviation, etc. - for perhaps the greatest
      range in flight altitude. If the group does not have a valid value and so
      should be excluded from comparison - i.e.: average delay of a group of flights
      which did not have a calculable_delay on any flight, this function should
      return None.
    value_string_function: function that, when called with the two parameters flights
      and value, returns a string (inclusive of units and label) that should be
      displayed to describe the quantity. For instance, if value_function returns
      seconds, value_string_function could convert that to a string '3h5m'. Or if
      value_function returns an altitude range, value_string_function could return
      a string 'altitude range of 900ft (1100ft - 2000ft)'.


    group_label: string to identify the group type - i.e.: 'aircraft' or 'flight' in
      the examples above.
    value_label: string to identify the value - i.e.: 'flights' in the examples above,
      but might also be i.e.: longest *delay*, or other quantity descriptor.

    filter_function: an optional function that, when called with the most recent flight
      and another flight filter_function(flights[-1], flight[n]), returns a value
      interpreted as a boolean indicating whether flight n should be included in
      determining the percentile.
    min_days: the minimum amount of history required to start generating insights
      about delays.
    lookback_days: the maximum amount of history which will be considered in generating
      insights about delays.


    min_this_group_size: even if this group has, say, the maximum average delay, if it's
      a group of size 1, that is not necessarily very interesting. This sets the
      minimum group size for the focus flight.
    min_comparison_group_size: similarly, comparing the focus group to groups of size
      one does not necessarily produce a meaningful comparison; this sets the minimum
      size for the other groups.
    min_group_qty: when generating a percentile, if there are only 3 or 4 groups among
      which to generate a percentile (i.e.: only a handful of destinations have been
      seen so far, etc.) then it is not necessarily very interesting to generate a
      message; this sets the minimum quantity of groups necessary (including the
      focus group) to generate a message.
    percentile_low: number [0, 100] inclusive that indicates the percentile that the focus
      flight group must equal or be less than for the focus group to trigger an insight.

    percentile_high: number [0, 100] inclusive that indicates the percentile that the
      focus flight group must equal or be greater than for the focus group to trigger an
      insight.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  debug = False
  message = ''
  this_flight = flights[-1]
  first_timestamp = flights[0]['now']
  last_timestamp = this_flight['now']
  included_seconds = last_timestamp - first_timestamp

  if (included_seconds > SECONDS_IN_DAY * min_days and
      group_function(this_flight) != KEY_NOT_PRESENT_STRING):

    relevant_flights = [
        f for f in flights if
        last_timestamp - f['now'] < SECONDS_IN_DAY * lookback_days and
        filter_function(this_flight, f)]

    grouped_flights = {}
    for flight in relevant_flights:
      group = group_function(flight)
      grouping = grouped_flights.get(group, [])
      grouping.append(flight)
      grouped_flights[group] = grouping
    # we will exclude "UNKNOWN" since that is not a coherent group
    if KEY_NOT_PRESENT_STRING in grouped_flights:
      grouped_flights.pop(KEY_NOT_PRESENT_STRING)


    grouped_values = {g: value_function(grouped_flights[g]) for g in grouped_flights}
    this_group = group_function(relevant_flights[-1])
    this_value = grouped_values[this_group]
    this_group_size = len(grouped_flights[this_group])

    # we will exclude groups that are not big enough
    grouped_flights = {
        k: grouped_flights[k] for k in grouped_flights
        if len(grouped_flights[k]) >= min_comparison_group_size or k == this_group}


    # Remove those for which no value could be calculated or which are too small
    grouped_values = {
        g: grouped_values[g] for g in grouped_values
        if grouped_values[g] is not None and g in grouped_flights}

    if debug:
      print()
      print('len(relevant_flights): %d' % len(relevant_flights))
      print('len(grouped_flights): %d' % len(grouped_flights))
      print('grouped_flights.keys(): %s' % sorted(list(grouped_flights.keys())))
      for key in sorted(list(grouped_flights.keys())):
        print('  len(grouped_flights[%s]) = %d' % (key, len(grouped_flights[key])))


    if this_value is not None and len(grouped_values) >= min_group_qty:

      time_horizon_string = ' over the last %s' % SecondsToDdHh(
          last_timestamp - relevant_flights[0]['now'])
      min_comparison_group_size_string = ''
      if min_comparison_group_size > 1:
        min_comparison_group_size_string = ' amongst those with >%d flights' % (
            min_comparison_group_size - 1)

      # FLIGHT X (n=7) has the Xth percentile of DELAYS, with an average delay of
      # 80 MINUTES
      this_percentile = PercentileScore(grouped_values.values(), this_value)
      if this_group_size >= min_this_group_size and (
          this_percentile <= percentile_low or this_percentile >= percentile_high):


        if debug:
          print('Comparison cohorts for %s (%s)' % (group_label, str(this_group)))

          print('This percentile: %f; min: %f; max: %f' % (
              this_percentile, percentile_low, percentile_high))
          keys = list(grouped_values.keys())
          values = [grouped_values[k] for k in keys]
          print(keys)
          print(values)
          (values, keys) = SortByValues(values, keys)
          for n, value in enumerate(values):
            print('%s: %f (group size: %d)' % (
                keys[n], value, len(grouped_flights[keys[n]])))

        if group_label:
          group_label += ' '

        def TrialMessage():
          message = '%s%s (n=%d) has a %s in the %s %%tile, with %s%s%s' % (
              group_label,
              this_group,
              this_group_size,
              value_label,
              Ordinal(this_percentile),
              value_string_function(grouped_flights[this_group], this_value),
              time_horizon_string,
              min_comparison_group_size_string)
          line_count = len(textwrap.wrap(message, width=SPLITFLAP_CHARS_PER_LINE))

          return (line_count, message)

        (line_count, message) = TrialMessage()
        if line_count > SPLITFLAP_LINE_COUNT:
          min_comparison_group_size_string = ''
          (line_count, message) = TrialMessage()
          if line_count > SPLITFLAP_LINE_COUNT:
            time_horizon_string = ''
            (line_count, message) = TrialMessage()

      elif debug:
        print('Not an outlying group because A and either B or C needed to be true:')


        if not this_group_size >= min_this_group_size:
          print('A this_group_size %d >= min_this_group_size %d' % (
              this_group_size, min_this_group_size))
        else:
          print('A passed')
          if not this_percentile <= percentile_low:
            print('B this_percentile %d <= percentile_low %d' % (
                this_percentile, percentile_low))
          if not this_percentile >= percentile_high:
            print('C this_percentile %d >= percentile_high %d' % (
                this_percentile, percentile_high))

    elif debug:
      print('Not an outlying group because A or B failed:')
      if this_value is None:
        print('A this_value %s' % str(this_value))
      elif len(grouped_values) < min_group_qty:
        print('A passed')
        print('B len(grouped_values) %d >= min_group_qty %d' % (
            len(grouped_values), min_group_qty))




                            <----SKIPPED LINES---->




  return message

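
# Illustrative sketch (hypothetical thresholds, not called anywhere in this module):
# wiring this function up for a per-airline "delay time" percentile insight, much as
# DelayTimeAndFrequencyMessage does inside FlightInsights further below.
def _ExampleAirlineDelayPercentile(flights):
  return FlightInsightGroupPercentile(
      flights,
      group_function=DisplayAirline,
      value_function=AverageDelay,
      value_string_function=lambda flights, value:
          'average delay of %s' % SecondsToHhMm(value),
      group_label='airline',
      value_label='delay time',
      min_this_group_size=10,
      min_comparison_group_size=5,
      min_group_qty=5,
      percentile_low=10,
      percentile_high=80)
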

def FlightInsightSuperlativeGroup(
    flights,
    group_function,
    value_function,
    value_string_function,
    group_label,
    value_label,
    absolute_list,
    min_days=1,
    lookback_days=MAX_INSIGHT_HORIZON_DAYS,
    min_this_group_size=0,
    min_comparison_group_size=0,
    insight_min=True,
    insight_max=True):
  """Generates a string about extreme values of groups of flights.

  Generates text of the following form for the "focus" flight in the data.
  - aircraft B739 (n=7) is tied with B738 and A303 for the most flights at 7 flights
    over the last 3d7h amongst aircraft with at least 5 flights
  - aircraft B739 (n=7) is tied with 17 others for the most flights at 7 flights over
    the last 3d7h amongst aircraft with at least 5 flights
  - flight UAL1075 (n=12) has the most flights with 12 flights; the next most flights
    is 11 flights over the last 7d5h

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    group_function: function that, when called with a flight, returns the grouping key.
      That is, for example, group_function(flight) = 'B739'
    value_function: function that, when called with a list of flights, returns the
      value to be used for the comparison to identify min / max. Typically, the count,
      but could also be a sum, standard deviation, etc. - for perhaps the greatest
      range in flight altitude. If the group does not have a valid value and so
      should be excluded from comparison - i.e.: average delay of a group of flights
      which did not have a calculable_delay on any flight, this function should
      return None.
    value_string_function: function that, when called with the two parameters flights
      and value, returns a string (inclusive of units and label) that should be
      displayed to describe the quantity. For instance, if value_function returns
      seconds, value_string_function could convert that to a string '3h5m'. Or if
      value_function returns an altitude range, value_string_function could return
      a string 'altitude range of 900ft (1100ft - 2000ft)'.


    group_label: string to identify the group type - i.e.: 'aircraft' or 'flight' in
      the examples above.
    value_label: string to identify the value - i.e.: 'flights' in the examples above,
      but might also be i.e.: longest *delay*, or other quantity descriptor.

    absolute_list: a 2-tuple of strings that is used to label the min and the max - i.e.:
      ('least', 'most'), or ('lowest average', 'highest average').
    min_days: the minimum amount of history required to start generating insights
      about delays.
    lookback_days: the maximum amount of history which will be considered in generating
      insights about delays.


    min_this_group_size: even if this group has, say, the maximum average delay, if it's
      a group of size 1, that is not necessarily very interesting. This sets the
      minimum group size for the focus flight.
    min_comparison_group_size: similarly, comparing the focus group to groups of size
      one does not necessarily produce a meaningful comparison; this sets the minimum
      size for the other groups.
    insight_min: boolean indicating whether to possibly generate insight based on the
      occurrence of the min value.
    insight_max: boolean indicating whether to possibly generate insight based on the
      occurrence of the max value.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  message = ''
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  included_seconds = last_timestamp - first_timestamp

  if included_seconds > SECONDS_IN_DAY * min_days:

    relevant_flights = [
        f for f in flights if
        last_timestamp - f['now'] < SECONDS_IN_DAY * lookback_days]

    grouped_flights = {}
    for flight in relevant_flights:
      group = group_function(flight)
      grouping = grouped_flights.get(group, [])
      grouping.append(flight)
      grouped_flights[group] = grouping


    grouped_values = {g: value_function(grouped_flights[g]) for g in grouped_flights}
    this_group = group_function(relevant_flights[-1])
    this_value = grouped_values[this_group]
    this_group_size = len(grouped_flights[this_group])

    # we will exclude groups that are not big enough
    grouped_flights = {
        k: grouped_flights[k] for k in grouped_flights
        if len(grouped_flights[k]) > min_comparison_group_size}

    # Remove those for which no value could be calculated or which are too small
    grouped_values = {
        g: grouped_values[g] for g in grouped_values
        if grouped_values[g] is not None and g in grouped_flights}

    other_values = list(grouped_values.values())
    if this_value in other_values:
      other_values.remove(this_value)

    if other_values:
      min_value = min(other_values)




                            <----SKIPPED LINES---->




          superlative = True
          equality = False
          superlative_string = absolute_list[0]
          next_value = min_value
        elif this_value == min_value and insight_min:
          superlative = False
          equality = True
          superlative_string = absolute_list[0]
        else:
          superlative = False
          equality = False

        time_horizon_string = SecondsToDdHh(
            last_timestamp - relevant_flights[0]['now'])
        min_comparison_group_size_string = ''
        if min_comparison_group_size > 1:
          min_comparison_group_size_string = (
              ' amongst %s with at least %d flights' %
              (group_label, min_comparison_group_size))

        # flight x (n=7) is tied with a, b, and c for the (longest average, shortest
        # average) delay at 80 minutes
        # flight x is tied with a, b, and c for the (most frequent, least frequent)
        # delay at 30%
        if equality and this_group_size > min_this_group_size:

          identical_groups = sorted([
              str(g) for g in grouped_values
              if grouped_values[g] == this_value and g != this_group])
          if len(identical_groups) > 4:
            identical_string = '%d others' % len(identical_groups)
          elif len(identical_groups) > 1:
            identical_string = (
                '%s and %s' % (', '.join(identical_groups[:-1]), identical_groups[-1]))
          else:
            identical_string = str(identical_groups[0])

          message = (
              '%s %s (n=%d) is tied with %s for the %s %s at %s over the last %s%s' % (
                  group_label,
                  this_group,
                  this_group_size,
                  identical_string,
                  superlative_string,
                  value_label,
                  value_string_function(flights, this_value),
                  time_horizon_string,
                  min_comparison_group_size_string))

        elif superlative and this_group_size > min_this_group_size:
          message = (
              '%s %s (n=%d) has the %s %s with %s; the next '
              '%s %s is %s over the last %s%s' % (
                  group_label,
                  this_group,
                  this_group_size,
                  superlative_string,
                  value_label,
                  value_string_function(flights, this_value),
                  superlative_string,
                  value_label,
                  value_string_function(flights, next_value),
                  time_horizon_string,
                  min_comparison_group_size_string))

  return message

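
# Illustrative sketch (hypothetical parameters, not called anywhere in this module):
# one way to request the "most / fewest flights per aircraft type" style of insight
# described in the docstring above.
def _ExampleMostFlightsByAircraft(flights):
  return FlightInsightSuperlativeGroup(
      flights,
      group_function=lambda f: f.get('aircraft_type_code', KEY_NOT_PRESENT_STRING),
      value_function=len,
      value_string_function=lambda flights, value: '%d flights' % value,
      group_label='aircraft',
      value_label='flights',
      absolute_list=('fewest', 'most'),
      min_this_group_size=5,
      min_comparison_group_size=5)
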

def AverageDelay(flights):
  """Returns the average delay time for a list of flights.

  Args:
    flights: the list of the raw flight data.

  Returns:
    Average seconds of flight delay, calculated as the total seconds of delay amongst
    the flights that have a positive delay, divided by the number of flights that have
    a positive delay. If no flights have a positive (calculable) delay, returns None.

  """
  calculable_delay_seconds = [
      DisplayDepartureTimes(f)['delay_seconds'] for f in flights
      if DisplayDepartureTimes(f)['calculable_delay'] and
      DisplayDepartureTimes(f)['delay_seconds'] > 0]
  average_delay = None
  if calculable_delay_seconds:

    average_delay = sum(calculable_delay_seconds) / len(calculable_delay_seconds)
  return average_delay



def PercentDelay(flights):
  """Returns the percentage of flights that have a positive delay for a list of flights.

  Args:
    flights: the list of the raw flight data.

  Returns:
    Percentage of flights with a delay, calculated as the count of flights with a
    positive delay divided by the total number of flights that have a calculable delay.
    If no flights have a calculable delay, returns None.
  """
  calculable_delay_seconds = [
      DisplayDepartureTimes(f)['delay_seconds'] for f in flights
      if DisplayDepartureTimes(f)['calculable_delay']]
  delay_count = sum([1 for s in calculable_delay_seconds if s > 0])
  percent_delay = None
  if calculable_delay_seconds:
    percent_delay = delay_count / len(calculable_delay_seconds)
  return percent_delay

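
# Conceptual sketch with hypothetical delay values (in seconds), mirroring the
# arithmetic of AverageDelay and PercentDelay above without needing full flight dicts:
# negative means early, zero means on time, positive means delayed.
def _ExampleDelayArithmetic():
  delay_seconds = [-120, 0, 300, 600, 900]
  positive = [s for s in delay_seconds if s > 0]
  average_delay = sum(positive) / len(positive)       # 600.0 seconds, i.e. 10m
  percent_delay = len(positive) / len(delay_seconds)  # 0.6, i.e. delayed 60% of the time
  return (average_delay, percent_delay)
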

def FlightInsightFirstInstance(
    flights,
    key,
    label,
    days=7,
    additional_descriptor_fcn=None):
  """Generates string indicating the flight has the first instance of a particular key.

  Generates text of the following form for the "focus" flight in the data.
  - N311CG is the first time aircraft GLF6 (Gulfstream Aerospace Gulfstream G650
    (twin-jet)) has been seen since at least 7d5h ago
  - PCM8679 is the first time airline Westair Industries has been seen since 9d0h ago


  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    key: the key of the attribute of interest - i.e.: 'destination_iata'.
    label: the human-readable string that should be displayed in the message - i.e.:
      'destination'.
    days: the minimum time of interest for an insight - i.e.: we probably see LAX every
      hour, but we are only interested in particular attributes that have not been
      seen for at least some number of days. Note, however, that the code will go back
      even further to find the last time that attribute was observed, or if never
      observed, indicating "at least".
    additional_descriptor_fcn: a function that, when passed a flight, returns an
      additional parenthetical notation to include about the attribute or flight
      observed - such as expanding the IATA airport code to its full name, etc.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = DisplayFlightNumber(this_flight)
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  included_seconds = last_timestamp - first_timestamp

  if included_seconds > SECONDS_IN_DAY * days:
    this_instance = this_flight.get(key)
    matching = [f for f in flights[:-1] if f.get(key) == this_instance]

    last_potential_observation_sec = included_seconds
    if matching:
      last_potential_observation_sec = last_timestamp - matching[-1]['now']

    if this_instance and last_potential_observation_sec > SECONDS_IN_DAY * days:
      additional_descriptor = ''
      if additional_descriptor_fcn:
        additional_descriptor = ' (%s)' % additional_descriptor_fcn(this_flight)
      last_potential_observation_string = SecondsToDdHh(last_potential_observation_sec)

      if matching:
        message = '%s is the first time %s %s%s has been seen since %s ago' % (
            this_flight_number, label, this_instance, additional_descriptor,
            last_potential_observation_string)
      else:
        message = '%s is the first time %s %s%s has been seen since at least %s ago' % (
            this_flight_number, label, this_instance, additional_descriptor,
            last_potential_observation_string)

  return message


def FlightInsightSuperlativeVertrate(flights, hours=HOURS_IN_DAY):
  """Generates string about the climb rate of the flight being an extreme value.

  Generates text of the following form for the "focus" flight in the data.
  - UAL631   has the fastest ascent rate (5248fpm, 64fpm faster than next fastest) in
    last 24 hours
  - CKS1820 has the fastest descent rate (-1152fpm, -1088fpm faster than next fastest)
    in last 24 hours

  While this is conceptually similar to the more generic FlightInsightSuperlativeAttribute
  function, vert_rate needs its own special handling because it can be either positive or
  negative, and the two signs require different labeling and comparisons.

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    hours: the time horizon over which to look for superlative flights.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = this_flight.get('flight_number')
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  sufficient_data = (last_timestamp - first_timestamp) > SECONDS_IN_HOUR * hours
  pinf = float('inf')
  ninf = float('-inf')

  if sufficient_data:
    relevant_flights = [
        f for f in flights[:-1]
        if last_timestamp - f['now'] < SECONDS_IN_HOUR * hours]

    def AscentRate(f, default):
      vert_rate = f.get('vert_rate')
      if isinstance(vert_rate, numbers.Number) and vert_rate > 0:
        return vert_rate
      return default

    other_ascents = len([
        1 for f in relevant_flights
        if isinstance(f.get('vert_rate'), numbers.Number) and AscentRate(f, ninf) > 0])
    if other_ascents:
      ascent_min = min(
          [AscentRate(f, pinf) for f in relevant_flights if AscentRate(f, ninf) > 0])

      ascent_max = max(
          [AscentRate(f, ninf) for f in relevant_flights if AscentRate(f, ninf) > 0])


    def DescentRate(f, default):
      vert_rate = f.get('vert_rate')
      if isinstance(vert_rate, numbers.Number) and vert_rate < 0:
        return vert_rate
      return default

    other_descents = len([
        1 for f in relevant_flights
        if isinstance(f.get('vert_rate'), numbers.Number) and DescentRate(f, pinf) < 0])
    if other_descents:
      descent_min = min(
          [DescentRate(f, pinf) for f in relevant_flights if DescentRate(f, pinf) < 0])

      descent_max = max(
          [DescentRate(f, ninf) for f in relevant_flights if DescentRate(f, pinf) < 0])


    this_vert_rate = this_flight.get('vert_rate')

    if isinstance(this_vert_rate, numbers.Number):
      if this_vert_rate >= 0:
        this_ascent = this_vert_rate
        this_descent = None
      else:
        this_descent = this_vert_rate
        this_ascent = None

      if this_ascent and other_ascents and this_ascent > ascent_max:
        message = ('%s has the fastest ascent rate (%d%s, %d%s faster '
                   'than next fastest) in last %d hours' % (
                       this_flight_number, this_ascent, CLIMB_RATE_UNITS,
                       this_ascent - ascent_max, CLIMB_RATE_UNITS, hours))
      elif this_ascent and other_ascents and this_ascent < ascent_min:
        message = ('%s has the slowest ascent rate (%d%s, %d%s slower '
                   'than next slowest) in last %d hours' % (
                       this_flight_number, this_ascent, CLIMB_RATE_UNITS,




                            <----SKIPPED LINES---->




                       this_flight_number, this_descent, CLIMB_RATE_UNITS,
                       this_descent - descent_min, CLIMB_RATE_UNITS, hours))
      elif this_descent and other_descents and this_descent > descent_max:
        message = ('%s has the slowest descent rate (%d%s, %d%s slower '
                   'than next slowest) in last %d hours' % (
                       this_flight_number, this_descent, CLIMB_RATE_UNITS,
                       descent_max - this_descent, CLIMB_RATE_UNITS, hours))

  return message


def FlightInsightDelays(
    flights,
    min_days=1,
    lookback_days=MAX_INSIGHT_HORIZON_DAYS,
    min_late_percentage=0.75,
    min_this_delay_minutes=0,
    min_average_delay_minutes=0):
  """Generates string about the delays this flight has seen in the past.

  Only if this flight has a calculable delay itself, this will generate text of the
  following form for the "focus" flight in the data.
  - This 8m delay is the longest UAL1175 has seen in the last 9 days (avg delay is 4m);
    overall stats: 1 early; 9 late; 10 total
  - With todays delay of 7m, UAL1175 is delayed 88% of the time in the last 8 days for
    avg delay of 4m; overall stats: 1 early; 8 late; 9 total

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0] was the
      earliest seen, and flights[-1] is the most recent flight for which we are
      attempting to generate an insight.
    min_days: the minimum amount of history required to start generating insights
      about delays.
    lookback_days: the maximum amount of history which will be considered in generating
      insights about delays.


    min_late_percentage: flights that are not very frequently delayed are not
      necessarily very interesting to generate insights about; this specifies the
      minimum percentage the flight must be late to generate a message that focuses
      on the on-time percentage.
    min_this_delay_minutes: a delay of 1 minute is not necessarily interesting; this
      specifies the minimum delay time this instance of the flight must be late to
      generate a message that focuses on this flight's delay.
    min_average_delay_minutes: an average delay of only 1 minute, even if it happens
      every day, is not necessarily very interesting; this specifies the minimum
      average delay time to generate either type of delay message.

  Returns:
    Printable string message; if no message or insights to generate, then an empty string.

  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = this_flight.get('flight_number', '')
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  included_seconds = last_timestamp - first_timestamp

  if (included_seconds > SECONDS_IN_DAY * min_days
      and DisplayDepartureTimes(this_flight)['calculable_delay']):
    this_delay_seconds = DisplayDepartureTimes(this_flight)['delay_seconds']
    relevant_flights = [
        f for f in flights if
        last_timestamp - f['now'] < SECONDS_IN_DAY * lookback_days and
        this_flight_number == f.get('flight_number', '')]

    if (
        len(relevant_flights) > 1 and
        this_delay_seconds >= min_this_delay_minutes*SECONDS_IN_MINUTE):
      delay_seconds_list = [




                            <----SKIPPED LINES---->




      overall_stats_elements = []
      if delay_early_count:
        overall_stats_elements.append('%d ER' % delay_early_count)
      if delay_ontime_count:
        overall_stats_elements.append('%d OT' % delay_ontime_count)
      if delay_late_count:
        overall_stats_elements.append('%d LT' % delay_late_count)
      if delay_unknown_count:
        overall_stats_elements.append('%d UNK' % delay_unknown_count)
      overall_stats_text = '; '.join(overall_stats_elements)

      days_history = int(
          round(last_timestamp - relevant_flights[0]['now']) / SECONDS_IN_DAY) + 1

      late_percentage = delay_late_count / len(relevant_flights)

      if (superlative and
          delay_late_avg_sec >= min_average_delay_minutes * SECONDS_IN_MINUTE):
        message = (
            'This %s delay is the %s %s has seen in the last %d days (avg delay is %s);'
            ' overall stats: %s' % (
                SecondsToHhMm(this_delay_seconds),
                delay_keyword,
                this_flight_number,
                days_history,
                SecondsToHhMm(delay_late_avg_sec),
                overall_stats_text))
      elif (late_percentage > min_late_percentage and
            delay_late_avg_sec >= min_average_delay_minutes * SECONDS_IN_MINUTE):
        # it's just been delayed frequently!
        message = (
            'With today''s delay of %s, %s is delayed %d%% of the time in the last %d '
            'days for avg delay of %s; overall stats: %s' % (
                SecondsToHhMm(this_delay_seconds),
                this_flight_number,
                int(100 * late_percentage),
                days_history,
                SecondsToHhMm(delay_late_avg_sec),
                overall_stats_text))
  return message


def FlightInsights(flights):
  """Identifies all the insight messages about the most recently seen flight.

  Generates a possibly-empty list of messages about the flight.

  Args:
    flights: List of all flights where the last flight in the list is the focus flight
      for which we are trying to identify something interesting.

  Returns:
    List of 2-tuples, where the first element in the tuple is a flag indicating the type
    of insight message, and the second element is the list of printable text lines
    (wrapped to the splitflap width) describing something interesting about the flight;
    if there isn't anything interesting, returns an empty list.
  """
  messages = []

  def AppendMessageType(message_type, message):
    if message:
      messages.append((message_type, message))

  # This flight number was last seen x days ago

  AppendMessageType(FLAG_INSIGHT_LAST_SEEN, FlightInsightLastSeen(flights, days_ago=2))

  # Yesterday this same flight flew a materially different type of aircraft
  AppendMessageType(
      FLAG_INSIGHT_DIFF_AIRCRAFT,
      FlightInsightDifferentAircraft(flights, percent_size_difference=0.1))

  # This is the 3rd flight to the same destination in the last hour
  AppendMessageType(
      FLAG_INSIGHT_NTH_FLIGHT,
      FlightInsightNthFlight(flights, hours=1, min_multiple_flights=2))

  # This is the [lowest / highest] [speed / altitude / climbrate] in the last 24 hours

  AppendMessageType(FLAG_INSIGHT_GROUNDSPEED, FlightInsightSuperlativeAttribute(
      flights,
      'speed',
      'groundspeed',
      SPEED_UNITS,
      ['slowest', 'fastest'],
      hours=HOURS_IN_DAY))
  AppendMessageType(FLAG_INSIGHT_ALTITUDE, FlightInsightSuperlativeAttribute(
      flights,
      'altitude',
      'altitude',
      DISTANCE_UNITS,
      ['lowest', 'highest'],
      hours=HOURS_IN_DAY))

  AppendMessageType(FLAG_INSIGHT_VERTRATE, FlightInsightSuperlativeVertrate(flights))

  # First instances: destination, first aircraft, etc.
  AppendMessageType(FLAG_INSIGHT_FIRST_DEST, FlightInsightFirstInstance(
      flights, 'destination_iata', 'destination', days=7,
      additional_descriptor_fcn=lambda f: f['destination_friendly']))
  AppendMessageType(FLAG_INSIGHT_FIRST_ORIGIN, FlightInsightFirstInstance(
      flights, 'origin_iata', 'origin', days=7,
      additional_descriptor_fcn=lambda f: f['origin_friendly']))
  AppendMessageType(FLAG_INSIGHT_FIRST_AIRLINE, FlightInsightFirstInstance(
      flights, 'airline_short_name', 'airline', days=7))
  AppendMessageType(FLAG_INSIGHT_FIRST_AIRCRAFT, FlightInsightFirstInstance(
      flights, 'aircraft_type_code', 'aircraft', days=7,
      additional_descriptor_fcn=lambda f: f['aircraft_type_friendly']))

  # This is the longest / shortest delay this flight has seen in the last 30 days at
  # 2h5m; including today, this flight has been delayed x of the last y times.

  AppendMessageType(FLAG_INSIGHT_LONGEST_DELAY, FlightInsightDelays(
      flights, min_late_percentage=0.75,
      min_this_delay_minutes=0,
      min_average_delay_minutes=0))

  def DelayTimeAndFrequencyMessage(
      types_tuple,
      group_function,
      group_label,
      filter_function=lambda this, other: True,
      min_days=1,
      lookback_days=MAX_INSIGHT_HORIZON_DAYS,
      min_this_group_size=0,
      min_comparison_group_size=0,
      min_group_qty=0,
      percentile_low=float('-inf'),
      percentile_high=float('inf')):
    value_function_tuple = (PercentDelay, AverageDelay)
    value_string_function_tuple = (
        lambda flights, value: '%d%% of flights delayed an average of %s' % (




                            <----SKIPPED LINES---->




        lambda flights, value: 'average delay of %s' % SecondsToHhMm(value))
    value_label_tuple = ('delay frequency', 'delay time')
    for n in range(2):
      if types_tuple[n]:
        AppendMessageType(types_tuple[n], FlightInsightGroupPercentile(
            flights,
            group_function=group_function,
            value_function=value_function_tuple[n],
            value_string_function=value_string_function_tuple[n],
            group_label=group_label,
            value_label=value_label_tuple[n],
            filter_function=filter_function,
            min_days=min_days,
            min_this_group_size=min_this_group_size,
            min_comparison_group_size=min_comparison_group_size,
            min_group_qty=min_group_qty,
            lookback_days=lookback_days,
            percentile_low=percentile_low,
            percentile_high=percentile_high))

  # flight UAL1 (n=5) has a delay frequency in the 72nd %tile, with 100% of flights
  # delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (FLAG_INSIGHT_FLIGHT_DELAY_FREQUENCY, FLAG_INSIGHT_FLIGHT_DELAY_TIME),
      group_function=lambda flight: flight.get('flight_number', KEY_NOT_PRESENT_STRING),
      group_label='flight',
      min_days=1,
      min_this_group_size=4,
      min_comparison_group_size=0,
      min_group_qty=0,
      percentile_low=10,
      percentile_high=90)

  # Airline United (n=5) has a delay frequency in the 72nd %tile, with 100% of flights
  # delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (FLAG_INSIGHT_AIRLINE_DELAY_FREQUENCY, FLAG_INSIGHT_AIRLINE_DELAY_TIME),
      group_function=DisplayAirline,
      group_label='airline',
      min_days=1,
      min_this_group_size=10,
      min_comparison_group_size=5,
      min_group_qty=5,
      percentile_low=10,
      percentile_high=80)

  # Destination LAX (n=5) has a delay frequency in the 72nd %tile, with 100% of flights
  # delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (FLAG_INSIGHT_DESTINATION_DELAY_FREQUENCY, FLAG_INSIGHT_DESTINATION_DELAY_TIME),
      group_function=DisplayDestinationFriendly,
      group_label='destination',
      min_days=1,
      min_this_group_size=10,
      min_comparison_group_size=5,
      min_group_qty=5,
      percentile_low=10,
      percentile_high=90)

  # we only want to do this if we're already at ~75% of the number of flights we'd
  # expect to see for the hour
  flight_hours = {}
  for flight in flights:
    if (flights[-1]['now'] - flight['now'] < 8.5 * SECONDS_IN_DAY and
        DisplayTime(flight, '%-I%p') == DisplayTime(flights[-1], '%-I%p')):
      flight_hours[DisplayTime(flight, '%-d')] = flight_hours.get(
          DisplayTime(flight, '%-d'), 0) + 1
  min_this_hour_flights = max(3, 0.75 * max(flight_hours.values()))

  # Once we've commented on the insights for an hour or day, we don't want to do it again

  hour_delay_frequency_flag = FLAG_INSIGHT_HOUR_DELAY_FREQUENCY
  hour_delay_time_flag = FLAG_INSIGHT_HOUR_DELAY_TIME
  date_delay_frequency_flag = FLAG_INSIGHT_DATE_DELAY_FREQUENCY
  date_delay_time_flag = FLAG_INSIGHT_DATE_DELAY_TIME
  for flight in flights[:-1]:
    insights = flight.get('insight_types', [])
    this_hour = DisplayTime(flights[-1], '%x %-I%p')
    this_day = DisplayTime(flights[-1], '%x')
    if (this_hour == DisplayTime(flight, '%x %-I%p') and
        FLAG_INSIGHT_HOUR_DELAY_FREQUENCY in insights):
      hour_delay_frequency_flag = None
    if (this_hour == DisplayTime(flight, '%x %-I%p') and
        FLAG_INSIGHT_HOUR_DELAY_TIME in insights):
      hour_delay_time_flag = None
    if (this_day == DisplayTime(flight, '%x') and
        FLAG_INSIGHT_DATE_DELAY_FREQUENCY in insights):
      date_delay_frequency_flag = None
    if (this_day == DisplayTime(flight, '%x') and
        FLAG_INSIGHT_DATE_DELAY_TIME in insights):
      date_delay_time_flag = None

  def TodaysHour(f):
    f_date = DisplayTime(f, '%x')
    f_hour = DisplayTime(f, '%-I%p')
    if f_date == DisplayTime(flights[-1], '%x'):
      return '%s flights today' % f_hour
    return '%s %s' % (f_date, f_hour)

  # Today's 7a flights have a delay frequency in the 72nd %tile, with 100% of flights
  # delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (hour_delay_frequency_flag, hour_delay_time_flag),
      group_function=TodaysHour,
      group_label='',
      filter_function=lambda this, other:
      DisplayTime(this, '%-I%p') == DisplayTime(other, '%-I%p'),
      min_days=3,
      min_this_group_size=min_this_hour_flights,
      min_comparison_group_size=min_this_hour_flights,
      min_group_qty=5,
      percentile_low=10,
      percentile_high=90)

  # we only want to do this if we're already at ~75% of the number of flights we'd
  # expect to see for the day
  flight_days = {}
  for flight in flights:
    if flights[-1]['now'] - flight['now'] < 8.5 * SECONDS_IN_DAY:
      flight_days[DisplayTime(flight, '%-d')] = flight_days.get(
          DisplayTime(flight, '%-d'), 0) + 1
  min_this_day_flights = max(40, 0.75 * max(flight_days.values()))

  # Today (31st) has a delay frequency in the 72nd %tile, with 100% of flights
  # delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (date_delay_frequency_flag, date_delay_time_flag),
      group_function=lambda f:
      '(' + Ordinal(int(DisplayTime(f, '%-d'))) + ')',
      group_label='Today',
      min_days=7,
      min_this_group_size=min_this_day_flights,
      min_comparison_group_size=min_this_day_flights,
      min_group_qty=7,
      lookback_days=28,  # Otherwise, there might be two 1st's of the month to compare
      percentile_low=10,
      percentile_high=90)

  messages = [
      (t, textwrap.wrap(m, width=SPLITFLAP_CHARS_PER_LINE))
      for (t, m) in messages]

  return messages


def CreateFlightInsights(
    flights, flight_insights_enabled_string, insight_message_distribution):
  """Returns the desired quantity of flight insight messages.

  Though the function FlightInsights generates all possible insight messages about a
  flight, the user may have only wanted one. Depending on the setting of
  flight_insights_enabled_string, this function reduces the set of all insights by
  selecting the least-frequently reported type of insight message.

  In order to choose the least-frequently reported type, we need to keep track of what
  has been reported so far, which we do here in insight_message_distribution, and which
  we then update with each pass through this function.


  Args:
    flights: List of all flights where the last flight in the list is the focus flight
      for which we are trying to identify something interesting.
    flight_insights_enabled_string: string indicating how many insights are desired,
      which may be one of 'all', 'one', or 'hide'.
    insight_message_distribution: dictionary, where the keys are one of the flags
      indicating message type, and the values are how frequently that type of insight
      has been displayed in flights.  The dictionary is updated in place.


  Returns:
    Possibly-empty list of messages - the list may be empty if there are no insights,
    or if the setting selected for flight_insights_enabled_string is neither 'all' nor
    'one'. The messages, if included, are lists of printable text lines as returned by
    FlightInsights.

  """
  naked_messages = []

  this_flight_insights = []

  if flight_insights_enabled_string not in ('all', 'one'):
    return naked_messages

  insight_messages = FlightInsights(flights)

  if flight_insights_enabled_string == 'all' and insight_messages:
    for (t, m) in insight_messages:
      insight_message_distribution[t] = insight_message_distribution.get(t, 0) + 1

      this_flight_insights.append(t)
      naked_messages.append(m)

  if flight_insights_enabled_string == 'one' and insight_messages:
    types_of_messages = [t for (t, unused_m) in insight_messages]
    frequencies_of_insights = [
        insight_message_distribution.get(t, 0) for t in types_of_messages]
    min_frequency = min(frequencies_of_insights)
    for t in sorted(types_of_messages):
      if insight_message_distribution.get(t, 0) == min_frequency:
        break

    insight_message_distribution[t] = insight_message_distribution.get(t, 0) + 1
    for message_tuple in insight_messages:
      if message_tuple[0] == t:
        naked_messages.append(message_tuple[1])
        this_flight_insights.append(t)
        break

  # Save the insight types displayed for this flight so we needn't regenerate them later

  flights[-1]['insight_types'] = this_flight_insights

  return naked_messages

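
# Illustrative sketch (hypothetical driver loop, not used elsewhere in this module):
# feed flights in observation order and reuse a single distribution dict so that the
# 'one' setting keeps preferring the least-frequently shown insight type.
def _ExampleOneInsightPerFlight(all_flights):
  distribution = {}
  selected = []
  for n in range(1, len(all_flights) + 1):
    selected.extend(CreateFlightInsights(all_flights[:n], 'one', distribution))
  return selected
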

def FlightCriteriaHistogramPng(
    flights,
    max_distance_feet,
    max_altitude_feet,
    max_days,
    filename=HOURLY_IMAGE_FILE,
    last_max_distance_feet=None,
    last_max_altitude_feet=None):
  """Saves as a png file the histogram of the hourly flight data for the given filters.

  Generates a png histogram of the count of flights by hour that meet the specified
  criteria: max altitude, max distance, and within the last number of days. Also
  optionally generates as a separate data series in same chart a histogram with a
  different max altitude and distance. Saves this histogram to disk.


  Args:
    flights: list of the flights.
    max_distance_feet: max distance for which to include flights in the histogram.

    max_altitude_feet: max altitude for which to include flights in the histogram.

    max_days: maximum number of days as described.
    filename: file into which to save the png.
    last_max_distance_feet: if provided, along with last_max_altitude_feet, generates
      a second data series with different criteria for distance and altitude, for
      which the histogram data will be plotted alongside the first series.

    last_max_altitude_feet: see above.
  """
  if not flights:
    return
  (values, keys, unused_filtered_data) = GenerateHistogramData(
      flights,
      HourString,
      HOURS,
      hours=max_days*HOURS_IN_DAY,
      max_distance_feet=max_distance_feet,
      max_altitude_feet=max_altitude_feet,
      normalize_factor=max_days,
      exhaustive=True)


  comparison = last_max_distance_feet is not None and last_max_altitude_feet is not None
  if comparison:
    (last_values, unused_last_keys, unused_filtered_data) = GenerateHistogramData(
        flights,
        HourString,
        HOURS,
        hours=max_days*HOURS_IN_DAY,
        max_distance_feet=last_max_distance_feet,
        max_altitude_feet=last_max_altitude_feet,
        normalize_factor=max_days,
        exhaustive=True)

  x = numpy.arange(len(keys))
  unused_fig, ax = matplotlib.pyplot.subplots()
  width = 0.35
  ax.bar(
      x - width/2, values, width,
      label='Current - alt: %d; dist: %d' % (max_altitude_feet, max_distance_feet))

  title = 'Daily Flights Expected: %d / day' % sum(values)
  if comparison:
    ax.bar(
        x + width/2, last_values, width,
        label='Prior - alt: %d; dist: %d' % (
            last_max_altitude_feet, last_max_distance_feet))
    title += ' (%+d)' % (round(sum(values) - sum(last_values)))

  ax.set_title(title)
  ax.set_ylabel('Average Observed Flights')
  if comparison:
    ax.legend()
  matplotlib.pyplot.xticks(
      x, keys, rotation='vertical', wrap=True,
      horizontalalignment='right',
      verticalalignment='center')

  matplotlib.pyplot.savefig(filename)
  matplotlib.pyplot.close()

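
# Illustrative sketch (hypothetical filter values, not called anywhere in this module):
# regenerate the hourly histogram image after a filter change, plotting the prior
# criteria as a comparison series, much as CheckForNewFilterCriteria does above.
def _ExampleHistogramRefresh(flights):
  FlightCriteriaHistogramPng(
      flights, 15000, 8000, 7,
      last_max_distance_feet=20000,
      last_max_altitude_feet=10000)
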

def GenerateHistogramData(
    data,
    keyfunction,
    sort_type,
    truncate=float('inf'),
    hours=float('inf'),
    max_distance_feet=float('inf'),
    max_altitude_feet=float('inf'),
    normalize_factor=0,
    exhaustive=False):
  """Generates sorted data for a histogram from a description of the flights.

  Given an iterable describing the flights, this function generates the label (or key),
  and the frequency (or value) from which a histogram can be rendered.

  Args:
    data: the iterable of the raw data from which the histogram will be generated;
      each element of the iterable is a dictionary, that contains at least the key
      'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    keyfunction: the function that determines how the key or label of the histogram
      should be generated; it is called for each element of the data iterable. For
      instance, to simply generate a histogram on the attribute 'heading',
      keyfunction would be lambda a: a['heading'].
    sort_type: determines how the keys (and the corresponding values) are sorted:

      'key': the keys are sorted by a simple comparison operator between them, which
          sorts strings alphabetically and numbers numerically.
      'value': the keys are sorted by a comparison between the values, which means
          that more frequency-occurring keys are listed first.
      list: if instead of the strings a list is passed, the keys are then sorted in
          the sequence enumerated in the list. This is useful for, say, ensuring that
          the days of the week (Tues, Wed, Thur, ...) are listed in sequence. Keys
          that are generated by keyfunction but that are not in the given list are
          sorted last (and then amongst those, alphabetically).
    truncate: integer indicating the maximum number of keys to return; if set to 0, or if
      set to a value larger than the number of keys, no truncation occurs. But if set
      to a value less than the number of keys, then the keys with the lowest frequency
      are combined into one key named OTHER_STRING so that the number of keys
      in the resulting histogram (together with OTHER_STRING) is equal to truncate.

    hours: integer indicating the number of hours of history to include. Flights with a
      calcd_display_time more than this many hours in the past are excluded from the
      histogram generation. Note that this is timezone aware, so that if the histogram
      data is generated on a machine with a different timezone than the one that
      recorded the original data, the correct number of hours is still honored.
    max_distance_feet: number indicating the geo fence outside of which flights should
      be ignored for the purposes of including the flight data in the histogram.

    max_altitude_feet: number indicating the maximum altitude outside of which flights
      should be ignored for the purposes of including the flight data in the histogram.

    normalize_factor: divisor to apply to all the values, so that we can easily
      renormalize the histogram to display on a percentage or daily basis; if zero,
      no renormalization is applied.
    exhaustive: boolean only relevant if sort_type is a list, in which case, this ensures
      that the returned set of keys (and matching values) contains all the elements in
      the list, including potentially those with a frequency of zero, within the
      restrictions of truncate.

  Returns:
    3-tuple cut and sorted as indicated by parameters above:
    - list of values (or frequency) of the histogram elements
    - list of keys (or labels) of the histogram elements
    - list of the elements of data that passed the distance, altitude, and hours filters
  """
  histogram_dict = {}
  filtered_data = []

  def IfNoneReturnInf(f, key):
    value = f.get(key)
    if not value:
      value = float('inf')
    return value

  # Get the timezone-aware "now" just once so every element comparison can reuse it.
  now = datetime.datetime.now(TZ)
  for element in data:
    if (
        IfNoneReturnInf(element, 'min_feet') <= max_distance_feet and
        IfNoneReturnInf(element, 'altitude') <= max_altitude_feet and
        HoursSinceFlight(now, element['now']) <= hours):
      filtered_data.append(element)
      key = keyfunction(element)
      if key is None or key == '':
        key = KEY_NOT_PRESENT_STRING
      if key in histogram_dict:
        histogram_dict[key] += 1
      else:
        histogram_dict[key] = 1
  values = list(histogram_dict.values())
  keys = list(histogram_dict.keys())

  if normalize_factor:
    values = [v / normalize_factor for v in values]

  sort_by_enumerated_list = isinstance(sort_type, list)
  if exhaustive and sort_by_enumerated_list:
    missing_keys = set(sort_type).difference(set(keys))
    missing_values = [0 for unused_k in missing_keys]
    keys.extend(missing_keys)
    values.extend(missing_values)

  if keys:  # filters could potentially have removed all data
    if not truncate or len(keys) <= truncate:
      if sort_by_enumerated_list:
        (values, keys) = SortByDefinedList(values, keys, sort_type)
      elif sort_type == 'value':
        (values, keys) = SortByValues(values, keys)
      else:
        (values, keys) = SortByKeys(values, keys)
    else:  # Unknown might fall in the middle, and so shouldn't be truncated
      (values, keys) = SortByValues(values, keys, ignore_sort_at_end_strings=True)

      truncated_values = list(values[:truncate-1])
      truncated_keys = list(keys[:truncate-1])
      other_value = sum(values[truncate-1:])
      truncated_values.append(other_value)
      truncated_keys.append(OTHER_STRING)
      if sort_by_enumerated_list:
        (values, keys) = SortByDefinedList(
            truncated_values, truncated_keys, sort_type)
      elif sort_type == 'value':
        (values, keys) = SortByValues(
            truncated_values, truncated_keys, ignore_sort_at_end_strings=False)
      else:
        (values, keys) = SortByKeys(truncated_values, truncated_keys)
  else:
    values = []
    keys = []
  return (values, keys, filtered_data)

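
# Illustrative sketch (hypothetical thresholds, not called anywhere in this module):
# hourly counts over the last week, restricted to nearby / low flights and normalized
# to a per-day average - essentially what FlightCriteriaHistogramPng requests above.
def _ExampleHourlyHistogram(flights):
  (values, keys, unused_filtered) = GenerateHistogramData(
      flights,
      HourString,
      HOURS,
      hours=7 * HOURS_IN_DAY,
      max_distance_feet=25000,
      max_altitude_feet=10000,
      normalize_factor=7,
      exhaustive=True)
  return dict(zip(keys, values))
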

def SortByValues(values, keys, ignore_sort_at_end_strings=False):
  """Sorts the list of values in descending sequence, applying same resorting to keys.

  Given a list of keys and values representing a histogram, returns two new lists that
  are sorted so that the values occur in descending sequence and the keys are moved
  around in the same way. This allows the printing of a histogram with the keys that
  have the largest values listed first - i.e.: top five airlines.

  Keys identified by SORT_AT_END_STRINGS - such as, perhaps, 'Other' - will optionally
  be placed at the end of the sequence. And where values are identical, the secondary
  sort is based on the keys.

  Args:
    values: list of values for the histogram to be used as the primary sort key.
    keys: list of keys for the histogram that will be moved in the same way as the values.
    ignore_sort_at_end_strings: boolean that, if True, causes the specially-defined
      SORT_AT_END_STRINGS keys to be sorted like any other key rather than being pushed
      to the end of the sequence.

  Returns:
    2-tuple of (values, keys) lists sorted as described above
  """
  if ignore_sort_at_end_strings:
    sort_at_end_strings = []
  else:
    sort_at_end_strings = SORT_AT_END_STRINGS

  return SortZipped(
      values, keys, True,
      lambda a: (
          False, False, a[1]) if a[1] in sort_at_end_strings else (True, a[0], a[1]))


def SortByKeys(values, keys, ignore_sort_at_end_strings=False):
  """Sorts the list of keys in ascending sequence, applying same resorting to values.

  Given a list of keys and values representing a histogram, returns two new lists that
  are sorted so that the keys occur in ascending alpha sequence and the values are moved
  around in the same way. This allows the printing of a histogram with the first keys
  alphabetically listed first - i.e.: 7am, 8am, 9am.

  Keys identified by SORT_AT_END_STRINGS - such as, perhaps, 'Other' - will optionally
  be placed at the end of the sequence.

  Args:
    values: list of values for the histogram that will be moved in the same way as the
      keys.
    keys: list of keys for the histogram to be used as the primary sort key.
    ignore_sort_at_end_strings: boolean that, if True, causes the specially-defined
      SORT_AT_END_STRINGS keys to be sorted like any other key rather than being pushed
      to the end of the sequence.

  Returns:
    2-tuple of (values, keys) lists sorted as described above
  """
  if ignore_sort_at_end_strings:
    sort_at_end_strings = []
  else:
    sort_at_end_strings = SORT_AT_END_STRINGS

  return SortZipped(
      values, keys, False,
      lambda a: (True, a[1]) if a[1] in sort_at_end_strings else (False, a[1]))


def SortByDefinedList(values, keys, sort_sequence):
  """Sorts the keys in user-enumerated sequence, applying same resorting to values.

  Given a list of keys and values representing a histogram, returns two new lists that
  are sorted so that the keys occur in the specific sequence identified in the list
  sort_sequence, while the values are moved around in the same way. This allows the
  printing of a histogram with the keys occurring in a canonical order - i.e.: Tuesday,
  Wednesday, Thursday. Keys present in keys but missing from sort_sequence are sorted
  at the end; amongst themselves, they are sorted by value.

  Args:
    values: list of values for the histogram that will be moved in the same way as the
      keys.
    keys: list of keys for the histogram to be used as the primary sort key.
    sort_sequence: list - which need not be exhaustive - of the keys in their desired
      order.

  Returns:
    2-tuple of (values, keys) lists sorted as described above
  """
  return SortZipped(
      values, keys, False,
      lambda a: (
          False, sort_sequence.index(a[1])) if a[1] in sort_sequence else (True, a[0]))
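

# Illustrative sketch only (never called): sorting histogram keys into a caller-defined
# canonical order, with keys missing from the enumerated list pushed to the end.
def _ExampleSortByDefinedList():
  values = [4, 9, 2, 1]
  keys = ['Wed', 'Tue', 'Thu', 'Unknown']
  # Returns ((9, 4, 2, 1), ('Tue', 'Wed', 'Thu', 'Unknown')).
  return SortByDefinedList(values, keys, ['Tue', 'Wed', 'Thu'])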


def SortZipped(x, y, reverse, key):
  """Sorts lists x & y via the function defined in key.

  Applies the same reordering to the two lists x and y, where the ordering is determined
  by the function key applied to each tuple (x[n], y[n]). That is, suppose

  - x = [3, 2, 1]
  - y = ['b', 'c', 'a']
  Then the sort to both lists is done based on how the key is applied to the tuples:

  - [(3, 'b'), (2, 'c'), (1, 'a')]

  If key = lambda a: a[0], then the sort is done based on 3, 2, 1, so the sorted lists are

  - x = [1, 2, 3]
  - y = ['a', 'c', 'b']

  If key = lambda a: a[1], then the sort is done based on ['b', 'c', 'a'], so the sorted
  lists are
  - x = [1, 3, 2]
  - y = ['a', 'b', 'c']

  Args:
    x: First list
    y: Second list
    reverse: Boolean indicating whether the sort should be descending (True) or
      ascending (False)
    key: function applied to the 2-tuple constructed by taking the corresponding values
      of the lists x & y, used to generate the key on which the sort is applied

  Returns:
    2-tuple of the (x, y) sequences (returned as tuples) sorted as described above
  """
  zipped_xy = zip(x, y)
  sorted_xy = sorted(zipped_xy, reverse=reverse, key=key)
  # unzip
  (x, y) = list(zip(*sorted_xy))
  return (x, y)
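

# Illustrative sketch only (never called), mirroring the docstring examples above.
def _ExampleSortZipped():
  x = [3, 2, 1]
  y = ['b', 'c', 'a']
  by_value = SortZipped(x, y, False, lambda a: a[0])  # ((1, 2, 3), ('a', 'c', 'b'))
  by_key = SortZipped(x, y, False, lambda a: a[1])  # ((1, 3, 2), ('a', 'b', 'c'))
  return by_value, by_key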


def CreateSingleHistogramChart(
    data,
    keyfunction,
    sort_type,
    title,
    position=None,
    truncate=0,
    hours=float('inf'),
    max_distance_feet=float('inf'),
    max_altitude_feet=float('inf'),
    normalize_factor=0,
    exhaustive=False,
    figsize_inches=(9, 6)):
  """Creates matplotlib.pyplot of histogram that can then be saved or printed.

  Args:
    data: the iterable of the raw data from which the histogram will be generated;
      each element of the iterable is a dictionary, that contains at least the key
      'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    keyfunction: the function that determines how the key or label of the histogram
      should be generated; it is called for each element of the data iterable. For
      instance, to simply generate a histogram on the attribute 'heading',
      keyfunction would be lambda a: a['heading'].
    title: the "base" title to include on the histogram; it will additionally be
      augmented with the details about the date range.
    position: Either a 3-digit integer or an iterable of three separate integers
      describing the position of the subplot. If the three integers are nrows, ncols,
      and index in order, the subplot will take the index position on a grid with nrows
      rows and ncols columns. index starts at 1 in the upper left corner and increases
      to the right.
    sort_type: determines how the keys (and the corresponding values) are sorted:

      'key': the keys are sorted by a simple comparison operator between them, which
          sorts strings alphabetically and numbers numerically.
      'value': the keys are sorted by a comparison between the values, which means
          that more frequency-occurring keys are listed first.
      list: if instead of the strings a list is passed, the keys are then sorted in
          the sequence enumerated in the list. This is useful for, say, ensuring that
          the days of the week (Tues, Wed, Thur, ...) are listed in sequence. Keys
          that are generated by keyfunction but that are not in the given list are
          sorted last (and then amongst those, alphabetically).
    truncate: integer indicating the maximum number of keys to return; if set to 0, or if
      set to a value larger than the number of keys, no truncation occurs. But if set
      to a value less than the number of keys, then the keys with the lowest frequency
      are combined into one key named OTHER_STRING so that the number of keys
      in the resulting histogram (together with OTHER_STRING) is equal to truncate.

    hours: integer indicating the number of hours of history to include. Flights with a
      calcd_display_time more than this many hours in the past are excluded from the
      histogram generation. Note that this is timezone aware, so that if the histogram
      data is generated on a machine with a different timezone than that that recorded
      the original data, the correct number of hours is still honored.

    max_distance_feet: number indicating the geo fence outside of which flights should
      be ignored for the purposes of including the flight data in the histogram.

    max_altitude_feet: number indicating the maximum altitude outside of which flights
      should be ignored for the purposes of including the flight data in the histogram.

    normalize_factor: divisor to apply to all the values, so that we can easily
      renormalize the histogram to display on a percentage or daily basis; if zero,
      no renormalization is applied.
    exhaustive: boolean only relevant if sort_type is a list, in which case, this ensures
      that the returned set of keys (and matching values) contains all the elements in
      the list, including potentially those with a frequency of zero, within the
      restrictions of truncate.
    figsize_inches: a 2-tuple of width, height indicating the size of the histogram.

  """
  (values, keys, filtered_data) = GenerateHistogramData(
      data,
      keyfunction,
      sort_type,
      truncate=truncate,
      hours=hours,
      max_distance_feet=max_distance_feet,
      max_altitude_feet=max_altitude_feet,
      normalize_factor=normalize_factor,
      exhaustive=exhaustive)
  matplotlib.pyplot.figure(figsize=figsize_inches)
  if position:
    if isinstance(position, int):  # i.e.: 111
      matplotlib.pyplot.subplot(position)
    else:
      matplotlib.pyplot.subplot(*position)
  values_coordinates = numpy.arange(len(keys))
  matplotlib.pyplot.bar(values_coordinates, values)

  # The filtering may have removed all flight data, or there may have been none to
  # start with.
  if not filtered_data:
    return

  earliest_flight_time = int(filtered_data[0]['now'])
  last_flight_time = int(filtered_data[-1]['now'])
  date_range_string = ' %d flights over last %s' % (
      sum(values), SecondsToDdHh(last_flight_time - earliest_flight_time))

  timestamp_string = 'Last updated %s' % EpochDisplayTime(
      time.time(), format_string='%b %-d, %-I:%M%p')

  full_title = '\n'.join([title, date_range_string, timestamp_string])

  matplotlib.pyplot.title(full_title)

  matplotlib.pyplot.subplots_adjust(bottom=0.15, left=0.09, right=0.99, top=0.89)


  matplotlib.pyplot.xticks(
      values_coordinates, keys, rotation='vertical', wrap=True,
      horizontalalignment='right',
      verticalalignment='center')


def HistogramSettingsHours(how_much_history):
  """Extracts the desired history (in hours) from the histogram configuration string.

  Args:
    how_much_history: string from the histogram config file.

  Returns:
    Number of hours of history to include in the histogram.
  """
  if how_much_history == 'today':
    hours = HoursSinceMidnight()
  elif how_much_history == '24h':
    hours = HOURS_IN_DAY
  elif how_much_history == '7d':
    hours = 7 * HOURS_IN_DAY
  elif how_much_history == '30d':
    hours = 30 * HOURS_IN_DAY
  else:
    Log(
        'Histogram form has invalid value for how_much_history: %s' % how_much_history)
    hours = 7 * HOURS_IN_DAY
  return hours


def HistogramSettingsScreens(max_screens):
  """Extracts the desired number of text screens from the histogram configuration string.

  Args:
    max_screens: string from the histogram config file.

  Returns:
    Maximum number of screens to display for a splitflap histogram.
  """
  if max_screens == '_1':
    screen_limit = 1
  elif max_screens == '_2':
    screen_limit = 2
  elif max_screens == '_5':
    screen_limit = 5
  elif max_screens == 'all':
    screen_limit = 0  # no limit on screens
  else:
    Log('Histogram form has invalid value for max_screens: %s' % max_screens)
    screen_limit = 1
  return screen_limit


def HistogramSettingsKeySortTitle(which, hours, flights, max_altitude=45000):
  """Provides the arguments necessary to generate a histogram from the config string.

  The same parameters are used to generate either a splitflap text or web-rendered
  histogram in terms of the histogram title, the keyfunction, and how to sort the keys.
  For a given histogram name (based on the names defined in the histogram config file),
  this provides those parameters.

  Args:
    which: string from the histogram config file indicating the histogram to provide
      settings for.
    hours: how many hours of histogram data have been requested.
    flights: list of the flights in the data set.
    max_altitude: indicates the maximum altitude that should be included on the
      altitude labels.

  Returns:
    A 4-tuple of (keyfunction, sort, title, hours) - the parameters used by either
    CreateSingleHistogramChart or MessageboardHistogram.
  """
  def DivideAndFormat(dividend, divisor):
    if dividend is None:
      return KEY_NOT_PRESENT_STRING
    if isinstance(dividend, numbers.Number):
      return '%2d' % round(dividend / divisor)
    return dividend[:2]

  def RoundAndFormat(dividend, divisor, digits):
    if dividend is None:
      return KEY_NOT_PRESENT_STRING
    if isinstance(dividend, numbers.Number):




                            <----SKIPPED LINES---->




    key = lambda k: k.get('destination_iata', KEY_NOT_PRESENT_STRING)
    sort = 'value'
    title = 'Destination'
  elif which == 'origin':
    key = lambda k: k.get('origin_iata', KEY_NOT_PRESENT_STRING)
    sort = 'value'
    title = 'Origin'
  elif which == 'hour':
    key = lambda k: DisplayTime(k, '%H')
    sort = 'key'
    title = 'Hour'
  elif which == 'airline':
    key = DisplayAirline
    sort = 'value'
    title = 'Airline'
  elif which == 'aircraft':
    key = lambda k: k.get('aircraft_type_code', KEY_NOT_PRESENT_STRING)
    sort = 'value'
    title = 'Aircraft'
  elif which == 'altitude':
    key = lambda k: DivideAndFormat(k.get('altitude', KEY_NOT_PRESENT_STRING), 1000)
    sort = ['%2d' % x for x in range(0, round((max_altitude+1)/1000))]
    title = 'Altitude (1000ft)'
  elif which == 'bearing':
    key = lambda k: ConvertBearingToCompassDirection(
        k.get('track', KEY_NOT_PRESENT_STRING), pad=True, length=3)
    sort = [d.rjust(3) for d in DIRECTIONS_16]
    title = 'Bearing'
  elif which == 'distance':
    key = lambda k: DivideAndFormat(k.get('min_feet', KEY_NOT_PRESENT_STRING), 100)
    sort = ['%2d' % x for x in range(0, round((MIN_METERS*FEET_IN_METER)/100)+1)]
    title = 'Min Dist (100ft)'
  elif which == 'day_of_week':
    key = lambda k: DisplayTime(k, '%a')
    sort = DAYS_OF_WEEK
    title = 'Day of Week'
    # if less than one week, use the hours as requested; if more than one week, round
    # down to full-week multiples
    hours_in_week = 7 * HOURS_IN_DAY
    weeks = hours / hours_in_week
    if weeks > 1:
      hours = hours_in_week * int(hours / hours_in_week)
  elif which == 'day_of_month':
    key = lambda k: DisplayTime(k, '%-d').rjust(2)
    today_day = datetime.datetime.now(TZ).day
    days = list(range(today_day, 0, -1))  # today down to the first of the month
    days.extend(range(31, today_day, -1))  # 31st of the month down to day after today
    days = [str(d).rjust(2) for d in days]
    sort = days
    title = 'Day of Month'

  elif which == 'speed':
    rounding = 25
    field = 'speed'
    min_value = min([f[field] for f in flights if f.get(field)])
    max_value = max([f[field] for f in flights if f.get(field)])
    digits = int(math.log10(max_value)) + 1

    key = lambda k: RoundAndFormat(k.get(field, KEY_NOT_PRESENT_STRING), rounding, digits)
    values = range(int(min_value), int(max_value) + 1)
    sort = sorted(list({RoundAndFormat(v, rounding, digits) for v in values}))
    title = 'Speed (kn)'

  elif which == 'aircraft_length':
    key = DisplayLength
    min_value = min([AircraftLength(f, default=float('inf')) for f in flights])
    max_value = max([AircraftLength(f, default=float('-inf')) for f in flights])
    sort = list(range(round(min_value), round(max_value) + 1))
    title = 'Plane Length (m)'

  elif which == 'vert_rate':
    rounding = 200
    field = 'vert_rate'
    min_value = min([f[field] for f in flights if f.get(field)])
    max_value = max([f[field] for f in flights if f.get(field)])
    digits = max(int(math.log10(max_value)), 1 + int(math.log10(abs(min_value)))) + 1

    key = lambda k: RoundAndFormat(k.get(field, KEY_NOT_PRESENT_STRING), rounding, digits)
    values = range(int(min_value), int(max_value) + 1)
    sort = sorted(list({RoundAndFormat(v, rounding, digits) for v in values}))
    title = 'Ascent Rate (%s)' % CLIMB_RATE_UNITS

  else:
    Log(
        'Histogram form has invalid value for which_histograms: %s' % which)
    return HistogramSettingsKeySortTitle(
        'destination', hours, flights, max_altitude=max_altitude)

  return (key, sort, title, hours)
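

# Illustrative sketch only (never called): how the 4-tuple returned above is typically
# consumed (see ImageHistograms and MessageboardHistograms below for the real call
# sites). Assumes each flight dictionary carries a 'now' epoch timestamp.
def _ExampleHistogramSettingsKeySortTitle(flights):
  key, sort, title, hours = HistogramSettingsKeySortTitle('hour', HOURS_IN_DAY, flights)
  # key is a callable label generator; sort is 'key'; title is 'Hour'.
  return ([key(f) for f in flights], sort, title, hours)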


def ImageHistograms(
    flights,
    which_histograms,
    how_much_history,
    filename_prefix=HISTOGRAM_IMAGE_PREFIX,
    filename_suffix=HISTOGRAM_IMAGE_SUFFIX,
    heartbeat=True):
  """Generates multiple split histogram images.

  Args:
    flights: the iterable of the raw data from which the histogram will be generated;
      each element of the iterable is a dictionary, that contains at least the key
      'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    which_histograms: string parameter indicating which histogram(s) to generate, which
      can be either the special string 'all', or a string linked to a specific
      histogram.
    how_much_history: string parameter taking a value among ['today', '24h', '7d', '30d'].

    filename_prefix: this string indicates the file path and name prefix for the images
      that are created. File names are created in the form [prefix]name.[suffix], i.e.:
      if the prefix is histogram_ and the suffix is png, then the file name might be
      histogram_aircraft.png.
    filename_suffix: see above; also interpreted by savefig to generate the correct
      format.
    heartbeat: boolean indicating whether we should log heartbeats between each histogram
      to make sure monitoring does not mistake this slow procedure for being hung; this
      should be set to false if this is called from outside of messageboard.main.


  Returns:
    List of 2-tuples of histograms generated, where the first element is the histogram
    identifier (i.e.: destination), and the second element is the filename (i.e.:
    histogram_destination.png).
  """
  hours = HistogramSettingsHours(how_much_history)

  histograms_to_generate = []
  if which_histograms in ['destination', 'all']:
    histograms_to_generate.append({'generate': 'destination'})
  if which_histograms in ['origin', 'all']:
    histograms_to_generate.append({'generate': 'origin'})
  if which_histograms in ['hour', 'all']:
    histograms_to_generate.append({'generate': 'hour'})
  if which_histograms in ['airline', 'all']:
    histograms_to_generate.append({'generate': 'airline', 'truncate': int(TRUNCATE/2)})
  if which_histograms in ['aircraft', 'all']:
    histograms_to_generate.append({'generate': 'aircraft'})
  if which_histograms in ['altitude', 'all']:
    histograms_to_generate.append({'generate': 'altitude', 'exhaustive': True})
  if which_histograms in ['bearing', 'all']:
    histograms_to_generate.append({'generate': 'bearing'})
  if which_histograms in ['distance', 'all']:
    histograms_to_generate.append({'generate': 'distance', 'exhaustive': True})
  if which_histograms in ['day_of_week', 'all']:
    histograms_to_generate.append({'generate': 'day_of_week'})
  if which_histograms in ['day_of_month', 'all']:
    histograms_to_generate.append({'generate': 'day_of_month'})

  if which_histograms in ['speed', 'all']:
    histograms_to_generate.append({'generate': 'speed', 'exhaustive': True})
  if which_histograms in ['aircraft_length', 'all']:
    histograms_to_generate.append({'generate': 'aircraft_length', 'exhaustive': True})
  if which_histograms in ['vert_rate', 'all']:
    histograms_to_generate.append({'generate': 'vert_rate', 'exhaustive': True})

  histograms_generated = []
  for histogram in histograms_to_generate:
    this_histogram = which_histograms
    if this_histogram == 'all':
      this_histogram = histogram['generate']
    (key, sort, title, updated_hours) = HistogramSettingsKeySortTitle(
        this_histogram, hours, flights)

    # if multiple histograms are getting generated, this might take a few seconds;
    # logging a heartbeat with each histogram ensures that monitoring.py does not
    # mistake this pause for a hang.
    if heartbeat:
      Heartbeat()

    CreateSingleHistogramChart(
        flights,
        key,
        sort,
        title,
        truncate=histogram.get('truncate', TRUNCATE),
        hours=updated_hours,
        exhaustive=histogram.get('exhaustive', False))

    filename = (
        WEBSERVER_IMAGE_RELATIVE_FOLDER +  # i.e.: images/
        filename_prefix +                  # i.e.: histogram_
        histogram['generate'] +            # i.e.: destination
        '.' + filename_suffix)             # i.e.: .png
    filepath = WEBSERVER_PATH + filename   # i.e.: /var/www/html/ + filename

    matplotlib.pyplot.savefig(filepath)
    matplotlib.pyplot.close()
    histograms_generated.append((histogram['generate'], filename))

  return histograms_generated


def MessageboardHistograms(
    flights,
    which_histograms,
    how_much_history,
    max_screens,
    data_summary,
    heartbeat=True):
  """Generates multiple split flap screen histograms.

  Args:
    flights: the iterable of the raw data from which the histogram will be generated;
      each element of the iterable is a dictionary, that contains at least the key
      'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    which_histograms: string parameter indicating which histogram(s) to generate, which
      can be either the special string 'all', or a string linked to a specific
      histogram.
    how_much_history: string parameter taking a value among ['today', '24h', '7d', '30d'].

    max_screens: string parameter taking a value among ['_1', '_2', '_5', 'all'].

    data_summary: parameter that evaluates to a boolean indicating whether the data
      summary screen in the histogram should be displayed.
    heartbeat: boolean indicating whether we should log heartbeats between each histogram
      to make sure monitoring does not mistake this slow procedure for being hung; this
      should be set to false if this is called from outside of messageboard.main.


  Returns:
    A list of 2-tuples (FLAG_MSG_HISTOGRAM, screen) - one per histogram screen - where
    each screen is the list of display lines produced by MessageboardHistogram.
  """
  messages = []

  hours = HistogramSettingsHours(how_much_history)
  screen_limit = HistogramSettingsScreens(max_screens)

  histograms_to_generate = []
  if which_histograms in ['destination', 'all']:
    histograms_to_generate.append({
        'generate': 'destination',
        'suppress_percent_sign': True,
        'columns': 3})
  if which_histograms in ['origin', 'all']:
    histograms_to_generate.append({
        'generate': 'origin',
        'suppress_percent_sign': True,
        'columns': 3})
  if which_histograms in ['hour', 'all']:
    histograms_to_generate.append({
        'generate': 'hour',




                            <----SKIPPED LINES---->




  if which_histograms in ['speed', 'all']:
    histograms_to_generate.append({
        'generate': 'speed',
        'columns': 2})
  if which_histograms in ['aircraft_length', 'all']:
    histograms_to_generate.append({
        'generate': 'aircraft_length',
        'columns': 3})
  if which_histograms in ['vert_rate', 'all']:
    histograms_to_generate.append({
        'generate': 'vert_rate',
        'columns': 2})

  for histogram in histograms_to_generate:
    this_histogram = which_histograms
    if this_histogram == 'all':
      this_histogram = histogram['generate']
    (key, sort, title, updated_hours) = HistogramSettingsKeySortTitle(
        this_histogram, hours, flights)

    # if multiple histograms are getting generated, this might take a few seconds;
    # logging a heartbeat with each histogram ensures that monitoring.py does not
    # mistake this pause for a hang.
    if heartbeat:
      Heartbeat()
    histogram_screens = MessageboardHistogram(
        flights,
        key,
        sort,
        title,
        screen_limit=screen_limit,
        columns=histogram.get('columns', 2),
        suppress_percent_sign=histogram.get('suppress_percent_sign', False),
        column_divider=histogram.get('column_divider', ' '),
        data_summary=data_summary,
        hours=updated_hours,
        absolute=histogram.get('absolute', False))
    messages.extend(histogram_screens)

  messages = [(FLAG_MSG_HISTOGRAM, m) for m in messages]

  return messages


def MessageboardHistogram(
    data,
    keyfunction,
    sort_type,
    title,
    screen_limit=1,
    columns=2,
    column_divider=' ',
    data_summary=False,
    hours=0,
    suppress_percent_sign=False,
    absolute=False):
  """Generates a text representation of one histogram that can be rendered on the display.

  Args:
    data: the iterable of the raw data from which the histogram will be generated;
      each element of the iterable is a dictionary, that contains at least the key
      'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    keyfunction: the function that determines how the key or label of the histogram
      should be generated; it is called for each element of the data iterable. For
      instance, to simply generate a histogram on the attribute 'heading',
      keyfunction would be lambda a: a['heading'].
    sort_type: determines how the keys (and the corresponding values) are sorted; see
      GenerateHistogramData docstring for details
    title: string title, potentially truncated to fit, to be displayed for the histogram

    screen_limit: maximum number of screens to be displayed for the histogram; a value
      of zero is interpreted to mean no limit on screens.
    columns: number of columns of data to be displayed for the histogram; note that the
      keys of the histogram may need to be truncated in length to fit the display
      as more columns are squeezed into the space
    column_divider: string for the character(s) to be used to divide the columns
    data_summary: boolean indicating whether to augment the title with a second header
      line about the data presented in the histogram
    hours: integer indicating the oldest data to be included in the histogram
    suppress_percent_sign: boolean indicating whether to suppress the percent sign
      in the data (but to add it to the title) to reduce the amount of string
      truncation potentially necessary for display of the keys
    absolute: boolean indicating whether the values should be presented as absolute
      totals rather than percentages; if True, suppress_percent_sign is irrelevant.

  Returns:
    A list of screens - one per splitflap screen needed - where each screen is a list
    of printable display-line strings.
  """
  title_lines = 1
  if data_summary:
    title_lines += 1
  available_entries_per_screen = (SPLITFLAP_LINE_COUNT - title_lines) * columns
  available_entries_total = available_entries_per_screen * screen_limit
  (values, keys, filtered_data) = GenerateHistogramData(
      data, keyfunction, sort_type, truncate=available_entries_total, hours=hours)

  # The filtering may have removed all flight data, or there may have been none to
  # start with.
  if not filtered_data:
    return []

  screen_count = math.ceil(len(keys) / available_entries_per_screen)

  column_width = int(
      (SPLITFLAP_CHARS_PER_LINE - len(column_divider)*(columns - 1)) / columns)
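  # Any characters left over after dividing the line into equal-width columns are
  # absorbed by widening the divider. (The division below assumes columns >= 2.)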
  leftover_space = SPLITFLAP_CHARS_PER_LINE - (
      column_width*columns + len(column_divider)*(columns - 1))
  extra_divider_chars = math.floor(leftover_space / (columns - 1))
  column_divider = column_divider.ljust(len(column_divider) + extra_divider_chars)


  # i.e.: ' 10%' or ' 10', depending on suppress_percent_sign
  printed_percent_sign = ''
  if absolute:
    digits = math.floor(math.log10(max(values))) + 1
    value_size = digits + 1
    augment_title_units = ' #'
    format_string = '%%%dd' % digits
  else:
    value_size = 3
    augment_title_units = ' %'
    if not suppress_percent_sign:
      value_size += 1
      printed_percent_sign = '%'
      augment_title_units = ''
  column_key_width = column_width - value_size

  total = sum(values)

  if data_summary:
    if hours:
      hours_of_data = min(hours, DataHistoryHours(data))
    else:
      hours_of_data = DataHistoryHours(data)
    time_horizon_text = 'Last %s' % SecondsToDdHh(hours_of_data * SECONDS_IN_HOUR)
    summary_text = '%s (n=%d)' % (time_horizon_text, sum(values))
    summary_text = summary_text.center(SPLITFLAP_CHARS_PER_LINE)

  split_flap_boards = []
  for screen in range(screen_count):
    if screen_count == 1:
      counter = ''
    else:
      counter = ' %d/%d' % (screen+1, screen_count)
    screen_title = '%s%s%s' % (
        title[:SPLITFLAP_CHARS_PER_LINE - len(counter) - len(augment_title_units)],
        augment_title_units, counter)
    screen_title = screen_title.center(SPLITFLAP_CHARS_PER_LINE)
    start_index = screen*available_entries_per_screen
    end_index = min((screen+1)*available_entries_per_screen-1, len(keys)-1)
    number_of_entries = end_index - start_index + 1
    number_of_lines = math.ceil(number_of_entries / columns)

    lines = []

    lines.append(screen_title.upper())
    if data_summary:
      lines.append(summary_text.upper())
    for line_index in range(number_of_lines):
      key_value = []
      for column_index in range(columns):
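        # Entries fill the screen column-major: each column holds number_of_lines
        # consecutive entries, so this cell's entry is offset by
        # column_index*number_of_lines.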
        index = start_index + column_index*number_of_lines + line_index
        if index <= end_index:
          if absolute:
            value_string = format_string % values[index]
          else:
            # If the % is >=1%, display right-justified 2 digit percent, i.e. ' 5%'
            # Otherwise, if it rounds to at least 0.1%, display i.e. '.5%'
            if values[index]/total*100 >= 0.95:
              value_string = '%2d' % round(values[index]/total*100)
            elif round(values[index]/total*1000)/10 >= 0.1:
              value_string = ('%.1f' % (round(values[index]/total*1000)/10))[1:]
            else:
              value_string = ' 0'
          key_value.append('%s %s%s' % (
              str(keys[index])[:column_key_width].ljust(column_key_width),
              value_string,
              printed_percent_sign))

      line = (column_divider.join(key_value)).upper()
      lines.append(line)

    split_flap_boards.append(lines)

  return split_flap_boards
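

# Illustrative restatement (never called) of the value-formatting rules used above:
# shares that round to at least 1% print as right-justified integers (' 5'), smaller
# non-zero shares print as tenths ('.5'), and anything below that prints as ' 0'.
def _ExamplePercentFormat(value, total):
  if value / total * 100 >= 0.95:
    return '%2d' % round(value / total * 100)
  if round(value / total * 1000) / 10 >= 0.1:
    return ('%.1f' % (round(value / total * 1000) / 10))[1:]
  return ' 0'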


def TriggerHistograms(flights, histogram_settings, heartbeat=True):
  """Triggers the text-based or web-based histograms.

  Based on the histogram settings, determines whether to generate text or image histograms
  (or both). For image histograms, also updates the histogram html page to reference the
  newly generated image files and removes the now-obsolete image files.


  Args:
    flights: List of flight attribute dictionaries.
    histogram_settings: Dictionary of histogram parameters.
    heartbeat: boolean indicating whether we should log heartbeats between each histogram
      to make sure monitoring does not mistake this slow procedure for being hung; this
      should be set to false if this is called from outside of messageboard.main.


  Returns:
    List of histogram messages, if text-based histograms are selected; empty list
    otherwise.
  """
  histogram_messages = []

  if histogram_settings['type'] in ('messageboard', 'both'):
    histogram_messages = MessageboardHistograms(
        flights,
        histogram_settings['histogram'],
        histogram_settings['histogram_history'],
        histogram_settings['histogram_max_screens'],
        histogram_settings.get('histogram_data_summary', False),
        heartbeat=heartbeat)
  if histogram_settings['type'] in ('images', 'both'):

    # Since Google Chrome seems to ignore all instructions to not cache, we need
    # to make sure we do not reuse file names - hence the epoch_string - and then
    # we need to 1) update the histograms.html file with the correct file links, and
    # 2) delete the images that are now obsolete.
    epoch_string = '%d_' % round(time.time())

    generated_histograms = ImageHistograms(
        flights,
        histogram_settings['histogram'],
        histogram_settings['histogram_history'],
        filename_prefix=HISTOGRAM_IMAGE_PREFIX + epoch_string,
        heartbeat=heartbeat)
    html_lines = ReadFile(HISTOGRAM_IMAGE_HTML).split('\n')
    replaced_images = []
    for identifier, new_filename in generated_histograms:
      # For each histogram, find the html_line with the matching id.
      # Example line: <img id="destination" src="images/histogram_destination.png"><p>
      identifier = '"%s"' % identifier  # add quotation marks to ensure a complete match
      n, line = None, None  # addresses pylint complaint
      found = False
      for n, line in enumerate(html_lines):
        if identifier in line:
          found = True
          break
        found = False
      if found:
        start_char = line.find(WEBSERVER_IMAGE_RELATIVE_FOLDER)
        end_character = (
            line.find(HISTOGRAM_IMAGE_SUFFIX, start_char) + len(HISTOGRAM_IMAGE_SUFFIX))

        old_filename = line[start_char:end_character]
        line = line.replace(old_filename, new_filename)
        html_lines[n] = line
        replaced_images.append(old_filename)
    new_html = '\n'.join(html_lines)
    WriteFile(HISTOGRAM_IMAGE_HTML, new_html)

    # Remove those obsoleted files
    for f in replaced_images:
      RemoveFile(WEBSERVER_PATH + f)

  return histogram_messages


def SaveFlightsByAltitudeDistanceCSV(
    flights,
    max_days=0,
    filename='flights_by_alt_dist.csv',
    precision=100):
  """Extracts hourly histogram into text file for a variety of altitudes and distances.

  Generates a csv with 26 columns:
  - col#1: altitude (in feet)
  - col#2: distance (in feet)
  - cols#3-26: hour of the day

  The first row is a header row; subsequent rows list the number of flights that have
  occurred in the last max_days with an altitude and min distance at or below those
  identified in the first two columns. Each row increments altitude or distance by
  precision feet, up to the max determined by the max altitude and max distance amongst
  all the flights.


  Args:
    flights: list of the flights.
    max_days: maximum number of days of history to include; if zero, no time limit
      is applied.
    filename: file into which to save the csv.
    precision: number of feet to increment the altitude or distance.
  """

  max_altitude = int(round(max([flight.get('altitude', -1) for flight in flights])))

  max_distance = int(round(max([flight.get('min_feet', -1) for flight in flights])))
  min_altitude = int(round(
      min([flight.get('altitude', float('inf')) for flight in flights])))
  min_distance = int(round(
      min([flight.get('min_feet', float('inf')) for flight in flights])))
  max_hours = max_days * HOURS_IN_DAY

  lines = []
  now = datetime.datetime.now(TZ)


  header_elements = ['altitude_feet', 'min_distance_feet', *[str(h) for h in HOURS]]
  line = ','.join(header_elements)
  lines.append(line)

  altitudes = list(range(
      precision * int(min_altitude / precision),
      precision * (int(max_altitude / precision) + 2),
      precision))
  distances = list(range(
      precision * int(min_distance / precision),
      precision * (int(max_distance / precision) + 2),
      precision))

  # Flight counts where either the altitude or min_feet is unknown
  line_elements = ['undefined', 'undefined']
  for hour in HOURS:
    line_elements.append(str(len([
        1 for f in flights if
        (not max_hours or HoursSinceFlight(now, f['now']) < max_hours) and
        (f.get('altitude') is None or f.get('min_feet') is None) and
        HourString(f) == hour])))
  line = ','.join(line_elements)
  lines.append(line)

  d = {}
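  # Build cumulative counts: each flight increments every (altitude, distance, hour)
  # bucket whose thresholds are at or above the flight's own altitude and min distance,
  # so each output row counts flights at or below that altitude AND distance.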
  for flight in flights:
    if max_hours and HoursSinceFlight(now, flight['now']) >= max_hours:
      continue
    if flight.get('altitude') is not None and flight.get('min_feet') is not None:
      this_altitude = flight['altitude']
      this_distance = flight['min_feet']
      hour = HourString(flight)
      for altitude in [a for a in altitudes if a >= this_altitude]:
        for distance in [dist for dist in distances if dist >= this_distance]:
          d[(altitude, distance, hour)] = d.get((altitude, distance, hour), 0) + 1

  for altitude in altitudes:
    for distance in distances:
      line_elements = [str(altitude), str(distance)]
      for hour in HOURS:
        line_elements.append(str(d.get((altitude, distance, hour), 0)))
      line = ','.join(line_elements)
      lines.append(line)
  try:
    with open(filename, 'w') as f:
      for line in lines:
        f.write(line+'\n')
  except IOError:
    Log('Unable to write hourly histogram data file ' + filename)


def SaveFlightsToCSV(flights=None, filename='flights.csv'):
  """Saves all the attributes about the flight to a CSV, including on-the-fly attributes.

  Args:
    flights: list of flight attribute dictionaries; if not provided, loaded from
      PICKLE_FLIGHTS.
    filename: name of desired csv file; if not provided, defaults to flights.csv.

  """
  if not flights:
    flights = UnpickleObjectFromFile(PICKLE_FLIGHTS, True)

  print('='*80)
  print('Number of flights to save to %s: %d' % (filename, len(flights)))

  # list of functions in 2-tuple, where second element is a function that generates
  # something about the flight, and the first element is the name to give that value
  # when extended into the flight definition
  functions = [
      ('display_flight_number', DisplayFlightNumber),
      ('display_airline', DisplayAirline),
      ('display_aircraft', DisplayAircraft),
      ('display_origin_iata', DisplayOriginIata),
      ('display_destination_iata', DisplayDestinationIata),
      ('display_origin_friendly', DisplayOriginFriendly),
      ('display_destination_friendly', DisplayDestinationFriendly),
      ('display_origin_destination_pair', DisplayOriginDestinationPair),
      ('display_seconds_remaining', DisplaySecondsRemaining),
      ('now_datetime', DisplayTime),
      ('now_date', lambda flight: DisplayTime(flight, '%x')),
      ('now_time', lambda flight: DisplayTime(flight, '%X')),
      ('aircraft_length_meters', AircraftLength)]

  for function in functions:
    for flight in flights:
      flight[function[0]] = function[1](flight)

  # these functions return dictionary of values
  functions = [
      lambda f: FlightAnglesSecondsElapsed(f, 0, '_00s'),
      lambda f: FlightAnglesSecondsElapsed(f, 10, '_10s'),
      lambda f: FlightAnglesSecondsElapsed(f, 20, '_20s'),
      DisplayDepartureTimes]
  for function in functions:
    for flight in flights:
      flight.update(function(flight))

  all_keys = set()
  for f in flights:
    all_keys.update(f.keys())
  all_keys = list(all_keys)
  all_keys.sort()

  keys_logical_order = [
      'now_date', 'now_time', 'now_datetime', 'now', 'flight_number', 'squawk',
      'origin_iata', 'destination_iata', 'altitude', 'min_feet', 'vert_rate', 'speed',
      'distance', 'delay_seconds', 'airline_call_sign', 'aircraft_type_friendly',
      'azimuth_degrees_00s', 'azimuth_degrees_10s', 'azimuth_degrees_20s',
      'altitude_degrees_00s', 'altitude_degrees_10s', 'altitude_degrees_20s',
      'ground_distance_feet_00s', 'ground_distance_feet_10s', 'ground_distance_feet_20s',
      'crow_distance_feet_00s', 'crow_distance_feet_10s', 'crow_distance_feet_20s']

  for key in all_keys:
    if key not in keys_logical_order:
      keys_logical_order.append(key)

  max_length = 32000
  def ExcelFormatValue(v):
    s = str(v)
    if len(s) > max_length:  # maximum Excel cell length is 32767 characters
      s = '%d character field truncated to %d characters: %s' % (
          len(s), max_length, s[:max_length])
    return s

  with open(filename, 'w') as f:
    f.write(','.join(keys_logical_order)+'\n')
    for flight in flights:
      f.write(','.join(
          ['"'+ExcelFormatValue(flight.get(k))+'"' for k in keys_logical_order])+'\n')
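

# A minimal alternative sketch (not used above): Python's standard csv module handles
# quoting and embedded commas/quotes automatically. Shown only for comparison with the
# manual quoting in SaveFlightsToCSV; the row dictionaries and keys are supplied by the
# caller.
def _ExampleWriteCsvWithCsvModule(rows, keys, filename='flights_alt.csv'):
  import csv  # local import keeps this illustrative sketch self-contained
  with open(filename, 'w', newline='') as f:
    writer = csv.DictWriter(f, fieldnames=keys, extrasaction='ignore')
    writer.writeheader()
    for row in rows:
      writer.writerow(row)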


def SimulationSetup():
  """Updates global variable file names and loads in JSON data for simulation runs."""
  # Clear file so that shell tail -f process can continue to point to same file
  def ClearFile(filename):
    if os.path.exists(filename):
      with open(filename, 'w') as f:
        f.write('')

  global SIMULATION
  SIMULATION = True

  global DUMP_JSONS
  DUMP_JSONS = UnpickleObjectFromFile(PICKLE_DUMP_JSON_FILE, True)

  global FA_JSONS
  FA_JSONS = UnpickleObjectFromFile(PICKLE_FA_JSON_FILE, True)

  global ALL_MESSAGE_FILE
  ALL_MESSAGE_FILE = PrependFileName(ALL_MESSAGE_FILE, SIMULATION_PREFIX)
  ClearFile(ALL_MESSAGE_FILE)

  global LOGFILE
  LOGFILE = PrependFileName(LOGFILE, SIMULATION_PREFIX)
  ClearFile(LOGFILE)

  global ROLLING_LOGFILE
  ROLLING_LOGFILE = PrependFileName(ROLLING_LOGFILE, SIMULATION_PREFIX)
  ClearFile(ROLLING_LOGFILE)

  global ROLLING_MESSAGE_FILE
  ROLLING_MESSAGE_FILE = PrependFileName(ROLLING_MESSAGE_FILE, SIMULATION_PREFIX)

  ClearFile(ROLLING_MESSAGE_FILE)

  global PICKLE_FLIGHTS
  PICKLE_FLIGHTS = PrependFileName(PICKLE_FLIGHTS, SIMULATION_PREFIX)
  filenames = UnpickleObjectFromFile(PICKLE_FLIGHTS, True, max_days=None, filenames=True)

  for file in filenames:
    ClearFile(file)

  global PICKLE_DASHBOARD
  PICKLE_DASHBOARD = PrependFileName(PICKLE_DASHBOARD, SIMULATION_PREFIX)
  filenames = UnpickleObjectFromFile(PICKLE_DASHBOARD, True, max_days=None, filenames=True)

  for file in filenames:
    ClearFile(file)


def SimulationEnd(message_queue, flights, screens):
  """Clears message buffer, exercises histograms, and other misc test & status code.

  Args:
    message_queue: List of flight messages that have not yet been printed.
    flights: List of flights dictionaries.
    screens: List of past screens displayed to splitflap screen.
  """
  if flights:
    histogram = {
        'type': 'both',
        'histogram':'all',
        'histogram_history':'30d',
        'histogram_max_screens': '_2',
        'histogram_data_summary': 'on'}
    message_queue.extend(TriggerHistograms(flights, histogram))

    while message_queue:
      ManageMessageQueue(message_queue, 0, {'setting_delay': 0}, screens)
    SaveFlightsByAltitudeDistanceCSV(flights)
    SaveFlightsToCSV(flights)

  # repickle to a new .pk with full track info
  file_parts = PICKLE_FLIGHTS.split('.')
  new_pickle_file = '.'.join([file_parts[0] + '_full_path', file_parts[1]])
  RemoveFile(new_pickle_file)
  for flight in flights:
    PickleObjectToFile(flight, new_pickle_file, False)

  print('Simulation complete after %s dump json messages processed' % len(DUMP_JSONS))



def SimulationSlowdownNearFlight(flights, persistent_nearby_aircraft):
  """Slows down simulations when a reported-upon flight is nearby."""
  if flights and flights[-1].get('flight_number') in persistent_nearby_aircraft:
    time.sleep(arduino.WRITE_DELAY_TIME)


def DumpJsonChanges():
  """Identifies if sequential dump json files changes, for simulation optimization.

  If we are logging the radio output faster than it is updating, then there will be
  sequential log files in the json list that are identical; we only need to process the
  first of these, and can ignore subsequent ones, without any change of output in the
  simulation results. This function identifies whether the current active json changed
  from the prior one.

  Returns:
    Boolean - True if different (and processing needed), False if identical
  """
  if SIMULATION_COUNTER == 0:
    return True
  (this_json, unused_now) = DUMP_JSONS[SIMULATION_COUNTER]
  (last_json, unused_now) = DUMP_JSONS[SIMULATION_COUNTER - 1]
  return this_json != last_json



def CheckRebootNeeded(startup_time, message_queue, json_desc_dict, configuration):
  """Reboot based on duration instance has been running.

  Reboot needed in one of the following situations:
  - All quiet: if running for over 24 hours and all is quiet (message queue empty and
    no planes in radio).
  - Mostly quiet: if running for over 36 hours and message queue is empty and it's 3a.

  - Reboot requested via html form.

  Also checks if reset requested via html form.
  """
  reboot = False
  global SHUTDOWN_SIGNAL

  running_hours = (time.time() - startup_time) / SECONDS_IN_HOUR

  if (
      running_hours >= HOURS_IN_DAY and
      not message_queue and
      not json_desc_dict.get('radio_range_flights')):
    msg = 'All quiet reboot needed after running for %.2f hours' % running_hours
    SHUTDOWN_SIGNAL = msg
    Log(msg)
    reboot = True

  if (
      running_hours > HOURS_IN_DAY * 1.5 and
      not message_queue and
      int(EpochDisplayTime(time.time(), '%-H')) >= 3):
    msg = 'Early morning reboot needed after running for %.2f hours' % running_hours

    SHUTDOWN_SIGNAL = msg
    Log(msg)
    reboot = True

  if 'soft_reboot' in configuration:
    msg = 'Soft reboot requested via web form'
    SHUTDOWN_SIGNAL = msg
    Log(msg)
    reboot = True
    RemoveSetting(configuration, 'soft_reboot')

  if 'end_process' in configuration:
    msg = 'Process end requested via web form'
    SHUTDOWN_SIGNAL = msg
    Log(msg)
    RemoveSetting(configuration, 'end_process')

  return reboot


def InterruptRebootFromButton():
  """Sets flag so that the main loop will terminate when it completes the iteration.

  This function is only triggered by a physical button press.
  """
  msg = ('Soft reboot requested by button push')
  global SHUTDOWN_SIGNAL
  SHUTDOWN_SIGNAL = msg

  global REBOOT_SIGNAL
  REBOOT_SIGNAL = True

  RPi.GPIO.output(GPIO_SOFT_RESET[1], False)  # signal that reset received
  Log(msg)


def InterruptShutdownFromSignal(signalNumber, unused_frame):
  """Sets flag so that the main loop will terminate when it completes the iteration.

  The function signature is defined by the python language - i.e.: these two variables
  are passed automatically for registered signals.  This function is only triggered by an
  interrupt signal.
  """
  msg = '%d received termination signal %d (%s)' % (
      os.getpid(), signalNumber,
      signal.Signals(signalNumber).name)  # pylint: disable=E1101
  global SHUTDOWN_SIGNAL
  SHUTDOWN_SIGNAL = msg
  Log(msg)


def PerformGracefulShutdown(queues, shutdown, reboot):
  """Complete the graceful shutdown process by cleaning up.

  Args:
    queues: iterable of queues shared with child processes to be closed
    shutdown: tuple of shared flags with child processes to initiate shutdown in children
    reboot: boolean indicating whether we should trigger a reboot
  """
  reboot_msg = ''
  if reboot:
    reboot_msg = ' and rebooting'
  Log('Shutting down self (%d)%s' % (os.getpid(), reboot_msg))

  for q in queues:
    q.close()
  for v in shutdown:  # send the shutdown signal to child processes
    v.value = 1
  if RASPBERRY_PI:
    RPi.GPIO.cleanup()

  UpdateDashboard(True, failure_message=SHUTDOWN_SIGNAL)

  if reboot or REBOOT_SIGNAL:
    time.sleep(10)  # wait 10 seconds for children to shut down as well
    os.system('sudo reboot')
  sys.exit()


def FindRunningParents():
  """Returns list of proc ids of processes with identically-named python file running.

  In case there are multiple child processes spawned with the same name, such as via
  multiprocessing, this will only return the parent ids (since a killed child process
  will likely just be respawned).
  """
  this_process_id = os.getpid()
  this_process_name = os.path.basename(sys.argv[0])
  pids = []
  pid_pairs = []
  for proc in psutil.process_iter():
    try:
      # Check if process name contains this_process_name.
      commands = proc.as_dict(attrs=['cmdline', 'pid', 'ppid'])
      if commands['cmdline']:
        command_running = any(
            [this_process_name in s for s in commands['cmdline']])
        if command_running:
          pids.append(commands['pid'])
          pid_pairs.append((commands['pid'], commands['ppid']))
    except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
      pass

  # Exclude those pids that have a parent that is also a pid
  final_pids = []
  for pid_pair in pid_pairs:
    if pid_pair[1] not in pids:
      final_pids.append(pid_pair[0])
  # Exclude this pid
  final_pids.remove(this_process_id)

  return sorted(final_pids)


def WaitUntilKillComplete(already_running_ids, max_seconds=30):
  """Prevents main loop from starting until other instance, if any, completes shutdown.

  A termination command sent to any other identically-named process may take a few
  seconds to complete because that other process is allowed to finish the current iteration
  in the main loop. Typically, that iteration in the other process will complete before
  this process finishes the initialization and starts. But in limited scenarios, that might
  not happen, such as if the other process is in the middle of generating a lot of
  histogram images, or if this process does not have much data to load.


  This function ensures that this process does not start the main loop until the other
  process terminates. If it detects that the other process is still running, it waits
  for up to max_seconds. If the other process does not terminate before that time limit,
  then this restarts the RPi.
  """
  still_running_ids = FindRunningParents()
  if not still_running_ids:
    return
  # still_running_ids should at least be a subset of already_running_ids

  new_processes = sorted(list(set(still_running_ids).difference(set(already_running_ids))))
  if new_processes:
    # uh-oh! at least one new started up in the interim? exit!
    Log('Kill signal sent to %s from this process %s, but it seems like there is '
        'at least one new process running, %s!' % (
            str(already_running_ids), str(os.getpid()), str(new_processes)))
    sys.exit()
  # phew - they're a subset; so they probably got the signal; just wait a few secs

  elif still_running_ids:
    n = 0
    running_parents = FindRunningParents()
    while running_parents:
      if n == max_seconds:
        Log('Kill signal sent from this process %d to %s, but %s still '
            'running after waiting cume %d seconds; rebooting' % (
                os.getpid(), str(already_running_ids), str(running_parents), n+1))
        PerformGracefulShutdown((), (), True)
      if not n % 3:
        Log('Kill signal sent from this process %d to %s, but %s still '
            'running after waiting cume %d seconds' % (
                os.getpid(), str(already_running_ids), str(running_parents), n))
      n += 1
      time.sleep(1)
      running_parents = FindRunningParents()


def InitArduinoVariables():
  """Initializes and starts the two arduino threads with new shared-memory queues."""
  to_remote_q = multiprocessing.Queue()
  to_servo_q = multiprocessing.Queue()
  to_main_q = multiprocessing.Queue()
  shutdown_remote = multiprocessing.Value('i')  # shared flag to initiate shutdown
  shutdown_servo = multiprocessing.Value('i')  # shared flag to initiate shutdown

  shutdown = (shutdown_remote, shutdown_servo)

  return (to_remote_q, to_servo_q, to_main_q, shutdown)


def RefreshArduinos(
    remote, servo,
    to_remote_q, to_servo_q, to_main_q, shutdown,
    flights, json_desc_dict, configuration, screen_history):
  """Ensure arduinos are running, restarting if needed, & send them the current message.

  Args:
    remote: Running remote Arduino process (or if not previously running, None value)

    servo: Running servo Arduino process (or if not previously running, None value)

    to_remote_q: Multi-processing messaging queue for one-way comm from messageboard
      to remote arduino.
    to_servo_q: Multi-processing messaging queue for one-way comm from messageboard
      to servo arduino.
    to_main_q: Multi-processing messaging queue for one-way comm from arduinos to
      messageboard.
    shutdown: 2-tuple of multiprocessing flags (integers) used to signal to respective
      arduinos when they should shut down.
    flights: List of all flights.
    json_desc_dict: Dictionary of additional attributes about radio.
    configuration: Dictionary of configuration settings.
    screen_history: List of past screens displayed to splitflap screen.

  Returns:
    A 2-tuple of the remote and servo running processes.
  """
  remote, servo = ValidateArduinosRunning(
      remote, servo, to_remote_q, to_servo_q, to_main_q, shutdown, configuration)


  EnqueueArduinos(
      flights, json_desc_dict, configuration, to_servo_q, to_remote_q, screen_history)

  return remote, servo


def ValidateArduinosRunning(
    remote, servo, to_remote_q, to_servo_q, to_main_q, shutdown, configuration):
  """Ensures that each of the enabled arduinos are running, restarting if needed.

  Args:
    remote: Running remote Arduino process (or if not previously running, None value)

    servo: Running servo Arduino process (or if not previously running, None value)

    to_remote_q: Multi-processing messaging queue for one-way comm from messageboard
      to remote arduino.
    to_servo_q: Multi-processing messaging queue for one-way comm from messageboard
      to servo arduino.
    to_main_q: Multi-processing messaging queue for one-way comm from arduinos to
      messageboard.
    shutdown: 2-tuple of multiprocessing flags (integers) used to signal to respective
      arduinos when they should shut down.
    configuration: Dictionary of configuration settings.

  Returns:
    A 2-tuple of the remote and servo running processes.
  """
  remote = ValidateSingleRunning(
      'enable_remote' in configuration,
      arduino.RemoteMain, p=remote,
      args=(to_remote_q, to_main_q, shutdown[0]))
  servo = ValidateSingleRunning(
      'enable_servos' in configuration,
      arduino.ServoMain, p=servo,
      args=(to_servo_q, to_main_q, shutdown[1]))
  return remote, servo


def ValidateSingleRunning(enabled, start_function, p=None, args=()):
  """Restarts a new instance of multiprocessing process if not running

  Args:
    enabled: Boolean indicating whether this arduino is enabled in the settings file.

    start_function: Function that will be started (with the given args) if a restart
      is needed.
    p: The existing process - if any - that should be checked to make sure it is alive.
      If not passed, or if passed but not alive, it is restarted.
    args: A tuple of function parameters to pass unmodified to start_function.

  Returns:
    The running process - either the same one that was passed in, or a new one if a
    restart was needed.
  """
  if not SHUTDOWN_SIGNAL:
    if not enabled:
      if p is not None:  # must have just requested a disabling of single instance
        args[2].value = 1  # trigger a shutdown on the single instance
      return None

    if p is None or not p.is_alive():
      if p is None:
        Log('Process for %s starting for first time' % str(start_function))
      elif VERBOSE:
        Log('Process (%s) for %s died; restarting' % (str(p), str(start_function)))

      args[2].value = 0  # (re)set shutdown flag to allow function to run
      p = multiprocessing.Process(target=start_function, args=args)
      p.daemon = True  # TODO: perhaps value of false will address correlated BT failures?
      p.start()

  return p


def LastFlightAvailable(flights, screen_history):
  """Returns True if last message sent to splitflap is not the last flight; else False."""
  if not screen_history:
    return False

  last_message_tuple = screen_history[-1]
  last_message_type = last_message_tuple[0]
  if last_message_type == FLAG_MSG_FLIGHT:
    last_message_flight = last_message_tuple[2]
    if SameFlight(last_message_flight, flights[-1]):
      return False  # already displaying the last flight!
  return True


def EnqueueArduinos(
    flights, json_desc_dict, configuration, to_servo_q, to_remote_q, screen_history):
  """Send latest data to arduinos via their shared-memory queues.

  Args:
    flights: List of all flights.
    json_desc_dict: Dictionary of additional attributes about radio.
    configuration: Dictionary of configuration settings.
    to_servo_q: Multi-processing messaging queue for one-way comm from messageboard
      to servo arduino.
    to_remote_q: Multi-processing messaging queue for one-way comm from messageboard
      to remote arduino.
    screen_history: List of past screens displayed to splitflap screen.
  """
  last_flight = {}
  if flights:
    last_flight = flights[-1]

  if SIMULATION:
    now = json_desc_dict['now']
  else:
    now = time.time()

  additional_attributes = {}

  today = EpochDisplayTime(now, '%x')

  flight_count_today = len([1 for f in flights if DisplayTime(f, '%x') == today])
  additional_attributes['flight_count_today'] = flight_count_today

  additional_attributes['simulation'] = SIMULATION

  additional_attributes['last_flight_available'] = LastFlightAvailable(
      flights, screen_history)

  message = (last_flight, json_desc_dict, configuration, additional_attributes)
  try:
    if 'enable_servos' in configuration:
      to_servo_q.put(message, block=False)
    if 'enable_remote' in configuration:
      to_remote_q.put(message, block=False)
  except queue.Full:
    msg = 'Message queues to Arduinos full - trigger shutdown'
    Log(msg)
    global SHUTDOWN_SIGNAL
    SHUTDOWN_SIGNAL = msg


def ProcessArduinoCommmands(q, flights, configuration, message_queue, next_message_time):
  """Executes the commands enqueued by the arduinos.

  The commands on the queue q are of the form (command, args), where command is an
  identifier indicating the type of instruction, and args is a possibly empty tuple
  of arguments that are passed through to the handler for that command.

  Possible commands are updating a GPIO pin, replaying a recent flight to the board,
  generating a histogram, or updating the saved settings.

  Args:
    q: multiprocessing queue provided to both the Arduino processes
    flights: list of flights
    configuration: dictionary of settings
    message_queue: current message queue
    next_message_time: epoch of the next message to display to screen

  Returns:
    A 2-tuple of the (possibly-updated) message_queue and next_message_time.
  """
  while not q.empty():
    command, args = q.get()
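    # Illustrative queue entries, with shapes inferred from the handlers below (the
    # specific values are hypothetical):
    #   ('pin', (GPIO_ERROR_ARDUINO_REMOTE_CONNECTION, True))
    #   ('replay', ())
    #   ('histogram', ('aircraft', '7d'))
    #   ('update_configuration', ('setting_max_altitude=20000;',))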

    if command == 'pin':
      UpdateStatusLight(*args)

    elif command == 'replay':
      # a command might request info about a flight to be (re)displayed, irrespective of
      # whether the screen is on; if so, let's put that message at the front of the
      # message queue, and delete any subsequent insight messages in the queue
      replayed_flight_index = IdentifyFlightDisplayed(
          flights, configuration, display_all_hours=True)
      if replayed_flight_index is not None:
        message_queue = DeleteMessageTypes(message_queue, (FLAG_MSG_INSIGHT, ))
        replayed_flight = flights[replayed_flight_index]
        flight_message = CreateMessageAboutFlight(replayed_flight)

        message_queue.insert(0, (FLAG_MSG_FLIGHT, flight_message, replayed_flight))
        next_message_time = time.time()

    elif command == 'histogram':
      if not flights:
        Log('Histogram requested by remote %s but no flights in memory' % str(args))

      else:
        histogram_type, histogram_history = args
        message_queue.extend(MessageboardHistograms(
            flights,
            histogram_type,
            histogram_history,
            '_1',
            False))

    elif command == 'update_configuration':
      updated_settings = args[0]
      Log('Updated settings received from arduino: %s' % updated_settings)
      WriteFile(CONFIG_FILE, updated_settings)

    else:
      Log('Improper command from arduinos: %s / %s' % (command, args))

  return message_queue, next_message_time


def PublishMessage(
    s,
    subscription_id='12fd73cd-75ef-4cae-bbbf-29b2678692c1',
    key='c5f62d44-e30d-4c43-a43e-d4f65f4eb399',
    secret='b00aeb24-72f3-467c-aad2-82ba5e5266ca',
    timeout=3):
  """Publishes a text string to a Vestaboard.

  The message is pushed to the Vestaboard splitflap display by way of its web services;
  see https://docs.vestaboard.com/introduction for more details.

  Args:
    s: String to publish.
    subscription_id: string subscription id from Vestaboard.
    key: string key from Vestaboard.
    secret: string secret from Vestaboard.
    timeout: Max duration in seconds to wait for the whole request to complete.
  """
  error_code = False
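  # For orientation only, the pycurl calls below amount to roughly the following
  # (a hypothetical equivalent using the requests library, which this module does
  # not import):
  #   requests.post(
  #       'https://platform.vestaboard.com/subscriptions/%s/message' % subscription_id,
  #       headers={'X-Vestaboard-Api-Key': key, 'X-Vestaboard-Api-Secret': secret},
  #       json={'text': s}, timeout=timeout)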
  # See https://docs.vestaboard.com/characters: any chars needing to be replaced
  special_characters = ((u'\u00b0', '{62}'),)  # degree symbol '°'

  for special_character in special_characters:
    s = s.replace(*(special_character))
  curl = pycurl.Curl()

  # See https://stackoverflow.com/questions/31826814/curl-post-request-into-pycurl-code

  # Set URL value
  curl.setopt(
      pycurl.URL,
      'https://platform.vestaboard.com/subscriptions/%s/message' % subscription_id)

  curl.setopt(pycurl.HTTPHEADER, [
      'X-Vestaboard-Api-Key:%s' % key, 'X-Vestaboard-Api-Secret:%s' % secret])
  curl.setopt(pycurl.TIMEOUT_MS, timeout*1000)
  curl.setopt(pycurl.POST, 1)

  curl.setopt(pycurl.WRITEFUNCTION, lambda x: None) # to keep stdout clean

  # preparing body the way pycurl.READDATA wants it
  body_as_dict = {'text': s}
  body_as_json_string = json.dumps(body_as_dict) # dict to json
  body_as_file_object = io.StringIO(body_as_json_string)

  # prepare and send. See also: pycurl.READFUNCTION to pass function instead
  curl.setopt(pycurl.READDATA, body_as_file_object)
  curl.setopt(pycurl.POSTFIELDSIZE, len(body_as_json_string))
  failure_message = ''
  try:
    curl.perform()
  except pycurl.error as e:
    failure_message = 'curl.perform() failed with message %s' % e
    Log('curl.perform() failed with message %s' % e)
    error_code = True
  else:
    # check the HTTP response code to catch server-side failures
    status_code = curl.getinfo(pycurl.RESPONSE_CODE)
    if status_code != 200:
      Log('Server returned HTTP status code %d for message %s' % (status_code, s))

      error_code = True

  curl.close()

  UpdateStatusLight(GPIO_ERROR_VESTABOARD_CONNECTION, error_code, failure_message)


def TruncateEscapedLine(s):
  """Formats a single line of the personal message for the Vestaboard.

  The Vestaboard has line length limitations, a limited character set, and escape
  characters. This function:
  - replaces some unsupported characters with very similar supported characters
  - truncates the line after the max line length, allowing for escape characters
  - truncates the line after an unsupported character that does not have a replacement

  Args:
    s: input string

  Returns:
    Reformatted potentially-truncated line.
  """
  s = s.upper()
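  # Hypothetical example: 'hi [world]' becomes 'HI (WORLD)' after the upper-casing
  # above and the character mapping below; an escape sequence such as '{62}' (the
  # Vestaboard code for the degree sign) is assumed to count as a single display
  # character when measuring line length.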
  character_mapping = {
      '[': '(',
      '<': '(',
      ']': ')',
      '>': ')',
      '|': '/',
      '\\': '/'}
  for c in character_mapping:
    s = s.replace(c, character_mapping[c])

  l = 0
  # the escaped apostrophe is intentional; without the backslash the two quotes would
  # merely end one string literal and start another, silently dropping the apostrophe
  valid_characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$()-+&=;:\'"%,./?'




                            <----SKIPPED LINES---->




        pointer = len(s)
      else:
        try:
          escape_value = int(s[pointer+1:end])
        except ValueError:
          escape_value = None
        if escape_value in valid_escape_values:
          validated_s += s[pointer:end+1]
          l += 1
        pointer = end
    else:
      pointer += 1

  return validated_s


def PersonalMessage(configuration, message_queue):
  """Formats and displays the personal message.

  A user-defined message can be displayed to the board whenever there isn't a
  flight message during user-specified hours of the day. This clears the board, if
  requested, and then adds that message to the queue.

  Args:
    configuration: the settings dictionary.
    message_queue: the existing queue, to which the personal message - if any - is added.

  """
  if 'clear_board' in configuration:
    RemoveSetting(configuration, 'clear_board')
    message_queue.append((FLAG_MSG_CLEAR, ''))
  minute_of_day = MinuteOfDay()
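  # personal_on_time and personal_off_time are compared against MinuteOfDay(), so they
  # are assumed to be expressed as minutes since midnight (e.g. 570 for 9:30am).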
  if (
      not message_queue and
      'personal_message_enabled' in configuration and
      configuration['personal_message'] and
      minute_of_day <= configuration['personal_off_time'] and
      minute_of_day > configuration['personal_on_time'] + 1):
    message = configuration['personal_message']
    lines = [TruncateEscapedLine(l) for l in message.split('\n')[:SPLITFLAP_LINE_COUNT]]

    message_queue.append((FLAG_MSG_PERSONAL, lines))
    Log('Personal message added to queue: %s' % str(lines))


def ManageMessageQueue(message_queue, next_message_time, configuration, screens):
  """Check time & if appropriate, display next message from queue.

  Args:
    message_queue: FIFO list of message tuples of (message type, message string).
    next_message_time: epoch at which the next message should be displayed.
    configuration: dictionary of configuration attributes.
    screens: List of past screens displayed to splitflap screen.

  Returns:
    The next_message_time, potentially updated if a message has been displayed, or
    unchanged if no message was displayed.
  """
  if message_queue and (time.time() >= next_message_time or SIMULATION):

    if SIMULATION:  # drain the queue because the messages come so fast
      messages_to_display = list(message_queue)
      # passed by reference, so clear it out since we drained it to the display
      del message_queue[:]
    else:  # display only one message, being mindful of the display timing
      messages_to_display = [message_queue.pop(0)]

    for message in messages_to_display:
      message_text = message[1]
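      # message[1] may be either a plain string (wrapped below to the splitflap line
      # width) or an already-split list of lines, as with personal messages.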
      if isinstance(message_text, str):
        message_text = textwrap.wrap(message_text, width=SPLITFLAP_CHARS_PER_LINE)

      display_message = Screenify(message_text, False)
      Log(display_message, file=ALL_MESSAGE_FILE)

      # Saving this to disk allows us to persistently identify
      # what's currently on the screen
      PickleObjectToFile(message, PICKLE_SCREENS, True)
      screens.append(message)

      MaintainRollingWebLog(display_message, 25)
      if not SIMULATION:
        splitflap_message = Screenify(message_text, True)
        PublishMessage(splitflap_message)

    next_message_time = time.time() + configuration['setting_delay']
  return next_message_time


def DeleteMessageTypes(q, types_to_delete):
  """Delete messages from the queue if type is in the iterable types."""
  if VERBOSE:
    messages_to_delete = [m for m in q if m[0] in types_to_delete]
    if messages_to_delete:
      Log('Deleting messages from queue due to new-found plane: %s' % messages_to_delete)

  updated_q = [m for m in q if m[0] not in types_to_delete]
  return updated_q


def BootstrapInsightList(full_path=PICKLE_FLIGHTS):
  """(Re)populate flight pickle files with flight insight distributions.

  The set of insights generated for each flight is created at the time the flight was
  first identified, and saved on the flight pickle. This saving allows the current
  running distribution to be recalculated very quickly, but it means that as code
  enabling new insights gets added, those historical distributions may not necessarily
  be considered correct.

  They are "correct" in the sense that that new insight was not available at the time
  that older flight was seen, but it is not correct in the sense that, because this new
  insight is starting out with an incidence in the historical data of zero, this
  new insight may be reported more frequently than desired until it "catches up".

  So this method replays the flight history with the latest insight code, regenerating
  the insight distribution for each flight.
  """
  directory, file = os.path.split(full_path)
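  # Note: this bootstrap is typically invoked via the '-i' command-line flag handled
  # in __main__ below.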
  all_files = os.listdir(directory)
  files = sorted([os.path.join(directory, f) for f in all_files if file in f])
  for f in files:

    print('Bootstrapping %s' % f)
    configuration = ReadAndParseSettings(CONFIG_FILE)
    flights = []
    tmp_f = f + 'tmp'

    RemoveFile(tmp_f)

    if os.path.exists(f):
      mtime = os.path.getmtime(f)
      flights = UnpickleObjectFromFile(f, False)
      for (n, flight) in enumerate(flights):
        if n % 25 == 0:
          print(' - %d' % n)

        CreateFlightInsights(flights[:n+1], configuration.get('insights', 'hide'), {})
        PickleObjectToFile(flight, tmp_f, False)

      if mtime == os.path.getmtime(f):
        shutil.move(tmp_f, f)
      else:
        print('Aborted: failed to bootstrap %s: file changed while being processed' % full_path)

        return


def ResetLogs(config):
  """Clears the non-scrolling logs if reset_logs in config."""
  if 'reset_logs' in config:
    Log('Reset logs')
    for f in (STDERR_FILE, BACKUP_FILE, SERVICE_VERIFICATION_FILE):
      if RemoveFile(f):
        open(f, 'a').close()
    config.pop('reset_logs')
    config = BuildSettings(config)
    WriteFile(CONFIG_FILE, config)
  return config


def CheckTemperature():
  """Turn on fan if temperature exceeds threshold."""
  if RASPBERRY_PI:
    temperature = gpiozero.CPUTemperature().temperature




                            <----SKIPPED LINES---->





  if RASPBERRY_PI:
    RPi.GPIO.setmode(RPi.GPIO.BCM)

  pins = (
      GPIO_ERROR_VESTABOARD_CONNECTION, GPIO_ERROR_FLIGHT_AWARE_CONNECTION,
      GPIO_ERROR_ARDUINO_SERVO_CONNECTION, GPIO_ERROR_ARDUINO_REMOTE_CONNECTION,
      GPIO_ERROR_BATTERY_CHARGE, GPIO_FAN, GPIO_UNUSED_1, GPIO_UNUSED_2)

  for pin in pins:
    initial_state = pin[5]
    pin_values[pin[0]] = initial_state  # Initialize state of pins
    UpdateDashboard(initial_state, pin)

    if RASPBERRY_PI:
      RPi.GPIO.setup(pin[0], RPi.GPIO.OUT)
      RPi.GPIO.output(pin[0], pin_values[pin[0]])
    UpdateDashboard(pin_values[pin[0]], pin)

  if RASPBERRY_PI:  # configure soft reset button

    RPi.GPIO.setup(GPIO_SOFT_RESET[0], RPi.GPIO.IN, pull_up_down=RPi.GPIO.PUD_DOWN)
    RPi.GPIO.setup(GPIO_SOFT_RESET[1], RPi.GPIO.OUT)
    RPi.GPIO.output(GPIO_SOFT_RESET[1], True)
    RPi.GPIO.add_event_detect(GPIO_SOFT_RESET[0], RPi.GPIO.RISING)
    RPi.GPIO.add_event_callback(GPIO_SOFT_RESET[0], InterruptRebootFromButton)


def UpdateStatusLight(pin, value, failure_message=''):
  """Sets the Raspberry Pi GPIO pin high (True) or low (False) based on value."""
  global pin_values
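  # The pin argument is one of the GPIO_* tuples; based on the indices used here and
  # in SetPinMode, it is assumed to carry at least: pin[0] = BCM pin number,
  # pin[1] / pin[2] = log messages for the raised / cleared states, pin[3] = relay
  # light number, and pin[5] = initial state.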

  if value:
    msg = pin[1]
  else:
    msg = pin[2]
  if RASPBERRY_PI:
    RPi.GPIO.output(pin[0], value)
    if value:
      pin_setting = 'HIGH'
      relay_light_value = 'OFF'
    else:
      pin_setting = 'LOW'
      relay_light_value = 'ON'
    msg += '; RPi GPIO pin %d set to %s; relay light #%d should now be %s' % (
        pin[0], pin_setting, pin[3], relay_light_value)

  if pin_values[pin[0]] != value:
    if VERBOSE:
      Log(msg)
    pin_values[pin[0]] = value  # update cache
    UpdateDashboard(value, subsystem=pin, failure_message=failure_message)


def UpdateDashboard(value, subsystem=0, failure_message=''):
  """Writes to disk a tuple with status details about a particular system.

  The independent monitoring.py module allows us to see in one place the status of all
  the subsystems and of the overall system; it does that monitoring based on these
  tuples of data.

  Args:
    value: Boolean indicating whether a failure has occurred (True) or system is nominal
      (False).
    subsystem: A tuple describing the system; though that description may have multiple
      attributes, the 0th element is the numeric identifier of that system.  monitoring.py
      depends on other attributes of that tuple being present as well.  Since the
      overall system does not have a tuple defined for it, it gets a default identifier
      of 0.
    failure_message: an (optional) message describing why the system / subsystem is
      being disabled or failing.
  """
  versions = (VERSION_MESSAGEBOARD, VERSION_ARDUINO)
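  # Each record appended to PICKLE_DASHBOARD is a 5-tuple; a hypothetical example:
  # (1592380000.0, 17, False,
  #  ('messageboard-2020-06-01-0900.py', 'arduino-2020-06-01-0900.py'), '')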
  if subsystem:
    subsystem = subsystem[0]
  PickleObjectToFile(
      (time.time(), subsystem, value, versions, failure_message),
      PICKLE_DASHBOARD, True)


def RemoveFile(file):
  """Removes a file if it exists, returning a boolean indicating if it had existed."""
  if os.path.exists(file):
    try:
      os.remove(file)
    except PermissionError:
      return False
    return True

  return False


def ConfirmNewFlight(flight, flights):
  """Replaces last-seen flight with new flight if otherwise identical but for identifiers.

  Flights are identified by the radio over time by a tuple of identifiers: flight_number
  and squawk.  Due to unknown communication issues, one or the other may not always
  be transmitted. However, as soon as a new flight is identified that has at least one
  of those identifiers, we report on it and log it to the pickle repository, etc.

  This function checks if the newly identified flight is indeed a duplicate of the
  immediate prior flight by virtue of having the same squawk and/or flight number, and
  further, if the paths overlap.  If the paths do not overlap, then it's likely that
  the same flight was seen some minutes apart, and should legitimately be treated as
  a different flight.

  If the new flight is an updated version, then we should replace the prior-pickled-to-
  disk flight and replace the last flight in flights with this new version.

  Args:
    flight: new flight to check if identical to previous flight
    flights: list of all flights seen so far

  Returns:
    Boolean indicating whether flight is new (True) or an updated version of the
    prior flight (False).
  """
  # boundary conditions: with no new flight or no prior flights, treat any non-empty
  # flight as new
  if not flight or not flights:
    return bool(flight)

  last_flight = flights[-1]

  # flight_number and squawk are new
  if (
      flight.get('flight_number') != last_flight.get('flight_number')
      and flight.get('squawk') != last_flight.get('squawk')):
    return True

  # it's a returning flight... but perhaps some time later as it's hovering in the area
  last_flight_last_seen = last_flight.get('persistent_path', [last_flight])[-1]['now']

  if flight['now'] - last_flight_last_seen > PERSISTENCE_SECONDS:
    return True

  # it's not a new flight, so:
  # 1) replace the last flight in flights
  message = (
      'Flight (%s; %s) is overwriting the prior '
      'recorded flight (%s; %s) due to updated identifiers' % (
          flight.get('flight_number'), flight.get('squawk'),
          last_flight.get('flight_number'), last_flight.get('squawk')))
  flights[-1] = flight

  # 2) replace the last pickled record
  #
  # There is a potential complication: the last flight and the new flight may have
  # crossed into a new day, and since we use date segmentation, the last
  # flight exists in yesterday's file
  max_days = 1
  if not SIMULATION and DisplayTime(flight, '%x') != DisplayTime(last_flight, '%x'):
    max_days = 2
    message += (
        '; in repickling, we crossed days, so pickled flights that might otherwise'
        ' be in %s file are now all located in %s file' % (
            DisplayTime(last_flight, '%x'), DisplayTime(flight, '%x')))

  Log(message)

  args = (PICKLE_FLIGHTS, not SIMULATION, max_days)
  saved_flights = UnpickleObjectFromFile(*args)[:-1]
  files_to_overwrite = UnpickleObjectFromFile(*args, filenames=True)

  for file in files_to_overwrite:
    os.remove(file)
  for f in saved_flights:
    # we would like to use verify=True, but that's too slow without further optimizing the
    # verification step for a loop of data
    PickleObjectToFile(
        f, PICKLE_FLIGHTS, True, timestamp=f['now'], verify=False)

  return False


def HeartbeatRestart():
  """Logs a system down / system up pair of heartbeats as system is first starting."""
  if SIMULATION:
    return 0
  UpdateDashboard(True)  # Indicates that this wasn't running a moment before, ...
  UpdateDashboard(False)  # ... and now it is running!
  return time.time()


def Heartbeat(last_heartbeat_time=None):
  """Logs a system up heartbeat."""
  if SIMULATION:
    return last_heartbeat_time
  now = time.time()
  if not last_heartbeat_time or now - last_heartbeat_time > HEARTBEAT_SECONDS:
    UpdateDashboard(False)  # Send an all-clear message
    last_heartbeat_time = now
  return last_heartbeat_time


def VersionControl():
  """Copies the current instances of messageboard.py and arduino.py into a repository.

  To aid debugging, we want to keep past versions of the code easily accessible, and
  linked to the errors that have been logged. This function copies the python code
  into a version control directory after adding in a date / time stamp to the file name.

  """
  def MakeCopy(python_prefix):
    file_extension = '.py'

    live_name = python_prefix + file_extension
    live_path = os.path.join(CODE_REPOSITORY, live_name)

    epoch = os.path.getmtime(live_path)
    last_modified_suffix = EpochDisplayTime(epoch, format_string='-%Y-%m-%d-%H%M')
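    # e.g. (hypothetical) a file last modified 2021-01-31 08:15 yields the suffix
    # '-2021-01-31-0815' and a version name like 'messageboard-2021-01-31-0815.py'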

    version_name = python_prefix + last_modified_suffix + file_extension
    version_path = os.path.join(VERSION_REPOSITORY, version_name)

    if not os.path.exists(version_path):
      shutil.copyfile(live_path, version_path)
    return version_name

  global VERSION_MESSAGEBOARD
  global VERSION_ARDUINO
  VERSION_MESSAGEBOARD = MakeCopy('messageboard')
  VERSION_ARDUINO = MakeCopy('arduino')


def main():
  """Traffic cop between incoming radio flight messages, configuration, and messageboard.

  This is the main logic, checking for new flights, augmenting the radio signal with
  additional web-scraped data, and generating messages in a form presentable to the
  messageboard.
  """
  VersionControl()

  # Since this clears log files, it should occur first before we start logging
  if '-s' in sys.argv:
    global SIMULATION_COUNTER
    SimulationSetup()

  last_heartbeat_time = HeartbeatRestart()
  init_timing = [(time.time(), 0)]

  # This flag slows down simulation time around a flight, great for debugging the arduinos
  simulation_slowdown = '-f' in sys.argv

  # Redirect any errors to a log file instead of the screen, and add a datestamp
  if not SIMULATION:
    sys.stderr = open(STDERR_FILE, 'a')
    Log('', STDERR_FILE)

  init_timing.append((time.time(), 1))
  Log('Starting up process %d' % os.getpid())
  already_running_ids = FindRunningParents()
  if already_running_ids:
    for pid in already_running_ids:
      Log('Sending termination signal to %d' % pid)
      os.kill(pid, signal.SIGTERM)
  init_timing.append((time.time(), 2))

  SetPinMode()

  configuration = ReadAndParseSettings(CONFIG_FILE)
  Log('Read CONFIG_FILE at %s: %s' % (CONFIG_FILE, str(configuration)))

  startup_time = time.time()
  json_desc_dict = {}

  init_timing.append((time.time(), 3))

  flights = UnpickleObjectFromFile(PICKLE_FLIGHTS, True, max_days=MAX_INSIGHT_HORIZON_DAYS)
  # Clear the loaded flight of any cached data, identified by keys with a specific
  # suffix, since code fixes may change the values for some of those cached elements
  for flight in flights:
    for key in list(flight.keys()):
      if key.endswith(CACHED_ELEMENT_PREFIX):
        flight.pop(key)
  init_timing.append((time.time(), 4))

  screen_history = UnpickleObjectFromFile(PICKLE_SCREENS, True, max_days=2)

  # If we're displaying just a single insight message, we want it to be something
  # unique, to the extent possible; this dict holds a count of the diff types of messages
  # displayed so far
  insight_message_distribution = {}

  # bootstrap the flight insights distribution from a list of insights on each
  # flight (i.e.: flight['insight_types'] for a given flight might look like
  # [1, 2, 7, 9], or [], to indicate which insights were identified); this then
  # transforms that into {0: 25, 1: 18, ...}, summing across all flights
  missing_insights = []
  for flight in flights:
    if 'insight_types' not in flight:
      missing_insights.append(
          '%s on %s' % (DisplayFlightNumber(flight), DisplayTime(flight, '%x %X')))
    distribution = flight.get('insight_types', [])
    for key in distribution:
      insight_message_distribution[key] = (
          insight_message_distribution.get(key, 0) + 1)
  if missing_insights:
    Log('Flights missing insight distributions: %s' % ';'.join(missing_insights))

  init_timing.append((time.time(), 5))

  # initialize objects required for arduinos, but we can only start them in the main
  # loop, because the tail end of the init section needs to confirm that all other
  # messageboard.py processes have exited!
  to_remote_q, to_servo_q, to_main_q, shutdown = InitArduinoVariables()
  remote, servo = None, None

  # used in simulation to print the hour of simulation once per simulated hour
  prev_simulated_hour = ''

  persistent_nearby_aircraft = {} # key = flight number; value = last seen epoch
  persistent_path = {}
  histogram = {}

  # Next up to print is index 0; this is a list of tuples:
  # tuple element #1: flag indicating the type of message that this is
  # tuple element #2: the message itself
  # (flight messages also carry the flight dictionary as a third element)
  message_queue = []
  next_message_time = time.time()
  reboot = False  # set by CheckRebootNeeded() each loop; read at graceful shutdown

  # We repeat the loop every x seconds; this ensures that if the processing time is long,
  # we don't wait another x seconds after processing completes
  next_loop_time = time.time() + LOOP_DELAY_SECONDS

  # The dump json file is re-read only if the version on disk has been modified more
  # recently than the last time it was read
  last_dump_json_timestamp = 0

  init_timing.append((time.time(), 6))
  WaitUntilKillComplete(already_running_ids)
  init_timing.append((time.time(), 7))

  LogTimes(init_timing)

  Log('Finishing initialization of %d; starting radio polling loop' % os.getpid())

  while (not SIMULATION or SIMULATION_COUNTER < len(DUMP_JSONS)) and not SHUTDOWN_SIGNAL:

    last_heartbeat_time = Heartbeat(last_heartbeat_time)

    new_configuration = ReadAndParseSettings(CONFIG_FILE)
    UpdateRollingLogSize(new_configuration)

    CheckForNewFilterCriteria(configuration, new_configuration, message_queue, flights)
    configuration = new_configuration

    ResetLogs(configuration)  # clear the logs if requested
    UpdateRollingLogSize(configuration)

    # if this is a SIMULATION, then process every diff dump. But if it isn't a simulation,
    # then only read & do related processing for the next dump if the last-modified
    # timestamp indicates the file has been updated since it was last read.

    tmp_timestamp = 0
    if not SIMULATION:
      dump_json_exists = os.path.exists(DUMP_JSON_FILE)
      if dump_json_exists:
        tmp_timestamp = os.path.getmtime(DUMP_JSON_FILE)
    if (SIMULATION and DumpJsonChanges()) or (
        not SIMULATION and dump_json_exists and tmp_timestamp > last_dump_json_timestamp):

      last_dump_json_timestamp = tmp_timestamp

      (persistent_nearby_aircraft,
       flight, now,
       json_desc_dict,
       persistent_path) = ScanForNewFlights(
           persistent_nearby_aircraft,
           persistent_path,
           configuration.get('log_jsons', False))

      # because this might just be an updated instance of the previous flight as more
      # identifier information (squawk and/or flight number) comes in, we only want to
      # process this if it's a truly new flight
      new_flight_flag = ConfirmNewFlight(flight, flights)

      if new_flight_flag:
        flights.append(flight)
        remote, servo = RefreshArduinos(
            remote, servo,
            to_remote_q, to_servo_q, to_main_q, shutdown,
            flights, json_desc_dict, configuration, screen_history)

        if FlightMeetsDisplayCriteria(flight, configuration, log=True):

          flight_message = (FLAG_MSG_FLIGHT, CreateMessageAboutFlight(flight), flight)

          # display the next message about this flight now!
          next_message_time = time.time()
          message_queue.insert(0, flight_message)
          # and delete any queued insight messages about other flights that have
          # not yet displayed, since a newer flight has taken precedence
          message_queue = DeleteMessageTypes(message_queue, (FLAG_MSG_INSIGHT,))

          # Though we also manage the message queue outside this conditional, generating
          # the flight insights can take a half second; managing it here as well lets
          # this message start displaying on the board immediately, so it's up there
          # when it's most relevant
          next_message_time = ManageMessageQueue(
              message_queue, next_message_time, configuration, screen_history)

          insight_messages = CreateFlightInsights(
              flights, configuration.get('insights'), insight_message_distribution)

          if configuration.get('next_flight', 'off') == 'on':
            next_flight_text = FlightInsightNextFlight(flights, configuration)
            if next_flight_text:
              insight_messages.insert(0, next_flight_text)

          insight_messages = [(FLAG_MSG_INSIGHT, m) for m in insight_messages]

          for insight_message in insight_messages:
            message_queue.insert(0, insight_message)

        else:  # flight didn't meet display criteria
          flight['insight_types'] = []

        PickleObjectToFile(flight, PICKLE_FLIGHTS, True, timestamp=flight['now'])

      else:
        remote, servo = RefreshArduinos(
            remote, servo,
            to_remote_q, to_servo_q, to_main_q, shutdown,
            flights, json_desc_dict, configuration, screen_history)

    message_queue, next_message_time = ProcessArduinoCommmands(
        to_main_q, flights, configuration, message_queue, next_message_time)

    PersonalMessage(configuration, message_queue)

    if SIMULATION:
      if now:
        simulated_hour = EpochDisplayTime(now, '%Y-%m-%d %H:00%z')
        if simulated_hour != prev_simulated_hour:
          print(simulated_hour)
          prev_simulated_hour = simulated_hour

    histogram = ReadAndParseSettings(HISTOGRAM_CONFIG_FILE)
    RemoveFile(HISTOGRAM_CONFIG_FILE)

    # We also need to make sure there are flights on which to generate a histogram! Why
    # might there not be any flights? Primarily during a simulation, if there's a
    # lingering histogram file at the time of history restart.

    if histogram and not flights:
      Log('Histogram requested (%s) but no flights in memory' % histogram)
    if histogram and flights:
      message_queue.extend(TriggerHistograms(flights, histogram))

    # check time & if appropriate, display next message from queue
    next_message_time = ManageMessageQueue(
        message_queue, next_message_time, configuration, screen_history)

    reboot = CheckRebootNeeded(startup_time, message_queue, json_desc_dict, configuration)

    CheckTemperature()

    if not SIMULATION:
      time.sleep(max(0, next_loop_time - time.time()))
      next_loop_time = time.time() + LOOP_DELAY_SECONDS
    else:
      SIMULATION_COUNTER += 1
      if simulation_slowdown:
        SimulationSlowdownNearFlight(flights, persistent_nearby_aircraft)

  if SIMULATION:
    SimulationEnd(message_queue, flights, screen_history)

  PerformGracefulShutdown((to_remote_q, to_servo_q, to_main_q), shutdown, reboot)


if __name__ == "__main__":
  # interrupt, as in ctrl-c
  signal.signal(signal.SIGINT, InterruptShutdownFromSignal)

  # terminate, when another instance is found or via kill
  signal.signal(signal.SIGTERM, InterruptShutdownFromSignal)

  if '-i' in sys.argv:
    BootstrapInsightList()
  else:
    main_settings = ReadAndParseSettings(CONFIG_FILE)
    if 'code_profiling_enabled' in main_settings:
      import cProfile
      cProfile.run(
          'main()', 'messageboard_stats-%s.profile' %
          EpochDisplayTime(time.time(), '%Y-%m-%d-%H%M'))
    else:
      main()




                            <----SKIPPED LINES---->




REBOOT_SIGNAL = False

SIMULATION = False
SIMULATION_COUNTER = 0
SIMULATION_PREFIX = 'SIM_'
PICKLE_DUMP_JSON_FILE = 'pickle/dump_json.pk'
PICKLE_FA_JSON_FILE = 'pickle/fa_json.pk'
DUMP_JSONS = None  # loaded only if in simulation mode
FA_JSONS = None  # loaded only if in simulation mode

HOME_LAT = 37.64406
HOME_LON = -122.43463
HOME = (HOME_LAT, HOME_LON) # lat / lon tuple of antenna
HOME_ALT = 29  # altitude in meters
RADIUS = 6371.0e3  # radius of earth in meters

FEET_IN_METER = 3.28084
FEET_IN_MILE = 5280
METERS_PER_SECOND_IN_KNOTS = 0.514444

# only planes within this distance will be detailed
MIN_METERS = 5000/FEET_IN_METER
# planes not seen within MIN_METERS in PERSISTENCE_SECONDS seconds will be
# dropped from the nearby list
PERSISTENCE_SECONDS = 300
TRUNCATE = 50  # max number of keys to include in a histogram image file
# number of seconds to pause between each radio poll / command processing loop
LOOP_DELAY_SECONDS = 1

# number of seconds to wait between recording heartbeats to the status file
HEARTBEAT_SECONDS = 10

# version control directory
CODE_REPOSITORY = ''
VERSION_REPOSITORY = 'versions/'
VERSION_WEBSITE_PATH = VERSION_REPOSITORY
VERSION_MESSAGEBOARD = None
VERSION_ARDUINO = None

# histogram logic truncates to exactly 30 days of hours
MAX_INSIGHT_HORIZON_DAYS = 31

# This file is where the radio drops its json file
DUMP_JSON_FILE = '/run/readsb/aircraft.json'

# At the time a flight is first identified as being of interest (in that
# it falls within MIN_METERS meters of HOME), it - and core attributes
# derived from FlightAware, if any - is appended to the end of this pickle
# file. However, since this file is cached in working memory, flights older
# than 30 days are flushed from this periodically.
PICKLE_FLIGHTS = 'pickle/flights.pk'

# This allows us to identify the full history (including what was last sent
# to the splitflap display) in a programmatic fashion. While it may be
# interesting in its own right, its real use is to handle the "replay"
# button, so we know to enable it if what is displayed is the last flight.
PICKLE_SCREENS = 'pickle/screens.pk'

# Status data about messageboard - is it running, etc.  Specifically, has tuples
# of data (timestamp, system_id, status), where system_id is either the pin id
# of GPIO, or a 0 to indicate overall system, and status is boolean
PICKLE_DASHBOARD = 'pickle/dashboard.pk'

CACHED_ELEMENT_PREFIX = 'cached_'

# This web-exposed file is used for non-error messages that might highlight
# data or code logic worth checking into. It is only cleared out manually.
LOGFILE = 'log.txt'
# Identical to the LOGFILE, except it includes just the most recent n lines.
# Newest lines are at the end.
ROLLING_LOGFILE = 'rolling_log.txt'

# default number of lines, which may be overridden by the settings file
ROLLING_LOG_SIZE = 1000

# Users can trigger .png histograms analogous to the text ones from the web
# interface; this is the folder (within WEBSERVER_PATH) where those files are
# placed
WEBSERVER_IMAGE_RELATIVE_FOLDER = 'images/'
# Multiple histograms can be generated, i.e. for airline, aircraft, day of
# week, etc. The output files are named by the prefix & suffix, i.e.: prefix +
# type + . + suffix, as in histogram_aircraft.png. These names match up to the
# names expected by the html page that displays the images. Also, note that the
# suffix is interpreted by matplotlib to identify the image format to create.
HISTOGRAM_IMAGE_PREFIX = 'histogram_'
HISTOGRAM_IMAGE_SUFFIX = 'png'
HISTOGRAM_IMAGE_HTML = 'histograms.html'

# This file indicates a pending request for histograms - either png,
# text-based, or both; once it is processed, this file is deleted. The
# contents are concatenated key-value pairs, histogram=all;
# histogram_history=24h; etc.
HISTOGRAM_CONFIG_FILE = 'secure/histogram.txt'
HISTOGRAM_BOOLEANS = ('histogram_data_summary',)  # trailing comma keeps tuple
# This contains concatenated key-value configuration attributes in a similar
# format to the HISTOGRAM_CONFIG_FILE that are exposed to the user via the
# web interface or, for a subset of them, through the Arduino interface.
# They are polled at every iteration so that the most current value is
# always leveraged by the running software.
CONFIG_FILE = 'secure/settings.txt'
CONFIG_BOOLEANS = (
    'setting_screen_enabled', 'next_flight', 'reset_logs', 'log_jsons')
# A few key settings for the messageboard are its sensitivity to displaying
# flights - though it logs all flights within range, it may not be desirable
# to display all flights to the user. Two key parameters are the maximum
# altitude, and the furthest away we anticipate the flight being at its
# closest point to HOME. As those two parameters are manipulated in the
# settings, a histogram is displayed with one or potentially two series,
# showing the present and potentially prior-set distribution of flights,
# by hour throughout the day, over the last seven days, normalized to
# flights per day. This allows those parameters to be fine-tuned in a
# useful way. This file is the location, on the webserver, of that image,
# which needs to be in alignment with the html page that displays it.
HOURLY_IMAGE_FILE = 'hours.png'

# This is all messages that have been sent to the board since the last time
# the file was manually cleared. Newest messages are at the bottom. It is
# visible at the webserver.
ALL_MESSAGE_FILE = 'all_messages.txt'
# This shows the most recent n messages sent to the board. Newest messages
# are at the top for easier viewing of "what did I miss".
ROLLING_MESSAGE_FILE = 'rolling_messages.txt'

STDERR_FILE = 'stderr.txt'
BACKUP_FILE = 'backup.txt'
SERVICE_VERIFICATION_FILE = 'service-verification.txt'
UPTIMES_FILE = 'uptimes.html'

FLAG_MSG_FLIGHT = 1  # basic flight details
FLAG_MSG_INSIGHT = 2  # random tidbit about a flight
FLAG_MSG_HISTOGRAM = 3  # histogram message
FLAG_MSG_CLEAR = 4  # a blank message to clear the screen
# user-entered message to display for some duration of time
FLAG_MSG_PERSONAL = 5

FLAG_INSIGHT_LAST_SEEN = 0
FLAG_INSIGHT_DIFF_AIRCRAFT = 1
FLAG_INSIGHT_NTH_FLIGHT = 2
FLAG_INSIGHT_GROUNDSPEED = 3
FLAG_INSIGHT_ALTITUDE = 4
FLAG_INSIGHT_VERTRATE = 5
FLAG_INSIGHT_FIRST_DEST = 6
FLAG_INSIGHT_FIRST_ORIGIN = 7
FLAG_INSIGHT_FIRST_AIRLINE = 8
FLAG_INSIGHT_FIRST_AIRCRAFT = 9
FLAG_INSIGHT_LONGEST_DELAY = 10
FLAG_INSIGHT_FLIGHT_DELAY_FREQUENCY = 11
FLAG_INSIGHT_FLIGHT_DELAY_TIME = 12
FLAG_INSIGHT_AIRLINE_DELAY_FREQUENCY = 13
FLAG_INSIGHT_AIRLINE_DELAY_TIME = 14
FLAG_INSIGHT_DESTINATION_DELAY_FREQUENCY = 15
FLAG_INSIGHT_DESTINATION_DELAY_TIME = 16
FLAG_INSIGHT_HOUR_DELAY_FREQUENCY = 17
FLAG_INSIGHT_HOUR_DELAY_TIME = 18
FLAG_INSIGHT_DATE_DELAY_FREQUENCY = 19
FLAG_INSIGHT_DATE_DELAY_TIME = 20
INSIGHT_TYPES = 21

TEMP_FAN_TURN_ON_CELSIUS = 65
TEMP_FAN_TURN_OFF_CELSIUS = 55

# GPIO relay connections
# format: (GPIO pin, true message, false message, relay number,
# description, initial_state)
GPIO_ERROR_VESTABOARD_CONNECTION = (
    22,
    'ERROR: Vestaboard unavailable',
    'SUCCESS: Vestaboard available',
    1, 'Vestaboard connected', False)
GPIO_ERROR_FLIGHT_AWARE_CONNECTION = (
    23,
    'ERROR: FlightAware not available',
    'SUCCESS: FlightAware available',
    2, 'FlightAware connected', False)
GPIO_ERROR_ARDUINO_SERVO_CONNECTION = (
    24,
    'ERROR: Servos not running or lost connection',
    'SUCCESS: Handshake with servo Arduino received',
    3, 'Hemisphere connected', True)
GPIO_ERROR_ARDUINO_REMOTE_CONNECTION = (
    25,
    'ERROR: Remote not running or lost connection',
    'SUCCESS: Handshake with remote Arduino received',
    4, 'Remote connected', True)
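

# A minimal illustrative sketch (hypothetical helper, never called by the
# program) of how the relay tuples above are laid out, matching the format
# comment at the top of this block.
def _ExampleGpioTupleFields(gpio_tuple=GPIO_ERROR_VESTABOARD_CONNECTION):
  """Hedged example only; never called by the running program."""
  (pin, true_message, false_message,
   relay_number, description, initial_state) = gpio_tuple
  return {'pin': pin, 'relay': relay_number, 'description': description,
          'messages': (true_message, false_message),
          'initial_state': initial_state}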




                            <----SKIPPED LINES---->




if RASPBERRY_PI:
  PICKLE_FLIGHTS = MESSAGEBOARD_PATH + PICKLE_FLIGHTS
  PICKLE_DASHBOARD = MESSAGEBOARD_PATH + PICKLE_DASHBOARD
  LOGFILE = MESSAGEBOARD_PATH + LOGFILE
  PICKLE_DUMP_JSON_FILE = MESSAGEBOARD_PATH + PICKLE_DUMP_JSON_FILE
  PICKLE_FA_JSON_FILE = MESSAGEBOARD_PATH + PICKLE_FA_JSON_FILE
  PICKLE_SCREENS = MESSAGEBOARD_PATH + PICKLE_SCREENS
  CODE_REPOSITORY = MESSAGEBOARD_PATH

  HISTOGRAM_CONFIG_FILE = WEBSERVER_PATH + HISTOGRAM_CONFIG_FILE
  CONFIG_FILE = WEBSERVER_PATH + CONFIG_FILE
  ROLLING_MESSAGE_FILE = WEBSERVER_PATH + ROLLING_MESSAGE_FILE
  ALL_MESSAGE_FILE = WEBSERVER_PATH + ALL_MESSAGE_FILE
  ROLLING_LOGFILE = WEBSERVER_PATH + ROLLING_LOGFILE
  STDERR_FILE = WEBSERVER_PATH + STDERR_FILE
  BACKUP_FILE = WEBSERVER_PATH + BACKUP_FILE
  SERVICE_VERIFICATION_FILE = WEBSERVER_PATH + SERVICE_VERIFICATION_FILE
  UPTIMES_FILE = WEBSERVER_PATH + UPTIMES_FILE

  HISTOGRAM_IMAGE_HTML = WEBSERVER_PATH + HISTOGRAM_IMAGE_HTML
  HOURLY_IMAGE_FILE = (
      WEBSERVER_PATH + WEBSERVER_IMAGE_RELATIVE_FOLDER + HOURLY_IMAGE_FILE)
  VERSION_REPOSITORY = WEBSERVER_PATH + VERSION_REPOSITORY



TIMEZONE = 'US/Pacific' # timezone of display
TZ = pytz.timezone(TIMEZONE)

# iata codes that we don't need to expand
KNOWN_AIRPORTS = ('SJC', 'SFO', 'OAK')

SPLITFLAP_CHARS_PER_LINE = 22
SPLITFLAP_LINE_COUNT = 6

DIRECTIONS_4 = ['N', 'E', 'S', 'W']
DIRECTIONS_8 = ['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW']
DIRECTIONS_16 = ['N', 'NNE', 'NE', 'ENE', 'E', 'ESE', 'SE', 'SSE',
                 'S', 'SSW', 'SW', 'WSW', 'W', 'WNW', 'NW', 'NNW']

HOURS = ['12a', ' 1a', ' 2a', ' 3a', ' 4a', ' 5a', ' 6a', ' 7a',
         ' 8a', ' 9a', '10a', '11a', '12p', ' 1p', ' 2p', ' 3p',
         ' 4p', ' 5p', ' 6p', ' 7p', ' 8p', ' 9p', '10p', '11p']

SECONDS_IN_MINUTE = 60
MINUTES_IN_HOUR = 60
HOURS_IN_DAY = 24
SECONDS_IN_HOUR = SECONDS_IN_MINUTE * MINUTES_IN_HOUR
MINUTES_IN_DAY = MINUTES_IN_HOUR * HOURS_IN_DAY
SECONDS_IN_DAY = SECONDS_IN_HOUR * HOURS_IN_DAY

# Units confirmed here:
# www.adsbexchange.com/forum/threads/
# units-in-the-dump1090-json-file.630617/#post-639541
CLIMB_RATE_UNITS = 'fpm'
# speed units from tracker are knots, based on dump1090/track.c
#https://github.com/SDRplay/dump1090/blob/master/track.c
SPEED_UNITS = 'kn'
DISTANCE_UNITS = 'ft'  # altitude

# For displaying histograms
# If a key is not present, how should it be displayed in histograms?
KEY_NOT_PRESENT_STRING = 'Unknown'
OTHER_STRING = 'Other'
# What key strings should be listed last in sequence?
SORT_AT_END_STRINGS = [OTHER_STRING, KEY_NOT_PRESENT_STRING]
# What is the sorted sequence of keys for days of week?
DAYS_OF_WEEK = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']

AIRCRAFT_LENGTH = {} # in meters
AIRCRAFT_LENGTH['Airbus A220-100 (twin-jet)'] = 35
AIRCRAFT_LENGTH['Airbus A300F4-600 (twin-jet)'] = 54.08
AIRCRAFT_LENGTH['Airbus A319 (twin-jet)'] = 33.84
AIRCRAFT_LENGTH['Airbus A320 (twin-jet)'] = 37.57




                            <----SKIPPED LINES---->




AIRCRAFT_LENGTH['Cessna Citation Sovereign (twin-jet)'] = 19.35
AIRCRAFT_LENGTH['Cessna Citation V (twin-jet)'] = 14.91
AIRCRAFT_LENGTH['Cessna Citation X (twin-jet)'] = 22.04
AIRCRAFT_LENGTH['Cessna Citation Mustang (twin-jet)'] = 12.37
AIRCRAFT_LENGTH['Cessna Skyhawk (piston-single)'] = 8.28
AIRCRAFT_LENGTH['Cessna Skylane (piston-single)'] = 8.84
AIRCRAFT_LENGTH['Cessna T206 Turbo Stationair (piston-single)'] = 8.61
AIRCRAFT_LENGTH['Beechcraft Bonanza (33) (piston-single)'] = 7.65
AIRCRAFT_LENGTH['Beechcraft Super King Air 200 (twin-turboprop)'] = 13.31
AIRCRAFT_LENGTH['Beechcraft Super King Air 350 (twin-turboprop)'] = 14.22
AIRCRAFT_LENGTH['Beechcraft King Air 90 (twin-turboprop)'] = 10.82
AIRCRAFT_LENGTH['Learjet 45 (twin-jet)'] = 17.68
AIRCRAFT_LENGTH['Pilatus PC-12 (single-turboprop)'] = 14.4


def Log(message, file=None, rolling=None):
  """Write a message to a logfile along with a timestamp.

  Args:
    message: string message to write
    file: string representing file name and, if needed, path to the
      file to write to
    rolling: name of file that will keep only the last n lines of file
  """
  # can't define as a default parameter because LOGFILE name is potentially
  # modified based on SIMULATION flag
  if not file:
    file = LOGFILE

  # special case: for the main logfile, we always keep a rolling log
  if not rolling and file == LOGFILE:
    rolling = ROLLING_LOGFILE

  try:
    with open(file, 'a') as f:
      # skip the timestamp in simulation (except for the main logfile) so
      # that file diffs between runs are easier
      if not SIMULATION or file == LOGFILE:
        f.write('='*80+'\n')
        f.write(str(datetime.datetime.now(TZ))+'\n')
        f.write('\n')
      f.write(str(message)+'\n')
  except IOError:
    Log('Unable to append to ' + file)

  if rolling:
    Tail(file, rolling, lines_to_keep=ROLLING_LOG_SIZE)


def Tail(in_name, rolling_name, max_line_length=100, lines_to_keep=1000):
  """Fast pythonic implementation of tail -n.

  Args:
    in_name: name of file for which we want the tail
    rolling_name: name of file to write out
    max_line_length: since this uses seek to find the block of text near the end
      that has at most the lines_to_keep number of lines, we need to estimate
      the max line length over that block of text.  We can afford to be a
      little conservative here.
    lines_to_keep: how many lines to keep in the rolling file.

  Returns:
    Integer number of lines actually kept.
  """
  with open(in_name, 'r') as f:
    f.seek(0, os.SEEK_END)
    f_length = f.tell()

    bytes_to_read = min(max_line_length * lines_to_keep, f_length)
    f.seek(f_length - bytes_to_read)
    end_text = f.read()
    lines = end_text.split('\n')
    # perhaps the file was smaller than lines_to_keep lines, or many lines were
    # longer than max_line_length; in that case, the resulting text block will
    # potentially be smaller than lines_to_keep
    lines_to_keep = min(lines_to_keep, len(lines))

  with open(rolling_name, 'w') as f:
    f.write('\n'.join(lines[-lines_to_keep:]))

  return lines_to_keep
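

# A minimal usage sketch of Tail (hypothetical file names): it keeps only the
# last ROLLING_LOG_SIZE lines of a large log in a second, smaller file, which
# mirrors what Log() does for the main logfile after every append.
def _ExampleTailUsage():
  """Hedged example only; never called by the running program."""
  return Tail('log.txt', 'rolling_log.txt', lines_to_keep=ROLLING_LOG_SIZE)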




                            <----SKIPPED LINES---->





def LogTimes(times, threshold=0, title=''):
  """Logs elapsed-time messages from a list of (epoch, identifier) tuples."""
  total_time = times[-1][0] - times[0][0]
  if threshold and total_time < threshold:
    return
  msg = 'Code timing\n'
  if title:
    msg = '%s\n' % title
  msg += 'Total time: %.2fs\n' % total_time
  for n, t in enumerate(times[:-1]):
    msg += '%.2fs to get from reading %s to reading %s\n' % (
        times[n + 1][0] - t[0], t[1], times[n + 1][1])
  Log(msg)


def MaintainRollingWebLog(message, max_count, filename=None):
  """Maintains a rolling text file of at most max_count printed messages.

  Newest data at top and oldest data at the end, of at most max_count messages,
  where the delimiter between each message is identified by a special fixed
  string.

  Args:
    message: text message to prepend to the file.
    max_count: maximum number of messages to keep in the file; once this is
      exceeded, the oldest message is deleted.
    filename: the file to update.
  """
  # can't define as a default parameter because ROLLING_MESSAGE_FILE name is
  # potentially modified based on SIMULATION flag
  if not filename:
    filename = ROLLING_MESSAGE_FILE
  rolling_log_header = '='*(SPLITFLAP_CHARS_PER_LINE + 2)
  existing_file = ReadFile(filename)
  log_message_count = existing_file.count(rolling_log_header)
  if log_message_count >= max_count:
    message_start_list = [i for i in range(0, len(existing_file))
                          if existing_file[i:].startswith(rolling_log_header)]
    existing_file_to_keep = existing_file[:message_start_list[max_count - 1]]
  else:
    existing_file_to_keep = existing_file

  t = datetime.datetime.now(TZ).strftime('%m/%d/%Y, %H:%M:%S')
  new_message = (
      '\n'.join([rolling_log_header, t, '', message])
      + '\n' + existing_file_to_keep)
  try:
    with open(filename, 'w') as f:
      f.write(new_message)
  except IOError:
    Log('Unable to maintain rolling log at ' + filename)


def UtcToLocalTimeDifference(timezone=TIMEZONE):
  """Calculates number of seconds between UTC and given timezone.

  Returns number of seconds between UTC and given timezone; if no timezone
  given, uses TIMEZONE defined in global variable.

  Args:
    timezone: string representing a valid pytz timezone in pytz.all_timezones.

  Returns:
    Integer number of seconds.
  """
  utcnow = pytz.timezone('utc').localize(datetime.datetime.utcnow())
  home_time = utcnow.astimezone(pytz.timezone(timezone)).replace(tzinfo=None)
  system_time = utcnow.astimezone(tzlocal.get_localzone()).replace(tzinfo=None)

  offset = dateutil.relativedelta.relativedelta(home_time, system_time)
  offset_seconds = offset.hours * SECONDS_IN_HOUR
  return offset_seconds


def IntersectionForTwoPaths(pos1, bearing1, pos2, bearing2):
  """Identifies the lat/lon intersection given starting points and bearings.

  Math provided at: http://www.movable-type.co.uk/scripts/latlong.html in the
  section: "Intersection of two paths given start points and bearings"; returns
  the latitude and longitude for the intersection.

  Args:
    pos1: a 2-tuple defining (lat, lon) in decimal degrees
    bearing1: bearing of pos1
    pos2: a 2-tuple defining (lat, lon) in decimal degrees
    bearing2: bearing of pos2

  Returns:
    Point of intersection as a 2-tuple defining (lat, lon) in decimal degrees
  """
  sin = math.sin
  asin = math.asin
  cos = math.cos
  acos = math.acos
  atan2 = math.atan2
  sqrt = math.sqrt
  radians = math.radians
  degrees = math.degrees
  pi = math.pi

  (lat1, lon1) = pos1
  (lat2, lon2) = pos2
  phi1 = radians(lat1)
  lambda1 = radians(lon1)
  theta1 = radians(bearing1)
  phi2 = radians(lat2)
  lambda2 = radians(lon2)
  theta2 = radians(bearing2)

  delta12 = 2*asin(sqrt(sin((phi2-phi1)/2)**2+cos(phi1)*cos(
      phi2)*sin((lambda2-lambda1)/2)**2))
  thetaa = acos((sin(phi2) - sin(phi1)*cos(delta12)) / (sin(delta12)*cos(phi1)))
  thetab = acos((sin(phi1) - sin(phi2)*cos(delta12)) / (sin(delta12)*cos(phi2)))
  if sin(lambda2-lambda1) > 0:
    theta12 = thetaa
    theta21 = 2*pi - thetab
  else:
    theta12 = 2*pi - thetaa
    theta21 = thetab
  alpha1 = theta1 - theta12
  alpha2 = theta21 - theta2
  alpha3 = acos(-cos(alpha1)*cos(alpha2)+sin(alpha1)*sin(alpha2)*cos(delta12))
  delta13 = atan2(
      sin(delta12)*sin(alpha1)*sin(alpha2),
      cos(alpha2)+cos(alpha1)*cos(alpha3))
  phi3 = asin(sin(phi1)*cos(delta13)+cos(phi1)*sin(delta13)*cos(theta1))
  dlambda13 = atan2(
      sin(theta1)*sin(delta13)*cos(phi1), cos(delta13)-sin(phi1)*sin(phi3))
  lambda3 = lambda1 + dlambda13
  intersection = (degrees(phi3), degrees(lambda3))
  return intersection
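

# An illustrative sketch with hypothetical coordinates: two points at the same
# latitude, the western one heading northeast (45 degrees) and the eastern one
# heading northwest (315 degrees), should intersect north of and between them.
def _ExampleIntersectionForTwoPaths():
  """Hedged example only; never called by the running program."""
  west_point = (37.0, -123.0)
  east_point = (37.0, -122.0)
  lat, lon = IntersectionForTwoPaths(west_point, 45, east_point, 315)
  # Expect lat > 37.0 and -123.0 < lon < -122.0.
  return (lat, lon)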


def ConvertBearingToCompassDirection(bearing, length=3, pad=False):
  """Converts bearing (in deg) to a dir of 1, 2, or 3 chars (N, NW, NNW).

  Args:
    bearing: degrees to be converted
    length: if 1, 2, or 3, converts to one of 4, 8, or 16 headings:
      - 1: N, S, E, W
      - 2: SE, SW, etc. also valid
      - 3: NNW, ESE, etc. also valid
    pad: boolean indicating whether the direction should be right-justified
      to length characters

  Returns:
    String representation of the compass heading.
  """
  if not isinstance(bearing, numbers.Number):
    return bearing

  divisions = 2**(length+1)  # i.e.: 4, 8, or 16
  division_size = 360 / divisions  # i.e.: 90, 45, or 22.5
  bearing_number = round(bearing / division_size)

  if length == 1:
    directions = DIRECTIONS_4
  elif length == 2:
    directions = DIRECTIONS_8
  else:
    directions = DIRECTIONS_16

  direction = directions[bearing_number%divisions]
  if pad:




                            <----SKIPPED LINES---->




  if False in is_numeric:
    return None

  lat1, lon1, lat2, lon2 = [math.radians(x) for x in (*pos1, *pos2)]
  hav = (math.sin((lat2 - lat1) / 2.0)**2
         + math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2.0)**2)
  distance = 2 * RADIUS * math.asin(math.sqrt(hav))

  # Note: though pyproj has this, having trouble installing on rpi
  #az12, az21, distance = g.inv(lon1, lat1, lon2, lat2)

  return distance
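

# An illustrative sketch using approximate (hypothetical) coordinates for SFO;
# the great-circle distance from HOME should come out to several kilometers.
def _ExampleHaversineDistanceMeters():
  """Hedged example only; never called by the running program."""
  approximate_sfo = (37.6213, -122.3790)
  return HaversineDistanceMeters(HOME, approximate_sfo)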


def SpeedInMeters(speed_in_knots):
  """Converts speed in knots to speed in meters per second."""
  return speed_in_knots * METERS_PER_SECOND_IN_KNOTS


def MetersTraveled(speed_in_knots, seconds):
  """Converts speed in knots to distance in meters given elapsed sec."""
  return SpeedInMeters(speed_in_knots) * seconds


def ClosestKnownLocation(flight, seconds):
  """Returns the most recent location observation from a flight.

  Flights in the flight dictionary have their path maintained over all the time
  that the radio continues to observe the flight. This function identifies the
  closest in time observation in the path, given number of seconds after the
  canonical time (or before, if sec is negative).

  Args:
    flight: Flight dictionary of interest.
    seconds: Number of seconds after the canonical time of the flight (i.e.:
      now).

  Returns:
    Tuple:
    - Dictionary of location attributes including the following keys: speed,
      lat, lon, track, altitude, vertrate, now (which is a timestamp reflecting
      when these observations were made)
    - seconds in the past (as compared to the seconds requested) that this
      observation was made. That is, if a location at seconds=10 was requested,
      if the closest found location attributes were at time of 8 seconds, then
      this would be +2. Since the closest time is found, this can also be
      negative. Or alternatively, this can be thought of as the number of
      seconds still to project the movement for, where positive is the future.
  """
  now = flight['now']
  if 'persistent_path' not in flight:
    location = {
        'speed': flight.get('speed'),
        'lat': flight.get('lat'),
        'lon': flight.get('lon'),
        'track': flight.get('track'),
        'altitude': flight.get('altitude'),
        'vertrate': flight.get('vertrate'),
        'now': now}
    return (location, seconds)

  path = flight['persistent_path']
  path_timestamps = [p['now'] for p in path]
  absolute_deltas = [abs(seconds - (t - now)) for t in path_timestamps]

  min_delta = min(absolute_deltas)
  index = absolute_deltas.index(min_delta)
  closest_now_to_request = path[index]['now']
  closest_observation = {
      'speed': path[index].get('speed'),
      'lat': path[index].get('lat'),
      'lon': path[index].get('lon'),
      'track': path[index].get('track'),
      'altitude': path[index].get('altitude'),
      'vertrate': path[index].get('vertrate'),
      'now': closest_now_to_request}
  # i.e.: suppose:
  #       now = 15000
  #       closest_to_now = 15008
  #       request seconds was for 10
  # So there's still 2 more seconds to elapse until the flight is here
  time_delta_from_request = seconds - (closest_now_to_request - now)
  return (closest_observation, time_delta_from_request)
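

# An illustrative sketch with a minimal, hypothetical flight dictionary: a
# request for 7 seconds before the canonical time (1000 - 7 = 993) is closest
# to the path observation at t=995, leaving -2 seconds still to project.
def _ExampleClosestKnownLocation():
  """Hedged example only; never called by the running program."""
  flight = {
      'now': 1000.0,
      'persistent_path': [
          {'now': 990.0, 'lat': 37.60, 'lon': -122.40, 'speed': 250,
           'track': 180, 'altitude': 5000, 'vertrate': -500},
          {'now': 995.0, 'lat': 37.59, 'lon': -122.40, 'speed': 250,
           'track': 180, 'altitude': 4800, 'vertrate': -500}]}
  location, seconds_to_project = ClosestKnownLocation(flight, -7)
  # location is the t=995 observation; seconds_to_project == -2.
  return (location, seconds_to_project)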


def FlightAnglesSecondsElapsed(
    flight, seconds, key_suffix='', canonical_loc=False):
  """Returns angular position of flight given time elapsing from flight.

  As time elapses after the flight was first observed, it will be in a new
  position. That new position is based on the most up-to-date location details
  observed, as it may have been seen more recently than the original location
  details. Then, based on those most recent location details, we can estimate
  its new location at any given time by projecting the bearing, speed, etc. out
  in time.

  Args:
    flight: Flight dictionary of interest.
    seconds: Number of seconds after the canonical time of the flight (i.e.:
      now).
    key_suffix: Appended to the keys that are returned in the return dictionary.
    canonical_loc: Boolean indicating whether we should only examine the
      location details stored at seconds=0 in the path, which would be
      identical to that stored in the base dictionary itself. This provides
      access to the "original" reported loc details in the same format as the
      updated or more current values, primarily so that comparisons can be
      easily made between calculations that might fall back to the original
      values vs. the updated values.

  Returns:
    Dictionary of location attributes including the following keys:
    azimuth_degrees; altitude_degrees; ground_distance_feet;
    crow_distance_feet; lat; lon.
  """
  seconds_ahead_to_find_loc = seconds
  if canonical_loc:
    seconds_ahead_to_find_loc = 0

  (location, time_to_project) = ClosestKnownLocation(
      flight, seconds_ahead_to_find_loc)
  if not all([isinstance(x, numbers.Number) for x in (
      location.get('speed'),
      location.get('lat'),
      location.get('lon'),
      location.get('track'),
      location.get('altitude'))]):
    return {}

  if canonical_loc:
    time_to_project = seconds

  meters_traveled = MetersTraveled(location['speed'], time_to_project)
  new_position = TrajectoryLatLon(
      (location['lat'], location['lon']), meters_traveled, location['track'])
  angles = Angles(
      HOME, HOME_ALT, new_position, location['altitude'] / FEET_IN_METER)

  d = {}
  for key in angles:
    d[key + key_suffix] = angles[key]
  d['lat' + key_suffix] = location['lat']
  d['lon' + key_suffix] = location['lon']

  return d


def Angles(pos1, altitude1, pos2, altitude2):
  """Calculates the angular position of pos 2 from pos 1.

  Calculates the azimuth and the angular altitude to see point 2 from point 1,
  as well as two distance metrics: the "ground distance" and "crow distance".
  Ground is the distance between a plumb line to sea level for the two points;
  crow also takes into account the difference in altitude or elevation, and is
  the distance a bird would have to fly to reach the second point from the
  first.

  Args:
    pos1: a 2-tuple of lat-lon for the first point (i.e.: HOME), in degrees.
    altitude1: height above sea level of pos1, in meters
    pos2: a 2-tuple of lat-lon for the first point (i.e.: the plane), in
      degrees.
    altitude2: height above sea level of pos2, in meters

  Returns:
    Dictionary of location attributes including the following keys:
    azimuth_degrees; altitude_degrees; ground_distance_feet; crow_distance_feet.
  """
  sin = math.sin
  cos = math.cos
  atan2 = math.atan2
  atan = math.atan
  sqrt = math.sqrt
  radians = math.radians
  degrees = math.degrees

  if not all([isinstance(x, numbers.Number) for x in (
      *pos1, altitude1, *pos2, altitude2)]):
    return None


  # from home to plumb line of plane
  distance = HaversineDistanceMeters(pos1, pos2)
  lat1, lon1, lat2, lon2 = [radians(x) for x in (*pos1, *pos2)]
  d_lon = lon2 - lon1
  # azimuth calc from https://www.omnicalculator.com/other/azimuth
  az = atan2(
      (sin(d_lon)*cos(lat2)),
      (cos(lat1)*sin(lat2)-sin(lat1)*cos(lat2)*cos(d_lon)))
  az_degrees = degrees(az)
  altitude = altitude2 - altitude1
  alt = atan(altitude / distance)
  alt_degrees = degrees(alt)
  crow_distance = sqrt(altitude**2 + distance**2)  # from home to the plane

  # Note: HaversineDistanceMeters returns meters, so despite the '_feet' key
  # names, the two distances returned here are expressed in meters.
  return {'azimuth_degrees': az_degrees, 'altitude_degrees': alt_degrees,
          'ground_distance_feet': distance, 'crow_distance_feet': crow_distance}
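

# An illustrative sketch with hypothetical values: a plane a few kilometers
# roughly due east of HOME and well above it should come back with an azimuth
# close to 90 degrees and a positive altitude angle.
def _ExampleAngles():
  """Hedged example only; never called by the running program."""
  plane_position = (HOME_LAT, HOME_LON + 0.05)  # ~4-5 km east of HOME
  plane_altitude_meters = 1000
  return Angles(HOME, HOME_ALT, plane_position, plane_altitude_meters)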


def TrajectoryLatLon(pos, distance, track):
  """Returns lat/lon plane will be given starting point and direction / speed.

  Args:
    pos: a 2-tuple of lat-lon for the flight, in degrees.
    distance: the distance, in meters, the flight is traveling from its current
      lat/lon.
    track: the track or bearing of the plane, in degrees.

  Returns:
    Updated lat/lon for the given trajectory.
  """
  #distance in meters
  #track in degrees
  sin = math.sin
  cos = math.cos
  atan2 = math.atan2
  asin = math.asin
  radians = math.radians
  degrees = math.degrees

  track = radians(track)
  lat1 = radians(pos[0])
  lon1 = radians(pos[1])

  d_div_R = distance/RADIUS
  lat2 = asin(sin(lat1)*cos(d_div_R) + cos(lat1)*sin(d_div_R)*cos(track))
  lon2 = lon1 + atan2(
      sin(track)*sin(d_div_R)*cos(lat1),
      cos(d_div_R)-sin(lat1)*sin(lat2))

  lat2_degrees = degrees(lat2)
  lon2_degrees = degrees(lon2)
  return (lat2_degrees, lon2_degrees)
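

# An illustrative sketch: traveling one nautical mile (1852 m) due north from
# HOME should raise the latitude by roughly one arcminute (~0.0167 degrees)
# while leaving the longitude essentially unchanged.
def _ExampleTrajectoryLatLon():
  """Hedged example only; never called by the running program."""
  new_lat, new_lon = TrajectoryLatLon(HOME, 1852, 0)
  # Expect new_lat - HOME_LAT of about 1/60 degree and new_lon == HOME_LON.
  return (new_lat, new_lon)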


def MinMetersToHome(pos, bearing):
  """Identifies the minimum distance between a given trajectory and HOME.

  Given a trajectory (bearing and lat/lon position), finds the minimum distance
  (in meters) that that trajectory will come to another point.

  Args:
    pos: a 2-tuple defining (lat, lon) in decimal degrees
    bearing: the bearing, or heading, of the trajectory, in degrees

  Returns:
    Minimum distance in meters.
  """
  is_numeric = [isinstance(x, numbers.Number) for x in (*pos, bearing)]
  if False in is_numeric:
    return None

  # To find the minimum distance, we must first find the point at which the
  # minimum distance will occur, which in turn is accomplished by finding the
  # intersection between that trajectory and a trajectory orthogonal (+90
  # degrees, or -90 degrees) to it but intersecting HOME.
  potential_intersection1 = IntersectionForTwoPaths(
      pos, bearing, HOME, bearing + 90)
  potential_intersection2 = IntersectionForTwoPaths(
      pos, bearing, HOME, bearing - 90)
  potential_distance1 = HaversineDistanceMeters(potential_intersection1, HOME)
  potential_distance2 = HaversineDistanceMeters(potential_intersection2, HOME)

  # Since one of those two potential intersection points (i.e.: +90 or -90
  # degrees) will produce a nonsensical, far-away result, and given the strong
  # locality to HOME that is expected from the initial position, the "correct"
  # result is identified by simply taking the minimum of the two candidate
  # distances.
  return min(potential_distance1, potential_distance2)
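

# An illustrative sketch with a hypothetical trajectory: a flight a few
# kilometers due east of HOME and heading due north should have a minimum
# distance to HOME of roughly its current east-west offset.
def _ExampleMinMetersToHome():
  """Hedged example only; never called by the running program."""
  position_east_of_home = (HOME_LAT, HOME_LON + 0.05)
  heading_due_north = 0
  return MinMetersToHome(position_east_of_home, heading_due_north)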


def SecondsToHhMm(seconds, colon=False):
  """Converts integer number of seconds to xhym string (i.e.: 7h17m) or to 7:17.

  Args:
    seconds: number of seconds
    colon: controls format; if False, format is 7h17m; if True, format is 7:17.

  Returns:
    String representation of hours and minutes.
  """
  if seconds is None:
    return KEY_NOT_PRESENT_STRING[:3]
  minutes = int(abs(seconds) / SECONDS_IN_MINUTE)
  if minutes > MINUTES_IN_HOUR:
    hours = int(minutes / MINUTES_IN_HOUR)
    minutes = minutes % MINUTES_IN_HOUR
    if colon:




                            <----SKIPPED LINES---->




def HourString(flight):
  """Formats now on flight into a a 3-digit string like '12a' or ' 1p'."""
  time_string = DisplayTime(flight)
  if time_string:
    hour_string = time_string[11:13]
    hour_0_23 = int(hour_string)
    is_pm = int(hour_0_23/12) == 1
    hour_number = hour_0_23 % 12
    if hour_number == 0:
      hour_number = 12
    out_string = str(hour_number).rjust(2)
    if is_pm:
      out_string += 'p'
    else:
      out_string += 'a'
  else:
    out_string = KEY_NOT_PRESENT_STRING
  return out_string


def MinuteOfDay(ts=None):
  """Returns integer minute of day (0..1439) for given timestamp or for now."""
  if ts is None:
    ts = time.time()
  dt = datetime.datetime.fromtimestamp(ts, TZ)
  minute_of_day = dt.hour * MINUTES_IN_HOUR + dt.minute
  return minute_of_day


def HoursSinceMidnight(timezone=TIMEZONE):
  """Returns float number of hours elapsed since midnight in given timezone."""
  tz = pytz.timezone(timezone)
  now = datetime.datetime.now(tz)
  seconds_since_midnight = (now - now.replace(
      hour=0, minute=0, second=0, microsecond=0)).total_seconds()
  hours = seconds_since_midnight / SECONDS_IN_HOUR
  return hours


def HoursSinceFlight(now, then):
  """Returns the number of hours between a timestamp and a flight.

  Args:
    now: timezone-aware datetime representation of timestamp
    then: epoch (float)

  Returns:
    Number of hours between now and then (i.e.: now - then; a positive return
    value means now occurred after then).
  """
  then = datetime.datetime.fromtimestamp(then, TZ)
  delta = now - then
  delta_hours = delta.days * HOURS_IN_DAY + delta.seconds / SECONDS_IN_HOUR
  return delta_hours


def DataHistoryHours(flights):
  """Calculates the number of hours between the earliest & last flight in data.

  Args:
    flights: List of all flights in sequential order, so that the first in the
      list is earliest in time.

  Returns:
    Time difference in hours between the first flight and last flight.
  """
  min_time = flights[0]['now']
  max_time = flights[-1]['now']
  delta_hours = (max_time - min_time) / SECONDS_IN_HOUR
  return round(delta_hours)


def ReadFile(filename, log_exception=False):
  """Returns text from the given file name if available, empty string if not.

  Args:
    filename: string of the filename to open, potentially also including the
      full path.
    log_exception: boolean indicating whether to log an exception if file not
      found.

  Returns:
    Return text string of file contents.
  """
  try:
    with open(filename, 'r') as content_file:
      file_contents = content_file.read()
  except IOError:
    if log_exception:
      Log('Unable to read '+filename)
    return ''
  return file_contents

# Because reading is ~25x more expensive than getmtime, we will only read &
# parse if the getmtime is more recent than the last call for this file. So
# this dict stores, per file, a tuple of the last time read & the resulting
# parsed return value.
CACHED_FILES = {}
def ReadAndParseSettings(filename):
  """Reads filename and parses the resulting key-value pairs into a dict."""
  global CACHED_FILES
  (last_read_time, settings) = CACHED_FILES.get(filename, (0, {}))
  if os.path.exists(filename):
    last_modified = os.path.getmtime(filename)
    if last_modified > last_read_time:
      setting_str = ReadFile(filename)
      settings = ParseSettings(setting_str)
      CACHED_FILES[filename] = (last_modified, settings)
    return settings

  # File does not exist - or at least no longer exists - so remove the cache
  if filename in CACHED_FILES:
    CACHED_FILES.pop(filename)

  return {}


def BuildSettings(d):
  """Converts dict to a string of form key1=value1;...;keyn=valuen."""
  kv_pairs = []
  for key in sorted(list(d.keys())):
    kv_pairs.append('%s=%s' % (key, d[key]))
  s = ';'.join(kv_pairs)
  if s:  # add terminating semicolon
    s += ';'
  return s


def ParseSettings(settings):
  """Parse delimited string of settings in file to a dict of key value pairs.

  Parses a string like 'distance=1426;altitude=32559;on=23;off=24;delay=15;
  insights=all;' into key value pairs.

  Args:
    settings: semicolon-delimited sequence of equal-sign delimited key-value
      pairs, i.e.: key1=value1;key2=value2;....;keyn=valuen.

  Returns:
    Dict of key value pairs contained in the setting file; empty dict if file
    not available or if delimiters missing.
  """
  settings_dict = {}
  for setting in settings.split(';'):
    if '=' in setting:
      kv_list = setting.split('=')
      k = kv_list[0]
      v = kv_list[1]
      if v.isdigit():
        v = int(v)
      else:
        try:
          v = float(v)
        except ValueError:
          pass
      settings_dict[k] = v

  return settings_dict
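

# A minimal round-trip sketch of the settings format (hypothetical keys and
# values): BuildSettings and ParseSettings are intended to be inverses for
# simple scalar values.
def _ExampleSettingsRoundTrip():
  """Hedged example only; never called by the running program."""
  settings_str = BuildSettings({'distance': 1426, 'insights': 'all'})
  # settings_str == 'distance=1426;insights=all;'
  return ParseSettings(settings_str)  # {'distance': 1426, 'insights': 'all'}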


def RemoveSetting(configuration, setting):
  """Removes the named setting from the configuration file."""
  configuration.pop(setting)
  configuration = BuildSettings(configuration)
  WriteFile(CONFIG_FILE, configuration)
  return configuration


def WriteFile(filename, text, log_exception=False):
  """Writes the text to the file, returning boolean indicating success.

  Args:
    filename: string of the filename to open, potentially also including the
      full path.
    text: the text to write
    log_exception: boolean indicating whether to log an exception if file not
      found.

  Returns:
    Boolean indicating whether the write was successful.
  """
  try:
    with open(filename, 'w') as content_file:
      content_file.write(text)
  except IOError:
    if log_exception:
      Log('Unable to write to '+filename)
    return False
  return True


def PrependFileName(full_path, prefix):
  """Converts /dir/file.png to /dir/prefixfile.png."""
  directory, file_name = os.path.split(full_path)
  file_name = prefix+file_name
  return os.path.join(directory, file_name)


def UnpickleObjectFromFile(
    full_path, date_segmentation, max_days=None, filenames=False):
  """Load a repository of pickled data into memory.

  Args:
    full_path: name (potentially including path) of the pickled file
    date_segmentation: If true, searches for all files whose names are the
      file name in full_path prefixed with yyyy-mm-dd-, and loads them in
      sequence for unpickling; if false, uses the full_path as is and loads
      just that single file.
    max_days: Integer that, if specified, indicates maximum number of days of
      files to load back in; otherwise, loads all.  That is, at most max_days
      files will be read.
    filenames: If true, rather than returning the list of data, returns a list
      of the filenames that would have been read.

  Returns:
    Return a list - either of the data, or of all the file names that would
    have been read.
  """
  if date_segmentation:
    directory, file = os.path.split(full_path)

    d = '[0-9]'
    sep = '-'
    date_format = d*4 + sep + d*2 + sep + d*2  # yyyy-mm-dd
    exp = date_format + sep + file
    pattern = re.compile(exp)
    files = os.listdir(directory)

    if max_days:  # no need to read any files older than x days
      earliest_date = EpochDisplayTime(
          time.time() - (max_days - 1) * SECONDS_IN_DAY, '%Y-%m-%d')
      files = [f for f in files if f[:10] >= earliest_date]

    files = sorted(
        [os.path.join(directory, f) for f in files if pattern.match(f)])
  else:
    if os.path.exists(full_path):
      files = [full_path]
    else:
      return []

  data = []

  if filenames:
    return files

  for file in files:
    try:
      with open(file, 'rb') as f:
        while True:
          try:
            data.append(pickle.load(f))
          except UnicodeDecodeError as e:
            Log('Process %s reading file %s gave error %s' % (
                psutil.Process(os.getpid()).name(), file, e))
    except (EOFError, pickle.UnpicklingError):
      pass




  return data


cached_object_count = {}
def PickleObjectToFile(
    data, full_path, date_segmentation, timestamp=None, verify=False):
  """Append one pickled flight to the end of binary file.

  Args:
    data: data to pickle
    full_path: name (potentially including path) of the pickled file
    date_segmentation: boolean indicating whether the date string yyyy-mm-dd
      should be prepended to the file name in full_path based on the current
      date, so that pickled files are segmented by date.
    timestamp: if date_segmentation is True, this is used rather than system
      time to generate the file name.
    verify: boolean indicating if we should verify that the pickled file object
      count increments by one, rewriting entire pickle file if it doesn't. Note
      that since this requires reading the entire pickle file and unpickling,
      it should only be done for small files / objects.

  Returns:
    Name of file to which the data was pickled if successful; None if failed.
  """
  global cached_object_count
  if not timestamp:
    timestamp = time.time()
  date_suffix = EpochDisplayTime(timestamp, '%Y-%m-%d-')
  if date_segmentation:
    full_path = PrependFileName(full_path, date_suffix)

  if full_path not in cached_object_count:
    cached_object_count[full_path] = len(
        UnpickleObjectFromFile(full_path, False))
  if not os.path.exists(full_path):  # Another method may delete the file
    cached_object_count[full_path] = 0

  try:
    with open(full_path, 'ab') as f:
      f.write(pickle.dumps(data))

  except IOError:
    Log('Unable to append pickle ' + full_path)
    return None

  if verify:
    # file object count should now be one more; if it isn't, the file is
    # corrupted, and rather than continue writing to a corrupted pickle file,
    # we should fix it so we don't lose too much data
    pickled_data = UnpickleObjectFromFile(full_path, False)
    cached_count = cached_object_count[full_path]
    if len(pickled_data) == cached_count + 1:
      cached_object_count[full_path] = cached_count + 1
    else:
      tmp_file_name = full_path + '.tmp'
      try:
        with open(tmp_file_name, 'ab') as f:
          for d in pickled_data:  # rewrite the old data that was retained
            f.write(pickle.dumps(d))
          f.write(pickle.dumps(data))  # new data
      except IOError:
        Log('Unable to append pickle %s in verify step; left tmp file as-is' %
            tmp_file_name)
        return None
      shutil.move(tmp_file_name, full_path)
      cached_object_count[full_path] = len(pickled_data) + 1
      Log('Re-pickled %s: after writing %s, expected len %d to increment, '
          'but it did not; after repickling (and adding the new '
          'data), new length = %d' % (
              full_path, data, cached_count, cached_object_count[full_path]))

  return full_path
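

# A minimal sketch (hypothetical file name) of how the two pickle helpers pair
# up: appending with date_segmentation=True writes to a yyyy-mm-dd-prefixed
# file, and unpickling with date_segmentation=True gathers those per-day files
# back into a single list.
def _ExamplePickleRoundTrip():
  """Hedged example only; never called by the running program."""
  hypothetical_path = 'pickle/example.pk'
  PickleObjectToFile({'flight_number': 'SWA1234'}, hypothetical_path, True)
  return UnpickleObjectFromFile(hypothetical_path, True, max_days=1)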


def UpdateAircraftList(
    persistent_nearby_aircraft, current_nearby_aircraft, now):
  """Identifies newly seen aircraft and removes aircraft not recently seen.

  Updates persistent_nearby_aircraft as follows: flights that have been last
  seen more than PERSISTENCE_SECONDS seconds ago are removed; new flights in
  current_nearby_aircraft are added. Also identifies newly-seen aircraft and
  updates the last-seen timestamp of flights that have been seen again.

  Args:
    persistent_nearby_aircraft: dictionary where keys are flight number /
      squawk tuples, and the values are the time the flight was last seen.
    current_nearby_aircraft: dictionary where keys are flight numbers / squawk
      tuples, and the values are themselves dictionaries with key-value pairs
      about that flight, with at least one of the kv-pairs being the time the
      flight was seen.
    now: the timestamp of the flights in the current_nearby_aircraft.

  Returns:
    A list of newly-nearby flight identifiers (i.e.: 2-tuple of flight number
    squawk).
  """
  newly_nearby_flight_identifiers = []
  for flight_identifier in current_nearby_aircraft:
    flight_number = flight_identifier[0]
    # Only add it to the list once we've received a flight number
    if flight_identifier not in persistent_nearby_aircraft and flight_number:
      newly_nearby_flight_identifiers.append(flight_identifier)
    persistent_nearby_aircraft[flight_identifier] = now

  flights_to_delete = []
  for flight_identifier in persistent_nearby_aircraft:
    if (flight_identifier not in current_nearby_aircraft
        and (now - persistent_nearby_aircraft[flight_identifier]) >
        PERSISTENCE_SECONDS):
      flights_to_delete.append(flight_identifier)
  for flight_identifier in flights_to_delete:
    del persistent_nearby_aircraft[flight_identifier]
  return newly_nearby_flight_identifiers


def ScanForNewFlights(persistent_nearby_aircraft, persistent_path, log_jsons):
  """Determines if there are any new aircraft in the radio message.

  The radio is continuously dumping new json messages to the Raspberry Pi with
  all the flights currently observed. This function picks up the latest radio
  json and, for any new nearby flights - there should generally be at most one
  new flight on each pass through - gets additional flight data from
  FlightAware and augments the flight definition with the relevant fields to
  keep.

  Args:
    persistent_nearby_aircraft: dictionary where keys are flight numbers, and
      the values are the time the flight was last seen.
    persistent_path: dictionary where keys are flight numbers, and the values
      are a sequential list of the location-attributes in the json file; allows
      for tracking the flight path over time.
    log_jsons: boolean indicating whether we should pickle the JSONs.

  Returns:
    A tuple:
    - updated persistent_nearby_aircraft
    - (possibly empty) dictionary of flight attributes of the new flight upon
      its first observation.
    - the time of the radio observation if present; None if no radio dump
    - a dictionary of attributes about the dump itself (i.e.: # of flights;
      furthest observed flight, etc.)
    - persistent_path, a data structure containing past details of a flight's
      location as described in ParseDumpJson
  """
  flight_details = {}
  now = time.time()
  if SIMULATION:
    (dump_json, json_time) = DUMP_JSONS[SIMULATION_COUNTER]
  else:
    dump_json = ReadFile(DUMP_JSON_FILE, log_exception=True)

  json_desc_dict = {}
  current_nearby_aircraft = {}
  if dump_json:
    (current_nearby_aircraft, now,
     json_desc_dict, persistent_path) = ParseDumpJson(
         dump_json, persistent_path)

    if not SIMULATION and log_jsons:
      PickleObjectToFile((dump_json, now), PICKLE_DUMP_JSON_FILE, True)

    newly_nearby_flight_identifiers = UpdateAircraftList(
        persistent_nearby_aircraft, current_nearby_aircraft, now)

    if newly_nearby_flight_identifiers:

      if len(newly_nearby_flight_identifiers) > 1:
        newly_nearby_flight_identifiers_str = ', '.join(
            str(identifier) for identifier in newly_nearby_flight_identifiers)
        newly_nearby_flight_details_str = '\n'.join([
            str(current_nearby_aircraft[f])
            for f in newly_nearby_flight_identifiers])
        Log('Multiple newly-nearby flights: %s\n%s' % (
            newly_nearby_flight_identifiers_str,
            newly_nearby_flight_details_str))
      flight_identifier = newly_nearby_flight_identifiers[0]

      flight_aware_json = {}
      if SIMULATION:
        json_times = [j[1] for j in FA_JSONS]
        if json_time in json_times:
          flight_aware_json = FA_JSONS[json_times.index(json_time)][0]
      elif flight_identifier[0]:
        flight_number = flight_identifier[0]
        flight_aware_json, error_message = GetFlightAwareJson(flight_number)
        if flight_aware_json:
          UpdateStatusLight(GPIO_ERROR_FLIGHT_AWARE_CONNECTION, False)
        else:
          failure_message = 'No json from Flightaware for flight %s: %s' % (
              flight_number, error_message[:500])
          Log(failure_message)
          UpdateStatusLight(
              GPIO_ERROR_FLIGHT_AWARE_CONNECTION, True, failure_message)

      flight_details = {}
      if flight_aware_json:
        flight_details = ParseFlightAwareJson(flight_aware_json)

      if not SIMULATION and log_jsons:
        PickleObjectToFile((flight_aware_json, now), PICKLE_FA_JSON_FILE, True)

      # Augment FlightAware details with radio / radio-derived details
      flight_details.update(current_nearby_aircraft[flight_identifier])

      # Augment with the past location data; the [1] is because
      # persistent_path[key] is actually a 2-tuple, the first element being
      # the most recent time seen and the second element being the actual
      # path; we no longer need to keep the most recent time seen here.
      flight_details['persistent_path'] = persistent_path[flight_identifier][1]

  return (
      persistent_nearby_aircraft,
      flight_details,
      now,
      json_desc_dict,
      persistent_path)


def DescribeDumpJson(parsed):
  """Generates dict with descriptive attributes about the dump json file.

  Args:
    parsed: The parsed json file.

  Returns:
    Dictionary with attributes about radio range, number of flights seen, etc.
  """
  json_desc_dict = {}
  json_desc_dict['now'] = parsed['now']

  aircraft = [a for a in parsed['aircraft'] if a['seen'] < PERSISTENCE_SECONDS]
  json_desc_dict['radio_range_flights'] = len(aircraft)

  aircraft_with_pos = [a for a in aircraft if 'lat' in a and 'lon' in a]
  current_distances = [HaversineDistanceMeters(
      HOME, (a['lat'], a['lon'])) for a in aircraft_with_pos]
  current_distances = [
      d * FEET_IN_METER / FEET_IN_MILE
      for d in current_distances if d is not None]
  if current_distances:
    json_desc_dict['radio_range_miles'] = max(current_distances)

  return json_desc_dict


def SameFlight(f1, f2):
  """True if these two flights are likely the same flight, False otherwise."""
  if f1['flight_number'] == f2['flight_number']:
    return True
  if f1['squawk'] == f2['squawk']:
    return True
  return False


def MergedIdentifier(proposed_id, existing_ids):
  """Identifies what identifier to use for a flight.

  While most flights have both a squawk and a flight number, enough of them
  are missing one (only for it to appear later) that a 2-tuple of both is
  used as the persistent identifier across time, merging flights that share
  a common non-null flight number and/or squawk.

  Additionally, in very limited circumstances, a squawk may change mid-flight;
  in that case, the alphabetically-first squawk is used.

  This function identifies which identifier to use, and which - if any - should
  be merged into that one identifier from a group of existing identifiers.

  Args:
    proposed_id: The 2-tuple of (flight_number, squawk) of the identified
      flight.
    existing_ids: An iterable of existing 2-tuple identifiers, some (or none)
      of which may overlap with this flight.

  Returns:
    2-tuple:
      - the 2-tuple suggested identifier to use
      - a potentially empty list of ids to merge with the suggested identifier
  """
  flight_number, squawk = proposed_id

  def CheckPartialMatch(value, position):
    if value is not None:
      return [
          e for e in existing_ids if e[position] == value and e != proposed_id]
    return []

  matches = CheckPartialMatch(flight_number, 0)
  matches.extend(CheckPartialMatch(squawk, 1))

  if not matches:
    return proposed_id, []

  if not flight_number and matches:
    # arbitrarily choose alpha-first non-null flight_number
    matching_flight_numbers = [m[0] for m in matches if m[0] is not None]
    if matching_flight_numbers:
      flight_number = sorted(matching_flight_numbers)[0]
  if not squawk and matches:
    # arbitrarily choose alpha-first non-null squawk
    matching_squawks = [m[1] for m in matches if m[1] is not None]
    if matching_squawks:
      squawk = sorted(matching_squawks)[0]
  id_to_use = (flight_number, squawk)

  return id_to_use, matches
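

# An illustrative sketch with hypothetical identifiers: a flight first seen
# with only a squawk, and later seen with both a flight number and that same
# squawk, should be merged under the fully-populated identifier.
def _ExampleMergedIdentifier():
  """Hedged example only; never called by the running program."""
  existing_ids = [(None, '1200'), ('UAL300', '4321')]
  id_to_use, ids_to_merge = MergedIdentifier(('SWA1234', '1200'), existing_ids)
  # id_to_use == ('SWA1234', '1200'); ids_to_merge == [(None, '1200')]
  return (id_to_use, ids_to_merge)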


def MergePersistentPath(id_to_use, ids_to_merge, persistent_path):
  """Merges the persistent paths from multiple flights into a single flight.

  Since the identifiers may change over time of a flight for which we have
  already recorded some in-flight path history, this function allows us to
  combine all the persistent path details and merge it into a single flight.
  For instance, we may have only a squawk for a few seconds, which then changes
  mid-flight to another squawk, and then a few seconds later, we receive a
  radio signal with both the (new) squawk and flight number - thus we have
  three records to merge to one.

  This function merges all the persistent paths - which are 2-tuples of the most
  recent timestamp and a list of dictionaries - into one integrated persistent
  path.

  Args:
    id_to_use: The 2-tuple of (flight_number, squawk) of the final id we want
      the flight to have.
    ids_to_merge: an iterable of the ids to merge with the final id_to_use.
    persistent_path: the dictionary of existing persistent paths including at
      least ids_to_merge as keys, potentially also id_to_use, and perhaps
      additional flights as well.

  Returns:
    The merged persistent path, that includes id_to_use as one key, and removed
    ids_to_merge.
  """
  path = []
  timestamps = []

  if id_to_use in persistent_path and id_to_use not in ids_to_merge:
    ids_to_merge.append(id_to_use)

  for i in ids_to_merge:
    timestamps.append(persistent_path[i][0])
    path.extend(persistent_path[i][1])
    persistent_path.pop(i)
  persistent_path[id_to_use] = (
      max(timestamps), sorted(path, key=lambda p: p['now']))

  return persistent_path


def ParseDumpJson(dump_json, persistent_path):
  """Identifies all planes within given distance of home from the dump1090 file.

  Since the dump1090 json will have messages from all flights that the antenna
  has picked up, we want to keep only flights that are within a relevant
  distance to us, and also to extract from the full set of data in the json
  just the relevant fields for additional analysis.

  While most flights have both a squawk and a flight number, enough of them
  are missing one (only for it to appear later) that a 2-tuple of both is
  used as the persistent identifier across time, merging flights that share
  a common non-null flight number and/or squawk.

  Args:
    dump_json: The text representation of the json message from
      dump1090-mutability
    persistent_path: dictionary where keys are flight numbers, and the values
      are a sequential list of the location-attributes in the json file; allows
      for tracking the flight path over time.

  Returns:
    Return tuple:
    - dictionary of all nearby planes, where keys are (flight number, squawk)
      2-tuples, and the value is itself a dictionary of attributes.
    - time stamp in the json file.
    - dictionary of attributes about the radio range
    - persistent dictionary of the track of recent flights, where keys are the
      same 2-tuple identifiers and the value is a tuple, the first element
      being when the flight was last seen by this radio, and the second a list
      of dictionaries with past location info from the radio where it's been
      seen, i.e.: d[flight] = (timestamp, [{}, {}, {}])
  """
  parsed = json.loads(dump_json)
  now = parsed['now']
  nearby_aircraft = {}

  # Build dictionary summarizing characteristics of the dump_json itself
  json_desc_dict = DescribeDumpJson(parsed)

  for aircraft in parsed['aircraft']:
    simplified_aircraft = {}

    simplified_aircraft['now'] = now

    # flight_number
    flight_number = aircraft.get('flight')
    if flight_number:
      flight_number = flight_number.strip()

    # squawk
    squawk = aircraft.get('squawk')
    if squawk:
      squawk = squawk.strip()

    identifier = (flight_number, squawk)

    # merge any duplicate flights: since the id for nearby_aircraft &
    # persistent_path is the 2-tuple (flight_number, squawk), it's possible for
    # a flight to add or drop one of those two elements over time as the radio
    # signal comes in / falls out. Let's keep the identifier as the non-null
    # values as soon as one is seen.
    id_to_use, ids_to_merge = MergedIdentifier(
        identifier, persistent_path.keys())

    # Now we need to rename any flight paths with that partial identifier to
    # have the correct new merged_identifier
    if ids_to_merge:
      persistent_path = MergePersistentPath(
          id_to_use, ids_to_merge, persistent_path)

    if 'lat' in aircraft and 'lon' in aircraft:
      lat = aircraft['lat']
      lon = aircraft['lon']
      if isinstance(lat, numbers.Number) and isinstance(lon, numbers.Number):

        simplified_aircraft['lat'] = lat
        simplified_aircraft['lon'] = lon

        altitude = aircraft.get('altitude', aircraft.get('alt_baro'))
        if isinstance(altitude, numbers.Number):
          simplified_aircraft['altitude'] = altitude

        speed = aircraft.get('speed', aircraft.get('gs'))
        if speed is not None:
          simplified_aircraft['speed'] = speed

        vert_rate = aircraft.get('vert_rate', aircraft.get('baro_rate'))
        if vert_rate is not None:
          simplified_aircraft['vert_rate'] = vert_rate

        track = aircraft.get('track')
        if isinstance(track, numbers.Number):
          min_meters = MinMetersToHome((lat, lon), track)
          simplified_aircraft['track'] = track
          simplified_aircraft['min_feet'] = min_meters * FEET_IN_METER

          # TODO: describe why we want to base this off haversine distance (
          # i.e.: the actual distance from home) vs. MinMetersToHome (i.e.:
          # forecasted min distance from home); it seems like the latter would
          # give us more time to respond? - maybe because there might be other
          # closer flights even though a far away flight might look like it's
          # going to come nearby?
          haversine_distance_meters = HaversineDistanceMeters(HOME, (lat, lon))
          simplified_aircraft['distance'] = haversine_distance_meters
          if haversine_distance_meters < MIN_METERS:
            #nearby_aircraft[id_to_use]['distance'] = haversine_distance_meters
            nearby_aircraft[id_to_use] = simplified_aircraft
            if flight_number:
              nearby_aircraft[id_to_use]['flight_number'] = flight_number
            if squawk:
              nearby_aircraft[id_to_use]['squawk'] = squawk
            # aircraft classification:
            # https://github.com/wiedehopf/adsb-wiki/wiki/
            # ADS-B-aircraft-categories

            category = aircraft.get('category')
            if category is not None:
              nearby_aircraft[id_to_use]['category'] = category

        # keep all that track info - once we start reporting on a nearby
        # flight, it will become part of the flight's persistent record. Also,
        # note that as we are building a list of tracks for each flight, and we
        # are later assigning the flight dictionary to point to the list, we
        # just simply need to continue updating this list to keep the
        # dictionary up to date (i.e.: we don't need to directly touch the
        # flights dictionary in main).
        (last_seen, current_path) = persistent_path.get(id_to_use, (None, []))
        if (  # flight position has been updated with this radio signal
            not current_path or
            simplified_aircraft.get('lat') != current_path[-1].get('lat') or
            simplified_aircraft.get('lon') != current_path[-1].get('lon')):
          current_path.append(simplified_aircraft)
        persistent_path[id_to_use] = (now, current_path)

  # if the flight was last seen too far in the past, remove the track info
  for f in list(persistent_path.keys()):
    (last_seen, current_path) = persistent_path[f]
    if last_seen < now - PERSISTENCE_SECONDS:
      persistent_path.pop(f)

  return (nearby_aircraft, now, json_desc_dict, persistent_path)


def GetFlightAwareJson(flight_number):
  """Scrapes the text json message from FlightAware for a given flight number.

  Given a flight number, loads the corresponding FlightAware webpage for that
  flight and extracts the relevant script that contains all the flight details
  from that page.

  Args:
    flight_number: text flight number (i.e.: SWA1234)

  Returns:
    Two tuple:
     - Text representation of the json message from FlightAware.
     - Text string of error message, if any
  """
  url = 'https://flightaware.com/live/flight/' + flight_number
  try:
    response = requests.get(url)
  except requests.exceptions.RequestException as e:
    error_msg = 'Unable to query FA for URL due to %s: %s' % (e, url)
    Log(error_msg)
    return '', error_msg
  soup = bs4.BeautifulSoup(response.text, 'html.parser')
  scripts = soup.find_all('script')
  flight_script = None
  for script in scripts:
    if "trackpollBootstrap" in str(script):
      flight_script = str(script)
      break
  if not flight_script:
    error_msg = (
        'Unable to find trackpollBootstrap script in page: ' + response.text)
    Log(error_msg)
    return '', error_msg
  first_open_curly_brace = flight_script.find('{')
  last_close_curly_brace = flight_script.rfind('}')
  flight_json = flight_script[first_open_curly_brace:last_close_curly_brace+1]
  return flight_json, ''
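

# A minimal, self-contained sketch (not called by the program) of the brace
# slicing GetFlightAwareJson relies on: the JSON payload is whatever sits
# between the first '{' and the last '}' of the trackpollBootstrap script tag.
# The sample script text below is illustrative only.
def _ExampleExtractEmbeddedJson():
  script = 'var trackpollBootstrap = {"flights": {"SWA1234-123": {}}};'
  payload = script[script.find('{'):script.rfind('}') + 1]
  return json.loads(payload)  # -> {'flights': {'SWA1234-123': {}}}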


def Unidecode(s):
  """Convert a special unicode characters to closest ASCII representation."""
  if s is not None:
    s = unidecode.unidecode(s)
  return s


def ParseFlightAwareJson(flight_json):
  """Strips relevant data about the flight from FlightAware feed.

  The FlightAware json has hundreds of fields about a flight, only a fraction
  of which are relevant to extract. Note that some of the fields are
  inconsistently populated (i.e.: scheduled and actual times for departure and
  take-off).

  Args:
    flight_json: Text representation of the FlightAware json about a single
      flight.

  Returns:
    Dictionary of flight attributes extracted from the FlightAware json.
  """
  flight = {}
  parsed_json = json.loads(flight_json)

  fa_flight_number = list(parsed_json['flights'].keys())[0]
  parsed_flight_details = parsed_json['flights'][fa_flight_number]
  flight['fa_flight_number'] = fa_flight_number

  origin = parsed_flight_details.get('origin')
  if origin:
    flight['origin_friendly'] = origin.get('friendlyLocation')
    flight['origin_iata'] = origin.get('iata')

  destination = parsed_flight_details.get('destination')
  if destination:
    flight['destination_friendly'] = destination.get('friendlyLocation')
    flight['destination_iata'] = destination.get('iata')




                            <----SKIPPED LINES---->




    flight['estimated_landing_time'] = landing_time.get('estimated')

  airline = parsed_flight_details.get('airline')
  if airline:
    flight['airline_call_sign'] = Unidecode(airline.get('callsign'))
    flight['airline_short_name'] = Unidecode(airline.get('shortName'))
    flight['airline_full_name'] = Unidecode(airline.get('fullName'))

  if len(parsed_json['flights'].keys()) > 1:
    Log('There are multiple flights in the FlightAware json: ' +
        str(parsed_json))

  return flight
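

# A hedged sketch (not called by the program) of the payload shape
# ParseFlightAwareJson walks: a top-level 'flights' dict keyed by FlightAware's
# internal flight id, with nested 'origin' / 'destination' / 'airline' dicts.
# The sample below is synthetic and far smaller than a real feed.
def _ExampleFlightAwarePayloadShape():
  parsed_json = {'flights': {'SWA1234-1592400000-airline-0123': {
      'origin': {'iata': 'SFO', 'friendlyLocation': 'San Francisco, CA'},
      'destination': {'iata': 'SAN', 'friendlyLocation': 'San Diego, CA'}}}}
  fa_flight_number = list(parsed_json['flights'].keys())[0]
  details = parsed_json['flights'][fa_flight_number]
  return details['origin']['iata'], details['destination']['iata']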


def EpochDisplayTime(epoch, format_string='%Y-%m-%d %H:%M:%S.%f%z'):
  """Converts epoch in seconds to formatted time string."""
  return datetime.datetime.fromtimestamp(epoch, TZ).strftime(format_string)


def DisplayTime(flight, format_string='%Y-%m-%d %H:%M:%S.%f%z'):
  """Converts flight 'now' to formatted time string, caching results."""
  cached_key = CACHED_ELEMENT_PREFIX + 'now-' + format_string
  cached_time = flight.get(cached_key)
  if cached_time:
    return cached_time

  epoch_display_time = EpochDisplayTime(flight['now'], format_string)
  flight[cached_key] = epoch_display_time
  return epoch_display_time
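

# Hedged illustration (not called by the program) of the caching convention in
# DisplayTime: the formatted value is memoized on the flight dict itself under
# a CACHED_ELEMENT_PREFIX key, so repeated renders of the same flight avoid
# re-running strftime. The epoch value below is arbitrary.
def _ExampleDisplayTimeCache():
  flight = {'now': 1592400000.0}
  first = DisplayTime(flight, '%Y-%m-%d')
  assert flight[CACHED_ELEMENT_PREFIX + 'now-%Y-%m-%d'] == first
  assert DisplayTime(flight, '%Y-%m-%d') == first  # served from the cache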


def DisplayAirline(flight):
  """Augments flight details with display-ready airline attributes.

  Args:
    flight: dictionary with key-value attributes about the flight.

  Returns:
    String identifying either the airline, or Unknown if not available.
  """
  airline = flight.get('airline_short_name', flight.get('airline_full_name'))




                            <----SKIPPED LINES---->




    aircraft = aircraft.replace('Regional Jet ', '')
    aircraft = aircraft[:SPLITFLAP_CHARS_PER_LINE]
  else:
    aircraft = ''
  return aircraft


def DisplayFlightNumber(flight):
  """Generate a displayable string for flight number, falling back to SQUAWK."""
  squawk = flight.get('squawk', '')
  flight_number = flight.get('flight_number')
  identifier = flight_number
  if not identifier and squawk:
    identifier = 'SQK ' + str(squawk)
  if not identifier:
    identifier = KEY_NOT_PRESENT_STRING
  return identifier


def DisplayAirportCodeIata(flight, key):
  """Returns value if key present and populated; 'Unknown' otherwise."""
  airport_code = flight.get(key)
  if not airport_code:
    airport_code = KEY_NOT_PRESENT_STRING
  return airport_code


def DisplayOriginIata(flight):
  """Generates displayable string for origin airport code."""
  return DisplayAirportCodeIata(flight, 'origin_iata')


def DisplayDestinationIata(flight):
  """Generates displayable string for destination airport code."""
  return DisplayAirportCodeIata(flight, 'destination_iata')


def DisplayAirportCodeFriendly(flight, iata_key, friendly_key):
  """Generates displayable longer name of airport including city."""
  airport = flight.get(iata_key)
  if not airport:
    return KEY_NOT_PRESENT_STRING
  if airport in KNOWN_AIRPORTS:
    return airport
  # the friendly name may be missing or stored as None
  airport += ' ' + (flight.get(friendly_key) or '').split(',')[0]
  return airport


def DisplayOriginFriendly(flight):
  """Generates displayable longer name of origin airport including."""
  return DisplayAirportCodeFriendly(flight, 'origin_iata', 'origin_friendly')


def DisplayDestinationFriendly(flight):
  """Generates displayable longer name of dest airport including."""
  return DisplayAirportCodeFriendly(
      flight, 'destination_iata', 'destination_friendly')


def DisplayOriginDestinationPair(flight):
  """Generates displayble origin-dest airport code mindful of screen width.

  If the origin or destination is among a few key airports where the IATA code
  is well-known, then we can display only that code. Otherwise, we'll want to
  display both the code and a longer description of the airport. But we need to
  be mindful of the overall length of the display. So, for instance, these
  might be produced as valid origin-destination pairs:
  SFO-CLT Charlotte       <- Known origin
  Charlotte CLT-SFO       <- Known destination
  Charl CLT-SAN San Diego <- Neither origin nor destination known

  Args:
    flight: dictionary with key-value attributes about the flight.

  Returns:
    String as described.
  """
  origin_iata = DisplayOriginIata(flight)
  destination_iata = DisplayDestinationIata(flight)

  origin_friendly = DisplayOriginFriendly(flight)
  destination_friendly = DisplayDestinationFriendly(flight)

  max_pair_length = SPLITFLAP_CHARS_PER_LINE - len('-')
  if (
      origin_iata not in KNOWN_AIRPORTS and
      destination_iata not in KNOWN_AIRPORTS and




                            <----SKIPPED LINES---->




      origin_length = max_pair_length - destination_length
    elif len(destination_friendly) > max_destination_length:
      origin_length = len(origin_friendly)
      destination_length = max_pair_length - origin_length
    else:
      origin_length = max_origin_length
      destination_length = max_destination_length
  elif origin_iata in KNOWN_AIRPORTS and destination_iata not in KNOWN_AIRPORTS:
    origin_length = len(origin_iata)
    destination_length = max_pair_length - origin_length
  elif destination_iata in KNOWN_AIRPORTS and origin_iata not in KNOWN_AIRPORTS:
    destination_length = len(destination_iata)
    origin_length = max_pair_length - destination_length
  elif destination_iata == origin_iata:
    origin_length = len(origin_iata)
    destination_length = max_pair_length - origin_length
  else:
    destination_length = len(destination_iata)
    origin_length = len(origin_iata)

  if (origin_iata == KEY_NOT_PRESENT_STRING and
      destination_iata == KEY_NOT_PRESENT_STRING):
    origin_destination_pair = KEY_NOT_PRESENT_STRING
  else:
    origin_destination_pair = ('%s-%s' % (
        origin_friendly[:origin_length],
        destination_friendly[:destination_length]))

  return origin_destination_pair


def DisplayDepartureTimes(flight):
  """Generates displayable fields about flight times and delay.

  Attempts first to find a matching "pair" of flight departure time details
  (departure vs. takeoff), on the theory that aligned nomenclature in the
  source data reflects an aligned concept of time from which a flight delay can
  best be calculated. Without a matching pair (or if no departure time
  information is provided at all), a delay cannot be calculated.

  Args:
    flight: dictionary with key-value attributes about the flight.

  Returns:
    Dictionary with the following keys:
    - departure_timestamp: taken from one of potentially four timestamps
      indicating departure
    - departure_time_text: departure time formatted to HH:MM string
    - calculable_delay: boolean indicating whether sufficient data available to
      calc delay
    - delay_seconds: integer number of seconds of delay
    - delay_text: text of the format "7H16M early", where the descriptor early
      or late is abbreviated if needed to stay within the display width
  """
  cached_key = CACHED_ELEMENT_PREFIX + 'departure_times'
  cached_value = flight.get(cached_key)
  if cached_value:
    return cached_value

  actual_departure = flight.get('actual_departure_time')
  scheduled_departure = flight.get('scheduled_departure_time')
  actual_takeoff_time = flight.get('actual_takeoff_time')
  scheduled_takeoff_time = flight.get('scheduled_takeoff_time')
  calculable_delay = False

  scheduled = None
  delay_seconds = None
  delay_text = ''

  if actual_departure and scheduled_departure:
    actual = actual_departure
    scheduled = scheduled_departure
    departure_label = 'Dep'




                            <----SKIPPED LINES---->




    flight: dictionary with key-value attributes about the flight.

  Returns:
    Seconds, if the remaining time is calculable; None otherwise.
  """
  arrival = flight.get('estimated_arrival_time')
  if not arrival:
    arrival = flight.get('estimated_landing_time')
  if not arrival:
    arrival = flight.get('scheduled_arrival_time')
  if not arrival:
    arrival = flight.get('scheduled_landing_time')

  if arrival:
    remaining_seconds = arrival - flight['now']  # seconds until arrival
  else:
    remaining_seconds = None
  return remaining_seconds
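

# Hedged illustration (not called by the program) of the arrival-estimate
# fallback chain above, using a synthetic flight dict: with only a scheduled
# arrival available, the remaining time runs from the flight's 'now' timestamp
# to that estimate.
def _ExampleDisplaySecondsRemaining():
  flight = {'now': 1000.0, 'scheduled_arrival_time': 1600.0}
  return DisplaySecondsRemaining(flight)  # -> 600.0 seconds remaining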


def FlightMeetsDisplayCriteria(
    flight, configuration, display_all_hours=False, log=False):
  """Returns boolean indicating whether the screen accepting new flight data.

  Based on the configuration file, determines whether the flight data should be
  displayed. Specifically, the configuration:
  - may include 'enabled' indicating whether screen should be driven at all
  - should include 'on' & 'off' parameters indicating minute (from midnight) of
    operation
  - should include altitude & elevation parameters indicating max values of
    interest

  Args:
    flight: dictionary of flight attributes.
    configuration: dictionary of configuration attributes.
    display_all_hours: a boolean indicating whether we should ignore whether
      the screen is turned off (either via the enabled setting or via the hour
      settings)
    log: optional boolean indicating whether a flight that fails the criteria
      should be logged with the reason

  Returns:
    Boolean as described.
  """
  flight_altitude = flight.get('altitude', float('inf'))
  config_max_altitude = configuration['setting_max_altitude']

  flight_meets_criteria = True
  if flight_altitude > config_max_altitude:
    flight_meets_criteria = False
    if log:
      Log(
          '%s not displayed because it fails altitude criteria - '
          'flight altitude: %.0f; required altitude: %.0f' % (
              DisplayFlightNumber(flight),
              flight_altitude, config_max_altitude))
  else:
    flight_distance = flight.get('min_feet', float('inf'))
    config_max_distance = configuration['setting_max_distance']
    if flight_distance > config_max_distance:
      flight_meets_criteria = False
      if log:
        Log(
            '%s not displayed because it fails distance criteria - '
            'flight distance: %.0f; required distance: %.0f' % (
                DisplayFlightNumber(flight), flight_distance,
                config_max_distance))

  if not display_all_hours and flight_meets_criteria:
    flight_timestamp = flight['now']
    minute_of_day = MinuteOfDay(flight_timestamp)
    if minute_of_day <= configuration['setting_on_time']:
      flight_meets_criteria = False
      if log:
        Log(
            '%s not displayed because it occurs too early - minute_of_day: '
            '%d; setting_on_time: %d' % (
                DisplayFlightNumber(flight), minute_of_day,
                configuration['setting_on_time']))
    elif minute_of_day > configuration['setting_off_time'] + 1:
      flight_meets_criteria = False
      if log:
        Log(
            '%s not displayed because it occurs too late - minute_of_day: '
            '%d; setting_off_time: %d' % (
                DisplayFlightNumber(flight), minute_of_day,
                configuration['setting_off_time']))
    elif configuration.get('setting_screen_enabled', 'off') == 'off':
      flight_meets_criteria = False
      if log:
        Log(
            '%s not displayed because screen disabled' %
            DisplayFlightNumber(flight))

  return flight_meets_criteria
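

# Hedged example (not called by the program) of the altitude / distance gate
# with hypothetical settings; display_all_hours=True bypasses the
# screen-enabled and on / off time checks.
def _ExampleFlightMeetsDisplayCriteria():
  configuration = {
      'setting_max_altitude': 20000, 'setting_max_distance': 5000}
  flight = {'now': time.time(), 'altitude': 2500, 'min_feet': 1200}
  return FlightMeetsDisplayCriteria(
      flight, configuration, display_all_hours=True)  # -> True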


def IdentifyFlightDisplayed(flights, configuration, display_all_hours=False):
  """Finds the most recent flight in flights that meet the display criteria.

  Args:
    flights: list of flight dictionaries.
    configuration: dictionary of settings.
    display_all_hours: boolean indicating whether we should ignore the time
    constraints (i.e.: whether the screen is enabled, and its turn-on or
    turn-off times) in identifying the most recent flight. That is, if False,
    then this will only return flights that would have been displayed in the
    ordinarily usage, vs. if True, a flight irrespective of the time it would
    be displayed.

  Returns:
    A flight dictionary if one can be found; None otherwise.
  """
  for n in range(len(flights)-1, -1, -1):  # traverse the flights in reverse
    if FlightMeetsDisplayCriteria(
        flights[n], configuration, display_all_hours=display_all_hours):
      return n
  return None


def CreateMessageAboutFlight(flight):
  """Creates a message to describe interesting attributes about a single flight.

  Generates a multi-line description of a flight. A typical message might look
  like:
  UAL300 - UNITED        <- Flight number and airline
  BOEING 777-200 (TWIN)  <- Aircraft type
  SFO-HNL HONOLULU       <- Origin & destination
  DEP 02:08 ER REM 5:14  <- Time details: departure; early / late; remaining
  185MPH 301DEG D:117FT  <- Trajectory: speed; bearing; fcst min dist to HOME
  1975FT (+2368FPM)      <- Altitude: current altitude & rate of ascent

  However, not all of these details are always present, so some may be listed
  as unknown, or entire lines may be left out.

  Args:
    flight: dictionary of flight attributes.

  Returns:
    Printable string (with embedded new line characters)
  """
  lines = []

  # LINE1: UAL1425 - UNITED
  #        ======================
  flight_number = DisplayFlightNumber(flight)
  second_element = DisplayAirline(flight)

  if second_element == KEY_NOT_PRESENT_STRING:
    second_element = flight.get('owner', KEY_NOT_PRESENT_STRING)
    if second_element is None:
      second_element = KEY_NOT_PRESENT_STRING

  if (flight_number == KEY_NOT_PRESENT_STRING and
      second_element == KEY_NOT_PRESENT_STRING):
    line = 'Unknown Flight'
  else:
    line = (flight_number + ' - ' + second_element)[:SPLITFLAP_CHARS_PER_LINE]
  lines.append(line)

  # LINE2: Boeing 737-800 (twin-jet)
  #        ======================
  aircraft_type = DisplayAircraft(flight)
  if aircraft_type:
    lines.append(aircraft_type)

  # LINE3: SFO-CLT Charlotte
  #        Charlotte CLT-SFO
  #        ======================
  origin_destination_pair = DisplayOriginDestinationPair(flight)
  if origin_destination_pair:
    lines.append(origin_destination_pair)

  # LINE4: DEP 02:08 ER REM 5:14
  #        Dep: Unknown
  #        ======================
  departure_time_details = DisplayDepartureTimes(flight)
  line_elements = []
  if departure_time_details:

    if departure_time_details.get('departure_time_text'):
      line_elements.append(departure_time_details['departure_time_text'])

    if departure_time_details.get('delay_text'):
      line_elements.append(departure_time_details['delay_text'])

    remaining_seconds = DisplaySecondsRemaining(flight)
    if remaining_seconds is not None:
      line_elements.append(
          'Rem ' + SecondsToHhMm(remaining_seconds, colon=True))

  if line_elements:
    lines.append(EvenlySpace(line_elements))

  # LINE5: 123mph 297deg D:1383ft
  #        ======================
  speed = flight.get('speed')
  heading = flight.get('track')
  min_feet = flight.get('min_feet')

  line_elements = []
  if speed is not None:
    line_elements.append(str(round(speed)) + SPEED_UNITS)
  if heading is not None:
    line_elements.append(str(round(heading)) + u'\u00b0')  # degrees deg unicode
  if min_feet is not None:
    line_elements.append('D:' + str(round(min_feet)) + DISTANCE_UNITS)
  if line_elements:
    lines.append(EvenlySpace(line_elements))





                            <----SKIPPED LINES---->




  if vert_rate:
    line_elements.append('%+d%s' % (vert_rate, CLIMB_RATE_UNITS))
  if line_elements:
    lines.append(EvenlySpace(line_elements))

  return lines


def EvenlySpace(l):
  """Converts list to string with equal space between each element in list."""
  if not l:
    return ''
  if len(l) == 1:
    return l[0]
  extra_space = SPLITFLAP_CHARS_PER_LINE - sum([len(str(s)) for s in l])
  last_gap = round(extra_space / (len(l) - 1))
  return EvenlySpace([*l[:-2], str(l[-2]) + ' '*last_gap + str(l[-1])])
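

# Hedged sanity check (not called by the program): for elements that fit on one
# line, EvenlySpace pads the joined string out to exactly
# SPLITFLAP_CHARS_PER_LINE columns, distributing the leftover space between
# neighbors from right to left.
def _ExampleEvenlySpace():
  assert len(EvenlySpace(['123mph', '297deg'])) == SPLITFLAP_CHARS_PER_LINE
  assert len(EvenlySpace(['A', 'B', 'C'])) == SPLITFLAP_CHARS_PER_LINE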


def RemoveParentheticals(s):
  """Removes all instances of () and the text contained within - from string."""
  if not s:
    return s
  if '(' in s and ')' in s:
    open_paren = s.find('(')
    close_paren = s.find(')')
  else:
    return s
  if close_paren < open_paren:
    return s
  s = s.replace(s[open_paren:close_paren+1], '').strip().replace('  ', ' ')
  return RemoveParentheticals(s)
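

# Illustrative behavior (not called by the program) of RemoveParentheticals:
# parenthetical chunks are stripped one pair per recursion.
def _ExampleRemoveParentheticals():
  assert RemoveParentheticals(
      'Boeing 737-800 (twin-jet)') == 'Boeing 737-800'
  assert RemoveParentheticals('A (B) C (D)') == 'A C'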


def Ordinal(n):
  """Converts integer n to an ordinal string - i.e.: 2 -> 2nd; 5 -> 5th."""
  return '%d%s' % (n, 'tsnrhtdd'[(math.floor(n/10)%10 != 1)*(n%10 < 4)*n%10::4])
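

# Illustrative values (not called by the program) for the ordinal-suffix trick
# above: the slice into 'tsnrhtdd' picks 'st' / 'nd' / 'rd' only when the last
# digit is 1-3 and the tens digit is not 1, and 'th' otherwise.
def _ExampleOrdinal():
  assert [Ordinal(n) for n in (1, 2, 3, 4, 11, 12, 13, 21)] == [
      '1st', '2nd', '3rd', '4th', '11th', '12th', '13th', '21st']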


def Screenify(lines, splitflap):
  """Transforms a list of lines to a single text string for display / print.

  Given a list of lines that is a fully-formed message to send to the splitflap
  display, this function transforms the list of strings to a single string that
  is an easier-to-read and more faithful representation of how the message will
  be displayed. The transformations are to pad the message with blank lines so
  that it has a consistent number of lines, and to add a border to the sides
  and top / bottom of the message.

  Args:
    lines: list of strings that comprise the message.
    splitflap: boolean; True if directed to the splitflap display, False if
      directed to the screen.

  Returns:
    String - which includes embedded new line characters, borders, etc. as
    described above, that can be printed to screen as the message.
  """
  divider = '+' + '-'*SPLITFLAP_CHARS_PER_LINE + '+'
  border_character = '|'
  append_character = '\n'

  if splitflap:
    border_character = ''
    append_character = ''

  for unused_n in range(SPLITFLAP_LINE_COUNT-len(lines)):
    lines.append('')
  lines = [
      border_character +
      line.ljust(SPLITFLAP_CHARS_PER_LINE).upper() +
      border_character
      for line in lines]

  if not splitflap:
    lines.insert(0, divider)
    lines.append(divider)

  return append_character.join(lines)
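

# Hedged illustration (not called by the program) of Screenify in print mode
# (splitflap=False), assuming the message has no more lines than the display:
# the message is padded to SPLITFLAP_LINE_COUNT rows, upper-cased, and framed
# in a +---+ border so the on-screen preview mirrors the physical display.
def _ExampleScreenify():
  preview = Screenify(
      ['UAL300 - UNITED', 'SFO-HNL HONOLULU'], splitflap=False)
  lines = preview.split('\n')
  assert lines[0].startswith('+-') and lines[-1].startswith('+-')
  assert len(lines) == SPLITFLAP_LINE_COUNT + 2  # content rows plus borders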


def FlightInsightLastSeen(flights, days_ago=2):
  """Generates string indicating when flight was last seen.

  Generates text of the following form.
  - KAL214 was last seen 2d0h ago

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    days_ago: the minimum time difference for which a message should be
      generated - i.e.: many flights are daily, so we are not necessarily
      interested in noting that a daily flight was seen yesterday. However,
      more infrequent flights might be of interest.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = DisplayFlightNumber(this_flight)
  this_timestamp = flights[-1]['now']
  last_seen = [
      f for f in flights[:-1] if DisplayFlightNumber(f) == this_flight_number]
  if last_seen and 'flight_number' in this_flight:
    last_timestamp = last_seen[-1]['now']
    if this_timestamp - last_timestamp > days_ago*SECONDS_IN_DAY:
      message = '%s was last seen %s ago' % (
          this_flight_number, SecondsToDdHh(this_timestamp - last_timestamp))
  return message
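

# Hedged illustration (not called by the program) with synthetic flights: the
# second sighting of KAL214 is more than two days after the first, so an
# insight string (e.g.: 'KAL214 was last seen 3d0h ago') is produced.
def _ExampleFlightInsightLastSeen():
  earlier = {'flight_number': 'KAL214', 'now': 0.0}
  latest = {'flight_number': 'KAL214', 'now': 3 * SECONDS_IN_DAY}
  return FlightInsightLastSeen([earlier, latest])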


def FlightInsightDifferentAircraft(flights, percent_size_difference=0.1):
  """Generates string indicating changes in aircraft for the most recent flight.

  Generates text of the following form for the "focus" flight in the data.
  - Last time ASA1964 was seen on Mar 16, it was with a much larger plane
    (Airbus A320 (twin-jet) @ 123ft vs. Airbus A319 (twin-jet) @ 111ft)
  - Last time ASA743 was seen on Mar 19, it was with a different type of
    airplane (Boeing 737-900 (twin-jet) vs. Boeing 737-800 (twin-jet))

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    percent_size_difference: the minimum size (i.e.: length) difference for the
      insight to warrant including the size details.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = DisplayFlightNumber(this_flight)
  last_seen = [
      f for f in flights[:-1]
      if DisplayFlightNumber(f) == this_flight_number]

  # Last time this same flight flew a materially different type of aircraft
  if last_seen and 'flight_number' in this_flight:
    last_flight = last_seen[-1]

    last_aircraft = last_flight.get('aircraft_type_friendly')
    last_aircraft_length = AIRCRAFT_LENGTH.get(last_aircraft, 0)

    this_aircraft = this_flight.get('aircraft_type_friendly')
    this_aircraft_length = AIRCRAFT_LENGTH.get(this_aircraft, 0)

    this_likely_commercial_flight = (
        this_flight.get('origin_iata') and this_flight.get('destination_iata'))
    if (this_likely_commercial_flight and this_aircraft
        and not this_aircraft_length):
      Log('%s used in a flight with defined origin & destination but yet is '
          'missing length details' % this_aircraft, file=LOGFILE)

    likely_same_commercial_flight = (
        last_flight.get('origin_iata') == this_flight.get('origin_iata') and
        last_flight.get(
            'destination_iata') == this_flight.get('destination_iata') and
        last_flight.get(
            'airline_call_sign') == this_flight.get('airline_call_sign'))

    this_aircraft_bigger = False
    last_aircraft_bigger = False
    if (likely_same_commercial_flight and
        this_aircraft_length > last_aircraft_length * (
            1 + percent_size_difference)):
      this_aircraft_bigger = True
      comparative_text = 'larger'
    elif (likely_same_commercial_flight and
          last_aircraft_length > this_aircraft_length * (
              1 + percent_size_difference)):
      last_aircraft_bigger = True
      comparative_text = 'smaller'

    last_flight_time_string = DisplayTime(last_flight, '%b %-d')
    if this_aircraft and last_aircraft:
      if this_aircraft_bigger or last_aircraft_bigger:
        message = ('%s used a %s plane today compared with last, on %s '
                   '(%s @ %dft vs. %s @ %dft)' % (
                       this_flight_number, comparative_text,
                       last_flight_time_string,
                       RemoveParentheticals(this_aircraft),
                       this_aircraft_length*FEET_IN_METER,
                       RemoveParentheticals(last_aircraft),
                       last_aircraft_length*FEET_IN_METER))
      elif last_aircraft and this_aircraft and last_aircraft != this_aircraft:
        message = (
            '%s used a different aircraft today compared'
            ' with last, on %s (%s vs. %s)' % (
                this_flight_number, last_flight_time_string,
                this_aircraft, last_aircraft))

  return message


def FlightInsightNthFlight(flights, hours=1, min_multiple_flights=2):
  """Generates string about seeing frequent flights to the same dest.

  Generates text of the following form for the "focus" flight in the data.
  - ASA1337 was the 4th flight to PHX in the last 53 minutes, served by Alaska
    Airlines, American Airlines, Southwest and United
  - SWA3102 was the 2nd flight to SAN in the last 25 minutes, both with
    Southwest

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    hours: the time horizon over which to look for flights with the same
      destination.
    min_multiple_flights: the minimum number of flights to that same
      destination to warrant generating an insight.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = this_flight.get('flight_number', 'This')
  this_destination = this_flight.get('destination_iata', '')
  this_airline = DisplayAirline(this_flight)
  if not this_airline:
    # in case airline was stored as, say, ''
    this_airline = KEY_NOT_PRESENT_STRING
  this_timestamp = this_flight['now']
  if this_destination and this_destination not in ['SFO', 'LAX']:
    similar_flights = [f for f in flights[:-1] if
                       this_timestamp - f['now'] < SECONDS_IN_HOUR*hours and
                       this_destination == f.get('destination_iata', '')]
    similar_flights_count = len(similar_flights) + 1  # +1 for this_flight
    similar_flights_airlines = list(
        {DisplayAirline(f) for f in similar_flights})

    same_airline = [this_airline] == similar_flights_airlines

    if similar_flights_count >= min_multiple_flights:
      n_minutes = (
          (this_flight['now'] - similar_flights[0]['now'])
          / SECONDS_IN_MINUTE)
      message = ('%s was the %s flight to %s in the last %d minutes' % (
          this_flight_number, Ordinal(similar_flights_count),
          this_destination, n_minutes))
      if same_airline and similar_flights_count == 2:
        message += ', both with %s' % this_airline
      elif same_airline:
        message += ', all with %s' % this_airline
      else:
        similar_flights_airlines.append(this_airline)
        similar_flights_airlines.sort()
        message += ', served by %s and %s' % (
            ', '.join(similar_flights_airlines[:-1]),
            similar_flights_airlines[-1])

  return message


def FlightInsightSuperlativeAttribute(
    flights,
    key,
    label,
    units,
    absolute_list,
    insight_min=True,
    insight_max=True,
    hours=HOURS_IN_DAY):
  """Generates string if numeric attribute of the flight being an extreme value.

  Generates text of the following form for the "focus" flight in the data.
  - N5286C has the slowest groundspeed (113mph vs. 163mph) in last 24 hours
  - CKS828 has the highest altitude (40000ft vs. 16575ft) in last 24 hours

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    key: the key of the attribute of interest - i.e.: 'speed'.
    label: the human-readable string that should be displayed in the message -
      i.e.: 'groundspeed'.
    units: the string units that should be used to label the value of the key -
      i.e.: 'MPH'.
    absolute_list: a 2-tuple of strings that is used to label the min and the
      max - i.e.: ('lowest', 'highest'), or ('slowest', 'fastest').
    insight_min: boolean indicating whether to generate an insight about the
      min value.
    insight_max: boolean indicating whether to generate an insight about the
      max value.
    hours: the time horizon over which to look for superlative flights.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = this_flight.get('flight_number', 'The last flight')
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  included_seconds = last_timestamp - first_timestamp

  if included_seconds > SECONDS_IN_HOUR * hours:
    relevant_flights = [
        f for f in flights[:-1]
        if last_timestamp - f['now'] < SECONDS_IN_HOUR * hours]
    other_values = [
        f.get(key) for f in relevant_flights
        if isinstance(f.get(key), numbers.Number)]

    this_value = this_flight.get(key)

    if this_value and other_values:
      # Compute the extremes only once the comparison set is known to be
      # non-empty; min()/max() of an empty list would raise ValueError.
      value_min = min(other_values)
      value_max = max(other_values)

      superlative = True
      if (
          isinstance(this_value, numbers.Number) and
          isinstance(value_max, numbers.Number) and
          this_value > value_max and
          insight_max):
        absolute_string = absolute_list[1]
        other_value = value_max
      elif (
          isinstance(this_value, numbers.Number) and
          isinstance(value_min, numbers.Number) and
          this_value < value_min and
          insight_min):
        absolute_string = absolute_list[0]
        other_value = value_min
      else:
        superlative = False

      if superlative:
        message = '%s has the %s %s (%d%s vs. %d%s) in last %d hours' % (
            this_flight_number, absolute_string, label,
            this_value, units, other_value, units, hours)

  return message


def FlightInsightNextFlight(flights, configuration):
  """Generates string about estimated wait until next flight.

  Generates text of the following form for the "focus" flight in the data.
  - Last flight at 2:53a; avg wait is 1h58m & median is 42m, but could be as
    long as 8h43m, based on last 20 days

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    configuration: dictionary of settings.

  Returns:
    Printable string message; if no message because not enough history, then an
    empty string.
  """
  msg = ''
  if not flights:
    return msg

  # Find the minute of day of this flight, then find the minute of day of
  # prior flights such that:
  # -- the flight has not been seen in the last 12 hours
  # -- its minute of day is >= this flight's (wrapping past midnight if needed)
  this_flight = flights[-1]
  this_hour = int(DisplayTime(this_flight, '%-H'))
  this_minute = int(DisplayTime(this_flight, '%-M'))
  this_date = DisplayTime(this_flight, '%x')

  # Flights that we've already seen in the last few hours we do not expect to
  # see again for another few hours, so let's exclude them from the calculation
  exclude_flights_hours = 12
  flight_numbers_seen_in_last_n_hours = [
      f['flight_number'] for f in flights
      if f['now'] > this_flight['now'] - exclude_flights_hours*SECONDS_IN_HOUR
      and 'flight_number' in f]
  still_to_come_flights = [
      f for f in flights[:-1]
      if f.get('flight_number') not in flight_numbers_seen_in_last_n_hours
      and this_date != DisplayTime(f, '%x')]

  # exclude flights that would be filtered out by altitude or distance
  still_to_come_flights = [
      f for f in still_to_come_flights
      if FlightMeetsDisplayCriteria(f, configuration)]

  # exclude flights more than 30 days in the past
  now = time.time()
  still_to_come_flights = [
      f for f in still_to_come_flights
      if now - f['now'] < MAX_INSIGHT_HORIZON_DAYS * SECONDS_IN_DAY]

  minimum_minutes_next_flight = {}  # min minutes to next flight by day
  for flight in still_to_come_flights:
    date = DisplayTime(flight, '%x')
    hour = int(DisplayTime(flight, '%-H'))
    minutes = int(DisplayTime(flight, '%-M'))
    minutes_after = (hour - this_hour) * MINUTES_IN_HOUR + (
        minutes - this_minute)
    if minutes_after < 0:
      minutes_after += MINUTES_IN_DAY
    minimum_minutes_next_flight[date] = min(
        minimum_minutes_next_flight.get(date, minutes_after), minutes_after)

  minutes = list(minimum_minutes_next_flight.values())
  # at least one (potentially partial) prior day of history
  if len(minutes) > 1:
    average_seconds = (sum(minutes) / len(minutes)) * SECONDS_IN_MINUTE
    max_seconds = max(minutes) * SECONDS_IN_MINUTE

    median_seconds = statistics.median(minutes) * SECONDS_IN_MINUTE
    minimum_percent_diff = 0.5
    median_different = (
        median_seconds > average_seconds * (1 + minimum_percent_diff) or
        average_seconds > median_seconds * (1 + minimum_percent_diff))
    median_text = ''
    if median_different:
      median_text = ' & median is %s' % SecondsToHhMm(median_seconds)

    msg = ('Last flight at %s; avg wait is %s%s, but could '
           'be as long as %s, based on last %d days' % (
               DisplayTime(this_flight, '%-I:%M%p'),
               SecondsToHhMm(average_seconds),
               median_text, SecondsToHhMm(max_seconds), len(minutes)))

  return msg


def CheckForNewFilterCriteria(prev, new, message_queue, flights):
  """If filter criteria changed, generate new image and perhaps new message."""
  if (new.get('setting_max_distance') != prev.get('setting_max_distance') or
      new.get('setting_max_altitude') != prev.get('setting_max_altitude')):
    FlightCriteriaHistogramPng(
        flights,
        new['setting_max_distance'],
        new['setting_max_altitude'],
        7,
        last_max_distance_feet=prev.get('setting_max_distance'),
        last_max_altitude_feet=prev.get('setting_max_altitude'))

  if (new.get('setting_max_distance') != prev.get('setting_max_distance') or
      new.get('setting_max_altitude') != prev.get('setting_max_altitude') or
      new.get('setting_off_time') != prev.get('setting_off_time') or
      new.get('setting_on_time') != prev.get('setting_on_time')):
    if new.get('next_flight', 'off') == 'on':
      next_flight_message = FlightInsightNextFlight(flights, new)
      if next_flight_message:
        message_queue.append((FLAG_MSG_INSIGHT, next_flight_message))


def PercentileScore(scores, value):
  """Returns the percentile that a particular value is in a list of numbers.

  Roughly inverts numpy.percentile. That is, numpy.percentile(scores_list,
  percentile) to get the value of the list that is at that percentile;
  PercentileScore(scores_list, value) will yield back approximately that
  percentile.

  If the value matches identical elements in the list, this function takes the
  average position of those identical values to compute a percentile. Thus, for
  some lists (i.e.: where there are lots of flights that have a 0 second delay,
  or a 100% delay frequency), you may not get a percentile of 0 or 100 even
  with values equal to the min or max element in the list.

  Args:
    scores: the list of numbers, including value.
    value: the value for which we want to determine the percentile.

  Returns:
    Returns an integer percentile in the range [0, 100] inclusive.
  """
  count_values_below_score = len([1 for s in scores if s < value])
  # -1 is because value is already in scores
  count_values_at_score = len([1 for s in scores if s == value]) - 1
  percentile = (
      count_values_below_score + count_values_at_score / 2) / len(scores)
  return round(percentile*100)
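

# Worked examples (not called by the program) of PercentileScore with
# hypothetical values: in [1, 2, 2, 3], the value 2 has one element below it
# and one other element equal to it, so the percentile is
# (1 + 1/2) / 4 = 37.5%, which rounds to 38.
def _ExamplePercentileScore():
  assert PercentileScore([1, 2, 2, 3], 2) == 38
  assert PercentileScore([1, 2, 3, 4], 4) == 75  # (3 + 0/2) / 4 -> 75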


def FlightInsightGroupPercentile(
    flights,
    group_function,
    value_function,
    value_string_function,
    group_label,
    value_label,
    filter_function=lambda this, other: True,
    min_days=1,
    lookback_days=MAX_INSIGHT_HORIZON_DAYS,
    min_this_group_size=0,
    min_comparison_group_size=0,
    min_group_qty=0,
    percentile_low=float('-inf'),
    percentile_high=float('inf')):
  """Generates a string about extreme values of groups of flights.

  Generates text of the following form for the "focus" flight in the data.
  - flight SIA31 (n=7) has a delay frequency in the 95th %tile, with 100% of
    flights delayed an average of 6m over the last 4d1h
  - flight UAL300 (n=5) has a delay time in the 1st %tile, with an average
    delay of 0m over the last 4d5h

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    group_function: function that, when called with a flight, returns the
      grouping key. That is, for example, group_function(flight) = 'B739'
    value_function: function that, when called with a list of flights, returns
      the value to be used for the comparison to identify min / max. Typically,
      the count, but could also be a sum, standard deviation, etc. - for
      perhaps the greatest range in flight altitude. If the group does not have
      a valid value and so should be excluded from comparison - i.e.: average
      delay of a group of flights which did not have a calculable_delay on any
      flight, this function should return None.
    value_string_function: function that, when called with the two parameters
      flights and value, returns a string (inclusive of units and label) that
      should be displayed to describe the quantity. For instance, if
      value_function returns seconds, value_string_function could convert that
      to a string '3h5m'. Or if value_function returns an altitude range,
      value_string_function could return a string 'altitude range of 900ft
      (1100ft - 2000ft)'.
    group_label: string to identify the group type - i.e.: 'aircraft' or
      'flight' in the examples above.
    value_label: string to identify the value - i.e.: 'flights' in the examples
      above, but might also be i.e.: longest *delay*, or other quantity
      descriptor.
    filter_function: an optional function that, when called with the most
      recent flight and another flight filter_function(flights[-1], flight[n]),
      returns a value interpreted as a boolean indicating whether flight n
      should be included in determining the percentile.
    min_days: the minimum amount of history required to start generating
      insights about delays.
    lookback_days: the maximum amount of history which will be considered in
      generating insights about delays.
    min_this_group_size: even if this group has, say, the maximum average
      delay, if it's a group of size 1, that is not necessarily very
      interesting. This sets the minimum group size for the focus flight.
    min_comparison_group_size: similarly, comparing the focus group to groups
      of size one does not necessarily produce a meaningful comparison; this
      sets the minimum size for the other groups.
    min_group_qty: when generating a percentile, if there are only 3 or 4
      groups among which to generate a percentile (i.e.: only a handful of
      destinations have been seen so far, etc.) then it is not necessarily very
      interesting to generate a message; this sets the minimum quantity of
      groups necessary (including the focus group) to generate a message.
    percentile_low: number [0, 100] inclusive that indicates the percentile
      that the focus flight group must equal or be less than for the focus
      group to trigger an insight.
    percentile_high: number [0, 100] inclusive that indicates the percentile
      that the focus flight group must equal or be greater than for the focus
      group to trigger an insight.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  debug = False
  message = ''
  this_flight = flights[-1]
  first_timestamp = flights[0]['now']
  last_timestamp = this_flight['now']
  included_seconds = last_timestamp - first_timestamp

  if (included_seconds > SECONDS_IN_DAY * min_days and
      group_function(this_flight) != KEY_NOT_PRESENT_STRING):

    relevant_flights = [
        f for f in flights if
        last_timestamp - f['now'] < SECONDS_IN_DAY * lookback_days and
        filter_function(this_flight, f)]

    grouped_flights = {}
    for flight in relevant_flights:
      group = group_function(flight)
      grouping = grouped_flights.get(group, [])
      grouping.append(flight)
      grouped_flights[group] = grouping
    # we will exclude "UNKNOWN" since that is not a coherent group
    if KEY_NOT_PRESENT_STRING in grouped_flights:
      grouped_flights.pop(KEY_NOT_PRESENT_STRING)

    grouped_values = {
        g: value_function(grouped_flights[g]) for g in grouped_flights}
    this_group = group_function(relevant_flights[-1])
    this_value = grouped_values[this_group]
    this_group_size = len(grouped_flights[this_group])

    # we will exclude groups that are not big enough
    grouped_flights = {
        k: grouped_flights[k] for k in grouped_flights
        if (len(grouped_flights[k]) >= min_comparison_group_size
            or k == this_group)}

    # Remove those for which no value could be calculated or which are too small
    grouped_values = {
        g: grouped_values[g] for g in grouped_values
        if grouped_values[g] is not None and g in grouped_flights}

    if debug:
      print()
      print('len(relevant_flights): %d' % len(relevant_flights))
      print('len(grouped_flights): %d' % len(grouped_flights))
      print('grouped_flights.keys(): %s' % sorted(list(grouped_flights.keys())))
      for key in sorted(list(grouped_flights.keys())):
        print('  len(grouped_flights[%s]) = %d' % (
            key, len(grouped_flights[key])))

    if this_value is not None and len(grouped_values) >= min_group_qty:

      time_horizon_string = ' over the last %s' % SecondsToDdHh(
          last_timestamp - relevant_flights[0]['now'])
      min_comparison_group_size_string = ''
      if min_comparison_group_size > 1:
        min_comparison_group_size_string = ' amongst those with >%d flights' % (
            min_comparison_group_size - 1)

      # FLIGHT X (n=7) has the Xth percentile of DELAYS, with an average
      # delay of 80 MINUTES
      this_percentile = PercentileScore(grouped_values.values(), this_value)
      if this_group_size >= min_this_group_size and (
          this_percentile <= percentile_low
          or this_percentile >= percentile_high):

        if debug:
          print('Comparison cohorts for %s (%s)' % (
              group_label, str(this_group)))
          print('This percentile: %f; min: %f; max: %f' % (
              this_percentile, percentile_low, percentile_high))
          keys = list(grouped_values.keys())
          values = [grouped_values[k] for k in keys]
          print(keys)
          print(values)
          (values, keys) = SortByValues(values, keys)
          for n, value in enumerate(values):
            print('%s: %f (group size: %d)' % (
                keys[n], value, len(grouped_flights[keys[n]])))

        if group_label:
          group_label += ' '

        def TrialMessage():
          message = '%s%s (n=%d) has a %s in the %s %%tile, with %s%s%s' % (
              group_label,
              this_group,
              this_group_size,
              value_label,
              Ordinal(this_percentile),
              value_string_function(grouped_flights[this_group], this_value),
              time_horizon_string,
              min_comparison_group_size_string)
          line_count = len(textwrap.wrap(
              message, width=SPLITFLAP_CHARS_PER_LINE))
          return (line_count, message)

        (line_count, message) = TrialMessage()
        if line_count > SPLITFLAP_LINE_COUNT:
          min_comparison_group_size_string = ''
          (line_count, message) = TrialMessage()
          if line_count > SPLITFLAP_LINE_COUNT:
            time_horizon_string = ''
            (line_count, message) = TrialMessage()

      elif debug:
        print(
            'Not an outlying group because A and either'
            ' B or C needed to be true:')
        if not this_group_size >= min_this_group_size:
          print('A this_group_size %d >= min_this_group_size %d' % (
              this_group_size, min_this_group_size))
        else:
          print('A passed')
          if not this_percentile <= percentile_low:
            print('B this_percentile %d <= percentile_low %d' % (
                this_percentile, percentile_low))
          if not this_percentile >= percentile_high:
            print('C this_percentile %d >= percentile_high %d' % (
                this_percentile, percentile_high))

    elif debug:
      print('Not an outlying group because A or B failed:')
      if this_value is None:
        print('A this_value %s' % str(this_value))
      elif len(grouped_values) < min_group_qty:
        print('A passed')
        print('B len(grouped_values) %d >= min_group_qty %d' % (
            len(grouped_values), min_group_qty))




                            <----SKIPPED LINES---->




  return message


def FlightInsightSuperlativeGroup(
    flights,
    group_function,
    value_function,
    value_string_function,
    group_label,
    value_label,
    absolute_list,
    min_days=1,
    lookback_days=MAX_INSIGHT_HORIZON_DAYS,
    min_this_group_size=0,
    min_comparison_group_size=0,
    insight_min=True,
    insight_max=True):
  """Generates a string about extreme values of groups of flights.

  Generates text of the following form for the "focus" flight in the data.
  - aircraft B739 (n=7) is tied with B738 and A303 for the most flights at 7
    flights over the last 3d7h amongst aircraft with at least 5 flights
  - aircraft B739 (n=7) is tied with 17 others for the most flights at 7
    flights over the last 3d7h amongst aircraft with at least 5 flights
  - flight UAL1075 (n=12) has the most flights with 12 flights; the next most
    flights is 11 flights over the last 7d5h

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    group_function: function that, when called with a flight, returns the
      grouping key. That is, for example, group_function(flight) = 'B739'
    value_function: function that, when called with a list of flights, returns
      the value to be used for the comparison to identify min / max. Typically,
      the count, but could also be a sum, standard deviation, etc. - for
      perhaps the greatest range in flight altitude. If the group does not have
      a valid value and so should be excluded from comparison - i.e.: average
      delay of a group of flights which did not have a calculable_delay on any
      flight, this function should return None.
    value_string_function: function that, when called with the two parameters
      flights and value, returns a string (inclusive of units and label) that
      should be displayed to describe the quantity. For instance, if
      value_function returns seconds, value_string_function could convert that
      to a string '3h5m'. Or if value_function returns an altitude range,
      value_string_function could return a string 'altitude range of 900ft
      (1100ft - 2000ft)'.
    group_label: string to identify the group type - i.e.: 'aircraft' or
      'flight' in the examples above.
    value_label: string to identify the value - i.e.: 'flights' in the examples
      above, but might also be i.e.: longest *delay*, or other quantity
      descriptor.
    absolute_list: a 2-tuple of strings that is used to label the min and the
      max - i.e.: ('most', 'least'), or ('lowest average', 'highest average').
    min_days: the minimum amount of history required to start generating
      insights about delays.
    lookback_days: the maximum amount of history which will be considered in
      generating insights about delays.
    min_this_group_size: even if this group has, say, the maximum average
      delay, if it's a group of size 1, that is not necessarily very
      interesting. This sets the minimum group size for the focus flight.
    min_comparison_group_size: similarly, comparing the focus group to groups
      of size one does not necessarily produce a meaningful comparison; this
      sets the minimum size for the other groups.
    insight_min: boolean indicating whether to possibly generate insight based
      on the occurrence of the min value.
    insight_max: boolean indicating whether to possibly generate insight based
      on the occurrence of the max value.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  message = ''
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  included_seconds = last_timestamp - first_timestamp

  if included_seconds > SECONDS_IN_DAY * min_days:

    relevant_flights = [
        f for f in flights if
        last_timestamp - f['now'] < SECONDS_IN_DAY * lookback_days]

    grouped_flights = {}
    for flight in relevant_flights:
      group = group_function(flight)
      grouping = grouped_flights.get(group, [])
      grouping.append(flight)
      grouped_flights[group] = grouping

    grouped_values = {
        g: value_function(grouped_flights[g]) for g in grouped_flights}
    this_group = group_function(relevant_flights[-1])
    this_value = grouped_values[this_group]
    this_group_size = len(grouped_flights[this_group])

    # we will exclude groups that are not big enough
    grouped_flights = {
        k: grouped_flights[k] for k in grouped_flights
        if len(grouped_flights[k]) > min_comparison_group_size}

    # Remove those for which no value could be calculated or which are too small
    grouped_values = {
        g: grouped_values[g] for g in grouped_values
        if grouped_values[g] is not None and g in grouped_flights}

    other_values = list(grouped_values.values())
    if this_value in other_values:
      other_values.remove(this_value)

    if other_values:
      min_value = min(other_values)




                            <----SKIPPED LINES---->




          superlative = True
          equality = False
          superlative_string = absolute_list[0]
          next_value = min_value
        elif this_value == min_value and insight_min:
          superlative = False
          equality = True
          superlative_string = absolute_list[0]
        else:
          superlative = False
          equality = False

        time_horizon_string = SecondsToDdHh(
            last_timestamp - relevant_flights[0]['now'])
        min_comparison_group_size_string = ''
        if min_comparison_group_size > 1:
          min_comparison_group_size_string = (
              ' amongst %s with at least %d flights' %
              (group_label, min_comparison_group_size))

        # flight x (n=7) is tied with a, b, and c for the (longest average,
        # shortest average) delay at 80 minutes
        # flight x is tied with a, b, and c for the (most frequent, least
        # frequent) delay at 30%
        if equality and this_group_size > min_this_group_size:

          identical_groups = sorted([
              str(g) for g in grouped_values
              if grouped_values[g] == this_value and g != this_group])
          if len(identical_groups) > 4:
            identical_string = '%d others' % len(identical_groups)
          elif len(identical_groups) > 1:
            identical_string = (
                '%s and %s' %
                (', '.join(identical_groups[:-1]), identical_groups[-1]))
          else:
            identical_string = str(identical_groups[0])

          message = (
              '%s %s (n=%d) is tied with %s for '
              'the %s %s at %s over the last %s%s' % (
                  group_label,
                  this_group,
                  this_group_size,
                  identical_string,
                  superlative_string,
                  value_label,
                  value_string_function(flights, this_value),
                  time_horizon_string,
                  min_comparison_group_size_string))

        elif superlative and this_group_size > min_this_group_size:
          message = (
              '%s %s (n=%d) has the %s %s with %s; the next '
              '%s %s is %s over the last %s%s' % (
                  group_label,
                  this_group,
                  this_group_size,
                  superlative_string,
                  value_label,
                  value_string_function(flights, this_value),
                  superlative_string,
                  value_label,
                  value_string_function(flights, next_value),
                  time_horizon_string,
                  min_comparison_group_size_string))

  return message


def AverageDelay(flights):
  """Returns the average delay time for a list of flights.

  Args:
    flights: the list of the raw flight data.

  Returns:
    Average seconds of flight delay, calculated as the total seconds of delay
    amongst the flights that have a positive delay, divided by the number of
    flights that have a positive delay. If no flights have a positive,
    calculable delay, returns None.
  """
  calculable_delay_seconds = [
      DisplayDepartureTimes(f)['delay_seconds'] for f in flights
      if DisplayDepartureTimes(f)['calculable_delay'] and
      DisplayDepartureTimes(f)['delay_seconds'] > 0]
  average_delay = None
  if calculable_delay_seconds:
    average_delay = sum(
        calculable_delay_seconds) / len(calculable_delay_seconds)
  return average_delay



def PercentDelay(flights):
  """Returns the percentage of flights that have a positive delay.

  Args:
    flights: the list of the raw flight data.

  Returns:
    Percentage of flights with a delay, calculated as the count of flights with
    a positive delay divided by the total number of flights that have a
    calculable delay. If no flights have a calculable delay, returns None.
  """
  calculable_delay_seconds = [
      DisplayDepartureTimes(f)['delay_seconds'] for f in flights
      if DisplayDepartureTimes(f)['calculable_delay']]
  delay_count = sum([1 for s in calculable_delay_seconds if s > 0])
  percent_delay = None
  if calculable_delay_seconds:
    percent_delay = delay_count / len(calculable_delay_seconds)
  return percent_delay


def FlightInsightFirstInstance(
    flights,
    key,
    label,
    days=7,
    additional_descriptor_fcn=None):
  """Generates string for first time value seen of particular key.

  Generates text of the following form for the "focus" flight in the data.
  - N311CG is the first time aircraft GLF6 (Gulfstream Aerospace Gulfstream G650
    (twin-jet)) has been seen since at least 7d5h ago
  - PCM8679 is the first time airline Westair Industries has been seen since
    9d0h ago

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    key: the key of the attribute of interest - i.e.: 'destination_iata'.
    label: the human-readable string that should be displayed in the message -
      i.e.: 'destination'.
    days: the minimum time of interest for an insight - i.e.: we probably see
      LAX every hour, but we are only interested in particular attributes that
      have not been seen for at least some number of days. Note, however, that
      the code will go back even further to find the last time that attribute
      was observed, or if never observed, indicating "at least".
    additional_descriptor_fcn: a function that, when passed a flight, returns an
      additional parenthetical notation to include about the attribute or flight
      observed - such as expanding the IATA airport code to its full name, etc.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = DisplayFlightNumber(this_flight)
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  included_seconds = last_timestamp - first_timestamp

  if included_seconds > SECONDS_IN_DAY * days:
    this_instance = this_flight.get(key)
    matching = [f for f in flights[:-1] if f.get(key) == this_instance]

    last_potential_observation_sec = included_seconds
    if matching:
      last_potential_observation_sec = last_timestamp - matching[-1]['now']

    if this_instance and last_potential_observation_sec > SECONDS_IN_DAY * days:
      additional_descriptor = ''
      if additional_descriptor_fcn:
        additional_descriptor = ' (%s)' % additional_descriptor_fcn(this_flight)
      last_potential_observation_string = SecondsToDdHh(
          last_potential_observation_sec)
      if matching:
        message = '%s is the first time %s %s%s has been seen since %s ago' % (
            this_flight_number, label, this_instance, additional_descriptor,
            last_potential_observation_string)
      else:
        message = (
            '%s is the first time %s %s%s has been '
            'seen since at least %s ago' % (
                this_flight_number, label, this_instance, additional_descriptor,
                last_potential_observation_string))

  return message
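

# The core of FlightInsightFirstInstance is a gap test on timestamps: the
# insight fires only when the observed history spans more than `days` days and
# the most recent prior sighting of the same attribute value (if any) is
# itself more than `days` days old. A minimal sketch of that test on plain
# epoch seconds (hypothetical helper for illustration; not called anywhere in
# the module):
def _ExampleFirstInstanceGap(timestamps, matching_timestamps, days):
  """Illustrative only: the 'first seen in N days' timestamp test.

  Args:
    timestamps: epoch seconds of all observed flights, oldest first.
    matching_timestamps: epoch seconds of the earlier flights that share the
      newest flight's attribute value (possibly empty).
    days: minimum gap, in days, for the insight to be interesting.
  """
  included_seconds = timestamps[-1] - timestamps[0]
  if included_seconds <= SECONDS_IN_DAY * days:
    return (False, None)  # not enough history to say anything
  gap_seconds = included_seconds  # "at least" this long if never seen before
  if matching_timestamps:
    gap_seconds = timestamps[-1] - matching_timestamps[-1]
  return (gap_seconds > SECONDS_IN_DAY * days, gap_seconds)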


def FlightInsightSuperlativeVertrate(flights, hours=HOURS_IN_DAY):
  """Generates string about the climb rate of the flight being an extreme value.

  Generates text of the following form for the "focus" flight in the data.
  - UAL631   has the fastest ascent rate (5248fpm, 64fpm faster than next
    fastest) in last 24 hours
  - CKS1820 has the fastest descent rate (-1152fpm, -1088fpm faster than next
    fastest) in last 24 hours

  While this is conceptually similar to the more generic
  FlightInsightSuperlativeAttribute function, vert_rate needs its own special
  handling because it can be either positive or negative, with different
  signs requiring different labeling and comparisons.

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    hours: the time horizon over which to look for superlative flights.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = this_flight.get('flight_number')
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  sufficient_data = (last_timestamp - first_timestamp) > SECONDS_IN_HOUR * hours
  pinf = float('inf')
  ninf = float('-inf')

  if sufficient_data:
    relevant_flights = [
        f for f in flights[:-1]
        if last_timestamp - f['now'] < SECONDS_IN_HOUR * hours]

    def AscentRate(f, default):
      vert_rate = f.get('vert_rate')
      if isinstance(vert_rate, numbers.Number) and vert_rate > 0:
        return vert_rate
      return default

    other_ascents = len([
        1 for f in relevant_flights
        if isinstance(
            f.get('vert_rate'), numbers.Number) and AscentRate(f, ninf) > 0])
    if other_ascents:
      ascent_min = min([
          AscentRate(f, pinf) for f in relevant_flights
          if AscentRate(f, ninf) > 0])
      ascent_max = max([
          AscentRate(f, ninf) for f in relevant_flights
          if AscentRate(f, ninf) > 0])

    def DescentRate(f, default):
      vert_rate = f.get('vert_rate')
      if isinstance(vert_rate, numbers.Number) and vert_rate < 0:
        return vert_rate
      return default

    other_descents = len([
        1 for f in relevant_flights
        if isinstance(
            f.get('vert_rate'), numbers.Number) and DescentRate(f, pinf) < 0])
    if other_descents:
      descent_min = min([
          DescentRate(f, pinf) for f in relevant_flights
          if DescentRate(f, pinf) < 0])
      descent_max = max([
          DescentRate(f, ninf) for f in relevant_flights
          if DescentRate(f, pinf) < 0])

    this_vert_rate = this_flight.get('vert_rate')

    if isinstance(this_vert_rate, numbers.Number):
      if this_vert_rate >= 0:
        this_ascent = this_vert_rate
        this_descent = None
      else:
        this_descent = this_vert_rate
        this_ascent = None

      if this_ascent and other_ascents and this_ascent > ascent_max:
        message = ('%s has the fastest ascent rate (%d%s, %d%s faster '
                   'than next fastest) in last %d hours' % (
                       this_flight_number, this_ascent, CLIMB_RATE_UNITS,
                       this_ascent - ascent_max, CLIMB_RATE_UNITS, hours))
      elif this_ascent and other_ascents and this_ascent < ascent_min:
        message = ('%s has the slowest ascent rate (%d%s, %d%s slower '
                   'than next slowest) in last %d hours' % (
                       this_flight_number, this_ascent, CLIMB_RATE_UNITS,




                            <----SKIPPED LINES---->




                       this_flight_number, this_descent, CLIMB_RATE_UNITS,
                       this_descent - descent_min, CLIMB_RATE_UNITS, hours))
      elif this_descent and other_descents and this_descent > descent_max:
        message = ('%s has the slowest descent rate (%d%s, %d%s slower '
                   'than next slowest) in last %d hours' % (
                       this_flight_number, this_descent, CLIMB_RATE_UNITS,
                       descent_max - this_descent, CLIMB_RATE_UNITS, hours))

  return message
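

# FlightInsightSuperlativeVertrate treats climbs and descents as two separate
# populations: ascent superlatives compare only positive vert_rate values,
# descent superlatives compare only negative ones, and "fastest descent" means
# the most negative number. A minimal sketch of that split on plain numbers
# (hypothetical helper, for illustration only):
def _ExampleVertRateExtremes(vert_rates):
  """Illustrative only: splits vert rates into ascent / descent extremes."""
  numeric = [v for v in vert_rates if isinstance(v, numbers.Number)]
  ascents = [v for v in numeric if v > 0]
  descents = [v for v in numeric if v < 0]
  ascent_extremes = (min(ascents), max(ascents)) if ascents else None
  descent_extremes = (min(descents), max(descents)) if descents else None
  return (ascent_extremes, descent_extremes)
# For [5248, 1200, -640, -1152, None] this returns
# ((1200, 5248), (-1152, -640)): 5248 is the fastest ascent and -1152 the
# fastest (most negative) descent.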


def FlightInsightDelays(
    flights,
    min_days=1,
    lookback_days=MAX_INSIGHT_HORIZON_DAYS,
    min_late_percentage=0.75,
    min_this_delay_minutes=0,
    min_average_delay_minutes=0):
  """Generates string about the delays this flight has seen in the past.

  Only if this flight has a calculable delay itself will this generate text
  of the following form for the "focus" flight in the data.
  - This 8m delay is the longest UAL1175 has seen in the last 9 days (avg delay
    is 4m); overall stats: 1 early; 9 late; 10 total
  - With today's delay of 7m, UAL1175 is delayed 88% of the time in the last 8
    days for avg delay of 4m; overall stats: 1 early; 8 late; 9 total

  Args:
    flights: the list of the raw data from which the insights will be generated,
      where the flights are listed in order of observation - i.e.: flights[0]
      was the earliest seen, and flights[-1] is the most recent flight for
      which we are attempting to generate an insight.
    min_days: the minimum amount of history required to start generating
      insights about delays.
    lookback_days: the maximum amount of history which will be considered in
      generating insights about delays.
    min_late_percentage: flights that are not very frequently delayed are not
      necessarily very interesting to generate insights about; this specifies
      the minimum percentage the flight must be late to generate a message that
      focuses on the on-time percentage.
    min_this_delay_minutes: a delay of 1 minute is not necessarily interesting;
      this specifies the minimum delay time this instance of the flight must be
      late to generate a message that focuses on this flight's delay.
    min_average_delay_minutes: an average delay of only 1 minute, even if it
      happens every day, is not necessarily very interesting; this specifies
      the minimum average delay time to generate either type of delay message.

  Returns:
    Printable string message; if no message or insights to generate, then an
    empty string.
  """
  message = ''
  this_flight = flights[-1]
  this_flight_number = this_flight.get('flight_number', '')
  first_timestamp = flights[0]['now']
  last_timestamp = flights[-1]['now']
  included_seconds = last_timestamp - first_timestamp

  if (included_seconds > SECONDS_IN_DAY * min_days
      and DisplayDepartureTimes(this_flight)['calculable_delay']):
    this_delay_seconds = DisplayDepartureTimes(this_flight)['delay_seconds']
    relevant_flights = [
        f for f in flights if
        last_timestamp - f['now'] < SECONDS_IN_DAY * lookback_days and
        this_flight_number == f.get('flight_number', '')]

    if (
        len(relevant_flights) > 1 and
        this_delay_seconds >= min_this_delay_minutes*SECONDS_IN_MINUTE):
      delay_seconds_list = [




                            <----SKIPPED LINES---->




      overall_stats_elements = []
      if delay_early_count:
        overall_stats_elements.append('%d ER' % delay_early_count)
      if delay_ontime_count:
        overall_stats_elements.append('%d OT' % delay_ontime_count)
      if delay_late_count:
        overall_stats_elements.append('%d LT' % delay_late_count)
      if delay_unknown_count:
        overall_stats_elements.append('%d UNK' % delay_unknown_count)
      overall_stats_text = '; '.join(overall_stats_elements)

      days_history = int(round(
          (last_timestamp - relevant_flights[0]['now']) / SECONDS_IN_DAY)) + 1

      late_percentage = delay_late_count / len(relevant_flights)

      if (superlative and
          delay_late_avg_sec >= min_average_delay_minutes * SECONDS_IN_MINUTE):
        message = (
            'This %s delay is the %s %s has seen in the '
            'last %d days (avg delay is %s); overall stats: %s' % (
                SecondsToHhMm(this_delay_seconds),
                delay_keyword,
                this_flight_number,
                days_history,
                SecondsToHhMm(delay_late_avg_sec),
                overall_stats_text))
      elif (late_percentage > min_late_percentage and
            delay_late_avg_sec >=
            min_average_delay_minutes * SECONDS_IN_MINUTE):
        # it's just been delayed frequently!
        message = (
            'With today\'s delay of %s, %s is delayed %d%% of the time in'
            ' the last %d days for avg delay of %s; overall stats: %s' % (
                SecondsToHhMm(this_delay_seconds),
                this_flight_number,
                int(100 * late_percentage),
                days_history,
                SecondsToHhMm(delay_late_avg_sec),
                overall_stats_text))
  return message
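

# The delay insight summarizes a flight's history with a compact stats string
# (early / on-time / late / unknown counts) plus a late percentage. A minimal
# sketch of just that summary step, assuming the four counts have already been
# tallied (hypothetical helper; the real function derives its counts from
# DisplayDepartureTimes over the relevant flights):
def _ExampleDelayStatsText(early, ontime, late, unknown):
  """Illustrative only: builds the '1 ER; 8 LT' style overall stats string."""
  parts = []
  if early:
    parts.append('%d ER' % early)
  if ontime:
    parts.append('%d OT' % ontime)
  if late:
    parts.append('%d LT' % late)
  if unknown:
    parts.append('%d UNK' % unknown)
  total = early + ontime + late + unknown
  late_percentage = late / total if total else None
  return ('; '.join(parts), late_percentage)
# _ExampleDelayStatsText(1, 0, 8, 0) returns ('1 ER; 8 LT', 0.888...): the
# flight was early once, late eight times, and late 8 of 9 times overall.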


def FlightInsights(flights):
  """Identifies all the insight messages about the most recently seen flight.

  Generates a possibly-empty list of messages about the flight.

  Args:
    flights: List of all flights where the last flight in the list is the focus
    flight for which we are trying to identify something interesting.

  Returns:
    List of 2-tuples, where the first element in the tuple is a flag indicating
    the type of insight message, and the second element is the list of
    printable lines (the message wrapped to the display width) describing
    something interesting about the flight; if there isn't anything
    interesting, returns an empty list.
  """
  messages = []

  def AppendMessageType(message_type, message):
    if message:
      messages.append((message_type, message))

  # This flight number was last seen x days ago
  AppendMessageType(
      FLAG_INSIGHT_LAST_SEEN, FlightInsightLastSeen(flights, days_ago=2))

  # Yesterday this same flight flew a materially different type of aircraft
  AppendMessageType(
      FLAG_INSIGHT_DIFF_AIRCRAFT,
      FlightInsightDifferentAircraft(flights, percent_size_difference=0.1))

  # This is the 3rd flight to the same destination in the last hour
  AppendMessageType(
      FLAG_INSIGHT_NTH_FLIGHT,
      FlightInsightNthFlight(flights, hours=1, min_multiple_flights=2))

  # This is the [lowest / highest] [speed / altitude / climbrate]
  # in the last 24 hours
  AppendMessageType(FLAG_INSIGHT_GROUNDSPEED, FlightInsightSuperlativeAttribute(
      flights,
      'speed',
      'groundspeed',
      SPEED_UNITS,
      ['slowest', 'fastest'],
      hours=HOURS_IN_DAY))
  AppendMessageType(FLAG_INSIGHT_ALTITUDE, FlightInsightSuperlativeAttribute(
      flights,
      'altitude',
      'altitude',
      DISTANCE_UNITS,
      ['lowest', 'highest'],
      hours=HOURS_IN_DAY))
  AppendMessageType(
      FLAG_INSIGHT_VERTRATE, FlightInsightSuperlativeVertrate(flights))

  # First instances: destination, first aircraft, etc.
  AppendMessageType(FLAG_INSIGHT_FIRST_DEST, FlightInsightFirstInstance(
      flights, 'destination_iata', 'destination', days=7,
      additional_descriptor_fcn=lambda f: f['destination_friendly']))
  AppendMessageType(FLAG_INSIGHT_FIRST_ORIGIN, FlightInsightFirstInstance(
      flights, 'origin_iata', 'origin', days=7,
      additional_descriptor_fcn=lambda f: f['origin_friendly']))
  AppendMessageType(FLAG_INSIGHT_FIRST_AIRLINE, FlightInsightFirstInstance(
      flights, 'airline_short_name', 'airline', days=7))
  AppendMessageType(FLAG_INSIGHT_FIRST_AIRCRAFT, FlightInsightFirstInstance(
      flights, 'aircraft_type_code', 'aircraft', days=7,
      additional_descriptor_fcn=lambda f: f['aircraft_type_friendly']))

  # This is the longest / shortest delay this flight has seen in the last 30
  # days at 2h5m; including today, this flight has been delayed x of the last y
  # times.
  AppendMessageType(FLAG_INSIGHT_LONGEST_DELAY, FlightInsightDelays(
      flights, min_late_percentage=0.75,
      min_this_delay_minutes=0,
      min_average_delay_minutes=0))

  def DelayTimeAndFrequencyMessage(
      types_tuple,
      group_function,
      group_label,
      filter_function=lambda this, other: True,
      min_days=1,
      lookback_days=MAX_INSIGHT_HORIZON_DAYS,
      min_this_group_size=0,
      min_comparison_group_size=0,
      min_group_qty=0,
      percentile_low=float('-inf'),
      percentile_high=float('inf')):
    value_function_tuple = (PercentDelay, AverageDelay)
    value_string_function_tuple = (
        lambda flights, value: '%d%% of flights delayed an average of %s' % (




                            <----SKIPPED LINES---->




        lambda flights, value: 'average delay of %s' % SecondsToHhMm(value))
    value_label_tuple = ('delay frequency', 'delay time')
    for n in range(2):
      if types_tuple[n]:
        AppendMessageType(types_tuple[n], FlightInsightGroupPercentile(
            flights,
            group_function=group_function,
            value_function=value_function_tuple[n],
            value_string_function=value_string_function_tuple[n],
            group_label=group_label,
            value_label=value_label_tuple[n],
            filter_function=filter_function,
            min_days=min_days,
            min_this_group_size=min_this_group_size,
            min_comparison_group_size=min_comparison_group_size,
            min_group_qty=min_group_qty,
            lookback_days=lookback_days,
            percentile_low=percentile_low,
            percentile_high=percentile_high))

  # flight UAL1 (n=5) has a delay frequency in the 72nd %tile, with 100% of
  # flights delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (FLAG_INSIGHT_FLIGHT_DELAY_FREQUENCY, FLAG_INSIGHT_FLIGHT_DELAY_TIME),
      group_function=lambda flight: flight.get(
          'flight_number', KEY_NOT_PRESENT_STRING),
      group_label='flight',
      min_days=1,
      min_this_group_size=4,
      min_comparison_group_size=0,
      min_group_qty=0,
      percentile_low=10,
      percentile_high=90)

  # Airline United (n=5) has a delay frequency in the 72nd %tile, with 100% of
  # flights delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (FLAG_INSIGHT_AIRLINE_DELAY_FREQUENCY, FLAG_INSIGHT_AIRLINE_DELAY_TIME),
      group_function=DisplayAirline,
      group_label='airline',
      min_days=1,
      min_this_group_size=10,
      min_comparison_group_size=5,
      min_group_qty=5,
      percentile_low=10,
      percentile_high=80)

  # Destination LAX (n=5) has a delay frequency in the 72nd %tile, with 100% of
  # flights delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (FLAG_INSIGHT_DESTINATION_DELAY_FREQUENCY,
       FLAG_INSIGHT_DESTINATION_DELAY_TIME),
      group_function=DisplayDestinationFriendly,
      group_label='destination',
      min_days=1,
      min_this_group_size=10,
      min_comparison_group_size=5,
      min_group_qty=5,
      percentile_low=10,
      percentile_high=90)

  # we only want to do this if we're already at ~75% of the number of flights
  # we'd expect to see for the hour
  flight_hours = {}
  for flight in flights:
    if (flights[-1]['now'] - flight['now'] < 8.5 * SECONDS_IN_DAY and
        DisplayTime(
            flight, '%-I%p') == DisplayTime(flights[-1], '%-I%p')):
      flight_hours[DisplayTime(flight, '%-d')] = flight_hours.get(
          DisplayTime(flight, '%-d'), 0) + 1
  min_this_hour_flights = max(3, 0.75 * max(flight_hours.values()))

  # Once we've commented on the insights for an hour or day, we don't want to
  # do it again
  hour_delay_frequency_flag = FLAG_INSIGHT_HOUR_DELAY_FREQUENCY
  hour_delay_time_flag = FLAG_INSIGHT_HOUR_DELAY_TIME
  date_delay_frequency_flag = FLAG_INSIGHT_DATE_DELAY_FREQUENCY
  date_delay_time_flag = FLAG_INSIGHT_DATE_DELAY_TIME
  for flight in flights[:-1]:
    insights = flight.get('insight_types', [])
    this_hour = DisplayTime(flights[-1], '%x %-I%p')
    this_day = DisplayTime(flights[-1], '%x')
    if (this_hour == DisplayTime(flight, '%x %-I%p') and
        FLAG_INSIGHT_HOUR_DELAY_FREQUENCY in insights):
      hour_delay_frequency_flag = None
    if (this_hour == DisplayTime(flight, '%x %-I%p') and
        FLAG_INSIGHT_HOUR_DELAY_TIME in insights):
      hour_delay_time_flag = None
    if (this_day == DisplayTime(flight, '%x') and
        FLAG_INSIGHT_DATE_DELAY_FREQUENCY in insights):
      date_delay_frequency_flag = None
    if (this_day == DisplayTime(flight, '%x') and
        FLAG_INSIGHT_DATE_DELAY_TIME in insights):
      date_delay_time_flag = None

  def TodaysHour(f):
    f_date = DisplayTime(f, '%x')
    f_hour = DisplayTime(f, '%-I%p')
    if f_date == DisplayTime(flights[-1], '%x'):
      return '%s flights today' % f_hour
    return '%s %s' % (f_date, f_hour)

  # Today's 7a flights have a delay frequency in the 72nd %tile, with 100% of
  # flights delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (hour_delay_frequency_flag, hour_delay_time_flag),
      group_function=TodaysHour,
      group_label='',
      filter_function=lambda this, other:
      DisplayTime(this, '%-I%p') == DisplayTime(other, '%-I%p'),
      min_days=3,
      min_this_group_size=min_this_hour_flights,
      min_comparison_group_size=min_this_hour_flights,
      min_group_qty=5,
      percentile_low=10,
      percentile_high=90)

  # we only want to do this if we're already at ~75% of the number of flights
  # we'd expect to see for the day
  flight_days = {}
  for flight in flights:
    if flights[-1]['now'] - flight['now'] < 8.5 * SECONDS_IN_DAY:
      flight_days[DisplayTime(flight, '%-d')] = flight_days.get(
          DisplayTime(flight, '%-d'), 0) + 1
  min_this_day_flights = max(40, 0.75 * max(flight_days.values()))

  # Today (31st) has a delay frequency in the 72nd %tile, with 100% of flights
  # delayed an average of 44m over the last 4d13h
  DelayTimeAndFrequencyMessage(
      (date_delay_frequency_flag, date_delay_time_flag),
      group_function=lambda f:
      '(' + Ordinal(int(DisplayTime(f, '%-d'))) + ')',
      group_label='Today',
      min_days=7,
      min_this_group_size=min_this_day_flights,
      min_comparison_group_size=min_this_day_flights,
      min_group_qty=7,
      # Otherwise, there might be two 1st's of the month to compare
      lookback_days=28,
      percentile_low=10,
      percentile_high=90)

  messages = [
      (t, textwrap.wrap(m, width=SPLITFLAP_CHARS_PER_LINE))
      for (t, m) in messages]

  return messages
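

# FlightInsights returns each insight as a (flag, message) 2-tuple and, as a
# final step, wraps the message text to the split-flap line width so that each
# element of the wrapped list is one display line. A minimal sketch of that
# wrapping step (illustration only; the 22-character width is a hypothetical
# stand-in for SPLITFLAP_CHARS_PER_LINE):
def _ExampleWrapInsight(flag, message, width=22):
  """Illustrative only: mirrors the final textwrap.wrap step."""
  return (flag, textwrap.wrap(message, width=width))
# The result pairs the original flag with a list of lines, each at most
# `width` characters long, ready to be rendered one per split-flap row.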


def CreateFlightInsights(
    flights, flight_insights_enabled_string, insight_message_distribution):
  """Returns the desired quantity of flight insight messages.

  Though the function FlightInsights generates all possible insight messages
  about a flight, the user may have only wanted one. Depending on the setting of
  flight_insights_enabled_string, this function reduces the set of all insights
  by selecting the least-frequently reported type of insight message.

  In order to choose the least-frequently reported type, we need to keep track
  of what has been reported so far, which we do here in
  insight_message_distribution, and which we then update with each pass through
  this function.

  Args:
    flights: List of all flights where the last flight in the list is the focus
      flight for which we are trying to identify something interesting.
    flight_insights_enabled_string: string indicating how many insights are
      desired, which may be one of 'all', 'one', or 'hide'.
    insight_message_distribution: dictionary, where the keys are one of the
      flags indicating message type, and the values are how frequently that
      type of insight has been displayed in flights.  The dictionary is updated
      in place.

  Returns:
    Possibly-empty list of messages - the list may be empty if there are no
    insights, or if the setting selected for flight_insights_enabled_string is
    neither 'all' nor 'one'. The messages, if included, are printable strings
    (with embedded new line characters).
  """
  naked_messages = []

  this_flight_insights = []

  if flight_insights_enabled_string not in ('all', 'one'):
    return naked_messages

  insight_messages = FlightInsights(flights)

  if flight_insights_enabled_string == 'all' and insight_messages:
    for (t, m) in insight_messages:
      insight_message_distribution[t] = (
          insight_message_distribution.get(t, 0) + 1)
      this_flight_insights.append(t)
      naked_messages.append(m)

  if flight_insights_enabled_string == 'one' and insight_messages:
    types_of_messages = [t for (t, unused_m) in insight_messages]
    frequencies_of_insights = [
        insight_message_distribution.get(t, 0) for t in types_of_messages]
    min_frequency = min(frequencies_of_insights)
    for t in sorted(types_of_messages):
      if insight_message_distribution.get(t, 0) == min_frequency:
        break

    insight_message_distribution[t] = insight_message_distribution.get(t, 0) + 1
    for message_tuple in insight_messages:
      if message_tuple[0] == t:
        naked_messages.append(message_tuple[1])
        this_flight_insights.append(t)
        break

  # Save the distribution displayed for this flight
  # so we needn't regen it in future
  flights[-1]['insight_types'] = this_flight_insights

  return naked_messages
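

# In 'one' mode, the function above picks the insight type that has been shown
# least often so far, breaking ties by the sorted order of the type flags, and
# then bumps that type's count in the running distribution. A minimal sketch
# of just the selection step, operating on a plain dict of counts
# (hypothetical helper, for illustration only):
def _ExampleLeastShownType(candidate_types, distribution):
  """Illustrative only: returns the least-frequently shown candidate type."""
  min_frequency = min(distribution.get(t, 0) for t in candidate_types)
  for t in sorted(candidate_types):
    if distribution.get(t, 0) == min_frequency:
      return t
  return None  # unreachable when candidate_types is non-empty
# _ExampleLeastShownType(['b', 'a', 'c'], {'a': 3, 'c': 1}) returns 'b',
# because 'b' has never been shown (count 0), which is the minimum.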


def FlightCriteriaHistogramPng(
    flights,
    max_distance_feet,
    max_altitude_feet,
    max_days,
    filename=HOURLY_IMAGE_FILE,
    last_max_distance_feet=None,
    last_max_altitude_feet=None):
  """Saves histogram as png file for hourly flight data with given filters.

  Generates a png histogram of the count of flights by hour that meet the
  specified criteria: max altitude, max distance, and within the last number of
  days. Also optionally generates as a separate data series in same chart a
  histogram with a different max altitude and distance. Saves this histogram to
  disk.

  Args:
    flights: list of the flights.
    max_distance_feet: max distance for which to include flights in the
      histogram.
    max_altitude_feet: max altitude for which to include flights in the
      histogram.
    max_days: maximum number of days of history to include in the histogram.
    filename: file into which to save the png image.
    last_max_distance_feet: if provided, along with last_max_altitude_feet,
      generates a second data series with different criteria for distance and
      altitude, for which the histogram data will be plotted alongside the
      first series.
    last_max_altitude_feet: see above.
  """
  if not flights:
    return
  (values, keys, unused_filtered_data) = GenerateHistogramData(
      flights,
      HourString,
      HOURS,
      hours=max_days*HOURS_IN_DAY,
      max_distance_feet=max_distance_feet,
      max_altitude_feet=max_altitude_feet,
      normalize_factor=max_days,
      exhaustive=True)

  comparison = (
      last_max_distance_feet is not None and last_max_altitude_feet is not None)
  if comparison:
    (last_values, unused_last_keys,
     unused_filtered_data) = GenerateHistogramData(
         flights,
         HourString,
         HOURS,
         hours=max_days*HOURS_IN_DAY,
         max_distance_feet=last_max_distance_feet,
         max_altitude_feet=last_max_altitude_feet,
         normalize_factor=max_days,
         exhaustive=True)

  x = numpy.arange(len(keys))
  unused_fig, ax = matplotlib.pyplot.subplots()
  width = 0.35
  ax.bar(
      x - width/2, values, width,
      label='Current - alt: %d; dist: %d' % (
          max_altitude_feet, max_distance_feet))
  title = 'Daily Flights Expected: %d / day' % sum(values)
  if comparison:
    ax.bar(
        x + width/2, last_values, width,
        label='Prior - alt: %d; dist: %d' % (
            last_max_altitude_feet, last_max_distance_feet))
    title += ' (%+d)' % (round(sum(values) - sum(last_values)))

  ax.set_title(title)
  ax.set_ylabel('Average Observed Flights')
  if comparison:
    ax.legend()
  matplotlib.pyplot.xticks(
      x, keys, rotation='vertical', wrap=True,
      horizontalalignment='right',
      verticalalignment='center')

  matplotlib.pyplot.savefig(filename)
  matplotlib.pyplot.close()
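

# The comparison mode above draws two bar series side by side by offsetting
# each series half a bar width from the shared x positions. A minimal,
# self-contained sketch of that layout with made-up values (illustration only;
# the real function derives its values from GenerateHistogramData, and the
# filename here is a hypothetical example path):
def _ExampleGroupedBars(filename='/tmp/example_grouped_bars.png'):
  """Illustrative only: renders two offset bar series to a png."""
  keys = ['6a', '7a', '8a']
  current = [3, 5, 4]
  prior = [2, 6, 3]
  x = numpy.arange(len(keys))
  width = 0.35
  unused_fig, ax = matplotlib.pyplot.subplots()
  ax.bar(x - width/2, current, width, label='Current')
  ax.bar(x + width/2, prior, width, label='Prior')
  ax.legend()
  matplotlib.pyplot.xticks(x, keys)
  matplotlib.pyplot.savefig(filename)
  matplotlib.pyplot.close()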


def GenerateHistogramData(
    data,
    keyfunction,
    sort_type,
    truncate=float('inf'),
    hours=float('inf'),
    max_distance_feet=float('inf'),
    max_altitude_feet=float('inf'),
    normalize_factor=0,
    exhaustive=False):
  """Generates sorted data for a histogram from a description of the flights.

  Given an iterable describing the flights, this function generates the label
  (or key), and the frequency (or value) from which a histogram can be rendered.

  Args:
    data: the iterable of the raw data from which the histogram will be
      generated; each element of the iterable is a dictionary, that contains at
      least the key 'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    keyfunction: the function that determines how the key or label of the
      histogram should be generated; it is called for each element of the data
      iterable. For instance, to simply generate a histogram on the attribute
      'heading', keyfunction would be lambda a: a['heading'].
    sort_type: determines how the keys (and the corresponding values) are
      sorted:
      'key': the keys are sorted by a simple comparison operator between
        them, which sorts strings alphabetically and numbers numerically.
      'value': the keys are sorted by a comparison between the values, which
        means that more frequently-occurring keys are listed first.
      list: if instead of the strings a list is passed, the keys are then
        sorted in the sequence enumerated in the list. This is useful for, say,
        ensuring that the days of the week (Tues, Wed, Thur, ...) are listed in
        sequence. Keys that are generated by keyfunction but that are not in
        the given list are sorted last (and then amongst those, alphabetically).
    truncate: integer indicating the maximum number of keys to return; if set
      to 0, or if set to a value larger than the number of keys, no truncation
      occurs. But if set to a value less than the number of keys, then the keys
      with the lowest frequency are combined into one key named OTHER_STRING so
      that the number of keys in the resulting histogram (together with
      OTHER_STRING) is equal to truncate.
    hours: integer indicating the number of hours of history to include.
      Flights with a calcd_display_time more than this many hours in the past
      are excluded from the histogram generation. Note that this is timezone
      aware, so that if the histogram data is generated on a machine with a
      different timezone than the one that recorded the original data, the
      correct number of hours is still honored.
    max_distance_feet: number indicating the geo fence outside of which flights
      should be ignored for the purposes of including the flight data in the
      histogram.
    max_altitude_feet: number indicating the maximum altitude outside of which
      flights should be ignored for the purposes of including the flight data
      in the histogram.
    normalize_factor: divisor to apply to all the values, so that we can easily
      renormalize the histogram to display on a percentage or daily basis; if
      zero, no renormalization is applied.
    exhaustive: boolean only relevant if sort_type is a list, in which case,
      this ensures that the returned set of keys (and matching values) contains
      all the elements in the list, including potentially those with a
      frequency of zero, within the restrictions of truncate.

  Returns:
    3-tuple of lists cut and sorted as indicated by parameters above:
    - list of values (or frequencies) of the histogram elements
    - list of keys (or labels) of the histogram elements
    - list of the flights that passed the filters and were included in the
      histogram
  """
  histogram_dict = {}
  filtered_data = []

  def IfNoneReturnInf(f, key):
    # Treat missing or otherwise falsy values as infinitely large so that
    # those flights are excluded by the distance / altitude filters below.
    value = f.get(key)
    if not value:
      value = float('inf')
    return value

  # Get the timezone-aware current time just once, rather than recomputing it
  # for each flight in the filtering loop below.
  now = datetime.datetime.now(TZ)
  for element in data:
    if (
        IfNoneReturnInf(element, 'min_feet') <= max_distance_feet and
        IfNoneReturnInf(element, 'altitude') <= max_altitude_feet and
        HoursSinceFlight(now, element['now']) <= hours):
      filtered_data.append(element)
      key = keyfunction(element)
      if key is None or key == '':
        key = KEY_NOT_PRESENT_STRING
      if key in histogram_dict:
        histogram_dict[key] += 1
      else:
        histogram_dict[key] = 1
  values = list(histogram_dict.values())
  keys = list(histogram_dict.keys())

  if normalize_factor:
    values = [v / normalize_factor for v in values]

  sort_by_enumerated_list = isinstance(sort_type, list)
  if exhaustive and sort_by_enumerated_list:
    missing_keys = set(sort_type).difference(set(keys))
    missing_values = [0 for unused_k in missing_keys]
    keys.extend(missing_keys)
    values.extend(missing_values)

  if keys:  # filters could potentially have removed all data
    if not truncate or len(keys) <= truncate:
      if sort_by_enumerated_list:
        (values, keys) = SortByDefinedList(values, keys, sort_type)
      elif sort_type == 'value':
        (values, keys) = SortByValues(values, keys)
      else:
        (values, keys) = SortByKeys(values, keys)
    else:  # Unknown might fall in the middle, and so shouldn't be truncated
      (values, keys) = SortByValues(
          values, keys, ignore_sort_at_end_strings=True)

      truncated_values = list(values[:truncate-1])
      truncated_keys = list(keys[:truncate-1])
      other_value = sum(values[truncate-1:])
      truncated_values.append(other_value)
      truncated_keys.append(OTHER_STRING)
      if sort_by_enumerated_list:
        (values, keys) = SortByDefinedList(
            truncated_values, truncated_keys, sort_type)
      elif sort_type == 'value':
        (values, keys) = SortByValues(
            truncated_values, truncated_keys, ignore_sort_at_end_strings=False)
      else:
        (values, keys) = SortByKeys(truncated_values, truncated_keys)
  else:
    values = []
    keys = []
  return (values, keys, filtered_data)
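

# GenerateHistogramData's truncation step keeps the truncate-1 most frequent
# keys and folds everything else into a single catch-all bucket. A minimal
# sketch of that fold on an already-counted histogram, using a plain 'Other'
# label (illustration only; the real code uses OTHER_STRING and the SortBy*
# helpers defined below):
def _ExampleTruncateHistogram(values, keys, truncate):
  """Illustrative only: folds least-frequent keys into an 'Other' bucket."""
  if not truncate or len(keys) <= truncate:
    return (values, keys)
  paired = sorted(zip(values, keys), reverse=True)  # most frequent first
  kept = paired[:truncate - 1]
  other_value = sum(v for v, unused_k in paired[truncate - 1:])
  truncated_values = [v for v, unused_k in kept] + [other_value]
  truncated_keys = [k for unused_v, k in kept] + ['Other']
  return (truncated_values, truncated_keys)
# _ExampleTruncateHistogram([5, 3, 2, 1], ['SFO', 'LAX', 'SEA', 'JFK'], 3)
# returns ([5, 3, 3], ['SFO', 'LAX', 'Other']).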


def SortByValues(values, keys, ignore_sort_at_end_strings=False):
  """Sorts list of values in desc sequence, applying same resorting to keys.

  Given a list of keys and values representing a histogram, returns two new
  lists that are sorted so that the values occur in descending sequence and the
  keys are moved around in the same way. This allows the printing of a
  histogram with the largest keys listed first - i.e.: top five airlines.

  Keys identified by SORT_AT_END_STRINGS - such as, perhaps, 'Other' - will
  optionally be placed at the end of the sequence. And where values are
  identical, the secondary sort is based on the keys.

  Args:
    values: list of values for the histogram to be used as the primary sort key.
    keys: list of keys for the histogram that will be moved in the same way as
      the values.
    ignore_sort_at_end_strings: boolean indicating whether specially-defined
      keys will be sorted at the end.

  Returns:
    2-tuple of (values, keys) lists sorted as described above
  """
  if ignore_sort_at_end_strings:
    sort_at_end_strings = []
  else:
    sort_at_end_strings = SORT_AT_END_STRINGS

  return SortZipped(
      values, keys, True,
      lambda a: (
          False,
          False,
          a[1]) if a[1] in sort_at_end_strings else (True, a[0], a[1]))


def SortByKeys(values, keys, ignore_sort_at_end_strings=False):
  """Sorts list of keys in asc sequence, applying same resorting to values.

  Given a list of keys and values representing a histogram, returns two new
  lists that are sorted so that the keys occur in ascending alpha sequence and
  the values are moved around in the same way. This allows the printing of a
  histogram with the first keys alphabetically listed first - i.e.: 7am, 8am,
  9am.

  Keys identified by SORT_AT_END_STRINGS - such as, perhaps, 'Other' - will
  optionally be placed at the end of the sequence.

  Args:
    values: list of values for the histogram that will be moved in the same way
      as the keys.
    keys: list of keys for the histogram to be used as the primary sort key.
    ignore_sort_at_end_strings: boolean indicating whether specially-defined
      keys will be sorted at the end.

  Returns:
    2-tuple of (values, keys) lists sorted as described above
  """
  if ignore_sort_at_end_strings:
    sort_at_end_strings = []
  else:
    sort_at_end_strings = SORT_AT_END_STRINGS

  return SortZipped(
      values, keys, False,
      lambda a: (True, a[1]) if a[1] in sort_at_end_strings else (False, a[1]))


def SortByDefinedList(values, keys, sort_sequence):
  """Sorts keys in user-enumerated sequence, applying same resorting to values.

  Given a list of keys and values representing a histogram, returns two new
  lists that are sorted so that the keys occur in the specific sequence
  identified in the list sort_sequence, while the values are moved around in
  the same way. This allows the printing of a histogram with the keys occurring
  in a canonical order - i.e.: Tuesday, Wednesday, Thursday. Keys present in
  keys but not existing in sort_sequence are then sorted at the end, but
  amongst them, sorted based on the value.

  Args:
    values: list of values for the histogram that will be moved in the same way
      as the keys.
    keys: list of keys for the histogram to be used as the primary sort key.
    sort_sequence: list - which need not be exhaustive - of the keys in their
      desired order.

  Returns:
    2-tuple of (values, keys) lists sorted as described above
  """
  return SortZipped(
      values, keys, False,
      lambda a: (
          False,
          sort_sequence.index(a[1])) if a[1] in sort_sequence else (True, a[0]))


def SortZipped(x, y, reverse, key):
  """Sorts lists x & y via the function defined in key.

  Applies the same reordering to the two lists x and y, where the reordering is
  given by the function defined in the key applied to the tuple (x[n], y[n]).
  That is, suppose
  - x = [3, 2, 1]
  - y = ['b', 'c', 'a']
  Then the sort to both lists is done based on how the key is applied to the
  tuples:
  - [(3, 'b'), (2, 'c'), (1, 'a')]

  If key = lambda a: a[0], then the sort is done based on 3, 2, 1, so the
  sorted lists are
  - x = [1, 2, 3]
  - y = ['a', 'c', 'b']

  If key = lambda a: a[1], then the sort is done based on ['b', 'c', 'a'], so
  the sorted lists are
  - x = [1, 3, 2]
  - y = ['a', 'b', 'c']

  Args:
    x: First list
    y: Second list
    reverse: Boolean indicating whether the sort should be descending (True)
      or ascending (False), as with the reverse argument to sorted()
    key: function applied to the 2-tuple constructed by taking the
      corresponding values of the lists x & y, used to generate the key on
      which the sort is applied

  Returns:
    2-tuple of (x, y) lists sorted as described above
  """
  zipped_xy = zip(x, y)
  sorted_xy = sorted(zipped_xy, reverse=reverse, key=key)
  # unzip
  (x, y) = list(zip(*sorted_xy))
  return (x, y)
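

# SortZipped is the workhorse behind the three SortBy* helpers above: it sorts
# two parallel lists with a single key function applied to the zipped
# (value, key) pairs. A small demonstration using the example values from its
# docstring (illustration only; note that the returned sequences are tuples,
# because they come back through zip):
def _ExampleSortZipped():
  """Illustrative only: sorts parallel lists by the second list."""
  x = [3, 2, 1]
  y = ['b', 'c', 'a']
  (x_sorted, y_sorted) = SortZipped(x, y, False, lambda a: a[1])
  # x_sorted == (1, 3, 2) and y_sorted == ('a', 'b', 'c')
  return (x_sorted, y_sorted)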


def CreateSingleHistogramChart(
    data,
    keyfunction,
    sort_type,
    title,
    position=None,
    truncate=0,
    hours=float('inf'),
    max_distance_feet=float('inf'),
    max_altitude_feet=float('inf'),
    normalize_factor=0,
    exhaustive=False,
    figsize_inches=(9, 6)):
  """Creates matplotlib.pyplot of histogram that can then be saved or printed.

  Args:
    data: the iterable of the raw data from which the histogram will be
      generated; each element of the iterable is a dictionary, that contains at
      least the key 'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    keyfunction: the function that determines how the key or label of the
      histogram should be generated; it is called for each element of the data
      iterable. For instance, to simply generate a histogram on the attribute
      'heading', keyfunction would be lambda a: a['heading'].
    title: the "base" title to include on the histogram; it will additionally be
      augmented with the details about the date range.
    position: Either a 3-digit integer or an iterable of three separate integers
      describing the position of the subplot. If the three integers are nrows,
      ncols, and index in order, the subplot will take the index position on a
      grid with nrows rows and ncols columns. index starts at 1 in the upper
      left corner and increases to the right.
    sort_type: determines how the keys (and the corresponding values) are
      sorted:
      'key': the keys are sorted by a simple comparison operator between them,
        which sorts strings alphabetically and numbers numerically.
      'value': the keys are sorted by a comparison between the values, which
        means that more frequently-occurring keys are listed first.
      list: if instead of the strings a list is passed, the keys are then
        sorted in the sequence enumerated in the list. This is useful for, say,
        ensuring that the days of the week (Tues, Wed, Thur, ...) are listed in
        sequence. Keys that are generated by keyfunction but that are not in
        the given list are sorted last (and then amongst those, alphabetically).
    truncate: integer indicating the maximum number of keys to return; if set
      to 0, or if set to a value larger than the number of keys, no truncation
      occurs. But if set to a value less than the number of keys, then the keys
      with the lowest frequency are combined into one key named OTHER_STRING so
      that the number of keys in the resulting histogram (together with
      OTHER_STRING) is equal to truncate.
    hours: integer indicating the number of hours of history to include.
      Flights with a calcd_display_time more than this many hours in the past
      are excluded from the histogram generation. Note that this is timezone
      aware, so that if the histogram data is generated on a machine with a
      different timezone than the one that recorded the original data, the
      correct number of hours is still honored.
    max_distance_feet: number indicating the geo fence outside of which flights
      should be ignored for the purposes of including the flight data in the
      histogram.
    max_altitude_feet: number indicating the maximum altitude outside of which
      flights should be ignored for the purposes of including the flight data
      in the histogram.
    normalize_factor: divisor to apply to all the values, so that we can easily
      renormalize the histogram to display on a percentage or daily basis; if
      zero, no renormalization is applied.
    exhaustive: boolean only relevant if sort_type is a list, in which case,
      this ensures that the returned set of keys (and matching values) contains
      all the elements in the list, including potentially those with a
      frequency of zero, within the restrictions of truncate.
    figsize_inches: a 2-tuple of width, height indicating the size of the
      histogram.
  """
  (values, keys, filtered_data) = GenerateHistogramData(
      data,
      keyfunction,
      sort_type,
      truncate=truncate,
      hours=hours,
      max_distance_feet=max_distance_feet,
      max_altitude_feet=max_altitude_feet,
      normalize_factor=normalize_factor,
      exhaustive=exhaustive)
  if position:
    matplotlib.pyplot.subplot(*position)
  else:
    matplotlib.pyplot.figure(figsize=figsize_inches)
  values_coordinates = numpy.arange(len(keys))
  matplotlib.pyplot.bar(values_coordinates, values)

  # The filtering may have removed all flight data,
  # or there may have been none to start with.
  if not filtered_data:
    return

  earliest_flight_time = int(filtered_data[0]['now'])
  last_flight_time = int(filtered_data[-1]['now'])
  date_range_string = ' %d flights over last %s hours' % (
      sum(values), SecondsToDdHh(last_flight_time - earliest_flight_time))

  timestamp_string = 'Last updated %s' % EpochDisplayTime(
      time.time(), format_string='%b %-d, %-I:%M%p')

  full_title = '\n'.join([title, date_range_string, timestamp_string])

  matplotlib.pyplot.title(full_title)

  matplotlib.pyplot.subplots_adjust(
      bottom=0.15, left=0.09, right=0.99, top=0.89)

  matplotlib.pyplot.xticks(
      values_coordinates, keys, rotation='vertical', wrap=True,
      horizontalalignment='right',
      verticalalignment='center')


def HistogramSettingsHours(how_much_history):
  """Extracts the desired hours of history from histogram configuration string.

  Args:
    how_much_history: string from the histogram config file.

  Returns:
    Number of hours of history to include in the histogram.
  """
  if how_much_history == 'today':
    hours = HoursSinceMidnight()
  elif how_much_history == '24h':
    hours = HOURS_IN_DAY
  elif how_much_history == '7d':
    hours = 7 * HOURS_IN_DAY
  elif how_much_history == '30d':
    hours = 30 * HOURS_IN_DAY
  else:
    Log('Histogram form has invalid value for how_much_history: %s'
        % how_much_history)
    hours = 7 * HOURS_IN_DAY
  return hours


def HistogramSettingsScreens(max_screens):
  """Extracts the desired number of screens from histogram configuration string.

  Args:
    max_screens: string from the histogram config file.

  Returns:
    Maximum number of screens to display for a splitflap histogram.
  """
  if max_screens == '_1':
    screen_limit = 1
  elif max_screens == '_2':
    screen_limit = 2
  elif max_screens == '_5':
    screen_limit = 5
  elif max_screens == 'all':
    screen_limit = 0  # no limit on screens
  else:
    Log('Histogram form has invalid value for max_screens: %s' % max_screens)
    screen_limit = 1
  return screen_limit


def HistogramSettingsKeySortTitle(which, hours, flights, max_altitude=45000):
  """Provides arguments to generate a histogram from the config string.

  The same parameters are used to generate either a splitflap text or
  web-rendered histogram in terms of the histogram title, the keyfunction, and
  how to sort the keys. For a given histogram name (based on the names defined
  in the histogram config file), this provides those parameters.

  Args:
    which: string from the histogram config file indicating the histogram to
      provide settings for.
    hours: how many hours of histogram data have been requested.
    flights: list of the flights in the data set.
    max_altitude: indicates the maximum altitude that should be included on the
      altitude labels.

  Returns:
    A 4-tuple of the parameters used by either CreateSingleHistogramChart or
    MessageboardHistogram, of the keyfunction, sort, title, and hours.
  """
  def DivideAndFormat(dividend, divisor):
    if dividend is None:
      return KEY_NOT_PRESENT_STRING
    if isinstance(dividend, numbers.Number):
      return '%2d' % round(dividend / divisor)
    return dividend[:2]

  def RoundAndFormat(dividend, divisor, digits):
    if dividend is None:
      return KEY_NOT_PRESENT_STRING
    if isinstance(dividend, numbers.Number):




                            <----SKIPPED LINES---->




    key = lambda k: k.get('destination_iata', KEY_NOT_PRESENT_STRING)
    sort = 'value'
    title = 'Destination'
  elif which == 'origin':
    key = lambda k: k.get('origin_iata', KEY_NOT_PRESENT_STRING)
    sort = 'value'
    title = 'Origin'
  elif which == 'hour':
    key = lambda k: DisplayTime(k, '%H')
    sort = 'key'
    title = 'Hour'
  elif which == 'airline':
    key = DisplayAirline
    sort = 'value'
    title = 'Airline'
  elif which == 'aircraft':
    key = lambda k: k.get('aircraft_type_code', KEY_NOT_PRESENT_STRING)
    sort = 'value'
    title = 'Aircraft'
  elif which == 'altitude':
    key = lambda k: DivideAndFormat(
        k.get('altitude', KEY_NOT_PRESENT_STRING), 1000)
    sort = ['%2d'%x for x in range(0, round((max_altitude+1)/1000))]
    title = 'Altitude (1000ft)'
  elif which == 'bearing':
    key = lambda k: ConvertBearingToCompassDirection(
        k.get('track', KEY_NOT_PRESENT_STRING), pad=True, length=3)
    sort = [d.rjust(3) for d in DIRECTIONS_16]
    title = 'Bearing'
  elif which == 'distance':
    key = lambda k: DivideAndFormat(
        k.get('min_feet', KEY_NOT_PRESENT_STRING), 100)
    sort = ['%2d'%x for x in range(0, round((MIN_METERS*FEET_IN_METER)/100)+1)]
    title = 'Min Dist (100ft)'
  elif which == 'day_of_week':
    key = lambda k: DisplayTime(k, '%a')
    sort = DAYS_OF_WEEK
    title = 'Day of Week'
    # if less than one week, as requested; if more than
    # one week, in full week multiples
    hours_in_week = 7 * HOURS_IN_DAY
    weeks = hours / hours_in_week
    if weeks > 1:
      hours = hours_in_week * int(hours / hours_in_week)
  elif which == 'day_of_month':
    key = lambda k: DisplayTime(k, '%-d').rjust(2)
    today_day = datetime.datetime.now(TZ).day
    days = list(range(today_day, 0, -1))  # today down to the first of the month
    days.extend(range(31, today_day, -1))  # 31st of month down to tomorrow
    days = [str(d).rjust(2) for d in days]
    sort = days
    title = 'Day of Month'

  elif which == 'speed':
    rounding = 25
    field = 'speed'
    min_value = min([f[field] for f in flights if f.get(field)])
    max_value = max([f[field] for f in flights if f.get(field)])
    digits = int(math.log10(max_value)) + 1
    key = lambda k: RoundAndFormat(
        k.get(field, KEY_NOT_PRESENT_STRING), rounding, digits)
    values = range(int(min_value), int(max_value) + 1)
    sort = sorted(list({RoundAndFormat(v, rounding, digits) for v in values}))
    title = 'Speed (kn)'

  elif which == 'aircraft_length':
    key = DisplayLength
    min_value = min([AircraftLength(f, default=float('inf')) for f in flights])
    max_value = max([AircraftLength(f, default=float('-inf')) for f in flights])
    sort = list(range(round(min_value), round(max_value) + 1))
    title = 'Plane Length (m)'

  elif which == 'vert_rate':
    rounding = 200
    field = 'vert_rate'
    min_value = min([f[field] for f in flights if f.get(field)])
    max_value = max([f[field] for f in flights if f.get(field)])
    digits = max(
        int(math.log10(max_value)),
        1 + int(math.log10(abs(min_value)))
        ) + 1
    key = lambda k: RoundAndFormat(
        k.get(field, KEY_NOT_PRESENT_STRING), rounding, digits)
    values = range(int(min_value), int(max_value) + 1)
    sort = sorted(list({RoundAndFormat(v, rounding, digits) for v in values}))
    title = 'Ascent Rate (%s)' % CLIMB_RATE_UNITS

  else:
    Log(
        'Histogram form has invalid value for which_histograms: %s' % which)
    return HistogramSettingsKeySortTitle(
        'destination', hours, flights, max_altitude=max_altitude)

  return (key, sort, title, hours)
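

# Several of the numeric histograms above (altitude, distance, speed,
# vert_rate) bucket raw values by dividing or rounding and then format the
# result to a fixed width so the split-flap columns line up. A minimal sketch
# of that bucketing, loosely modeled on the 25-knot rounding used for the
# speed histogram (hypothetical helper; the real code handles None and
# non-numeric values via DivideAndFormat / RoundAndFormat):
def _ExampleSpeedBucket(speed_knots, rounding=25, digits=3):
  """Illustrative only: rounds a speed to the nearest bucket and pads it."""
  bucket = int(round(speed_knots / rounding)) * rounding
  return '%*d' % (digits, bucket)
# _ExampleSpeedBucket(137) returns '125' and _ExampleSpeedBucket(63) returns
# ' 75': values land in 25-knot buckets, right-aligned to three characters.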


def ImageHistograms(
    flights,
    which_histograms,
    how_much_history,
    filename_prefix=HISTOGRAM_IMAGE_PREFIX,
    filename_suffix=HISTOGRAM_IMAGE_SUFFIX,
    heartbeat=True):
  """Generates multiple split histogram images.

  Args:
    flights: the iterable of the raw data from which the histogram will be
      generated; each element of the iterable is a dictionary, that contains at
      least the key 'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    which_histograms: string parameter indicating which histogram(s) to
      generate, which can be either the special string 'all', or a string
      linked to a specific histogram.
    how_much_history: string parameter taking a value among ['today', '24h',
      '7d', '30d'].
    filename_prefix: this string indicates the file path and name prefix for
      the images that are created. File names are created in the form
      [prefix][name].[suffix]; i.e.: if the prefix is histogram_ and the
      suffix is png, then the file name might be histogram_aircraft.png.
    filename_suffix: see above; also interpreted by savefig to generate the
      correct format.
    heartbeat: boolean indicating whether we should log heartbeats between each
      histogram to make sure monitoring does not mistake this slow procedure
      for being hung; this should be set to false if this is called from
      outside of messageboard.main.

  Returns:
    List of 2-tuples of histograms generated, where the first element is the
    histogram identifier (i.e.: destination), and the second element is the
    filename (i.e.: histogram_destination.png).
  """
  hours = HistogramSettingsHours(how_much_history)

  histograms_to_generate = []
  if which_histograms in ['destination', 'all']:
    histograms_to_generate.append({'generate': 'destination'})
  if which_histograms in ['origin', 'all']:
    histograms_to_generate.append({'generate': 'origin'})
  if which_histograms in ['hour', 'all']:
    histograms_to_generate.append({'generate': 'hour'})
  if which_histograms in ['airline', 'all']:
    histograms_to_generate.append(
        {'generate': 'airline', 'truncate': int(TRUNCATE/2)})
  if which_histograms in ['aircraft', 'all']:
    histograms_to_generate.append({'generate': 'aircraft'})
  if which_histograms in ['altitude', 'all']:
    histograms_to_generate.append({'generate': 'altitude', 'exhaustive': True})
  if which_histograms in ['bearing', 'all']:
    histograms_to_generate.append({'generate': 'bearing'})
  if which_histograms in ['distance', 'all']:
    histograms_to_generate.append({'generate': 'distance', 'exhaustive': True})
  if which_histograms in ['day_of_week', 'all']:
    histograms_to_generate.append({'generate': 'day_of_week'})
  if which_histograms in ['day_of_month', 'all']:
    histograms_to_generate.append({'generate': 'day_of_month'})

  if which_histograms in ['speed', 'all']:
    histograms_to_generate.append({'generate': 'speed', 'exhaustive': True})
  if which_histograms in ['aircraft_length', 'all']:
    histograms_to_generate.append(
        {'generate': 'aircraft_length', 'exhaustive': True})
  if which_histograms in ['vert_rate', 'all']:
    histograms_to_generate.append({'generate': 'vert_rate', 'exhaustive': True})

  histograms_generated = []
  for histogram in histograms_to_generate:
    this_histogram = which_histograms
    if this_histogram == 'all':
      this_histogram = histogram['generate']
    (key, sort, title, updated_hours) = HistogramSettingsKeySortTitle(
        this_histogram, hours, flights)

    # if multiple histograms are getting generated, this might take a few
    # seconds; logging a heartbeat with each histogram ensures that
    # monitoring.py does not mistake this pause for a hang.
    if heartbeat:
      Heartbeat()

    CreateSingleHistogramChart(
        flights,
        key,
        sort,
        title,
        truncate=histogram.get('truncate', TRUNCATE),
        hours=updated_hours,
        exhaustive=histogram.get('exhaustive', False))

    filename = (
        WEBSERVER_IMAGE_RELATIVE_FOLDER +  # i.e.: images/
        filename_prefix +                  # i.e.: histogram_
        histogram['generate'] +            # i.e.: destination
        '.' + filename_suffix)             # i.e.: .png
    filepath = WEBSERVER_PATH + filename   # i.e.: /var/www/html/ + filename

    matplotlib.pyplot.savefig(filepath)
    matplotlib.pyplot.close()
    histograms_generated.append((histogram['generate'], filename))

  return histograms_generated


def MessageboardHistograms(
    flights,
    which_histograms,
    how_much_history,
    max_screens,
    data_summary,
    heartbeat=True):
  """Generates multiple split flap screen histograms.

  Args:
    flights: the iterable of the raw data from which the histogram will be
      generated; each element of the iterable is a dictionary, that contains at
      least the key 'now', and depending on other parameters, also potentially
      'min_feet' amongst others.
    which_histograms: string parameter indicating which histogram(s) to
      generate, which can be either the special string 'all', or a string
      linked to a specific histogram.
    how_much_history: string parameter taking a value among ['today', '24h',
      '7d', '30d'].
    max_screens: string parameter taking a value among ['_1', '_2', '_5', or
      'all'].
    data_summary: parameter that evaluates to a boolean indicating whether the
      data summary screen in the histogram should be displayed.
    heartbeat: boolean indicating whether we should log heartbeats between each
      histogram to make sure monitoring does not mistake this slow procedure
      for being hung; this should be set to false if this is called from
      outside of messageboard.main.

  Returns:
    Returns a list of printable strings (with embedded new line characters)
    representing the histogram, for each screen in the histogram.
  """
  messages = []

  hours = HistogramSettingsHours(how_much_history)
  screen_limit = HistogramSettingsScreens(max_screens)

  histograms_to_generate = []
  if which_histograms in ['destination', 'all']:
    histograms_to_generate.append({
        'generate': 'destination',
        'suppress_percent_sign': True,
        'columns': 3})
  if which_histograms in ['origin', 'all']:
    histograms_to_generate.append({
        'generate': 'origin',
        'suppress_percent_sign': True,
        'columns': 3})
  if which_histograms in ['hour', 'all']:
    histograms_to_generate.append({
        'generate': 'hour',




                            <----SKIPPED LINES---->




  if which_histograms in ['speed', 'all']:
    histograms_to_generate.append({
        'generate': 'speed',
        'columns': 2})
  if which_histograms in ['aircraft_length', 'all']:
    histograms_to_generate.append({
        'generate': 'aircraft_length',
        'columns': 3})
  if which_histograms in ['vert_rate', 'all']:
    histograms_to_generate.append({
        'generate': 'vert_rate',
        'columns': 2})

  for histogram in histograms_to_generate:
    this_histogram = which_histograms
    if this_histogram == 'all':
      this_histogram = histogram['generate']
    (key, sort, title, updated_hours) = HistogramSettingsKeySortTitle(
        this_histogram, hours, flights)

    # if multiple histograms are getting generated, this might take a few
    # seconds; logging a heartbeat with each histogram ensures that
    # monitoring.py does not mistake this pause for a hang.
    if heartbeat:
      Heartbeat()
    histogram_screens = MessageboardHistogram(
        flights,
        key,
        sort,
        title,
        screen_limit=screen_limit,
        columns=histogram.get('columns', 2),
        suppress_percent_sign=histogram.get('suppress_percent_sign', False),
        column_divider=histogram.get('column_divider', ' '),
        data_summary=data_summary,
        hours=updated_hours,
        absolute=histogram.get('absolute', False))
    messages.extend(histogram_screens)

  messages = [(FLAG_MSG_HISTOGRAM, m) for m in messages]

  return messages


def MessageboardHistogram(
    data,
    keyfunction,
    sort_type,
    title,
    screen_limit=1,
    columns=2,
    column_divider=' ',
    data_summary=False,
    hours=0,
    suppress_percent_sign=False,
    absolute=False):
  """Generates text for one histogram that can be rendered on the display.

  Args:
    data: the iterable of the raw data from which the histogram will be
      generated; each element of the iterable is a dictionary that contains at
      least the key 'now' and, depending on other parameters, also potentially
      'min_feet' amongst others.
    keyfunction: the function that determines how the key or label of the
      histogram should be generated; it is called for each element of the data
      iterable. For instance, to simply generate a histogram on the attribute
      'heading', keyfunction would be lambda a: a['heading'].
    sort_type: determines how the keys (and the corresponding values) are
      sorted; see GenerateHistogramData docstring for details
    title: string title, potentially truncated to fit, to be displayed for the
      histogram
    screen_limit: maximum number of screens to be displayed for the histogram;
      a value of zero is interpreted to mean no limit on screens.
    columns: number of columns of data to be displayed for the histogram; note
      that the keys of the histogram may need to be truncated in length to fit
      the display as more columns are squeezed into the space
    column_divider: string for the character(s) to be used to divide the columns
    data_summary: boolean indicating whether to augment the title with a second
      header line about the data presented in the histogram
    hours: integer indicating the oldest data to be included in the histogram
    suppress_percent_sign: boolean indicating whether to suppress the percent
      sign in the data (but to add it to the title) to reduce the amount of
      string truncation potentially necessary for display of the keys
    absolute: boolean indicating whether values should be presented as
      percentages or as totals; if True, suppress_percent_sign is irrelevant.

  Returns:
    Returns a list of screens, one per generated screen, where each screen is
    itself a list of printable lines representing the histogram.
  """
  title_lines = 1
  if data_summary:
    title_lines += 1
  available_entries_per_screen = (SPLITFLAP_LINE_COUNT - title_lines) * columns
  available_entries_total = available_entries_per_screen * screen_limit
  (values, keys, filtered_data) = GenerateHistogramData(
      data, keyfunction, sort_type,
      truncate=available_entries_total, hours=hours)

  # The filtering may have removed all flight data, or
  # there may have been none to start with
  if not filtered_data:
    return []

  screen_count = math.ceil(len(keys) / available_entries_per_screen)

  column_width = int(
      (SPLITFLAP_CHARS_PER_LINE - len(column_divider)*(columns - 1)) / columns)
  leftover_space = SPLITFLAP_CHARS_PER_LINE - (
      column_width*columns + len(column_divider)*(columns - 1))
  extra_divider_chars = math.floor(leftover_space / (columns - 1))
  column_divider = column_divider.ljust(
      len(column_divider) + extra_divider_chars)
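  # Worked example of the column math above (display width assumed for
  # illustration): with a 22-char line, 3 columns, and a single-space divider,
  # column_width is int((22 - 2) / 3) = 6, leaving 22 - (6*3 + 1*2) = 2
  # leftover characters, so each of the two dividers gets one extra space.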

  # i.e.: ' 10%' or ' 10', depending on suppress_percent_sign
  printed_percent_sign = ''
  if absolute:
    digits = math.floor(math.log10(max(values))) + 1
    value_size = digits + 1
    augment_title_units = ' #'
    format_string = '%%%dd' % digits
  else:
    value_size = 3
    augment_title_units = ' %'
    if not suppress_percent_sign:
      value_size += 1
      printed_percent_sign = '%'
      augment_title_units = ''
  column_key_width = column_width - value_size

  total = sum(values)

  if data_summary:
    if hours:
      hours_of_data = min(hours, DataHistoryHours(data))
    else:
      hours_of_data = DataHistoryHours(data)
    time_horizon_text = 'Last %s' % SecondsToDdHh(
        hours_of_data * SECONDS_IN_HOUR)

    summary_text = '%s (n=%d)' % (time_horizon_text, sum(values))
    summary_text = summary_text.center(SPLITFLAP_CHARS_PER_LINE)

  split_flap_boards = []
  for screen in range(screen_count):
    if screen_count == 1:
      counter = ''
    else:
      counter = ' %d/%d' % (screen+1, screen_count)
    screen_title = '%s%s%s' % (
        title[
            :SPLITFLAP_CHARS_PER_LINE - len(counter) -
            len(augment_title_units)],
        augment_title_units,
        counter)

    screen_title = screen_title.center(SPLITFLAP_CHARS_PER_LINE)
    start_index = screen*available_entries_per_screen
    end_index = min((screen+1)*available_entries_per_screen-1, len(keys)-1)
    number_of_entries = end_index - start_index + 1
    number_of_lines = math.ceil(number_of_entries / columns)

    lines = []

    lines.append(screen_title.upper())
    if data_summary:
      lines.append(summary_text.upper())
    for line_index in range(number_of_lines):
      key_value = []
      for column_index in range(columns):
        index = start_index + column_index*number_of_lines + line_index
        if index <= end_index:
          if absolute:
            value_string = format_string % values[index]
          else:
            # If the share rounds to at least 1%, display a right-justified
            # two-digit percent, e.g. ' 5'; otherwise, if it rounds to at
            # least 0.1%, display e.g. '.5'; otherwise display ' 0'.
            if values[index]/total*100 >= 0.95:
              value_string = '%2d' % round(values[index]/total*100)
            elif round(values[index]/total*1000)/10 >= 0.1:
              value_string = ('%.1f' % (round(values[index]/total*1000)/10))[1:]
            else:
              value_string = ' 0'
          key_value.append('%s %s%s' % (
              str(keys[index])[:column_key_width].ljust(column_key_width),
              value_string,
              printed_percent_sign))

      line = (column_divider.join(key_value)).upper()
      lines.append(line)

    split_flap_boards.append(lines)

  return split_flap_boards
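

# A minimal standalone restatement (illustrative only; not called anywhere in
# this module) of the value-formatting rules used in MessageboardHistogram
# when absolute is False: a share that rounds to at least 1% prints as a
# right-justified two-digit integer, one that rounds to at least 0.1% prints
# with a leading dot, and anything smaller prints as ' 0'.
def _ExampleHistogramValueString(count, total):
  percent = count / total * 100
  if percent >= 0.95:  # rounds to at least 1%
    return '%2d' % round(percent)
  if round(percent * 10) / 10 >= 0.1:  # rounds to at least 0.1%
    return ('%.1f' % (round(percent * 10) / 10))[1:]  # e.g. '.5'
  return ' 0'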


def TriggerHistograms(flights, histogram_settings, heartbeat=True):
  """Triggers the text-based or web-based histograms.

  Based on the histogram settings, determines whether to generate text or image
  histograms (or both). For image histograms, also generates empty images for
  the histograms not created so that broken image links are not displayed in
  the webpage.

  Args:
    flights: List of flight attribute dictionaries.
    histogram_settings: Dictionary of histogram parameters.
    heartbeat: boolean indicating whether we should log heartbeats between each
      histogram to make sure monitoring does not mistake this slow procedure
      for being hung; this should be set to false if this is called from
      outside of messageboard.main.

  Returns:
    List of histogram messages, if text-based histograms are selected; empty
    list otherwise.
  """
  histogram_messages = []

  if histogram_settings['type'] in ('messageboard', 'both'):
    histogram_messages = MessageboardHistograms(
        flights,
        histogram_settings['histogram'],
        histogram_settings['histogram_history'],
        histogram_settings['histogram_max_screens'],
        histogram_settings.get('histogram_data_summary', False),
        heartbeat=heartbeat)
  if histogram_settings['type'] in ('images', 'both'):

    # Since Google Chrome seems to ignore all instructions not to cache, we
    # need to make sure we do not reuse file names - hence the epoch_string -
    # and then we need to:
    # 1) update the histograms.html file with the correct file links, and
    # 2) delete the images that are now obsolete.
    epoch_string = '%d_' % round(time.time())

    generated_histograms = ImageHistograms(
        flights,
        histogram_settings['histogram'],
        histogram_settings['histogram_history'],
        filename_prefix=HISTOGRAM_IMAGE_PREFIX + epoch_string,
        heartbeat=heartbeat)
    html_lines = ReadFile(HISTOGRAM_IMAGE_HTML).split('\n')
    replaced_images = []
    for identifier, new_filename in generated_histograms:
      # for each histogram, find the html_line with the matching id
      # Example line:
      #   <img id="destination" src="images/histogram_destination.png"><p>
      identifier = '"%s"' % identifier
      n, line = None, None  # addresses pylint complaint
      found = False
      for n, line in enumerate(html_lines):
        if identifier in line:
          found = True
          break
        found = False
      if found:
        start_char = line.find(WEBSERVER_IMAGE_RELATIVE_FOLDER)
        end_character = (
            line.find(HISTOGRAM_IMAGE_SUFFIX, start_char) +
            len(HISTOGRAM_IMAGE_SUFFIX))
        old_filename = line[start_char:end_character]
        line = line.replace(old_filename, new_filename)
        html_lines[n] = line
        replaced_images.append(old_filename)
    new_html = '\n'.join(html_lines)
    WriteFile(HISTOGRAM_IMAGE_HTML, new_html)

    # Remove those obsoleted files
    for f in replaced_images:
      RemoveFile(WEBSERVER_PATH + f)

  return histogram_messages
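

# Minimal sketch (hypothetical helper, not used above) of the src-swapping
# step inside TriggerHistograms: locate the old image path between the
# relative folder and the image suffix within an html image line, and swap in
# the newly generated filename. The folder and suffix defaults stand in for
# WEBSERVER_IMAGE_RELATIVE_FOLDER and HISTOGRAM_IMAGE_SUFFIX and assume the
# folder string is present in the line.
def _ExampleSwapImageSrc(line, new_filename, folder='images/', suffix='.png'):
  start = line.find(folder)
  end = line.find(suffix, start) + len(suffix)
  old_filename = line[start:end]
  return line.replace(old_filename, new_filename), old_filename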


def SaveFlightsByAltitudeDistanceCSV(
    flights,
    max_days=0,
    filename='flights_by_alt_dist.csv',
    precision=100):
  """Generates CSV of hour of day, altitude, and distance.

  Generates a csv with 26 columns:
  - col#1: altitude (in feet)
  - col#2: distance (in feet)
  - cols#3-26: hour of the day

  The first row is a header row; subsequent rows list the number of flights
  that have occurred in the last max_days with an altitude and min distance
  at or below those identified in the first two columns. Each row increments
  altitude or distance by precision feet, up to the max determined by the max
  altitude and max distance amongst all the flights.

  Args:
    flights: list of the flights.
    max_days: maximum age, in days, of flights to include; 0 means no limit.
    filename: file into which to save the csv.
    precision: number of feet by which to increment the altitude or distance.
  """
  max_altitude = int(round(max([
      flight.get('altitude', -1) for flight in flights])))
  max_distance = int(round(max([
      flight.get('min_feet', -1) for flight in flights])))
  min_altitude = int(round(
      min([flight.get('altitude', float('inf')) for flight in flights])))
  min_distance = int(round(
      min([flight.get('min_feet', float('inf')) for flight in flights])))
  max_hours = max_days * HOURS_IN_DAY

  lines = []
  now = datetime.datetime.now()

  header_elements = [
      'altitude_feet', 'min_distance_feet', *[str(h) for h in HOURS]]
  line = ','.join(header_elements)
  lines.append(line)

  altitudes = list(range(
      precision * int(min_altitude / precision),
      precision * (int(max_altitude / precision) + 2),
      precision))
  distances = list(range(
      precision * int(min_distance / precision),
      precision * (int(max_distance / precision) + 2),
      precision))

  # Flight counts where either the altitude or min_feet is unknown
  line_elements = ['undefined', 'undefined']
  for hour in HOURS:
    line_elements.append(str(len([
        1 for f in flights if
        (not max_hours or HoursSinceFlight(now, f['now']) < max_hours) and
        (f.get('altitude') is None or f.get('min_feet') is None) and
        HourString(f) == hour])))
  line = ','.join(line_elements)
  lines.append(line)

  d = {}
  for flight in flights:
    if 'altitude' in flight and 'min_feet' in flight:
      this_altitude = flight['altitude']
      this_distance = flight['min_feet']
      hour = HourString(flight)
      for altitude in [a for a in altitudes if a >= this_altitude]:
        for distance in [dist for dist in distances if dist >= this_distance]:
          d[(altitude, distance, hour)] = d.get(
              (altitude, distance, hour), 0) + 1
  for altitude in altitudes:
    for distance in distances:
      line_elements = [str(altitude), str(distance)]
      for hour in HOURS:
        line_elements.append(str(d.get((altitude, distance, hour), 0)))
      line = ','.join(line_elements)
      lines.append(line)
  try:
    with open(filename, 'w') as f:
      for line in lines:
        f.write(line+'\n')
  except IOError:
    Log('Unable to write hourly histogram data file ' + filename)
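
# Worked example of the cumulative counting above (flight values assumed for
# illustration): with precision=100, a flight at altitude 2,350 ft and
# min_feet 1,120 ft increments every (altitude, distance, hour) cell whose
# altitude is >= 2,350 (i.e. 2,400, 2,500, ...) and whose distance is
# >= 1,120 (i.e. 1,200, 1,300, ...), so each CSV row reports the number of
# flights at or below that row's altitude and distance, per hour of day.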


def SaveFlightsToCSV(flights=None, filename='flights.csv'):
  """Saves attributes about the flight to a CSV.

  Args:
    flights: dictionary of flight attributes; if not provided, loaded from
      PICKLE_FLIGHTS.
    filename: name of desired csv file; if not provided, defaults to flights.
      csv.
  """
  if not flights:
    flights = UnpickleObjectFromFile(PICKLE_FLIGHTS, True)

  print('='*80)
  print('Number of flights to save to %s: %d' % (filename, len(flights)))

  # list of 2-tuples, where the second element is a function that generates
  # some value about the flight, and the first element is the name to give
  # that value when added into the flight dictionary
  functions = [
      ('display_flight_number', DisplayFlightNumber),
      ('display_airline', DisplayAirline),
      ('display_aircraft', DisplayAircraft),
      ('display_origin_iata', DisplayOriginIata),
      ('display_destination_iata', DisplayDestinationIata),
      ('display_origin_friendly', DisplayOriginFriendly),
      ('display_destination_friendly', DisplayDestinationFriendly),
      ('display_origin_destination_pair', DisplayOriginDestinationPair),
      ('display_seconds_remaining', DisplaySecondsRemaining),
      ('now_datetime', DisplayTime),
      ('now_date', lambda flight: DisplayTime(flight, '%x')),
      ('now_time', lambda flight: DisplayTime(flight, '%X')),
      ('aircraft_length_meters', AircraftLength)]
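  # For example, the 2-tuple ('display_airline', DisplayAirline) results in
  # flight['display_airline'] = DisplayAirline(flight) for each flight below.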

  for function in functions:
    for flight in flights:
      flight[function[0]] = function[1](flight)

  # these functions return dictionary of values
  functions = [
      lambda f: FlightAnglesSecondsElapsed(f, 0, '_00s'),
      lambda f: FlightAnglesSecondsElapsed(f, 10, '_10s'),
      lambda f: FlightAnglesSecondsElapsed(f, 20, '_20s'),
      DisplayDepartureTimes]
  for function in functions:
    for flight in flights:
      flight.update(function(flight))

  all_keys = set()
  for f in flights:
    all_keys.update(f.keys())
  all_keys = list(all_keys)
  all_keys.sort()

  keys_logical_order = [
      'now_date', 'now_time', 'now_datetime', 'now', 'flight_number', 'squawk',
      'origin_iata', 'destination_iata', 'altitude',
      'min_feet', 'vert_rate', 'speed',
      'distance', 'delay_seconds',
      'airline_call_sign', 'aircraft_type_friendly',
      'azimuth_degrees_00s', 'azimuth_degrees_10s', 'azimuth_degrees_20s',
      'altitude_degrees_00s', 'altitude_degrees_10s', 'altitude_degrees_20s',
      'ground_distance_feet_00s',
      'ground_distance_feet_10s',
      'ground_distance_feet_20s',
      'crow_distance_feet_00s',
      'crow_distance_feet_10s',
      'crow_distance_feet_20s']
  for key in all_keys:
    if key not in keys_logical_order:
      keys_logical_order.append(key)

  max_length = 32000
  def ExcelFormatValue(v):
    s = str(v)
    if len(s) > max_length:  # maximum Excel cell length is 32767 characters
      s = '%d character field truncated to %d characters: %s' % (
          len(s), max_length, s[:max_length])
    return s

  with open(filename, 'w') as f:
    f.write(','.join(keys_logical_order)+'\n')
    for flight in flights:
      f.write(','.join(
          ['"'+ExcelFormatValue(flight.get(k))+'"'
           for k in keys_logical_order])+'\n')


def SimulationSetup():
  """Updates global variable file names and loads JSON data for simulations."""
  # Clear file so that shell tail -f process can continue to point to same file
  def ClearFile(filename):
    if os.path.exists(filename):
      with open(filename, 'w') as f:
        f.write('')

  global SIMULATION
  SIMULATION = True

  global DUMP_JSONS
  DUMP_JSONS = UnpickleObjectFromFile(PICKLE_DUMP_JSON_FILE, True)

  global FA_JSONS
  FA_JSONS = UnpickleObjectFromFile(PICKLE_FA_JSON_FILE, True)

  global ALL_MESSAGE_FILE
  ALL_MESSAGE_FILE = PrependFileName(ALL_MESSAGE_FILE, SIMULATION_PREFIX)
  ClearFile(ALL_MESSAGE_FILE)

  global LOGFILE
  LOGFILE = PrependFileName(LOGFILE, SIMULATION_PREFIX)
  ClearFile(LOGFILE)

  global ROLLING_LOGFILE
  ROLLING_LOGFILE = PrependFileName(ROLLING_LOGFILE, SIMULATION_PREFIX)
  ClearFile(ROLLING_LOGFILE)

  global ROLLING_MESSAGE_FILE
  ROLLING_MESSAGE_FILE = PrependFileName(
      ROLLING_MESSAGE_FILE, SIMULATION_PREFIX)
  ClearFile(ROLLING_MESSAGE_FILE)

  global PICKLE_FLIGHTS
  PICKLE_FLIGHTS = PrependFileName(PICKLE_FLIGHTS, SIMULATION_PREFIX)
  filenames = UnpickleObjectFromFile(
      PICKLE_FLIGHTS, True, max_days=None, filenames=True)
  for file in filenames:
    ClearFile(file)

  global PICKLE_DASHBOARD
  PICKLE_DASHBOARD = PrependFileName(PICKLE_DASHBOARD, SIMULATION_PREFIX)
  filenames = UnpickleObjectFromFile(
      PICKLE_DASHBOARD, True, max_days=None, filenames=True)
  for file in filenames:
    ClearFile(file)


def SimulationEnd(message_queue, flights, screens):
  """Clears message buffer, exercises histograms, and other simulation wrap up.

  Args:
    message_queue: List of flight messages that have not yet been printed.
    flights: List of flights dictionaries.
    screens: List of past screens displayed to splitflap screen.
  """
  if flights:
    histogram = {
        'type': 'both',
        'histogram':'all',
        'histogram_history':'30d',
        'histogram_max_screens': '_2',
        'histogram_data_summary': 'on'}
    message_queue.extend(TriggerHistograms(flights, histogram))

    while message_queue:
      ManageMessageQueue(message_queue, 0, {'setting_delay': 0}, screens)
    SaveFlightsByAltitudeDistanceCSV(flights)
    SaveFlightsToCSV(flights)

  # repickle to a new .pk with full track info
  file_parts = PICKLE_FLIGHTS.split('.')
  new_pickle_file = '.'.join([file_parts[0] + '_full_path', file_parts[1]])
  RemoveFile(new_pickle_file)
  for flight in flights:
    PickleObjectToFile(flight, new_pickle_file, False)

  print('Simulation complete after %s dump json messages processed'
        % len(DUMP_JSONS))


def SimulationSlowdownNearFlight(flights, persistent_nearby_aircraft):
  """Slows down simulations when a reported-upon flight is nearby."""
  if flights and flights[-1].get('flight_number') in persistent_nearby_aircraft:
    time.sleep(arduino.WRITE_DELAY_TIME)


def DumpJsonChanges():
  """Identifies if sequential dump json changes, for simulation optimization.

  If we are logging the radio output faster than it is updating, then there
  will be sequential log files in the json list that are identical; we only
  need to process the first of these, and can ignore subsequent ones, without
  any change of output in the simulation results. This function identifies
  whether the current active json changed from the prior one.

  Returns:
    Boolean - True if different (and processing needed), False if identical
  """
  if SIMULATION_COUNTER == 0:
    return True
  (this_json, unused_now) = DUMP_JSONS[SIMULATION_COUNTER]
  (last_json, unused_now) = DUMP_JSONS[SIMULATION_COUNTER - 1]
  return this_json != last_json


def CheckRebootNeeded(
    startup_time, message_queue, json_desc_dict, configuration):
  """Reboot based on duration instance has been running.

  Reboot needed in one of the following situations:
  - All quiet: if running for over 24 hours and all is quiet (message queue
    empty and no planes in radio).
  - Mostly quiet: if running for over 36 hours, the message queue is empty,
    and it is 3am or later.
  - Reboot requested via html form.

  Also checks if reset requested via html form.
  """
  reboot = False
  global SHUTDOWN_SIGNAL

  running_hours = (time.time() - startup_time) / SECONDS_IN_HOUR

  if (
      running_hours >= HOURS_IN_DAY and
      not message_queue and
      not json_desc_dict.get('radio_range_flights')):
    msg = 'All quiet reboot needed after running for %.2f hours' % running_hours
    SHUTDOWN_SIGNAL = msg
    Log(msg)
    reboot = True

  if (
      running_hours > HOURS_IN_DAY * 1.5 and
      not message_queue and
      int(EpochDisplayTime(time.time(), '%-H')) >= 3):
    msg = ('Early morning reboot needed after running for %.2f hours'
           % running_hours)
    SHUTDOWN_SIGNAL = msg
    Log(msg)
    reboot = True

  if 'soft_reboot' in configuration:
    msg = 'Soft reboot requested via web form'
    SHUTDOWN_SIGNAL = msg
    Log(msg)
    reboot = True
    RemoveSetting(configuration, 'soft_reboot')

  if 'end_process' in configuration:
    msg = 'Process end requested via web form'
    SHUTDOWN_SIGNAL = msg
    Log(msg)
    RemoveSetting(configuration, 'end_process')

  return reboot


def InterruptRebootFromButton():
  """Sets flag so main loop will terminate when it completes the iteration.

  This function is only triggered by a physical button press.
  """
  msg = 'Soft reboot requested by button push'
  global SHUTDOWN_SIGNAL
  SHUTDOWN_SIGNAL = msg

  global REBOOT_SIGNAL
  REBOOT_SIGNAL = True

  RPi.GPIO.output(GPIO_SOFT_RESET[1], False)  # signal that reset received
  Log(msg)


def InterruptShutdownFromSignal(signalNumber, unused_frame):
  """Sets flag so main loop will terminate when it completes the iteration.

  The function signature is defined by the Python signal module - i.e.: these
  two variables are passed automatically for registered signals.  This function
  is only triggered by an interrupt signal.
  """
  msg = '%d received termination signal %d (%s)' % (
      os.getpid(), signalNumber,
      signal.Signals(signalNumber).name)  # pylint: disable=E1101
  global SHUTDOWN_SIGNAL
  SHUTDOWN_SIGNAL = msg
  Log(msg)


def PerformGracefulShutdown(queues, shutdown, reboot):
  """Complete the graceful shutdown process by cleaning up.

  Args:
    queues: iterable of queues shared with child processes to be closed
    shutdown: tuple of shared flags with child processes to initiate shutdown
      in children
    reboot: boolean indicating whether we should trigger a reboot
  """
  reboot_msg = ''
  if reboot:
    reboot_msg = ' and rebooting'
  Log('Shutting down self (%d)%s' % (os.getpid(), reboot_msg))

  for q in queues:
    q.close()
  for v in shutdown:  # send the shutdown signal to child processes
    v.value = 1
  if RASPBERRY_PI:
    RPi.GPIO.cleanup()

  UpdateDashboard(True, failure_message=SHUTDOWN_SIGNAL)

  if reboot or REBOOT_SIGNAL:
    time.sleep(10)  # wait 10 seconds for children to shut down as well
    os.system('sudo reboot')
  sys.exit()


def FindRunningParents():
  """Returns proc ids of processes with identically-named python file running.

  In case there are multiple children processes spawned with the same name,
  such as via multiprocessing, this will only return the parent id (since a
  killed child process will likely just be respawned).
  """
  this_process_id = os.getpid()
  this_process_name = os.path.basename(sys.argv[0])
  pids = []
  pid_pairs = []
  for proc in psutil.process_iter():
    try:
      # Check if process name contains this_process_name.
      commands = proc.as_dict(attrs=['cmdline', 'pid', 'ppid'])
      if commands['cmdline']:
        command_running = any(
            [this_process_name in s for s in commands['cmdline']])
        if command_running:
          pids.append(commands['pid'])
          pid_pairs.append((commands['pid'], commands['ppid']))
    except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
      pass

  # Exclude those pids that have a parent that is also a pid
  final_pids = []
  for pid_pair in pid_pairs:
    if pid_pair[1] not in pids:
      final_pids.append(pid_pair[0])
  # Exclude this pid
  final_pids.remove(this_process_id)

  return sorted(final_pids)
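
# Worked example of the parent filtering above (pids assumed for
# illustration): if the matching processes are (pid=100, ppid=1),
# (pid=101, ppid=100) and (pid=102, ppid=100), then 101 and 102 are dropped
# because their parent (100) is itself a match, leaving only 100 (minus this
# process's own pid).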


def WaitUntilKillComplete(already_running_ids, max_seconds=30):
  """Prevents main loop from starting until other instancs complete shutdown.

  A termination command send to any other identically-named process may take a
  few seconds to complete because that other process is allowed to finish the
  current iteration in the main loop. Typically, that iteration in the other
  process will complete before this process finishes the initialization and
  starts. But in limited scenarios, that might not happen, such as if the other
  process is in the middle of generating a lot of histogram images, or if this
  process does not have much data to load.

  This function ensures that this process does not start the main loop until
  the other process terminates. If it detects that the other process is still
  running, it waits for up to max_seconds. If the other process does not
  terminate before that time limit, then this restarts the RPi.
  """
  still_running_ids = FindRunningParents()
  if not still_running_ids:
    return
  # still_running_ids should at least be a subset of already_running_ids
  new_processes = sorted(
      list(set(still_running_ids).difference(set(already_running_ids))))
  if new_processes:
    # uh-oh! at least one new started up in the interim? exit!
    Log('Kill signal sent to %s from this process %s, but it '
        'seems like there is at least one new process running, %s!' % (
            str(already_running_ids), str(os.getpid()), str(new_processes)))
    sys.exit()
  # phew - they're a subset; so they probably got the
  # signal; just wait a few secs
  elif still_running_ids:
    n = 0
    running_parents = FindRunningParents()
    while running_parents:
      if n == max_seconds:
        Log('Kill signal sent from this process %d to %s, but %s still '
            'running after waiting cume %d seconds; rebooting' % (
                os.getpid(),
                str(already_running_ids),
                str(running_parents), n+1))
        PerformGracefulShutdown((), (), True)
      if not n % 3:
        Log('Kill signal sent from this process %d to %s, but %s still '
            'running after waiting cume %d seconds' % (
                os.getpid(), str(already_running_ids), str(running_parents), n))
      n += 1
      time.sleep(1)
      running_parents = FindRunningParents()


def InitArduinoVariables():
  """Initializes variables for arduino threads with shared-memory queues."""
  to_remote_q = multiprocessing.Queue()
  to_servo_q = multiprocessing.Queue()
  to_main_q = multiprocessing.Queue()
  # shared flags to initiate shutdown
  shutdown_remote = multiprocessing.Value('i')
  shutdown_servo = multiprocessing.Value('i')
  shutdown = (shutdown_remote, shutdown_servo)

  return (to_remote_q, to_servo_q, to_main_q, shutdown)


def RefreshArduinos(
    remote, servo,
    to_remote_q, to_servo_q, to_main_q, shutdown,
    flights, json_desc_dict, configuration, screen_history):
  """Ensure arduinos are running and send them the current message.

  Args:
    remote: Running remote Arduino process (or if not previously running, None
      value)
    servo: Running servo Arduino process (or if not previously running, None
      value)
    to_remote_q: Multi-processing messaging queue for one-way comm from
      messageboard to remote arduino.
    to_servo_q: Multi-processing messaging queue for one-way comm from
      messageboard to servo arduino.
    to_main_q: Multi-processing messaging queue for one-way comm from arduinos
      to messageboard.
    shutdown: 2-tuple of multiprocessing flags (integers) used to signal to
      respective arduinos when they should be shutdown.
    flights: List of all flights.
    json_desc_dict: Dictionary of additional attributes about radio.
    configuration: Dictionary of configuration settings.
    screen_history: List of past screens displayed to splitflap screen.

  Returns:
    A 2-tuple of the remote and servo running processes.
  """
  remote, servo = ValidateArduinosRunning(
      remote, servo,
      to_remote_q, to_servo_q, to_main_q,
      shutdown, configuration)
  EnqueueArduinos(
      flights, json_desc_dict, configuration,
      to_remote_q, to_servo_q, screen_history)
  return remote, servo


def ValidateArduinosRunning(
    remote, servo, to_remote_q, to_servo_q, to_main_q, shutdown, configuration):
  """Ensures that each of the enabled arduinos are running.

  Args:
    remote: Running remote Arduino process (or if not previously running, None
      value)
    servo: Running servo Arduino process (or if not previously running, None
      value)
    to_remote_q: Multi-processing messaging queue for one-way comm from
      messageboard to remote arduino.
    to_servo_q: Multi-processing messaging queue for one-way comm from
      messageboard to servo arduino.
    to_main_q: Multi-processing messaging queue for one-way comm from arduinos
      to messageboard.
    shutdown: 2-tuple of multiprocessing flags (integers) used to signal to
      respective arduinos when they should be shutdown.
    configuration: Dictionary of configuration settings.

  Returns:
    A 2-tuple of the remote and servo running processes.
  """
  remote = ValidateSingleRunning(
      'enable_remote' in configuration,
      arduino.RemoteMain, p=remote,
      args=(to_remote_q, to_main_q, shutdown[0]))
  servo = ValidateSingleRunning(
      'enable_servos' in configuration,
      arduino.ServoMain, p=servo,
      args=(to_servo_q, to_main_q, shutdown[1]))
  return remote, servo


def ValidateSingleRunning(enabled, start_function, p=None, args=()):
  """Restarts a new instance of multiprocessing process if not running

  Args:
    enabled: Boolean indicating whether this arduino is enabled in the settings
      file.
    start_function: Function that will be started (with the given args) if a
      restart is needed.
    p: The existing process - if any - that should be checked to make sure it
      is alive. If not passed, or if passed but not alive, it is restarted.
    args: A tuple of function parameters to pass unmodified to start_function.

  Returns:
    The running process - either the same one that was passed in, or a new one
    if a restart was needed.
  """
  if not SHUTDOWN_SIGNAL:

    if not enabled:
      # must have just requested a disabling of single instance
      if p is not None:
        args[2].value = 1  # trigger a shutdown on the single instance
      return None

    if p is None or not p.is_alive():
      if p is None:
        Log('Process for %s starting for first time' % str(start_function))
      elif VERBOSE:
        Log('Process (%s) for %s died; restarting' %
            (str(p), str(start_function)))
      args[2].value = 0  # (re)set shutdown flag to allow function to run
      p = multiprocessing.Process(target=start_function, args=args)
      p.daemon = True
      p.start()

  return p
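
# Note on the args convention above (as used by ValidateArduinosRunning):
# args is expected to be (queue to the arduino, queue back to messageboard,
# shared shutdown flag), which is why args[2].value is toggled to stop or
# (re)start the child process.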


def LastFlightAvailable(flights, screen_history):
  """Returns True if splitflap display not displaying last flight message."""
  if not screen_history:
    return False

  last_message_tuple = screen_history[-1]
  last_message_type = last_message_tuple[0]
  if last_message_type == FLAG_MSG_FLIGHT:
    last_message_flight = last_message_tuple[2]
    if SameFlight(last_message_flight, flights[-1]):
      return False  # already displaying the last flight!
  return True


def EnqueueArduinos(
    flights, json_desc_dict, configuration,
    to_remote_q, to_servo_q, screen_history):
  """Send latest data to arduinos via their shared-memory queues.

  Args:
    flights: List of all flights.
    json_desc_dict: Dictionary of additional attributes about radio.
    configuration: Dictionary of configuration settings.
    to_remote_q: Multi-processing messaging queue for one-way comm from
      messageboard to remote arduino.
    to_servo_q: Multi-processing messaging queue for one-way comm from
      messageboard to servo arduino.
    screen_history: List of past screens displayed to splitflap screen.
  """
  last_flight = {}
  if flights:
    last_flight = flights[-1]

  if SIMULATION:
    now = json_desc_dict['now']
  else:
    now = time.time()

  additional_attributes = {}

  today = EpochDisplayTime(now, '%x')
  flight_count_today = len(
      [1 for f in flights if DisplayTime(f, '%x') == today])
  additional_attributes['flight_count_today'] = flight_count_today

  additional_attributes['simulation'] = SIMULATION

  additional_attributes['last_flight_available'] = LastFlightAvailable(
      flights, screen_history)

  message = (last_flight, json_desc_dict, configuration, additional_attributes)
  try:
    if 'enable_remote' in configuration:
      to_remote_q.put(message, block=False)
    if 'enable_servos' in configuration:
      to_servo_q.put(message, block=False)
  except queue.Full:
    msg = 'Message queues to Arduinos full - trigger shutdown'
    Log(msg)
    global SHUTDOWN_SIGNAL
    SHUTDOWN_SIGNAL = msg


def ProcessArduinoCommmands(
    q, flights, configuration, message_queue, next_message_time):
  """Executes the commands enqueued by the arduinos.

  The commands on the queue q are of the form (command, args), where command
  is an identifier indicating the type of instruction, and args is a possibly
  empty tuple of arguments for that command.

  Possible commands are updating a GPIO pin, replaying a recent flight to
  the board, generating a histogram, or updating the saved settings.

  Args:
    q: multiprocessing queue provided to both the Arduino processes
    flights: list of flights
    configuration: dictionary of settings
    message_queue: current message queue
    next_message_time: epoch of the next message to display to screen

  Returns:
    A 2-tuple of the (possibly-updated) message_queue and next_message_time.
  """
  while not q.empty():
    command, args = q.get()

    if command == 'pin':
      UpdateStatusLight(*args)

    elif command == 'replay':
      # a command might request info about flight to be (re)displayed,
      # irrespective of whether the screen is on; if so, let's put that
      # message at the front of the message queue, and delete any
      # subsequent insight messages in queue
      replayed_flight_index = IdentifyFlightDisplayed(
          flights, configuration, display_all_hours=True)
      if replayed_flight_index is not None:
        message_queue = DeleteMessageTypes(message_queue, (FLAG_MSG_INSIGHT, ))
        replayed_flight = flights[replayed_flight_index]
        flight_message = CreateMessageAboutFlight(replayed_flight)
        message_queue.insert(
            0, (FLAG_MSG_FLIGHT, flight_message, replayed_flight))
        next_message_time = time.time()

    elif command == 'histogram':
      if not flights:
        Log('Histogram requested by remote %s but no flights in memory' % str(
            args))
      else:
        histogram_type, histogram_history = args
        message_queue.extend(MessageboardHistograms(
            flights,
            histogram_type,
            histogram_history,
            '_1',
            False))

    elif command == 'update_configuration':
      updated_settings = args[0]
      Log('Updated settings received from arduino: %s' % updated_settings)
      WriteFile(CONFIG_FILE, updated_settings)

    else:
      Log('Improper command from arduinos: %s / %s' % (command, args))

  return message_queue, next_message_time
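
# Illustrative command tuples (argument values assumed) of the form handled
# by ProcessArduinoCommmands above:
#   ('pin', (GPIO_ERROR_ARDUINO_REMOTE_CONNECTION, False))
#   ('replay', ())
#   ('histogram', ('destination', '7d'))
#   ('update_configuration', (new_settings_string,))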


def PublishMessage(
    s,
    subscription_id='12fd73cd-75ef-4cae-bbbf-29b2678692c1',
    key='c5f62d44-e30d-4c43-a43e-d4f65f4eb399',
    secret='b00aeb24-72f3-467c-aad2-82ba5e5266ca',
    timeout=3):
  """Publishes a text string to a Vestaboard.

  The message is pushed to the vestaboard splitflap display by way of its
  web services; see https://docs.vestaboard.com/introduction for more details.

  Args:
    s: String to publish.
    subscription_id: string subscription id from Vestaboard.
    key: string key from Vestaboard.
    secret: string secret from Vestaboard.
    timeout: Max duration in seconds that we should wait to establish a
      connection.
  """
  error_code = False
  # See https://docs.vestaboard.com/characters: any chars needing to be replaced
  special_characters = ((u'\u00b0', '{62}'),)  # degree symbol '°'

  for special_character in special_characters:
    s = s.replace(*(special_character))
  curl = pycurl.Curl()

  # See https://stackoverflow.com/questions/31826814/
  # curl-post-request-into-pycurl-code
  # Set URL value
  curl.setopt(
      pycurl.URL,
      'https://platform.vestaboard.com/subscriptions/%s/message'
      % subscription_id)
  curl.setopt(pycurl.HTTPHEADER, [
      'X-Vestaboard-Api-Key:%s' % key, 'X-Vestaboard-Api-Secret:%s' % secret])
  curl.setopt(pycurl.TIMEOUT_MS, timeout*1000)
  curl.setopt(pycurl.POST, 1)

  curl.setopt(pycurl.WRITEFUNCTION, lambda x: None) # to keep stdout clean

  # preparing body the way pycurl.READDATA wants it
  body_as_dict = {'text': s}
  body_as_json_string = json.dumps(body_as_dict) # dict to json
  body_as_file_object = io.StringIO(body_as_json_string)

  # prepare and send. See also: pycurl.READFUNCTION to pass function instead
  curl.setopt(pycurl.READDATA, body_as_file_object)
  curl.setopt(pycurl.POSTFIELDSIZE, len(body_as_json_string))
  failure_message = ''
  try:
    curl.perform()
  except pycurl.error as e:
    failure_message = 'curl.perform() failed with message %s' % e
    Log('curl.perform() failed with message %s' % e)
    error_code = True
  else:
    # check the HTTP response code; non-200 means the post was not accepted
    status_code = curl.getinfo(pycurl.RESPONSE_CODE)
    if status_code != 200:
      Log('Server returned HTTP status code %d for message %s' % (
          status_code, s))
      error_code = True

  curl.close()
  UpdateStatusLight(
      GPIO_ERROR_VESTABOARD_CONNECTION, error_code, failure_message)
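
# Example of the character substitution above: a message such as '72° now'
# is sent to the Vestaboard as '72{62} now', since the degree symbol must be
# expressed with its escape code.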


def TruncateEscapedLine(s):
  """Formats a single line of the personal message for the Vestaboard.

  The Vestaboard has line length limitations, a limited character set,
  and escape characters. This function:
  - replaces some unsupported characters with very similar supported characters
  - truncates the line after the max line length, allowing for escape characters
  - truncates the line after an unsupported character that does not have
    a replacement

  Args:
    s: input string

  Returns:
    Reformatted potentially-truncated line.
  """
  s = s.upper()
  character_mapping = {
      '[': '(',
      '<': '(',
      ']': ')',
      '>': ')',
      '|': '/',
      '\\': '/'}
  for c in character_mapping:
    s = s.replace(c, character_mapping[c])

  l = 0
  valid_characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$()-+&=;:\'"%,./?'




                            <----SKIPPED LINES---->




        pointer = len(s)
      else:
        try:
          escape_value = int(s[pointer+1:end])
        except ValueError:
          escape_value = None
        if escape_value in valid_escape_values:
          validated_s += s[pointer:end+1]
          l += 1
        pointer = end
    else:
      pointer += 1

  return validated_s
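
# Example of the line formatting above (illustrative): 'Hello [World]!'
# becomes 'HELLO (WORLD)!', and an embedded escape sequence such as '{62}'
# counts as a single displayed character toward the line-length limit.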


def PersonalMessage(configuration, message_queue):
  """Formats and displays the personal message.

  A user-defined message can be displayed to the board whenever there isn't a
  flight message during user-specified hours of the day. This clears the
  board, if requested, and then adds that message to the queue.

  Args:
    configuration: the settings dictionary.
    message_queue: the existing queue, to which the personal message - if
      any - is added.
  """
  if 'clear_board' in configuration:
    RemoveSetting(configuration, 'clear_board')
    message_queue.append((FLAG_MSG_CLEAR, ''))
  minute_of_day = MinuteOfDay()
  if (
      not message_queue and
      'personal_message_enabled' in configuration and
      configuration['personal_message'] and
      minute_of_day <= configuration['personal_off_time'] and
      minute_of_day > configuration['personal_on_time'] + 1):
    message = configuration['personal_message']
    lines = [TruncateEscapedLine(l) for l in
             message.split('\n')[:SPLITFLAP_LINE_COUNT]]
    message_queue.append((FLAG_MSG_PERSONAL, lines))
    Log('Personal message added to queue: %s' % str(lines))


def ManageMessageQueue(
    message_queue, next_message_time, configuration, screens):
  """Check time & if appropriate, display next message from queue.

  Args:
    message_queue: FIFO list of message tuples of (message type,
      message string).
    next_message_time: epoch at which next message should be displayed
    configuration: dictionary of configuration attributes.
    screens: List of past screens displayed to splitflap screen.

  Returns:
    Next_message_time, potentially updated if a message has been displayed,
    or unchanged if no message was displayed.
  """
  if message_queue and (time.time() >= next_message_time or SIMULATION):

    if SIMULATION:  # drain the queue because the messages come so fast
      messages_to_display = list(message_queue)
      # passed by reference, so clear it out since we drained it to the display
      del message_queue[:]
    else:  # display only one message, being mindful of the display timing
      messages_to_display = [message_queue.pop(0)]

    for message in messages_to_display:
      message_text = message[1]
      if isinstance(message_text, str):
        message_text = textwrap.wrap(
            message_text,
            width=SPLITFLAP_CHARS_PER_LINE)
      display_message = Screenify(message_text, False)
      Log(display_message, file=ALL_MESSAGE_FILE)

      # Saving this to disk allows us to identify
      # persistently what's currently on the screen
      PickleObjectToFile(message, PICKLE_SCREENS, True)
      screens.append(message)

      MaintainRollingWebLog(display_message, 25)
      if not SIMULATION:
        splitflap_message = Screenify(message_text, True)
        PublishMessage(splitflap_message)

    next_message_time = time.time() + configuration['setting_delay']
  return next_message_time


def DeleteMessageTypes(q, types_to_delete):
  """Delete messages from the queue if type is in the iterable types."""
  if VERBOSE:
    messages_to_delete = [m for m in q if m[0] in types_to_delete]
    if messages_to_delete:
      Log('Deleting messages from queue due to new-found plane: %s'
          % messages_to_delete)
  updated_q = [m for m in q if m[0] not in types_to_delete]
  return updated_q


def BootstrapInsightList(full_path=PICKLE_FLIGHTS):
  """(Re)populate flight pickle files with flight insight distributions.

  The set of insights generated for each flight is created at the time
  the flight was first identified, and saved on the flight pickle. This
  saving allows the current running distribution to be recalculated very
  quickly, but it means that as code enabling new insights gets added,
  those historical distributions may not necessarily be considered correct.

  They are "correct" in the sense that that new insight was not available
  at the time that older flight was seen, but it is not correct in the sense
  that, because this new insight is starting out with an incidence in the
  historical data of zero, this new insight may be reported more frequently
  than desired until it "catches up".

  So this method replays the flight history with the latest insight code,
  regenerating the insight distribution for each flight.
  """
  directory, file = os.path.split(full_path)
  all_files = os.listdir(directory)
  files = sorted([os.path.join(directory, f) for f in all_files if file in f])
  for f in files:

    print('Bootstrapping %s' % f)
    configuration = ReadAndParseSettings(CONFIG_FILE)
    flights = []
    tmp_f = f + 'tmp'

    RemoveFile(tmp_f)

    if os.path.exists(f):
      mtime = os.path.getmtime(f)
      flights = UnpickleObjectFromFile(f, False)
      for (n, flight) in enumerate(flights):
        if n % 25 == 0:
          print(' - %d' % n)
        CreateFlightInsights(
            flights[:n+1], configuration.get('insights', 'hide'), {})
        PickleObjectToFile(flight, tmp_f, False)

      if mtime == os.path.getmtime(f):
        shutil.move(tmp_f, f)
      else:
        print('Aborted: failed to bootstrap %s: file changed while in progress'
              % f)
        return


def ResetLogs(config):
  """Clears the non-scrolling logs if reset_logs in config."""
  if 'reset_logs' in config:
    Log('Reset logs')
    for f in (STDERR_FILE, BACKUP_FILE, SERVICE_VERIFICATION_FILE):
      if RemoveFile(f):
        open(f, 'a').close()
    config.pop('reset_logs')
    config = BuildSettings(config)
    WriteFile(CONFIG_FILE, config)
  return config


def CheckTemperature():
  """Turn on fan if temperature exceeds threshold."""
  if RASPBERRY_PI:
    temperature = gpiozero.CPUTemperature().temperature




                            <----SKIPPED LINES---->





  if RASPBERRY_PI:
    RPi.GPIO.setmode(RPi.GPIO.BCM)

  pins = (
      GPIO_ERROR_VESTABOARD_CONNECTION, GPIO_ERROR_FLIGHT_AWARE_CONNECTION,
      GPIO_ERROR_ARDUINO_SERVO_CONNECTION, GPIO_ERROR_ARDUINO_REMOTE_CONNECTION,
      GPIO_ERROR_BATTERY_CHARGE, GPIO_FAN, GPIO_UNUSED_1, GPIO_UNUSED_2)

  for pin in pins:
    initial_state = pin[5]
    pin_values[pin[0]] = initial_state  # Initialize state of pins
    UpdateDashboard(initial_state, pin)

    if RASPBERRY_PI:
      RPi.GPIO.setup(pin[0], RPi.GPIO.OUT)
      RPi.GPIO.output(pin[0], pin_values[pin[0]])
    UpdateDashboard(pin_values[pin[0]], pin)

  if RASPBERRY_PI:  # configure soft reset button
    RPi.GPIO.setup(
        GPIO_SOFT_RESET[0], RPi.GPIO.IN, pull_up_down=RPi.GPIO.PUD_DOWN)
    RPi.GPIO.setup(GPIO_SOFT_RESET[1], RPi.GPIO.OUT)
    RPi.GPIO.output(GPIO_SOFT_RESET[1], True)
    RPi.GPIO.add_event_detect(GPIO_SOFT_RESET[0], RPi.GPIO.RISING)
    RPi.GPIO.add_event_callback(GPIO_SOFT_RESET[0], InterruptRebootFromButton)


def UpdateStatusLight(pin, value, failure_message=''):
  """Set the Raspberry Pi GPIO pin high (True) or low (False) based on value."""
  global pin_values

  if value:
    msg = pin[1]
  else:
    msg = pin[2]
  if RASPBERRY_PI:
    RPi.GPIO.output(pin[0], value)
    if value:
      pin_setting = 'HIGH'
      relay_light_value = 'OFF'
    else:
      pin_setting = 'LOW'
      relay_light_value = 'ON'
    msg += '; RPi GPIO pin %d set to %s; relay light #%d should now be %s' % (
        pin[0], pin_setting, pin[3], relay_light_value)

  if pin_values[pin[0]] != value:
    if VERBOSE:
      Log(msg)  # log
    pin_values[pin[0]] = value  # update cache
    UpdateDashboard(value, subsystem=pin, failure_message=failure_message)


def UpdateDashboard(value, subsystem=0, failure_message=''):
  """Writes to disk a tuple with status details about a particular system.

  The independent monitoring.py module allows us to see in one place the
  status of all the subsystems and of the overall system; it does that
  monitoring based on these tuples of data.

  Args:
    value: Boolean indicating whether a failure has occurred (True) or
      system is nominal (False).
    subsystem: A tuple describing the system; though that description may
      have multiple attributes, the 0th element is the numeric identifier
      of that system.  monitoring.py depends on other attributes of that
      tuple being present as well.  Since the overall system does not have
      a tuple defined for it, it gets a default identifier of 0.
    failure_message: an (optional) message describing why the system /
      subsystem is being disabled or failing.
  """
  versions = (VERSION_MESSAGEBOARD, VERSION_ARDUINO)
  if subsystem:
    subsystem = subsystem[0]
  PickleObjectToFile(
      (time.time(), subsystem, value, versions, failure_message),
      PICKLE_DASHBOARD, True)


def RemoveFile(file):
  """Removes a file, returning a boolean indicating if it had existed."""
  if os.path.exists(file):

    try:
      os.remove(file)
    except PermissionError:
      return False
    return True

  return False


def ConfirmNewFlight(flight, flights):
  """Replaces last-seen flight with new flight if identifiers overlap.

  Flights are identified by the radio over time by a tuple of identifiers:
  flight_number and squawk.  Due to unknown communication issues, one or the
  other may not always be transmitted. However, as soon as a new flight is
  identified that has at least one of those identifiers, we report on it
  and log it to the pickle repository, etc.

  This function checks if the newly identified flight is indeed a duplicate
  of the immediate prior flight by virtue of having the same squawk and/or
  flight number, and further, if the paths overlap.  If the paths do not
  overlap, then it's likely that the same flight was seen some minutes apart,
  and should legitimately be treated as a different flight.

  If the new flight is an updated version, then we should replace the
  prior-pickled-to-disk flight and replace the last flight in flights with
  this new version.

  Args:
    flight: new flight to check if identical to previous flight
    flights: list of all flights seen so far

  Returns:
    Boolean indicating whether flight is a new (True) or an updated version
    (False).
  """
  # boundary conditions
  if not flight or not flights:
    return bool(flight)

  last_flight = flights[-1]

  # flight_number and squawk are new
  if (
      flight.get('flight_number') != last_flight.get('flight_number')
      and flight.get('squawk') != last_flight.get('squawk')):
    return True

  # it's a returning flight... but perhaps some time later as it's hovering
  # in the area
  last_flight_last_seen = last_flight.get(
      'persistent_path', [last_flight])[-1]['now']
  if flight['now'] - last_flight_last_seen > PERSISTENCE_SECONDS:
    return True

  # it's not a new flight, so:
  # 1) replace the last flight in flights
  message = (
      'Flight (%s; %s) is overwriting the prior '
      'recorded flight (%s; %s) due to updated identifiers' % (
          flight.get('flight_number'), flight.get('squawk'),
          last_flight.get('flight_number'), last_flight.get('squawk')))
  flights[-1] = flight

  # 2) replace the last pickled record
  #
  # There is a potential complication in that the last flight and the new
  # flight crossed into a new day, and we are using date segmentation, so the
  # last flight exists in yesterday's file
  max_days = 1
  if not SIMULATION and DisplayTime(flight, '%x') != DisplayTime(
      last_flight, '%x'):
    max_days = 2
    message += (
        '; in repickling, we crossed days, so pickled flights that might '
        'otherwise be in %s file are now all located in %s file' % (
            DisplayTime(last_flight, '%x'), DisplayTime(flight, '%x')))

  Log(message)

  args = (PICKLE_FLIGHTS, not SIMULATION, max_days)
  saved_flights = UnpickleObjectFromFile(*args)[:-1]
  files_to_overwrite = UnpickleObjectFromFile(*args, filenames=True)

  for file in files_to_overwrite:
    os.remove(file)
  for f in saved_flights:
    # we would like to use verify=True, but that's too slow without further
    # optimizing the verification step for a loop of data
    PickleObjectToFile(
        f, PICKLE_FLIGHTS, True, timestamp=f['now'], verify=False)

  return False
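
# Worked example of the duplicate check above (values assumed): a flight
# first seen with only squawk '7336' that reappears 90 seconds later with
# both squawk '7336' and flight_number 'SWA1234' shares an identifier and is
# within PERSISTENCE_SECONDS, so it overwrites the prior record (returning
# False); the same squawk reappearing hours later would instead be treated
# as a new flight (returning True).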


def HeartbeatRestart():
  """Logs system down / system up pair of heartbeats as system starts."""
  if SIMULATION:
    return 0
  UpdateDashboard(True)  # Indicates this wasn't running a moment before, ...
  UpdateDashboard(False)  # ... and now it is running!
  return time.time()


def Heartbeat(last_heartbeat_time=None):
  """Logs a system up heartbeat."""
  if SIMULATION:
    return last_heartbeat_time
  now = time.time()
  if not last_heartbeat_time or now - last_heartbeat_time > HEARTBEAT_SECONDS:
    UpdateDashboard(False)  # Send an all-clear message
    last_heartbeat_time = now
  return last_heartbeat_time


def VersionControl():
  """Copies current instances of messageboard.py and arduino.py into repository.

  To aid debugging, we want to keep past versions of the code easily
  accessible, and linked to the errors that have been logged. This function
  copies the python code into a version control directory after adding in a
  date / time stamp to the file name.
  """
  def MakeCopy(python_prefix):
    file_extension = '.py'

    live_name = python_prefix + file_extension
    live_path = os.path.join(CODE_REPOSITORY, live_name)

    epoch = os.path.getmtime(live_path)
    last_modified_suffix = EpochDisplayTime(
        epoch, format_string='-%Y-%m-%d-%H%M')
    version_name = python_prefix + last_modified_suffix + file_extension
    version_path = os.path.join(VERSION_REPOSITORY, version_name)

    if not os.path.exists(version_path):
      shutil.copyfile(live_path, version_path)
    return version_name

  global VERSION_MESSAGEBOARD
  global VERSION_ARDUINO
  VERSION_MESSAGEBOARD = MakeCopy('messageboard')
  VERSION_ARDUINO = MakeCopy('arduino')
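
# Example of the archived filename format above (date assumed for
# illustration): a messageboard.py last modified on 2020-06-15 at 08:30 is
# copied into VERSION_REPOSITORY as messageboard-2020-06-15-0830.py.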


def main():
  """Traffic cop between radio, configuration, and messageboard.

  This is the main logic, checking for new flights, augmenting the radio
  signal with additional web-scraped data, and generating messages in a
  form presentable to the messageboard.
  """
  VersionControl()

  # Since this clears log files, it should occur first before we start logging
  if '-s' in sys.argv:
    global SIMULATION_COUNTER
    SimulationSetup()

  last_heartbeat_time = HeartbeatRestart()
  init_timing = [(time.time(), 0)]

  # This flag slows down simulation time around a flight, great for
  # debugging the arduinos
  simulation_slowdown = bool('-f' in sys.argv)

  # Redirect any errors to a log file instead of the screen, and add a datestamp
  if not SIMULATION:
    sys.stderr = open(STDERR_FILE, 'a')
    Log('', STDERR_FILE)

  init_timing.append((time.time(), 1))
  Log('Starting up process %d' % os.getpid())
  already_running_ids = FindRunningParents()
  if already_running_ids:
    for pid in already_running_ids:
      Log('Sending termination signal to %d' % pid)
      os.kill(pid, signal.SIGTERM)
  init_timing.append((time.time(), 2))

  SetPinMode()

  configuration = ReadAndParseSettings(CONFIG_FILE)
  Log('Read CONFIG_FILE at %s: %s' % (CONFIG_FILE, str(configuration)))

  startup_time = time.time()
  json_desc_dict = {}

  init_timing.append((time.time(), 3))
  flights = UnpickleObjectFromFile(
      PICKLE_FLIGHTS, True, max_days=MAX_INSIGHT_HORIZON_DAYS)
  # Clear the loaded flight of any cached data, identified by keys
  # with a specific suffix, since code fixes may change the values for
  # some of those cached elements
  for flight in flights:
    for key in list(flight.keys()):
      if key.endswith(CACHED_ELEMENT_PREFIX):
        flight.pop(key)
  init_timing.append((time.time(), 4))

  screen_history = UnpickleObjectFromFile(PICKLE_SCREENS, True, max_days=2)

  # If we're displaying just a single insight message, we want it to be
  # something unique, to the extent possible; this dict holds a count of
  # the diff types of messages displayed so far
  insight_message_distribution = {}

  # bootstrap the flight insights distribution from a list of insights on each
  # flight (i.e.: flight['insight_types'] for a given flight might look like
  # [1, 2, 7, 9], or [], to indicate which insights were identified); this then
  # transforms that into {0: 25, 1: 18, ...}, summing across all flights
  missing_insights = []
  for flight in flights:
    if 'insight_types' not in flight:
      missing_insights.append('%s on %s' % (
          DisplayFlightNumber(flight), DisplayTime(flight, '%x %X')))
    distribution = flight.get('insight_types', [])
    for key in distribution:
      insight_message_distribution[key] = (
          insight_message_distribution.get(key, 0) + 1)
  if missing_insights:
    Log('Flights missing insight distributions: %s' %
        ';'.join(missing_insights))
  init_timing.append((time.time(), 5))

  # initialize objects required for arduinos, but we can only start them
  # in the main loop, because the tail end of the init section needs to
  # confirm that all other messageboard.py processes have exited!
  to_remote_q, to_servo_q, to_main_q, shutdown = InitArduinoVariables()
  remote, servo = None, None

  # used in simulation to print the hour of simulation once per simulated hour
  prev_simulated_hour = ''

  persistent_nearby_aircraft = {} # key = flight number; value = last seen epoch
  persistent_path = {}
  histogram = {}

  # Next up to print is index 0; this is a list of tuples:
  # tuple element#1: flag indicating the type of message that this is
  # tuple element#2: the message itself
  message_queue = []
  next_message_time = time.time()

  # We repeat the loop every x seconds; this ensures that if the processing
  # time is long, we don't wait another x seconds after processing completes
  next_loop_time = time.time() + LOOP_DELAY_SECONDS

  # These files are read only if the version on disk has been modified more
  # recently than the last time it was read
  last_dump_json_timestamp = 0

  init_timing.append((time.time(), 6))
  WaitUntilKillComplete(already_running_ids)
  init_timing.append((time.time(), 7))

  LogTimes(init_timing)

  Log('Finishing initialization of %d; starting radio polling loop' %
      os.getpid())
  while ((not SIMULATION or SIMULATION_COUNTER < len(DUMP_JSONS))
         and not SHUTDOWN_SIGNAL):

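    # Periodically record that this process is still alive (assumed purpose
    # of Heartbeat), keeping track of when the last heartbeat was written.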
    last_heartbeat_time = Heartbeat(last_heartbeat_time)

    new_configuration = ReadAndParseSettings(CONFIG_FILE)
    UpdateRollingLogSize(new_configuration)
    CheckForNewFilterCriteria(
        configuration, new_configuration, message_queue, flights)
    configuration = new_configuration

    ResetLogs(configuration)  # clear the logs if requested
    UpdateRollingLogSize(configuration)

    # If this is a SIMULATION, process each dump that has changed. If it
    # isn't a simulation, only read and process the next dump if its
    # last-modified timestamp indicates the file has been updated since it
    # was last read.
    tmp_timestamp = 0
    if not SIMULATION:
      dump_json_exists = os.path.exists(DUMP_JSON_FILE)
      if dump_json_exists:
        tmp_timestamp = os.path.getmtime(DUMP_JSON_FILE)
    if (SIMULATION and DumpJsonChanges()) or (
        not SIMULATION and dump_json_exists and
        tmp_timestamp > last_dump_json_timestamp):

      last_dump_json_timestamp = tmp_timestamp

      (persistent_nearby_aircraft,
       flight, now,
       json_desc_dict,
       persistent_path) = ScanForNewFlights(
           persistent_nearby_aircraft,
           persistent_path,
           configuration.get('log_jsons', False))

      # Because this might just be an updated instance of the previous
      # flight as more identifying information (squawk and/or flight number)
      # comes in, we only want to process it if it's a truly new flight.
      new_flight_flag = ConfirmNewFlight(flight, flights)

      if new_flight_flag:
        flights.append(flight)
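        # Push the updated flight list and display state out to the
        # Arduinos; this presumably also (re)starts the remote / servo
        # handlers, which could not be started during init (see the comment
        # above InitArduinoVariables).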
        remote, servo = RefreshArduinos(
            remote, servo,
            to_remote_q, to_servo_q, to_main_q, shutdown,
            flights, json_desc_dict, configuration, screen_history)

        if FlightMeetsDisplayCriteria(flight, configuration, log=True):
          flight_message = (
              FLAG_MSG_FLIGHT, CreateMessageAboutFlight(flight), flight)

          # display the next message about this flight now!
          next_message_time = time.time()
          message_queue.insert(0, flight_message)
          # ...and delete any queued insight messages about other flights
          # that have not yet been displayed, since a newer flight has taken
          # precedence.
          message_queue = DeleteMessageTypes(message_queue, (FLAG_MSG_INSIGHT,))

          # We also manage the message queue outside this conditional, but
          # because it can take half a second to generate the flight
          # insights, calling it here as well lets this message start
          # displaying on the board immediately, when it's most relevant.
          next_message_time = ManageMessageQueue(
              message_queue, next_message_time, configuration, screen_history)

          insight_messages = CreateFlightInsights(
              flights,
              configuration.get('insights'),
              insight_message_distribution)
          if configuration.get('next_flight', 'off') == 'on':
            next_flight_text = FlightInsightNextFlight(flights, configuration)
            if next_flight_text:
              insight_messages.insert(0, next_flight_text)

          insight_messages = [(FLAG_MSG_INSIGHT, m) for m in insight_messages]

          for insight_message in insight_messages:
            message_queue.insert(0, insight_message)

        else:  # flight didn't meet display criteria
          flight['insight_types'] = []

        PickleObjectToFile(
            flight, PICKLE_FLIGHTS, True, timestamp=flight['now'])

      else:
        remote, servo = RefreshArduinos(
            remote, servo,
            to_remote_q, to_servo_q, to_main_q, shutdown,
            flights, json_desc_dict, configuration, screen_history)

    message_queue, next_message_time = ProcessArduinoCommmands(
        to_main_q, flights, configuration, message_queue, next_message_time)

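    # Queue a user-configured personal message for display, if one is set
    # (assumed behavior of PersonalMessage, based on its name and arguments).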
    PersonalMessage(configuration, message_queue)

    if SIMULATION and now:
      # Guard on 'now' so simulated_hour is never referenced before it has
      # been computed at least once.
      simulated_hour = EpochDisplayTime(now, '%Y-%m-%d %H:00%z')
      if simulated_hour != prev_simulated_hour:
        print(simulated_hour)
        prev_simulated_hour = simulated_hour

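    # A histogram request arrives as a one-shot settings file: read it, then
    # delete it so the same request is not processed again on the next pass.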
    histogram = ReadAndParseSettings(HISTOGRAM_CONFIG_FILE)
    RemoveFile(HISTOGRAM_CONFIG_FILE)

    # We also need to make sure there are flights on which to generate a
    # histogram. Why might there not be any? Primarily during a simulation,
    # if a histogram file is left over at the time the history restarts.
    if histogram and not flights:
      Log('Histogram requested (%s) but no flights in memory' % histogram)
    if histogram and flights:
      message_queue.extend(TriggerHistograms(flights, histogram))

    # Check the time and, if appropriate, display the next message from the
    # queue.
    next_message_time = ManageMessageQueue(
        message_queue, next_message_time, configuration, screen_history)

    reboot = CheckRebootNeeded(
        startup_time, message_queue, json_desc_dict, configuration)

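    # Presumably monitors the board / CPU temperature and logs or reacts if
    # it is running hot.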
    CheckTemperature()

    if not SIMULATION:
      time.sleep(max(0, next_loop_time - time.time()))
      next_loop_time = time.time() + LOOP_DELAY_SECONDS
    else:
      SIMULATION_COUNTER += 1
      if simulation_slowdown:
        SimulationSlowdownNearFlight(flights, persistent_nearby_aircraft)

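  # The main loop has exited: either the simulated input is exhausted or a
  # shutdown signal was received.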
  if SIMULATION:
    SimulationEnd(message_queue, flights, screen_history)
  PerformGracefulShutdown(
      (to_remote_q, to_servo_q, to_main_q), shutdown, reboot)


if __name__ == "__main__":
  # Interrupt, as in Ctrl-C.
  signal.signal(signal.SIGINT, InterruptShutdownFromSignal)

  # Terminate: sent by a newer instance at startup, or via an explicit kill.
  signal.signal(signal.SIGTERM, InterruptShutdownFromSignal)

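  # With '-i', rebuild the insight bookkeeping over the pickled flights
  # instead of running the normal polling loop (assumed purpose of
  # BootstrapInsightList).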
  if '-i' in sys.argv:
    BootstrapInsightList()
  else:
    main_settings = ReadAndParseSettings(CONFIG_FILE)
    if 'code_profiling_enabled' in main_settings:
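      # Profile the entire run and write the stats to a timestamped
      # .profile file, which can be inspected later with the pstats module.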
      import cProfile
      cProfile.run(
          'main()', 'messageboard_stats-%s.profile' %
          EpochDisplayTime(time.time(), '%Y-%m-%d-%H%M'))
    else:
      main()