messageboard-2024-02-03-1709.py
01234567890123456789012345678901234567890123456789012345678901234567890123456789









20182019202020212022202320242025202620272028202920302031203220332034203520362037   203820392040204120422043204420452046204720482049205020512052205320542055205620572058











                            <----SKIPPED LINES---->




  # NOTE(review): the enclosing `def` is outside this view. This fragment is
  # the FlightAware (FA) query path: a rate-limit guard, the cloudscraper
  # fetch, and extraction of the embedded flight-data <script> tag.
  global last_query_time
  global last_query_flight_number
  # Throttle: refuse to query FA again until min_query_delay_seconds have
  # elapsed since the previous query (tracked via module-level globals).
  seconds_since_last_query = time.time() - last_query_time
  if last_query_time and seconds_since_last_query < min_query_delay_seconds:
    error_msg = (
        'Unable to query FA for %s at %s since last query to FA was only'
        ' %d seconds ago for %s; min of %d seconds needed: %s' % (
            flight_number,
            EpochDisplayTime(time.time(), format_string='%H:%M:%S'),
            seconds_since_last_query,
            last_query_flight_number,
            min_query_delay_seconds,
            url))
    flight_aware_status_code = 'WARNING'
    # Return shape appears to be (html, error_msg, status_code, query_time);
    # empty html signals "no page fetched" — TODO confirm against callers.
    return '', error_msg, flight_aware_status_code, time.time()

  # Record this attempt so the next call's throttle check sees it.
  last_query_time = time.time()
  last_query_flight_number = flight_number

  # https://pypi.org/project/cloudscraper/



  scraper=cloudscraper.create_scraper()
  try:
    response = scraper.get(url, timeout=5)
    query_time = time.time()
  except requests.exceptions.RequestException as e:
    query_time = time.time()  # did not get to the query_time assignment above
    error_msg = 'Unable to query FA for URL due to %s: %s' % (e, url)
    flight_aware_status_code = 'FAILURE'
    return '', error_msg, flight_aware_status_code, query_time

  # The flight data is embedded in an inline <script> containing the
  # 'trackpollBootstrap' blob; scan all script tags for it.
  soup = bs4.BeautifulSoup(response.text, 'html.parser')
  l = soup.find_all('script')
  flight_script = None
  for script in l:
    if 'trackpollBootstrap' in str(script):
      flight_script = str(script)
      break
  if not flight_script:
    # Page fetched but did not contain the expected payload; log the full
    # response text to aid debugging (FA may have changed page structure).
    error_msg = (
        'Unable to find trackpollBootstrap script in page: ' + response.text)
    Log(error_msg)




                            <----SKIPPED LINES---->





01234567890123456789012345678901234567890123456789012345678901234567890123456789









20182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061











                            <----SKIPPED LINES---->




  # NOTE(review): the enclosing `def` is outside this view. This fragment is
  # the FlightAware (FA) query path: a rate-limit guard, the cloudscraper
  # fetch, and extraction of the embedded flight-data <script> tag.
  global last_query_time
  global last_query_flight_number
  # Throttle: refuse to query FA again until min_query_delay_seconds have
  # elapsed since the previous query (tracked via module-level globals).
  seconds_since_last_query = time.time() - last_query_time
  if last_query_time and seconds_since_last_query < min_query_delay_seconds:
    error_msg = (
        'Unable to query FA for %s at %s since last query to FA was only'
        ' %d seconds ago for %s; min of %d seconds needed: %s' % (
            flight_number,
            EpochDisplayTime(time.time(), format_string='%H:%M:%S'),
            seconds_since_last_query,
            last_query_flight_number,
            min_query_delay_seconds,
            url))
    flight_aware_status_code = 'WARNING'
    # Return shape appears to be (html, error_msg, status_code, query_time);
    # empty html signals "no page fetched" — TODO confirm against callers.
    return '', error_msg, flight_aware_status_code, time.time()

  # Record this attempt so the next call's throttle check sees it.
  last_query_time = time.time()
  last_query_flight_number = flight_number

  # https://pypi.org/project/cloudscraper/
  # Interestingly, scraper=cloudscraper.create_scraper() works on the Mac,
  # but does not get decrypted if done with the RPi; we need to further
  # fake the browser to be chrome
  scraper=cloudscraper.create_scraper(browser='chrome')
  try:
    response = scraper.get(url, timeout=5)
    query_time = time.time()
  except requests.exceptions.RequestException as e:
    query_time = time.time()  # did not get to the query_time assignment above
    error_msg = 'Unable to query FA for URL due to %s: %s' % (e, url)
    flight_aware_status_code = 'FAILURE'
    return '', error_msg, flight_aware_status_code, query_time

  # The flight data is embedded in an inline <script> containing the
  # 'trackpollBootstrap' blob; scan all script tags for it.
  soup = bs4.BeautifulSoup(response.text, 'html.parser')
  l = soup.find_all('script')
  flight_script = None
  for script in l:
    if 'trackpollBootstrap' in str(script):
      flight_script = str(script)
      break
  if not flight_script:
    # Page fetched but did not contain the expected payload; log the full
    # response text to aid debugging (FA may have changed page structure).
    error_msg = (
        'Unable to find trackpollBootstrap script in page: ' + response.text)
    Log(error_msg)




                            <----SKIPPED LINES---->