From 614d2c356a8e52b27c70220af6f40a664410746f Mon Sep 17 00:00:00 2001 From: Cyberes Date: Wed, 6 Nov 2024 12:48:45 -0700 Subject: [PATCH] use glotec instead --- feeder/cache.py | 24 +---- feeder/global-image.py | 58 ------------ feeder/lib/cddis_fetch.py | 89 ------------------- feeder/lib/glotec.py | 66 ++++++++++++++ feeder/lib/tecmap.py | 70 --------------- feeder/mqtt.py | 31 +++---- feeder/requirements.txt | 3 +- feeder/server.py | 39 -------- .../space-weather-global-image.service | 15 ---- feeder/systemd/space-weather-server.service | 16 ---- 10 files changed, 83 insertions(+), 328 deletions(-) delete mode 100644 feeder/global-image.py delete mode 100644 feeder/lib/cddis_fetch.py create mode 100644 feeder/lib/glotec.py delete mode 100644 feeder/lib/tecmap.py delete mode 100644 feeder/server.py delete mode 100644 feeder/systemd/space-weather-global-image.service delete mode 100644 feeder/systemd/space-weather-server.service diff --git a/feeder/cache.py b/feeder/cache.py index 2d27a77..051c309 100644 --- a/feeder/cache.py +++ b/feeder/cache.py @@ -1,37 +1,21 @@ import logging -import os import pickle -import sys import time -from datetime import datetime from redis import Redis -from lib.cddis_fetch import fetch_latest_ionex -from lib.tecmap import get_tecmaps, parse_ionex_datetime +from lib.glotec import get_latest_glotec logging.basicConfig(level=logging.INFO) -CDDIS_USERNAME = os.getenv('CDDIS_USERNAME') -CDDIS_PASSWORD = os.getenv('CDDIS_PASSWORD') -if not CDDIS_USERNAME or not CDDIS_PASSWORD: - logging.critical('Must set CDDIS_USERNAME and CDDIS_PASSWORD environment variables') - sys.exit(1) - def main(): redis = Redis(host='localhost', port=6379, db=0) redis.flushall() while True: - utc_hr = datetime.utcnow().hour - logging.info('Fetching latest IONEX data') - logging.info(f'Using hour {utc_hr}') - ionex_data = fetch_latest_ionex(CDDIS_USERNAME, CDDIS_PASSWORD) - parsed_data = [] - for tecmap, epoch in get_tecmaps(ionex_data): - parsed_dt = parse_ionex_datetime(epoch) - parsed_data.append((tecmap, parsed_dt)) - redis.set('tecmap_data', pickle.dumps(parsed_data)) + logging.info('Fetching latest GLOTEC data') + geojson = get_latest_glotec() + redis.set('latest_glotec_data', pickle.dumps(geojson)) logging.info('Scrape complete') time.sleep(1800) # 30 minutes diff --git a/feeder/global-image.py b/feeder/global-image.py deleted file mode 100644 index 4d6341d..0000000 --- a/feeder/global-image.py +++ /dev/null @@ -1,58 +0,0 @@ -import io -import logging -import pickle -import time -from datetime import datetime - -import schedule -from PIL import Image -from redis import Redis - -from lib.tecmap import plot_tec_map - -logging.basicConfig(level=logging.INFO) - -# Entire planet -LAT_RANGE_MIN = -90 -LAT_RANGE_MAX = 90 -LON_RANGE_MIN = -180 -LON_RANGE_MAX = 180 - - -def main(): - redis = Redis(host='localhost', port=6379, db=0) - - utc_hr = datetime.utcnow().hour - logging.info(f'Generating plot for hour {utc_hr}') - - data = redis.get('tecmap_data') - while data is None: - logging.warning('Redis has not been populated yet. Is cache.py running? 
Sleeping 10s...') - time.sleep(10) - data = redis.get('tecmap_data') - ionex_data = pickle.loads(data) - - for tecmap, epoch in ionex_data: - if epoch.hour == utc_hr: - plt = plot_tec_map(tecmap, [float(LON_RANGE_MIN), float(LON_RANGE_MAX)], [float(LAT_RANGE_MIN), float(LAT_RANGE_MAX)], timestamp_utc=epoch)[1] - buf = io.BytesIO() - plt.savefig(buf, format='png', bbox_inches='tight', pad_inches=0.1, dpi=110) - plt.close() - del plt - - buf.seek(0) - img = Image.open(buf) - buf = io.BytesIO() - img.save(buf, format='PNG') - - redis.set('global_map', buf.getvalue()) - buf.close() - logging.info(f'Finished hour {utc_hr}') - - -if __name__ == '__main__': - main() - schedule.every().hour.at(':00').do(main) - while True: - schedule.run_pending() - time.sleep(1) diff --git a/feeder/lib/cddis_fetch.py b/feeder/lib/cddis_fetch.py deleted file mode 100644 index c380359..0000000 --- a/feeder/lib/cddis_fetch.py +++ /dev/null @@ -1,89 +0,0 @@ -import datetime -import logging -import subprocess -import sys -import tempfile -from pathlib import Path - -import chromedriver_autoinstaller -import requests -from selenium import webdriver -from selenium.webdriver import Keys -from selenium.webdriver.chrome.options import Options -from selenium.webdriver.common.by import By -from selenium.webdriver.support import expected_conditions as EC -from selenium.webdriver.support.ui import WebDriverWait - -IONEX_BASE_URL = 'https://cddis.nasa.gov/archive/gnss/products/ionex/' - - -def fetch_latest_ionex(username: str, password: str): - now = datetime.date.today() - url = IONEX_BASE_URL + str(now.year) - - chromedriver_autoinstaller.install() - options = Options() - options.add_argument('--headless=new') - driver = webdriver.Chrome(options=options) - driver.get(url) - - # Login - username_field = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.ID, "username"))) - username_field.clear() - username_field.send_keys(username) - password_field = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "password"))) - password_field.clear() - password_field.send_keys(password) - password_field.send_keys(Keys.RETURN) - - # Wait until we're redirected to the right page. - WebDriverWait(driver, 30).until(EC.visibility_of_element_located((By.ID, "parDirTextContainer"))) - - # Get the days in the year. - day_elements = driver.find_elements(By.XPATH, '//div[@class="archiveDir"]/div[@class="archiveDirTextContainer"]/a[@class="archiveDirText"]') - day_urls = [element.get_attribute('href') for element in day_elements] - - # Load the latest day. - today_url = day_urls[-2] # last element is predictions for tomorrow so we want the second to last one - logging.info(f'Using day {today_url.split("/")[-1]}') - driver.get(today_url) - - # Find our file. - file_elements = driver.find_elements(By.XPATH, '//a[@class="archiveItemText"]') - file_urls = [element.get_attribute('href') for element in file_elements] - found_url = None - for u in file_urls: - parts = u.split('/') - if parts[-1].startswith('c2pg'): - found_url = u - break - if found_url is None: - print('Did not find c2pg') - sys.exit(1) - - # Download our file. - auth_cookie = None - for cookie in driver.get_cookies(): - if cookie['name'] == 'ProxyAuth': - auth_cookie = cookie['value'] - break - if auth_cookie is None: - print('Did not find ProxyAuth cookie') - sys.exit(1) - - driver.close() - del driver - - # Download data. - zip_data_r = requests.get(found_url, cookies={'ProxyAuth': auth_cookie}) - zip_data_r.raise_for_status() - - # Read data. 
- tmp_file = tempfile.NamedTemporaryFile() - tmp_file.write(zip_data_r.content) - tmp_dir = tempfile.TemporaryDirectory() - subprocess.run(["7z", "e", tmp_file.name, f"-o{tmp_dir.name}"], check=True, stdout=subprocess.PIPE) - p = Path(tmp_dir.name) - target_file = list(p.iterdir())[-1] - data = target_file.read_text() - return data diff --git a/feeder/lib/glotec.py b/feeder/lib/glotec.py new file mode 100644 index 0000000..c175358 --- /dev/null +++ b/feeder/lib/glotec.py @@ -0,0 +1,66 @@ +import time + +import cartopy.crs as ccrs +import matplotlib.pyplot as plt +import numpy as np +import requests +from dateutil.parser import parse +from dateutil.tz import tzutc, tzlocal +from matplotlib.colors import LinearSegmentedColormap +from mpl_toolkits.axes_grid1 import make_axes_locatable +from scipy.interpolate import griddata + + +def get_latest_glotec(): + r = requests.get('https://services.swpc.noaa.gov/experimental/products/glotec/geojson_2d_urt.json') + r.raise_for_status() + index_json = r.json()[-1] + data_url = 'https://services.swpc.noaa.gov' + index_json['url'] + r2 = requests.get(data_url) + r2.raise_for_status() + return r2.json() + + +def plot_glotec_map(data: dict, lon_range: list, lat_range: list): + lons = [] + lats = [] + tec_values = [] + for feature in data['features']: + lon, lat = feature['geometry']['coordinates'] + tec = feature['properties']['tec'] + lons.append(lon) + lats.append(lat) + tec_values.append(tec) + + lons = np.array(lons) + lats = np.array(lats) + tec_values = np.array(tec_values) + + lon_grid, lat_grid = np.meshgrid(np.linspace(lon_range[0], lon_range[1], 100), np.linspace(lat_range[0], lat_range[1], 100)) + + # Interpolate the TEC values onto the regular grid + tec_grid = griddata((lons, lats), tec_values, (lon_grid, lat_grid), method='linear') + + proj = ccrs.PlateCarree() + f, ax = plt.subplots(1, 1, subplot_kw=dict(projection=proj)) + + colors = ['#33184a', '#4454c3', '#4294ff', '#1ad2d2', '#3cf58e', '#9cfe40', '#dde037', '#fdac34', '#f26014', '#ca2a04', '#7A0403'] + custom_cmap = LinearSegmentedColormap.from_list('custom', colors) + + h = ax.pcolormesh(lon_grid, lat_grid, tec_grid, cmap=custom_cmap, vmin=0, vmax=100, transform=proj) + + ax.coastlines() + + timestamp_utc = parse(data['time_tag']) + timestamp_local = timestamp_utc.replace(tzinfo=tzutc()).astimezone(tzlocal()) + plt.title(timestamp_local.strftime(f'%H:%M %m-%d-%Y {time.tzname[0]}'), fontsize=12, y=1.04) + + plt.suptitle('Global Total Electron Content', fontsize=16, y=0.87) + divider = make_axes_locatable(ax) + ax_cb = divider.new_horizontal(size='5%', pad=0.1, axes_class=plt.Axes) + f.add_axes(ax_cb) + cb = plt.colorbar(h, cax=ax_cb) + plt.rc('text', usetex=True) + cb.set_label('VTEC ($10^{16}*\\mathrm{m}^{-2}$)') + + return tec_grid, plt diff --git a/feeder/lib/tecmap.py b/feeder/lib/tecmap.py deleted file mode 100644 index 1a3094a..0000000 --- a/feeder/lib/tecmap.py +++ /dev/null @@ -1,70 +0,0 @@ -import re -import time -from datetime import datetime - -import cartopy.crs as ccrs -import matplotlib.pyplot as plt -import numpy as np -from dateutil.tz import tzutc, tzlocal -from mpl_toolkits.axes_grid1 import make_axes_locatable - -""" -https://github.com/daniestevez/jupyter_notebooks/blob/master/IONEX.ipynb -""" - - -def parse_ionex_datetime(s: str): - match = re.match(r'\s*(\d{4})\s*(\d{1,2})\s*(\d{1,2})\s*(\d{1,2})\s*(\d{1,2})\s*(\d{1,2})', s) - if match: - year, month, day, hour, minute, second = map(int, match.groups()) - return datetime(year, month, day, hour, minute, second) - 
else: - raise ValueError("Invalid date format") - - -def parse_map(tecmap, exponent=-1): - tecmap = re.split('.*END OF TEC MAP', tecmap)[0] - return np.stack([np.fromstring(l, sep=' ') for l in re.split('.*LAT/LON1/LON2/DLON/H\\n', tecmap)[1:]]) * 10 ** exponent - - -def get_tecmaps(ionex: str): - for tecmap in ionex.split('START OF TEC MAP')[1:]: - lines = tecmap.split('\n') - epoch = lines[1].strip() if len(lines) > 1 else None - yield parse_map(tecmap), epoch - - -def plot_tec_map(tecmap, lon_range: list, lat_range: list, timestamp_utc: datetime = None): - proj = ccrs.PlateCarree() - f, ax = plt.subplots(1, 1, subplot_kw=dict(projection=proj)) - - # Create arrays of latitudes and longitudes to match the geographical grid of the TEC map data. - # This is hard coded and should never change. - lat = np.arange(-87.5, 87.5, 2.5) - lon = np.arange(-180, 180, 5.0) - - # Create a mask for the data in the lat/lon range - lon_mask = (lon >= lon_range[0]) & (lon < lon_range[1]) - lat_mask = (lat >= lat_range[0]) & (lat < lat_range[1]) - mask = np.ix_(lat_mask, lon_mask) - - # Select only the data in the lat/lon range - tecmap_ranged = tecmap[mask] - - # Plot the TEC map - h = plt.imshow(tecmap_ranged, cmap='viridis', vmin=0, vmax=100, extent=(lon_range[0], lon_range[1], lat_range[0], lat_range[1]), transform=proj) - - # Make graph pretty - ax.coastlines() - if timestamp_utc: - timestamp_local = timestamp_utc.replace(tzinfo=tzutc()).astimezone(tzlocal()) - plt.title(timestamp_local.strftime(f'%H:%M %m-%d-%Y {time.tzname[0]}'), fontsize=12, y=1.04) - plt.suptitle('Vertical Total Electron Count', fontsize=16, y=0.87) - divider = make_axes_locatable(ax) - ax_cb = divider.new_horizontal(size='5%', pad=0.1, axes_class=plt.Axes) - f.add_axes(ax_cb) - cb = plt.colorbar(h, cax=ax_cb) - plt.rc('text', usetex=True) - cb.set_label('TECU ($10^{16} \\mathrm{el}/\\mathrm{m}^2$)') - - return tecmap_ranged, plt diff --git a/feeder/mqtt.py b/feeder/mqtt.py index ac1b985..8703601 100644 --- a/feeder/mqtt.py +++ b/feeder/mqtt.py @@ -8,9 +8,10 @@ from datetime import datetime, timezone import numpy as np import paho.mqtt.client as mqtt +from dateutil.parser import parse from redis import Redis -from lib.tecmap import plot_tec_map +from feeder.lib.glotec import plot_glotec_map logging.basicConfig(level=logging.INFO) @@ -30,12 +31,6 @@ if not LAT_RANGE_MIN or not LAT_RANGE_MAX or not LON_RANGE_MIN or not LON_RANGE_ print(LAT_RANGE_MIN, LAT_RANGE_MAX, LON_RANGE_MIN, LON_RANGE_MAX) sys.exit(1) -CDDIS_USERNAME = os.getenv('CDDIS_USERNAME') -CDDIS_PASSWORD = os.getenv('CDDIS_PASSWORD') -if not CDDIS_USERNAME or not CDDIS_PASSWORD: - logging.critical('Must set CDDIS_USERNAME and CDDIS_PASSWORD environment variables') - sys.exit(1) - client = mqtt.Client(client_id=MQTT_CLIENT_ID) if MQTT_USERNAME and MQTT_PASSWORD: client.username_pw_set(MQTT_USERNAME, MQTT_PASSWORD) @@ -63,29 +58,25 @@ def main(): redis = Redis(host='localhost', port=6379, db=0) while True: - data = redis.get('tecmap_data') + data = redis.get('latest_glotec_data') while data is None: logging.warning('Redis has not been populated yet. Is cache.py running? 
Sleeping 10s...') time.sleep(10) - data = redis.get('tecmap_data') - ionex_data = pickle.loads(data) + data = redis.get('latest_glotec_data') + geojson = pickle.loads(data) utc_hr = datetime.now(timezone.utc).hour logging.info(f'Using hour {utc_hr}') - avg_tec = None - for tecmap, epoch in ionex_data: - if epoch.hour == utc_hr: - tecmap_ranged, _ = plot_tec_map(tecmap, [float(LON_RANGE_MIN), float(LON_RANGE_MAX)], [float(LAT_RANGE_MIN), float(LAT_RANGE_MAX)]) - avg_tec = np.mean(tecmap_ranged) - logging.info(f'Data timestamp: {epoch.isoformat()}') - break + glotec_map_ranged, _ = plot_glotec_map(geojson, [float(LON_RANGE_MIN), float(LON_RANGE_MAX)], [float(LAT_RANGE_MIN), float(LAT_RANGE_MAX)]) + avg_tec = np.mean(glotec_map_ranged) + logging.info(f'Data timestamp: {parse(geojson["time_tag"]).isoformat()}') latest = round(avg_tec, 1) - publish('vtec', latest) + publish('latest_glotec_data', latest) del data - del ionex_data - del tecmap_ranged + del geojson + del glotec_map_ranged del avg_tec del latest gc.collect() diff --git a/feeder/requirements.txt b/feeder/requirements.txt index f3250f5..bfbc91a 100644 --- a/feeder/requirements.txt +++ b/feeder/requirements.txt @@ -11,4 +11,5 @@ Pillow flask==3.0.3 schedule==1.2.2 gunicorn==23.0.0 -python-dateutil==2.9.0.post0 \ No newline at end of file +python-dateutil==2.9.0.post0 +scipy==1.14.1 \ No newline at end of file diff --git a/feeder/server.py b/feeder/server.py deleted file mode 100644 index a860d10..0000000 --- a/feeder/server.py +++ /dev/null @@ -1,39 +0,0 @@ -import datetime -import io - -import redis -from PIL import Image, ImageDraw, ImageFont -from flask import Flask, send_file, make_response - -NO_MAP_STR = 'NO GLOBAL MAP AVAILABLE' - -app = Flask(__name__) -redis_client = redis.Redis(host='localhost', port=6379) - - -@app.route('/global') -def serve_global_map(): - global_map_data = redis_client.get('global_map') - if global_map_data is None: - img = Image.new('RGB', (633, 356), color=(255, 255, 255)) - d = ImageDraw.Draw(img) - fnt = ImageFont.load_default(size=30) - w, h = fnt.getbbox(NO_MAP_STR)[2:4] - d.text(((500 - w) / 2, (300 - h) / 2), NO_MAP_STR, font=fnt, fill=(0, 0, 0)) - buf = io.BytesIO() - img.save(buf, format='PNG') - buf.seek(0) - return send_file(buf, mimetype='image/png') - - buf = io.BytesIO(global_map_data) - buf.seek(0) - response = make_response(send_file(buf, mimetype='image/png')) - expires = datetime.datetime.now() - expires = expires + datetime.timedelta(minutes=10) - response.headers['Cache-Control'] = 'public, max-age=600' - response.headers['Expires'] = expires.strftime("%a, %d %b %Y %H:%M:%S GMT") - return response - - -if __name__ == '__main__': - app.run() diff --git a/feeder/systemd/space-weather-global-image.service b/feeder/systemd/space-weather-global-image.service deleted file mode 100644 index b6cf1a9..0000000 --- a/feeder/systemd/space-weather-global-image.service +++ /dev/null @@ -1,15 +0,0 @@ -[Unit] -Description=Space Weather Global Image Generator -After=network.target space-weather-cache.service - -[Service] -Type=simple -User=homeassistant -EnvironmentFile=/etc/secrets/space-weather -ExecStart=/srv/ha-noaa-space-weather/venv/bin/python /srv/ha-noaa-space-weather/feeder/global-image.py -SyslogIdentifier=space-weather-global-image -Restart=on-failure -RestartSec=5s - -[Install] -WantedBy=multi-user.target diff --git a/feeder/systemd/space-weather-server.service b/feeder/systemd/space-weather-server.service deleted file mode 100644 index 335677a..0000000 --- 
a/feeder/systemd/space-weather-server.service +++ /dev/null @@ -1,16 +0,0 @@ -[Unit] -Description=Space Weather Server -After=network.target - -[Service] -Type=simple -User=homeassistant -EnvironmentFile=/etc/secrets/space-weather -WorkingDirectory=/srv/ha-noaa-space-weather/feeder -ExecStart=/srv/ha-noaa-space-weather/venv/bin/gunicorn --workers 7 --bind 0.0.0.0:5000 server:app --access-logfile '-' --error-logfile '-' -SyslogIdentifier=space-weather-server -Restart=on-failure -RestartSec=5s - -[Install] -WantedBy=multi-user.target
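
A minimal usage sketch of the two helpers this patch adds in feeder/lib/glotec.py, get_latest_glotec() and plot_glotec_map(). It assumes the script is run from the feeder/ directory so the lib package resolves, that the requirements.txt dependencies (cartopy, scipy, python-dateutil) are installed, and that a LaTeX toolchain is available because plot_glotec_map() enables usetex; the output filename is illustrative, not something the patch writes.

# Usage sketch, not part of the patch: fetch the latest 2D GLOTEC product from
# NOAA SWPC as GeoJSON and render a global VTEC map with the new helpers.
from lib.glotec import get_latest_glotec, plot_glotec_map

geojson = get_latest_glotec()
_, plt = plot_glotec_map(geojson, [-180.0, 180.0], [-90.0, 90.0])
plt.savefig('glotec_global.png', bbox_inches='tight', pad_inches=0.1, dpi=110)
plt.close()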
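
After this change, mqtt.py publishes the mean VTEC over a lat/lon box by averaging the interpolated grid returned by plot_glotec_map(). If only that number is needed, it can also be taken straight from the GeoJSON features, using the same layout plot_glotec_map() reads (geometry.coordinates as [lon, lat], properties.tec). This is an alternative sketch, not what the patch does, and mean_tec_in_box is a hypothetical helper name.

import numpy as np

def mean_tec_in_box(geojson: dict, lon_range: list, lat_range: list) -> float:
    # Average the raw GLOTEC grid-point values inside the box, skipping the
    # interpolation and plotting that plot_glotec_map() performs.
    values = []
    for feature in geojson['features']:
        lon, lat = feature['geometry']['coordinates']
        if lon_range[0] <= lon <= lon_range[1] and lat_range[0] <= lat <= lat_range[1]:
            values.append(feature['properties']['tec'])
    return float(np.mean(values)) if values else float('nan')

# Example box covering roughly the continental United States:
# avg_tec = mean_tec_in_box(get_latest_glotec(), [-125.0, -66.0], [24.0, 50.0])

Averaging the raw points also sidesteps the NaN cells that linear griddata interpolation leaves outside the data's convex hull when the requested box extends past it.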