use glotec instead

Cyberes 2024-11-06 12:48:45 -07:00
parent 8138ea21fc
commit 614d2c356a
10 changed files with 83 additions and 328 deletions


@@ -1,37 +1,21 @@
 import logging
-import os
 import pickle
-import sys
 import time
-from datetime import datetime
 from redis import Redis
-from lib.cddis_fetch import fetch_latest_ionex
-from lib.tecmap import get_tecmaps, parse_ionex_datetime
+from lib.glotec import get_latest_glotec
 logging.basicConfig(level=logging.INFO)
-CDDIS_USERNAME = os.getenv('CDDIS_USERNAME')
-CDDIS_PASSWORD = os.getenv('CDDIS_PASSWORD')
-if not CDDIS_USERNAME or not CDDIS_PASSWORD:
-    logging.critical('Must set CDDIS_USERNAME and CDDIS_PASSWORD environment variables')
-    sys.exit(1)
 def main():
     redis = Redis(host='localhost', port=6379, db=0)
     redis.flushall()
     while True:
-        utc_hr = datetime.utcnow().hour
-        logging.info('Fetching latest IONEX data')
-        logging.info(f'Using hour {utc_hr}')
-        ionex_data = fetch_latest_ionex(CDDIS_USERNAME, CDDIS_PASSWORD)
-        parsed_data = []
-        for tecmap, epoch in get_tecmaps(ionex_data):
-            parsed_dt = parse_ionex_datetime(epoch)
-            parsed_data.append((tecmap, parsed_dt))
-        redis.set('tecmap_data', pickle.dumps(parsed_data))
+        logging.info('Fetching latest GLOTEC data')
+        geojson = get_latest_glotec()
+        redis.set('latest_glotec_data', pickle.dumps(geojson))
         logging.info('Scrape complete')
         time.sleep(1800)  # 30 minutes


@@ -1,58 +0,0 @@
import io
import logging
import pickle
import time
from datetime import datetime

import schedule
from PIL import Image
from redis import Redis

from lib.tecmap import plot_tec_map

logging.basicConfig(level=logging.INFO)

# Entire planet
LAT_RANGE_MIN = -90
LAT_RANGE_MAX = 90
LON_RANGE_MIN = -180
LON_RANGE_MAX = 180


def main():
    redis = Redis(host='localhost', port=6379, db=0)
    utc_hr = datetime.utcnow().hour
    logging.info(f'Generating plot for hour {utc_hr}')
    data = redis.get('tecmap_data')
    while data is None:
        logging.warning('Redis has not been populated yet. Is cache.py running? Sleeping 10s...')
        time.sleep(10)
        data = redis.get('tecmap_data')
    ionex_data = pickle.loads(data)
    for tecmap, epoch in ionex_data:
        if epoch.hour == utc_hr:
            plt = plot_tec_map(tecmap, [float(LON_RANGE_MIN), float(LON_RANGE_MAX)], [float(LAT_RANGE_MIN), float(LAT_RANGE_MAX)], timestamp_utc=epoch)[1]
            buf = io.BytesIO()
            plt.savefig(buf, format='png', bbox_inches='tight', pad_inches=0.1, dpi=110)
            plt.close()
            del plt
            buf.seek(0)
            img = Image.open(buf)
            buf = io.BytesIO()
            img.save(buf, format='PNG')
            redis.set('global_map', buf.getvalue())
            buf.close()
    logging.info(f'Finished hour {utc_hr}')


if __name__ == '__main__':
    main()
    schedule.every().hour.at(':00').do(main)
    while True:
        schedule.run_pending()
        time.sleep(1)


@@ -1,89 +0,0 @@
import datetime
import logging
import subprocess
import sys
import tempfile
from pathlib import Path

import chromedriver_autoinstaller
import requests
from selenium import webdriver
from selenium.webdriver import Keys
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

IONEX_BASE_URL = 'https://cddis.nasa.gov/archive/gnss/products/ionex/'


def fetch_latest_ionex(username: str, password: str):
    now = datetime.date.today()
    url = IONEX_BASE_URL + str(now.year)
    chromedriver_autoinstaller.install()
    options = Options()
    options.add_argument('--headless=new')
    driver = webdriver.Chrome(options=options)
    driver.get(url)

    # Login
    username_field = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.ID, "username")))
    username_field.clear()
    username_field.send_keys(username)
    password_field = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "password")))
    password_field.clear()
    password_field.send_keys(password)
    password_field.send_keys(Keys.RETURN)

    # Wait until we're redirected to the right page.
    WebDriverWait(driver, 30).until(EC.visibility_of_element_located((By.ID, "parDirTextContainer")))

    # Get the days in the year.
    day_elements = driver.find_elements(By.XPATH, '//div[@class="archiveDir"]/div[@class="archiveDirTextContainer"]/a[@class="archiveDirText"]')
    day_urls = [element.get_attribute('href') for element in day_elements]

    # Load the latest day.
    today_url = day_urls[-2]  # last element is predictions for tomorrow so we want the second to last one
    logging.info(f'Using day {today_url.split("/")[-1]}')
    driver.get(today_url)

    # Find our file.
    file_elements = driver.find_elements(By.XPATH, '//a[@class="archiveItemText"]')
    file_urls = [element.get_attribute('href') for element in file_elements]
    found_url = None
    for u in file_urls:
        parts = u.split('/')
        if parts[-1].startswith('c2pg'):
            found_url = u
            break
    if found_url is None:
        print('Did not find c2pg')
        sys.exit(1)

    # Download our file.
    auth_cookie = None
    for cookie in driver.get_cookies():
        if cookie['name'] == 'ProxyAuth':
            auth_cookie = cookie['value']
            break
    if auth_cookie is None:
        print('Did not find ProxyAuth cookie')
        sys.exit(1)
    driver.close()
    del driver

    # Download data.
    zip_data_r = requests.get(found_url, cookies={'ProxyAuth': auth_cookie})
    zip_data_r.raise_for_status()

    # Read data.
    tmp_file = tempfile.NamedTemporaryFile()
    tmp_file.write(zip_data_r.content)
    tmp_dir = tempfile.TemporaryDirectory()
    subprocess.run(["7z", "e", tmp_file.name, f"-o{tmp_dir.name}"], check=True, stdout=subprocess.PIPE)
    p = Path(tmp_dir.name)
    target_file = list(p.iterdir())[-1]
    data = target_file.read_text()
    return data

feeder/lib/glotec.py Normal file

@@ -0,0 +1,66 @@
import time

import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import numpy as np
import requests
from dateutil.parser import parse
from dateutil.tz import tzutc, tzlocal
from matplotlib.colors import LinearSegmentedColormap
from mpl_toolkits.axes_grid1 import make_axes_locatable
from scipy.interpolate import griddata


def get_latest_glotec():
    r = requests.get('https://services.swpc.noaa.gov/experimental/products/glotec/geojson_2d_urt.json')
    r.raise_for_status()
    index_json = r.json()[-1]
    data_url = 'https://services.swpc.noaa.gov' + index_json['url']
    r2 = requests.get(data_url)
    r2.raise_for_status()
    return r2.json()


def plot_glotec_map(data: dict, lon_range: list, lat_range: list):
    lons = []
    lats = []
    tec_values = []
    for feature in data['features']:
        lon, lat = feature['geometry']['coordinates']
        tec = feature['properties']['tec']
        lons.append(lon)
        lats.append(lat)
        tec_values.append(tec)
    lons = np.array(lons)
    lats = np.array(lats)
    tec_values = np.array(tec_values)

    lon_grid, lat_grid = np.meshgrid(np.linspace(lon_range[0], lon_range[1], 100), np.linspace(lat_range[0], lat_range[1], 100))

    # Interpolate the TEC values onto the regular grid
    tec_grid = griddata((lons, lats), tec_values, (lon_grid, lat_grid), method='linear')

    proj = ccrs.PlateCarree()
    f, ax = plt.subplots(1, 1, subplot_kw=dict(projection=proj))
    colors = ['#33184a', '#4454c3', '#4294ff', '#1ad2d2', '#3cf58e', '#9cfe40', '#dde037', '#fdac34', '#f26014', '#ca2a04', '#7A0403']
    custom_cmap = LinearSegmentedColormap.from_list('custom', colors)
    h = ax.pcolormesh(lon_grid, lat_grid, tec_grid, cmap=custom_cmap, vmin=0, vmax=100, transform=proj)
    ax.coastlines()

    timestamp_utc = parse(data['time_tag'])
    timestamp_local = timestamp_utc.replace(tzinfo=tzutc()).astimezone(tzlocal())
    plt.title(timestamp_local.strftime(f'%H:%M %m-%d-%Y {time.tzname[0]}'), fontsize=12, y=1.04)
    plt.suptitle('Global Total Electron Content', fontsize=16, y=0.87)

    divider = make_axes_locatable(ax)
    ax_cb = divider.new_horizontal(size='5%', pad=0.1, axes_class=plt.Axes)
    f.add_axes(ax_cb)
    cb = plt.colorbar(h, cax=ax_cb)
    plt.rc('text', usetex=True)
    cb.set_label('VTEC ($10^{16}*\\mathrm{m}^{-2}$)')
    return tec_grid, plt


@@ -1,70 +0,0 @@
import re
import time
from datetime import datetime

import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import numpy as np
from dateutil.tz import tzutc, tzlocal
from mpl_toolkits.axes_grid1 import make_axes_locatable

"""
https://github.com/daniestevez/jupyter_notebooks/blob/master/IONEX.ipynb
"""


def parse_ionex_datetime(s: str):
    match = re.match(r'\s*(\d{4})\s*(\d{1,2})\s*(\d{1,2})\s*(\d{1,2})\s*(\d{1,2})\s*(\d{1,2})', s)
    if match:
        year, month, day, hour, minute, second = map(int, match.groups())
        return datetime(year, month, day, hour, minute, second)
    else:
        raise ValueError("Invalid date format")


def parse_map(tecmap, exponent=-1):
    tecmap = re.split('.*END OF TEC MAP', tecmap)[0]
    return np.stack([np.fromstring(l, sep=' ') for l in re.split('.*LAT/LON1/LON2/DLON/H\\n', tecmap)[1:]]) * 10 ** exponent


def get_tecmaps(ionex: str):
    for tecmap in ionex.split('START OF TEC MAP')[1:]:
        lines = tecmap.split('\n')
        epoch = lines[1].strip() if len(lines) > 1 else None
        yield parse_map(tecmap), epoch


def plot_tec_map(tecmap, lon_range: list, lat_range: list, timestamp_utc: datetime = None):
    proj = ccrs.PlateCarree()
    f, ax = plt.subplots(1, 1, subplot_kw=dict(projection=proj))

    # Create arrays of latitudes and longitudes to match the geographical grid of the TEC map data.
    # This is hard coded and should never change.
    lat = np.arange(-87.5, 87.5, 2.5)
    lon = np.arange(-180, 180, 5.0)

    # Create a mask for the data in the lat/lon range
    lon_mask = (lon >= lon_range[0]) & (lon < lon_range[1])
    lat_mask = (lat >= lat_range[0]) & (lat < lat_range[1])
    mask = np.ix_(lat_mask, lon_mask)

    # Select only the data in the lat/lon range
    tecmap_ranged = tecmap[mask]

    # Plot the TEC map
    h = plt.imshow(tecmap_ranged, cmap='viridis', vmin=0, vmax=100, extent=(lon_range[0], lon_range[1], lat_range[0], lat_range[1]), transform=proj)

    # Make graph pretty
    ax.coastlines()
    if timestamp_utc:
        timestamp_local = timestamp_utc.replace(tzinfo=tzutc()).astimezone(tzlocal())
        plt.title(timestamp_local.strftime(f'%H:%M %m-%d-%Y {time.tzname[0]}'), fontsize=12, y=1.04)
    plt.suptitle('Vertical Total Electron Count', fontsize=16, y=0.87)

    divider = make_axes_locatable(ax)
    ax_cb = divider.new_horizontal(size='5%', pad=0.1, axes_class=plt.Axes)
    f.add_axes(ax_cb)
    cb = plt.colorbar(h, cax=ax_cb)
    plt.rc('text', usetex=True)
    cb.set_label('TECU ($10^{16} \\mathrm{el}/\\mathrm{m}^2$)')
    return tecmap_ranged, plt


@@ -8,9 +8,10 @@ from datetime import datetime, timezone
 import numpy as np
 import paho.mqtt.client as mqtt
+from dateutil.parser import parse
 from redis import Redis
-from lib.tecmap import plot_tec_map
+from feeder.lib.glotec import plot_glotec_map
 logging.basicConfig(level=logging.INFO)
@@ -30,12 +31,6 @@ if not LAT_RANGE_MIN or not LAT_RANGE_MAX or not LON_RANGE_MIN or not LON_RANGE_MAX:
     print(LAT_RANGE_MIN, LAT_RANGE_MAX, LON_RANGE_MIN, LON_RANGE_MAX)
     sys.exit(1)
-CDDIS_USERNAME = os.getenv('CDDIS_USERNAME')
-CDDIS_PASSWORD = os.getenv('CDDIS_PASSWORD')
-if not CDDIS_USERNAME or not CDDIS_PASSWORD:
-    logging.critical('Must set CDDIS_USERNAME and CDDIS_PASSWORD environment variables')
-    sys.exit(1)
 client = mqtt.Client(client_id=MQTT_CLIENT_ID)
 if MQTT_USERNAME and MQTT_PASSWORD:
     client.username_pw_set(MQTT_USERNAME, MQTT_PASSWORD)
@@ -63,29 +58,25 @@ def main():
     redis = Redis(host='localhost', port=6379, db=0)
     while True:
-        data = redis.get('tecmap_data')
+        data = redis.get('latest_glotec_data')
         while data is None:
             logging.warning('Redis has not been populated yet. Is cache.py running? Sleeping 10s...')
             time.sleep(10)
-            data = redis.get('tecmap_data')
-        ionex_data = pickle.loads(data)
+            data = redis.get('latest_glotec_data')
+        geojson = pickle.loads(data)
         utc_hr = datetime.now(timezone.utc).hour
         logging.info(f'Using hour {utc_hr}')
-        avg_tec = None
-        for tecmap, epoch in ionex_data:
-            if epoch.hour == utc_hr:
-                tecmap_ranged, _ = plot_tec_map(tecmap, [float(LON_RANGE_MIN), float(LON_RANGE_MAX)], [float(LAT_RANGE_MIN), float(LAT_RANGE_MAX)])
-                avg_tec = np.mean(tecmap_ranged)
-                logging.info(f'Data timestamp: {epoch.isoformat()}')
-                break
+        glotec_map_ranged, _ = plot_glotec_map(geojson, [float(LON_RANGE_MIN), float(LON_RANGE_MAX)], [float(LAT_RANGE_MIN), float(LAT_RANGE_MAX)])
+        avg_tec = np.mean(glotec_map_ranged)
+        logging.info(f'Data timestamp: {parse(geojson["time_tag"]).isoformat()}')
         latest = round(avg_tec, 1)
-        publish('vtec', latest)
+        publish('latest_glotec_data', latest)
         del data
-        del ionex_data
-        del tecmap_ranged
+        del geojson
+        del glotec_map_ranged
         del avg_tec
         del latest
         gc.collect()
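
Worth noting as an observation, not something this commit changes: scipy's griddata with method='linear' fills grid points outside the convex hull of the GLOTEC samples with NaN, so np.mean over the interpolated grid can come back as NaN when the configured lat/lon range reaches past the data. A defensive sketch (hypothetical, not in this diff) might be:

# Sketch only: skip NaN cells left by the linear interpolation.
avg_tec = float(np.nanmean(glotec_map_ranged))
if np.isnan(avg_tec):
    logging.warning('No valid GLOTEC samples in the configured lat/lon range')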


@@ -12,3 +12,4 @@ flask==3.0.3
 schedule==1.2.2
 gunicorn==23.0.0
 python-dateutil==2.9.0.post0
+scipy==1.14.1


@@ -1,39 +0,0 @@
import datetime
import io

import redis
from PIL import Image, ImageDraw, ImageFont
from flask import Flask, send_file, make_response

NO_MAP_STR = 'NO GLOBAL MAP AVAILABLE'

app = Flask(__name__)
redis_client = redis.Redis(host='localhost', port=6379)


@app.route('/global')
def serve_global_map():
    global_map_data = redis_client.get('global_map')
    if global_map_data is None:
        img = Image.new('RGB', (633, 356), color=(255, 255, 255))
        d = ImageDraw.Draw(img)
        fnt = ImageFont.load_default(size=30)
        w, h = fnt.getbbox(NO_MAP_STR)[2:4]
        d.text(((500 - w) / 2, (300 - h) / 2), NO_MAP_STR, font=fnt, fill=(0, 0, 0))
        buf = io.BytesIO()
        img.save(buf, format='PNG')
        buf.seek(0)
        return send_file(buf, mimetype='image/png')
    buf = io.BytesIO(global_map_data)
    buf.seek(0)
    response = make_response(send_file(buf, mimetype='image/png'))
    expires = datetime.datetime.now()
    expires = expires + datetime.timedelta(minutes=10)
    response.headers['Cache-Control'] = 'public, max-age=600'
    response.headers['Expires'] = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
    return response


if __name__ == '__main__':
    app.run()


@@ -1,15 +0,0 @@
[Unit]
Description=Space Weather Global Image Generator
After=network.target space-weather-cache.service

[Service]
Type=simple
User=homeassistant
EnvironmentFile=/etc/secrets/space-weather
ExecStart=/srv/ha-noaa-space-weather/venv/bin/python /srv/ha-noaa-space-weather/feeder/global-image.py
SyslogIdentifier=space-weather-global-image
Restart=on-failure
RestartSec=5s

[Install]
WantedBy=multi-user.target


@@ -1,16 +0,0 @@
[Unit]
Description=Space Weather Server
After=network.target

[Service]
Type=simple
User=homeassistant
EnvironmentFile=/etc/secrets/space-weather
WorkingDirectory=/srv/ha-noaa-space-weather/feeder
ExecStart=/srv/ha-noaa-space-weather/venv/bin/gunicorn --workers 7 --bind 0.0.0.0:5000 server:app --access-logfile '-' --error-logfile '-'
SyslogIdentifier=space-weather-server
Restart=on-failure
RestartSec=5s

[Install]
WantedBy=multi-user.target