diff --git a/pylib/iemweb/autoplot/scripts/p51.py b/pylib/iemweb/autoplot/scripts/p51.py
index a7a688a63..b5147538a 100644
--- a/pylib/iemweb/autoplot/scripts/p51.py
+++ b/pylib/iemweb/autoplot/scripts/p51.py
@@ -13,6 +13,7 @@
 from pyiem.database import get_sqlalchemy_conn
 from pyiem.exceptions import NoDataFound
 from pyiem.plot import figure
+from sqlalchemy import text

 PDICT = {
     "all": "Show All Three Plots",
@@ -124,14 +125,18 @@ def plotter(ctx: dict):
     climosite = ctx["_nt"].sts[station]["climate_site"]
     with get_sqlalchemy_conn("coop") as conn:
         climo = pd.read_sql(
-            f"""
-            SELECT day, sday, gddxx(%s, %s, high, low) as {glabel},
+            text(f"""
+            SELECT day, sday, gddxx(:gddbase, :gddceil, high, low) as {glabel},
             sdd86(high, low) as sdd86, precip
-            from alldata WHERE station = %s and
+            from alldata WHERE station = :station and
             year >= 1951 ORDER by day ASC
-            """,
+            """),
             conn,
-            params=(gddbase, gddceil, ctx["_nt"].sts[station]["climate_site"]),
+            params={
+                "gddbase": gddbase,
+                "gddceil": gddceil,
+                "station": ctx["_nt"].sts[station]["climate_site"],
+            },
             index_col="day",
         )
     if climo.empty:
@@ -182,17 +187,23 @@ def plotter(ctx: dict):
         climo = pd.DataFrame(rows)

     # build the obs
-    with get_sqlalchemy_conn("iem") as conn:
+    with get_sqlalchemy_conn("isuag") as conn:
         df = pd.read_sql(
-            f"""SELECT day, to_char(day, 'mmdd') as sday,
-            gddxx(%s, %s, max_tmpf, min_tmpf) as o{glabel},
-            coalesce(pday, 0) as oprecip,
-            sdd86(max_tmpf, min_tmpf) as osdd86 from summary s JOIN stations t
-            ON (s.iemid = t.iemid)
-            WHERE t.id = %s and t.network = %s and
-            to_char(day, 'mmdd') != '0229' ORDER by day ASC""",
+            text(f"""
+            SELECT valid as day, to_char(valid, 'mmdd') as sday,
+            gddxx(:gddbase, :gddceil, c2f(tair_c_max_qc), c2f(tair_c_min_qc))
+            as o{glabel},
+            coalesce(rain_in_tot_qc, 0) as oprecip,
+            sdd86( c2f(tair_c_max_qc), c2f(tair_c_min_qc)) as osdd86
+            from sm_daily
+            WHERE station = :station and to_char(valid, 'mmdd') != '0229'
+            ORDER by day ASC"""),
             conn,
-            params=(gddbase, gddceil, station, ctx["network"]),
+            params={
+                "gddbase": gddbase,
+                "gddceil": gddceil,
+                "station": station,
+            },
             index_col=None,
         )
     # Now we need to join the frames
@@ -200,14 +211,14 @@
     df = df.sort_values("day", ascending=True)
     df = df.set_index("day")
     df["precip_diff"] = df["oprecip"] - df["cprecip"]
-    df[glabel + "_diff"] = df["o" + glabel] - df["c" + glabel]
+    df[f"{glabel}_diff"] = df[f"o{glabel}"] - df[f"c{glabel}"]

     ab = ctx["_nt"].sts[station]["archive_begin"]
     if ab is None:
         raise NoDataFound("Unknown station metadata.")
     fig = figure(apctx=ctx)
     if whichplots == "all":
-        ax1 = fig.add_axes([0.1, 0.7, 0.8, 0.2])
+        ax1 = fig.add_axes((0.1, 0.7, 0.8, 0.2))
         ax2 = fig.add_axes(
             [0.1, 0.6, 0.8, 0.1], sharex=ax1, facecolor="#EEEEEE"
         )
@@ -218,17 +229,17 @@
             "SDD(base=86)"
         )
     elif whichplots == "gdd":
-        ax1 = fig.add_axes([0.14, 0.31, 0.8, 0.57])
+        ax1 = fig.add_axes((0.14, 0.31, 0.8, 0.57))
         ax2 = fig.add_axes(
             [0.14, 0.11, 0.8, 0.2], sharex=ax1, facecolor="#EEEEEE"
         )
         title = f"GDD(base={gddbase:.0f},ceil={gddceil:.0f})"
     elif whichplots == "precip":
-        ax3 = fig.add_axes([0.1, 0.11, 0.8, 0.75])
+        ax3 = fig.add_axes((0.1, 0.11, 0.8, 0.75))
         ax1 = ax3
         title = "Precipitation"
     else:  # sdd
-        ax4 = fig.add_axes([0.1, 0.1, 0.8, 0.8])
+        ax4 = fig.add_axes((0.1, 0.1, 0.8, 0.8))
         ax1 = ax4
         title = "Stress Degree Days (base=86)"

diff --git a/pylib/iemweb/request/daily.py b/pylib/iemweb/request/daily.py
index e86552179..7eae6054f 100644
--- a/pylib/iemweb/request/daily.py
+++ b/pylib/iemweb/request/daily.py
@@ -1,5 +1,7 @@
 """.. title:: IEM Computed Daily Summaries

+Return to `API Services `_
+
 Documentation for /cgi-bin/request/daily.py
 -------------------------------------------

@@ -14,13 +16,17 @@
 Request all high temperature data for Ames, IA (AMW) for the month of
 January 2019:

-    https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py?sts=2019-01-01&ets=2019-01-31&network=IA_ASOS&stations=AMW&var=max_temp_f&format=csv
+https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py?\
+sts=2019-01-01&ets=2019-01-31&network=IA_ASOS&stations=AMW&\
+var=max_temp_f&format=csv

 Request daily precipitation and the climatology for all stations in
 Washington state on 23 June 2023 in Excel format:

-    https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py?sts=2023-06-23&ets=2023-06-23&network=WA_ASOS&stations=_ALL&var=precip_in,climo_precip_in&format=excel
+https://mesonet.agron.iastate.edu/cgi-bin/request/daily.py?\
+sts=2023-06-23&ets=2023-06-23&network=WA_ASOS&stations=_ALL&\
+var=precip_in,climo_precip_in&format=excel

 """
diff --git a/pylib/iemweb/request/gis/lsr.py b/pylib/iemweb/request/gis/lsr.py
index 4a7aae6d7..fee19e274 100644
--- a/pylib/iemweb/request/gis/lsr.py
+++ b/pylib/iemweb/request/gis/lsr.py
@@ -48,8 +48,8 @@

 """

-import datetime
 import zipfile
+from datetime import timedelta
 from io import BytesIO, StringIO

 import fiona
@@ -196,7 +196,7 @@ def get_time_domain(form):
     if form["recent"] is not None:
         # Allow for specifying a recent number of seconds
         ets = utc()
-        sts = ets - datetime.timedelta(seconds=form["recent"])
+        sts = ets - timedelta(seconds=form["recent"])
         return sts, ets
     if form["sts"] is None:
         raise IncompleteWebRequest("GET start time parameters missing")
diff --git a/pylib/iemweb/request/gis/nexrad_storm_attrs.py b/pylib/iemweb/request/gis/nexrad_storm_attrs.py
index 7c372b039..cee00fac6 100644
--- a/pylib/iemweb/request/gis/nexrad_storm_attrs.py
+++ b/pylib/iemweb/request/gis/nexrad_storm_attrs.py
@@ -33,8 +33,8 @@

 """

-import datetime
 import zipfile
+from datetime import timedelta
 from io import BytesIO, StringIO

 import shapefile
@@ -94,7 +94,7 @@ def run(environ, start_response):
         len(environ["radar"]) > 2
         and (environ["ets"] - environ["sts"]).days > 6
     ):
-        environ["ets"] = environ["sts"] + datetime.timedelta(days=7)
+        environ["ets"] = environ["sts"] + timedelta(days=7)
     fn = f"stormattr_{environ['sts']:%Y%m%d%H%M}_{environ['ets']:%Y%m%d%H%M}"

     with get_sqlalchemy_conn("radar") as conn:
diff --git a/pylib/iemweb/request/gis/pireps.py b/pylib/iemweb/request/gis/pireps.py
index ae86cc6d9..1ad522c77 100644
--- a/pylib/iemweb/request/gis/pireps.py
+++ b/pylib/iemweb/request/gis/pireps.py
@@ -1,5 +1,7 @@
 """.. title:: Pilot Weather Report (PIREP) Data Service

+Return to `API Services `_
+
 Documentation for /cgi-bin/request/gis/pireps.py
 ------------------------------------------------

@@ -30,8 +32,8 @@

 """

-import datetime
 import zipfile
+from datetime import timedelta
 from io import BytesIO, StringIO

 import shapefile
@@ -140,7 +142,7 @@ def run(environ, start_response):
         )
     else:
         if (environ["ets"] - environ["sts"]).days > 120:
-            environ["ets"] = environ["sts"] + datetime.timedelta(days=120)
+            environ["ets"] = environ["sts"] + timedelta(days=120)
     sql = f"""
     SELECT to_char(valid at time zone 'UTC', 'YYYYMMDDHH24MI') as utctime,
     case when is_urgent then 'T' else 'F' end,
diff --git a/pylib/iemweb/request/grx/time_mot_loc.py b/pylib/iemweb/request/grx/time_mot_loc.py
index 82937b939..51b8f8b2d 100644
--- a/pylib/iemweb/request/grx/time_mot_loc.py
+++ b/pylib/iemweb/request/grx/time_mot_loc.py
@@ -1,5 +1,7 @@
 """.. title:: NWS TOR+SVR Warning Time-Mot-Loc

+Return to `API Services `_
+
 Changelog
 ---------

@@ -15,9 +17,9 @@

 """

-import datetime
 import math
 import re
+from datetime import timedelta
 from io import StringIO
 from zoneinfo import ZoneInfo

@@ -89,7 +91,7 @@ def gentext(sio, row, grversion):
             lons.append(float(token[0]))
             lats.append(float(token[1]))
     for seconds in [0, duration / 2.0, duration]:
-        valid = tml_valid + datetime.timedelta(seconds=seconds)
+        valid = tml_valid + timedelta(seconds=seconds)
         ts = valid.strftime("%H%Mz")
         sio.write(
             f"Color: 255 255 0\n{time_range}"
@@ -107,7 +109,7 @@ def gentext(sio, row, grversion):
     sio.write("Color: 255 255 255\nThreshold:10\n\n")
     for lon, lat in zip(lons, lats):
         for minute in range(int(duration / 60.0) + 1):
-            valid = tml_valid + datetime.timedelta(minutes=minute)
+            valid = tml_valid + timedelta(minutes=minute)
             ts = valid.strftime("%H%Mz")
             lon2, lat2 = extrapolate(lon, lat, smps * minute * 60, drct)
             sio.write(f"Place: {lat2:.4f},{lon2:.4f},{ts}\n")
@@ -131,8 +133,8 @@ def application(environ, start_response):
     tmlabel = valid.strftime("%H%Mz")
     if grversion >= 1.5 or environ["valid"]:
         # Pull larger window of data to support TimeRange
-        t1 = valid - datetime.timedelta(hours=2)
-        t2 = valid + datetime.timedelta(hours=2)
+        t1 = valid - timedelta(hours=2)
+        t2 = valid + timedelta(hours=2)
         tmlabel = valid.strftime("%b %d %Y %H%Mz")
     cursor.execute(
         f"""SELECT ST_x(tml_geom) as lon, ST_y(tml_geom) as lat,
diff --git a/pylib/iemweb/request/maxcsv.py b/pylib/iemweb/request/maxcsv.py
index eabc5ea0f..a908440a4 100644
--- a/pylib/iemweb/request/maxcsv.py
+++ b/pylib/iemweb/request/maxcsv.py
@@ -79,8 +79,8 @@

 """

-import datetime
 import re
+from datetime import date, timedelta, timezone
 from zoneinfo import ZoneInfo

 # third party
@@ -127,8 +127,8 @@ def figure_phase(p1: float, p2: float) -> str:

 def do_monthly_summary(station, year, month):
     """Compute some requested monthly summary stats."""
-    sts = datetime.date(year, month, 1)
-    ets = (sts + datetime.timedelta(days=35)).replace(day=1)
+    sts = date(year, month, 1)
+    ets = (sts + timedelta(days=35)).replace(day=1)
     with get_sqlalchemy_conn("iem") as conn:
         df = pd.read_sql(
             text(
@@ -182,16 +182,16 @@ def do_moonphase(lon, lat):
         {
             "new_moon": ephem.next_new_moon(utc())
             .datetime()
-            .replace(tzinfo=datetime.timezone.utc),
+            .replace(tzinfo=timezone.utc),
             "full_moon": ephem.next_full_moon(utc())
             .datetime()
-            .replace(tzinfo=datetime.timezone.utc),
+            .replace(tzinfo=timezone.utc),
             "first_quarter": ephem.next_first_quarter_moon(utc())
             .datetime()
-            .replace(tzinfo=datetime.timezone.utc),
+            .replace(tzinfo=timezone.utc),
             "last_quarter": ephem.next_last_quarter_moon(utc())
             .datetime()
-            .replace(tzinfo=datetime.timezone.utc),
+            .replace(tzinfo=timezone.utc),
         }
     ).sort_values(ascending=True)
     # Figure out the timezone
@@ -237,20 +237,16 @@ def do_moon(lon, lat):
     obs.lat = str(lat)
     obs.long = str(lon)
     obs.date = utc().strftime("%Y/%m/%d %H:%M")
-    r1 = obs.next_rising(moon).datetime().replace(tzinfo=datetime.timezone.utc)
+    r1 = obs.next_rising(moon).datetime().replace(tzinfo=timezone.utc)
     p1 = moon.moon_phase
     obs.date = r1.strftime("%Y/%m/%d %H:%M")
-    s1 = (
-        obs.next_setting(moon).datetime().replace(tzinfo=datetime.timezone.utc)
-    )
+    s1 = obs.next_setting(moon).datetime().replace(tzinfo=timezone.utc)
     # Figure out the next rise time
     obs.date = s1.strftime("%Y/%m/%d %H:%M")
-    r2 = obs.next_rising(moon).datetime().replace(tzinfo=datetime.timezone.utc)
+    r2 = obs.next_rising(moon).datetime().replace(tzinfo=timezone.utc)
     p2 = moon.moon_phase
     obs.date = r2.strftime("%Y/%m/%d %H:%M")
-    s2 = (
-        obs.next_setting(moon).datetime().replace(tzinfo=datetime.timezone.utc)
-    )
+    s2 = obs.next_setting(moon).datetime().replace(tzinfo=timezone.utc)
     label = figure_phase(p1, p2)
     # Figure out the timezone
     pgconn, cursor = get_dbconnc("mesosite")
@@ -339,7 +335,7 @@ def do_webcams(network):
     return df


-def do_iowa_azos(date, itoday=False):
+def do_iowa_azos(dt: date, itoday=False):
     """Dump high and lows for Iowa ASOS"""
     with get_sqlalchemy_conn("iem") as conn:
         df = pd.read_sql(
@@ -349,12 +345,12 @@
             st_y(geom) as latitude, st_x(geom) as longitude, s.day,
             s.max_tmpf::int as high, s.min_tmpf::int as low,
             coalesce(pday, 0) as precip
-            from stations n JOIN summary_{date.year} s on (n.iemid = s.iemid)
+            from stations n JOIN summary_{dt.year} s on (n.iemid = s.iemid)
             WHERE n.network = 'IA_ASOS' and s.day = :dt
             """
             ),
             conn,
-            params={"dt": date},
+            params={"dt": dt},
             index_col="locationid",
         )
     if itoday:
@@ -798,9 +794,9 @@ def router(appname):
     elif appname == "iarwis":
         df = do_iarwis()
    elif appname == "iowayesterday":
-        df = do_iowa_azos(datetime.date.today() - datetime.timedelta(days=1))
+        df = do_iowa_azos(date.today() - timedelta(days=1))
     elif appname == "iowatoday":
-        df = do_iowa_azos(datetime.date.today(), True)
+        df = do_iowa_azos(date.today(), True)
     elif appname == "kcrgcitycam":
         df = do_webcams("KCRG")
     elif appname == "uvi":
diff --git a/scripts/GIS/attribute2shape.py b/scripts/GIS/attribute2shape.py
index b39a31734..f0dea711d 100644
--- a/scripts/GIS/attribute2shape.py
+++ b/scripts/GIS/attribute2shape.py
@@ -3,13 +3,14 @@
 Run every minute from RUN_1MIN.sh
 """

-import datetime
 import os
 import subprocess
 import zipfile
+from datetime import timedelta

 import shapefile
-from pyiem.util import get_dbconnc, logger, utc
+from pyiem.database import get_dbconnc
+from pyiem.util import logger, utc

 LOG = logger()
 INFORMATION = """
@@ -100,7 +101,7 @@ def main():

     # Delete anything older than 20 minutes
     now = utc()
-    ets = now - datetime.timedelta(minutes=20)
+    ets = now - timedelta(minutes=20)

     shp = shpschema()
diff --git a/scripts/coop/cfs_extract.py b/scripts/coop/cfs_extract.py
index b87c517a7..e9610d51f 100644
--- a/scripts/coop/cfs_extract.py
+++ b/scripts/coop/cfs_extract.py
@@ -7,7 +7,7 @@
 Run at 5 AM local from RUN_10_AFTER.sh
 """

-import datetime
+from datetime import timedelta

 import numpy as np
 import pygrib
@@ -34,8 +34,8 @@ def do_agg(dkey, fname, ts, data):
         lat, lon = grib.latlons()
         data["y"] = lat[:, 0]
         data["x"] = lon[0, :]
-        ftime = ts + datetime.timedelta(hours=grib.forecastTime)
-        cst = ftime - datetime.timedelta(hours=7)
+        ftime = ts + timedelta(hours=grib.forecastTime)
+        cst = ftime - timedelta(hours=7)
         key = cst.strftime("%Y-%m-%d")
         d = data["fx"].setdefault(
             key, dict(precip=None, high=None, low=None, srad=None)
@@ -58,8 +58,8 @@ def do_temp(dkey, fname, func, ts, data):
         return
     gribs = pygrib.open(fn)
     for grib in gribs:
-        ftime = ts + datetime.timedelta(hours=grib.forecastTime)
-        cst = ftime - datetime.timedelta(hours=7)
+        ftime = ts + timedelta(hours=grib.forecastTime)
+        cst = ftime - timedelta(hours=7)
         key = cst.strftime("%Y-%m-%d")
         if key not in data["fx"]:
             continue
@@ -164,7 +164,7 @@ def dbsave(ts, data):
 def main():
     """Go!"""
     # Extract 12 UTC Data
-    ts = utc() - datetime.timedelta(days=4)
+    ts = utc() - timedelta(days=4)
     ts = ts.replace(hour=12, minute=0, second=0, microsecond=0)
     data = process(ts)
     dbsave(ts, data)
diff --git a/scripts/coop/day_precip.py b/scripts/coop/day_precip.py
index 16a9cc6cf..3f42df42c 100644
--- a/scripts/coop/day_precip.py
+++ b/scripts/coop/day_precip.py
@@ -2,10 +2,10 @@
 Daily precip something
 """

-import datetime
 import os
 import subprocess
 import tempfile
+from datetime import datetime

 from pyiem.database import get_dbconn
 from pyiem.network import Table as NetworkTable
@@ -25,7 +25,7 @@ def main():
     tmpfd.write(" NWS COOP STATION DAY PRECIPITATION TOTALS\n")
     tmpfd.write(" AS CALCULATED ON THE IEM SERVER\n")

-    now = datetime.datetime.now()
+    now = datetime.now()

     # Now we load climatology
     mrain = {}
diff --git a/scripts/coop/use_acis.py b/scripts/coop/use_acis.py
index a410262bb..ee0ab50f1 100644
--- a/scripts/coop/use_acis.py
+++ b/scripts/coop/use_acis.py
@@ -1,7 +1,7 @@
 """Use data provided by ACIS to replace IEM COOP data."""

-import datetime
 import sys
+from datetime import date, datetime, timedelta
 from zoneinfo import ZoneInfo

 import click
@@ -58,8 +58,8 @@ def main(state, station):
     # We are only asking for the last 720 days of data, so might as well only
     # do stations that are currently known to be `online`
     nt = NetworkTable(network, only_online=station is None)
-    ets = datetime.date.today() - datetime.timedelta(days=1)
-    sts = ets - datetime.timedelta(days=720)
+    ets = date.today() - timedelta(days=1)
+    sts = ets - timedelta(days=720)
     pgconn, cursor = get_dbconnc("iem")
     # Lame for now
     cursor.close()
@@ -106,10 +106,10 @@ def main(state, station):
             continue
         updates = 0
         for row in j["data"]:
-            date = datetime.datetime.strptime(row[0], "%Y-%m-%d")
+            dt = datetime.strptime(row[0], "%Y-%m-%d")
             data = {}
             hour = None
-            current = obsdf.loc[date]
+            current = obsdf.loc[dt]
             for i, col in enumerate("obst maxt mint pcpn snow snwd".split()):
                 val = safe(row[i + 1][0])
                 if not is_new(val, current[col]):
@@ -119,10 +119,10 @@ def main(state, station):
                 hour = row[i + 1][1]
             if not data or hour < 0:
                 continue
-            valid = datetime.datetime(
-                date.year,
-                date.month,
-                date.day,
+            valid = datetime(
+                dt.year,
+                dt.month,
+                dt.day,
                 hour if hour < 24 else 23,
                 0 if hour < 24 else 59,
                 tzinfo=tz,
diff --git a/scripts/coop/year_precip.py b/scripts/coop/year_precip.py
index 3500ab501..e2d4708ad 100644
--- a/scripts/coop/year_precip.py
+++ b/scripts/coop/year_precip.py
@@ -1,11 +1,11 @@
 """Yearly precip something"""

-import datetime
 import os
 import subprocess
+from datetime import datetime

+from pyiem.database import get_dbconn
 from pyiem.network import Table as NetworkTable
-from pyiem.util import get_dbconn


 def main():
@@ -22,7 +22,7 @@ def main():
     fp.write(" NWS COOP STATION YEAR PRECIPITATION TOTALS\n")
     fp.write(" AS CALCULATED ON THE IEM SERVER\n")

-    now = datetime.datetime.now()
+    now = datetime.now()
     jdays = now.strftime("%j")

     mrain = {}
diff --git a/scripts/current/rwis_station.py b/scripts/current/rwis_station.py
index 427397a52..a7a3dc799 100644
--- a/scripts/current/rwis_station.py
+++ b/scripts/current/rwis_station.py
@@ -1,6 +1,6 @@
 """Iowa RWIS station plot!"""

-import datetime
+from datetime import datetime

 from pyiem.database import get_dbconnc
 from pyiem.plot import MapPlot
@@ -8,7 +8,7 @@
 def main():
     """Go Main Go"""
-    now = datetime.datetime.now()
+    now = datetime.now()

     pgconn, icursor = get_dbconnc("iem")

     # Compute normal from the climate database
@@ -30,7 +30,7 @@ def main():
     mp = MapPlot(
         axisbg="white",
         title="Iowa DOT RWIS Mesoplot",
-        subtitle="plot valid %s" % (now.strftime("%-d %b %Y %H:%M %P"),),
+        subtitle=f"plot valid {now:%-d %b %Y %H:%M %P}",
     )
     mp.plot_station(data)
     mp.drawcounties(color="#EEEEEE")
diff --git a/scripts/dbutil/add_iem_data_entry.py b/scripts/dbutil/add_iem_data_entry.py
index 64766cd49..37895c3d8 100644
--- a/scripts/dbutil/add_iem_data_entry.py
+++ b/scripts/dbutil/add_iem_data_entry.py
@@ -3,7 +3,7 @@
 called from SYNC_STATIONS.sh
 """

-import datetime
+from datetime import datetime, timedelta

 from pyiem.database import get_dbconnc
 from pyiem.util import logger
@@ -38,7 +38,7 @@ def main():
         "not s.metasite"
     )

-    now = datetime.datetime.now()
+    now = datetime.now()

     for row in icursor:
         LOG.info(
@@ -47,7 +47,7 @@ def main():
             row["network"],
         )

-        for valid in [now, now - datetime.timedelta(days=1)]:
+        for valid in [now, now - timedelta(days=1)]:
             add_summary(icursor2, valid.date(), row["iemid"])
         icursor2.execute(
             "INSERT into current (valid, iemid) VALUES ('1980-01-01', %s)",
diff --git a/scripts/dbutil/asos2archive.py b/scripts/dbutil/asos2archive.py
index fe39b75c4..61a713d7c 100644
--- a/scripts/dbutil/asos2archive.py
+++ b/scripts/dbutil/asos2archive.py
@@ -10,11 +10,12 @@
 Run from RUN_10MIN.sh
 """

-import datetime
 import sys
+from datetime import datetime, timedelta, timezone

+from pyiem.database import get_dbconnc
 from pyiem.reference import ISO8601
-from pyiem.util import get_dbconnc, get_properties, logger, set_property, utc
+from pyiem.util import get_properties, logger, set_property, utc

 LOG = logger()
 PROPERTY_NAME = "asos2archive_last"
@@ -43,8 +44,8 @@ def get_first_updated():
         LOG.warning("iem property %s is not set, abort!", PROPERTY_NAME)
         sys.exit()

-    dt = datetime.datetime.strptime(propvalue, ISO8601)
-    return dt.replace(tzinfo=datetime.timezone.utc)
+    dt = datetime.strptime(propvalue, ISO8601)
+    return dt.replace(tzinfo=timezone.utc)


 def compute_time(argv):
@@ -53,14 +54,14 @@ def compute_time(argv):
     utcnow = utcnow.replace(minute=0, second=0, microsecond=0)
     if len(argv) == 1:
         # noargs
-        yesterday = utcnow - datetime.timedelta(hours=24)
+        yesterday = utcnow - timedelta(hours=24)
         sts = yesterday.replace(hour=0)
         ets = sts.replace(hour=23, minute=59)
     elif len(argv) == 4:
         sts = utc(int(argv[1]), int(argv[2]), int(argv[3]))
         ets = sts.replace(hour=23, minute=59)
     else:
-        lasthour = utcnow - datetime.timedelta(minutes=60)
+        lasthour = utcnow - timedelta(minutes=60)
         sts = lasthour.replace(minute=0)
         ets = lasthour.replace(minute=59)
     return sts, ets
@@ -225,6 +226,8 @@ def main():
         first_updated.strftime("%Y-%m-%dT%H:%M"),
         last_updated.strftime("%Y-%m-%dT%H:%M"),
     )
+    icursor.close()
+    iempgconn.close()
     set_property(PROPERTY_NAME, last_updated)

diff --git a/scripts/dbutil/compute_isusm_sts.py b/scripts/dbutil/compute_isusm_sts.py
index 37276fbdb..ae34f1a5a 100644
--- a/scripts/dbutil/compute_isusm_sts.py
+++ b/scripts/dbutil/compute_isusm_sts.py
@@ -1,7 +1,8 @@
 """Figure out when the ISUSM data started..."""

+from pyiem.database import get_dbconn
 from pyiem.network import Table as NetworkTable
-from pyiem.util import get_dbconn, logger
+from pyiem.util import logger

 LOG = logger()
diff --git a/scripts/dbutil/compute_network_extent.py b/scripts/dbutil/compute_network_extent.py
index 9a592d64d..4039f2435 100644
--- a/scripts/dbutil/compute_network_extent.py
+++ b/scripts/dbutil/compute_network_extent.py
@@ -1,6 +1,7 @@
 """Compute the spatial extent of a network"""

-from pyiem.util import get_dbconn, logger
+from pyiem.database import get_dbconn
+from pyiem.util import logger

 LOG = logger()
diff --git a/scripts/dbutil/network_timezone.py b/scripts/dbutil/network_timezone.py
index af636776a..a50046f0e 100644
--- a/scripts/dbutil/network_timezone.py
+++ b/scripts/dbutil/network_timezone.py
@@ -2,7 +2,7 @@
 Need something to set the time zone of networks
 """

-from pyiem.util import get_dbconn
+from pyiem.database import get_dbconn


 def main():
diff --git a/scripts/dbutil/rwis2archive.py b/scripts/dbutil/rwis2archive.py
index dba043d4d..0e17afc15 100644
--- a/scripts/dbutil/rwis2archive.py
+++ b/scripts/dbutil/rwis2archive.py
@@ -4,11 +4,12 @@
 called from RUN_10_AFTER.sh
 """

-import datetime
 import sys
+from datetime import datetime, timezone

+from pyiem.database import get_dbconnc
 from pyiem.reference import ISO8601
-from pyiem.util import get_dbconnc, get_properties, logger, set_property, utc
+from pyiem.util import get_properties, logger, set_property, utc

 LOG = logger()
 PROPERTY_NAME = "rwis2archive_last"
@@ -22,8 +23,8 @@ def get_first_updated():
         LOG.warning("iem property %s is not set, abort!", PROPERTY_NAME)
         sys.exit()

-    dt = datetime.datetime.strptime(propvalue, ISO8601)
-    return dt.replace(tzinfo=datetime.timezone.utc)
+    dt = datetime.strptime(propvalue, ISO8601)
+    return dt.replace(tzinfo=timezone.utc)


 def process_traffic(first_updated, last_updated):
diff --git a/scripts/dbutil/set_climate.py b/scripts/dbutil/set_climate.py
index 8cfab7652..e57a709f6 100644
--- a/scripts/dbutil/set_climate.py
+++ b/scripts/dbutil/set_climate.py
@@ -2,7 +2,8 @@
 Assign a climate site to each site in the mesosite database, within reason
 """

-from pyiem.util import get_dbconn, logger
+from pyiem.database import get_dbconn
+from pyiem.util import logger

 LOG = logger()
diff --git a/scripts/dbutil/set_climate_climate.py b/scripts/dbutil/set_climate_climate.py
index 17f1a981c..1796e0fcf 100644
--- a/scripts/dbutil/set_climate_climate.py
+++ b/scripts/dbutil/set_climate_climate.py
@@ -3,7 +3,7 @@
 site. So a climate site like Ames gets assigned to the IAC005 (central Iowa)
 """

-from pyiem.util import get_dbconn
+from pyiem.database import get_dbconn


 def main():
diff --git a/scripts/dbutil/set_county.py b/scripts/dbutil/set_county.py
index 0fe03f244..2cb494489 100644
--- a/scripts/dbutil/set_county.py
+++ b/scripts/dbutil/set_county.py
@@ -1,6 +1,6 @@
 """Need to set station metadata for county name for a given site."""

-from pyiem.util import get_dbconn
+from pyiem.database import get_dbconn


 def set_county(mcursor2, iemid, ugc, ugcname):
diff --git a/scripts/dbutil/set_timezone.py b/scripts/dbutil/set_timezone.py
index 5ba9a4fb0..45c82feb8 100644
--- a/scripts/dbutil/set_timezone.py
+++ b/scripts/dbutil/set_timezone.py
@@ -4,7 +4,8 @@
 http://efele.net/maps/tz/world/
 """

-from pyiem.util import get_dbconnc, logger
+from pyiem.database import get_dbconnc
+from pyiem.util import logger

 LOG = logger()
diff --git a/scripts/dbutil/set_wfo.py b/scripts/dbutil/set_wfo.py
index 9450bf4c2..3dac359f6 100644
--- a/scripts/dbutil/set_wfo.py
+++ b/scripts/dbutil/set_wfo.py
@@ -1,6 +1,7 @@
 """Assign a WFO to sites in the metadata tables that have no WFO set."""

-from pyiem.util import get_dbconn, logger
+from pyiem.database import get_dbconn
+from pyiem.util import logger

 LOG = logger()
diff --git a/scripts/dbutil/xcheck_madis.py b/scripts/dbutil/xcheck_madis.py
index 638a54181..dff8633bd 100644
--- a/scripts/dbutil/xcheck_madis.py
+++ b/scripts/dbutil/xcheck_madis.py
@@ -1,8 +1,8 @@
 """See if MADIS knows any station metadata."""

-import datetime
 import os
 import warnings
+from datetime import timedelta

 from netCDF4 import chartostring
 from pandas import read_sql
@@ -28,7 +28,7 @@ def main():
     now = utc()
     i = 0
     while i < 10:
-        now -= datetime.timedelta(hours=1)
+        now -= timedelta(hours=1)
         testfn = now.strftime("/mesonet/data/madis/mesonet1/%Y%m%d_%H00_10.nc")
         if os.path.isfile(testfn):
             break
diff --git a/scripts/dl/download_gfs.py b/scripts/dl/download_gfs.py
index 662e09f1b..8cbc7efdf 100644
--- a/scripts/dl/download_gfs.py
+++ b/scripts/dl/download_gfs.py
@@ -6,8 +6,8 @@
 RUN from RUN_20_AFTER.sh
 """

-import datetime
 import os
+from datetime import datetime, timedelta, timezone

 import click
 import httpx
@@ -94,21 +94,23 @@ def fetch(valid, hr):


 @click.command()
-@click.option("--valid", type=click.DateTime(), help="Valid UTC Timestamp")
-def main(valid):
+@click.option(
+    "--valid", type=click.DateTime(), help="Valid UTC Timestamp", required=True
+)
+def main(valid: datetime):
     """Go Main Go"""
-    valid = valid.replace(tzinfo=datetime.timezone.utc)
+    valid = valid.replace(tzinfo=timezone.utc)
     # script is called every hour, just short circuit the un-needed hours
     if valid.hour % 6 != 0:
         return
-    times = [valid, valid - datetime.timedelta(hours=6)]
+    times = [valid, valid - timedelta(hours=6)]
     for ts in times:
         for hr in range(0, 385, 6):
             if need_to_run(ts, hr):
                 fetch(ts, hr)
     # now cull old content
     for hr in range(72, 97, 6):
-        cull(valid - datetime.timedelta(hours=hr))
+        cull(valid - timedelta(hours=hr))


 if __name__ == "__main__":
diff --git a/scripts/era5/fetch_era5.py b/scripts/era5/fetch_era5.py
index b9929e55b..018a93867 100644
--- a/scripts/era5/fetch_era5.py
+++ b/scripts/era5/fetch_era5.py
@@ -4,6 +4,7 @@
 """

 import os
+import subprocess
 import sys
 import tempfile
 import warnings
@@ -140,7 +141,7 @@ def run(valid, domain: str, force):
         zipfn,
     )
     # unzip
-    os.system(f"unzip {zipfn}")
+    subprocess.call(["unzip", "-q", zipfn])
     os.unlink(zipfn)
     with ncopen("data_0.nc") as ncin, ncopen(ncoutfn, "a") as nc:
         ingest(ncin, nc, valid, domain)
diff --git a/scripts/gfs/gfs_4inch.py b/scripts/gfs/gfs_4inch.py
index 8656308cc..973c5452b 100644
--- a/scripts/gfs/gfs_4inch.py
+++ b/scripts/gfs/gfs_4inch.py
@@ -3,7 +3,7 @@
 Called from gfs2iemre.py for just the 6z run.
 """

-import datetime
+from datetime import datetime, timedelta

 import numpy as np
 import pandas as pd
@@ -86,12 +86,12 @@ def main():
     lats = nc.variables["lat"][:]
     lons, lats = np.meshgrid(lons, lats)
     fx = nc.gfs_forecast
-    basedt = datetime.datetime.strptime(
+    basedt = datetime.strptime(
         nc.variables["time"].units.split()[2],
         "%Y-%m-%d",
     )
     for day in range(20):
-        dt = basedt + datetime.timedelta(days=day)
+        dt = basedt + timedelta(days=day)
         soilk = nc.variables["tsoil"][day, :, :]
         if np.ma.is_masked(np.ma.max(soilk)):
             continue
diff --git a/scripts/iemplot/dump_altm.py b/scripts/iemplot/dump_altm.py
index 605a1afa5..be06d8473 100644
--- a/scripts/iemplot/dump_altm.py
+++ b/scripts/iemplot/dump_altm.py
@@ -27,7 +27,7 @@ def main():
     fh.write(" PARM = ALTM\n\n STN YYMMDD/HHMM ALTM\n")

     for sid, row in df.iterrows():
-        fh.write(" %4s %s %8.2f\n" % (sid, ts, row["altm"]))
+        fh.write(f" {sid:4s} {ts} {row['altm']:8.2f}\n")


 if __name__ == "__main__":
diff --git a/scripts/ingestors/ncei/91_20/ingest.py b/scripts/ingestors/ncei/91_20/ingest.py
index 33e1cb470..c30bc6bb0 100644
--- a/scripts/ingestors/ncei/91_20/ingest.py
+++ b/scripts/ingestors/ncei/91_20/ingest.py
@@ -2,13 +2,14 @@
 https://www.ncei.noaa.gov/data/normals-daily/1991-2020/access/
 """

-import datetime
+from datetime import datetime
 from io import StringIO

 import httpx
 import pandas as pd
+from pyiem.database import get_dbconn
 from pyiem.network import Table as NetworkTable
-from pyiem.util import get_dbconn, logger
+from pyiem.util import logger
 from tqdm import tqdm

 LOG = logger()
@@ -76,7 +77,7 @@ def ingest(pgconn, sid):
     df.at[0, "precip"] = df.at[0, "pcum"]
     df.at[59, "precip"] = df.at[58, "precip"]
     for _, row in df.iterrows():
-        now = datetime.datetime(2000, row["month"], row["day"])
+        now = datetime(2000, row["month"], row["day"])
         cursor.execute(
             "INSERT into ncei_climate91 (station, valid, high, low, precip, "
             "snow) VALUES (%s, %s, %s, %s, %s, %s)",
diff --git a/scripts/ingestors/ncei/91_20/merge_stations.py b/scripts/ingestors/ncei/91_20/merge_stations.py
index f824f64ca..be9572c5b 100644
--- a/scripts/ingestors/ncei/91_20/merge_stations.py
+++ b/scripts/ingestors/ncei/91_20/merge_stations.py
@@ -1,6 +1,6 @@
 """Create NCEI91 station entries."""

-from pyiem.util import get_dbconn
+from pyiem.database import get_dbconn


 def compute_stations(cursor):
diff --git a/scripts/ingestors/ncei/ingest_fisherporter.py b/scripts/ingestors/ncei/ingest_fisherporter.py
index a0262deee..5588e08db 100644
--- a/scripts/ingestors/ncei/ingest_fisherporter.py
+++ b/scripts/ingestors/ncei/ingest_fisherporter.py
@@ -7,8 +7,9 @@
 from datetime import datetime

 import pandas as pd
+from pyiem.database import get_dbconn
 from pyiem.network import Table as NetworkTable
-from pyiem.util import get_dbconn, logger
+from pyiem.util import logger

 LOG = logger()
diff --git a/scripts/month/obs_precip.py b/scripts/month/obs_precip.py
index 8fb5eac21..3bb1a9708 100644
--- a/scripts/month/obs_precip.py
+++ b/scripts/month/obs_precip.py
@@ -1,17 +1,17 @@
 """Generate a map of this month's observed precip"""

-import datetime
+from datetime import datetime

-import pyiem.tracker
+from pyiem import tracker
+from pyiem.database import get_dbconnc
 from pyiem.plot import MapPlot
-from pyiem.util import get_dbconnc


 def main():
     """Go Main Go"""
-    now = datetime.datetime.now()
+    now = datetime.now()

-    qdict = pyiem.tracker.loadqc()
+    qdict = tracker.loadqc()

     pgconn, icursor = get_dbconnc("iem")
diff --git a/scripts/month/obs_precip_coop.py b/scripts/month/obs_precip_coop.py
index ba071e1df..13db7a02b 100644
--- a/scripts/month/obs_precip_coop.py
+++ b/scripts/month/obs_precip_coop.py
@@ -1,18 +1,18 @@
 """Generate a map of this month's observed precip"""

-import datetime
+from datetime import date, timedelta

+from pyiem.database import get_dbconnc
 from pyiem.plot import MapPlot
-from pyiem.util import get_dbconnc


 def main():
     """Go Main Go"""
-    now = datetime.date.today()
+    now = date.today()

     pgconn, icursor = get_dbconnc("iem")
     day1 = now.replace(day=1)
-    day2 = (now + datetime.timedelta(days=35)).replace(day=1)
+    day2 = (now + timedelta(days=35)).replace(day=1)

     # Compute normal from the climate database
     sql = """SELECT id,
diff --git a/scripts/month/plot_sdd.py b/scripts/month/plot_sdd.py
index 0197dea22..356278730 100644
--- a/scripts/month/plot_sdd.py
+++ b/scripts/month/plot_sdd.py
@@ -1,18 +1,19 @@
 """Generate a plot of SDD"""

-import datetime
 import sys
+from datetime import datetime

+from pyiem.database import get_dbconn
 from pyiem.network import Table as NetworkTable
 from pyiem.plot import MapPlot
-from pyiem.util import get_dbconn, logger
+from pyiem.util import logger

 LOG = logger()


 def main():
     """Go Main Go"""
-    now = datetime.datetime.now()
+    now = datetime.now()

     pgconn = get_dbconn("coop")
     ccursor = pgconn.cursor()
diff --git a/scripts/roads/merge_roads.py b/scripts/roads/merge_roads.py
index 2b10b5b33..707527a7e 100644
--- a/scripts/roads/merge_roads.py
+++ b/scripts/roads/merge_roads.py
@@ -1,6 +1,6 @@
 """Compare the REST service with our current database."""

-import datetime
+from datetime import timedelta

 import httpx
 from ingest_roads_rest import LOG, URI
@@ -65,7 +65,7 @@ def main():
                 longname,
                 idot_id,
                 archive_begin,
-                archive_begin + datetime.timedelta(days=365),
+                archive_begin + timedelta(days=365),
             ),
         )
         segid = cursor.fetchone()[0]
diff --git a/scripts/yieldfx/baseline2db.py b/scripts/yieldfx/baseline2db.py
index 741022a84..f55192e6b 100644
--- a/scripts/yieldfx/baseline2db.py
+++ b/scripts/yieldfx/baseline2db.py
@@ -1,8 +1,8 @@
 """Copy the provided baseline data to the database"""

-import datetime
 import glob
 import os
+from datetime import date, timedelta

 from pyiem.database import get_dbconn
 from pyiem.util import convert_value, logger
@@ -25,7 +25,7 @@ def main():
     )
     for row in icursor:
         if row[1] is None or row[2] is None:
-            dsm[row[0]] = dsm[row[0] - datetime.timedelta(days=1)]
+            dsm[row[0]] = dsm[row[0] - timedelta(days=1)]
         else:
             dsm[row[0]] = {
                 "wind_speed": convert_value(row[1], "knot", "meter / second"),
@@ -45,9 +45,9 @@ def main():
             if not line.startswith("19") and not line.startswith("20"):
                 continue
             tokens = line.split()
-            valid = datetime.date(
-                int(tokens[0]), 1, 1
-            ) + datetime.timedelta(days=int(tokens[1]) - 1)
+            valid = date(int(tokens[0]), 1, 1) + timedelta(
+                days=int(tokens[1]) - 1
+            )
             cursor.execute(
                 """
                 INSERT into yieldfx_baseline (station, valid,
diff --git a/scripts/yieldfx/county_csv.py b/scripts/yieldfx/county_csv.py
index ca7a64f41..144900d20 100644
--- a/scripts/yieldfx/county_csv.py
+++ b/scripts/yieldfx/county_csv.py
@@ -1,8 +1,8 @@
 """Here we are, dumping CSV."""

-import datetime
 import os
 import subprocess
+from datetime import date, timedelta

 import numpy as np
 import pandas as pd
@@ -62,9 +62,9 @@ def process(df, ncfn, south, west):
             "year,yday,prcp (mm/day),srad (W/m^2),tmax (deg c),"
             "tmin (deg c)\n"
         )
-        base = datetime.date(1980, 1, 1)
+        base = date(1980, 1, 1)
         for tstep, days in enumerate(nc.variables["time"]):
-            ts = base + datetime.timedelta(days=int(days))
+            ts = base + timedelta(days=int(days))
             fp.write(
                 "%s,%s,%.3f,%.3f,%.3f,%.3f\n"
                 % (
@@ -76,9 +76,9 @@ def process(df, ncfn, south, west):
                     nc_tmin[tstep],
                 )
             )
-        base = datetime.date(2019, 1, 1)
+        base = date(2019, 1, 1)
         for tstep, days in enumerate(nc2019.variables["time"]):
-            ts = base + datetime.timedelta(days=int(days))
+            ts = base + timedelta(days=int(days))
             fp.write(
                 "%s,%s,%.3f,%.3f,%.3f,%.3f\n"
                 % (
diff --git a/scripts/yieldfx/psims_baseline.py b/scripts/yieldfx/psims_baseline.py
index c0923c0d9..55a5d4760 100644
--- a/scripts/yieldfx/psims_baseline.py
+++ b/scripts/yieldfx/psims_baseline.py
@@ -3,9 +3,9 @@
 Run from RUN_2AM.sh for yesterday and eight days ago, so to pick up POWER.
 """

-import datetime
 import os
 import sys
+from datetime import date, datetime, timedelta

 import click
 import numpy as np
@@ -76,7 +76,7 @@ def make_netcdf(ncfn, valid, west, south):
     srad.long_name = "daylight average incident shortwave radiation"


-def copy_iemre(nc, ncdate0, ncdate1, islice, jslice):
+def copy_iemre(nc, ncdate0: date, ncdate1: date, islice, jslice):
     """Copy IEMRE data from a given year to **inclusive** dates."""
     rencfn = get_daily_ncname(ncdate0.year)
     if not os.path.isfile(rencfn):
@@ -84,12 +84,12 @@ def copy_iemre(nc, ncdate0, ncdate1, islice, jslice):
         return
     with ncopen(rencfn) as renc:
         # Compute offsets for yieldfx file
-        tidx0 = (ncdate0 - datetime.date(1980, 1, 1)).days
-        tidx1 = (ncdate1 - datetime.date(1980, 1, 1)).days
+        tidx0 = (ncdate0 - date(1980, 1, 1)).days
+        tidx1 = (ncdate1 - date(1980, 1, 1)).days
         yfx_slice = slice(tidx0, tidx1 + 1)
         # Compute offsets for the reanalysis file
-        tidx0 = (ncdate0 - datetime.date(ncdate0.year, 1, 1)).days
-        tidx1 = (ncdate1 - datetime.date(ncdate0.year, 1, 1)).days
+        tidx0 = (ncdate0 - date(ncdate0.year, 1, 1)).days
+        tidx1 = (ncdate1 - date(ncdate0.year, 1, 1)).days
         re_slice = slice(tidx0, tidx1 + 1)

         highc = convert_value(
@@ -137,8 +137,8 @@ def tile_extraction(nc, valid, west, south, fullmode):
         for year in range(1980, valid.year + 1):
             copy_iemre(
                 nc,
-                datetime.date(year, 1, 1),
-                datetime.date(year, 12, 31),
+                date(year, 1, 1),
+                date(year, 12, 31),
                 islice,
                 jslice,
             )
@@ -149,7 +149,7 @@ def tile_extraction(nc, valid, west, south, fullmode):
 def qc(nc):
     """Quick QC of the file."""
     for i, time in enumerate(nc.variables["time"][:]):
-        ts = datetime.date(1980, 1, 1) + datetime.timedelta(days=int(time))
+        ts = date(1980, 1, 1) + timedelta(days=int(time))
         avgv = np.mean(nc.variables["srad"][i, :, :])
         if avgv > 0:
             continue
@@ -174,7 +174,7 @@ def workflow(valid, ncfn, west, south, fullmode):
 @click.command()
 @click.option("--date", "dt", type=click.DateTime(), help="Date to process")
 @click.option("--full", is_flag=True, help="Full replacement mode")
-def main(dt, full):
+def main(dt: datetime, full):
     """Go Main Go"""
     if dt is not None:
         dt = dt.date()
diff --git a/tests/iemweb/autoplot/urllist.txt b/tests/iemweb/autoplot/urllist.txt
index c35b31bfb..162b714df 100644
--- a/tests/iemweb/autoplot/urllist.txt
+++ b/tests/iemweb/autoplot/urllist.txt
@@ -47,10 +47,11 @@
 /plotting/auto/plot/44/plot:bar::opt:fema::network:WFO::station:DMX::state:IA::fema:7::limit:yes::c:single::phenomena:TO::significance:W::syear:1986::eyear:2024::s:jan1::_r:t::dpi:100.png
 /plotting/auto/plot/44/plot:line::opt:fema::network:WFO::station:DMX::state:IA::fema:7::limit:no::c:svrtor::phenomena:TO::significance:W::syear:1986::eyear:2024::s:jan1::_r:t::dpi:100.png
 /plotting/auto/plot/44/plot:bar::opt:state::network:WFO::station:_ALL::state:VA::fema:7::limit:no::c:single::phenomena:FW::significance:W::syear:1986::eyear:2024::s:jul1::_r:43::dpi:100.png
-/plotting/auto/plot/51/network:ISUSM::station:AEEI4::sdate:2024-05-01::edate:2024-07-27::base:50::ceil:86::which:all::_r:t::dpi:100.png
-/plotting/auto/plot/51/network:ISUSM::station:AEEI4::sdate:2024-05-01::edate:2024-07-27::base:50::ceil:86::which:gdd::_r:t::dpi:100.png
-/plotting/auto/plot/51/network:ISUSM::station:AEEI4::sdate:2024-05-01::edate:2024-07-27::base:50::ceil:86::which:precip::_r:t::dpi:100.png
-/plotting/auto/plot/51/network:ISUSM::station:AEEI4::sdate:2024-05-01::edate:2024-07-27::base:50::ceil:86::which:sdd::_r:t::dpi:100.png
+/plotting/auto/plot/48/ugc:IAC169::phenomena:TO::significance:W::_r:t::dpi:100.png
+/plotting/auto/plot/51/network:ISUSM::station:BOOI4::sdate:2024-05-01::edate:2024-07-27::base:50::ceil:86::which:all::_r:t::dpi:100.png
+/plotting/auto/plot/51/network:ISUSM::station:BOOI4::sdate:2024-05-01::edate:2024-07-27::base:50::ceil:86::which:gdd::_r:t::dpi:100.png
+/plotting/auto/plot/51/network:ISUSM::station:BOOI4::sdate:2024-05-01::edate:2024-07-27::base:50::ceil:86::which:precip::_r:t::dpi:100.png
+/plotting/auto/plot/51/network:ISUSM::station:BOOI4::sdate:2024-05-01::edate:2024-07-27::base:50::ceil:86::which:sdd::_r:t::dpi:100.png
 /plotting/auto/plot/52/for:wfo::network:WFO::station:DMX::ugc:IAC169::sdate:2020-01-18::days:10::_r:t::dpi:100::_cb:1.png
 /plotting/auto/plot/52/for:wfo::network:WFO::station:XXX::ugc:IAC169::sdate:2015-01-01::days:10::_r:t::dpi:100.png
 /plotting/auto/plot/54/network1:IA_ASOS::zstation1:DSM::network2:IA_ASOS::zstation2:AMW::varname:low::cmap:Greens::_r:t::dpi:100.png
@@ -95,6 +96,7 @@
 /plotting/auto/plot/86/domain:europe::csector:IN::var:rsds::ptype:c::date:2024-01-01::cmap:magma::_r:t::dpi:100.png
 /plotting/auto/plot/86/csector:midwest::var:p01d_12z::ptype:c::date:2023-01-01::cmap:magma::_r:43::dpi:100.png
 /plotting/auto/plot/86/csector:midwest::var:p01d_12z::ptype:g::date:2023-01-01::cmap:magma::_r:43::dpi:100.png
+/plotting/auto/plot/86/csector:TN::var:low_tmpk_12z::ptype:c::date:2023-01-01::clip:yes::cmap:magma::_r:t::dpi:100.png
 /plotting/auto/plot/89/year:2024::daythres:0.50::period:7::trailthres:0.50::state:IA::_r:t::dpi:100.png
 /plotting/auto/plot/90/t:state::v:events::ilabel:yes::geo:ugc::drawc:yes::year:2024::year2:2024::sdate:1999-01-01%200000::edate:2024-08-12%200000::network:WFO::station:DMX::state:IA::phenomena:TO::significance:W::cmap:jet::_r:t::dpi:100.png
 /plotting/auto/plot/90/t:state::v:events::ilabel:yes::geo:polygon::drawc:yes::year:2024::year2:2024::sdate:1999-01-01%200000::edate:2024-08-12%200000::network:WFO::station:DMX::state:IA::phenomena:TO::significance:W::cmap:jet::_r:t::dpi:100.png
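
Illustrative sketch (not part of the patch): the p51.py hunks above swap psycopg-style %s placeholders for sqlalchemy.text() with named binds passed to pandas.read_sql as a dict. The snippet below shows that pattern in isolation, assuming a hypothetical DSN and station id; the real code obtains its connection via pyiem.database.get_sqlalchemy_conn() and relies on database functions such as gddxx() that exist in the IEM coop schema.

import pandas as pd
from sqlalchemy import create_engine, text

# Hypothetical connection string; stands in for get_sqlalchemy_conn("coop").
engine = create_engine("postgresql+psycopg://user:pass@localhost/coop")

with engine.connect() as conn:
    # Named binds (:gddbase, :gddceil, :station) replace the old %s
    # placeholders; pandas hands the dict straight to SQLAlchemy.
    df = pd.read_sql(
        text(
            "SELECT day, gddxx(:gddbase, :gddceil, high, low) as gdd "
            "FROM alldata WHERE station = :station ORDER by day ASC"
        ),
        conn,
        params={"gddbase": 50, "gddceil": 86, "station": "XXXXXX"},
        index_col="day",
    )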