#!/usr/local/anaconda3/bin/python3.7 -W ignore
import datetime as dt
import os
import pickle
import sys
import time

import matplotlib.dates as mdate
import numpy as np
from numpy import format_float_positional as ffp
import pandas as pd
from pyproj import Geod

# Import utilities and classes
import tchov_utils.getimerg_pycharm as getimerg
import tchov_utils.plothovmoller_pycharm as plothov
import tchov_utils.aggregate as agg
import tchov_utils.process_ships as ships
from tchov_utils.settings_tchov_imerg_pycharm import globalSettings

"""
Author: Jonathan Case (ENSCO, Inc./SPoRT)
Original Design Date: 11 June 2020
Name: tchov_imerg.py

Description: Script reads in IMERG Level 3 half-hourly precip rates,
combines into hourly precip bins, reads in Hurricane hourly storm track
lat/lon positions, and creates storm-centered precip means at specified
circular radii from the storm center. Finally, if all works well, this
script will create a hovmoller diagram of precip rates of time versus
radii. Will also generate diurnal mean hovmoller diagram of
hourly-averaged precip rates.

Requirements: For scp to remote server, user must have an account and
passwordless ssh keys set up between local workstation and remote server.

Optional arguments: Starting and Ending dates

Updates:
v1.1: (06/11/2020): Renamed to "tchov_prec1h_stormCentric.py" to be
    applied more generically to any tropical cyclone.
 -J.Case (06/11/2020): Renamed settings file to "settings_tchov_prec1h.py"
 -J.Case (06/11/2020): Isolated hovmoller plots into a new method called
    "plot_hovmoller", which will create either a multi-day or diurnal
    mean 1-h precip hovmoller plot.
 -J.Case (06/11/2020): Cleaned up direct references to hurricane Dorian;
    and added new settings variable "tcname".
v1.2: (07/1/2020): Finalized local time capability for hovmoller plots.
v1.3: (07/16/2020): Finalized compass quadrant capability for hov plots.
 -J.Case (7/21/2020): Finalized 4-panel plotting for quadrant analysis
    in hovmollers.
v1.4: (9/09/2020): Made settings and IMERG read routine more generic to
    support IMERG-Final, Late, or Early data files.
v1.5: (9/11/2020): Finalized inclusion of analysis by shear quadrant
    (i.e., up/downshear left/right quadrants) using SHIPS shear heading
    data.
v1.6: (9/21/2020): Direct read of SHIPS archive5/7 file into pandas df.
 -J.Case (9/22/2020): Direct reading of SHIPS real-time files into
    pandas df.
v2.0: -J.Case (11/3/2020): Convert code to a python class structure for
    increased portability.
v2.1: (ready for code audit!)
v2.2: -J.Case (Sep 2021): Additional code clean-up and rotation of shear
    quadrant plots so that left[right]-shear plots are in left[right]
    column, and down[up]-shear plots are in top[bottom] row.
 -J.Case: SHIPS pre-processing moved into Procships class in
    process_ships.py
 -J.Case: Extracted azimuthal averaging into method within Getimerg
    class.
v2.3: -J.Case (Oct 2021): Added data aggregation and pickle writing
    capability with the AggregateIMERG class in aggregate.py.
(ready for 2nd code audit!)
"""


def print_mesg(log: int, level: int, item):
    """Print item only when the configured logging level EXACTLY matches
    level (not >=); callers use this for tiered verbosity."""
    if log == level:
        print(item)


def assess_args(dattype: str):
    """Validate the command-line argument count for the given SHIPS type.

    For 'real-time' data, exactly 3 user args (stormid, start, end) are
    required; otherwise the script exits. For archive data with fewer
    than 3 args, signal that full-storm start/end dates should be taken
    from the SHIPS archive itself.

    Args:
        dattype: SHIPS data type string ('real-time' or 'archiveN').

    Returns:
        True if start/end dates should come from the SHIPS archive.
    """
    use_ships = False
    numargs = len(sys.argv)
    if dattype == 'real-time' and numargs != 4:
        # NOTE(review): placeholders below were stripped in the mangled
        # source; restored here. Also fixed the duplicated "ending
        # date/hour" line -- the first one describes the START date.
        print('ERROR! Usage: tchov_imerg.py <stormid> <sYYYYMMDDHH> '
              '<eYYYYMMDDHH>')
        print('where: <stormid> is the stormid [ATCF id],')
        print(' <sYYYYMMDDHH> is the starting date/hour, and')
        print(' <eYYYYMMDDHH> is the ending date/hour.')
        print('Argument list provided: ', str(sys.argv))
        print('Exiting script...')
        sys.exit()
    if dattype[:-1] == 'archive' and numargs < 4:
        print('Using full-storm start and end dates from SHIPS archive.')
        use_ships = True
    return use_ships
def get_storm_index(lat_arr: np.ndarray, lon_arr: np.ndarray,
                    lat_center: float, lon_center: float):
    """
    Obtain the array indices closest to the input TC lat/lon_center coord.

    Args:
        lat_arr: latitude array
        lon_arr: longitude array
        lat_center: Latitude float, scalar
        lon_center: Longitude float, scalar
    Returns:
        Tuple(latitude_ind, longitude_ind)
        NOTE(review): the original docstring claimed (lon_ind, lat_ind),
        but the code returns the latitude index first; fixed here.
    """
    # Generate the distance arrays for both lat and lon
    lat_diff = np.abs(lat_arr - lat_center)
    lon_diff = np.abs(lon_arr - lon_center)

    # Determine index where the smallest diffs occurs.
    min_latdiff = min(lat_diff)
    min_londiff = min(lon_diff)
    lat_min_pos = np.where(lat_diff == min_latdiff)
    lon_min_pos = np.where(lon_diff == min_londiff)
    print(f'min_latdiff {ffp(min_latdiff, precision=3)} at position '
          f'{lat_min_pos[0][0]}')
    print(f'min_londiff {ffp(min_londiff, precision=3)} at position '
          f'{lon_min_pos[0][0]}')
    return lat_min_pos[0][0], lon_min_pos[0][0]


def check_ships_dates(ships_df, sdate, edate):
    """Return True if any 6-h synoptic time between sdate and edate is
    missing from the SHIPS DataFrame's Timestamp column.

    Walks forward in 6-h steps from sdate; stops at the first gap.

    NOTE(review): relies on the module-level Getimerg instance ``mrg``
    created in the __main__ block -- confirm before reusing elsewhere.

    Args:
        ships_df: SHIPS 6-hourly DataFrame with a 'Timestamp' column.
        sdate: starting YYYYMMDDHH string.
        edate: ending YYYYMMDDHH string.

    Returns:
        True if a 6-h record is missing within the range, else False.
    """
    miss = False
    yyyymmddhh = sdate
    while int(yyyymmddhh) <= int(edate):
        yyyymmddhhmm = f'{yyyymmddhh}00'
        t0, yyyy0, mm0, mon0, dd0, hhmm0 = mrg.get_times(yyyymmddhhmm, 0)
        t6, yyyy6, mm6, mon6, dd6, hhmm6 = mrg.get_times(yyyymmddhhmm, 360)
        hh0 = int(hhmm0[0:2])
        times_temp = dt.datetime(int(yyyy0), int(mm0), int(dd0), hour=hh0)
        if (ships_df.Timestamp == times_temp).any():
            # Current time present: advance 6 hours.
            yyyymmddhhmm = f'{yyyy6}{mm6}{dd6}{hhmm6}'
            yyyymmddhh = yyyymmddhhmm[0:10]
        else:
            miss = True
            break
    return miss


def interp_ships(doshipspkl, shipstype, starttime, endtime, prectype,
                 precdir, shipsdf=None, shipsdf_1h=None):
    """Interpolate a block of 6-h SHIPS records to 1-hourly and append
    the result to any previously accumulated 1-h DataFrame.

    NOTE(review): relies on the module-level Procships instance
    ``shipsc`` created in the __main__ block.

    Args:
        doshipspkl: True if a SHIPS pickle file exists and may be used.
        shipstype: SHIPS data type ('real-time' or 'archiveN').
        starttime: starting YYYYMMDDHH string for this block.
        endtime: ending YYYYMMDDHH string for this block.
        prectype: IMERG precip product type.
        precdir: IMERG precip data directory.
        shipsdf: optional SHIPS 6-h DataFrame read from pickle.
        shipsdf_1h: optional previously accumulated 1-h DataFrame.

    Returns:
        The accumulated 1-hourly SHIPS DataFrame.
    """
    # Fix: mutable pd.DataFrame() defaults are created once at def time
    # and shared between calls; use None sentinels instead.
    if shipsdf is None:
        shipsdf = pd.DataFrame()
    if shipsdf_1h is None:
        shipsdf_1h = pd.DataFrame()
    if doshipspkl and (shipstype != 'real-time'):
        shipsdf_1h_temp = shipsc.get_ships_df_interp(starttime, endtime,
                                                     prectype, precdir,
                                                     shipsindf=shipsdf,
                                                     dopkl=True)
    else:
        shipsdf_1h_temp = shipsc.get_ships_df_interp(starttime, endtime,
                                                     prectype, precdir,
                                                     dopkl=False)
    # Fix: the original try/except NameError guard around shipsdf_1h was
    # dead code -- shipsdf_1h is a parameter, so it is always bound.
    # Appending to an empty frame simply yields the new block.
    # (DataFrame.append is deprecated in pandas >= 1.4; pd.concat is the
    # long-term replacement.)
    shipsdf_1h = shipsdf_1h.append(shipsdf_1h_temp)
    return shipsdf_1h


def get_dmean(nday, nday_quad, nday_qshr, nrngs, mean_arr, mean_quad_arr,
              mean_qshr_arr, log, maskval=np.float32('nan')):
    """Convert accumulated diurnal precip sums into means, in place.

    Each [hour][range] sum is divided by its sample count; cells with a
    zero count are set to ``maskval``.

    Args:
        nday: [24, nrngs] sample counts for the all-azimuth means.
        nday_quad: [4, 24, nrngs] counts for compass-quadrant means.
        nday_qshr: [4, 24, nrngs] counts for shear-quadrant means.
        nrngs: number of radial ranges.
        mean_arr, mean_quad_arr, mean_qshr_arr: accumulated sums,
            converted to means in place.
        log: logging level (2 = high verbosity).
        maskval: fill value for cells with no samples. Default NaN
            (previously a hidden dependency on a __main__ global).

    Returns:
        The (mutated) mean_arr, mean_quad_arr, mean_qshr_arr.
    """
    for irng in range(0, nrngs):
        # Inlined the exact-level print_mesg(log, 2, ...) check so this
        # function has no call-time dependency on a sibling helper.
        if log == 2:
            print(f'In get_dmean')
        for ihh in range(0, 24):
            if nday[ihh][irng] > 0:
                mean_arr[ihh][irng] = \
                    mean_arr[ihh][irng] / float(nday[ihh][irng])
            else:
                mean_arr[ihh][irng] = maskval
            for iquad in range(0, 4):
                if nday_quad[iquad][ihh][irng] > 0:
                    mean_quad_arr[iquad][ihh][irng] = \
                        mean_quad_arr[iquad][ihh][irng] / \
                        float(nday_quad[iquad][ihh][irng])
                else:
                    mean_quad_arr[iquad][ihh][irng] = maskval
                if nday_qshr[iquad][ihh][irng] > 0:
                    mean_qshr_arr[iquad][ihh][irng] = \
                        mean_qshr_arr[iquad][ihh][irng] / \
                        float(nday_qshr[iquad][ihh][irng])
                else:
                    mean_qshr_arr[iquad][ihh][irng] = maskval
    return mean_arr, mean_quad_arr, mean_qshr_arr


if __name__ == "__main__":
    """
    Optional Args:
    (1) stormid (NHC convention of Automated Tropical Cyclone Forecast ID)
        -BB##YYYY, where BB = basin (AL = Atlantic), ## is the 2-digit
         storm number, and YYYY is the 4-digit year for archive TCs.
        -BB##YY for real-time TCs, where YY is the last 2-digits of year.
    (2) sYYYYMMDDHH: starting date (year, month, day, hour string)
    (3) eYYYYMMDDHH: ending date (year, month, day, hour string)

    Optional args can be one of three choices:
    -None: code uses stormid in settings file and start/end date from data.
    -1 arg: MUST be stormid to over-ride that in the settings file
    -3 args: MUST be stormid, and sYYYYMMDD eYYYYMMDD to analyze.
    -any other combination of args will error out the program.
    """
    start_main = time.time()

    # Import dictionary settings from settings_tchov_imerg.py
    # NOTE: have not yet been able to establish a "clean" way to
    # import and read the settings dictionary in a class structure.
# import tchov_utils.readsettings as readset logging = globalSettings['logging'] xinch = globalSettings['xinch'] yinch = globalSettings['yinch'] dpi = globalSettings['dpi'] tfont = globalSettings['titleFont'] cfont = globalSettings['cbarFont'] prectype = globalSettings['prectype'] precdir = globalSettings['precdir'] pColFile = globalSettings['pColFile'] shipsfile = globalSettings['shipsfile'] shipspklf = globalSettings['shipspklf'] stormid = globalSettings['stormid'] tcname = globalSettings['tcname'] shipstype = globalSettings['shipstype'] imgdir = globalSettings['imgdir'] shipsrtdir = globalSettings['shipsrtdir'] deltalatlon = globalSettings['deltalatlon'] deltaradius = globalSettings['deltaradius'] maxradius = globalSettings['maxradius'] earthR = globalSettings['earthR'] proj = globalSettings['proj'] precunit = globalSettings['precunit'] dosubset = globalSettings['dosubset'] subsize = globalSettings['subsize'] maskland = globalSettings['maskland'] maskperc = globalSettings['maskperc'] plotprec = globalSettings['plotprec'] plotdist = globalSettings['plotdist'] plotpbin = globalSettings['plotpbin'] plot_multi_day = globalSettings['plot_multi_day'] plot_diurnal = globalSettings['plot_diurnal'] plot_quad = globalSettings['plot_quad'] ptime = globalSettings['plot_times'] invtime = globalSettings['inverttime'] doaggreg = globalSettings['doaggreg'] doaggcsv = globalSettings['doaggcsv'] doaggpkl = globalSettings['doaggpkl'] doscp = globalSettings['doscp'] server = globalSettings['server'] userid = globalSettings['userid'] remdir = globalSettings['remdir'] if precunit == 'inches': clevs = globalSettings['clevs_in'] clevs_multi = globalSettings['clevs_in_multi'] clevs_diurnal = globalSettings['clevs_in_diurnal'] else: clevs = globalSettings['clevs_mm'] clevs_multi = globalSettings['clevs_mm_multi'] clevs_diurnal = globalSettings['clevs_mm_diurnal'] # Print out settings to verify. 
print('\n---------------------------------------------------------------') print(' CONFIGURABLE SETTING ENTRIES from settings_tchov_imerg.py ') print('---------------------------------------------------------------') print('logging level: (0=low, 1=medium, 2=high) ', logging) print('xinch: ', xinch) print('yinch: ', yinch) print('dpi: ', dpi) print('titleFont: ', tfont) print('cbarFont: ', cfont) print('prectype: ', prectype) print('precdir: ', precdir) print('pickle color file: ', pColFile) print('shipsfile: ', shipsfile) print('shipspklf: ', shipspklf) print('stormid: ', stormid) print('tcname: ', tcname) print('shipstype: ', shipstype) print('imgdir: ', imgdir) print('shipsrtdir: ', shipsrtdir) print('deltalatlon: ', deltalatlon) print('deltaradius: ', deltaradius) print('maxradius: ', maxradius) print('earthR: ', earthR) print('proj: ', proj) print('precunit: ', precunit) print('dosubset: ', dosubset) print('subsize: ', subsize) print('maskland: ', maskland) print('maskperc: ', maskperc) print('plotprec: ', plotprec) print('plotdist: ', plotdist) print('plotpbin: ', plotpbin) print('plot_multi_day: ', plot_multi_day) print('plot_diurnal: ', plot_diurnal) print('plot_quad: ', plot_quad) print('plot_times: ', ptime) print('inverttime: ', invtime) print('doaggreg: ', doaggreg) print('doaggcsv: ', doaggcsv) print('doaggpkl: ', doaggpkl) print('doscp: ', doscp) print('server: ', server) print('userid: ', userid) print('remdir: ', remdir) print('clevs for hourly plots: ', clevs) print('clevs for multi-day hovmoller: ', clevs_multi) print('clevs for diurnal mean hovmoller: ', clevs_diurnal) print('\nBeginning executable code.\n') # Instantiate IMERG class mrg = getimerg.Getimerg(logging, prectype, precdir) ptypestr = 'imerg' + prectype[0] ptypelab = 'IMERG-' + prectype maskval = np.float32('nan') glons = 3600 # IMERG global lon dim, resolution 0.1-deg glats = 1800 # IMERG global lat dim, resolution 0.1-deg # 3/11/2022: Obtain landsea mask for GPM/IMERG grid 
imerg_global_mask = np.transpose(mrg.get_landsea_mask()) # List and dictionary for precip masking by compass and/or shear quadrants. # Dimension '4' in the quad variables is as follows: # 0 = NW quadrant (downshear-left) # 1 = NE quadrant (downshear-right) # 2 = SW quadrant (upshear-left) # 3 = SE quadrant (upshear-right) quad = ['nw', 'ne', 'sw', 'se'] qShrLab = { 'nw': 'downshr-left', 'ne': 'downshr-right', 'sw': 'upshr-left', 'se': 'upshr-right', } # Grads color numbers from precplot_dorian_apcp1h_stormCentric.gs.sed # template: 0 43 46 4 33 36 39 22 23 24 25 27 29 64 55 0 fp = open(pColFile, 'rb') c = pickle.load(fp) fp.close() colors = [ 'silver', c['43'], c['46'], c['4'], c['33'], c['36'], c['39'], c['22'], c['23'], c['24'], c['25'], c['27'], c['29'], c['64'], c['55'], c['58'] ] # Check for appropriate number of args and determine if we'll get # dates/times from the internal SHIPS data. use_ships_time = assess_args(shipstype) # Establish the DataFrame for the range of days requested, or for the # entire record of the archive tropical cyclone being processed. # Then interpolate SHIPS 6-h data to a 1-hourly DataFrame. print('\n Reading and preparing SHIPS data.') start_time = time.time() try: stormid = sys.argv[1] print(f'Over-riding stormid settings. New stormid = {stormid}.') # except AttributeError: except IndexError: print(f'Using stormid {stormid} from settings file.') shipsc = ships.Procships(shipstype, shipsrtdir, shipsfile, shipspklf, stormid, logging) doshipspkl = False if os.path.exists(shipspklf): doshipspkl = True storm_sdattim = storm_edattim = None syyyymmddhh = eyyyymmddhh = None shipsdf = ships_df_1h = vars_df = pd.DataFrame() if doshipspkl and (shipstype != 'real-time'): shipsdf, storm_sdattim, storm_edattim = shipsc.get_ships_df_dates() # Determine start and end dates for processing TC. 
if use_ships_time: eyyyymmddhh = storm_edattim.strftime('%Y%m%d%H') syyyymmddhh = storm_sdattim.strftime('%Y%m%d%H') else: if len(sys.argv) == 4: stormid = sys.argv[1] syyyymmddhh = sys.argv[2] eyyyymmddhh = sys.argv[3] else: print('ERROR! Usage: tchov_imerg.py ' '') print('where: is the optional stormid [ATCF id],') print(' is the start date/hour, and') print(' is the end date/hour.') print('Both sYYYYMMDDHH and eYYYYMMDDHH much be provided!') print('Argument list given: ', str(sys.argv)) print('Exiting script...') sys.exit() # Introduce an iterative procedure to parse out blocks of SHIPS 6-h records # and merge into a single 1h dataframe. This procedure is to account for # storms that dissipate and re-intensify such that there are gaps in the # 6-hourly SHIPS records. print(f'Processing {stormid} from {syyyymmddhh} to {eyyyymmddhh}.') # First, check if any times are missing for the range of dates/times. missing_flag = False if doshipspkl and (shipstype != 'real-time'): shipsdf_temp = shipsdf.reset_index() # print('SHIPS_TEMP 6h DF: \n', shipsdf_temp) missing_flag = check_ships_dates(shipsdf_temp, syyyymmddhh, eyyyymmddhh) # print('missing flag', missing_flag) # If any missing times, then process the shipsdf in "chunks" and append. 
if missing_flag: missing = dointerp = False yyyymmddhh = starttime_temp = stoptime_temp = syyyymmddhh while yyyymmddhh <= eyyyymmddhh: yyyymmddhhmm = f'{yyyymmddhh}00' t6, yyyy6, mm6, mon6, dd6, hhmm6 = mrg.get_times(yyyymmddhhmm, 360) hh6 = int(hhmm6[0:2]) times_temp = dt.datetime(int(yyyy6), int(mm6), int(dd6), hour=hh6) yyyymmddhhmm = f'{yyyy6}{mm6}{dd6}{hhmm6}' yyyymmddhh = yyyymmddhhmm[0:10] if (shipsdf_temp.Timestamp == times_temp).any(): dointerp = True if missing: starttime_temp = yyyymmddhh missing = False stoptime_temp = yyyymmddhh if yyyymmddhh == eyyyymmddhh: print(f'Do SHIPS 1h intrp: {starttime_temp}-{eyyyymmddhh}') ships_df_1h = interp_ships(doshipspkl, shipstype, starttime_temp, eyyyymmddhh, prectype, precdir, shipsdf=shipsdf, shipsdf_1h=ships_df_1h) break print(f'{times_temp} is in shipsdf. Adjust start/end times:' f' {starttime_temp} {stoptime_temp}') else: missing = True if dointerp: print(f'{times_temp} MISSING in shipsdf. Do SHIPS 1h intrp:' f' {starttime_temp} to {stoptime_temp}') ships_df_1h = interp_ships(doshipspkl, shipstype, starttime_temp, stoptime_temp, prectype, precdir, shipsdf=shipsdf, shipsdf_1h=ships_df_1h) dointerp = False else: starttime_temp = stoptime_temp = yyyymmddhh print(f'{times_temp} MISSING in shipsdf. 
Adjust times:' f' {starttime_temp} {stoptime_temp}') # No missing times in shipsdf else: ships_df_1h = interp_ships(doshipspkl, shipstype, syyyymmddhh, eyyyymmddhh, prectype, precdir, shipsdf=shipsdf) pd.set_option('display.max_rows', None) print_mesg(logging, 1, f'Final 1h interp df: \n{ships_df_1h.reset_index()}') ships_time = ffp(time.time() - start_time, precision=4) print_mesg(logging, 1, f'Time to prepare SHIPS data: {ships_time} seconds') nhours = len(ships_df_1h.index) - 1 print_mesg(logging, 1, f'nhours: {nhours}') imgdir = f'{imgdir}/{stormid}/' if not os.path.isdir(imgdir): cmd = 'mkdir -p ' + imgdir os.system(cmd) hdir = f'{remdir}/{stormid}' pdir = f'{hdir}/hourly_images/prec1h' if doscp: print(f'\n Ensuring remote dirs are created on {server}\n') cmd = f'ssh -q {userid}@{server} mkdir -p {pdir}' os.system(cmd) rads = np.arange(deltaradius, maxradius + deltaradius, deltaradius) nrads = rads.shape[0] radsstr = np.empty(shape=[nrads], dtype='object') print('Radii bins: (km) ', rads) print('Shape of rads: ', nrads) if dosubset: nlats = nlons = int(subsize) else: nlats = int(glats) nlons = int(glons) lat2d = np.zeros([nlats, nlons], dtype=np.float) lon2d = np.zeros([nlats, nlons], dtype=np.float) prec1h = np.zeros([nlats, nlons], dtype=np.float) if maskland: imerg_mask = np.zeros([nlats, nlons], dtype=np.float) p1hbin = np.zeros([nlats, nlons], dtype=np.float) p1hbin_quad = np.zeros([4, nlats, nlons], dtype=np.float) p1hbin_qshr = np.zeros([4, nlats, nlons], dtype=np.float) quad2d = np.zeros([nlats, nlons], dtype=np.float) utctimes = np.empty(shape=[nhours], dtype='object') loctimes = np.empty(shape=[nhours], dtype='object') mean = np.zeros([nhours, nrads], dtype=np.float) mean_quad = np.zeros([4, nhours, nrads], dtype=np.float) mean_qshr = np.zeros([4, nhours, nrads], dtype=np.float) meanvalstr = np.empty(shape=[nhours, nrads], dtype='object') meanvalstr_quad = np.empty(shape=[4, nhours, nrads], dtype='object') meanvalstr_qshr = np.empty(shape=[4, nhours, 
nrads], dtype='object') dmean = np.zeros([24, nrads], dtype=np.float) dmean_quad = np.zeros([4, 24, nrads], dtype=np.float) dmean_qshr = np.zeros([4, 24, nrads], dtype=np.float) dmean_loc = np.zeros([24, nrads], dtype=np.float) dmean_loc_quad = np.zeros([4, 24, nrads], dtype=np.float) dmean_loc_qshr = np.zeros([4, 24, nrads], dtype=np.float) nday_utc = np.zeros([24, nrads], dtype=np.int) nday_utc_quad = np.zeros([4, 24, nrads], dtype=np.int) nday_utc_qshr = np.zeros([4, 24, nrads], dtype=np.int) nday_loc = np.zeros([24, nrads], dtype=np.int) nday_loc_quad = np.zeros([4, 24, nrads], dtype=np.int) nday_loc_qshr = np.zeros([4, 24, nrads], dtype=np.int) # For output PKL files prec1h_out = np.zeros([nhours, nlats, nlons], dtype=np.float) lon2d_out = np.zeros([nhours, nlats, nlons], dtype=np.float) lat2d_out = np.zeros([nhours, nlats, nlons], dtype=np.float) dist2d_out = np.zeros([nhours, nlats, nlons], dtype=np.float) quad2d_out = np.zeros([nhours, nlats, nlons], dtype=np.float) syyyymmddhhmm = f'{syyyymmddhh}00' eyyyymmddhhmm = f'{eyyyymmddhh}00' stime, syyyy, smm, smon, sdd, shhmm = mrg.get_times(syyyymmddhhmm, 0) etime, eyyyy, emm, emon, edd, ehhmm = mrg.get_times(eyyyymmddhhmm, 0) shh = shhmm[0:2] ehh = ehhmm[0:2] jnk1, eyyyyp1, emmp1, jnk2, eddp1, jnk3 = mrg.get_times(eyyyymmddhhmm, 1440) print('Start times: ', stime, syyyy, smm, smon, sdd, shhmm) print(' End times: ', etime, eyyyy, emm, emon, edd, ehhmm) # Create plotting class phov = plothov.Plothovmoller(rads, xinch, yinch, dpi, tfont, cfont, colors, ptime, invtime, doscp, userid, server) # Loop through dates/times by hour nday = itothr = 0 stime_loc = etime_loc = None yyyymmddhh = syyyymmddhh yyyymmddhhmm = f'{yyyymmddhh}00' df_temp = ships_df_1h.reset_index() while yyyymmddhh != eyyyymmddhh: t00, yyyy00, mm00, mon00, dd00, hhmm00 = mrg.get_times(yyyymmddhhmm, 0) t30, yyyy30, mm30, mon30, dd30, hhmm30 = mrg.get_times(yyyymmddhhmm, 30) t60, yyyy60, mm60, mon60, dd60, hhmm60 = mrg.get_times(yyyymmddhhmm, 60) if 
not (df_temp.Timestamp == t00).any(): print(f'Datetime {t00} not in ships_df_1h. Continuing....') yyyymmddhhmm = yyyy60 + mm60 + dd60 + hhmm60 yyyymmddhh = yyyymmddhhmm[0:10] continue vtimestr = hhmm60 + 'z ' + dd60 + ' ' + mon60 + ' ' + yyyy60 # NOTE: file time is assigned the ending hour valid time. outtime = f'{yyyy60}{mm60}{dd60}_{hhmm60}' ihr = int(hhmm00[0:2]) #nday_utc[ihr] += 1 #print(f'\nDoing times t00 / t30 ; nday_utc / ihr_utc / itothr: ' # f'{t00} / {t30} ; {nday_utc[ihr]:>2} / {ihr:>2} / {itothr:>3}') rowdata = ships_df_1h.loc[t00] clat = rowdata.loc['Lat (deg N)'] clon = rowdata.loc['Lon (deg E)'] maxwind = rowdata.loc['Max Wind (kt)'] shear_mag = rowdata.loc['Shear Mag (kt)'] shear_head = rowdata.loc['Shear Head (deg)'] umet = rowdata.loc['U-Met (kt)'] vmet = rowdata.loc['V-Met (kt)'] rh700_500 = rowdata.loc['RH 700-500 mb'] # MSLP, RH850-700, and RH500-300 not available in RT SHIPS files. if shipstype == 'real-time': mslp = maskval rh850_700 = maskval rh500_300 = maskval else: mslp = rowdata.loc['MSLP (hPa)'] rh850_700 = rowdata.loc['RH 850-700 mb'] rh500_300 = rowdata.loc['RH 500-300 mb'] print(f'\n{t00}: lat/lon/slp/mxWnd/shrMg/shrHd/u/v/RH85-7/7-5/5-3: ' f'{clat:6.2f} {clon:7.2f} {mslp:6.1f} {maxwind:5.1f} ' f'{shear_mag:5.1f} {shear_head:5.1f} {umet:5.1f} {vmet:5.1f}' f'{rh850_700:6.1f} {rh700_500:5.1f} {rh500_300:5.1f}') lonmin = clon - deltalatlon lonmax = clon + deltalatlon latmin = clat - deltalatlon latmax = clat + deltalatlon geo_extent = [lonmin, lonmax, latmin, latmax] print_mesg(logging, 1, f'\nCenter lat/lon: {clat} / {clon}') print_mesg(logging, 1, f'Plot Lons: {lonmin} {lonmax}; Lats: {latmin} ' f'{latmax}') # J.Case (6/29/2020) -- Estimate sidereal time (local solar) # from input longitude in degrees (- for West lon) t00_loc, yyyy00_loc, mm00_loc, mon00_loc, dd00_loc, hhmm00_loc = \ mrg.get_times(yyyymmddhhmm, 4.0 * clon) print('time / time_local: ', t00, t00_loc) utctimes[itothr] = t00 loctimes[itothr] = t00_loc if itothr == 0: 
stime_loc = dt.datetime.strptime(t00_loc, '%Y-%m-%d %H:%M:%S') # Compute end date/time, so the last time will give values we want. etime_loc = dt.datetime.strptime(t00_loc, '%Y-%m-%d %H:%M:%S') ihr_loc = int(round((float(etime_loc.strftime('%H')) + float(etime_loc.strftime('%M')) / 60.0 + float(etime_loc.strftime('%S')) / 3600.0), 0)) if ihr_loc >= 24: ihr_loc = ihr_loc - 24 #nday_loc[ihr_loc] += 1 #print('stime_loc / etime_loc / ihr_loc / nday_loc: ', # stime_loc, etime_loc, ihr_loc, nday_loc[ihr_loc]) # Read in arrays and attributes print_mesg(logging, 1, '\n**Reading IMERG data arrays**') gprec1h, lons, lats, units, _FillValue = mrg.get_imerg(t00, t30) # Convert gprec1h to inches if specified in settings. if precunit == 'inches': gprec1h = gprec1h / 25.4 # J.Case (10/7/2021) -- Introduce grid subset centered on TC clat/clon jbeg = ibeg = 0 jend = glats iend = glons if dosubset: jtc, itc = get_storm_index(lats, lons, clat, clon) jbeg = jtc - int(subsize/2) jend = jtc + int(subsize/2) ibeg = itc - int(subsize/2) iend = itc + int(subsize/2) print('nlats/jbeg/jend/jsubset: ', nlats, jbeg, jend, jend - jbeg) print('nlons/ibeg/iend/isubset: ', nlons, ibeg, iend, iend - ibeg) prec1h = gprec1h[jbeg:jend, ibeg:iend] print('array dims of gprec1h/prec1h: ', gprec1h.shape, prec1h.shape) if maskland: imerg_mask = imerg_global_mask[jbeg:jend, ibeg:iend] print('array dims of gmask/mask: ', imerg_global_mask.shape, imerg_mask.shape) prec1h = np.where(imerg_mask >= maskperc, prec1h, np.float32('nan')) # lat2d and lon2d arrays needed for quadrant analysis. 
print('\n**Preparing IMERG LAT / LON 2D arrays**\n') lon2d, lat2d = np.meshgrid(lons[ibeg:iend], lats[jbeg:jend], sparse=False, indexing='xy') # Optionally plot prec1h field if plotprec: pngFile = f'{imgdir}{ptypestr}-{stormid}-1h_{outtime}z.png' title = f'{ptypelab} 1-h precip ({precunit}) ending {vtimestr}' phov.plot_png(geo_extent, proj, lon2d, lat2d, prec1h, pngFile, title, clevs, rads, pdir) # Compute dist & heading from center lon/lat at each imerg grid point. print_mesg(logging, 1, '\nComputing dist and head from TC ctr lon/lat') start_time = time.time() # Convert clon/clat to radians, then get distances at all IMERG points rclon = np.deg2rad(clon) rclat = np.deg2rad(clat) rlon2d = np.deg2rad(lon2d) rlat2d = np.deg2rad(lat2d) dlon = rlon2d - rclon dlat = rlat2d - rclat simple = True # Simple / fast distance method if simple: aa = np.sin(dlat/2)**2 + \ np.cos(rclat)*np.cos(rlat2d)*np.sin(dlon/2)**2 cc = 2 * np.arctan2(np.sqrt(aa), np.sqrt(1 - aa)) dist2d = earthR * cc # More exact / slower distance method else: g = Geod(ellps='WGS84') az, azb, dist2d = g.inv(lon2d, lat2d, np.full_like(lon2d, clon), np.full_like(lat2d, clat), radians=False) dist2d = 0.001 * dist2d # Convert m to km az[az < 0.0] += 360.0 # Convert neg azimuths to positive # Compute bearing at all IMERG pts (TC clat/lon relative to true north) xx = np.cos(rlat2d) * np.sin(dlon) yy = np.cos(rclat) * np.sin(rlat2d) - \ np.sin(rclat) * np.cos(rlat2d) * np.cos(dlon) bearing2d = np.arctan2(xx, yy) bearing2d = (np.rad2deg(bearing2d) + 360) % 360 beardiff = shear_head - bearing2d beardiff[beardiff < 0] += 360 # Convert neg beardiff to positive dtime = ffp(time.time() - start_time, precision=4) print_mesg(logging, 1, f'Time to compute dist and head: {dtime} sec') print('\nPerforming radial binning of prec1h:') print_mesg(logging, 1, ' --Doing radial range {0:-3d} to {1:-3d} km ' .format(0, rads[0])) # Process rads[0] first, then loop through rest of radial ranges. 
# J.Case (9/28/2021) -- moved masking calculations into Getimerg class. p1hbin, p1hbin_quad, p1hbin_qshr, quad2d = \ mrg.azimuth_avg(prec1h, nlats, nlons, lat2d, lon2d, dist2d, beardiff, clat, clon, 0, rads[0]) print_mesg(logging, 2, f'\nDISTANCE ARRAY (km): \n {dist2d}') print_mesg(logging, 2, f'\nBEARING (deg from N): \n {bearing2d}') print_mesg(logging, 2, f'\nBEARDIFF (shrhd-bear): \n {beardiff}') print_mesg(logging, 2, f'\nQUAD2D (shrquad 0-3): \n {quad2d}') # Optionally plot dist2d, bearing2d, & quad2d; scp to remote server. if plotdist: pngFile = f'{imgdir}distance-{stormid}_{outtime}z.png' # title = f'Distance from {tcname} center (km) at time {vtimestr}' title = f'Distance from {stormid} center (km) at time {vtimestr}' distlevs = rads phov.plot_png(geo_extent, proj, lon2d, lat2d, dist2d, pngFile, title, distlevs, rads, pdir) # pngFile = f'{imgdir}bearing-{stormid}_{outtime}z.png' # # title = f'Bearing from {tcname} center (deg) at time {vtimestr}' # title = f'Bearing from {stormid} center (deg) at time {vtimestr}' # distlevs = np.arange(-30, 390, 30) # phov.plot_png(geo_extent, proj, lon2d, lat2d, bearing2d, pngFile, # title, distlevs, rads, pdir) pngFile = f'{imgdir}bearingdiff-{stormid}_{outtime}z.png' title = f'ShearHead - Bearing (deg) at time {vtimestr}' distlevs = np.arange(-45, 405, 45) phov.plot_png(geo_extent, proj, lon2d, lat2d, beardiff, pngFile, title, distlevs, rads, pdir) pngFile = f'{imgdir}quad2d-{stormid}_{outtime}z.png' title = f'Shear quad (0=LD; 1=RD; 2=LU; 3=RU) at time {vtimestr}' distlevs = np.arange(-0.5, 4.5, 1.0) phov.plot_png(geo_extent, proj, lon2d, lat2d, quad2d, pngFile, title, distlevs, rads, pdir) if maskland: pngFile = f'{imgdir}landseamask-{stormid}_{outtime}z.png' title = f'IMERG landsea mask at time {vtimestr}' distlevs = np.arange(10, 110, 10) phov.plot_png(geo_extent, proj, lon2d, lat2d, imerg_mask, pngFile, title, distlevs, rads, pdir) # For output PKL files prec1h_out[itothr, :, :] = prec1h lon2d_out[itothr, :, :] = 
lon2d lat2d_out[itothr, :, :] = lat2d dist2d_out[itothr, :, :] = dist2d quad2d_out[itothr, :, :] = quad2d # J.Case (9/29/2022) -- correct so that diurnal mean is summed only # when the radial mean precip is not 'nan'. mean[itothr][0] = ffp(np.nanmean(p1hbin), precision=3) if not np.isnan(mean[itothr][0]): dmean[ihr][0] += mean[itothr][0] dmean_loc[ihr_loc][0] += mean[itothr][0] nday_utc[ihr][0] += 1 nday_loc[ihr_loc][0] += 1 print(f'\nt00 / t30 ; itothr / ihr_utc / nday_utc: ' f'{t00} / {t30} ; {itothr:>3} / {ihr:>2} / {nday_utc[ihr][0]:>2}') print('stime_loc / etime_loc ; itothr / ihr_loc / nday_loc: ', f'{stime_loc} / {etime_loc} ; {itothr:>3} / {ihr_loc:>2} /' f' {nday_loc[ihr_loc][0]:>2}') #stime_loc, etime_loc, ihr_loc, nday_loc[ihr_loc]) meanvalstr[itothr][0] = '{0:6.3f}'.format(mean[itothr][0]) radsstr[0] = '{0:3d}'.format(rads[0]) for ii in range(0, 4): mean_quad[ii][itothr][0] = ffp(np.nanmean(p1hbin_quad[ii][:][:]), precision=3) if not np.isnan(mean_quad[ii][itothr][0]): dmean_quad[ii][ihr][0] += mean_quad[ii][itothr][0] dmean_loc_quad[ii][ihr_loc][0] += mean_quad[ii][itothr][0] nday_utc_quad[ii][ihr][0] += 1 nday_loc_quad[ii][ihr_loc][0] += 1 meanvalstr_quad[ii][itothr][0] = \ '{0:6.3f}'.format(mean_quad[ii][itothr][0]) mean_qshr[ii][itothr][0] = ffp(np.nanmean(p1hbin_qshr[ii][:][:]), precision=3) if not np.isnan(mean_qshr[ii][itothr][0]): dmean_qshr[ii][ihr][0] += mean_qshr[ii][itothr][0] dmean_loc_qshr[ii][ihr_loc][0] += mean_qshr[ii][itothr][0] nday_utc_qshr[ii][ihr][0] += 1 nday_loc_qshr[ii][ihr_loc][0] += 1 meanvalstr_qshr[ii][itothr][0] = \ '{0:6.3f}'.format(mean_quad[ii][itothr][0]) # Optionally plot radially-masked precip and quad-masked precip if plotpbin: pngFile = f'{imgdir}{ptypestr}-{stormid}-1h-p{str(rads[0])}bin' \ f'_{outtime}z.png' title = f'{ptypelab} 0-{str(rads[0])}km 1h precip ({precunit}) ' \ f'ending {vtimestr}' phov.plot_png(geo_extent, proj, lon2d, lat2d, p1hbin, pngFile, title, clevs, rads, pdir) if plot_quad: for ii in range(0, 
# NOTE(review): this region of the script was whitespace-mangled; it has been
# reconstructed with conventional formatting.  It is the tail of the per-hour
# processing loop (the surrounding loop header lies above this chunk — the
# leading `for ii in range(0, 4):` is restored from its later duplicate),
# followed by the post-loop hovmoller plotting and aggregation sections.

# Plot the innermost-bin (0 to rads[0] km) compass-quadrant and
# shear-relative-quadrant masked precip fields, one PNG per quadrant.
for ii in range(0, 4):
    p1hbin = p1hbin_quad[ii][:][:]
    pngFile = f'{imgdir}{ptypestr}-{quad[ii]}-{stormid}' \
              f'-1h-p{str(rads[0])}bin_{outtime}z.png'
    title = f'{ptypelab} {quad[ii].upper()} ' \
            f'0-{str(rads[0])}km 1h pcp ({precunit}) ' \
            f'ending {vtimestr}'
    phov.plot_png(geo_extent, proj, lon2d, lat2d, p1hbin, pngFile,
                  title, clevs, rads, pdir)
    # Shear-relative (down/up-shear left/right) counterpart for this quadrant.
    p1hbin = p1hbin_qshr[ii][:][:]
    pngFile = f'{imgdir}{ptypestr}-{qShrLab[quad[ii]]}-' \
              f'{stormid}-1h-p{str(rads[0])}bin_{outtime}z.png'
    title = f'{ptypelab} {qShrLab[quad[ii]]} ' \
            f'0-{str(rads[0])}km 1-h pcp ({precunit}) ' \
            f'ending {vtimestr}'
    phov.plot_png(geo_extent, proj, lon2d, lat2d, p1hbin, pngFile,
                  title, clevs, rads, pdir)

# Now loop through rest of radial ranges (annulus rads[rr-1]..rads[rr] km),
# accumulating azimuthal-mean precip per hour plus running diurnal sums
# and sample counts in both UTC (ihr) and local-time (ihr_loc) bins.
for rr in range(1, nrads):
    print_mesg(logging, 1, ' --Doing radial range {0:-3d} to {1:-3d} '
               'km '.format(rads[rr-1], rads[rr]))

    # Obtain azimuthally and quadrant-masked precip arrays
    p1hbin, p1hbin_quad, p1hbin_qshr, quad2d = \
        mrg.azimuth_avg(prec1h, nlats, nlons, lat2d, lon2d, dist2d,
                        beardiff, clat, clon, rads[rr-1], rads[rr])

    # All-azimuth mean for this annulus/hour; only finite values contribute
    # to the diurnal accumulators and their sample counts.
    mean[itothr][rr] = ffp(np.nanmean(p1hbin), precision=3)
    if not np.isnan(mean[itothr][rr]):
        dmean[ihr][rr] += mean[itothr][rr]
        dmean_loc[ihr_loc][rr] += mean[itothr][rr]
        nday_utc[ihr][rr] += 1
        nday_loc[ihr_loc][rr] += 1
    meanvalstr[itothr][rr] = '{0:6.3f}'.format(mean[itothr][rr])
    radsstr[rr] = '{0:3d}'.format(rads[rr])

    # Same bookkeeping per compass quadrant and per shear quadrant.
    for ii in range(0, 4):
        mean_quad[ii][itothr][rr] = \
            ffp(np.nanmean(p1hbin_quad[ii][:][:]), precision=3)
        if not np.isnan(mean_quad[ii][itothr][rr]):
            dmean_quad[ii][ihr][rr] += mean_quad[ii][itothr][rr]
            dmean_loc_quad[ii][ihr_loc][rr] += mean_quad[ii][itothr][rr]
            nday_utc_quad[ii][ihr][rr] += 1
            nday_loc_quad[ii][ihr_loc][rr] += 1
        meanvalstr_quad[ii][itothr][rr] = \
            '{0:6.3f}'.format(mean_quad[ii][itothr][rr])

        mean_qshr[ii][itothr][rr] = \
            ffp(np.nanmean(p1hbin_qshr[ii][:][:]), precision=3)
        if not np.isnan(mean_qshr[ii][itothr][rr]):
            dmean_qshr[ii][ihr][rr] += mean_qshr[ii][itothr][rr]
            dmean_loc_qshr[ii][ihr_loc][rr] += mean_qshr[ii][itothr][rr]
            nday_utc_qshr[ii][ihr][rr] += 1
            nday_loc_qshr[ii][ihr_loc][rr] += 1
        meanvalstr_qshr[ii][itothr][rr] = \
            '{0:6.3f}'.format(mean_qshr[ii][itothr][rr])

    # Optionally plot radially-masked precip and quad-masked precip
    if plotpbin:
        pngFile = f'{imgdir}{ptypestr}-{stormid}-1h-p{str(rads[rr])}' \
                  f'bin_{outtime}z.png'
        title = f'{ptypelab} {str(rads[rr - 1])}-{str(rads[rr])}' \
                f'km 1h precip ({precunit}) ending {vtimestr}'
        phov.plot_png(geo_extent, proj, lon2d, lat2d, p1hbin,
                      pngFile, title, clevs, rads, pdir)
        if plot_quad:
            for ii in range(0, 4):
                p1hbin = p1hbin_quad[ii][:][:]
                pngFile = f'{imgdir}{ptypestr}-{quad[ii]}-{stormid}' \
                          f'-1h-p{str(rads[rr])}bin_{outtime}z.png'
                title = f'{ptypelab} {quad[ii].upper()} ' \
                        f'{str(rads[rr-1])}-{str(rads[rr])}km ' \
                        f'1h pcp ({precunit}) ending {vtimestr}'
                phov.plot_png(geo_extent, proj, lon2d, lat2d, p1hbin,
                              pngFile, title, clevs, rads, pdir)
                p1hbin = p1hbin_qshr[ii][:][:]
                pngFile = f'{imgdir}{ptypestr}-{qShrLab[quad[ii]]}' \
                          f'-{stormid}-1h-p{str(rads[rr])}bin_' \
                          f'{outtime}z.png'
                title = f'{ptypelab} {qShrLab[quad[ii]]} ' \
                        f'{str(rads[rr-1])}-{str(rads[rr])}km ' \
                        f'1h pcp ({precunit}) ending {vtimestr}'
                phov.plot_png(geo_extent, proj, lon2d, lat2d, p1hbin,
                              pngFile, title, clevs, rads, pdir)

# Summarize this hour's radial means (all-azimuth, compass quadrants,
# then shear quadrants).  Elements of radsstr/meanvalstr* are already
# formatted strings, so join them directly.
print('\n --Radial precip means at itothr = ', itothr)
print(' Radii ( km ): ', ', '.join(radsstr[0:nrads]))
print(' Means (', precunit, '): ', ','.join(meanvalstr[itothr][0:nrads]))
print(' NW Quad Means (', precunit, '): ',
      ','.join(meanvalstr_quad[0][itothr][0:nrads]))
print(' NE Quad Means (', precunit, '): ',
      ','.join(meanvalstr_quad[1][itothr][0:nrads]))
print(' SW Quad Means (', precunit, '): ',
      ','.join(meanvalstr_quad[2][itothr][0:nrads]))
print(' SE Quad Means (', precunit, '): ',
      ','.join(meanvalstr_quad[3][itothr][0:nrads]))
print(' Dnshr-Lft Means (', precunit, '): ',
      ','.join(meanvalstr_qshr[0][itothr][0:nrads]))
print(' Dnshr-Rgt Means (', precunit, '): ',
      ','.join(meanvalstr_qshr[1][itothr][0:nrads]))
print(' Upshr-Lft Means (', precunit, '): ',
      ','.join(meanvalstr_qshr[2][itothr][0:nrads]))
print(' Upshr-Rgt Means (', precunit, '): ',
      ','.join(meanvalstr_qshr[3][itothr][0:nrads]))

# Move on to next hour by incrementing time.
yyyymmddhhmm = yyyy60 + mm60 + dd60 + hhmm60
itothr += 1
yyyymmddhh = yyyymmddhhmm[0:10]

#############################################
# Make and plot the multi-day hovmoller plots
#############################################
datestr = 's' + syyyymmddhh + '-e' + eyyyymmddhh
rrstr = str(rads[0]) + '-' + str(rads[nrads - 1]) + 'km'
shr_mag_ar = ships_df_1h['Shear Mag (kt)'].to_numpy()
shr_head_ar = ships_df_1h['Shear Head (deg)'].to_numpy()

if plot_multi_day:
    print(f'\nMaking multi-day hovmoller plot for {stormid} from '
          f'{syyyymmddhh} to {eyyyymmddhh} \n')
    title = f'{ptypelab} Hovmoller: {stormid} Hrly Precip ({precunit}) ' \
            f'by Radial Bin'
    pngFile = f'{imgdir}{ptypestr}-hov-{stormid}-multiday_' \
              f'{datestr}_{rrstr}_{ptime}.png'
    pngFile_quad = f'{imgdir}{ptypestr}-hov-quad-{stormid}-multiday_' \
                   f'{datestr}_{rrstr}_{ptime}.png'
    pngFile_qshr = f'{imgdir}{ptypestr}-hov-quadshr-{stormid}' \
                   f'-multiday_{datestr}_{rrstr}_{ptime}.png'

    if ptime == 'utc':
        # UTC time convention: one y-axis tick per hour across the run.
        ytimes = mdate.drange(
            dt.datetime(int(syyyy), int(smm), int(sdd), int(shh), 0, 0),
            dt.datetime(int(eyyyy), int(emm), int(edd), int(ehh), 0, 0),
            dt.timedelta(hours=1))
    else:
        # Local solar/sidereal times: divide the local-time span evenly
        # across the itothr processed hours; the 1.0E-7 epsilon guards the
        # drange endpoint count against float round-off.
        hourDiff = (etime_loc - stime_loc).total_seconds() / 3600.0
        deltaHour = hourDiff / float(itothr) + 1.0E-7
        print('hourDiff / deltaHour = ', hourDiff,
              ffp(deltaHour, precision=3))
        ytimes = mdate.drange(stime_loc, etime_loc,
                              dt.timedelta(hours=deltaHour))
    print_mesg(logging, 2, f'\nMulti-day ytimes:\n{mdate.num2date(ytimes)}')

    phov.plot_hovship_sgl(ytimes, mean, shr_mag_ar, shr_head_ar, pngFile,
                          title, clevs_multi, 'multiday', hdir)
    if plot_quad:
        phov.plot_hovship_quad(ytimes, mean_quad, shr_mag_ar, shr_head_ar,
                               pngFile_quad, title, clevs_multi,
                               'multiday', hdir)
        phov.plot_hovship_quad(ytimes, mean_qshr, shr_mag_ar, shr_head_ar,
                               pngFile_qshr, title, clevs_multi,
                               'multiday', hdir, shr=True)

# Make diurnal mean hovmoller plot, averaged by hour across all days.
if plot_diurnal:
    print(f'\nMake diurnal mean hovmoller plot for {stormid} from '
          f'{syyyymmddhh} to {eyyyymmddhh} \n')
    title = f'{ptypelab} Diurnal Mean Hovmoller: {stormid} \n Hrly Precip' \
            f' ({precunit}) by Radial Bin valid {syyyymmddhh} to' \
            f' {eyyyymmddhh}'
    pngFile = f'{imgdir}{ptypestr}-hov-{stormid}' \
              f'-diurnalmean_{datestr}_{rrstr}_{ptime}.png'
    pngFile_quad = f'{imgdir}{ptypestr}-hov-quad-{stormid}' \
                   f'-diurnalmean_{datestr}_{rrstr}_{ptime}.png'
    pngFile_qshr = f'{imgdir}{ptypestr}-hov-quadshr-{stormid}' \
                   f'-diurnalmean_{datestr}_{rrstr}_{ptime}.png'

    # 24 hourly ticks spanning one (end-date) calendar day.
    ytimes = mdate.drange(
        dt.datetime(int(eyyyy), int(emm), int(edd), 0, 0, 0),
        dt.datetime(int(eyyyyp1), int(emmp1), int(eddp1), 0, 0, 0),
        dt.timedelta(hours=1))

    # Diagnostics: per-local-hour shear-quadrant sample counts then sums.
    for ihh in range(0, 24):
        for irng in range(0, nrads):
            print(f'LOC_HR {ihh} RNG {irng} nday_qshr0/1/2/3: '
                  f'{nday_loc_qshr[0][ihh][irng]} '
                  f'{nday_loc_qshr[1][ihh][irng]} '
                  f'{nday_loc_qshr[2][ihh][irng]} '
                  f'{nday_loc_qshr[3][ihh][irng]}')
    for ihh in range(0, 24):
        for irng in range(0, nrads):
            print(f'LOC_HR {ihh} RNG {irng} sum_qshr0/1/2/3: '
                  f'{dmean_loc_qshr[0][ihh][irng]} '
                  f'{dmean_loc_qshr[1][ihh][irng]} '
                  f'{dmean_loc_qshr[2][ihh][irng]} '
                  f'{dmean_loc_qshr[3][ihh][irng]}')

    # Convert accumulated sums/counts into diurnal means using the
    # accumulators matching the chosen time convention.
    if ptime == 'utc':
        array_a, array_q, array_qs = \
            get_dmean(nday_utc, nday_utc_quad, nday_utc_qshr, nrads,
                      dmean, dmean_quad, dmean_qshr, logging)
    else:
        array_a, array_q, array_qs = \
            get_dmean(nday_loc, nday_loc_quad, nday_loc_qshr, nrads,
                      dmean_loc, dmean_loc_quad, dmean_loc_qshr, logging)

    phov.plot_hov_single(ytimes, array_a, pngFile, title, clevs_diurnal,
                         'diurnal', hdir)
    if plot_quad:
        phov.plot_hov_quad(ytimes, array_q, pngFile_quad, title,
                           clevs_diurnal, 'diurnal', hdir)
        phov.plot_hov_quad(ytimes, array_qs, pngFile_qshr, title,
                           clevs_diurnal, 'diurnal', hdir, shr=True)

#########################################
# Store aggregated stats into pickle file
#########################################
if doaggreg and shipstype != 'real-time':
    outdir = '/raid1/sport/data/IMERG/PKL'

    # drop last row of ships_df_1h, add loctimes, and re-order columns.
    output_df = ships_df_1h.iloc[:-1].reset_index()
    output_df['Localtimes'] = loctimes
    cols = ['Timestamp', 'Localtimes', 'Lat (deg N)', 'Lon (deg E)',
            'Max Wind (kt)', 'Shear Mag (kt)', 'Shear Head (deg)',
            'MSLP (hPa)', 'RH 850-700 mb', 'RH 700-500 mb',
            'RH 500-300 mb']
    output_df = output_df[cols]
    print_mesg(logging, 2, f'\nRe-ordered output_df with Localtimes added:'
                           f'\n {output_df}')

    # Build one row per processed hour with all derived fields, then
    # concatenate once.  (DataFrame.append was deprecated in pandas 1.4
    # and removed in 2.0; per-iteration append was also O(n^2).)
    hour_frames = []
    for ihh in range(0, itothr):
        tempdf = pd.DataFrame(
            [[utctimes[ihh], mean[ihh, :], mean_quad[0, ihh, :],
              mean_quad[1, ihh, :], mean_quad[2, ihh, :],
              mean_quad[3, ihh, :], mean_qshr[0, ihh, :],
              mean_qshr[1, ihh, :], mean_qshr[2, ihh, :],
              mean_qshr[3, ihh, :], prec1h_out[ihh, :, :],
              lon2d_out[ihh, :, :], lat2d_out[ihh, :, :],
              dist2d_out[ihh, :, :], quad2d_out[ihh, :, :]]],
            columns=['Timestamp', 'azavg', 'azavg_nw', 'azavg_ne',
                     'azavg_sw', 'azavg_se', 'azavg_dsleft',
                     'azavg_dsright', 'azavg_usleft', 'azavg_usright',
                     'prec1h_imerg', 'lon2d_imerg', 'lat2d_imerg',
                     'dist2d_imerg', 'quad2d_imerg'])
        hour_frames.append(tempdf)
    vars_df = pd.concat(hour_frames)

    print_mesg(logging, 2, f'\nvars_df before reset index: \n {vars_df}')
    vars_df = vars_df.reset_index()
    vars_df.drop(columns=['index'], inplace=True)
    print_mesg(logging, 2, f'\nvars_df after reset index: \n {vars_df}')

    # Merge SHIPS columns with derived columns; drop any duplicated
    # column names, then index the final frame by timestamp.
    output_df = pd.concat([output_df, vars_df], axis=1)
    output_df = output_df.loc[:, ~output_df.columns.duplicated()]
    output_df.set_index('Timestamp', inplace=True)
    print_mesg(logging, 2, f'\nFinal DF to output: \n {output_df}')

    # Write out data to pkl file by calling AggregateIMERG class
    # (constructor performs the write as a side effect).
    ostats = agg.AggregateIMERG(prectype, outdir, stormid, output_df)

time_main = ffp(time.time() - start_main, precision=3)
print(f'\n**Time to complete TC processing: {time_main} seconds**\n')
sys.exit()