# -*- coding: utf-8 -*-
#--------------------------------------------------------------------#
# Snow Load Index
# Filename: snow_load.py
# Description: Generates snow rates and 'leaf off' snow load amounts
# Authors: Nathan Foster/Mike Muccilli/Josh Kastman
# Change Log:
# Established Functions - Kastman 2019-03-19
# Updated for rolling 24 hr output - Kastman 2021-6-28
#--------------------------------------------------------------------#

# imports
import arcpy, os, shutil, traceback, time, sys
from time import gmtime, strftime
from arcpy import env
import logging
from shutil import copyfile
import glob
import wssi_config as cfg
import wssi_general_functions as gfunc

# set up log file
log_stamp = strftime("%Y%m%d%H", gmtime())
logging.basicConfig(filename=cfg.log_dir+'/wssi_log_'+str(log_stamp)+'_SnLd.log', level=logging.DEBUG,
                    format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')

# custom exception raised when the Spatial Analyst license cannot be checked out
class LicenseError(Exception):
    pass

def snow_load():
    sn_ld_start = strftime("%a, %d %b %Y %H:%M:%S", gmtime())
    try:
        if arcpy.CheckExtension("Spatial") == "Available":
            arcpy.CheckOutExtension("Spatial")
        else:
            # Raise a custom exception
            raise LicenseError
    except LicenseError:
        print("Spatial Analyst license is unavailable")

    #--------------#
    # Arc Settings #
    #--------------#
    arcpy.env.overwriteOutput = True
    arcpy.SetLogHistory(False)

    # NDFD Lambert Conformal Conic projection applied to every output grid
    ndfd_prj = "PROJCS['NDFD',GEOGCS['GCS_Unknown',DATUM['D_unknown',SPHEROID['Sphere',6371000.0,0.0]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Lambert_Conformal_Conic'],PARAMETER['false_easting',0.0],PARAMETER['false_northing',0.0],PARAMETER['central_meridian',-95.0],PARAMETER['standard_parallel_1',25.0],PARAMETER['standard_parallel_2',25.0],PARAMETER['latitude_of_origin',25.0],UNIT['Meter',1.0]]"

    #-----------------#
    # Set local lists #
    #-----------------#
    file = cfg.work_dir+'precp_num_list.txt'
    data = open(file, 'r')
    precip_num_text = data.readline()
    precip_num_list = precip_num_text.split()
    data.close()

    file = cfg.work_dir+'precip_hrs.txt'
    data = open(file, 'r')
    precip_hrs_txt = data.readline()
    precip_hrs = precip_hrs_txt.split()
    data.close()

    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # DAY 4 TEMPORARILY DISABLED !
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    try:
        precip_num_list_d4 = cfg.precip_num_list_d4
        precip_d4_hrs = cfg.precip_d4_hrs
        rdict = cfg.rdict
        rdict_d4 = cfg.rdict_d4
    except:
        print(traceback.format_exc())
        print("DAY 4 ERROR")
        logging.error(traceback.format_exc())
        logging.error("DAY 4 ERROR")
        pass

    all_hrs = []
    for hr in precip_hrs:
        all_hrs.append(hr)
    for d4hr in precip_d4_hrs:
        all_hrs.append(d4hr)

    # rolling 24 hr windows: each entry is assumed to hold four consecutive 6-hr
    # period labels (forecast hours for rlist_6hr, period numbers for rlist_nums)
    rlist_6hr = gfunc.rolling_periods(fp=precip_hrs[-1], rHR=4, step=6, hr_list=precip_hrs)
    rlist_nums = gfunc.rolling_periods(fp=precip_num_list[-1], rHR=4, step=6, hr_list=precip_num_list)

    #--------------------------------#
    # Total Snowfall for Full Period #
    #--------------------------------#
    tot_precip_list = []
    tot_precip_d4_list = []
    for s in precip_num_list:
        tot_precip_list.append(cfg.grb_dir+"snow"+str(s)+".flt")
        try:
            tot_precip_d4_list.append(cfg.grb_dir+"snow"+str(s)+".flt")
        except:
            print(traceback.format_exc())
            print("DAY 4 ERROR")
            logging.error(traceback.format_exc())
            logging.error("DAY 4 ERROR")
            pass
    tot_snow_join = ';'.join(tot_precip_list)
    print(tot_snow_join)

    # Days 1-3 storm total; generated outside the Day 4 try/except below so it still
    # exists when the experimental Day 4 data are unavailable
    arcpy.gp.CellStatistics_sa(tot_snow_join, cfg.work_dir+"total_snow.tif", "SUM", "DATA")

    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # DAY 4 TEMPORARILY DISABLED !
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
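    # The Day 4 total below starts from tot_precip_d4_list, which already holds the
    # Days 1-3 snowfall grids, so total_snow_thru_d4.tif is effectively the Days 1-4
    # storm total rather than Day 4 alone.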
    try:
        for s4 in precip_num_list_d4:
            tot_precip_d4_list.append(cfg.grb_dir+"wpc_d4snow"+str(s4)+".flt")
        tot_snow_d4_join = ';'.join(tot_precip_d4_list)
        #print tot_sn_list
        arcpy.gp.CellStatistics_sa(tot_snow_d4_join, cfg.work_dir+"total_snow_thru_d4.tif", "SUM", "DATA")
        #print ("total snow generated")
        logging.debug('Total Snowfall Generated through Day 3 and 4')
    except:
        print(traceback.format_exc())
        print("DAY 4 ERROR")
        logging.error(traceback.format_exc())
        logging.error("DAY 4 ERROR")
        pass

    #------------------------------------------#
    # Total Snowfall for Rolling 24 HR Periods #
    #------------------------------------------#
    for r in range(0, len(rlist_nums)):
        ravg = []
        for s in range(0, 4):
            #print cfg.grb_dir+"snow"+str(rlist_nums[r][s])+".flt"
            ravg.append(cfg.grb_dir+"snow"+str(rlist_nums[r][s])+".flt")
        ravg_join = ';'.join(ravg)
        #print (ravg_join)
        arcpy.gp.CellStatistics_sa(ravg_join, cfg.work_dir+"snow_r24h_f"+rdict[str(rlist_nums[r][-1])]+".tif", "SUM", "DATA")
        #print ('rolling 24 hr snow generated: snow_r24h_f'+rdict[str(rlist_nums[r][-1])]+'.tif')
        logging.debug('rolling 24 hr snow generated: snow_r24h_f'+rdict[str(rlist_nums[r][-1])]+'.tif')

    #----------------#
    # Day 4 Snowfall #
    #----------------#
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # DAY 4 TEMPORARILY DISABLED !
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    try:
        d4_flts = cfg.grb_dir+"wpc_d4snow1.flt;"+cfg.grb_dir+"wpc_d4snow2.flt;"+cfg.grb_dir+"wpc_d4snow3.flt;"+cfg.grb_dir+"wpc_d4snow4.flt;"
        arcpy.gp.CellStatistics_sa(d4_flts, cfg.work_dir+"snow_r24h_f96.tif", "SUM", "DATA")
        print('rolling 24 hr snow generated: '+cfg.work_dir+"snow_r24h_f96.tif")
        logging.debug('rolling 24 hr snow generated: '+cfg.work_dir+"snow_r24h_f96.tif")
    except:
        print(traceback.format_exc())
        print("DAY 4 ERROR")
        logging.error(traceback.format_exc())
        logging.error("DAY 4 ERROR")
        pass

    #-----------------------#
    # Divide Loop; Days 1-3 #
    #-----------------------#
    ##############
    # DAYs 1 - 3 #
    ##############
    ratio3_files = []
    for n in precip_num_list:
        snow = cfg.grb_dir+"snow"+str(n)+".flt"
        qpf = cfg.grb_dir+"qpf"+str(n)+".flt"
        #------------------------------------------------------------------------------#
        # Generate 6 HR SLR. This var will be used globally so this is written to disk #
        #------------------------------------------------------------------------------#
        slr = cfg.work_dir+"slr_f"+rdict[str(n)]+"hr.tif"
        rnull = cfg.in_mem+"ratio_null"+str(n)
        ratio2 = cfg.in_mem+"ratio2_"+str(n)
        ratio3 = cfg.in_mem+"ratio3_"+rdict[str(n)]
        arcpy.gp.Divide_sa(snow, qpf, slr)
        arcpy.gp.IsNull_sa(slr, rnull)
        arcpy.gp.Con_sa(rnull, slr, ratio2, "0", "\"VALUE\" = 0")
        arcpy.gp.Divide_sa("1", ratio2, ratio3)
        ratio3_files.append(ratio3)
        arcpy.Delete_management(rnull)
        arcpy.Delete_management(ratio2)
        #print "QPF & Snow Processed for "+str(n)+"; Ratio generated for: "+rdict[str(n)]
        logging.debug("QPF & Snow Processed for "+str(n)+"; Ratio generated for: "+rdict[str(n)])
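    # The slr grids above are 6-hr snow-to-liquid ratios (snow / qpf); for example,
    # 5" of snow from 0.50" of QPF gives an SLR of 10:1. The in-memory ratio3 grids
    # hold the inverse (1 / slr = qpf / snow), so a 10:1 SLR becomes 0.10; these
    # inverse ratios are what get averaged and reclassified into rate classes below.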
    # repeat process for Day 4. Sloppy, but it needs to stay separate because Day 4 is experimental
    #########
    # DAY 4 #
    #########
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # DAY 4 TEMPORARILY DISABLED !
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    try:
        ratio3_files_d4 = []
        for n in precip_num_list_d4:
            snow = cfg.grb_dir+"wpc_d4snow"+str(n)+".flt"
            qpf = cfg.grb_dir+"wpc_d4qpf"+str(n)+".flt"
            #------------------------------------------------------------------------------#
            # Generate 6 HR SLR. This var will be used globally so this is written to disk #
            #------------------------------------------------------------------------------#
            slr = cfg.work_dir+"slr_f"+rdict_d4[str(n)]+"hr.tif"
            rnull = cfg.in_mem+"D4_ratio_null"+str(n)
            ratio2 = cfg.in_mem+"D4_ratio2_"+str(n)
            ratio3_d4 = cfg.work_dir+"D4_ratio3_"+rdict_d4[str(n)]+".tif"
            arcpy.gp.Divide_sa(snow, qpf, slr)
            arcpy.gp.IsNull_sa(slr, rnull)
            arcpy.gp.Con_sa(rnull, slr, ratio2, "0", "\"VALUE\" = 0")
            arcpy.gp.Divide_sa("1", ratio2, ratio3_d4)
            ratio3_files_d4.append(ratio3_d4)
            arcpy.Delete_management(rnull)
            arcpy.Delete_management(ratio2)
            print("Day 4 QPF & Snow Processed for "+str(n)+"; Ratio generated for: "+rdict_d4[str(n)])
            logging.debug("Day 4 QPF & Snow Processed for "+str(n)+"; Ratio generated for: "+rdict_d4[str(n)])
    except:
        print(traceback.format_exc())
        print("DAY 4 ERROR")
        logging.error(traceback.format_exc())
        logging.error("DAY 4 ERROR")
        pass

    ##############
    # DAYs 1 - 3 #
    ##############
    #---------------------------#
    # Mean Rate for Full Period #
    #---------------------------#
    ratio3_join = ';'.join(ratio3_files)
    #print (ratio3_join)
    fmnr = cfg.in_mem+'full_mean_ratio'
    fmnr_n = cfg.in_mem+'full_mean_ratio_null'
    fmnr2 = cfg.in_mem+'full_mean_ratio2'
    full_mean_ratio_re = cfg.work_dir+"full_ratio_re.tif"
    arcpy.gp.CellStatistics_sa(ratio3_join, fmnr, "MEAN", "DATA")
    arcpy.gp.IsNull_sa(fmnr, fmnr_n)
    arcpy.gp.Con_sa(fmnr_n, fmnr, fmnr2, "0", "\"VALUE\" = 0")
    arcpy.gp.Reclassify_sa(fmnr2, "Value", "0 0.001 0;0.001 0.0285 1;0.0285 0.033 2;0.033 0.04 3;0.04 0.05 4;0.05 0.066 5;0.066 0.083 6;0.083 0.1 7;0.1 0.125 8;0.125 0.166 9;0.166 1 10", full_mean_ratio_re, "DATA")
    arcpy.Delete_management(fmnr)
    arcpy.Delete_management(fmnr_n)
    arcpy.Delete_management(fmnr2)
    #print "Mean Ratio found for "+str(timecheck)
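    # The remap breakpoints above are inverse SLR values: 0.0285 ~ 1/35, 0.033 ~ 1/30,
    # 0.04 = 1/25, 0.05 = 1/20, 0.066 ~ 1/15, 0.083 ~ 1/12, 0.1 = 1/10, 0.125 = 1/8
    # and 0.166 ~ 1/6. Classes 1-10 therefore run from very high mean SLRs (fluffier
    # snow, above ~35:1) down to wet, dense snow (below ~6:1), and class 0 falls out
    # where no usable ratio could be computed.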
    #########
    # DAY 4 #
    #########
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # DAY 4 TEMPORARILY DISABLED !
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    try:
        #---------------------------#
        # Mean Rate for Full Period #
        #---------------------------#
        D4_ratio3_join = ';'.join(ratio3_files_d4)
        print("D4 ratio3 join "+D4_ratio3_join)
        fmnr = cfg.in_mem+'D4_full_mean_ratio'
        fmnr_n = cfg.in_mem+'D4_full_mean_ratio_null'
        fmnr2 = cfg.in_mem+'D4_full_mean_ratio2'
        full_mean_ratio_re = cfg.work_dir+"D4_full_ratio_re.tif"
        arcpy.gp.CellStatistics_sa(D4_ratio3_join, fmnr, "MEAN", "DATA")
        arcpy.gp.IsNull_sa(fmnr, fmnr_n)
        arcpy.gp.Con_sa(fmnr_n, fmnr, fmnr2, "0", "\"VALUE\" = 0")
        arcpy.gp.Reclassify_sa(fmnr2, "Value", "0 0.001 0;0.001 0.0285 1;0.0285 0.033 2;0.033 0.04 3;0.04 0.05 4;0.05 0.066 5;0.066 0.083 6;0.083 0.1 7;0.1 0.125 8;0.125 0.166 9;0.166 1 10", full_mean_ratio_re, "DATA")
        arcpy.Delete_management(fmnr)
        arcpy.Delete_management(fmnr_n)
        arcpy.Delete_management(fmnr2)
        #print "Mean Ratio found for "+str(timecheck)
    except:
        print(traceback.format_exc())
        print("DAY 4 ERROR")
        logging.error(traceback.format_exc())
        logging.error("DAY 4 ERROR")
        pass

    ##############
    # DAYs 1 - 3 #
    ##############
    #------------------------------------------#
    # Daily Rolling - Only needed for days 1-3 #
    #------------------------------------------#
    for r in range(0, len(rlist_6hr)):
        ravg = []
        for s in range(0, 4):
            #ravg.append(in_mem+model+'_ratio3_f'+str(rlist_6hr[r][s]))
            #print cfg.in_mem+'ratio3_'+str(rlist_6hr[r][s])
            ravg.append(cfg.in_mem+'ratio3_'+str(rlist_6hr[r][s]))
        ravg_join = ';'.join(ravg)
        #print (ravg_join)
        #print (ravg[-1][-3:])
        rmean_ratio = cfg.in_mem+'rmean_ratio_f'+str(ravg[-1][-3:])
        rmean_ratio_null = cfg.in_mem+'rmean_ratio_f'+str(ravg[-1][-3:])+'_null'
        rmean_ratio2 = cfg.in_mem+'rmean_ratio2_f'+str(ravg[-1][-3:])
        rmean_ratio_rc = cfg.work_dir+'rmean_ratio_3'+str(ravg[-1][-3:])+'.tif'
        arcpy.gp.CellStatistics_sa(ravg_join, rmean_ratio, "MEAN", "DATA")
        arcpy.gp.IsNull_sa(rmean_ratio, rmean_ratio_null)
        arcpy.gp.Con_sa(rmean_ratio_null, rmean_ratio, rmean_ratio2, "0", "\"VALUE\" = 0")
        arcpy.gp.Reclassify_sa(rmean_ratio2, "Value", "0 0.001 0;0.001 0.0285 1;0.0285 0.033 2;0.033 0.04 3;0.04 0.05 4;0.05 0.066 5;0.066 0.083 6;0.083 0.1 7;0.1 0.125 8;0.125 0.166 9;0.166 1 10", rmean_ratio_rc, "DATA")
        #print rmean_ratio_rc
        arcpy.Delete_management(rmean_ratio)
        arcpy.Delete_management(rmean_ratio_null)
        arcpy.Delete_management(rmean_ratio2)
    # clean up the in-memory 6-hr ratio grids once the rolling means are built
    for r3 in ratio3_files:
        arcpy.Delete_management(r3)

    # Set Mask environment
    arcpy.env.mask = cfg.States

    ##############
    # DAYs 1 - 3 #
    ##############
    # Snow load = reclassified mean rate class x total snowfall, scaled by the
    # evergreen ('leaf off') forest grid, then reclassified into the 0-10 index
    # Process: Times total 72 hr period
    arcpy.gp.Times_sa(cfg.work_dir+"full_ratio_re.tif", cfg.work_dir+"total_snow.tif", cfg.work_dir+'full_snowload.tif')
    arcpy.DefineProjection_management(cfg.work_dir+'full_snowload.tif', ndfd_prj)
    #print 'applying evergreen forest correction'
    #logging.debug ('applying evergreen forest correction')
    arcpy.gp.Times_sa(cfg.work_dir+'full_snowload.tif', cfg.evergreen, cfg.work_dir+'full_snowload2.tif')
    arcpy.gp.Reclassify_sa(cfg.work_dir+'full_snowload2.tif', "Value", "-1 0 0;0 20 1;20 30 2;30 40 3;40 50 4;50 60 5;60 70 6;70 80 7;80 90 8;90 100 9;100 1000 10", cfg.work_dir+'full_snowload_re.tif', "NODATA")

    #########
    # DAY 4 #
    #########
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # DAY 4 TEMPORARILY DISABLED !
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    try:
        # Process: Times total period through Day 4
        arcpy.gp.Times_sa(cfg.work_dir+"D4_full_ratio_re.tif", cfg.work_dir+"total_snow_thru_d4.tif", cfg.work_dir+'d4_snld.tif')
        arcpy.DefineProjection_management(cfg.work_dir+'d4_snld.tif', ndfd_prj)
        #print 'applying evergreen forest correction'
        #logging.debug ('applying evergreen forest correction')
        arcpy.gp.Times_sa(cfg.work_dir+'d4_snld.tif', cfg.evergreen, cfg.work_dir+'d4_snld2.tif')
        arcpy.gp.Reclassify_sa(cfg.work_dir+'d4_snld2.tif', "Value", "-1 0 0;0 20 1;20 30 2;30 40 3;40 50 4;50 60 5;60 70 6;70 80 7;80 90 8;90 100 9;100 1000 10", cfg.work_dir+'d4_snowload_re.tif', "NODATA")
    except:
        print(traceback.format_exc())
        print("DAY 4 ERROR")
        logging.error(traceback.format_exc())
        logging.error("DAY 4 ERROR")
        pass

    ####################
    # DAY 1 -3 Rolling #
    ####################
    for v in precip_hrs[3:]:
        try:
            #print v
            arcpy.gp.Times_sa(cfg.work_dir+'rmean_ratio_3_'+str(v)+'.tif', cfg.work_dir+"snow_r24h_f"+v+".tif", cfg.in_mem+v[:3]+"hr_snld")
            arcpy.DefineProjection_management(cfg.in_mem+v[:3]+"hr_snld", ndfd_prj)
            arcpy.gp.Times_sa(cfg.in_mem+v[:3]+"hr_snld", cfg.evergreen, cfg.in_mem+v[:3]+"hr_snld2")
            arcpy.gp.Reclassify_sa(cfg.in_mem+v[:3]+"hr_snld2", "Value", "-1 0 0;0 20 1;20 30 2;30 40 3;40 50 4;50 60 5;60 70 6;70 80 7;80 90 8;90 100 9;100 1000 10", cfg.work_dir+"v"+v[:3]+"hr_snowload_re.tif", "NODATA")
            arcpy.Delete_management(cfg.in_mem+v[:3]+"hr_snld")
            print("generated snow load: "+cfg.work_dir+"v"+v[:3]+"hr_snowload_re.tif")
            logging.debug("generated snow load: "+cfg.work_dir+"v"+v[:3]+"hr_snowload_re.tif")
        except Exception as e:
            print(traceback.format_exc())
            #logging.error(traceback.format_exc())

    #########
    # Day 4 #
    #########
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # DAY 4 TEMPORARILY DISABLED !
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # Unlike days 1-3, Day 4 uses the single 6-hr ratio written for f96
    # (D4_ratio3_96.tif) rather than a rolling 24-hr mean ratio
    try:
        arcpy.gp.Times_sa(cfg.work_dir+"D4_ratio3_96.tif", cfg.work_dir+"snow_r24h_f96.tif", cfg.in_mem+"96hr_snld")
        arcpy.DefineProjection_management(cfg.in_mem+"96hr_snld", ndfd_prj)
        arcpy.gp.Times_sa(cfg.in_mem+"96hr_snld", cfg.evergreen, cfg.in_mem+"96hr_snld2")
        arcpy.gp.Reclassify_sa(cfg.in_mem+"96hr_snld2", "Value", "-1 0 0;0 20 1;20 30 2;30 40 3;40 50 4;50 60 5;60 70 6;70 80 7;80 90 8;90 100 9;100 1000 10", cfg.work_dir+"v96hr_snowload_re.tif", "NODATA")
        arcpy.Delete_management(cfg.in_mem)
    except:
        print(traceback.format_exc())
        print("DAY 4 ERROR")
        logging.error(traceback.format_exc())
        logging.error("DAY 4 ERROR")
        pass

    #------------------------------------------------------------------#
    # Copy full period and daily output to components output folder    #
    #------------------------------------------------------------------#
    os.chdir(cfg.work_dir)
    if precip_num_list[-1] == str(11):
        d3 = "v66hr_snowload_re.tif"
    else:
        d3 = "v72hr_snowload_re.tif"
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    # DAY 4 TEMPORARILY DISABLED !
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    infiles = ['full_snowload_re.tif', "v24hr_snowload_re.tif", "v48hr_snowload_re.tif", d3, "v96hr_snowload_re.tif"]
    outfiles = ['sn_ld_fp.tif', 'sn_ld_d1.tif', 'sn_ld_d2.tif', 'sn_ld_d3.tif', 'sn_ld_d4.tif']
    for i, o in zip(infiles, outfiles):
        try:
            copyfile(cfg.work_dir+i, cfg.comp_out+o)
        except:
            print(traceback.format_exc())
            logging.error(traceback.format_exc())
            pass

    rolling_web_list = []
    for file in glob.glob("*hr_snowload_re.tif"):
        rolling_web_list.append(file)
    #print rolling_web_list
    # format web_CONUS_wsii_f24.png
    for f in rolling_web_list:
        rf = 'r24h_SnowLoad_f'+f[1:3]+'.tif'
        copyfile(cfg.work_dir+f, cfg.roll_out+rf)

    print('Finished Snow Load Index')
    logging.debug('Finished Snow Load Index')
    #arcpy.Delete_management(cfg.in_mem)
    arcpy.CheckInExtension('Spatial')
    sn_ld_end = strftime("%a, %d %b %Y %H:%M:%S", gmtime())
    logging.debug("WSSI Snow Load Start Time "+sn_ld_start)
    logging.debug("WSSI Snow Load End Time "+sn_ld_end)

########################################################################
# execute #
snow_load()
logging.shutdown()