
Commit cf3a0cf

fmk - update to GIS-specified event mapping: use workflowInput to process multiple raster files in an event, and process raster files with multiple layers of information
1 parent db235b4 commit cf3a0cf

File tree

2 files changed: +116, -21 lines


modules/performRegionalMapping/GISSpecifiedEvents/GISSpecifiedEvent.py

Lines changed: 36 additions & 3 deletions
@@ -37,6 +37,8 @@
 # Stevan Gavrilovic
 #
 
+import os
+import json
 import argparse  # noqa: I001
 from pathlib import Path
 import xml.etree.ElementTree as ET
@@ -68,9 +70,12 @@ def is_xml_file(filename):  # noqa: D103
     return False
 
 
-def create_event(asset_file: str, event_grid_file: str):  # noqa: C901, D103, N803, RUF100
+def create_event(asset_file: str, event_grid_file: str, workflow_input: str):  # noqa: C901, D103, N803, RUF100
+
+    event_grid_file_path = os.path.dirname(event_grid_file)
+
     if is_raster_file(event_grid_file):
-        return create_raster_event(asset_file, event_grid_file)
+        create_raster_event(asset_file, event_grid_file, 0)
     elif is_xml_file(event_grid_file):  # noqa: RET505
         # Here you would call a function to handle XML files
         # For now, we'll just raise a NotImplementedError
@@ -80,11 +85,39 @@ def create_event(asset_file: str, event_grid_file: str):  # noqa: C901, D103, N8
             f'{event_grid_file} is not a raster. Only rasters are currently supported.'  # noqa: EM102
         )
 
+    #
+    # open the workflow input file and check whether other raster files need processing
+    #
+
+    # build the path to the workflow input file and open it
+    json_path = os.path.join(os.path.dirname(os.getcwd()), workflow_input)
+    with open(json_path, 'r') as f:
+        data = json.load(f)
+
+    #
+    # if a 'multiple' entry exists, process each additional event file
+    # note: create_raster_event was modified for this purpose
+    #
+
+    # print(f'ORIGINAL: {event_grid_file}')
+
+    multiple_entries = data.get('RegionalEvent', {}).get('multiple', [])
+    for i, entry in enumerate(multiple_entries):
+
+        # additional event files are assumed to sit next to the main event grid file
+        next_file = entry['eventFile']
+        next_file_path = os.path.join(event_grid_file_path, next_file)
+        create_raster_event(asset_file, next_file_path, i + 1)
+
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
     parser.add_argument('--assetFile')
     parser.add_argument('--filenameEVENTgrid')
+    parser.add_argument('--workflowInput')
     args = parser.parse_args()
 
-    create_event(args.assetFile, args.filenameEVENTgrid)
+    create_event(args.assetFile, args.filenameEVENTgrid, args.workflowInput)
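
The loop added above walks data['RegionalEvent']['multiple'] from the workflow input, and RasterEvent.py (below) reads intensityMeasures and units from the matching RegionalEvent block in the asset file. A rough sketch of the RegionalEvent structure the two scripts now appear to expect is shown here; the file names, intensity-measure names, and units are placeholders, not values taken from this commit.

    "RegionalEvent": {
        "intensityMeasures": ["IM_1", "IM_2"],
        "units": {"IM_1": "m", "IM_2": "mps"},
        "multiple": [
            {
                "eventFile": "extra_grid.tif",
                "intensityMeasures": ["IM_3"],
                "units": {"IM_3": "m"}
            }
        ]
    }

Each entry in "multiple" drives one extra call to create_raster_event, with its eventFile resolved relative to the directory of the main event grid file.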

modules/performRegionalMapping/GISSpecifiedEvents/RasterEvent.py

Lines changed: 80 additions & 18 deletions
@@ -37,6 +37,7 @@
 # Stevan Gavrilovic
 #
 
+import os
 import argparse  # noqa: I001
 import json, csv  # noqa: E401
 from pathlib import Path
@@ -53,13 +54,18 @@ def sample_raster_at_latlon(src, lat, lon):  # noqa: D103
     if row < 0 or row >= src.height or col < 0 or col >= src.width:
         raise IndexError('Transformed coordinates are out of raster bounds')  # noqa: EM101, TRY003
 
-    # Read the raster value at the given row and column
-    raster_value = src.read(1)[row, col]
+    # Read the raster value at the given row and column for all layers, not just the first
+    # raster_value = src.read(1)[row, col]
+    data = src.read()
+    raster_values = data[:, row, col]
+    return raster_values  # noqa: RET504
 
-    return raster_value  # noqa: RET504
 
-def create_event(asset_file, event_grid_file):  # noqa: C901, D103, N803, RUF100
+def create_event(asset_file, event_grid_file, num_entry):  # noqa: C901, D103, N803, RUF100
+
+    # print(f'asset_file: {asset_file}, entry: {num_entry}')
+    # print(f'event_grid_file: {event_grid_file}, entry: {num_entry}')
+
     # read the event grid data file
     event_grid_path = Path(event_grid_file).resolve()
     event_dir = event_grid_path.parent
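
sample_raster_at_latlon now returns one value per raster band rather than only band 1, since src.read() yields an array of shape (bands, rows, cols). A minimal self-contained sketch of the same multi-band sampling pattern with rasterio is given below; the helper name, example file, and coordinate handling are illustrative assumptions, not code from this commit.

    import rasterio
    from rasterio.warp import transform


    def sample_all_bands(raster_path, lat, lon):
        """Return the value of every band at a WGS84 lat/lon (illustrative helper)."""
        with rasterio.open(raster_path) as src:
            # reproject the point from WGS84 into the raster's CRS
            xs, ys = transform('EPSG:4326', src.crs, [lon], [lat])
            # convert map coordinates to a row/column index
            row, col = src.index(xs[0], ys[0])
            if row < 0 or row >= src.height or col < 0 or col >= src.width:
                raise IndexError('Transformed coordinates are out of raster bounds')
            # read all bands: array of shape (bands, rows, cols)
            data = src.read()
            return data[:, row, col]


    # e.g. vals = sample_all_bands('example_grid.tif', 37.87, -122.27)  # one value per band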
@@ -94,7 +100,13 @@ def create_event(asset_file, event_grid_file):  # noqa: C901, D103, N803, RUF100
     # Load the asset data
     asset_data = json.load(asset_file)
 
-    im_tag = asset_data['RegionalEvent']['intensityMeasures'][0]
+    if num_entry == 0:
+        im_tag = asset_data['RegionalEvent']['intensityMeasures']
+        units = asset_data['RegionalEvent']['units']
+    else:
+        im_tag = asset_data['RegionalEvent']['multiple'][num_entry - 1]['intensityMeasures']
+        units = asset_data['RegionalEvent']['multiple'][num_entry - 1]['units']
+
 
     # Extract the latitude and longitude
     lat = float(asset_data['GeneralInformation']['location']['latitude'])
@@ -113,31 +125,81 @@ def create_event(asset_file, event_grid_file):  # noqa: C901, D103, N803, RUF100
         val = sample_raster_at_latlon(
             src=src, lat=lat_transformed, lon=lon_transformed
         )
-
-        data = [[im_tag], [val]]
+
+        data = [im_tag, val]
 
         # Save the simcenter file name
         file_name = f'Site_{asset_id}.csvx{0}x{int(asset_id):05d}'
 
         data_final.append([file_name, lat, lon])
 
         csv_save_path = event_dir / f'Site_{asset_id}.csv'
-        with open(csv_save_path, 'w', newline='') as file:  # noqa: PTH123
-            # Create a CSV writer object
-            writer = csv.writer(file)
 
-            # Write the data to the CSV file
-            writer.writerows(data)
+        if num_entry == 0:
+
+            # first entry: create the csv file and add the data
+
+            with open(csv_save_path, 'w', newline='') as file:  # noqa: PTH123
+                # Create a CSV writer object
+                writer = csv.writer(file)
+
+                # Write the data to the CSV file
+                writer.writerows(data)
+        else:
+
+            # subsequent entries: read the existing file, append the new
+            # header and row data, then write the file back out
+
+            # Read the existing file
+            if os.path.exists(csv_save_path):
+                with open(csv_save_path, mode='r') as f:
+                    reader = csv.DictReader(f)
+                    rows = list(reader)
+                    fieldnames = reader.fieldnames or []
+            else:
+                rows = []
+                fieldnames = []
+
+            # extend the field names and row data with the new intensity measures
+            # (im_tag is expected to be a list of measure names, val the sampled values)
+            extra = dict(zip(im_tag, val))
+            for k in extra:
+                if k not in fieldnames:
+                    fieldnames.append(k)
+            for row in rows:
+                row.update(extra)
+
+            # Overwrite the existing file
+            with open(csv_save_path, mode='w', newline='') as f:
+                writer = csv.DictWriter(f, fieldnames=fieldnames)
+                writer.writeheader()
+                writer.writerows(rows)
 
         # prepare a dictionary of events
         event_list_json = [[file_name, 1.0]]
 
-        asset_data['Events'] = [{}]
-        asset_data['Events'][0] = {
-            'EventFolderPath': str(event_dir),
-            'Events': event_list_json,
-            'type': 'intensityMeasure',
-        }
+
+        # in the asset file, add the event info, now including units
+        if num_entry == 0:
+
+            # first entry: add the Events field
+            asset_data['Events'] = [{}]
+            asset_data['Events'][0] = {
+                'EventFolderPath': str(event_dir),
+                'Events': event_list_json,
+                'type': 'intensityMeasure',
+                'units': units,
+            }
+
+        else:
+
+            # additional entries: merge the new units into the existing ones
+            asset_data['Events'][0]['units'].update(units)
 
     with open(asset_file_path, 'w', encoding='utf-8') as f:  # noqa: PTH123
         json.dump(asset_data, f, indent=2)
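
With the new --workflowInput argument, the mapping step would be invoked roughly like this (paths are placeholders):

    python GISSpecifiedEvent.py --assetFile asset.json --filenameEVENTgrid event_grid.tif --workflowInput workflow_input.json

For the main grid (entry 0), RasterEvent.create_event writes Site_<asset_id>.csv with the intensity-measure names as the header row and the sampled band values as the data row, and creates the Events entry in the asset file. For each additional raster listed under RegionalEvent.multiple, it appends the new intensity-measure columns to that same CSV and merges the new units into asset_data['Events'][0]['units'].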
