def sample_raster_at_latlon(src, lat, lon):  # noqa: D103
+
    # Get the row and column indices in the raster
    row, col = rowcol(src.transform, lon, lat)  # Note the order: lon, lat
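The helper samples the raster at a single point; the inline note about argument order is the easy part to get wrong. A minimal sketch of the same pattern in isolation (the file name and coordinates are placeholders, and the point must already be in the raster's CRS, which is why `create_event` reprojects before sampling):

```python
# Hypothetical standalone example of the rowcol-then-read pattern.
import rasterio
from rasterio.transform import rowcol

with rasterio.open('demo.tif') as src:  # placeholder path
    # rowcol takes (transform, x, y), i.e. (lon, lat) -- not (lat, lon)
    row, col = rowcol(src.transform, -122.25, 37.87)
    # read the value at that cell from every band
    values = [src.read(band)[row, col] for band in range(1, src.count + 1)]
```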
@@ -61,12 +62,33 @@ def sample_raster_at_latlon(src, lat, lon): # noqa: D103
    return raster_values  # noqa: RET504


-def create_event(asset_file, event_grid_file, num_entry):  # noqa: C901, D103, N803, RUF100
+def create_event(asset_file, event_grid_file, num_entry, do_parallel):  # noqa: C901, D103, N803, RUF100

    # print(f'asset_file: {asset_file}, entry: {num_entry}')
    # print(f'event_grid_file: {event_grid_file}, entry: {num_entry}')

    # read the event grid data file
+
+    num_processes = 1
+    process_id = 0
+    run_parallel = False
+
+    if do_parallel == 'True':
+        mpi_spec = importlib.util.find_spec('mpi4py')
+        found = mpi_spec is not None
+        if found:
+            from mpi4py import MPI
+
+            run_parallel = True
+            comm = MPI.COMM_WORLD
+            num_processes = comm.Get_size()
+            process_id = comm.Get_rank()
+            if num_processes < 2:  # noqa: PLR2004
+                do_parallel = 'False'
+                run_parallel = False
+                num_processes = 1
+                process_id = 0
+
    event_grid_path = Path(event_grid_file).resolve()
    event_dir = event_grid_path.parent
    event_grid_file = event_grid_path.name
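Note that the new `do_parallel` flag arrives as the string `'True'` rather than a bool, and the guarded import means the script degrades to serial when mpi4py is absent or only one rank is launched. A condensed sketch of the same startup plus the round-robin split used in the loop below (the work list is a placeholder):

```python
# Hypothetical condensed version of the guarded MPI setup in this hunk.
import importlib.util

num_processes, process_id, run_parallel = 1, 0, False
if importlib.util.find_spec('mpi4py') is not None:
    from mpi4py import MPI

    comm = MPI.COMM_WORLD
    num_processes = comm.Get_size()
    process_id = comm.Get_rank()
    run_parallel = num_processes > 1

# round-robin split: rank p handles items p, p + num_processes, ...
items = ['a', 'b', 'c', 'd', 'e']  # placeholder work list
mine = [x for i, x in enumerate(items)
        if not run_parallel or i % num_processes == process_id]
```

Launched with e.g. `mpiexec -n 4 python script.py`, each rank sees the same list but processes a disjoint quarter of it; run directly, it processes everything.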
@@ -90,146 +112,140 @@ def create_event(asset_file, event_grid_file, num_entry): # noqa: C901, D103, N
        ['GP_file', 'Latitude', 'Longitude'],
    ]

-    # Iterate through each asset
-    for asset in asset_dict:
-        asset_id = asset['id']
-        asset_file_path = asset['file']
-
-        # Load the corresponding file for each asset
-        with open(asset_file_path, encoding='utf-8') as asset_file:  # noqa: PTH123, PLR1704
-            # Load the asset data
-            asset_data = json.load(asset_file)
-
-        if num_entry == 0:
-            im_tag = asset_data['RegionalEvent']['intensityMeasures']
-            units = asset_data['RegionalEvent']['units']
-        else:
-            im_tag = asset_data['RegionalEvent']['multiple'][num_entry - 1]['intensityMeasures']
-            units = asset_data['RegionalEvent']['multiple'][num_entry - 1]['units']
+    count = 0
+    for i, asset in enumerate(asset_dict):
+        if run_parallel == False or (i % num_processes) == process_id:  # noqa: E712
+
+            # for asset in asset_dict:
+            asset_id = asset['id']
+            asset_file_path = asset['file']
+
+            # Load the corresponding file for each asset
+            with open(asset_file_path, encoding='utf-8') as asset_file:  # noqa: PTH123, PLR1704
+                # Load the asset data
+                asset_data = json.load(asset_file)
+
+            if num_entry == 0:
+                im_tag = asset_data['RegionalEvent']['intensityMeasures']
+                units = asset_data['RegionalEvent']['units']
+            else:
+                im_tag = asset_data['RegionalEvent']['multiple'][num_entry - 1]['intensityMeasures']
+                units = asset_data['RegionalEvent']['multiple'][num_entry - 1]['units']

-        # Extract the latitude and longitude
-        lat = float(asset_data['GeneralInformation']['location']['latitude'])
-        lon = float(asset_data['GeneralInformation']['location']['longitude'])
-
-        # Transform the coordinates
-        lon_transformed, lat_transformed = transformer.transform(lon, lat)
-
-        # Check if the transformed coordinates are within the raster bounds
-        bounds = src.bounds
-        if (
-            bounds.left <= lon_transformed <= bounds.right
-            and bounds.bottom <= lat_transformed <= bounds.top
-        ):
-            try:
-                val = sample_raster_at_latlon(
-                    src=src, lat=lat_transformed, lon=lon_transformed
-                )
+            # Extract the latitude and longitude
+            lat = float(asset_data['GeneralInformation']['location']['latitude'])
+            lon = float(asset_data['GeneralInformation']['location']['longitude'])
+
+            # Transform the coordinates
+            lon_transformed, lat_transformed = transformer.transform(lon, lat)
+
+            # Check if the transformed coordinates are within the raster bounds
+            bounds = src.bounds
+            if (
+                bounds.left <= lon_transformed <= bounds.right
+                and bounds.bottom <= lat_transformed <= bounds.top
+            ):
+                try:
+                    val = sample_raster_at_latlon(
+                        src=src, lat=lat_transformed, lon=lon_transformed
+                    )

-                data = [im_tag, val]
+                    data = [im_tag, val]

-                # Save the simcenter file name
-                file_name = f'Site_{asset_id}.csvx{0}x{int(asset_id):05d}'
+                    # Save the simcenter file name
+                    file_name = f'Site_{asset_id}.csvx{0}x{int(asset_id):05d}'

-                data_final.append([file_name, lat, lon])
+                    data_final.append([file_name, lat, lon])

-                csv_save_path = event_dir / f'Site_{asset_id}.csv'
+                    csv_save_path = event_dir / f'Site_{asset_id}.csv'

-                if num_entry == 0:
+                    if num_entry == 0:

-                    # if first entry
-                    # create the csv file and add the data
-
-                    with open(csv_save_path, 'w', newline='') as file:  # noqa: PTH123
-                        # Create a CSV writer object
-                        writer = csv.writer(file)
-
-                        # Write the data to the CSV file
-                        writer.writerows(data)
-                else:
-
-                    # subsequent entries
-                    # read existing file, append header and row data,
-                    # and finally write new file with updated data
-                    #
+                        # if first entry
+                        # create the csv file and add the data

-                    # Read the existing file
-                    if os.path.exists(csv_save_path):
-                        with open(csv_save_path, mode='r') as f:
-                            reader = csv.DictReader(f)
-                            rows = list(reader)
-                            fieldnames = reader.fieldnames or []
+                        with open(csv_save_path, 'w', newline='') as file:  # noqa: PTH123
+
+                            # Create a CSV writer object
+                            writer = csv.writer(file)
+
+                            # Write the data to the CSV file
+                            writer.writerows(data)
                    else:
-                        rows = []
-                        fieldnames = []
-
-                    # extend field names and row data with additional stuff to be added
-                    # IS IM_TAG a single value or an array .. should be array!
-
-                    extra = dict(zip(im_tag, val))
-                    for k in extra:
-                        if k not in fieldnames:
-                            fieldnames.append(k)
-                    for row in rows:
-                        row.update(extra)
+
+                        # subsequent entries
+                        # read existing file, append header and row data,
+                        # and finally write new file with updated data
+                        #
+
+                        # Read the existing file
+                        if os.path.exists(csv_save_path):
+                            with open(csv_save_path, mode='r') as f:
+                                reader = csv.DictReader(f)
+                                rows = list(reader)
+                                fieldnames = reader.fieldnames or []
+                        else:
+                            rows = []
+                            fieldnames = []
+
+                        # extend field names and row data with additional stuff to be added
+                        # IS IM_TAG a single value or an array .. should be array!
+
+                        extra = dict(zip(im_tag, val))
+                        for k in extra:
+                            if k not in fieldnames:
+                                fieldnames.append(k)
+
+                        for row in rows:
+                            row.update(extra)

-                    # Overwrite existing file
-                    with open(csv_save_path, mode='w', newline='') as f:
-                        writer = csv.DictWriter(f, fieldnames=fieldnames)
-                        writer.writeheader()
-                        writer.writerows(rows)
+                        # Overwrite existing file
+                        with open(csv_save_path, mode='w', newline='') as f:
+                            writer = csv.DictWriter(f, fieldnames=fieldnames)
+                            writer.writeheader()
+                            writer.writerows(rows)

-                # prepare a dictionary of events
-                event_list_json = [[file_name, 1.0]]
+                    # prepare a dictionary of events
+                    event_list_json = [[file_name, 1.0]]

-                # in asset file, add event info including now units
-                if num_entry == 0:
+                    # in asset file, add event info including now units
+                    if num_entry == 0:

-                    # if first, add an event field
-                    asset_data['Events'] = [{}]
-                    asset_data['Events'][0] = {
-                        'EventFolderPath': str(event_dir),
-                        'Events': event_list_json,
-                        'type': 'intensityMeasure',
-                        'units': units
-                    }
+                        # if first, add an event field
+                        asset_data['Events'] = [{}]
+                        asset_data['Events'][0] = {
+                            'EventFolderPath': str(event_dir),
+                            'Events': event_list_json,
+                            'type': 'intensityMeasure',
+                            'units': units
+                        }

-                else:
+                    else:

-                    # if additional, update units to include new
-                    asset_data['Events'][0]['units'].update(units)
-
-                with open(asset_file_path, 'w', encoding='utf-8') as f:  # noqa: PTH123
-                    json.dump(asset_data, f, indent=2)
-
-            except IndexError as e:
-                print(f'Error for asset ID {asset_id}: {e}')  # noqa: T201
-        else:
-            print(f'Asset ID: {asset_id} is outside the raster bounds')  # noqa: T201
-
-        # # save the event dictionary to the BIM
-        # asset_data['Events'] = [{}]
-        # asset_data['Events'][0] = {
-        #     # "EventClassification": "Earthquake",
-        #     'EventFolderPath': str(event_dir),
-        #     'Events': event_list_json,
-        #     'type': event_type,
-        #     # "type": "SimCenterEvents"
-        # }
-
-        # with open(asset_file, 'w', encoding='utf-8') as f:  # noqa: PTH123, RUF100
-        #     json.dump(asset_data, f, indent=2)
-
-    # Save the final event grid
-    csv_save_path = event_dir / 'EventGrid.csv'
-    with open(csv_save_path, 'w', newline='') as file:  # noqa: PTH123
-        # Create a CSV writer object
-        writer = csv.writer(file)
-
-        # Write the data to the CSV file
-        writer.writerows(data_final)
+                        # if additional, update units to include new
+                        asset_data['Events'][0]['units'].update(units)
+
+                    with open(asset_file_path, 'w', encoding='utf-8') as f:  # noqa: PTH123
+                        json.dump(asset_data, f, indent=2)
+
+                except IndexError as e:
+                    print(f'Error for asset ID {asset_id}: {e}')  # noqa: T201
+            else:
+                print(f'Asset ID: {asset_id} is outside the raster bounds')  # noqa: T201
+
+
+    if process_id == 0:
+        # Save the final event grid
+        csv_save_path = event_dir / 'EventGrid.csv'
+        with open(csv_save_path, 'w', newline='') as file:  # noqa: PTH123
+            # Create a CSV writer object
+            writer = csv.writer(file)
+
+            # Write the data to the CSV file
+            writer.writerows(data_final)

    # Perform cleanup
    src.close()
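Two patterns in this change are worth calling out. First, the `num_entry > 0` branch widens each existing `Site_<id>.csv` by reading it with `csv.DictReader`, extending the header with the new intensity-measure columns, and rewriting the file. A standalone sketch of that merge (the file name and measure values are hypothetical):

```python
# Hypothetical standalone version of the column-append pattern.
import csv
from pathlib import Path

path = Path('Site_1.csv')           # placeholder per-site file
extra = {'PGA': 0.35, 'PGV': 12.0}  # placeholder new IM columns

rows, fieldnames = [], []
if path.exists():
    with path.open(newline='') as f:
        reader = csv.DictReader(f)
        rows = list(reader)
        fieldnames = list(reader.fieldnames or [])

# extend the header with any new columns, then add them to every row
for name in extra:
    if name not in fieldnames:
        fieldnames.append(name)
for row in rows:
    row.update(extra)

# rewrite the file with the widened header
with path.open('w', newline='') as f:
    writer = csv.DictWriter(f, fieldnames=fieldnames)
    writer.writeheader()
    writer.writerows(rows)
```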
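Second, a point to verify: with the round-robin split, each rank appends rows to `data_final` only for its own assets, yet only rank 0 writes `EventGrid.csv`, so unless the per-rank rows are combined somewhere outside this hunk the grid would list only rank 0's sites. If that step is indeed missing, a minimal sketch of collecting the rows with mpi4py (assuming `comm`, `run_parallel`, `process_id`, and `data_final` exactly as in the diff, with the header as row 0 on every rank) would be:

```python
# Hedged sketch: gather per-rank rows on rank 0 before writing EventGrid.csv.
if run_parallel:
    chunks = comm.gather(data_final[1:], root=0)  # data rows only, not the header
    if process_id == 0:
        data_final = data_final[:1]  # keep the header row
        for chunk in chunks:
            data_final.extend(chunk)
```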