Skip to content

Commit

Permalink
Revert some of the changes in commit 703420a to attain backward compatibility with older python/pandas versions.
Browse files Browse the repository at this point in the history
  • Loading branch information
Shintaro Bunya committed Jul 8, 2024
1 parent d691576 commit e3d4873
Show file tree
Hide file tree
Showing 3 changed files with 9 additions and 9 deletions.
8 changes: 4 additions & 4 deletions adda/adda.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,8 +172,8 @@ def main(args):
data_adc,meta_adc=rpl.fetch_station_product(urls, return_sample_min=args.return_sample_min, fort63_style=fort63_style )

# Revert Harvester filling of nans to -99999 back to nans
data_adc = data_adc.replace('-99999',np.nan).infer_objects(copy=False)
meta_adc = meta_adc.replace('-99999',np.nan).infer_objects(copy=False)
data_adc = data_adc.replace('-99999',np.nan)
meta_adc = meta_adc.replace('-99999',np.nan)

# Get the grid coordinates for the url
adc_coords = get_adcirc_stations.extract_adcirc_grid_coords( urls )
Expand Down Expand Up @@ -242,8 +242,8 @@ def main(args):
knockout_dict=None, station_list_file=station_file)
# Get data at highest resolution
data_obs,meta_obs=obs.fetch_station_product((obs_starttime,obs_endtime), return_sample_min=0)
data_obs = data_obs.replace('-99999',np.nan).infer_objects(copy=False)
meta_obs = meta_obs.replace('-99999',np.nan).infer_objects(copy=False)
data_obs = data_obs.replace('-99999',np.nan)
meta_obs = meta_obs.replace('-99999',np.nan)
temp=io_utilities.write_csv(data_obs, rootdir=rootdir,subdir=iosubdir,fileroot='data_obs')

# Remove stations with too many nans
Expand Down
4 changes: 2 additions & 2 deletions harvester/fetch_station_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ def replace_and_fill(df):
"""
Replace all Nans with 'None" values with GLOBAL_FILL_VALUE
"""
df=df.fillna(GLOBAL_FILL_VALUE).infer_objects(copy=False)
df=df.fillna(GLOBAL_FILL_VALUE)
return df

def stations_resample(df, sample_mins=15)->pd.DataFrame:
Expand Down Expand Up @@ -191,7 +191,7 @@ def interpolate_and_resample(self, dx, n_pad=0, sample_mins=15, int_limit=3)->pd
timeout = dt.datetime.strptime(max(dx.index+np.timedelta64(n_pad,'h')).strftime(dformat), dformat)
# Generate the NEW augmented time range
actualRange = dx.index
normalRange = pd.date_range(str(timein), str(timeout), freq=f'{sample_mins*60.0}s') # This gets us the stepping we want
normalRange = pd.date_range(str(timein), str(timeout), freq=f'{sample_mins*60.0}S') # This gets us the stepping we want
datanormal=[x for x in normalRange if x not in actualRange]
# Assemble the union of values for the final data set. Exclude entries that already exist in the real data
dappend = pd.concat([dx,pd.DataFrame(index=datanormal)],axis=0)
Expand Down
6 changes: 3 additions & 3 deletions processing/compute_error_field.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def interpolate_and_sample( diurnal_range, df_in )-> pd.DataFrame:
df_out.sort_index(inplace=True) # this is sorted with intervening nans that need to be imputed
#df_out.to_csv('check_pre.csv',float_format='%.3f')
#df_out_int = df_out.interpolate(method='linear')
df_out_int = df_out.infer_objects(copy=False).interpolate(method='values')
df_out_int = df_out.interpolate(method='values')
#df_out_int.to_csv('check_po1.csv',float_format='%.3f')
df_out_int = df_out_int.loc[diurnal_range]
df_out_int.index.name='TIME'
Expand Down Expand Up @@ -217,13 +217,13 @@ def _tidal_transform_data(self):
n_range = self.adc.index.tolist()
n_range.sort()
timein, timeout = n_range[0], n_range[-1]
normalRange = pd.date_range(str(timein), str(timeout), freq='3600s')
normalRange = pd.date_range(str(timein), str(timeout), freq='3600S')
n_hours_per_period = self.n_hours_per_period
n_hours_per_tide = self.n_hours_per_tide
n_pad = self.n_pad # This is used to push inteprlation end-nans to outside the time bounds

time_step = int(3600*n_hours_per_tide/n_hours_per_period) # Always scale to an hour (3600s)
diurnal_range = pd.date_range(timein, timeout+np.timedelta64(n_pad,'h'), freq=str(time_step)+'s').to_list()
diurnal_range = pd.date_range(timein, timeout+np.timedelta64(n_pad,'h'), freq=str(time_step)+'S').to_list()

#self.adc.to_csv('check_adc_po3a.csv',float_format='%.3f')
self.adc = interpolate_and_sample( diurnal_range, self.adc )
Expand Down

0 comments on commit e3d4873

Please sign in to comment.