diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 7559e64..c0c0cec 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -14,6 +14,7 @@ "extensions": [ "github.vscode-pull-request-github", "mhutchie.git-graph", + "ms-python.black-formatter", "ms-python.python", "streetsidesoftware.code-spell-checker", "tamasfe.even-better-toml" @@ -22,6 +23,8 @@ "files.insertFinalNewline": true, "files.trimTrailingWhitespace": true, "[python]": { + "editor.defaultFormatter": "ms-python.black-formatter", + "editor.formatOnSave": true, "editor.insertSpaces": true, "editor.tabSize": 4, "python.languageServer": "Pylance" diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index fedef08..e4088a2 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -28,7 +28,6 @@ jobs: with: python-version: ${{ matrix.python }} - run: pip install poetry==1.7.1 && poetry install - - name: Run a comprehensive merge test - run: | - deshima-rawdata download 20231108052231 -d data -e - demerge -d data -m "--debug --loadtype Tsignal" 20231108052231 + - run: black --check demerge + - run: deshima-rawdata download 20231108052231 -d data -e + - run: demerge -d data -m "--debug --loadtype Tsignal" 20231108052231 diff --git a/demerge/merge/__init__.py b/demerge/merge/__init__.py index a8c878b..c5a6573 100644 --- a/demerge/merge/__init__.py +++ b/demerge/merge/__init__.py @@ -5,6 +5,7 @@ (C) 2023 内藤システムズ """ + __all__ = ["merge"] @@ -28,63 +29,80 @@ def merge( - ddbfits_path='', - corresp_path='', - obsinst_path='', - antenna_path='', - readout_path='', - skychop_path='', - weather_path='', - misti_path='', - cabin_path='', - **kwargs - ): + ddbfits_path="", + corresp_path="", + obsinst_path="", + antenna_path="", + readout_path="", + skychop_path="", + weather_path="", + misti_path="", + cabin_path="", + **kwargs, +): # その他の引数の処理と既定値の設定 - coordinate = kwargs.pop('coordinate', 'azel') - loadtype = kwargs.pop('loadtype', 'fshift') + coordinate = kwargs.pop("coordinate", "azel") + loadtype = kwargs.pop("loadtype", "fshift") + # find R, sky - findR = kwargs.pop("findR", False) - ch = kwargs.pop("ch", 0) - Rth = kwargs.pop("Rth", 280) - skyth = kwargs.pop("skyth", 150) + findR = kwargs.pop("findR", False) + ch = kwargs.pop("ch", 0) + Rth = kwargs.pop("Rth", 280) + skyth = kwargs.pop("skyth", 150) cutnum = kwargs.pop("cutnum", 1) + # still - still = kwargs.pop("still", False) - period = kwargs.pop("period", 2) # 秒 + still = kwargs.pop("still", False) + period = kwargs.pop("period", 2) # 秒 + # shuttle - shuttle = kwargs.pop("shuttle", False) + shuttle = kwargs.pop("shuttle", False) lon_min_off = kwargs.pop("lon_min_off", 0) lon_max_off = kwargs.pop("lon_max_off", 0) - lon_min_on = kwargs.pop("lon_min_on", 0) - lon_max_on = kwargs.pop("lon_max_on", 0) + lon_min_on = kwargs.pop("lon_min_on", 0) + lon_max_on = kwargs.pop("lon_max_on", 0) + # その他一時的な補正 - offset_time_antenna = kwargs.pop("offset_time_antenna", 0) # ms(integerでないとnp.timedeltaに変換できないので注意) + # ms(integerでないとnp.timedeltaに変換できないので注意) + offset_time_antenna = kwargs.pop("offset_time_antenna", 0) # 時刻と各種データを読み込む - readout_hdul = fits.open(readout_path, mode="readonly") - ddbfits_hdul = fits.open(ddbfits_path, mode="readonly") - weather_table = ascii.read(weather_path) - antenna_table = ascii.read(antenna_path)[:-1] # 最後の1行は終端を表す意味のないデータが入っているため無視する - obsinst_params = utils.load_obsinst(obsinst_path) # 観測スクリプトに含まれているパラメタを抽出する + readout_hdul = fits.open(readout_path, 
mode="readonly") + ddbfits_hdul = fits.open(ddbfits_path, mode="readonly") + weather_table = ascii.read(weather_path) + # 最後の1行は終端を表す意味のないデータが入っているため無視する + antenna_table = ascii.read(antenna_path)[:-1] + # 観測スクリプトに含まれているパラメタを抽出する + obsinst_params = utils.load_obsinst(obsinst_path) # 必要に応じて時刻はnp.datetime64[ns]へ変換する - times = utils.convert_timestamp(readout_hdul['READOUT'].data['timestamp']) - times = np.array(times).astype('datetime64[ns]') + times = utils.convert_timestamp(readout_hdul["READOUT"].data["timestamp"]) + times = np.array(times).astype("datetime64[ns]") times_misti, az_misti, el_misti, pwv_misti = utils.retrieve_misti_log(misti_path) + times_cabin, _, lower_cabin_temp = utils.retrieve_cabin_temps(cabin_path) + lower_cabin_temp = lower_cabin_temp + 273.15 # 度CからKへ変換 - times_cabin, upper_cabin_temp, lower_cabin_temp = utils.retrieve_cabin_temps(cabin_path) - lower_cabin_temp = lower_cabin_temp + 273.15 # 度CからKへ変換 - - times_weather = utils.convert_asciitime(weather_table['time'], '%Y-%m-%dT%H:%M:%S.%f') - times_weather = np.array(times_weather).astype('datetime64[ns]') + times_weather = utils.convert_asciitime( + asciitime=weather_table["time"], + form_fitstime="%Y-%m-%dT%H:%M:%S.%f", + ) + times_weather = np.array(times_weather).astype("datetime64[ns]") times_skychop, states_skychop = utils.retrieve_skychop_states(skychop_path) times_skychop = utils.convert_timestamp(times_skychop) - times_skychop = np.array(times_skychop).astype('datetime64[ns]') + times_skychop = np.array(times_skychop).astype("datetime64[ns]") - times_antenna = utils.convert_asciitime(antenna_table['time'], '%Y-%m-%dT%H:%M:%S.%f') - times_antenna = np.array(times_antenna).astype('datetime64[ns]') + np.timedelta64(offset_time_antenna, 'ms') + times_antenna = utils.convert_asciitime( + asciitime=antenna_table["time"], + form_fitstime="%Y-%m-%dT%H:%M:%S.%f", + ) + # fmt: off + times_antenna = ( + np.array(times_antenna).astype("datetime64[ns]") + + np.timedelta64(offset_time_antenna, "ms") + ) + # fmt: on ddb_version = ddbfits_hdul["PRIMARY"].header["DDB_ID"] @@ -94,137 +112,222 @@ def merge( corresp=corresp, to=loadtype, T_room=lower_cabin_temp[0], - T_amb=np.nanmean(weather_table['tmperature']) + 273.15, + T_amb=np.nanmean(weather_table["tmperature"]) + 273.15, ) - if loadtype == 'Tsignal': - long_name = 'Brightness' - units = 'K' - elif loadtype == 'fshift': - long_name = 'df/f' - units = 'dimensionless' + if loadtype == "Tsignal": + long_name = "Brightness" + units = "K" + elif loadtype == "fshift": + long_name = "df/f" + units = "dimensionless" else: - raise KeyError('Invalid loadtype: {}'.format(loadtype)) + raise KeyError("Invalid loadtype: {}".format(loadtype)) ddbfits_hdul.close() readout_hdul.close() # モードに応じて経度(lon)と緯度(lat)を選択(azelかradecか)する - if coordinate == 'azel': - if 'az-prg(no-cor)' in antenna_table.colnames: - az_prg = antenna_table['az-prg(no-cor)'] - el_prog = antenna_table['el-prog(no-cor)'] - elif 'az-prg(no-col)' in antenna_table.colnames: - az_prg = antenna_table['az-prg(no-col)'] - el_prog = antenna_table['el-prog(no-col)'] + if coordinate == "azel": + if "az-prg(no-cor)" in antenna_table.colnames: + az_prg = antenna_table["az-prg(no-cor)"] + el_prog = antenna_table["el-prog(no-cor)"] + elif "az-prg(no-col)" in antenna_table.colnames: + az_prg = antenna_table["az-prg(no-col)"] + el_prog = antenna_table["el-prog(no-col)"] else: - raise KeyError('{}ファイルにaz-prg(no-cor)列またはaz-prg(no-col)列がありません。'.format(antenna_path)) - - lon = az_prg + antenna_table['az-real'] - antenna_table['az-prg'] - 
lat = el_prog + antenna_table['el-real'] - antenna_table['el-prg'] - lon_origin = antenna_table['az-prog(center)'] - lat_origin = antenna_table['el-prog(center)'] - elif coordinate == 'radec': - lon = antenna_table['ra-prg'] - lat = antenna_table['dec-prg'] - lon_origin = np.full_like(lon, obsinst_params['ra']) # 観測スクリプトに設定されているRA,DEC - lat_origin = np.full_like(lat, obsinst_params['dec']) + raise KeyError( + str(antenna_path) + + "ファイルにaz-prg(no-cor)列またはaz-prg(no-col)列がありません。" + ) + + lon = az_prg + antenna_table["az-real"] - antenna_table["az-prg"] + lat = el_prog + antenna_table["el-real"] - antenna_table["el-prg"] + lon_origin = antenna_table["az-prog(center)"] + lat_origin = antenna_table["el-prog(center)"] + elif coordinate == "radec": + lon = antenna_table["ra-prg"] + lat = antenna_table["dec-prg"] + # 観測スクリプトに設定されているRA,DEC + lon_origin = np.full_like(lon, obsinst_params["ra"]) + lat_origin = np.full_like(lat, obsinst_params["dec"]) else: - raise KeyError('Invalid coodinate type: {}'.format(coordinate)) + raise KeyError("Invalid coodinate type: {}".format(coordinate)) # 補間関数で扱うためにSCANTYPE(文字列)を適当な整数に対応させる - states = np.array(antenna_table['type']) - state_types = {state_type:i for i, state_type in enumerate(np.unique(states))} + states = np.array(antenna_table["type"]) + state_types = {state_type: i for i, state_type in enumerate(np.unique(states))} state_type_numbers = np.zeros(states.shape[0], dtype=int) for state_type, i in state_types.items(): state_type_numbers[states == state_type] = i # 補間のためにDataArrayへ格納する - response_xr = xr.DataArray(data=response, dims=['time', 'chan'], coords=[times, corresp.index]) - lon_xr = xr.DataArray(data=lon, coords={'time': times_antenna}) - lat_xr = xr.DataArray(data=lat, coords={'time': times_antenna}) - lon_origin_xr = xr.DataArray(data=lon_origin, coords={'time': times_antenna}) - lat_origin_xr = xr.DataArray(data=lat_origin, coords={'time': times_antenna}) - temperature_xr = xr.DataArray(data=weather_table['tmperature'], coords={'time': times_weather}) - humidity_xr = xr.DataArray(data=weather_table['vapor-pressure'], coords={'time': times_weather}) - pressure_xr = xr.DataArray(data=weather_table['presure'], coords={'time': times_weather}) - wind_speed_xr = xr.DataArray(data=weather_table['aux1'], coords={'time': times_weather}) - wind_direction_xr = xr.DataArray(data=weather_table['aux2'], coords={'time': times_weather}) - skychop_state_xr = xr.DataArray(data=states_skychop, coords={'time': times_skychop}) - aste_cabin_temperature_xr = xr.DataArray(data=lower_cabin_temp, coords={'time': times_cabin}) - aste_subref_x_xr = xr.DataArray(data=antenna_table['x'], coords={'time': times_antenna}) - aste_subref_y_xr = xr.DataArray(data=antenna_table['y'], coords={'time': times_antenna}) - aste_subref_z_xr = xr.DataArray(data=antenna_table['z'], coords={'time': times_antenna}) - aste_subref_xt_xr = xr.DataArray(data=antenna_table['xt'], coords={'time': times_antenna}) - aste_subref_yt_xr = xr.DataArray(data=antenna_table['yt'], coords={'time': times_antenna}) - aste_subref_zt_xr = xr.DataArray(data=antenna_table['zt'], coords={'time': times_antenna}) - aste_misti_lon_xr = xr.DataArray(data=az_misti, coords={'time': times_misti}) - aste_misti_lat_xr = xr.DataArray(data=el_misti, coords={'time': times_misti}) - aste_misti_pwv_xr = xr.DataArray(data=pwv_misti, coords={'time': times_misti}) - state_type_numbers_xr = xr.DataArray(data=state_type_numbers, coords={'time': times_antenna}) + response_xr = xr.DataArray( + data=response, + dims=["time", 
"chan"], + coords=[times, corresp.index], + ) + lon_xr = xr.DataArray( + data=lon, + coords={"time": times_antenna}, + ) + lat_xr = xr.DataArray( + data=lat, + coords={"time": times_antenna}, + ) + lon_origin_xr = xr.DataArray( + data=lon_origin, + coords={"time": times_antenna}, + ) + lat_origin_xr = xr.DataArray( + data=lat_origin, + coords={"time": times_antenna}, + ) + temperature_xr = xr.DataArray( + data=weather_table["tmperature"], + coords={"time": times_weather}, + ) + humidity_xr = xr.DataArray( + data=weather_table["vapor-pressure"], + coords={"time": times_weather}, + ) + pressure_xr = xr.DataArray( + data=weather_table["presure"], + coords={"time": times_weather}, + ) + wind_speed_xr = xr.DataArray( + data=weather_table["aux1"], + coords={"time": times_weather}, + ) + wind_direction_xr = xr.DataArray( + data=weather_table["aux2"], + coords={"time": times_weather}, + ) + skychop_state_xr = xr.DataArray( + data=states_skychop, + coords={"time": times_skychop}, + ) + aste_cabin_temperature_xr = xr.DataArray( + data=lower_cabin_temp, + coords={"time": times_cabin}, + ) + aste_subref_x_xr = xr.DataArray( + data=antenna_table["x"], + coords={"time": times_antenna}, + ) + aste_subref_y_xr = xr.DataArray( + data=antenna_table["y"], + coords={"time": times_antenna}, + ) + aste_subref_z_xr = xr.DataArray( + data=antenna_table["z"], + coords={"time": times_antenna}, + ) + aste_subref_xt_xr = xr.DataArray( + data=antenna_table["xt"], + coords={"time": times_antenna}, + ) + aste_subref_yt_xr = xr.DataArray( + data=antenna_table["yt"], + coords={"time": times_antenna}, + ) + aste_subref_zt_xr = xr.DataArray( + data=antenna_table["zt"], + coords={"time": times_antenna}, + ) + aste_misti_lon_xr = xr.DataArray( + data=az_misti, + coords={"time": times_misti}, + ) + aste_misti_lat_xr = xr.DataArray( + data=el_misti, + coords={"time": times_misti}, + ) + aste_misti_pwv_xr = xr.DataArray( + data=pwv_misti, + coords={"time": times_misti}, + ) + state_type_numbers_xr = xr.DataArray( + data=state_type_numbers, + coords={"time": times_antenna}, + ) # Tsignalsの時刻に合わせて補間する - lon = lon_xr.interp_like(response_xr) - lat = lat_xr.interp_like(response_xr) - lon_origin = lon_origin_xr.interp_like(response_xr) - lat_origin = lat_origin_xr.interp_like(response_xr) - temperature = temperature_xr.interp_like(response_xr) - humidity = humidity_xr.interp_like(response_xr) - pressure = pressure_xr.interp_like(response_xr) - wind_speed = wind_speed_xr.interp_like(response_xr) - wind_direction = wind_direction_xr.interp_like(response_xr) - aste_subref_x = aste_subref_x_xr.interp_like(response_xr) - aste_subref_y = aste_subref_y_xr.interp_like(response_xr) - aste_subref_z = aste_subref_z_xr.interp_like(response_xr) - aste_subref_xt = aste_subref_xt_xr.interp_like(response_xr) - aste_subref_yt = aste_subref_yt_xr.interp_like(response_xr) - aste_subref_zt = aste_subref_zt_xr.interp_like(response_xr) - skychop_state = skychop_state_xr.interp_like(response_xr, method='nearest') - state_type_numbers = state_type_numbers_xr.interp_like(response_xr, method='nearest') + lon = lon_xr.interp_like(response_xr) + lat = lat_xr.interp_like(response_xr) + lon_origin = lon_origin_xr.interp_like(response_xr) + lat_origin = lat_origin_xr.interp_like(response_xr) + temperature = temperature_xr.interp_like(response_xr) + humidity = humidity_xr.interp_like(response_xr) + pressure = pressure_xr.interp_like(response_xr) + wind_speed = wind_speed_xr.interp_like(response_xr) + wind_direction = wind_direction_xr.interp_like(response_xr) + 
aste_subref_x = aste_subref_x_xr.interp_like(response_xr) + aste_subref_y = aste_subref_y_xr.interp_like(response_xr) + aste_subref_z = aste_subref_z_xr.interp_like(response_xr) + aste_subref_xt = aste_subref_xt_xr.interp_like(response_xr) + aste_subref_yt = aste_subref_yt_xr.interp_like(response_xr) + aste_subref_zt = aste_subref_zt_xr.interp_like(response_xr) + skychop_state = skychop_state_xr.interp_like( + response_xr, + method="nearest", + ) + state_type_numbers = state_type_numbers_xr.interp_like( + response_xr, + method="nearest", + ) aste_cabin_temperature = np.nan - aste_misti_lon = np.nan - aste_misti_lat = np.nan - aste_misti_pwv = np.nan + aste_misti_lon = np.nan + aste_misti_lat = np.nan + aste_misti_pwv = np.nan - if cabin_path != '' and cabin_path != None: + if cabin_path != "" and cabin_path is not None: aste_cabin_temperature = aste_cabin_temperature_xr.interp_like(response_xr) - if misti_path != '' and misti_path != None: + if misti_path != "" and misti_path is not None: aste_misti_lon = aste_misti_lon_xr.interp_like(response_xr) aste_misti_lat = aste_misti_lat_xr.interp_like(response_xr) aste_misti_pwv = aste_misti_pwv_xr.interp_like(response_xr) # 補間後のSTATETYPEを文字列に戻す - state = np.full_like(state_type_numbers, 'GRAD', dtype='<U8') + state = np.full_like(state_type_numbers, "GRAD", dtype="<U8") for state_type, i in state_types.items(): state[state_type_numbers == i] = state_type # ビーム名を設定する(skychopの状態に応じて 1 -> B, 0 -> A) - beam = np.where(skychop_state, 'B', 'A') + beam = np.where(skychop_state, "B", "A") # 静止データの周期に応じてOFFマスクとSCANマスクを設定する if still: - seconds = (times - times[0])/np.timedelta64(1, 's') + seconds = (times - times[0]) / np.timedelta64(1, "s") for i in range(int(seconds[-1]) // period + 1): - off_mask = (period*(2*i) <= seconds) & (seconds < period*(2*i + 1)) - on_mask = (period*(2*i + 1) <= seconds) & (seconds < period*(2*i + 2)) - state[off_mask] = 'OFF' - state[on_mask] = 'SCAN' + # fmt: off + off_mask = ( + (period * (2 * i) <= seconds) + & (seconds < period * (2 * i + 1)) + ) + on_mask = ( + (period * (2 * i + 1) <= seconds) + & (seconds < period * (2 * i + 2)) + ) + # fmt: on + state[off_mask] = "OFF" + state[on_mask] = "SCAN" # shuttle観測のマスクを設定する if shuttle: mask_off = (lon_min_off < lon) & (lon < lon_max_off) - mask_on = (lon_min_on < lon) & (lon < lon_max_on) - state[mask_off] = 'OFF' - state[mask_on] = 'SCAN' - state[(~mask_off) & (~mask_on)] = 'JUNK' + mask_on = (lon_min_on < lon) & (lon < lon_max_on) + state[mask_off] = "OFF" + state[mask_on] = "SCAN" + state[(~mask_off) & (~mask_on)] = "JUNK" # Rとskyの部分を探し、その変化点も含めてJUNKな部分を調べる。 if findR: # Rの部分とその変化の部分を探す indices = np.where(response[:, ch] >= Rth) - state[indices] = 'R' + state[indices] = "R" # cutnum個だけ左右を切り取った配列を作り、互いに異なる部分を探す。そこはおおよそ変化が起きている部分と考えられる。 # @@ -253,112 +356,245 @@ def merge( # 状態がRへ変化する場合と、状態Rから別の状態へ変化する場合でmask_movingのでき方が違う。 # state_cut = state[cutnum:] != state[:-cutnum] - - state_right_shift = np.hstack( [[False]*cutnum, state_cut] ) # 左側をFalseで埋めて右にずらす - state_left_shift = np.hstack( [state_cut, [False]*cutnum] ) # 右側をFalseで埋めて左にずらす - state_R = ( state == 'R' ) + # 左側をFalseで埋めて右にずらす + state_right_shift = np.hstack([[False] * cutnum, state_cut]) + # 右側をFalseで埋めて左にずらす + state_left_shift = np.hstack([state_cut, [False] * cutnum]) + state_R = state == "R" mask_moving = state_R & state_left_shift | state_right_shift - state[mask_moving] = 'JUNK' + state[mask_moving] = "JUNK" - indices = (response[:, ch] > skyth) & (state != 'R') - state[indices] = 'JUNK' + indices = (response[:, ch] > skyth) & (state != "R") + state[indices] = "JUNK" - indices = (response[:, ch] <= skyth) & (state == 
"R") + state[indices] = "JUNK" # SKYの部分とその変化の部分を探す - indices = np.where(response[:, ch] <= skyth) - tmp = state.copy() # 最終的にSKYを残さないためにコピーを扱う - tmp[indices] = 'SKY' # 一時的にSKYをマークする - - tmp_cut = tmp[cutnum:] != tmp[:-cutnum] # cutnum個だけ左右にずらした配列を作り、変化を探す。 - - tmp_right_shift = np.hstack( [[False]*cutnum, tmp_cut] ) - tmp_left_shift = np.hstack( [tmp_cut, [False]*cutnum] ) - tmp_sky = ( tmp == 'SKY' ) - + indices = np.where(response[:, ch] <= skyth) + # 最終的にSKYを残さないためにコピーを扱う + tmp = state.copy() + # 一時的にSKYをマークする + tmp[indices] = "SKY" + + # cutnum個だけ左右にずらした配列を作り、変化を探す。 + tmp_cut = tmp[cutnum:] != tmp[:-cutnum] + tmp_right_shift = np.hstack([[False] * cutnum, tmp_cut]) + tmp_left_shift = np.hstack([tmp_cut, [False] * cutnum]) + tmp_sky = tmp == "SKY" mask_moving = tmp_sky & tmp_left_shift | tmp_right_shift - state[mask_moving] = 'JUNK' # 変化の部分はJUNKに置き換える(Rとは違いSKYは残らない) + # 変化の部分はJUNKに置き換える(Rとは違いSKYは残らない) + state[mask_moving] = "JUNK" return MS.new( - data =response, - long_name =long_name, - units =units, - time =times, - chan =corresp.masterid, - beam =beam, - state =state, - lon =lon, - lat =lat, - lon_origin =lon_origin, - lat_origin =lat_origin, - temperature =temperature, - pressure =pressure, - humidity =humidity, - wind_speed =wind_speed, - wind_direction =wind_direction, - frequency =corresp.kidfreq, - aste_cabin_temperature =aste_cabin_temperature, - aste_subref_x =aste_subref_x, - aste_subref_y =aste_subref_y, - aste_subref_z =aste_subref_z, - aste_subref_xt =aste_subref_xt, - aste_subref_yt =aste_subref_yt, - aste_subref_zt =aste_subref_zt, - aste_misti_lon =aste_misti_lon, - aste_misti_lat =aste_misti_lat, - aste_misti_pwv =aste_misti_pwv, - d2_mkid_id =corresp.index, - d2_mkid_type =corresp.kidtype, - d2_mkid_frequency =corresp.kidfreq, + data=response, + long_name=long_name, + units=units, + time=times, + chan=corresp.masterid, + beam=beam, + state=state, + lon=lon, + lat=lat, + lon_origin=lon_origin, + lat_origin=lat_origin, + temperature=temperature, + pressure=pressure, + humidity=humidity, + wind_speed=wind_speed, + wind_direction=wind_direction, + frequency=corresp.kidfreq, + aste_cabin_temperature=aste_cabin_temperature, + aste_subref_x=aste_subref_x, + aste_subref_y=aste_subref_y, + aste_subref_z=aste_subref_z, + aste_subref_xt=aste_subref_xt, + aste_subref_yt=aste_subref_yt, + aste_subref_zt=aste_subref_zt, + aste_misti_lon=aste_misti_lon, + aste_misti_lat=aste_misti_lat, + aste_misti_pwv=aste_misti_pwv, + d2_mkid_id=corresp.index, + d2_mkid_type=corresp.kidtype, + d2_mkid_frequency=corresp.kidfreq, d2_skychopper_isblocking=skychop_state, - d2_demerge_version =DEMERGE_VERSION, - d2_ddb_version =ddb_version, - beam_major =0.005, # 18 arcsec MergeToDfits()でも固定値が指定されていた - beam_minor =0.005, # 18 arcsec MergeToDfits()でも固定値が指定されていた - beam_pa =0.005, # 18 arcsec MergeToDfits()でも固定値が指定されていた - exposure =1./196, # MergeToDfits()でも固定値が指定されていた - interval =1./196, # MergeToDfits()でも固定値が指定されていた - observation =obsinst_params['observation'], - observer =obsinst_params['observer'], - object =obsinst_params['obs_object'], + d2_demerge_version=DEMERGE_VERSION, + d2_ddb_version=ddb_version, + # 18 arcsec MergeToDfits()でも固定値が指定されていた + beam_major=0.005, + # 18 arcsec MergeToDfits()でも固定値が指定されていた + beam_minor=0.005, + # 18 arcsec MergeToDfits()でも固定値が指定されていた + beam_pa=0.005, + # MergeToDfits()でも固定値が指定されていた + exposure=1.0 / 196, + # MergeToDfits()でも固定値が指定されていた + interval=1.0 / 196, + observation=obsinst_params["observation"], + observer=obsinst_params["observer"], + 
object=obsinst_params["obs_object"], ) + def cli() -> None: """Demsオブジェクトを作成する""" parser = argparse.ArgumentParser() # 必須引数 - parser.add_argument('filename', type=str, help='出力ファイルへのパスを指定して下さい(.zarr.zip)') - parser.add_argument('--ddb', type=str, required=True, help='DDBファイルへのパスを指定して下さい(.fits.gz)') - parser.add_argument('--corresp', type=str, required=True, help='Master-to-KID ID対応ファイルへのパスを指定して下さい(.json)') - parser.add_argument('--obs', type=str, required=True, help='obsファイルへのパスを指定して下さい(.obs)') - parser.add_argument('--antenna', type=str, required=True, help='antennaファイルへのパスを指定して下さい(.antenna)') - parser.add_argument('--readout', type=str, required=True, help='reduced readoutファイルへのパスを指定して下さい(.fits)') - parser.add_argument('--skychop', type=str, required=True, help='skychopファイルへのパスを指定して下さい(.skychop)') - parser.add_argument('--weather', type=str, required=True, help='weatherファイルへのパスを指定して下さい(.weather)') + parser.add_argument( + "filename", + type=str, + help="出力ファイルへのパスを指定して下さい(.zarr.zip)", + ) + parser.add_argument( + "--ddb", + type=str, + required=True, + help="DDBファイルへのパスを指定して下さい(.fits.gz)", + ) + parser.add_argument( + "--corresp", + type=str, + required=True, + help="Master-to-KID ID対応ファイルへのパスを指定して下さい(.json)", + ) + parser.add_argument( + "--obs", + type=str, + required=True, + help="obsファイルへのパスを指定して下さい(.obs)", + ) + parser.add_argument( + "--antenna", + type=str, + required=True, + help="antennaファイルへのパスを指定して下さい(.antenna)", + ) + parser.add_argument( + "--readout", + type=str, + required=True, + help="reduced readoutファイルへのパスを指定して下さい(.fits)", + ) + parser.add_argument( + "--skychop", + type=str, + required=True, + help="skychopファイルへのパスを指定して下さい(.skychop)", + ) + parser.add_argument( + "--weather", + type=str, + required=True, + help="weatherファイルへのパスを指定して下さい(.weather)", + ) # オプション引数 - parser.add_argument('--misti', type=str, default='', help='mistiファイルへのパスを指定して下さい(.misti)') - parser.add_argument('--cabin', type=str, default='', help='cabinファイルへのパスを指定して下さい(.cabin)') - parser.add_argument('--coordinate', type=str, default='azel', help='座標系(azel/radec)を文字列で指定します') - parser.add_argument('--loadtype', type=str, default='fshift', help='読み込むデータを文字列で指定します(既定値: fshift, fshiftかTsignalを指定できます)') - parser.add_argument('--findR', action='store_true', help='指定するとFindR, Skyを実行します') - parser.add_argument('--ch', type=int, default=0, help='findRに利用するチャネルを整数で指定します') - parser.add_argument('--Rth', type=float, default=280.0, help='R閾値を実数で指定します') - parser.add_argument('--skyth', type=float, default=150.0, help='sky閾値を実数で指定します') - parser.add_argument('--cutnum', type=int, default=1, help='findRでのカット数を整数で指定します') - parser.add_argument('--still', action='store_true', help='指定するとstill観測用の解析を行います') - parser.add_argument('--period', type=int, default=2, help='still観測の1/2周期(秒)を整数で指定します') - parser.add_argument('--shuttle', action='store_true', help='指定するとshuttle観測用の解析を行います') - parser.add_argument('--lon_min_off', type=float, default=0.0, help='shuttle観測時のOFFにするlongitudeの最小値を実数で指定します') - parser.add_argument('--lon_max_off', type=float, default=0.0, help='shuttle観測時のOFFにするlongitudeの最大値を実数で指定します') - parser.add_argument('--lon_min_on', type=float, default=0.0, help='shuttle観測時のONにするlongitudeの最小値を実数で指定します') - parser.add_argument('--lon_max_on', type=float, default=0.0, help='shuttle観測時のONにするlongitudeの最大値を実数で指定します') - parser.add_argument('--debug', action='store_true', help='指定すると全ての引数の値をログとして表示します') - - parser.add_argument('--offset_time_antenna', type=int, default=0, help='TODとAntennaログの時刻のずれの補正値(ms)') + 
parser.add_argument( + "--misti", + type=str, + default="", + help="mistiファイルへのパスを指定して下さい(.misti)", + ) + parser.add_argument( + "--cabin", + type=str, + default="", + help="cabinファイルへのパスを指定して下さい(.cabin)", + ) + parser.add_argument( + "--coordinate", + type=str, + default="azel", + help="座標系(azel/radec)を文字列で指定します", + ) + parser.add_argument( + "--loadtype", + type=str, + default="fshift", + help="読み込むデータを文字列で指定します(既定値: fshift, fshiftかTsignalを指定できます)", + ) + parser.add_argument( + "--findR", + action="store_true", + help="指定するとFindR, Skyを実行します", + ) + parser.add_argument( + "--ch", + type=int, + default=0, + help="findRに利用するチャネルを整数で指定します", + ) + parser.add_argument( + "--Rth", + type=float, + default=280.0, + help="R閾値を実数で指定します", + ) + parser.add_argument( + "--skyth", + type=float, + default=150.0, + help="sky閾値を実数で指定します", + ) + parser.add_argument( + "--cutnum", + type=int, + default=1, + help="findRでのカット数を整数で指定します", + ) + parser.add_argument( + "--still", + action="store_true", + help="指定するとstill観測用の解析を行います", + ) + parser.add_argument( + "--period", + type=int, + default=2, + help="still観測の1/2周期(秒)を整数で指定します", + ) + parser.add_argument( + "--shuttle", + action="store_true", + help="指定するとshuttle観測用の解析を行います", + ) + parser.add_argument( + "--lon_min_off", + type=float, + default=0.0, + help="shuttle観測時のOFFにするlongitudeの最小値を実数で指定します", + ) + parser.add_argument( + "--lon_max_off", + type=float, + default=0.0, + help="shuttle観測時のOFFにするlongitudeの最大値を実数で指定します", + ) + parser.add_argument( + "--lon_min_on", + type=float, + default=0.0, + help="shuttle観測時のONにするlongitudeの最小値を実数で指定します", + ) + parser.add_argument( + "--lon_max_on", + type=float, + default=0.0, + help="shuttle観測時のONにするlongitudeの最大値を実数で指定します", + ) + parser.add_argument( + "--debug", + action="store_true", + help="指定すると全ての引数の値をログとして表示します", + ) + parser.add_argument( + "--offset_time_antenna", + type=int, + default=0, + help="TODとAntennaログの時刻のずれの補正値(ms)", + ) # 引数の読み取り a = parser.parse_args() @@ -368,13 +604,13 @@ def cli() -> None: logger.setLevel(DEBUG) basicConfig( - datefmt='%Y-%m-%d %H:%M:%S', - format='[%(asctime)s %(name)s %(levelname)s] %(message)s', + datefmt="%Y-%m-%d %H:%M:%S", + format="[%(asctime)s %(name)s %(levelname)s] %(message)s", ) # 引数と値をロガーに記録 for key, val in vars(a).items(): - logger.debug(f'{key}: {val!r}') + logger.debug(f"{key}: {val!r}") # マージの実行 dems = merge( @@ -385,25 +621,23 @@ def cli() -> None: readout_path=a.readout, skychop_path=a.skychop, weather_path=a.weather, - misti_path =a.misti, - cabin_path =a.cabin, - - coordinate =a.coordinate, - loadtype =a.loadtype, - findR =a.findR, - ch =a.ch, - Rth =a.Rth, - skyth =a.skyth, - cutnum =a.cutnum, - still =a.still, - period =a.period, - shuttle =a.shuttle, + misti_path=a.misti, + cabin_path=a.cabin, + coordinate=a.coordinate, + loadtype=a.loadtype, + findR=a.findR, + ch=a.ch, + Rth=a.Rth, + skyth=a.skyth, + cutnum=a.cutnum, + still=a.still, + period=a.period, + shuttle=a.shuttle, lon_min_off=a.lon_min_off, lon_max_off=a.lon_max_off, - lon_min_on =a.lon_min_on, - lon_max_on =a.lon_max_on, - - offset_time_antenna=a.offset_time_antenna + lon_min_on=a.lon_min_on, + lon_max_on=a.lon_max_on, + offset_time_antenna=a.offset_time_antenna, ) dems.to_zarr(a.filename, mode="w") diff --git a/demerge/merge/utils.py b/demerge/merge/utils.py index 505048b..09140f0 100644 --- a/demerge/merge/utils.py +++ b/demerge/merge/utils.py @@ -8,18 +8,19 @@ 2021 NAITO systems modfied. 2023 NAITO systems modfied. 
""" + __all__ = [ - 'FORM_FITSTIME', - 'FORM_FITSTIME_P', - 'DEFAULT_ROOM_T', - 'create_bintablehdu', - 'load_obsinst', - 'get_maskid_corresp' - 'Tlos_model', - 'convert_readout', - 'convert_asciitime', - 'convert_timestamp', - 'update_corresp', + "FORM_FITSTIME", + "FORM_FITSTIME_P", + "DEFAULT_ROOM_T", + "create_bintablehdu", + "load_obsinst", + "get_maskid_corresp", + "Tlos_model", + "convert_readout", + "convert_asciitime", + "convert_timestamp", + "update_corresp", ] @@ -40,12 +41,12 @@ # constants -FORM_FITSTIME = '%Y-%m-%dT%H:%M:%S' # YYYY-mm-ddTHH:MM:SS -FORM_FITSTIME_P = '%Y-%m-%dT%H:%M:%S.%f' # YYYY-mm-ddTHH:MM:SS.ss +FORM_FITSTIME = "%Y-%m-%dT%H:%M:%S" # YYYY-mm-ddTHH:MM:SS +FORM_FITSTIME_P = "%Y-%m-%dT%H:%M:%S.%f" # YYYY-mm-ddTHH:MM:SS.ss -CABIN_Q_MARGIN = 5*60 # seconds. Margin for cabin data query. -DEFAULT_ROOM_T = 17. + 273. # Kelvin -DEFAULT_AMB_T = 0. + 273. # Kelvin +CABIN_Q_MARGIN = 5 * 60 # seconds. Margin for cabin data query. +DEFAULT_ROOM_T = 17.0 + 273.0 # Kelvin +DEFAULT_AMB_T = 0.0 + 273.0 # Kelvin # constants (master-to-KID correspondence) @@ -59,52 +60,61 @@ def create_bintablehdu(hd): """Create Binary Table HDU from 'hdu_dict'""" header = fits.Header() - for (i, j) in zip(hd['hdr_vals'].items(), hd['hdr_coms'].items()): + for i, j in zip(hd["hdr_vals"].items(), hd["hdr_coms"].items()): header[i[0]] = i[1], j[1] columns = [ fits.Column(name=i[0], format=j[1], array=i[1], unit=k[1]) for (i, j, k) in zip( - hd['col_vals'].items(), - hd['col_form'].items(), - hd['col_unit'].items() + hd["col_vals"].items(), + hd["col_form"].items(), + hd["col_unit"].items(), ) ] hdu = fits.BinTableHDU.from_columns(columns, header) - for i in hd['hdr_coms'].items(): + for i in hd["hdr_coms"].items(): hdu.header.comments[i[0]] = i[1] return hdu + def load_obsinst(obsinst): """Get data for 'OBSINFO'""" - if not '.obs' in obsinst: - raise ValueError('The input file must be an observational instruction!!') + if not ".obs" in obsinst: + raise ValueError("The input file must be an observational instruction!!") - with open(obsinst, 'r') as f: + with open(obsinst, "r") as f: equinox = 2000 # Default parameter for line in f: - if 'SET ANTENNA_G TRK_TYPE' in line: - trktype = line.split()[-1].strip('\'') - elif 'SET ANTENNA_G SRC_NAME' in line: - obs_object = line.split()[-1].strip('\'') - elif 'SET ANTENNA_G SRC_POS' in line: - srcpos = [float(c) for c in line.split()[-1].strip('()').split(',')] - elif 'SET ANTENNA_G EPOCH' in line: - equinox = line.split()[-1].strip('\'JB') - elif 'SET DES OBS_USER' in line: - observer = line.split()[-1].strip('\'') - elif 'SET DES PROJECT' in line: - project = line.split()[-1].strip('\'') - elif 'SET DES PROJECT' in line: - project = line.split()[-1].strip('\'') - elif '% OBS=' in line: - observation = line.split('=')[-1].strip() - if trktype == 'RADEC': - ra = srcpos[0] + if "SET ANTENNA_G TRK_TYPE" in line: + trktype = line.split()[-1].strip("'") + elif "SET ANTENNA_G SRC_NAME" in line: + obs_object = line.split()[-1].strip("'") + elif "SET ANTENNA_G SRC_POS" in line: + srcpos = [float(c) for c in line.split()[-1].strip("()").split(",")] + elif "SET ANTENNA_G EPOCH" in line: + equinox = line.split()[-1].strip("'JB") + elif "SET DES OBS_USER" in line: + observer = line.split()[-1].strip("'") + elif "SET DES PROJECT" in line: + project = line.split()[-1].strip("'") + elif "SET DES PROJECT" in line: + project = line.split()[-1].strip("'") + elif "% OBS=" in line: + observation = line.split("=")[-1].strip() + if trktype == "RADEC": + ra = srcpos[0] dec = 
srcpos[1] else: - ra = 0 + ra = 0 dec = 0 - return {'observer': observer, 'obs_object': obs_object, 'ra': ra, 'dec': dec, 'equinox': equinox, 'project': project, 'observation': observation} + return { + "observer": observer, + "obs_object": obs_object, + "ra": ra, + "dec": dec, + "equinox": equinox, + "project": project, + "observation": observation, + } def get_corresp_frame(ddb: fits.HDUList, corresp_file: str) -> pd.DataFrame: @@ -118,6 +128,7 @@ def get_corresp_frame(ddb: fits.HDUList, corresp_file: str) -> pd.DataFrame: DataFrame of correspondence between KID ID and each KID attribute. """ + def native(array: NDArray[Any]) -> NDArray[Any]: """Convert the byte order of an array to native.""" return array.astype(array.dtype.type) @@ -125,45 +136,45 @@ def native(array: NDArray[Any]) -> NDArray[Any]: frames: list[pd.DataFrame] = [] # DataFrame of KIDDES HDU - data = ddb['KIDDES'].data + data = ddb["KIDDES"].data frame = pd.DataFrame( index=pd.Index( - native(data['masterid']), - name='masterid', + native(data["masterid"]), + name="masterid", ), - data = { - 'kidtype': native(data['attribute']), + data={ + "kidtype": native(data["attribute"]), }, ) frames.append(frame) # DataFrame of KIDFILT HDU - data = ddb['KIDFILT'].data + data = ddb["KIDFILT"].data frame = pd.DataFrame( index=pd.Index( - native(data['masterid']), - name='masterid' + data=native(data["masterid"]), + name="masterid", ), data={ - 'kidfreq': native(data["F_filter, df_filter"][:, 0]), - 'kidQ': native(data['Q_filter, dQ_filter'][:, 0]), + "kidfreq": native(data["F_filter, df_filter"][:, 0]), + "kidQ": native(data["Q_filter, dQ_filter"][:, 0]), }, ) - frame['kidfreq'] *= 1e9 + frame["kidfreq"] *= 1e9 frames.append(frame) # DataFrame of KIDRESP HDU - if 'KIDRESP' in ddb: - data = ddb['KIDRESP'].data + if "KIDRESP" in ddb: + data = ddb["KIDRESP"].data frame = pd.DataFrame( index=pd.Index( - native(data['masterid']), - name='masterid', + data=native(data["masterid"]), + name="masterid", ), data={ - "p0": native(data['cal params'][:, 0]), - "etaf": native(data['cal params'][:, 1]), - "T0": native(data['cal params'][:, 2]), + "p0": native(data["cal params"][:, 0]), + "etaf": native(data["cal params"][:, 1]), + "T0": native(data["cal params"][:, 2]), }, ) frames.append(frame) @@ -177,8 +188,8 @@ def native(array: NDArray[Any]) -> NDArray[Any]: corresp = json.load(f) index = pd.Index( - [corresp.get(str(i), -1) for i in frame.index], - name='kidid' + data=[corresp.get(str(i), -1) for i in frame.index], + name="kidid", ) frame = frame.reset_index().set_index(index) @@ -191,7 +202,7 @@ def native(array: NDArray[Any]) -> NDArray[Any]: def convert_readout( readout: fits.HDUList, corresp: pd.DataFrame, - to: Literal['Tsignal', 'fshift'], + to: Literal["Tsignal", "fshift"], T_room: float, T_amb: float, ): @@ -205,22 +216,22 @@ def convert_readout( T_amb: 外気温(K) """ - kidcols = readout['READOUT'].data.columns[2:].names - linph = np.array([readout['READOUT'].data[n] for n in kidcols]).T[1] - linyfc = np.array(readout['KIDSINFO'].data['yfc, linyfc']).T[1] - Qr = np.array(readout['KIDSINFO'].data['Qr, dQr (Sky)']).T[0] - fr = np.array(readout['KIDSINFO'].data['fr, dfr (Sky)']).T[0] - fr_room = np.array(readout['KIDSINFO'].data['fr, dfr (Room)']).T[0] + kidcols = readout["READOUT"].data.columns[2:].names + linph = np.array([readout["READOUT"].data[n] for n in kidcols]).T[1] + linyfc = np.array(readout["KIDSINFO"].data["yfc, linyfc"]).T[1] + Qr = np.array(readout["KIDSINFO"].data["Qr, dQr (Sky)"]).T[0] + fr = 
np.array(readout["KIDSINFO"].data["fr, dfr (Sky)"]).T[0] + fr_room = np.array(readout["KIDSINFO"].data["fr, dfr (Room)"]).T[0] if np.isnan(fr_room).all(): fshift = (linph - linyfc) / (4.0 * Qr) else: fshift = (linph - linyfc) / (4.0 * Qr) + (fr - fr_room) / fr - if to == 'fshift': + if to == "fshift": return fshift[:, corresp.index.values] - if to == 'Tsignal': + if to == "Tsignal": return Tlos_model( dx=fshift[:, corresp.index.values], p0=corresp.p0.values, @@ -230,25 +241,32 @@ def convert_readout( Tamb=T_amb, ) - raise ValueError(f'Invalid output type: {to}') + raise ValueError(f"Invalid output type: {to}") def Tlos_model(dx, p0, etaf, T0, Troom, Tamb): """Calibrate 'amplitude' and 'phase' to 'power'""" - return (dx + p0*np.sqrt(Troom+T0))**2 / (p0**2 * etaf) - T0/etaf - (1-etaf)/etaf*Tamb + return ( + (dx + p0 * np.sqrt(Troom + T0)) ** 2 / (p0**2 * etaf) + - T0 / etaf + - (1 - etaf) / etaf * Tamb + ) + def convert_asciitime(asciitime, form_fitstime): """Ascii time""" - asciitime = [datetime.strptime('%14.6f' %t, '%Y%m%d%H%M%S.%f') for t in asciitime] + asciitime = [datetime.strptime("%14.6f" % t, "%Y%m%d%H%M%S.%f") for t in asciitime] asciitime = [datetime.strftime(t, form_fitstime) for t in asciitime] return np.array(asciitime) + def convert_timestamp(timestamp): """Timestamp""" timestamp = [datetime.utcfromtimestamp(t) for t in timestamp] timestamp = [datetime.strftime(t, FORM_FITSTIME_P) for t in timestamp] return np.array(timestamp) + def retrieve_cabin_temps(filename=None): """キャビン内温度を取得する @@ -263,29 +281,30 @@ def retrieve_cabin_temps(filename=None): tupleの各要素はnumpy.array。要素数は同じ。 また、ファイル名が空の場合はデフォルト値が入った配列が返される。 """ - if filename=='' or filename==None: + if filename == "" or filename is None: return ( - np.array(["1970-01-01"]).astype('datetime64[ns]'), + np.array(["1970-01-01"]).astype("datetime64[ns]"), np.array([20.0]).astype(np.float64), np.array([20.0]).astype(np.float64), ) - table = ascii.read(filename, format='no_header') + table = ascii.read(filename, format="no_header") # 日付と時刻を取得して文字列でタイムスタンプを作成しそれをnumpy.datetime64へ変換する # テーブルの1列目と2列目がそれぞれ日付と時刻 datetimes = [] - for date, time in zip(table['col1'], table['col2']): - s = '{}T{}'.format(date, time) - s = s.replace('/', '-') + for date, time in zip(table["col1"], table["col2"]): + s = "{}T{}".format(date, time) + s = s.replace("/", "-") datetimes.append(s) - datetimes = np.array(datetimes).astype('datetime64[ns]') - upper_cabin_temps = np.array(table['col3']).astype(np.float64) - lower_cabin_temps = np.array(table['col4']).astype(np.float64) + datetimes = np.array(datetimes).astype("datetime64[ns]") + upper_cabin_temps = np.array(table["col3"]).astype(np.float64) + lower_cabin_temps = np.array(table["col4"]).astype(np.float64) return (datetimes, upper_cabin_temps, lower_cabin_temps) + def retrieve_skychop_states(filename): """skychopファイル(text file)からskychopの時系列状態を取得する @@ -308,18 +327,25 @@ def retrieve_skychop_states(filename): "#"から始まるコメントがファイル冒頭に数行ある。 """ data = None - if filename.endswith('.xz'): - with lzma.open(filename, 'rt') as f: + if filename.endswith(".xz"): + with lzma.open(filename, "rt") as f: data = f.read() else: - with open(filename, 'rt') as f: + with open(filename, "rt") as f: data = f.read() - table = ascii.read(data, guess=False, format='no_header', delimiter=' ', names=['datetime', 'state']) - datetimes = np.array(table['datetime']).astype(np.float64) - states = np.array(table['state']).astype(np.int8) + table = ascii.read( + data, + guess=False, + format="no_header", + delimiter=" ", + 
names=["datetime", "state"], + ) + datetimes = np.array(table["datetime"]).astype(np.float64) + states = np.array(table["state"]).astype(np.int8) return (datetimes, states) + def retrieve_misti_log(filename): """mistiファイルからの時系列データを取得する @@ -343,28 +369,41 @@ def retrieve_misti_log(filename): "#"から始まるコメントがファイル冒頭に数行ある。 """ - if filename=='' or filename==None: - return (np.array([np.nan]).astype('datetime64[ns]'), - np.array([np.nan]).astype(np.float64), - np.array([np.nan]).astype(np.float64), - np.array([np.nan]).astype(np.float64)) + if filename == "" or filename is None: + return ( + np.array([np.nan]).astype("datetime64[ns]"), + np.array([np.nan]).astype(np.float64), + np.array([np.nan]).astype(np.float64), + np.array([np.nan]).astype(np.float64), + ) column_names = [ - 'date', - 'time', - 'az', - 'el', - 'pwv', - 'Tround', + "date", + "time", + "az", + "el", + "pwv", + "Tround", ] - table = ascii.read(filename, guess=False, format='no_header', delimiter=' ', names=column_names) + table = ascii.read( + filename, + guess=False, + format="no_header", + delimiter=" ", + names=column_names, + ) - az = np.array(table['az']).astype(np.float64) - el = np.array(table['el']).astype(np.float64) - pwv = np.array(table['pwv']).astype(np.float64)/1000.0 # umからmmへ変換 + az = np.array(table["az"]).astype(np.float64) + el = np.array(table["el"]).astype(np.float64) + pwv = np.array(table["pwv"]).astype(np.float64) / 1000.0 # umからmmへ変換 datetimes = [] for row in table: - datetimes.append(datetime.strptime('{} {}'.format(row['date'], row['time']), '%Y/%m/%d %H:%M:%S.%f')) + datetimes.append( + datetime.strptime( + "{} {}".format(row["date"], row["time"]), + format="%Y/%m/%d %H:%M:%S.%f", + ) + ) - return (np.array(datetimes).astype('datetime64[ns]'), az, el, pwv) + return np.array(datetimes).astype("datetime64[ns]"), az, el, pwv diff --git a/poetry.lock b/poetry.lock index 32d1f70..68b7c36 100644 --- a/poetry.lock +++ b/poetry.lock @@ -112,6 +112,52 @@ six = ">=1.12.0" astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +[[package]] +name = "black" +version = "24.4.2" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] 
+colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "certifi" version = "2024.6.2" @@ -222,6 +268,20 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -927,6 +987,17 @@ docs = ["sphinx"] gmpy = ["gmpy2 (>=2.1.0a4)"] tests = ["pytest (>=4.6)"] +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "numcodecs" version = "0.12.1" @@ -1113,6 +1184,17 @@ files = [ qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] testing = ["docopt", "pytest"] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + [[package]] name = "pexpect" version = "4.9.0" @@ -1213,6 +1295,22 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa typing = ["typing-extensions"] xmp = ["defusedxml"] +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + [[package]] name = "prompt-toolkit" version = "3.0.47" @@ -1583,6 +1681,17 @@ files = [ {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, ] +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + [[package]] name = "tqdm" version = "4.66.4" @@ -1764,4 +1873,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "d5edfe250880737c9585fb12c63563a049acf107d3011637d5e3afb30ca90917" +content-hash = "de10250114b285995dbe64cb18eb1db1afb89337da61cbacfbd7d09095b598b3" diff --git a/pyproject.toml b/pyproject.toml index fe71df6..38a2370 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ sympy = "^1.10" zarr = "^2.14" [tool.poetry.group.dev.dependencies] +black = "^24.4" deshima-rawdata = "^2024.4.2" ipython = "^8.18" @@ -43,6 +44,9 @@ demerge = "demerge:cli" analyze = "demerge.analysis:cli" merge = "demerge.merge:cli" +[tool.black] +exclude = "^/demerge/analysis/utils" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api"