17
17
try :
18
18
import EcoFOCIpy .math .geomag .geomag .geomag as geomag
19
19
import EcoFOCIpy .math .geotools as geotools
20
+
20
21
ECOFOCIPY_AVAILABLE = True
21
22
except ImportError :
22
23
ECOFOCIPY_AVAILABLE = False
@@ -30,7 +31,9 @@ class adcp(object):
30
31
apply magnetic declination corrections, and calculate depth information for bins.
31
32
"""
32
33
33
- def __init__ (self , serial_no : str , deployment_dir : Optional [Union [str , Path ]] = None ):
34
+ def __init__ (
35
+ self , serial_no : str , deployment_dir : Optional [Union [str , Path ]] = None
36
+ ):
34
37
"""
35
38
Initializes the ADCP parser.
36
39
@@ -48,14 +51,18 @@ def __init__(self, serial_no: str, deployment_dir: Optional[Union[str, Path]] =
48
51
self .ein_df : Optional [pd .DataFrame ] = None
49
52
self .scal_df : Optional [pd .DataFrame ] = None
50
53
51
- def _get_filepath (self , extension : str , file_path : Optional [Union [str , Path ]]) -> Path :
54
+ def _get_filepath (
55
+ self , extension : str , file_path : Optional [Union [str , Path ]]
56
+ ) -> Path :
52
57
"""Constructs the full file path or validates an existing one."""
53
58
if file_path :
54
59
p = Path (file_path )
55
60
elif self .deployment_dir :
56
61
p = self .deployment_dir / f"{ self .serial_no } { extension } "
57
62
else :
58
- raise ValueError ("Must provide either a deployment directory or a direct file path." )
63
+ raise ValueError (
64
+ "Must provide either a deployment directory or a direct file path."
65
+ )
59
66
60
67
if not p .exists ():
61
68
raise FileNotFoundError (f"The specified ADCP file does not exist: { p } " )
@@ -88,20 +95,34 @@ def _load_data_file(
88
95
header = None ,
89
96
names = column_names ,
90
97
)
91
- df ["date_time" ] = pd .to_datetime (df ["date" ] + " " + df ["time" ], format = "%y/%m/%d %H:%M:%S" )
98
+ df ["date_time" ] = pd .to_datetime (
99
+ df ["date" ] + " " + df ["time" ], format = "%y/%m/%d %H:%M:%S"
100
+ )
92
101
93
102
if datetime_index :
94
103
df = df .set_index ("date_time" ).drop (columns = ["date" , "time" ])
95
104
96
105
return df
97
106
98
def load_vel_file(
    self, file_path: Optional[Union[str, Path]] = None, datetime_index: bool = True
) -> pd.DataFrame:
    """Load a .VEL (velocity) file into a DataFrame.

    Args:
        file_path: Explicit path to the .VEL file. When omitted, the path is
            built from the deployment directory and serial number.
        datetime_index: If True, index the result by the parsed timestamp.

    Returns:
        pd.DataFrame: Velocity components per bin, also cached on ``self.vel_df``.
    """
    velocity_columns = [
        "date",
        "time",
        "bin",
        "u_curr_comp",
        "v_curr_comp",
        "w_curr_comp",
        "w_curr_comp_err",
    ]
    self.vel_df = self._load_data_file(
        ".VEL", velocity_columns, file_path, datetime_index
    )
    return self.vel_df
103
122
104
def load_pg_file(
    self, file_path: Optional [Union[str, Path]] = None, datetime_index: bool = True
) -> pd.DataFrame:
    """Load a .PG (Percent Good) file into a DataFrame.

    The four percent-good columns are reported per bin; the last one
    ("pg4beam-good", the percentage of measurements with four-beam
    solutions) is useful for QC.

    Args:
        file_path: Explicit path to the .PG file. When omitted, the path is
            built from the deployment directory and serial number.
        datetime_index: If True, index the result by the parsed timestamp.

    Returns:
        pd.DataFrame: Percent-good data per bin, also cached on ``self.pg_df``.
    """
    pg_columns = [
        "date",
        "time",
        "bin",
        "pg3beam-good",
        "pgtransf-good",
        "pg1beam-bad",
        "pg4beam-good",
    ]
    self.pg_df = self._load_data_file(".PG", pg_columns, file_path, datetime_index)
    return self.pg_df
117
146
118
def load_ein_file(
    self, file_path: Optional[Union[str, Path]] = None, datetime_index: bool = True
) -> pd.DataFrame:
    """Load an .EIN (Echo Intensity) file into a DataFrame.

    Args:
        file_path: Explicit path to the .EIN file. When omitted, the path is
            built from the deployment directory and serial number.
        datetime_index: If True, index the result by the parsed timestamp.

    Returns:
        pd.DataFrame: Echo-intensity (AGC) counts per beam, also cached on
        ``self.ein_df``.
    """
    # One AGC column per beam (agc1..agc4).
    agc_columns = ["date", "time", "bin"] + [f"agc{beam}" for beam in range(1, 5)]
    self.ein_df = self._load_data_file(".EIN", agc_columns, file_path, datetime_index)
    return self.ein_df
123
154
124
def load_scal_file(
    self, file_path: Optional[Union[str, Path]] = None, datetime_index: bool = True
) -> pd.DataFrame:
    """Load a .SCA (Scalar) file into a DataFrame.

    Args:
        file_path: Explicit path to the .SCA file. When omitted, the path is
            built from the deployment directory and serial number.
        datetime_index: If True, index the result by the parsed timestamp.

    Returns:
        pd.DataFrame: Scalar instrument data (temperature, attitude and their
        standard deviations), also cached on ``self.scal_df``.
    """
    scalar_columns = [
        "date",
        "time",
        "unknown",
        "temperature",
        "heading",
        "pitch",
        "roll",
        "heading_stdev",
        "pitch_stdev",
        "roll_stdev",
    ]
    self.scal_df = self._load_data_file(
        ".SCA", scalar_columns, file_path, datetime_index
    )
    return self.scal_df
129
173
130
- def load_rpt_file (self , file_path : Optional [Union [str , Path ]] = None ) -> Tuple [List [str ], Dict [str , float ]]:
174
+ def load_rpt_file (
175
+ self , file_path : Optional [Union [str , Path ]] = None
176
+ ) -> Tuple [List [str ], Dict [str , float ]]:
131
177
"""
132
178
Loads a .RPT (Report) file to extract instrument setup parameters.
133
179
@@ -138,7 +184,7 @@ def load_rpt_file(self, file_path: Optional[Union[str, Path]] = None) -> Tuple[L
138
184
Tuple[List[str], Dict[str, float]]: A tuple containing the raw lines of the
139
185
report file and a dictionary of extracted setup parameters.
140
186
"""
141
- full_path = self ._get_filepath (' .RPT' , file_path )
187
+ full_path = self ._get_filepath (" .RPT" , file_path )
142
188
143
189
lines = full_path .read_text ().splitlines ()
144
190
@@ -147,15 +193,17 @@ def load_rpt_file(self, file_path: Optional[Union[str, Path]] = None) -> Tuple[L
147
193
if not parts :
148
194
continue
149
195
if "Bin length" in line :
150
- self .setup [' bin_length' ] = float (parts [2 ])
196
+ self .setup [" bin_length" ] = float (parts [2 ])
151
197
elif "Distance" in line :
152
- self .setup [' distance_to_first_bin' ] = float (parts [4 ])
198
+ self .setup [" distance_to_first_bin" ] = float (parts [4 ])
153
199
elif "Number of bins" in line :
154
- self .setup [' num_of_bins' ] = int (parts [3 ])
200
+ self .setup [" num_of_bins" ] = int (parts [3 ])
155
201
156
202
return lines , self .setup
157
203
158
def mag_dec_corr(
    self, lat: float, lon_w: float, deployment_date: pd.Timestamp
) -> float:
    """Rotate loaded velocities by the local magnetic declination.

    Computes the declination from the geomagnetic model at the given
    position and date, rotates ``u_curr_comp``/``v_curr_comp`` in
    ``self.vel_df`` in place, and returns the declination used.

    Args:
        lat: Deployment latitude in decimal degrees.
        lon_w: Deployment longitude (the name suggests degrees west —
            confirm the sign convention expected by the geomag model).
        deployment_date: Date used to evaluate the magnetic model.

    Returns:
        float: The declination (degrees) applied to the velocity data.

    Raises:
        ImportError: If the optional EcoFOCIpy dependency is unavailable.
        ValueError: If the velocity data (``vel_df``) has not been loaded.
    """
    # Guard the optional dependency first: the model and the rotation helper
    # both come from EcoFOCIpy.
    if not ECOFOCIPY_AVAILABLE:
        raise ImportError(
            "EcoFOCIpy is required for magnetic declination correction but is not installed."
        )
    if self.vel_df is None:
        raise ValueError(
            "Velocity data must be loaded before applying magnetic correction."
        )

    model = geomag.GeoMag()
    declination = model.GeoMag(lat, lon_w, time=deployment_date).dec

    rotated_u, rotated_v = geotools.rotate_coord(
        self.vel_df["u_curr_comp"], self.vel_df["v_curr_comp"], declination
    )
    self.vel_df["u_curr_comp"] = rotated_u
    self.vel_df["v_curr_comp"] = rotated_v

    return declination
194
245
def bins2depth(self, inst_depth: Optional[float] = None):
    """Compute the depth of each ADCP bin from the instrument depth.

    The first bin is ``distance_to_first_bin`` above the instrument and
    successive bins are spaced ``bin_length`` apart; the negative step in
    ``np.arange`` makes the returned depths decrease from that first bin.
    Requires ``self.setup`` to contain ``distance_to_first_bin``,
    ``num_of_bins`` and ``bin_length`` (populated by ``load_rpt_file``).

    Args:
        inst_depth (Optional[float]): Deployment depth of the instrument,
            in the same units as the setup parameters. Required.

    Returns:
        np.ndarray: Depth of each bin, starting at the bin nearest the
        instrument.

    Raises:
        ValueError: If ``inst_depth`` is not provided.
    """
    # Fail with a clear message instead of a TypeError from `None - float`.
    if inst_depth is None:
        raise ValueError("inst_depth is required to compute bin depths.")

    start = inst_depth - self.setup["distance_to_first_bin"]
    stop = start - self.setup["num_of_bins"] * self.setup["bin_length"]

    return np.arange(start, stop, -1 * self.setup["bin_length"])
0 commit comments