
Integrate rt130

Merged Lan Dam requested to merge intergrate_rt130 into master
1 file
@@ -3,23 +3,27 @@ RT130 object to hold and process RefTek data
 """
 import os
 from pathlib import Path
-from typing import Tuple, List, Union
+from typing import Union, List, Tuple, Dict
 import traceback
 import numpy as np
 
-from sohstationviewer.model.reftek.reftek_data import core, soh_packet
-from sohstationviewer.model.reftek.log_info import LogInfo
-from sohstationviewer.model.data_type_model import (
-    DataTypeModel, ThreadStopped, ProcessingDataError)
-from sohstationviewer.model.handling_data import read_text
-from sohstationviewer.model.handling_data_reftek import (
-    check_reftek_header, read_reftek_stream)
+from obspy.core import Stream
+
 from sohstationviewer.conf import constants
 from sohstationviewer.controller.util import validate_file
 from sohstationviewer.view.util.enums import LogType
+from sohstationviewer.model.general_data.general_data import \
+    GeneralData, ThreadStopped, ProcessingDataError
+from sohstationviewer.model.general_data.general_data_helper import read_text
+from sohstationviewer.model.reftek_data.reftek_helper import (
+    check_reftek_header, read_reftek_stream,
+    retrieve_gaps_from_stream_header)
+from sohstationviewer.model.reftek_data.reftek_reader import core, soh_packet
+from sohstationviewer.model.reftek_data.log_info import LogInfo
 
 
-class RT130(DataTypeModel):
+class RT130(GeneralData):
     """
     read and process reftek file into object with properties can be used to
     plot SOH data, mass position data, waveform data and gaps
@@ -39,12 +43,39 @@ class RT130(DataTypeModel):
         """
         self.rt130_waveform_data_req: bool = kwarg['rt130_waveform_data_req']
         """
+        stream_header_by_key_chan: stream header by key, chan_id to get key
+            list, gaps by sta_id, nets by sta_id, channels by sta_id
+        """
+        self.stream_header_by_key_chan: Dict[str, Dict[str, Stream]] = {}
+        """
+        gaps_by_key_chan: gap list for each key/chan_id to separate data at
+            gaps, overlaps
+        """
+        self.gaps_by_key_chan: Dict[Union[str, Tuple[str, str]],
+                                    Dict[str, List[List[int]]]] = {}
+        """
         found_data_streams: list of data streams found to help inform user
             why the selected data streams don't show up
         """
         self.found_data_streams: List[int] = []
 
         self.processing_data()
 
+    def processing_data(self):
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.read_folder(self.dir)
+
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.selected_key = self.select_key()
+        if self.selected_key is None:
+            raise ThreadStopped()
+
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.finalize_data()
+
     def finalize_data(self):
         """
         This function should be called after all folders finish reading to
@@ -52,6 +83,7 @@ class RT130(DataTypeModel):
         + check not found data stream to give user a warning if needed
         + other tasks in super().finalize_data()
         """
+        self.track_info("Finalizing...", LogType.INFO)
         self.track_info(
             "Prepare SOH data from log data", LogType.INFO)
         self.prepare_soh_data_from_log_data()
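
Note on the control flow above: the new processing_data() runs the read in stages and polls the creator thread between stages, so a user cancel unwinds promptly through ThreadStopped. A minimal sketch of that cooperative-cancellation pattern, assuming a Qt-style thread object with isInterruptionRequested(); run_stages and the stage list are illustrative names, not part of this MR:

    # Sketch only: ThreadStopped mirrors the exception imported from
    # general_data; `thread` stands in for self.creator_thread.
    class ThreadStopped(Exception):
        """Raised to abandon processing after a cancel request."""

    def run_stages(thread, stages) -> None:
        for stage in stages:
            # isInterruptionRequested() becomes true once the GUI calls
            # requestInterruption(); polling between stages bounds the
            # cancellation latency to a single stage.
            if thread.isInterruptionRequested():
                raise ThreadStopped()
            stage()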
@@ -64,7 +96,18 @@ class RT130(DataTypeModel):
                    f"{', '.join(map(str, not_found_data_streams))}")
             self.processing_log.append((msg, LogType.WARNING))
-        super().finalize_data()
+        self.sort_all_data()
+        self.combine_all_data()
+        self.apply_convert_factor_to_data_dicts()
+
+        retrieve_gaps_from_stream_header(
+            self.stream_header_by_key_chan, self.gaps_by_key_chan,
+            self.gaps, self.gap_minimum, self.read_start, self.read_end)
+
+        for key in self.data_time:
+            if self.data_time[key] == [constants.HIGHEST_INT, 0]:
+                # this happens when there is text or ascii only in the data
+                self.data_time[key] = [self.read_start, self.read_end]
 
     def read_folder(self, folder: str) -> None:
         """
@@ -86,6 +129,8 @@ class RT130(DataTypeModel):
         total = sum([len(files) for _, _, files in os.walk(self.dir)])
         for folder in folders:
+            if not os.path.isdir(folder):
+                raise ProcessingDataError(f"Path '{folder}' not exist")
             for path, subdirs, files in os.walk(folder):
                 for file_name in files:
                     if self.creator_thread.isInterruptionRequested():
@@ -119,7 +164,11 @@ class RT130(DataTypeModel):
         + If there is more than one, show all keys, let user choose one to
             return.
         """
-        keys = sorted(list(self.stream_header_by_key_chan.keys()))
+        self.keys = sorted(list(set(
+            list(self.soh_data.keys()) +
+            list(self.mass_pos_data.keys()) +
+            list(self.waveform_data.keys()))))
+        keys = self.keys
         if len(keys) == 0:
             msg = 'No required data found for the data set.'
             raise ProcessingDataError(msg)
@@ -206,9 +255,8 @@ class RT130(DataTypeModel):
         cur_key = (rt130._data[0]['unit_id'].decode(),
                    f"{rt130._data[0]['experiment_number']}")
-        self.get_ehet_in_log_data(rt130, cur_key)
+        self.populate_cur_key_for_all_data(cur_key)
+        if data_stream != 9:
+            # don't get event info for mass position
+            self.get_ehet_in_log_data(rt130, cur_key)
         self.get_mass_pos_data_and_waveform_data(rt130, data_stream, cur_key)
 
     def get_ehet_in_log_data(self, rt130: core.Reftek130,
@@ -311,3 +359,18 @@ class RT130(DataTypeModel):
                 'endTmEpoch': self.data_time[k][1]
             }
             self.soh_data[k][c_name]['tracesInfo'] = [tr]
+
+    def populate_cur_key_for_all_data(self, cur_key: Tuple[str, str]) -> None:
+        """
+        Set up new data set's key for all data
+
+        :param cur_key: current processing key: DAS SN, experiment number
+        """
+        if cur_key not in self.log_data:
+            self.log_data[cur_key] = {}
+            self.soh_data[cur_key] = {}
+            self.mass_pos_data[cur_key] = {}
+            self.waveform_data[cur_key] = {}
+            self.gaps[cur_key] = []
+            self.data_time[cur_key] = [constants.HIGHEST_INT, 0]
+            self.stream_header_by_key_chan[cur_key] = {}
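
populate_cur_key_for_all_data() seeds data_time[cur_key] with [constants.HIGHEST_INT, 0], a deliberately inverted range: any real packet time narrows it through min/max updates, and finalize_data() above falls back to the requested read window when the sentinel survives (the text/ASCII-only case). A sketch of the idea; the sentinel value and helper name are illustrative:

    HIGHEST_INT = 2 ** 31 - 1   # stand-in for constants.HIGHEST_INT

    def update_data_time(data_time: dict, key, start: float, end: float) -> None:
        # Widen the [earliest, latest] window for this data-set key; the
        # inverted sentinel [HIGHEST_INT, 0] loses to any real time.
        window = data_time.setdefault(key, [HIGHEST_INT, 0])
        window[0] = min(window[0], start)
        window[1] = max(window[1], end)

    data_time = {}
    update_data_time(data_time, ('92EB', '0'), 1533455000.0, 1533458600.0)
    print(data_time)    # {('92EB', '0'): [1533455000.0, 1533458600.0]}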