Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • software_public/passoft/sohstationviewer
1 result
Show changes
Commits on Source (6)
Showing
with 1710 additions and 1228 deletions
......@@ -10,73 +10,76 @@ from obspy import UTCDateTime
from sohstationviewer.conf import constants as const
from sohstationviewer.view.util.enums import LogType
maxInt = 1E100
maxFloat = 1.0E100
MAX_INT = 1E100
MAX_FLOAT = 1.0E100
# TODO: put this in DB
MassPosVoltRanges = {"regular": [0.5, 2.0, 4.0, 7.0],
"trillium": [0.5, 1.8, 2.4, 3.5]}
MassPosColorPallets = {"B": ["C", "G", "Y", "R", "M"],
"W": ["B", "B", "B", "B", "B"]}
mass_pos_volt_ranges = {"regular": [0.5, 2.0, 4.0, 7.0],
"trillium": [0.5, 1.8, 2.4, 3.5]}
mass_pos_color_pallets = {"B": ["C", "G", "Y", "R", "M"],
"W": ["B", "B", "B", "B", "B"]}
def getMassposValueColors(rangeOpt: str, chan_id: str, cMode: str,
processing_log: List[Tuple[str, LogType]],
retType: str = 'str'
) -> Optional[Union[str, List[Tuple[float, str]]]]:
def get_masspos_value_colors(
range_opt: str, chan_id: str, c_mode: str,
processing_log: List[Tuple[str, LogType]],
ret_type: str = 'str'
) -> Optional[Union[str, List[Tuple[float, str]]]]:
"""
Create a map between value and color based on given rangeOpt and cMode to
Create a map between value and color based on given rangeOpt and c_mode to
display mass position plots.
:param rangeOpt: massPosVoltRangeOpt got from Options Menu - MP coloring
:param range_opt: massPosVoltRangeOpt got from Options Menu - MP coloring
in Main Window to define different values for mass position.
(regular/trillium)
:param chan_id: ID of the channel
:param cMode: color mode (B/W)
:param c_mode: color mode (B/W)
:param processing_log: list of processing info and type
:param retType: request return type
:param ret_type: request return type
:return: [(value, color), (value, color) ...]
if retType is 'str', return "value:color|value:color"
"""
if rangeOpt.lower() not in MassPosVoltRanges.keys():
if range_opt.lower() not in mass_pos_volt_ranges.keys():
processing_log.append(
(
f"{chan_id}: The current selected Mass Position color range is"
f" '{rangeOpt}' isn't allowable. The accept ranges are: "
f"{', '.join(MassPosVoltRanges.keys())}",
f" '{range_opt}' isn't allowable. The accept ranges are: "
f"{', '.join(mass_pos_volt_ranges.keys())}",
LogType.ERROR
)
)
return
massPosVoltRange = MassPosVoltRanges[rangeOpt]
massPosColorPallet = MassPosColorPallets[cMode]
valueColors = []
for i in range(len(massPosVoltRange)):
if retType == 'str':
valueColors.append(
"%s:%s" % (massPosVoltRange[i], massPosColorPallet[i]))
mass_pos_volt_range = mass_pos_volt_ranges[range_opt]
mass_pos_color_pallet = mass_pos_color_pallets[c_mode]
value_colors = []
for i in range(len(mass_pos_volt_range)):
if ret_type == 'str':
value_colors.append(
"%s:%s" % (mass_pos_volt_range[i], mass_pos_color_pallet[i]))
else:
valueColors.append((massPosVoltRange[i], massPosColorPallet[i]))
if i == len(massPosVoltRange) - 1:
if retType == 'str':
valueColors.append(
value_colors.append(
(mass_pos_volt_range[i], mass_pos_color_pallet[i])
)
if i == len(mass_pos_volt_range) - 1:
if ret_type == 'str':
value_colors.append(
"%s:+%s" % (
massPosVoltRange[i], massPosColorPallet[i + 1]))
mass_pos_volt_range[i], mass_pos_color_pallet[i + 1]))
else:
valueColors.append(
(massPosVoltRange[i], massPosColorPallet[i + 1]))
if retType == 'str':
return '|'.join(valueColors)
return valueColors
value_colors.append(
(mass_pos_volt_range[i], mass_pos_color_pallet[i + 1]))
if ret_type == 'str':
return '|'.join(value_colors)
return value_colors
def formatTime(time: Union[UTCDateTime, float], dateMode: str,
timeMode: Optional[str] = None) -> str:
def format_time(time: Union[UTCDateTime, float], date_mode: str,
time_mode: Optional[str] = None) -> str:
"""
Format time according to date_mode and time_mode
:param time: time to be format, can be UTCDateTime or epoch time
:param dateMode: the format of date
:param timeMode: the format of time
:param date_mode: the format of date
:param time_mode: the format of time
:return: the formatted time string
"""
if isinstance(time, UTCDateTime):
......@@ -87,83 +90,83 @@ def formatTime(time: Union[UTCDateTime, float], dateMode: str,
# https://docs.python.org/3/library/datetime.html#
# strftime-and-strptime-format-codes
format = ''
if dateMode == 'YYYY-MM-DD':
if date_mode == 'YYYY-MM-DD':
format = '%Y-%m-%d'
elif dateMode == 'YYYYMMDD':
elif date_mode == 'YYYYMMDD':
format = '%Y%m%d'
elif dateMode == 'YYYY:DOY':
elif date_mode == 'YYYY:DOY':
format = '%Y:%j'
if timeMode == 'HH:MM:SS':
if time_mode == 'HH:MM:SS':
format += " %H:%M:%S"
ret = t.strftime(format)
return ret
def getTitle(key: Union[str, Tuple[str, int]], minTime: float, maxTime: float,
dateMode: str) -> str:
def get_title(key: Union[str, Tuple[str, int]], min_time: float,
max_time: float, date_mode: str) -> str:
"""
Create title for the plot.
:param key: str or (str, str) sta_id for mseed, (unit_id, exp_no) for rt130
:param minTime: start time of the plot
:param maxTime: end time of the plot
:param dateMode: format of date
:param min_time: start time of the plot
:param max_time: end time of the plot
:param date_mode: format of date
:return: title for the plot
"""
diff = maxTime - minTime
diff = max_time - min_time
hours = diff / 3600
return ("%s %s to %s (%s)" %
(key,
formatTime(minTime, dateMode, "HH:MM:SS"),
formatTime(maxTime, dateMode, "HH:MM:SS"),
format_time(min_time, date_mode, "HH:MM:SS"),
format_time(max_time, date_mode, "HH:MM:SS"),
round(hours, 2))
)
def getGaps(gaps: List[List[float]], gapMin: float
) -> List[List[float]]:
def get_gaps(gaps: List[List[float]], gap_min: float
) -> List[List[float]]:
"""
:param gaps: list of gaps
:param gapMin: minimum of gaps count in minutes
:param gap_min: minimum of gaps count in minutes
:return: list of gaps of which gaps smaller than gapMin have been removed
"""
gapMinSec = gapMin * 60
return [g for g in gaps if (g[1] - g[0]) >= gapMinSec]
gap_min_sec = gap_min * 60
return [g for g in gaps if (g[1] - g[0]) >= gap_min_sec]
def getTimeTicks(earliest: float, latest: float, dateFormat: str,
labelTotal: int
) -> Tuple[List[float], List[float], List[str]]:
def get_time_ticks(earliest: float, latest: float, date_format: str,
label_total: int
) -> Tuple[List[float], List[float], List[str]]:
"""
split time range to use for tick labels
Ex: getTimeTicks(1595542860.0, 1595607663.91, 'YYYY-MM-DD', 3)
:param earliest: earliest epoch time
:param latest: latest epoch time
:param dateFormat: (YYYY:DOY, YYYY-MM-DD or YYYYMMMDD)
:param date_format: (YYYY:DOY, YYYY-MM-DD or YYYYMMMDD)
(selected in Menu Options - Date Format)
:param labelTotal: number of time label to be displayed,
:param label_total: number of time label to be displayed,
others will show as ticks oly
:return:
times: list of times to show ticks
majorTimes: list of times for displayed labels
majorTimelabels: list of labels displayed
major_times: list of times for displayed labels
major_time_labels: list of labels displayed
"""
timeRange = latest - earliest
if timeRange >= 2592000.0:
time_range = latest - earliest
if time_range >= 2592000.0:
mode = "DD"
# Time of the nearest midnight before the earliest time.
time = (earliest // 86400.0) * 86400
interval = 864000.0
elif timeRange >= 864000.0:
elif time_range >= 864000.0:
mode = "D"
time = (earliest // 86400.0) * 86400
interval = 86400.0
elif timeRange >= 3600.0:
elif time_range >= 3600.0:
mode = "H"
# Nearest hour.
time = (earliest // 3600.0) * 3600
interval = 3600.0
elif timeRange >= 60.0:
elif time_range >= 60.0:
mode = "M"
# Nearest minute.
time = (earliest // 60.0) * 60
......@@ -173,76 +176,76 @@ def getTimeTicks(earliest: float, latest: float, dateFormat: str,
time = (earliest // 1)
interval = 1.0
times = []
timeLabels = []
time_labels = []
time += interval
while time < latest:
times.append(time)
timeLabel = formatTime(time, dateFormat, 'HH:MM:SS')
time_label = format_time(time, date_format, 'HH:MM:SS')
if mode == "DD" or mode == "D":
timeLabel = timeLabel[:-9]
time_label = time_label[:-9]
elif mode == "H":
timeLabel = timeLabel[:-3]
time_label = time_label[:-3]
elif mode == "M" or mode == "S":
timeLabel = timeLabel
timeLabels.append(timeLabel)
time_label = time_label
time_labels.append(time_label)
time += interval
ln = len(timeLabels)
d = math.ceil(len(timeLabels) / labelTotal)
majorTimes = [times[i] for i in range(ln) if i % d == 0]
majorTimelabels = [timeLabels[i] for i in range(ln) if i % d == 0]
return times, majorTimes, majorTimelabels
ln = len(time_labels)
d = math.ceil(len(time_labels) / label_total)
major_times = [times[i] for i in range(ln) if i % d == 0]
major_time_labels = [time_labels[i] for i in range(ln) if i % d == 0]
return times, major_times, major_time_labels
def getDayTicks() -> Tuple[List[int], List[int], List[str]]:
def get_day_ticks() -> Tuple[List[int], List[int], List[str]]:
"""
Get information for displaying time on plotting widget.
:return:
times: list of indexes of every hour in each_day_5_min_list
majorTimes: list of indexes of every 4 hours in each_day_5_min_list
majorTimeLabels: 2 digit numbers of every 4 hours in a day
major_times: list of indexes of every 4 hours in each_day_5_min_list
major_time_labels: 2 digit numbers of every 4 hours in a day
"""
times = list(range(const.NO_5M_1H, const.NO_5M_DAY, const.NO_5M_1H))
majorTimes = list(range(4 * const.NO_5M_1H,
const.NO_5M_DAY,
4 * const.NO_5M_1H))
majorTimeLabels = ["%02d" % int(t / const.NO_5M_1H) for t in majorTimes]
return times, majorTimes, majorTimeLabels
major_times = list(range(4 * const.NO_5M_1H,
const.NO_5M_DAY,
4 * const.NO_5M_1H))
major_time_labels = ["%02d" % int(t / const.NO_5M_1H) for t in major_times]
return times, major_times, major_time_labels
def getUnitBitweight(chanDB: Dict, bitweightOpt: str) -> str:
def get_unit_bitweight(chan_db_info: Dict, bitweight_opt: str) -> str:
"""
Get the format to diplay value including fixed point decimal and unit based
on the information from the database.
:param chanDB: channel's info got from database
:param bitweightOpt: option for bitweight (none, low, high)
Get the format to display value including fixed point decimal and unit
based on the information from the database.
:param chan_db_info: channel's info got from database
:param bitweight_opt: option for bitweight (none, low, high)
(Menu Options - Q330 Gain)
:return unitBitweight: format for displayed value on the left of each
:return unit_bitweight: format for displayed value on the left of each
plot with fixed point decimal and unit
"""
plotType = chanDB['plotType']
plot_type = chan_db_info['plotType']
# Not all channels use/have this field.
unit = chanDB['unit']
unit = chan_db_info['unit']
try:
fixPoint = chanDB['fixPoint']
fix_point = chan_db_info['fixPoint']
except Exception:
fixPoint = 0
unitBitweight = ''
fix_point = 0
unit_bitweight = ''
if plotType in ['linesDots', 'linesSRate', 'linesMasspos']:
if plot_type in ['linesDots', 'linesSRate', 'linesMasspos']:
if fixPoint == 0:
unitBitweight = "{}%s" % unit
if fix_point == 0:
unit_bitweight = "{}%s" % unit
else:
unitBitweight = "{:.%sf}%s" % (fixPoint, unit)
if chanDB['channel'] == 'SEISMIC':
if fixPoint != 0:
unitBitweight = "{:.%sf}%s" % (fixPoint, unit)
unit_bitweight = "{:.%sf}%s" % (fix_point, unit)
if chan_db_info['channel'] == 'SEISMIC':
if fix_point != 0:
unit_bitweight = "{:.%sf}%s" % (fix_point, unit)
else:
if bitweightOpt in ["low", "high"]:
unitBitweight = "{}V"
if bitweight_opt in ["low", "high"]:
unit_bitweight = "{}V"
else:
unitBitweight = "{}%s" % unit
return unitBitweight
unit_bitweight = "{}%s" % unit
return unit_bitweight
......@@ -18,90 +18,92 @@ from sohstationviewer.model.mseed.mseed import MSeed
from sohstationviewer.database.extract_data import get_signature_channels
from sohstationviewer.model.data_type_model import DataTypeModel
from sohstationviewer.controller.util import validateFile, displayTrackingInfo
from sohstationviewer.controller.util import (
validate_file, display_tracking_info
)
from sohstationviewer.view.util.enums import LogType
def loadData(dataType: str, tracking_box: QTextBrowser, listOfDir: List[str],
reqWFChans: List[str] = [], reqSOHChans: List[str] = [],
readStart: Optional[float] = None,
readEnd: Optional[float] = None) -> DataTypeModel:
def load_data(data_type: str, tracking_box: QTextBrowser, dir_list: List[str],
req_wf_chans: List[str] = [], req_soh_chans: List[str] = [],
read_start: Optional[float] = None,
read_end: Optional[float] = None) -> DataTypeModel:
"""
Load the data stored in listOfDir and store it in a DataTypeModel object.
Load the data stored in list_of_dir and store it in a DataTypeModel object.
The concrete class of the data object is based on dataType. Run on the same
thread as its caller, and so will block the GUI if called on the main
thread. It is advisable to use model.data_loader.DataLoader to load data
unless it is necessary to load data in the main thread (e.g. if there is
a need to access the call stack).
:param dataType: type of data read
:param data_type: type of data read
:param tracking_box: widget to display tracking info
:param listOfDir: list of directories selected by users
:param reqWFChans: requested waveform channel list
:param reqSOHChans: requested soh channel list
:param readStart: start time of read data
:param readEnd: finish time of read data
:return dataObject: object that keep the data read from
listOfDir
:param dir_list: list of directories selected by users
:param req_wf_chans: requested waveform channel list
:param req_soh_chans: requested soh channel list
:param read_start: start time of read data
:param read_end: finish time of read data
:return data_object: object that keep the data read from
list_of_dir
"""
dataObject = None
for d in listOfDir:
if dataObject is None:
data_object = None
for d in dir_list:
if data_object is None:
try:
dataObject = DataTypeModel.create_data_object(
dataType, tracking_box, d,
reqWFChans=reqWFChans, reqSOHChans=reqSOHChans,
readStart=readStart, readEnd=readEnd)
data_object = DataTypeModel.create_data_object(
data_type, tracking_box, d,
req_wf_chans=req_wf_chans, req_soh_chans=req_soh_chans,
read_start=read_start, read_end=read_end)
except Exception:
fmt = traceback.format_exc()
msg = f"Dir {d} can't be read due to error: {str(fmt)}"
displayTrackingInfo(tracking_box, msg, LogType.WARNING)
display_tracking_info(tracking_box, msg, LogType.WARNING)
# if dataObject.hasData():
# if data_object.has_data():
# continue
# If no data can be read from the first dir, throw exception
# raise Exception("No data can be read from ", d)
# TODO: will work with select more than one dir later
# else:
# dataObject.readDir(d)
# data_object.readDir(d)
# return dataObject.plottingData
return dataObject
# return data_object.plottingData
return data_object
def readChannels(tracking_box: QTextBrowser, listOfDir: List[str]
) -> Set[str]:
def read_channels(tracking_box: QTextBrowser, list_of_dir: List[str]
) -> Set[str]:
"""
Scan available channels (to be used in channel preferences dialog). Since
channels for RT130 is hard code, this function won't be applied for it.
:param tracking_box: widget to display tracking info
:param listOfDir: list of directories selected by users
:return dataObject.channels: set of channels present in listofDir
:param list_of_dir: list of directories selected by users
:return data_object.channels: set of channels present in listofDir
"""
dataObject = None
for d in listOfDir:
if dataObject is None:
# dataObject = Reftek.Reftek(parent, d)
# if dataObject.hasData():
data_object = None
for d in list_of_dir:
if data_object is None:
# data_object = Reftek.Reftek(parent, d)
# if data_object.has_data():
# continue
# dataObject = MSeed_Text(parent, d, readChanOnly=True)
dataObject = MSeed(tracking_box, d, readChanOnly=True)
if len(dataObject.channels) == 0:
# data_object = MSeed_Text(parent, d, read_chan_only=True)
data_object = MSeed(tracking_box, d, readChanOnly=True)
if len(data_object.channels) == 0:
# If no data can be read from the first dir, throw exception
raise Exception("No data can be read from ", d)
else:
dataObject.readDir(d, readChanOnly=True)
return dataObject.channels
data_object.readDir(d, readChanOnly=True)
return data_object.channels
def detectDataType(tracking_box: QTextBrowser, listOfDir: List[str]
) -> Optional[str]:
def detect_data_type(tracking_box: QTextBrowser, list_of_dir: List[str]
) -> Optional[str]:
"""
Detect data type for the given directories using getDataTypeFromFile
Detect data type for the given directories using get_data_type_from_file
:param tracking_box: widget to display tracking info
:param listOfDir: list of directories selected by users
:param list_of_dir: list of directories selected by users
:return:
+ if there are more than one data types detected,
return None with a warning message
......@@ -109,47 +111,48 @@ def detectDataType(tracking_box: QTextBrowser, listOfDir: List[str]
return None with a warning message
+ if data type found, return data_type,
"""
sign_chan_dataType_dict = get_signature_channels()
sign_chan_data_type_dict = get_signature_channels()
dirDataTypeDict = {}
for d in listOfDir:
dataType = "Unknown"
dir_data_type_dict = {}
for d in list_of_dir:
data_type = "Unknown"
for path, subdirs, files in os.walk(d):
for fileName in files:
path2file = Path(path).joinpath(fileName)
if not validateFile(path2file, fileName):
for file_name in files:
path2file = Path(path).joinpath(file_name)
if not validate_file(path2file, file_name):
continue
ret = getDataTypeFromFile(path2file, sign_chan_dataType_dict)
ret = get_data_type_from_file(path2file,
sign_chan_data_type_dict)
if ret is not None:
dataType, chan = ret
data_type, chan = ret
break
if dataType != "Unknown":
if data_type != "Unknown":
break
if dataType == "Unknown":
dirDataTypeDict[d] = ("Unknown", '_')
if data_type == "Unknown":
dir_data_type_dict[d] = ("Unknown", '_')
else:
dirDataTypeDict[d] = (dataType, chan)
dataTypeList = {d[0] for d in dirDataTypeDict.values()}
if len(dataTypeList) > 1:
dirDataTypeStr = json.dumps(dirDataTypeDict)
dirDataTypeStr = re.sub(r'\{|\}|"', '', dirDataTypeStr)
dirDataTypeStr = re.sub(r', ', '\n', dirDataTypeStr)
dir_data_type_dict[d] = (data_type, chan)
data_type_list = {d[0] for d in dir_data_type_dict.values()}
if len(data_type_list) > 1:
dir_data_type_str = json.dumps(dir_data_type_dict)
dir_data_type_str = re.sub(r'\{|\}|"', '', dir_data_type_str)
dir_data_type_str = re.sub(r', ', '\n', dir_data_type_str)
msg = (f"There are more than one types of data detected:\n"
f"{dirDataTypeStr}\n\n"
f"{dir_data_type_str}\n\n"
f"Please have only data that related to each other.")
displayTrackingInfo(tracking_box, msg, LogType.ERROR)
display_tracking_info(tracking_box, msg, LogType.ERROR)
return
elif dataTypeList == {'Unknown'}:
elif data_type_list == {'Unknown'}:
msg = ("There are no known data detected.\n"
"Please select different folder(s).")
displayTrackingInfo(tracking_box, msg, LogType.ERROR)
display_tracking_info(tracking_box, msg, LogType.ERROR)
return
return list(dirDataTypeDict.values())[0][0]
return list(dir_data_type_dict.values())[0][0]
def getDataTypeFromFile(
def get_data_type_from_file(
path2file: Path,
sign_chan_data_type_dict: Dict[str, str]
) -> Optional[Tuple[str, str]]:
......
......@@ -16,15 +16,15 @@ import numpy as np
from sohstationviewer.view.util.enums import LogType
def validateFile(path2file: Union[str, Path], fileName: str):
def validate_file(path2file: Union[str, Path], file_name: str):
"""
Check if fileName given is a file and not info file
:param path2file: absolute path to file
:param fileName: name of the file
:param file_name: name of the file
:return: True if pass checking, False if not.
"""
if fileName.strip() == '.DS_Store' or fileName.startswith('._'):
if file_name.strip() == '.DS_Store' or file_name.startswith('._'):
# skip mac's info file
return False
......@@ -34,17 +34,18 @@ def validateFile(path2file: Union[str, Path], fileName: str):
@QtCore.Slot()
def displayTrackingInfo(trackingBox: QTextBrowser, text: str,
type: LogType = LogType.INFO):
def display_tracking_info(tracking_box: QTextBrowser, text: str,
type: LogType = LogType.INFO):
"""
Display text in the given widget with different background and text colors
:param trackingBox: widget to display tracking info
:param tracking_box: widget to display tracking info
:param text: info to be displayed
:param type: (info/warning/error) type of info to be displayed in
different color
"""
if trackingBox is None:
print(text)
return
if tracking_box is None:
print(f"{type.name}: {text}")
return
......@@ -58,129 +59,120 @@ def displayTrackingInfo(trackingBox: QTextBrowser, text: str,
else:
msg['color'] = 'blue'
msg['bgcolor'] = 'white'
htmlText = """<body>
html_text = """<body>
<div style='color:%(color)s; background-color:%(bgcolor)s'>
%(text)s
</div>
</body>"""
trackingBox.setHtml(htmlText % msg)
tracking_box.setHtml(html_text % msg)
# parent.update()
trackingBox.repaint()
tracking_box.repaint()
def getDirSize(dir: str) -> Tuple[int, int]:
def get_dir_size(dir: str) -> Tuple[int, int]:
"""
Get size of directory and size of file.
:param dir: absolute path to directory
:return:
totalSize: total size of the directory
totalFile: total file of the directory
total_size: total size of the directory
total_file: total file of the directory
"""
totalSize = 0
totalFile = 0
total_size = 0
total_file = 0
for path, subdirs, files in os.walk(dir):
for fileName in files:
if not validateFile(os.path.join(path, fileName), fileName):
for file_name in files:
if not validate_file(os.path.join(path, file_name), file_name):
continue
fp = os.path.join(path, fileName)
fp = os.path.join(path, file_name)
# print("file %s: %s" % (fp, os.path.getsize(fp)))
totalSize += os.path.getsize(fp)
totalFile += 1
return totalSize, totalFile
total_size += os.path.getsize(fp)
total_file += 1
return total_size, total_file
def getTime6(timeStr: str) -> Tuple[float, int]:
def get_time_6(time_str: str) -> Tuple[float, int]:
"""
Get time from 6 parts string.
(year:day of year:hour:minute:second:millisecond)
Ex: 01:251:09:41:35:656/ 2001:251:09:41:35:656
in which year in the first part can be 2 digits or 6 digits
:param timeStr: 6 part time string
:return the epoch time and the year of timeStr.
:param time_str: 6 part time string
:return the epoch time and the year of time_str.
"""
year = timeStr.split(':')[0]
year = time_str.split(':')[0]
if len(year) == 2:
return getTime6_2y(timeStr)
return get_time_6_2y(time_str)
else:
return getTime6_4y(timeStr)
return get_time_6_4y(time_str)
def getTime6_2y(timeStr: str) -> Tuple[float, int]:
def get_time_6_2y(time_str: str) -> Tuple[float, int]:
"""
Get time from 6 parts string in which year has 2 digits.
Ex: 01:251:09:41:35:656
:param timeStr: 6 part time string with 2 digits for year
:return the epoch time and the year of timeStr.
:param time_str: 6 part time string with 2 digits for year
:return the epoch time and the year of time_str.
"""
# pad 0 so the last part has 6 digits to match with the format str
timeStr = timeStr.ljust(22, "0")
time = datetime.strptime(timeStr, "%y:%j:%H:%M:%S:%f")
utcTime = UTCDateTime(time)
return utcTime.timestamp, time.year
time_str = time_str.ljust(22, "0")
time = datetime.strptime(time_str, "%y:%j:%H:%M:%S:%f")
utc_time = UTCDateTime(time)
return utc_time.timestamp, time.year
def getTime6_4y(timeStr: str) -> Tuple[float, int]:
def get_time_6_4y(time_str: str) -> Tuple[float, int]:
"""
Get time from 6 parts string in which year has 4 digits.
Ex: 2001:251:09:41:35:656
:param timeStr: 6 part time string with 4 digits for year
:return the epoch time and the year of timeStr.
:param time_str: 6 part time string with 4 digits for year
:return the epoch time and the year of time_str.
"""
# pad 0 so the last part has 6 digits to match with the format str
timeStr = timeStr.ljust(24, "0")
time = datetime.strptime(timeStr, "%Y:%j:%H:%M:%S:%f")
utcTime = UTCDateTime(time)
return utcTime.timestamp, time.year
time_str = time_str.ljust(24, "0")
time = datetime.strptime(time_str, "%Y:%j:%H:%M:%S:%f")
utc_time = UTCDateTime(time)
return utc_time.timestamp, time.year
def getTime4(timeStr: str, trackingYear: int, yAdded: bool
) -> Tuple[float, int, bool]:
def get_time_4(time_str: str, tracking_year: int, y_added: bool
) -> Tuple[float, int, bool]:
"""
Get time from 4 parts string. (day of year:hour:minute:second)
Ex: 253:19:41:42
:param timeStr: time string
:param trackingYear: year that has been detected
:param yAdded: flag to tell if year has been plussed 1 or not
:param time_str: time string
:param tracking_year: year that has been detected
:param y_added: flag to tell if year has been plussed 1 or not
:return:
+ utcTime.timestamp: epoch time
+ utc_time.timestamp: epoch time
+ time.year: year
+ yAdded: flag to tell if year has been plussed 1 or not
+ y_added: flag to tell if year has been plussed 1 or not
"""
if not yAdded:
if not y_added:
# first day => move to next year
doy = int(timeStr.split(':')[0])
doy = int(time_str.split(':')[0])
if doy == 1:
trackingYear += 1
yAdded = True
timeStr = f'{str(trackingYear)}:{timeStr}'
time = datetime.strptime(timeStr, "%Y:%j:%H:%M:%S")
utcTime = UTCDateTime(time)
return utcTime.timestamp, time.year, yAdded
tracking_year += 1
y_added = True
time_str = f'{str(tracking_year)}:{time_str}'
time = datetime.strptime(time_str, "%Y:%j:%H:%M:%S")
utc_time = UTCDateTime(time)
return utc_time.timestamp, time.year, y_added
def getVal(text: str) -> float:
def get_val(text: str) -> float:
"""
Get the value part of a string with non-number substring following.
:param text: value string including unit
:return: value part including +/-, remove str that follows
"""
REVal = '^\+?\-?[0-9]+\.?[0-9]?' # noqa: W605
return float(re.search(REVal, text).group())
def isBinaryStr(text):
"""
:param text: text to check
:return: True if text is a binary string or False if not
"""
return lambda b: bool(b.translate(None, text))
re_val = '^\+?\-?[0-9]+\.?[0-9]?' # noqa: W605
return float(re.search(re_val, text).group())
def rtnPattern(text: str, upper: bool = False) -> str:
def rtn_pattern(text: str, upper: bool = False) -> str:
"""
This function is from logpeek's rtnPattern.
This function is from logpeek's rtn_pattern.
return routine pattern of the string with:
+ 0 for digit
+ a for lowercase
......@@ -207,35 +199,35 @@ def rtnPattern(text: str, upper: bool = False) -> str:
return rtn
def add_thousand_separator(Value: float) -> str:
def add_thousand_separator(value: float) -> str:
"""
This function is from logpeek's fmti
Given Value will be convert to a string integer with thousand separators
:param Value: string of value with unit
:return NewValue: new value with no unit and with thousand separators
"""
Value = int(Value)
if Value > -1000 and Value < 1000:
return str(Value)
Value = str(Value)
NewValue = ""
:param value: string of value with unit
:return new_value: new value with no unit and with thousand separators
"""
value = int(value)
if value > -1000 and value < 1000:
return str(value)
value = str(value)
new_value = ""
# There'll never be a + sign.
if Value[0] == "-":
Offset = 1
if value[0] == "-":
offset = 1
else:
Offset = 0
CountDigits = 0
for i in range(len(Value) - 1, -1 + Offset, -1):
NewValue = Value[i] + NewValue
CountDigits += 1
if CountDigits == 3 and i != 0:
NewValue = "," + NewValue
CountDigits = 0
if Offset != 0:
if NewValue.startswith(","):
NewValue = NewValue[1:]
NewValue = Value[0] + NewValue
return NewValue
offset = 0
count_digits = 0
for i in range(len(value) - 1, -1 + offset, -1):
new_value = value[i] + new_value
count_digits += 1
if count_digits == 3 and i != 0:
new_value = "," + new_value
count_digits = 0
if offset != 0:
if new_value.startswith(","):
new_value = new_value[1:]
new_value = value[0] + new_value
return new_value
def apply_convert_factor(c_data: dict, convert_factor: float):
......
No preview for this file type
......@@ -8,7 +8,7 @@ from typing import Union, List, Optional
from PySide2 import QtCore, QtWidgets
from sohstationviewer.conf import constants
from sohstationviewer.controller.util import displayTrackingInfo
from sohstationviewer.controller.util import display_tracking_info
from sohstationviewer.model.data_type_model import DataTypeModel, ThreadStopped
from sohstationviewer.view.util.enums import LogType
......@@ -37,31 +37,31 @@ class DataLoaderWorker(QtCore.QObject):
self.read_start = read_start
self.read_end = read_end
self.parent_thread = parent_thread
# displayTrackingInfo updates a QtWidget, which can only be done in the
# main thread. Since self.run runs in a background thread, we need to
# use signal-slot mechanism to ensure that displayTrackingInfo runs in
# display_tracking_info updates a QtWidget, which can only be done in
# the read. Since self.run runs in a background thread, we need to use
# signal-slot mechanism to ensure that display_tracking_info runs in
# the main thread.
self.notification.connect(displayTrackingInfo)
self.notification.connect(display_tracking_info)
self.end_msg = None
def run(self):
try:
if self.data_type == 'RT130':
from sohstationviewer.model.reftek.reftek import RT130
ObjectType = RT130
object_type = RT130
else:
from sohstationviewer.model.mseed.mseed import MSeed
ObjectType = MSeed
object_type = MSeed
# Create data object without loading any data in order to connect
# its unpause slot to the loader's unpause signal
dataObject = ObjectType.get_empty_instance()
self.button_chosen.connect(dataObject.receive_pause_response,
data_object = object_type.get_empty_instance()
self.button_chosen.connect(data_object.receive_pause_response,
type=QtCore.Qt.DirectConnection)
dataObject.__init__(
data_object.__init__(
self.tracking_box, self.folder,
reqWFChans=self.req_wf_chans,
reqSOHhans=self.req_soh_chans, readStart=self.read_start,
readEnd=self.read_end, creator_thread=self.parent_thread,
req_wf_chans=self.req_wf_chans,
req_soh_chans=self.req_soh_chans, read_start=self.read_start,
read_end=self.read_end, creator_thread=self.parent_thread,
notification_signal=self.notification,
pause_signal=self.button_dialog
)
......@@ -76,10 +76,11 @@ class DataLoaderWorker(QtCore.QObject):
self.failed.emit()
else:
self.end_msg = f'Finished loading data stored in {self.folder}'
self.finished.emit(dataObject)
self.finished.emit(data_object)
class DataLoader:
class DataLoader(QtCore.QObject):
finished = QtCore.Signal()
"""
The class that coordinate the loading of data using multiple threads. The
code inside has to be encapsulated in a class because a connection between
......@@ -88,6 +89,7 @@ class DataLoader:
"""
def __init__(self):
super().__init__()
self.running = False
self.thread: Optional[QtCore.QThread] = None
self.worker: Optional[DataLoaderWorker] = None
......@@ -110,7 +112,6 @@ class DataLoader:
:param req_soh_chans: list of requested SOH channel
:param read_start: the time before which no data is read
:param read_end: the time after which no data is read
:return:
"""
if self.running:
# TODO: implement showing an error window
......@@ -130,13 +131,13 @@ class DataLoader:
parent_thread=self.thread
)
self.connect_worker_signals()
self.worker.moveToThread(self.thread)
def connect_worker_signals(self):
"""
Connect the signals of the data loader to the appropriate slots.
All connections with a signal of the data loader made after this method
is called will be ignored.
"""
# Connection order from https://realpython.com/python-pyqt-qthread
self.thread.started.connect(self.worker.run)
......@@ -145,9 +146,10 @@ class DataLoader:
self.worker.failed.connect(self.thread.quit)
self.worker.stopped.connect(self.thread.quit)
self.thread.finished.connect(self.thread.deleteLater)
self.thread.finished.connect(self.load_end)
self.thread.finished.connect(self.worker.deleteLater)
self.thread.finished.connect(self.finished)
self.thread.finished.connect(self.thread.deleteLater)
self.worker.button_dialog.connect(self.create_button_dialog)
......@@ -166,8 +168,8 @@ class DataLoader:
Currently does the following:
- Set running state of self to False
"""
displayTrackingInfo(self.worker.tracking_box,
self.worker.end_msg, LogType.INFO)
display_tracking_info(self.worker.tracking_box,
self.worker.end_msg, LogType.INFO)
print(self.worker.end_msg)
self.running = False
......@@ -197,11 +199,11 @@ class DataLoader:
buttons.append(
msg_box.addButton(label, QtWidgets.QMessageBox.ActionRole)
)
abortButton = msg_box.addButton(QtWidgets.QMessageBox.Abort)
abort_button = msg_box.addButton(QtWidgets.QMessageBox.Abort)
msg_box.exec_()
if msg_box.clickedButton() == abortButton:
if msg_box.clickedButton() == abort_button:
# The default choice is the first item, so we default to it if the
# user presses the abort button. An alternative choice is to stop
# when the user presses the abort button.
......
......@@ -7,7 +7,7 @@ from typing import List, Tuple, Dict, Optional, Union
from PySide2 import QtCore
from sohstationviewer.controller.util import displayTrackingInfo
from sohstationviewer.controller.util import display_tracking_info
from sohstationviewer.conf import constants
from sohstationviewer.view.util.enums import LogType
from sohstationviewer.database.process_db import execute_db
......@@ -30,11 +30,11 @@ class ThreadStopped(Exception):
class DataTypeModel():
def __init__(self, trackingBox: QtWidgets.QTextBrowser, folder: str,
readChanOnly: bool = False,
reqWFChans: Union[List[str], List[int]] = [],
reqSOHChans: List[str] = [], readStart: float = 0,
readEnd: float = constants.HIGHEST_INT,
def __init__(self, tracking_box: QtWidgets.QTextBrowser, folder: str,
read_chan_only: bool = False,
req_wf_chans: Union[List[str], List[int]] = [],
req_soh_chans: List[str] = [], read_start: float = 0,
read_end: float = constants.HIGHEST_INT,
creator_thread: Optional[QtCore.QThread] = None,
notification_signal: Optional[QtCore.Signal] = None,
pause_signal: Optional[QtCore.Signal] = None,
......@@ -42,13 +42,13 @@ class DataTypeModel():
"""
Super class for different data type to process data from data files
:param trackingBox: widget to display tracking info
:param tracking_box: widget to display tracking info
:param folder: path to the folder of data
:param readChanOnly: if True, only read for channel name
:param reqWFChans: requested waveform channel list
:param reqSOHChans: requested SOH channel list
:param readStart: requested start time to read
:param readEnd: requested end time to read
:param read_chan_only: if True, only read for channel name
:param req_wf_chans: requested waveform channel list
:param req_soh_chans: requested SOH channel list
:param read_start: requested start time to read
:param read_end: requested end time to read
:param creator_thread: the thread the current DataTypeModel instance is
being created in. If None, the DataTypeModel instance is being
created in the main thread
......@@ -57,13 +57,13 @@ class DataTypeModel():
:param pause_signal: signal used to notify the main thread that the
data loader is paused.
"""
self.trackingBox = trackingBox
self.tracking_box = tracking_box
self.dir = folder
self.reqSOHChans = reqSOHChans
self.reqWFChans = reqWFChans
self.readChanOnly = readChanOnly
self.readStart = readStart
self.readEnd = readEnd
self.req_soh_chans = req_soh_chans
self.req_wf_chans = req_wf_chans
self.read_chan_only = read_chan_only
self.read_start = read_start
self.read_end = read_end
if creator_thread is None:
err_msg = (
'A signal is not None while running in main thread'
......@@ -76,9 +76,9 @@ class DataTypeModel():
self.notification_signal = notification_signal
self.pause_signal = pause_signal
"""
processingLog: [(message, type)] - record the progress of processing
processing_log: [(message, type)] - record the progress of processing
"""
self.processingLog: List[Tuple[str, LogType]] = []
self.processing_log: List[Tuple[str, LogType]] = []
DataKey = Union[Tuple[str, str], str]
......@@ -86,12 +86,12 @@ class DataTypeModel():
Log data: info from log channels, soh messages, text file in dict:
{'TEXT': [str,], key:{chan_id: [str,],},}
In which 'TEXT': is the chan_id given by sohview for text only file.
Note: logData for RT130's dataset has only one channel: SOH
Note: log_data for RT130's dataset has only one channel: SOH
"""
self.logData = {'TEXT': []} # noqa
self.log_data = {'TEXT': []} # noqa
"""
waveformData: data of waveform in dict:
waveform_data: data of waveform in dict:
{set_key: {
'files_info': {
chan_id - str: [{
......@@ -135,10 +135,10 @@ class DataTypeModel():
(np.memmap is used instead of data for waveform because waveform data
are too large and make system crashed when try to merge and process)
"""
self.waveformData = {}
self.waveform_data = {}
"""
SOHData: data for SOH info in dict:
soh_data: data for SOH info in dict:
{set_key - str or (str, str): {
chan_id - str: {
'org_trace': {
......@@ -164,11 +164,11 @@ class DataTypeModel():
}
}
"""
self.SOHData = {}
self.soh_data = {}
"""
massPosData: data for mass position info in the format similar to
SOHData but there are 2 axes for drawing a mass possition channel,
mass_pos_data: data for mass position info in the format similar to
soh_data but there are 2 axes for drawing a mass possition channel,
one is 'ax' to display in PlottingWidget, the other is 'ax_wf' to
display in WaveformWidget.
Channel name for mseed mass position: VP*
......@@ -197,21 +197,21 @@ class DataTypeModel():
}
}
"""
self.massPosData = {}
self.mass_pos_data = {}
"""
dataTime: time range of data sets:
data_time: time range of data sets:
{setKey: [earliestepoch, latestepoch]} - {str: [float, float],}
"""
self.dataTime: Dict[DataKey, List[float]] = {}
self.data_time: Dict[DataKey, List[float]] = {}
"""
The given data may include more than one data set which is station_id
in mseed or (unit_id, exp_no) in reftek. User are allow to choose which
data set to be displayed
selectedKey: str - key of the data set to be displayed
selected_key: str - key of the data set to be displayed
"""
self.selectedKey: Optional[str] = None
self.selected_key: Optional[str] = None
"""
gaps: gaps info in dict:
......@@ -220,16 +220,16 @@ class DataTypeModel():
self.gaps: Dict[DataKey, List[List[float]]] = {}
"""
tmpDir: str - dir to keep memmap files.
tmp_dir: str - dir to keep memmap files.
Will be deleted when object is deleted
"""
self.tmpDir: str = mkdtemp()
self.tmp_dir: str = mkdtemp()
self.save_temp_data_folder_to_database()
try:
os.mkdir(self.tmpDir)
os.mkdir(self.tmp_dir)
except FileExistsError:
shutil.rmtree(self.tmpDir)
os.mkdir(self.tmpDir)
shutil.rmtree(self.tmp_dir)
os.mkdir(self.tmp_dir)
self._pauser = QtCore.QSemaphore()
self.pause_response = None
......@@ -237,77 +237,78 @@ class DataTypeModel():
def __del__(self):
print("delete dataType Object")
try:
shutil.rmtree(self.tmpDir)
shutil.rmtree(self.tmp_dir)
except OSError as e:
self.trackInfo(
"Error deleting %s : %s" % (self.tmpDir, e.strerror),
self.track_info(
"Error deleting %s : %s" % (self.tmp_dir, e.strerror),
LogType.ERROR)
print("Error deleting %s : %s" % (self.tmpDir, e.strerror))
print("Error deleting %s : %s" % (self.tmp_dir, e.strerror))
print("finish deleting")
def hasData(self) -> bool:
def has_data(self) -> bool:
"""
:return bool - True if there is any data can be read.
False if there is no valid data
"""
if (len(self.logData) == 0 and len(self.SOHData) == 0 and
len(self.massPosData) == 0 and len(self.waveformData) == 0):
if (len(self.log_data) == 0 and len(self.soh_data) == 0 and
len(self.mass_pos_data) == 0 and len(self.waveform_data) == 0):
return False
return True
def trackInfo(self, text: str, type: LogType) -> None:
def track_info(self, text: str, type: LogType) -> None:
"""
Display tracking info in tracking_box.
Add all errors/warnings to processing_log.
:param text: str - message to display
:param type: str - type of message (error/warning/info)
"""
# displayTrackingInfo updates a QtWidget, which can only be done in the
# main thread. So, if we are running in a background thread
# display_tracking_info updates a QtWidget, which can only be done in
# the main thread. So, if we are running in a background thread
# (i.e. self.creator_thread is not None), we need to use signal slot
# mechanism to ensure that displayTrackingInfo is run in the main
# mechanism to ensure that display_tracking_info is run in the main
# thread.
if self.notification_signal is None:
displayTrackingInfo(self.trackingBox, text, type)
display_tracking_info(self.tracking_box, text, type)
else:
self.notification_signal.emit(self.trackingBox, text, type)
self.notification_signal.emit(self.tracking_box, text, type)
if type != LogType.INFO:
self.processingLog.append((text, type))
self.processing_log.append((text, type))
@classmethod
def create_data_object(cls, data_type, tracking_box, folder,
readChanOnly=False, reqWFChans=[], reqSOHChans=[],
readStart=0, readEnd=constants.HIGHEST_INT):
read_chan_only=False, req_wf_chans=[],
req_soh_chans=[],
read_start=0, read_end=constants.HIGHEST_INT):
"""
Create a DataTypeModel object, with the concrete class being based on
data_type. Run on the same thread as its caller, and so will block the
GUI if called on the main thread. Do not call this method directly.
Instead, call the wrapper controller.processing.loadData.
Instead, call the wrapper controller.processing.load_data.
:param data_type: str - type of data read
:param tracking_box: QTextBrowser - widget to display tracking info
:param folder: [str,] - the data directory
:param readChanOnly: if True, only read channel name
:param reqWFChans: [str,] - requested waveform channel list
:param reqSOHChans: [str,] - requested soh channel list
:param readStart: [float,] - start time of read data
:param readEnd: [float,] - finish time of read data
:param read_chan_only: if True, only read channel name
:param req_wf_chans: [str,] - requested waveform channel list
:param req_soh_chans: [str,] - requested soh channel list
:param read_start: [float,] - start time of read data
:param read_end: [float,] - finish time of read data
:return: DataTypeModel - object that keep the data read from
folder
"""
if data_type == 'RT130':
from sohstationviewer.model.reftek.reftek import RT130
dataObject = RT130(
tracking_box, folder, readChanOnly=readChanOnly,
reqWFChans=reqWFChans, reqSOHChans=reqSOHChans,
readStart=readStart, readEnd=readEnd)
data_object = RT130(
tracking_box, folder, readChanOnly=read_chan_only,
reqWFChans=req_wf_chans, reqSOHChans=req_soh_chans,
readStart=read_start, readEnd=read_end)
else:
from sohstationviewer.model.mseed.mseed import MSeed
dataObject = MSeed(
tracking_box, folder, readChanOnly=readChanOnly,
reqWFChans=reqWFChans, reqSOHChans=reqSOHChans,
readStart=readStart, readEnd=readEnd)
return dataObject
data_object = MSeed(
tracking_box, folder, readChanOnly=read_chan_only,
reqWFChans=req_wf_chans, reqSOHChans=req_soh_chans,
readStart=read_start, readEnd=read_end)
return data_object
def pause(self) -> None:
"""
......@@ -363,5 +364,5 @@ class DataTypeModel():
return cls.__new__(cls)
def save_temp_data_folder_to_database(self):
execute_db(f'UPDATE PersistentData SET FieldValue="{self.tmpDir}" '
execute_db(f'UPDATE PersistentData SET FieldValue="{self.tmp_dir}" '
f'WHERE FieldName="tempDataDirectory"')
"""
This module provides access to a class that loads data in a separate thread.
"""
import math
import numpy as np
from PySide2 import QtCore
from sohstationviewer.conf import constants as const
# Global flag that determines whether the user requested to stop processing and
# its corresponding global lock.
stop = False
stop_lock = QtCore.QMutex()
class DownsamplerWorkerSignals(QtCore.QObject):
    """
    Container for the signals emitted by DownsamplerWorker.

    QRunnable does not inherit QObject and so cannot declare signals itself;
    this small QObject subclass holds them on the worker's behalf.

    Signals:
        finished: emitted when a DownsamplerWorker instance finishes
            normally; carries the resulting times, data, and log-index
            arrays produced by the worker
        stopped: emitted when a DownsamplerWorker instance is stopped as a
            result of a request from the user
    """
    finished = QtCore.Signal(np.ndarray, np.ndarray, np.ndarray)
    stopped = QtCore.Signal()
class DownsamplerWorker(QtCore.QRunnable):
    """
    Background task that downsamples one data set for plotting.

    Intended to be queued on a QThreadPool; the outcome is reported through
    the signals held in ``self.signals``.
    """

    def __init__(self, times, data, log_indexes=None, rq_points=0,
                 do_downsample=True):
        """
        :param times: the times array to downsample
        :param data: the data array to downsample
        :param log_indexes: the soh message line indices array to downsample
        :param rq_points: the target number of data points after
            downsampling; the actual result may be somewhat larger due to
            how the downsampling algorithm works
        :param do_downsample: whether to downsample at all; lets callers
            with a mix of large and small data sets use a single code path
        """
        super().__init__()
        self.signals = DownsamplerWorkerSignals()
        # Input arrays to be downsampled.
        self.times = times
        self.data = data
        self.log_indexes = log_indexes
        # Approximate upper bound on the size of the downsampled arrays.
        self.rq_points = rq_points
        # When False, the input arrays are passed through unchanged.
        self.do_downsample = do_downsample

    def run(self):
        global stop
        result = (self.times, self.data, self.log_indexes)
        try:
            if self.do_downsample:
                result = downsample(self.times, self.data,
                                    self.log_indexes, self.rq_points)
        except StopRequested:
            # Only one downsampler runs at a time, and a stop request
            # clears every queued worker. This worker is therefore the last
            # reader of the stop flag and can reset it safely.
            stop = False
            self.signals.stopped.emit()
        else:
            if not stop:
                self.signals.finished.emit(*result)
class Downsampler:
    """
    Coordinates the downsampling of several data sets in background threads.

    Intended for the case where many data sets need downsampling at once.
    For a single data set, using DownsamplerWorker directly offers a simpler
    API at the cost of managing the thread yourself.
    """

    def __init__(self):
        self.thread_pool = QtCore.QThreadPool()
        # Run at most one worker at a time. If several workers finished
        # simultaneously, their post-processing could pile up and hang the
        # main thread.
        self.thread_pool.setMaxThreadCount(1)
        self.worker_list = []

    def add_worker(self, times, data, log_indexes=None, rq_points=0,
                   do_downsample=True):
        """
        Register a worker for one data set and return it so that the caller
        can connect its signals before start() is called.

        :param times: the times array to downsample
        :param data: the data array to downsample
        :param log_indexes: the soh message line indices array to downsample
        :param rq_points: the requested size of the downsampled arrays
        :param do_downsample: whether to downsample the given data set.
            True by default
        :return: the newly created DownsamplerWorker
        """
        new_worker = DownsamplerWorker(times, data, log_indexes, rq_points,
                                       do_downsample)
        self.worker_list.append(new_worker)
        return new_worker

    def start(self):
        """
        Queue every registered worker on the internal thread pool; each one
        begins running when its turn comes.
        """
        for queued_worker in self.worker_list:
            self.thread_pool.start(queued_worker)

    def request_stop(self):
        """
        Ask the workers to stop by raising the global stop flag, then drop
        any workers still waiting in the pool's queue.
        """
        global stop, stop_lock
        stop_lock.lock()
        stop = True
        stop_lock.unlock()
        self.thread_pool.clear()
class StopRequested(Exception):
    """
    Raised inside a downsampler worker to signal that it should abandon its
    current computation and begin cleaning up.
    """
def downsample(times, data, log_indexes=None, rq_points=0):
    """
    Reduce the sample count of times and data (and log_indexes when given)
    to roughly rq_points points.

    Because the chunked reduction (chunk_minmax()/constant_rate) is very
    slow, samples whose deviation from the mean is within
    CUT_FROM_MEAN_FACTOR of the maximum deviation are discarded first; the
    chunked reduction only runs if that pre-filter is not enough.

    :param times: numpy array - of a waveform channel's times
    :param data: numpy array - of a waveform channel's data
    :param log_indexes: numpy array - of a waveform channel's soh message
        line index
    :param rq_points: int - requested size to return
    :return np.array, np.array, (np.array) - new times and new data (and
        new log_indexes) with around the requested size
    :raises StopRequested: when the user has asked processing to stop
    """
    global stop, stop_lock
    stop_lock.lock()
    should_stop = stop
    stop_lock.unlock()
    if should_stop:
        raise StopRequested()

    if times.size <= rq_points:
        return times, data, log_indexes

    if log_indexes is None:
        # Waveform channels carry no soh message indices; substitute a
        # dummy array so a single code path handles both cases. This may
        # slow the (large) waveform case down somewhat, since the dummy is
        # processed along with the real arrays.
        log_indexes = np.empty_like(times)

    mean = data.mean()
    largest_deviation = max(abs(data.max()), abs(data.min())) - abs(mean)
    keep = np.where(
        abs(data - mean) > largest_deviation * const.CUT_FROM_MEAN_FACTOR)

    times = times[keep]
    data = data[keep]
    log_indexes = log_indexes[keep]

    if times.size <= rq_points:
        return times, data, log_indexes

    # The pre-filter was not enough; fall back to the chunked min/max
    # reduction.
    return chunk_minmax(times, data, log_indexes, rq_points)
def chunk_minmax(times, data, log_indexes, rq_points):
    """
    Downsample by splitting the arrays into chunks and keeping the minimum
    and maximum data point of each chunk (together with their times and
    log indexes).

    :param times: numpy array - of a channel's times
    :param data: numpy array - of a channel's data
    :param log_indexes: numpy array - of a channel's log_indexes
    :param rq_points: int - requested size to return.
    :return times, data, log_indexes: np.array, np.array, np.array - new
        times, data, and log_indexes with around the requested size
    :raises StopRequested: when the user has asked processing to stop
    """
    global stop, stop_lock
    stop_lock.lock()
    if stop:
        stop_lock.unlock()
        raise StopRequested()
    stop_lock.unlock()

    if times.size <= rq_points:
        return times, data, log_indexes

    if rq_points < 2:
        # Each chunk contributes two points, so fewer than two requested
        # points cannot produce any output.
        return np.empty((1, 0)), np.empty((1, 0)), np.empty((1, 0))

    # Since grabbing the min and max from each chunk, need to div the
    # requested number of points by 2.
    chunk_size = rq_points // 2
    chunk_count = math.ceil(times.size / chunk_size)

    if chunk_count * chunk_size > times.size:
        chunk_count -= 1
        # Length of the trace is not divisible by the number of requested
        # points. So split into an array that is divisible by the requested
        # size, and an array that contains the excess. Downsample both,
        # and combine. This case gives slightly more samples than
        # the requested sample size, but not by much.
        times_0 = times[:chunk_count * chunk_size]
        data_0 = data[:chunk_count * chunk_size]
        log_indexes_0 = log_indexes[:chunk_count * chunk_size]

        excess_times = times[chunk_count * chunk_size:]
        excess_data = data[chunk_count * chunk_size:]
        # Bug fix: this slice was previously taken from `data`, which
        # paired the excess times/data with the wrong log indexes.
        excess_log_indexes = log_indexes[chunk_count * chunk_size:]

        new_times_0, new_data_0, new_log_indexes_0 = downsample(
            times_0, data_0, log_indexes_0, rq_points=rq_points
        )

        # right-most subarray is always smaller than
        # the initially requested number of points.
        excess_times, excess_data, excess_log_indexes = downsample(
            excess_times, excess_data, excess_log_indexes,
            rq_points=chunk_count
        )

        # Combine the two downsampled halves. np.zeros is used (rather than
        # np.concatenate) so the output dtype stays float64 regardless of
        # the input dtype, matching the original behavior.
        new_times = np.zeros(new_times_0.size + excess_times.size)
        new_data = np.zeros(new_data_0.size + excess_data.size)
        new_log_indexes = np.zeros(
            new_log_indexes_0.size + excess_log_indexes.size
        )
        new_times[:new_times_0.size] = new_times_0
        new_data[:new_data_0.size] = new_data_0
        new_log_indexes[:new_log_indexes_0.size] = new_log_indexes_0
        new_times[new_times_0.size:] = excess_times
        new_data[new_data_0.size:] = excess_data
        new_log_indexes[new_log_indexes_0.size:] = excess_log_indexes
        return new_times, new_data, new_log_indexes

    # Reshape so that each row holds one chunk of chunk_count consecutive
    # samples (chunk_size rows in total), then build a boolean mask that
    # keeps the min and max of every row. Selecting through the flattened
    # mask preserves time order.
    new_times = times.reshape(chunk_size, chunk_count)
    new_data = data.reshape(chunk_size, chunk_count)
    new_log_indexes = log_indexes.reshape(chunk_size, chunk_count)

    min_data_idx = np.argmin(new_data, axis=1)
    max_data_idx = np.argmax(new_data, axis=1)
    rows = np.arange(chunk_size)

    mask = np.zeros(shape=(chunk_size, chunk_count), dtype=bool)
    mask[rows, min_data_idx] = True
    mask[rows, max_data_idx] = True

    new_times = new_times[mask]
    new_data = new_data[mask]
    new_log_indexes = new_log_indexes[mask]
    return new_times, new_data, new_log_indexes
def constant_rate(times, data, rq_points):
    """
    Downsample by keeping every n-th sample, regardless of the data values.

    :param times: numpy array of a waveform channel's times
    :param data: numpy array of a waveform channel's data
    :param rq_points: requested size to return.
    :return times, data: new times and new data with around the requested
        size
    """
    if times.size <= rq_points:
        return times, data
    step = int(times.size / rq_points)
    if step == 1:
        return times, data
    picked = np.arange(0, times.size, step)
    return times[picked], data[picked]
This diff is collapsed.
......@@ -3,7 +3,6 @@ function for reading blockettes for ascii encodding
"""
from struct import unpack
from pathlib import Path
class ReadBlocketteError(Exception):
......@@ -11,132 +10,108 @@ class ReadBlocketteError(Exception):
self.msg = msg
def readASCII(path, fileName, byteorder):
"""
test function
"""
file = open(Path(path).joinpath(fileName), 'rb')
databytes = file.read()
file.close()
followingBlktsTotal = unpack('%s%s' % (byteorder, 'B'),
databytes[39:40])[0]
if followingBlktsTotal > 1:
nextBlktByteNo = 48 + 8 # header + blkt1000(SEED info)
else:
nextBlktByteNo = 0
logText = ""
while nextBlktByteNo != 0:
nextBlktByteNo, info = readNextBlkt(
nextBlktByteNo, databytes, byteorder)
logText += info
print("INFO:\n", logText)
# ________________ based on mseedpeek.libtrace.Mseed.blkXXX________________
# https://docs.python.org/3/library/struct.html
# SEED Manual V2.4 - Chapter 8 - Data Record
def readNextBlkt(bNo, databytes, byteorder):
def read_next_blkt(b_no, data_bytes, byte_order):
"""
:param bNo: int - next blockette Byte Number
:param databytes: bytes object - file's data in byte
:param byteorder: str - big/little endian
:param b_no: int - next blockette Byte Number
:param data_bytes: bytes object - file's data in byte
:param byte_order: str - big/little endian
:return:
nextBNo: int - byte number of next blockette
next_b_no: int - byte number of next blockette
info: str - info read from this blockette
"""
blocketteType = unpack('%s%s' % (byteorder, 'H'),
databytes[bNo:bNo + 2])[0]
nextBNo = unpack('%s%s' % (byteorder, 'H'),
databytes[bNo + 2:bNo + 4])[0]
blockette_type = unpack('%s%s' % (byte_order, 'H'),
data_bytes[b_no:b_no + 2])[0]
next_b_no = unpack('%s%s' % (byte_order, 'H'),
data_bytes[b_no + 2:b_no + 4])[0]
try:
# readBlkt will skip first 4 bytes (HH) as they are already read
info = eval("readBlkt%s(%s, %s, '%s')"
% (blocketteType, bNo, databytes, byteorder))
% (blockette_type, b_no, data_bytes, byte_order))
except NameError:
raise ReadBlocketteError(f"Function to read blockette {blocketteType} "
f"isn't implemented yet.")
return nextBNo, info
raise ReadBlocketteError(f"Function to read blockette {blockette_type}"
f" isn't implemented yet.")
return next_b_no, info
def readBlkt500(bNo, databytes, byteorder):
def read_blkt_500(b_no, data_bytes, byte_order):
"""
read blockette 500 (skip first 4 bytes (HH) as they are already read)
:param bNo: int - next blockette Byte Number
:param databytes: bytes object - file's data in byte
:param byteorder: str - big/little endian
:return logText: str - info of blockette 500 in string
:param b_no: int - next blockette Byte Number
:param data_bytes: bytes object - file's data in byte
:param byte_order: str - big/little endian
:return log_text: str - info of blockette 500 in string
"""
logText = "\nVCO Correction: %s" % unpack(
'%s%s' % (byteorder, 'f'), databytes[bNo + 4:bNo + 8])[0]
log_text = "\nVCO Correction: %s" % unpack(
'%s%s' % (byte_order, 'f'), data_bytes[b_no + 4:b_no + 8])[0]
t = {}
(t['year'], t['doy'], t['hour'], t['min'], t['sec'],
junk, t['micro']) = unpack(
'%s%s' % (byteorder, 'HHBBBBH'), databytes[bNo + 8:bNo + 18])
logText += ("\nTime of exception: %(year)s:%(doy)s:%(hour)s:%(min)s:"
"%(sec)s:%(micro)s" % t)
logText += "\nMicro sec: %s" % unpack(
'%s%s' % (byteorder, 'B'), databytes[bNo + 18:bNo + 19])[0]
logText += "\nReception Quality: %s" % unpack(
'%s%s' % (byteorder, 'B'), databytes[bNo + 19:bNo + 20])[0]
logText += "\nException Count: %s" % unpack(
'%s%s' % (byteorder, 'I'), databytes[bNo + 20:bNo + 24])[0]
logText += "\nException Type: %s" % unpack(
'%s%s' % (byteorder, '16s'), databytes[bNo + 24:bNo + 40])[0].strip()
logText += "\nClock Model: %s" % unpack(
'%s%s' % (byteorder, '32s'), databytes[bNo + 40:bNo + 72])[0].strip()
logText += "\nClock Status: %s" % unpack(
'%s%s' % (byteorder, '128s'), databytes[bNo + 72:bNo + 200])[0].strip()
return logText
def readBlkt2000(bNo, databytes, byteorder):
'%s%s' % (byte_order, 'HHBBBBH'), data_bytes[b_no + 8:b_no + 18])
log_text += ("\nTime of exception: %(year)s:%(doy)s:%(hour)s:%(min)s:"
"%(sec)s:%(micro)s" % t)
log_text += "\nMicro sec: %s" % unpack(
'%s%s' % (byte_order, 'B'), data_bytes[b_no + 18:b_no + 19])[0]
log_text += "\nReception Quality: %s" % unpack(
'%s%s' % (byte_order, 'B'), data_bytes[b_no + 19:b_no + 20])[0]
log_text += "\nException Count: %s" % unpack(
'%s%s' % (byte_order, 'I'), data_bytes[b_no + 20:b_no + 24])[0]
log_text += "\nException Type: %s" % unpack(
'%s%s' % (byte_order, '16s'), data_bytes[b_no + 24:b_no + 40])[
0].strip()
log_text += "\nClock Model: %s" % unpack(
'%s%s' % (byte_order, '32s'), data_bytes[b_no + 40:b_no + 72])[
0].strip()
log_text += "\nClock Status: %s" % unpack(
'%s%s' % (byte_order, '128s'), data_bytes[b_no + 72:b_no + 200])[
0].strip()
return log_text
def read_blkt_2000(b_no, data_bytes, byte_order):
"""
read blockette 2000 (skip first 4 bytes (HH) as they are already read)
:param bNo: int - next blockette Byte Number
:param databytes: bytes object - file's data in byte
:param byteorder: str - big/little endian
:return logText: str - info of blockette 2000 in string
:param b_no: int - next blockette Byte Number
:param data_bytes: bytes object - file's data in byte
:param byte_order: str - big/little endian
:return log_text: str - info of blockette 2000 in string
"""
blktLen = unpack(
'%s%s' % (byteorder, 'H'), databytes[bNo + 4:bNo + 6])[0]
logText = "\nTotal Blockette length: %s bytes" % blktLen
logText += "\nOffset to Opaque Data: %s" % unpack(
'%s%s' % (byteorder, 'H'), databytes[bNo + 6:bNo + 8])[0]
logText += "\nRecord number: %s" % unpack(
'%s%s' % (byteorder, 'I'), databytes[bNo + 8:bNo + 12])[0]
logText += "\nData Word order: %s" % unpack(
'%s%s' % (byteorder, 'B'), databytes[bNo + 12:bNo + 13])[0]
logText += "\nOpaque Data flags: %s" % unpack(
'%s%s' % (byteorder, 'B'), databytes[bNo + 13:bNo + 14])[0]
blkt_len = unpack(
'%s%s' % (byte_order, 'H'), data_bytes[b_no + 4:b_no + 6])[0]
log_text = "\nTotal Blockette length: %s bytes" % blkt_len
log_text += "\nOffset to Opaque Data: %s" % unpack(
'%s%s' % (byte_order, 'H'), data_bytes[b_no + 6:b_no + 8])[0]
log_text += "\nRecord number: %s" % unpack(
'%s%s' % (byte_order, 'I'), data_bytes[b_no + 8:b_no + 12])[0]
log_text += "\nData Word order: %s" % unpack(
'%s%s' % (byte_order, 'B'), data_bytes[b_no + 12:b_no + 13])[0]
log_text += "\nOpaque Data flags: %s" % unpack(
'%s%s' % (byte_order, 'B'), data_bytes[b_no + 13:b_no + 14])[0]
opaqueHeaderTotal = unpack(
'%s%s' % (byteorder, 'B'), databytes[bNo + 14:bNo + 15])[0]
logText += "\nNumber of Opaque Header fields: %s" % opaqueHeaderTotal
n = bNo + 15
'%s%s' % (byte_order, 'B'), data_bytes[b_no + 14:b_no + 15])[0]
log_text += "\nNumber of Opaque Header fields: %s" % opaqueHeaderTotal
n = b_no + 15
c = 0
headerLen = 0
header_len = 0
for i in range(opaqueHeaderTotal):
hfield = ''
hchar = ''
while hchar != '~':
hfield += hchar
hchar = unpack(
'%s%s' % (byteorder, '1s'), databytes[n + c:n + c + 1])[
'%s%s' % (byte_order, '1s'), data_bytes[n + c:n + c + 1])[
0].decode()
headerLen = c + 1
header_len = c + 1
c += 1
logText += "\nOpaque Header %s: %s" % (i, hfield)
opaqueDataLength = blktLen - 15 - headerLen
logText += "\nOpaque Data: %s" % unpack(
'%s%s' % (byteorder, '%ss' % opaqueDataLength),
databytes[n + headerLen:n + headerLen + opaqueDataLength])
return logText
log_text += "\nOpaque Header %s: %s" % (i, hfield)
opaque_data_length = blkt_len - 15 - header_len
log_text += "\nOpaque Data: %s" % unpack(
'%s%s' % (byte_order, '%ss' % opaque_data_length),
data_bytes[n + header_len:n + header_len + opaque_data_length])
if __name__ == '__main__':
readASCII(
"/Volumes/UNTITLED/fromCloud/qpeek/5244.sdr", "DT0001__.OCF", ">")
return log_text
......@@ -10,9 +10,9 @@ from typing import Dict, List, Tuple, Callable, Union, Optional
from obspy.core import Stream
from sohstationviewer.controller.util import getTime6
from sohstationviewer.controller.util import get_time_6
from sohstationviewer.model.handling_data import (
readSOHMSeed, readText, checkChan)
read_soh_mseed, read_text, check_chan)
from sohstationviewer.view.util.enums import LogType
......@@ -417,32 +417,32 @@ class MseedHeader(futils):
return 1
def readHdrs(path2file: Path, fileName: str,
SOHStreams: Dict[str, Dict[str, Stream]],
logData: Dict[str, Union[List[str], Dict[str, List[str]]]],
reqSOHChans: List[str], reqWFChans: List[str],
netsProbInFile: Dict[Tuple[str, ...], str],
trackInfo: Callable[[str, LogType], None]
) -> Optional[Dict[str, Union[float, str, bool, List[str]]]]:
def read_hdrs(path2file: Path, file_name: str,
soh_streams: Dict[str, Dict[str, Stream]],
log_data: Dict[str, Union[List[str], Dict[str, List[str]]]],
req_soh_chans: List[str], req_wf_chans: List[str],
nets_in_file: Dict[Tuple[str, ...], str],
track_info: Callable[[str, LogType], None]
) -> Optional[Dict[str, Union[float, str, bool, List[str]]]]:
"""
read headers of a given file build dictionary for quick access
:param path2file: str - path to file
:param fileName: str - name of file
:param SOHStreams: dict - holder for different sets of soh mseed stream
:param logData: dict - holder for logging messages
:param reqSOHChans: list of string - requested SOH channels sent
:param file_name: str - name of file
:param soh_streams: dict - holder for different sets of soh mseed stream
:param log_data: dict - holder for logging messages
:param req_soh_chans: list of string - requested SOH channels sent
from Main Window
:param reqWFChans: list of string - requested waveform channel sent from
:param req_wf_chans: list of string - requested waveform channel sent from
Main Window
:param netsProbInFile: dict - holder for all network
:param trackInfo: function - to do process tracking
:param nets_in_file: dict - holder for all network
:param track_info: function - to do process tracking
:return:
+ if file is mseed but cannot read: raise error
+ if file is mseed but chanType isn't requested, do nothing
+ if file is mseed but chan_type isn't requested, do nothing
+ if file isn't mseed, try to read text file (raise Error if needed)
+ is chanType is SOH, read data to soh_streams
+ if chanType is waveform but there is no reqWFChans: do nothing
+ if chanType is waveform, return header info
+ is chan_type is SOH, read data to soh_streams
+ if chan_type is waveform but there is no reqWFChans: do nothing
+ if chan_type is waveform, return header info
"""
rdfile = MseedHeader(path2file)
......@@ -450,51 +450,51 @@ def readHdrs(path2file: Path, fileName: str,
try:
filesize = rdfile.filesize
blksize = rdfile.blksize
chanID = rdfile.FH.Chan.strip().decode()
chanType = checkChan(chanID, reqSOHChans, reqWFChans)
if not chanType:
chan_id = rdfile.FH.Chan.strip().decode()
chan_type = check_chan(chan_id, req_soh_chans, req_wf_chans)
if not chan_type:
rdfile.close()
return
except Exception:
rdfile.close()
raise Exception("Cannot determine file and block sizes. File: %s"
% fileName)
% file_name)
else:
# not Mseed()
rdfile.close()
readText(path2file, fileName, logData['TEXT'])
read_text(path2file, file_name, log_data['TEXT'])
return
if chanType == 'SOH':
readSOHMSeed(path2file, fileName, SOHStreams,
logData, netsProbInFile, trackInfo)
if chan_type == 'SOH':
read_soh_mseed(path2file, file_name, soh_streams,
log_data, nets_in_file, track_info)
return
if reqWFChans == []:
if req_wf_chans == []:
return
(numblocks, odd_size) = divmod(filesize, blksize)
nets = set()
stats = set()
netStats = set()
chanIDs = set()
net_stats = set()
chan_ids = set()
epochs = []
startTms = []
start_tms = []
# looping over total number of blocks in files
for n in range(numblocks):
rdfile.fixedhdr(n * blksize)
chanID = rdfile.FH.Chan.strip().decode()
chan_id = rdfile.FH.Chan.strip().decode()
chanIDs.add(chanID)
chan_ids.add(chan_id)
nets.add(rdfile.FH.Net.strip().decode())
stats.add(rdfile.FH.Stat.strip().decode())
netStats.add((rdfile.FH.Net.strip().decode(),
rdfile.FH.Stat.strip().decode()))
net_stats.add((rdfile.FH.Net.strip().decode(),
rdfile.FH.Stat.strip().decode()))
t_str = "%d:%03d:%02d:%02d:%02d:%06d" % (
rdfile.FH.Year, rdfile.FH.Day, rdfile.FH.Hour,
rdfile.FH.Min, rdfile.FH.Sec, rdfile.FH.Micro)
startTms.append(t_str)
startepoch, _ = getTime6(t_str)
start_tms.append(t_str)
startepoch, _ = get_time_6(t_str)
endepoch = None
if rdfile.rate != 0:
endepoch = startepoch + rdfile.FH.NumSamp / rdfile.rate
......@@ -504,11 +504,11 @@ def readHdrs(path2file: Path, fileName: str,
rdfile.close()
return {'path2file': path2file,
'fileName': fileName,
'fileName': file_name,
'nets': list(nets),
'stats': sorted(list(stats)),
'netStats': sorted(list(netStats)),
'chanIDs': sorted(list(chanIDs)),
'netStats': sorted(list(net_stats)),
'chanIDs': sorted(list(chan_ids)),
'startEpoch': epochs[0][0],
'endEpoch': epochs[-1][1],
'tracesTotal': len(epochs),
......
......@@ -9,12 +9,12 @@ from typing import Dict, Tuple, List, Set
from obspy.core import Stream
from sohstationviewer.conf import constants
from sohstationviewer.controller.util import validateFile
from sohstationviewer.controller.util import validate_file
from sohstationviewer.model.data_type_model import DataTypeModel, ThreadStopped
from sohstationviewer.model.handling_data import (
readWaveformMSeed, squash_gaps, checkWFChan, sortData, readSOHTrace,
read_waveform_mseed, squash_gaps, check_wf_chan, sort_data, read_soh_trace,
)
from sohstationviewer.model.mseed.from_mseedpeek.mseed_header import readHdrs
from sohstationviewer.model.mseed.from_mseedpeek.mseed_header import read_hdrs
from sohstationviewer.view.util.enums import LogType
......@@ -38,10 +38,10 @@ class MSeed(DataTypeModel):
"""
A file can have more than one experiment which only one of them should
be correct as selected by user (handling_data.py - read_soh_mseed())
netsProbInFile is the dictionary with key is the tuple of all nets
nets_in_file is the dictionary with key is the tuple of all nets
in a file, value is the selected net selected by user.
"""
self.netsProbInFile: Dict[Tuple[str, ...], str] = {}
self.nets_in_file: Dict[Tuple[str, ...], str] = {}
if self.creator_thread.isInterruptionRequested():
raise ThreadStopped()
......@@ -49,12 +49,12 @@ class MSeed(DataTypeModel):
if self.creator_thread.isInterruptionRequested():
raise ThreadStopped()
self.selectedKey = self.selectStaID()
self.selected_key = self.select_sta_id()
if self.selectedKey is None:
if self.selected_key is None:
raise ThreadStopped()
if len(self.reqWFChans) != 0:
self.readWFFiles(self.selectedKey)
if len(self.req_wf_chans) != 0:
self.read_wf_files(self.selected_key)
def read_soh_and_index_waveform(self, folder: str):
"""
......@@ -64,15 +64,15 @@ class MSeed(DataTypeModel):
+ squash gaps from different soh channels to one
:param folder: absolute path to data set folder
"""
self.waveformData, soh_stream = self.index_waveform(folder)
self.SOHData, self.massPosData, self.gaps = self.merge_soh_streams(
self.waveform_data, soh_stream = self.index_waveform(folder)
self.soh_data, self.mass_pos_data, self.gaps = self.merge_soh_streams(
soh_stream)
def index_waveform(self, folder: str
) -> Tuple[Dict, Dict[str, Dict[str, Stream]]]:
"""
:param folder: absolute path to data set folder
:return waveformData: a dict for waveform data including
:return waveform_data: a dict for waveform data including
{sta_id: {'filesInfo': {chan_id: [{filepath,time range,trace's info}]}
'readData': { chan_id: {samplerate
and preset traces_info which is a list of {info and
......@@ -97,17 +97,17 @@ class MSeed(DataTypeModel):
raise ThreadStopped()
path2file = Path(path).joinpath(file_name)
if not validateFile(path2file, file_name):
if not validate_file(path2file, file_name):
continue
count += 1
if count % 50 == 0:
self.trackInfo(
self.track_info(
f'Read {count} file headers/ SOH files', LogType.INFO)
ret = readHdrs(
path2file, file_name, soh_streams, self.logData,
self.reqSOHChans, self.reqWFChans,
self.netsProbInFile, self.trackInfo)
ret = read_hdrs(
path2file, file_name, soh_streams, self.log_data,
self.req_soh_chans, self.req_wf_chans,
self.nets_in_file, self.track_info)
if ret is None:
continue
......@@ -144,7 +144,7 @@ class MSeed(DataTypeModel):
if sta_id not in waveform_data:
waveform_data[sta_id] = {"filesInfo": {},
"readData": {}}
self.dataTime[sta_id] = [constants.HIGHEST_INT, 0]
self.data_time[sta_id] = [constants.HIGHEST_INT, 0]
if chan_id not in waveform_data[sta_id]["filesInfo"]:
waveform_data[sta_id]["filesInfo"][chan_id] = []
waveform_data[sta_id]["readData"][chan_id] = {
......@@ -155,14 +155,14 @@ class MSeed(DataTypeModel):
if len(stat_prop) > 0:
errmsg = (f"More than one stations in a file: {stat_prop}. "
f"Will use the first one.")
self.trackInfo(errmsg, LogType.ERROR)
self.track_info(errmsg, LogType.ERROR)
if len(net_stat_prop) > 0:
errmsg = "More than one netIDs in a file: %s" % net_stat_prop
self.trackInfo(errmsg, LogType.WARNING)
self.track_info(errmsg, LogType.WARNING)
if len(chan_prop) > 0:
errmsg = (f"More than one channels in a file: {chan_prop} "
f"\nThis is a CRITICAL ERROR.")
self.trackInfo(errmsg, LogType.ERROR)
self.track_info(errmsg, LogType.ERROR)
return waveform_data, soh_streams
def merge_soh_streams(self, soh_streams: Dict[str, Dict[str, Stream]]
......@@ -175,7 +175,7 @@ class MSeed(DataTypeModel):
to that station into one data set (by changing network name to the
selected network then merge again)
:return masspos_data: merged data for chan_id with 'VM' prefixed
(structure in DataTypeModel.__init__.massPosData)
(structure in DataTypeModel.__init__.mass_pos_data)
:return soh_data: merged data for other soh chan_ids
(structure in DataTypeModel.__init__SOHData)
:return gaps: start and end of gaps from all merged soh streams
......@@ -189,7 +189,7 @@ class MSeed(DataTypeModel):
all_gaps: List[List[float]] = []
for sta_id in soh_streams:
self.stats.add(sta_id)
self.dataTime[sta_id] = [constants.HIGHEST_INT, 0]
self.data_time[sta_id] = [constants.HIGHEST_INT, 0]
soh_data[sta_id] = {}
masspos_data[sta_id] = {}
for chan_id in soh_streams[sta_id]:
......@@ -222,46 +222,46 @@ class MSeed(DataTypeModel):
gaps_in_stream = stream.get_gaps()
all_gaps += [[g[4].timestamp, g[5].timestamp]
for g in gaps_in_stream]
trace_info = readSOHTrace(tr)
trace_info = read_soh_trace(tr)
if chan_id.startswith('VM'):
masspos_data[sta_id][chan_id] = {
'orgTrace': trace_info}
else:
soh_data[sta_id][chan_id] = {
'orgTrace': trace_info}
self.dataTime[sta_id][0] = min(trace_info['startTmEpoch'],
self.dataTime[sta_id][0])
self.dataTime[sta_id][1] = max(trace_info['endTmEpoch'],
self.dataTime[sta_id][1])
self.data_time[sta_id][0] = min(trace_info['startTmEpoch'],
self.data_time[sta_id][0])
self.data_time[sta_id][1] = max(trace_info['endTmEpoch'],
self.data_time[sta_id][1])
gaps[sta_id] = squash_gaps(all_gaps)
return soh_data, masspos_data, gaps
def selectStaID(self) -> str:
def select_sta_id(self) -> str:
"""
:return selectedStaID: the selected station id from self.stats
:return selected_sta_id: the selected station id from self.stats
+ If there is only one station id, return it.
+ If there is more than one, show all ids, let user choose one to
return.
"""
stats = list(self.stats)
selectedStaID = stats[0]
selected_sta_id = stats[0]
if len(stats) > 1:
msg = ("There are more than one stations in the given data.\n"
"Please select one to display")
self.pause_signal.emit(msg, stats)
self.pause()
selectedStaID = stats[self.pause_response]
selected_sta_id = stats[self.pause_response]
self.trackInfo(f'Select Station {selectedStaID}', LogType.INFO)
return selectedStaID
self.track_info(f'Select Station {selected_sta_id}', LogType.INFO)
return selected_sta_id
def readWFFiles(self, staID: str) -> None:
def read_wf_files(self, sta_id: str) -> None:
"""
From filesInfo, read all waveform data of requested waveform channels
for given sta_id, in the selected time (from Main Window) to add
to waveformData[sat_id]['readData'][chan_id]['tracesInfo'].
to waveform_data[sat_id]['readData'][chan_id]['tracesInfo'].
'tracesInfo' includes list of traces' info dict in startTmEpoch order.
The keys of a trace's info dict:
{
......@@ -272,40 +272,41 @@ class MSeed(DataTypeModel):
'times_f': np.memmap's file path for time
'data_f': np.memmap's file path for data
}
:param staID: station ID
:param sta_id: station ID
"""
count = 0
for chanID in self.waveformData[staID]['filesInfo']:
# check chanID
hasChan = checkWFChan(chanID, self.reqWFChans)
if not hasChan:
for chan_id in self.waveform_data[sta_id]['filesInfo']:
# check chan_id
has_chan = check_wf_chan(chan_id, self.req_wf_chans)
if not has_chan:
continue
tracesInfo = self.waveformData[staID][
'readData'][chanID]['tracesInfo']
traces_info = self.waveform_data[sta_id][
'readData'][chan_id]['tracesInfo']
for fileInfo in self.waveformData[staID]['filesInfo'][chanID]:
for file_info in self.waveform_data[sta_id]['filesInfo'][chan_id]:
if self.creator_thread.isInterruptionRequested():
raise ThreadStopped
# file have been read
if fileInfo['read']:
if file_info['read']:
continue
# check time
hasData = False
has_data = False
if ((self.readStart <= fileInfo['startEpoch'] <= self.readEnd) or # noqa: E501
(self.readStart <= fileInfo['endEpoch'] <= self.readEnd)): # noqa: E501
hasData = True
if not hasData:
if ((self.read_start <= file_info['startEpoch'] <= self.read_end) or # noqa: E501
(self.read_start <= file_info['endEpoch'] <= self.read_end)): # noqa: E501
has_data = True
if not has_data:
continue
readWaveformMSeed(fileInfo['path2file'], fileInfo['fileName'],
staID, chanID, tracesInfo,
self.dataTime[staID], self.tmpDir)
fileInfo['read'] = True
read_waveform_mseed(file_info['path2file'],
file_info['fileName'],
sta_id, chan_id, traces_info,
self.data_time[sta_id], self.tmp_dir)
file_info['read'] = True
count += 1
if count % 50 == 0:
self.trackInfo(
self.track_info(
f'Read {count} waveform files', LogType.INFO)
sortData(self.waveformData)
sort_data(self.waveform_data)
This diff is collapsed.
......@@ -3,8 +3,10 @@ from PySide2 import QtWidgets, QtCore
from sohstationviewer.database.process_db import (
execute_db, trunc_add_db, execute_db_dict)
from sohstationviewer.controller.processing import readChannels, detectDataType
from sohstationviewer.controller.util import displayTrackingInfo
from sohstationviewer.controller.processing import (
read_channels, detect_data_type
)
from sohstationviewer.controller.util import display_tracking_info
from sohstationviewer.view.util.enums import LogType
......@@ -335,12 +337,12 @@ class ChannelPreferDialog(QtWidgets.QWidget):
if not self.validate_row():
return
data_type = detectDataType(self, self.dir_names)
data_type = detect_data_type(self, self.dir_names)
if data_type in self.avail_data_types:
self.data_type_widget.setCurrentText(data_type)
else:
self.data_type_widget.setCurrenText('Unknown')
scanned_channels = readChannels(self, self.dir_names)
scanned_channels = read_channels(self, self.dir_names)
self.id_widget.setText(','.join(scanned_channels))
@QtCore.Slot()
......@@ -379,7 +381,7 @@ class ChannelPreferDialog(QtWidgets.QWidget):
ret = trunc_add_db('ChannelPrefer', sql_list)
if ret is not True:
displayTrackingInfo(self.parent, ret, LogType.ERROR)
display_tracking_info(self.parent, ret, LogType.ERROR)
self.parent.IDs = [
t.strip() for t in self.id_widget.text().split(',')]
self.parent.IDsName = self.name_widget.text().strip()
......
import os
import pathlib
import shutil
from copy import deepcopy
import traceback
from datetime import datetime
from copy import deepcopy
from pathlib import Path
from PySide2 import QtCore, QtWidgets, QtGui
from sohstationviewer.conf.constants import TM_FORMAT
from sohstationviewer.controller.processing import detectDataType
from sohstationviewer.controller.util import displayTrackingInfo
from sohstationviewer.database.process_db import execute_db_dict, execute_db
from sohstationviewer.controller.util import display_tracking_info
from sohstationviewer.model.data_loader import DataLoader
from sohstationviewer.model.data_type_model import DataTypeModel
from sohstationviewer.view.calendar.calendar_dialog import CalendarDialog
from sohstationviewer.view.channel_prefer_dialog import ChannelPreferDialog
from sohstationviewer.view.db_config.channel_dialog import ChannelDialog
from sohstationviewer.view.db_config.data_type_dialog import DataTypeDialog
from sohstationviewer.view.db_config.param_dialog import ParamDialog
from sohstationviewer.view.db_config.plot_type_dialog import PlotTypeDialog
from sohstationviewer.view.file_list_widget import FileListItem
from sohstationviewer.view.plotting.time_power_squared_dialog import (
TimePowerSquaredDialog)
TimePowerSquaredDialog
)
from sohstationviewer.view.plotting.waveform_dialog import WaveformDialog
from sohstationviewer.view.search_message.search_message_dialog import (
SearchMessageDialog)
SearchMessageDialog
)
from sohstationviewer.view.help_view import HelpBrowser
from sohstationviewer.view.ui.main_ui import UIMainWindow
from sohstationviewer.view.util.enums import LogType
from sohstationviewer.view.channel_prefer_dialog import ChannelPreferDialog
from sohstationviewer.controller.processing import detect_data_type
from sohstationviewer.database.process_db import execute_db_dict, execute_db
from sohstationviewer.conf.constants import TM_FORMAT
class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
current_directory_changed = QtCore.Signal(str)
def __init__(self, parent=None):
......@@ -47,6 +53,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
self.data_type = 'Unknown'
self.data_loader = DataLoader()
self.data_loader.finished.connect(self.replot_loaded_data)
"""
req_soh_chans: [str,] - list of State-Of-Health channels to read data
......@@ -99,7 +106,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
"""
bit_weight_opt: str - option for bitweight
"""
self.bit_weight_opt = '' # currently only need one option
self.bit_weight_opt = '' # currently only need one option
self.get_channel_prefer()
self.yyyy_mm_dd_action.triggered.emit()
......@@ -127,6 +134,13 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
self.pull_current_directory_from_db()
self.delete_old_temp_data_folder()
self.has_problem = False
self.is_loading_data = False
self.is_plotting_soh = False
self.is_plotting_waveform = False
self.is_plotting_tps = False
self.is_stopping = False
@QtCore.Slot()
def open_data_type(self):
"""
......@@ -251,6 +265,14 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
Read data from selected files/directories, process and plot channels
read from those according to current options set on the GUI
"""
is_working = (self.is_loading_data or self.is_plotting_soh or
self.is_plotting_waveform or self.is_plotting_tps)
if is_working:
msg = 'Already working'
display_tracking_info(self.tracking_info_text_browser, msg, 'info')
return
self.has_problem = False
try:
del self.data_object
except AttributeError:
......@@ -267,8 +289,8 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
msg = "No directories has been selected."
QtWidgets.QMessageBox.warning(self, "Select directory", msg)
return
self.data_type = detectDataType(self.tracking_info_text_browser,
self.dir_names)
self.data_type = detect_data_type(self.tracking_info_text_browser,
self.dir_names)
if self.data_type is None:
return
......@@ -291,8 +313,8 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
self.req_wf_chans = []
if self.data_type == 'RT130':
req_dss = []
for idx, DSCheckbox in enumerate(self.ds_check_boxes):
if DSCheckbox.isChecked():
for idx, ds_checkbox in enumerate(self.ds_check_boxes):
if ds_checkbox.isChecked():
req_dss.append(idx + 1)
self.req_wf_chans = req_dss
else:
......@@ -313,10 +335,16 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
req_soh_chans=self.req_soh_chans,
read_start=self.start_tm,
read_end=self.end_tm)
self.data_loader.worker.finished.connect(self.plot_data)
self.data_loader.worker.finished.connect(self.data_loaded)
self.data_loader.worker.stopped.connect(self.problem_happened)
self.data_loader.worker.failed.connect(self.problem_happened)
self.data_loader.thread.finished.connect(self.reset_flags)
self.data_loader.connect_worker_signals()
self.is_loading_data = True
self.data_loader.load_data()
@QtCore.Slot()
def stop_load_data(self):
# TODO: find a way to stop the data loader without a long wait.
"""
......@@ -325,20 +353,50 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
very long.
"""
if self.data_loader.running:
self.is_loading_data = False
self.data_loader.thread.requestInterruption()
displayTrackingInfo(self.tracking_info_text_browser,
'Stopping data loading...',
LogType.INFO)
display_tracking_info(self.tracking_info_text_browser,
'Stopping data loading...',
LogType.INFO)
@QtCore.Slot()
def plot_data(self, data_obj: DataTypeModel):
"""
Process the loaded data and pass control to the plotter.
def problem_happened(self):
self.has_problem = True
@QtCore.Slot()
def stop(self):
is_working = (self.is_loading_data or self.is_plotting_soh or
self.is_plotting_waveform or self.is_plotting_tps)
if is_working:
if self.is_stopping:
msg = 'Already stopping'
display_tracking_info(self.tracking_info_text_browser, msg,
LogType.INFO)
return
self.is_stopping = True
if self.is_loading_data:
self.stop_load_data()
if self.is_plotting_soh:
display_tracking_info(self.tracking_info_text_browser,
'Stopping SOH plot...', 'info')
if self.is_plotting_waveform:
display_tracking_info(self.waveform_dlg.info_text_browser,
'Stopping waveform plot...', 'info')
waveform_widget = self.waveform_dlg.plotting_widget
running_processor = waveform_widget.data_processors[0]
running_processor.stopped.connect(self.reset_flags)
self.waveform_dlg.plotting_widget.request_stop()
@QtCore.Slot()
def data_loaded(self, data_obj: DataTypeModel):
"""
Process the loaded data.
:param data_obj: the data object that contains the loaded data.
"""
self.is_loading_data = False
self.data_object = data_obj
self.replot_loaded_data()
@QtCore.Slot()
def replot_loaded_data(self):
......@@ -346,6 +404,9 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
Plot using data from self.data_object with the current options set
from GUI
"""
if self.has_problem:
return
self.is_plotting_soh = True
if self.detect_gap_check_box.isChecked():
self.min_gap = self.gap_len_line_edit.text()
else:
......@@ -354,18 +415,28 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
time_tick_total = 5 # TODO: let user choose max ticks to be displayed
sel_key = do.selectedKey
soh_data = deepcopy(do.SOHData[sel_key])
sel_key = do.selected_key
soh_data = deepcopy(do.soh_data[sel_key])
soh_chans = list(soh_data.keys())
mp_data = deepcopy(do.massPosData[sel_key])
mp_data = deepcopy(do.mass_pos_data[sel_key])
if len(self.req_wf_chans) != 0:
wf_data = deepcopy(do.waveformData[sel_key]['readData'])
wf_data = deepcopy(do.waveform_data[sel_key]['readData'])
else:
wf_data = []
self.plotting_widget.plot_channels(
self.start_tm, self.end_tm, sel_key,
do.dataTime[sel_key], soh_chans, time_tick_total,
soh_data, mp_data, do.gaps[sel_key])
try:
self.plotting_widget.plot_channels(
self.start_tm, self.end_tm, sel_key,
do.data_time[sel_key], soh_chans, time_tick_total,
soh_data, mp_data, do.gaps[sel_key])
except Exception:
print("Failed ")
fmt = traceback.format_exc()
msg = f"Can't plot SOH data due to error: {str(fmt)}"
display_tracking_info(self.tracking_info_text_browser, msg,
LogType.ERROR)
self.reset_flags()
finally:
self.is_plotting_soh = False
peer_plotting_widgets = [self.plotting_widget]
......@@ -376,12 +447,13 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
self.tps_dlg.show()
self.tps_dlg.plotting_widget.plot_channels(
self.start_tm, self.end_tm, sel_key,
do.dataTime[sel_key],
do.data_time[sel_key],
wf_data)
else:
self.tps_dlg.hide()
if self.req_wf_chans != []:
self.is_plotting_waveform = True
# waveformPlot
peer_plotting_widgets.append(self.waveform_dlg.plotting_widget)
self.waveform_dlg.set_data(
......@@ -389,7 +461,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
self.waveform_dlg.show()
self.waveform_dlg.plotting_widget.plot_channels(
self.start_tm, self.end_tm, sel_key,
do.dataTime[sel_key], time_tick_total,
do.data_time[sel_key], time_tick_total,
wf_data, mp_data)
else:
self.waveform_dlg.hide()
......@@ -400,11 +472,26 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
self.tps_dlg.plotting_widget.set_peer_plotting_widgets(
peer_plotting_widgets)
processing_log = do.processingLog + self.plotting_widget.processing_log
processing_log = (
do.processing_log + self.plotting_widget.processing_log
)
self.search_message_dialog.setup_logview(
sel_key, do.logData, processing_log)
sel_key, do.log_data, processing_log)
self.search_message_dialog.show()
@QtCore.Slot()
def reset_flags(self):
"""
Reset the activity flags. Intended to be called when something goes
wrong to reset the state of the program. Each step of the program
should instead reset their own flag when they are finished.
"""
self.is_loading_data = False
self.is_plotting_soh = False
self.is_plotting_waveform = False
self.is_plotting_tps = False
self.is_stopping = False
def set_current_directory(self, path=''):
"""
Update currentDirectory with path in DB table PersistentData.
......@@ -463,15 +550,16 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
if len(rows) > 0 and rows[0]['FieldValue']:
self.set_current_directory(rows[0]['FieldValue'])
def closeEvent(self, event: QtGui.QCloseEvent) -> None:
def closeEvent(self, event: QtGui.QCloseEvent) -> None:
"""
Cleans up when the user exits the program. Currently only clean up
running data loaders.
:param event: parameter of method being overridden
"""
displayTrackingInfo(self.tracking_info_text_browser, 'Cleaning up...',
'info')
display_tracking_info(self.tracking_info_text_browser,
'Cleaning up...',
'info')
if self.data_loader.running:
self.data_loader.thread.requestInterruption()
self.data_loader.thread.quit()
......
# class with all plotting functions
from sohstationviewer.controller.util import getVal
from sohstationviewer.controller.plottingData import getMassposValueColors
from sohstationviewer.controller.util import get_val
from sohstationviewer.controller.plotting_data import get_masspos_value_colors
from sohstationviewer.view.util.color import clr
from sohstationviewer.conf import constants
......@@ -76,7 +76,7 @@ class Plotting:
value_colors = chan_db_info['valueColors'].split('|')
for vc in value_colors:
v, c = vc.split(':')
val = getVal(v)
val = get_val(v)
if c == '_':
prev_val = val
continue
......@@ -144,7 +144,7 @@ class Plotting:
colors = []
for vc in val_cols:
v, c = vc.split(':')
val = getVal(v)
val = get_val(v)
points = [c_data['times'][i]
for i in range(len(c_data['data']))
......@@ -309,7 +309,7 @@ class Plotting:
ax, linked_ax):
"""
Plot multi-color dots with grey line for mass position channel.
Use getMassposValueColors() to get value_colors map based on
Use get_masspos_value_colors() to get value_colors map based on
Menu - MP Coloring selected from Main Window.
:param c_data: dict - data of the channel which includes down-sampled
......@@ -323,10 +323,10 @@ class Plotting:
same axes
:return ax: matplotlib.axes.Axes - axes of the channel
"""
value_colors = getMassposValueColors(
value_colors = get_masspos_value_colors(
self.params.mass_pos_volt_range_opt, chan_id,
self.parent.c_mode, self.parent.processing_log,
retType='tupleList')
ret_type='tupleList')
if value_colors is None:
return
......
......@@ -4,8 +4,8 @@ from matplotlib import pyplot as pl
from matplotlib.backends.backend_qt5agg import (
FigureCanvasQTAgg as Canvas)
from sohstationviewer.controller.plottingData import (
getGaps, getTimeTicks, getUnitBitweight)
from sohstationviewer.controller.plotting_data import (
get_gaps, get_time_ticks, get_unit_bitweight)
from sohstationviewer.conf import constants
......@@ -96,7 +96,7 @@ class PlottingAxes:
:param timestamp_bar: matplotlib.axes.Axes - axes for timestamp_bar
"""
times, major_times, major_time_labels = getTimeTicks(
times, major_times, major_time_labels = get_time_ticks(
self.parent.min_x, self.parent.max_x, self.parent.date_mode,
self.parent.time_ticks_total
)
......@@ -253,7 +253,7 @@ class PlottingAxes:
max_y = y.max()
ax.spines['top'].set_visible(True)
ax.spines['bottom'].set_visible(True)
ax.unit_bw = getUnitBitweight(
ax.unit_bw = get_unit_bitweight(
chan_db_info, self.params.bit_weight_opt
)
self.set_axes_ylim(ax, min_y, max_y)
......@@ -291,7 +291,7 @@ class PlottingAxes:
"""
if self.params.min_gap is None:
return
gaps = getGaps(gaps, float(self.params.min_gap))
gaps = get_gaps(gaps, float(self.params.min_gap))
self.parent.plotting_bot -= 0.003
self.parent.gap_bar = self.create_axes(self.parent.plotting_bot,
0.001,
......
......@@ -12,8 +12,8 @@ from sohstationviewer.view.plotting.plotting_widget.plotting_axes import (
PlottingAxes)
from sohstationviewer.view.plotting.plotting_widget.plotting import Plotting
from sohstationviewer.controller.plottingData import formatTime
from sohstationviewer.controller.util import displayTrackingInfo
from sohstationviewer.controller.plotting_data import format_time
from sohstationviewer.controller.util import display_tracking_info
class PlottingWidget(QtWidgets.QScrollArea):
......@@ -290,13 +290,13 @@ class PlottingWidget(QtWidgets.QScrollArea):
if hasattr(ax, 'unit_bw'):
clicked_data = ax.unit_bw.format(clicked_data)
formatted_clicked_time = formatTime(
formatted_clicked_time = format_time(
clicked_time, self.date_mode, 'HH:MM:SS')
info_str = (f"<pre>Channel: {chan_id} "
f"Point:{click_plot_index + 1} "
f"Time: {formatted_clicked_time} "
f"Value: {clicked_data}</pre>")
displayTrackingInfo(self.tracking_box, info_str)
display_tracking_info(self.tracking_box, info_str)
if 'logIdx' in chan_data.keys():
self.parent.search_message_dialog.show()
......
......@@ -4,15 +4,15 @@ Drawing State-Of-Health channels and mass position
from sohstationviewer.view.util.plot_func_names import plot_functions
from sohstationviewer.view.plotting.plotting_widget import plotting_widget
from sohstationviewer.controller.plottingData import getTitle
from sohstationviewer.controller.plotting_data import get_title
from sohstationviewer.controller.util import (
displayTrackingInfo, apply_convert_factor)
display_tracking_info, apply_convert_factor)
from sohstationviewer.conf import constants
from sohstationviewer.database import extract_data
from sohstationviewer.model.handling_data import trim_downsample_SOHChan
from sohstationviewer.model.handling_data import trim_downsample_soh_chan
from sohstationviewer.view.util.enums import LogType
......@@ -51,7 +51,7 @@ class SOHWidget(plotting_widget.PlottingWidget):
self.min_x = max(data_time[0], start_tm)
self.max_x = min(data_time[1], end_tm)
self.plot_total = len(self.plotting_data1) + len(self.plotting_data2)
title = getTitle(key, self.min_x, self.max_x, self.date_mode)
title = get_title(key, self.min_x, self.max_x, self.date_mode)
self.plotting_bot = constants.BOTTOM
self.plotting_bot_pixel = constants.BOTTOM_PX
self.axes = []
......@@ -76,7 +76,7 @@ class SOHWidget(plotting_widget.PlottingWidget):
if chan_db_info['channel'] == 'DEFAULT':
msg = (f"Channel {chan_id}'s "
f"definition can't be found database.")
displayTrackingInfo(self.tracking_box, msg, LogType.WARNING)
display_tracking_info(self.tracking_box, msg, LogType.WARNING)
if chan_db_info['plotType'] == '':
continue
......@@ -125,8 +125,7 @@ class SOHWidget(plotting_widget.PlottingWidget):
"""
chan_db_info = c_data['chan_db_info']
plot_type = chan_db_info['plotType']
trim_downsample_SOHChan(c_data, self.min_x, self.max_x,
first_time)
trim_downsample_soh_chan(c_data, self.min_x, self.max_x)
apply_convert_factor(c_data, 1)
if 'ax' not in c_data:
linked_ax = None
......
......@@ -7,14 +7,14 @@ from PySide2 import QtWidgets, QtCore
from sohstationviewer.view.plotting.plotting_widget import plotting_widget
from sohstationviewer.view.util.color import clr
from sohstationviewer.controller.plottingData import (
getTitle, getDayTicks, formatTime)
from sohstationviewer.controller.plotting_data import (
get_title, get_day_ticks, format_time)
from sohstationviewer.controller.util import (
displayTrackingInfo, add_thousand_separator
display_tracking_info, add_thousand_separator
)
from sohstationviewer.model.handling_data import (
get_trimTPSData, get_eachDay5MinList, findTPSTm)
get_trim_tps_data, get_each_day_5_min_list, find_tps_tm)
from sohstationviewer.database.extract_data import (
get_color_def, get_color_ranges, get_chan_label)
......@@ -84,7 +84,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
self.min_x = max(data_time[0], start_tm)
self.max_x = min(data_time[1], end_tm)
self.plot_total = len(waveform_data)
title = getTitle(key, self.min_x, self.max_x, self.date_mode)
title = get_title(key, self.min_x, self.max_x, self.date_mode)
self.plotting_bot = const.BOTTOM
self.plotting_bot_pixel = const.BOTTOM_PX
self.axes = []
......@@ -94,7 +94,8 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
self.timestamp_bar_top = self.plotting_axes.add_timestamp_bar(0.)
self.plotting_axes.set_title(title, y=0, v_align='bottom')
self.each_day5_min_list = get_eachDay5MinList(self.min_x, self.max_x)
self.each_day5_min_list = get_each_day_5_min_list(self.min_x,
self.max_x)
for chan_id in self.plotting_data1:
ax = self.get_plot_data(self.plotting_data1[chan_id], chan_id)
self.axes.append(ax)
......@@ -130,8 +131,8 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
"""
if 'tps_data' not in c_data:
# get new minX, maxX according to exact start time of days
get_trimTPSData(c_data, self.min_x, self.max_x,
self.each_day5_min_list)
get_trim_tps_data(c_data, self.min_x, self.max_x,
self.each_day5_min_list)
total_days = c_data['tps_data'].shape[0]
plot_h = self.plotting_axes.get_height(
1.5 * total_days, bw_plots_distance=0.003)
......@@ -255,7 +256,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
color=self.display_color['sub_basic'], linestyle='-')
ax.set_yticks([])
times, major_times, major_time_labels = getDayTicks()
times, major_times, major_time_labels = get_day_ticks()
ax.set_xticks(times, minor=True)
ax.set_xticks(major_times)
ax.set_xticklabels(major_time_labels, fontsize=self.font_size,
......@@ -292,7 +293,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
try:
# identify time for rulers on other plotting widget
self.tps_t = self.each_day5_min_list[day_index, five_min_index]
format_t = formatTime(self.tps_t, self.date_mode, 'HH:MM:SS')
format_t = format_time(self.tps_t, self.date_mode, 'HH:MM:SS')
info_str += f"<pre>{format_t}:"
for chan_id in self.plotting_data1:
c_data = self.plotting_data1[chan_id]
......@@ -300,7 +301,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
info_str += (f" {chan_id}:"
f"{add_thousand_separator(sqrt(data))}")
info_str += " (counts)</pre>"
displayTrackingInfo(self.tracking_box, info_str)
display_tracking_info(self.tracking_box, info_str)
self.draw()
except IndexError:
# exclude the extra points added to the 2 sides of x axis to
......@@ -317,7 +318,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
:param xdata: float - time value in other plot
"""
self.zoom_marker1_shown = False
x_idx, y_idx = findTPSTm(xdata, self.each_day5_min_list)
x_idx, y_idx = find_tps_tm(xdata, self.each_day5_min_list)
for rl in self.rulers:
rl.set_data(x_idx, y_idx)
......@@ -355,10 +356,10 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
y index (which day) of self.min_x and self.min_y, and set data for
all markers in self.zoom_marker1s and self.zoom_marker2s.
"""
x_idx, y_idx = findTPSTm(self.min_x, self.each_day5_min_list)
x_idx, y_idx = find_tps_tm(self.min_x, self.each_day5_min_list)
for zm1 in self.zoom_marker1s:
zm1.set_data(x_idx, y_idx)
x_idx, y_idx = findTPSTm(self.max_x, self.each_day5_min_list)
x_idx, y_idx = find_tps_tm(self.max_x, self.each_day5_min_list)
for zm2 in self.zoom_marker2s:
zm2.set_data(x_idx, y_idx)
......