diff --git a/documentation/01 _ Table of Contents.help.md b/documentation/01 _ Table of Contents.help.md
index f7e158e1eb463f7b419d7f220879acbe094331ae..60e076d08b46d31af4e52b8bcd509a2c73aaefde 100644
--- a/documentation/01 _ Table of Contents.help.md	
+++ b/documentation/01 _ Table of Contents.help.md	
@@ -4,6 +4,8 @@ Welcome to the SOH Station Viewer documentation. Here you will find usage guides
 
 On the left-hand side you will find a list of currently available help topics.
 
+If the links in the Table of Contents are broken, click on Recreate Table of Contents <img src='recreate_table_contents.png' height=30 style='margin: 3px 0px 0px 0px;'/> to rebuild it.
+
 The home button can be used to return to this page at any time.
 
 # Table of Contents
@@ -14,19 +16,23 @@ The home button can be used to return to this page at any time.
 
 + [How to Use Help](03%20_%20How%20to%20Use%20Help.help.md)
 
-+ [Search SOH n LOG](04%20_%20Search%20SOH%20n%20LOG.help.md)
++ [Search List of Directories](04%20_%20Search%20List%20of%20Directories.help.md)
+
++ [Read from Data Card](05%20_%20Read%20from%20Data%20Card.help.md)
+
++ [Select SOH](06%20_%20Select%20SOH.help.md)
 
-+ [Search List of Directories](05%20_%20Search%20List%20of%20Directories.help.md)
++ [Select Mass Position](07%20_%20Select%20Mass%20Position.help.md)
 
-+ [Read from Data Card](06%20_%20Read%20from%20Data%20Card.help.md)
++ [Select Waveforms](08%20_%20Select%20Waveforms.help.md)
 
-+ [Select SOH](07%20_%20Select%20SOH.help.md)
++ [Gap Display](09%20_%20Gap%20Display.help.md)
 
-+ [Select Mass Position](08%20_%20Select%20Mass%20Position.help.md)
++ [Change TPS Color Range](10%20_%20Change%20TPS%20Color%20Range.help.md)
 
-+ [Select Waveforms](09%20_%20Select%20Waveforms.help.md)
++ [Save Plots](11%20_%20Save%20Plots.help.md)
 
-+ [Gap Display](10%20_%20Gap%20Display.help.md)
++ [Search SOH n LOG](12%20_%20Search%20SOH%20n%20LOG.help.md)
 
 + [GPS Dialog](20%20_%20GPS%20Dialog.help.md)
 
diff --git a/documentation/05 _ Search List of Directories.help.md b/documentation/04 _ Search List of Directories.help.md
similarity index 100%
rename from documentation/05 _ Search List of Directories.help.md
rename to documentation/04 _ Search List of Directories.help.md
diff --git a/documentation/06 _ Read from Data Card.help.md b/documentation/05 _ Read from Data Card.help.md
similarity index 100%
rename from documentation/06 _ Read from Data Card.help.md
rename to documentation/05 _ Read from Data Card.help.md
diff --git a/documentation/07 _ Select SOH.help.md b/documentation/06 _ Select SOH.help.md
similarity index 100%
rename from documentation/07 _ Select SOH.help.md
rename to documentation/06 _ Select SOH.help.md
diff --git a/documentation/08 _ Select Mass Position.help.md b/documentation/07 _ Select Mass Position.help.md
similarity index 100%
rename from documentation/08 _ Select Mass Position.help.md
rename to documentation/07 _ Select Mass Position.help.md
diff --git a/documentation/09 _ Select Waveforms.help.md b/documentation/08 _ Select Waveforms.help.md
similarity index 82%
rename from documentation/09 _ Select Waveforms.help.md
rename to documentation/08 _ Select Waveforms.help.md
index 56a38d2bb7d57d3b29b6b00ebe3ec74aceb21a54..7f4a3a8d80cda885ad469d5c100c4ab1e9ac4095 100644
--- a/documentation/09 _ Select Waveforms.help.md	
+++ b/documentation/08 _ Select Waveforms.help.md	
@@ -44,10 +44,12 @@ checked,  a warning will be created, "Checked data streams will be ignored
 for RT130 data type."
 
 ## Displaying waveform channels
-If one of TPS or RAW checkboxes aren't checked which means no data need to be
-displayed,  all the waveform selected will be ignored.
-
-To display waveform channels,  user need to check:
+TPS needs to be checked to display the Time-Power-Squared of the waveform.
+RAW needs to be checked to display the actual signal of the waveform.
 + <img alt="TPS" src="images/select_waveform/select_TPS.png" height="30" />: check TPS to display the Time-Power-Squared of the selected waveform data
 + <img alt="RAW" src="images/select_waveform/select_RAW.png" height="30" />: check RAW to display the actual selected waveform data.
-<br />
\ No newline at end of file
+<br />
+
+If any waveform is checked but neither TPS nor RAW is checked,
++ For RT130, the program will read the events of the selected data streams.
++ For MSeed, the program will pop up a message requesting the user to clear the waveform selection or select either TPS or RAW.
\ No newline at end of file
diff --git a/documentation/11 _ Gap Display.help.md b/documentation/09 _ Gap Display.help.md
similarity index 100%
rename from documentation/11 _ Gap Display.help.md
rename to documentation/09 _ Gap Display.help.md
diff --git a/documentation/11 _ Save Plots.help.md b/documentation/11 _ Save Plots.help.md
new file mode 100644
index 0000000000000000000000000000000000000000..0027b76db29eeb97aa0adf7cbe68dc7fa5126b09
--- /dev/null
+++ b/documentation/11 _ Save Plots.help.md	
@@ -0,0 +1,60 @@
+# Save Plots
+
+---------------------------
+---------------------------
+
+## Step 1: Click 'Save Plot'
+In the Main Window, the Raw Data Plot, and the TPS Plot there are buttons labeled 'Save Plot'.
+
+The user needs to click these buttons to save the plots in each window.
+
+* Saving State-of-Health plots
+<br />
+<img alt="Save SOH" src="images/save_plots/save_button_soh.png" height="30" />
+<br />
+* Saving Raw data plots
+<br />
+<img alt="Save Waveform" src="images/save_plots/save_button_wf.png" height="60" />
+<br />
+* Saving Time-Power-Squared plots
+<br />
+<img alt="Save TPS" src="images/save_plots/save_button_tps.png" height="80" />
+<br />
+<br />
+<br />
+
+If the current color mode is black, the user will be asked to continue, or to
+cancel and change the mode before saving the image.
+
+<br />
+<br />
+<img alt="Want to change color mode?" src="images/save_plots/question_on_changing_black_mode.png" height="150" />
+<br />
+
+* If the user clicks 'Cancel', saving is canceled so the user can change the
+color mode before saving the plots again.
+* If the user clicks 'Continue', saving proceeds and the image is saved in
+black mode.
+<br />
+
+---------------------------
+## Step 2: Edit the file path and select the image format
+Once the 'Save Plot' button is clicked, the 'Save Plot' dialog will pop up.
+
+<br />
+<br />
+<img alt="Select Image Format dialog" src="images/save_plots/save_file_dialog.png" height="200" />
+<br />
+
++ The default path to save the image file is preset in text box (1). If the user
+wants to change the path, click the 'Save Directory' button to open a file
+dialog for changing the path.
++ The default filename to save the image is preset in text box (2). The user can
+change the name in this box.
++ Inside oval (3) are the radio buttons for selecting the image format to save
+the file in.
++ For the 'PNG' format, the user can change the DPI, which is the resolution of
+the image. The other formats are vector formats, which don't require a
+resolution.
+
+Then the user can click 'CANCEL' to cancel saving the plot, or 'SAVE PLOT' to
+save the current plots to file.
\ No newline at end of file
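The DPI note above maps directly onto matplotlib's figure-saving API. Below is a minimal sketch of the idea, assuming the plots are matplotlib figures; the helper name and file paths are illustrative, not the application's actual code:

```python
import matplotlib.pyplot as plt


def save_figure(fig, file_path: str, img_format: str, dpi: int = 100):
    """Hypothetical helper mirroring the Save Plot dialog's choices."""
    if img_format == 'PNG':
        # PNG is a raster format, so the DPI setting matters here.
        fig.savefig(file_path, format='png', dpi=dpi)
    else:
        # PDF, EPS, and SVG are vector formats; no resolution is needed.
        fig.savefig(file_path, format=img_format.lower())


fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
save_figure(fig, '/tmp/example.png', 'PNG', dpi=150)
```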
diff --git a/documentation/04 _ Search SOH n LOG.help.md b/documentation/12 _ Search SOH n LOG.help.md
similarity index 100%
rename from documentation/04 _ Search SOH n LOG.help.md
rename to documentation/12 _ Search SOH n LOG.help.md
diff --git a/documentation/99 _ test.md b/documentation/99 _ test.md
index 7ef0655b760ac6880ab28c7b87f54ad34c2bb4ae..84fbede232f89c3fc5c6e9c03a105021552adb20 100644
--- a/documentation/99 _ test.md	
+++ b/documentation/99 _ test.md	
@@ -39,7 +39,7 @@ printf("%s\n", syntaxHighlighting.doesItWork ? "Success!" : "Oof.");
 ^ This is a horizontal line
 
 v This is an image
-![An Image?](images/image.jpg)
+![An Image?](recreate_table_contents.png)
 
 ---
 Another horizontal line
diff --git a/documentation/images/save_plots/question_on_changing_black_mode.png b/documentation/images/save_plots/question_on_changing_black_mode.png
new file mode 100644
index 0000000000000000000000000000000000000000..7424afda3387e8cbcad71a7fba63903072d2f23d
Binary files /dev/null and b/documentation/images/save_plots/question_on_changing_black_mode.png differ
diff --git a/documentation/images/save_plots/save_button_soh.png b/documentation/images/save_plots/save_button_soh.png
new file mode 100644
index 0000000000000000000000000000000000000000..588e20ca07de4e9dfde974de414107bb855ac1c8
Binary files /dev/null and b/documentation/images/save_plots/save_button_soh.png differ
diff --git a/documentation/images/save_plots/save_button_tps.png b/documentation/images/save_plots/save_button_tps.png
new file mode 100644
index 0000000000000000000000000000000000000000..1bfe4977370d6b904ff3d63a79bb6a4fbfe67266
Binary files /dev/null and b/documentation/images/save_plots/save_button_tps.png differ
diff --git a/documentation/images/save_plots/save_button_wf.png b/documentation/images/save_plots/save_button_wf.png
new file mode 100644
index 0000000000000000000000000000000000000000..f65ac57c793dd9b43cfd4814e56604eb3f3f3c80
Binary files /dev/null and b/documentation/images/save_plots/save_button_wf.png differ
diff --git a/documentation/images/save_plots/save_file_dialog.png b/documentation/images/save_plots/save_file_dialog.png
new file mode 100644
index 0000000000000000000000000000000000000000..ddb40fe65456a44943792bd94933a88a64556111
Binary files /dev/null and b/documentation/images/save_plots/save_file_dialog.png differ
diff --git a/documentation/img.png b/documentation/img.png
deleted file mode 100644
index 5d8c5a2165cf11862b70318e57343665de6e1a77..0000000000000000000000000000000000000000
Binary files a/documentation/img.png and /dev/null differ
diff --git a/documentation/recreate_table_contents.png b/documentation/recreate_table_contents.png
new file mode 100644
index 0000000000000000000000000000000000000000..34ab02a858eb4da3d62325cff47e1bd56dc90186
Binary files /dev/null and b/documentation/recreate_table_contents.png differ
diff --git a/sohstationviewer/conf/constants.py b/sohstationviewer/conf/constants.py
index 8bd00e091e0c436c87c64027c626cfa716dab02f..d060a1f8a3ac0865a719cddd898f39a6d55dd97e 100644
--- a/sohstationviewer/conf/constants.py
+++ b/sohstationviewer/conf/constants.py
@@ -50,8 +50,11 @@ TABLE_CONTENTS = "01 _ Table of Contents.help.md"
 SEARCH_RESULTS = "Search Results.md"
 
 # the list of all color modes
-ALL_COLOR_MODES = {'B', 'W'}
+ALL_COLOR_MODES = {'B': 'black', 'W': 'white'}
 
+# List of image formats. PNG has to be at the beginning so that it lines up
+# with the DPI option in the Save Plot dialog
+IMG_FORMAT = ['PNG', 'PDF', 'EPS', 'SVG']
 # ================================================================= #
 #                      PLOTTING CONSTANT
 # ================================================================= #
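To illustrate the ordering comment on `IMG_FORMAT`, here is a hypothetical sketch of how the Save Plot dialog might build its format radio buttons; the widget code is illustrative, only the two constants come from this module:

```python
from PySide2 import QtWidgets

from sohstationviewer.conf.constants import ALL_COLOR_MODES, IMG_FORMAT

app = QtWidgets.QApplication([])

layout = QtWidgets.QHBoxLayout()
for idx, fmt in enumerate(IMG_FORMAT):
    btn = QtWidgets.QRadioButton(fmt)
    # PNG sits at index 0 so the DPI control can be tied to it
    btn.setChecked(idx == 0)
    layout.addWidget(btn)

# ALL_COLOR_MODES now maps a mode code to a readable name
print(ALL_COLOR_MODES['B'])  # black
```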
diff --git a/sohstationviewer/controller/processing.py b/sohstationviewer/controller/processing.py
index 4b46116690bdbd0a26c084d55ca672334e265758..83cc1d96b99daf6428a31f620234f33c645623d0 100644
--- a/sohstationviewer/controller/processing.py
+++ b/sohstationviewer/controller/processing.py
@@ -4,18 +4,24 @@ channels, datatype
 """
 
 import os
-import traceback
+import json
+import re
 from pathlib import Path
 from typing import List, Set, Optional, Dict, Tuple
 
 from PySide2.QtCore import QEventLoop, Qt
 from PySide2.QtGui import QCursor
 from PySide2.QtWidgets import QTextBrowser, QApplication
-from obspy.core import read as read_ms
-from obspy.io.reftek.core import Reftek130Exception
+from obspy.io import reftek
+
+from sohstationviewer.model.mseed_data.record_reader import RecordReader \
+    as MSeedRecordReader
+from sohstationviewer.model.mseed_data.record_reader_helper import \
+    MSeedReadError
+from sohstationviewer.model.mseed_data.mseed_reader import \
+    move_to_next_record
 
 from sohstationviewer.database.extract_data import get_signature_channels
-from sohstationviewer.model.data_type_model import DataTypeModel
 from sohstationviewer.model.handling_data import (
     read_mseed_chanids_from_headers)
 
@@ -26,59 +32,6 @@ from sohstationviewer.controller.util import (
 from sohstationviewer.view.util.enums import LogType
 
 
-def load_data(data_type: str, tracking_box: QTextBrowser,
-              list_of_dir: List[str], list_of_rt130_paths: List[Path],
-              req_wf_chans: List[str] = [], req_soh_chans: List[str] = [],
-              read_start: Optional[float] = None,
-              read_end: Optional[float] = None) -> DataTypeModel:
-    """
-    Load the data stored in list_of_dir and store it in a DataTypeModel object.
-    The concrete class of the data object is based on dataType. Run on the same
-    thread as its caller, and so will block the GUI if called on the main
-    thread. It is advisable to use model.data_loader.DataLoader to load data
-    unless it is necessary to load data in the main thread (e.g. if there is
-    a need to access the call stack).
-
-    :param data_type: type of data read
-    :param tracking_box: widget to display tracking info
-    :param list_of_dir: list of directories selected by users
-    :param list_of_rt130_paths: list of rt130 directories selected by users
-    :param req_wf_chans: requested waveform channel list
-    :param req_soh_chans: requested soh channel list
-    :param read_start: start time of read data
-    :param read_end: finish time of read data
-    :return data_object: object that keep the data read from
-        list_of_dir
-    """
-    data_object = None
-    if list_of_rt130_paths == []:
-        try:
-            data_object = DataTypeModel.create_data_object(
-                data_type, tracking_box, list_of_dir, [],
-                req_wf_chans=req_wf_chans, req_soh_chans=req_soh_chans,
-                read_start=read_start, read_end=read_end)
-        except Exception:
-            fmt = traceback.format_exc()
-            msg = f"Data can't be read due to error: {str(fmt)}"
-            display_tracking_info(tracking_box, msg, LogType.WARNING)
-
-    else:
-        try:
-            data_object = DataTypeModel.create_data_object(
-                data_type, tracking_box, [''], list_of_rt130_paths,
-                req_wf_chans=req_wf_chans, req_soh_chans=req_soh_chans,
-                read_start=read_start, read_end=read_end)
-        except Exception:
-            fmt = traceback.format_exc()
-            msg = f"RT130 selected can't be read due to error: {str(fmt)}"
-            display_tracking_info(tracking_box, msg, LogType.WARNING)
-
-    if data_object is None:
-        msg = "No data object created. Check with implementer"
-        display_tracking_info(tracking_box, msg, LogType.WARNING)
-    return data_object
-
-
 def read_mseed_channels(tracking_box: QTextBrowser, list_of_dir: List[str],
                         on_unittest: bool = False
                         ) -> Set[str]:
@@ -127,7 +80,7 @@ def read_mseed_channels(tracking_box: QTextBrowser, list_of_dir: List[str],
                 spr_gr_1_chan_ids.update(ret[3])
     if not on_unittest:
         QApplication.restoreOverrideCursor()
-    return sorted(list(soh_chan_ids)), sorted(list(mass_pos_chan_ids)),\
+    return sorted(list(soh_chan_ids)), sorted(list(mass_pos_chan_ids)), \
         sorted(list(wf_chan_ids)), sorted(list(spr_gr_1_chan_ids))
 
 
@@ -145,6 +98,7 @@ def detect_data_type(list_of_dir: List[str]) -> Optional[str]:
     sign_chan_data_type_dict = get_signature_channels()
 
     dir_data_type_dict = {}
+    is_multiplex = None
     for d in list_of_dir:
         data_type = "Unknown"
         for path, subdirs, files in os.walk(d):
@@ -153,57 +107,105 @@ def detect_data_type(list_of_dir: List[str]) -> Optional[str]:
                 if not validate_file(path2file, file_name):
                     continue
                 ret = get_data_type_from_file(path2file,
-                                              sign_chan_data_type_dict)
+                                              sign_chan_data_type_dict,
+                                              is_multiplex)
                 if ret is not None:
-                    data_type, chan = ret
-                    break
+                    d_type, is_multiplex = ret
+                    if d_type is not None:
+                        data_type = d_type
+                        break
             if data_type != "Unknown":
                 break
+
+        if is_multiplex is None:
+            raise Exception("No channel found for the data set")
+
         if data_type == "Unknown":
-            dir_data_type_dict[d] = ("Unknown", '_')
+            dir_data_type_dict[d] = "Unknown"
         else:
-            dir_data_type_dict[d] = (data_type, chan)
-
-    data_type_list = {d[0] for d in dir_data_type_dict.values()}
+            dir_data_type_dict[d] = data_type
+    data_type_list = list(set(dir_data_type_dict.values()))
     if len(data_type_list) > 1:
-        dir_data_type_str = ', '. join(sorted(list(data_type_list)))
+        dir_data_type_str = json.dumps(dir_data_type_dict)
+        dir_data_type_str = re.sub(r'\{|\}|"', '', dir_data_type_str)
+        dir_data_type_str = re.sub(r'], ', ']\n', dir_data_type_str)
         msg = (f"There are more than one types of data detected:\n"
                f"{dir_data_type_str}\n\n"
-               f"Please have only one data type for each loading.")
+               f"Please have only data that related to each other.")
         raise Exception(msg)
 
-    elif data_type_list == {'Unknown'}:
-        msg = ("There are no known data detected.\n"
-               "Please select different folder(s).")
+    elif data_type_list == ['Unknown']:
+        msg = ("There are no known data detected.\n\n"
+               "Do you want to cancel to select different folder(s)\n"
+               "Or continue to read any available mseed file?")
         raise Exception(msg)
-
-    return list(dir_data_type_dict.values())[0][0]
+    return data_type_list[0], is_multiplex
 
 
 def get_data_type_from_file(
         path2file: Path,
-        sign_chan_data_type_dict: Dict[str, str]
-) -> Optional[Tuple[str, str]]:
+        sign_chan_data_type_dict: Dict[str, str],
+        is_multiplex: Optional[bool] = None
+) -> Optional[Tuple[Optional[str], bool]]:
     """
-    + Try to read mseed data from given file
-        if catch TypeError: no data type detected => return None
-        if catch Reftek130Exception: data type => return data type RT130
-        otherwise data type is mseed which includes: q330, pegasus, centaur
-    + Continue to identify data type for a file by checking if the channel
-    in that file is a unique channel of a data type.
+    + Exclude waveform files to improve performance
+    + Loop through each record of the file:
+        If MSeedRecordReader raises an error, check whether the file is
+            RT130; if so, report the data_type as RT130, otherwise return to
+            continue checking another file.
+        If there is more than one channel in a file, the file is multiplexed.
+        If a signature channel is found, report the data_type of the file.
     :param path2file: absolute path to processed file
     :param sign_chan_data_type_dict: dict of unique chan for data
         type
+    :param is_multiplex: whether the file is multiplexed
     :return: detected data type, whether the file is multiplexed
     """
-    try:
-        stream = read_ms(path2file)
-    except TypeError:
-        return
-    except Reftek130Exception:
-        return 'RT130', '_'
-
-    for trace in stream:
-        chan = trace.stats['channel']
+    wf_chan_possibilities = ['FH', 'FN',  # ≥ 1000 to < 5000
+                             'GH', 'GL',  # ≥ 1000 to < 5000
+                             'DH', 'DL',  # ≥ 250 to < 1000
+                             'CH', 'CN',  # ≥ 250 to < 1000
+                             'EH', 'EL', 'EP',  # ≥ 80
+                             'SH', 'SL', 'SP',  # ≥ 10 to < 80
+                             'HH', 'HN',  # ≥ 80
+                             'BH', 'BN',  # ≥ 10 to < 80
+                             'MH', 'MN', 'MP', 'ML',
+                             'LH', 'LL', 'LP', 'LN',
+                             'VP', 'VL', 'VH',
+                             'UN', 'UP', 'UL', 'UH']
+
+    if any(x in path2file.name for x in wf_chan_possibilities):
+        # Skip checking waveform files which aren't signature channels
+        return None, False
+
+    file = open(path2file, 'rb')
+    chans_in_stream = set()
+    data_type = None
+    while True:
+        is_eof = (file.read(1) == b'')
+        if is_eof:
+            break
+        file.seek(-1, 1)
+        current_record_start = file.tell()
+        try:
+            record = MSeedRecordReader(file)
+        except MSeedReadError:
+            file.close()
+            if reftek.core._is_reftek130(path2file):
+                return 'RT130', False
+            return
+
+        chan = record.record_metadata.channel
+        if is_multiplex is None:
+            chans_in_stream.add(chan)
+            if len(chans_in_stream) > 1:
+                is_multiplex = True
         if chan in sign_chan_data_type_dict.keys():
-            return sign_chan_data_type_dict[chan], chan
+            data_type = sign_chan_data_type_dict[chan]
+            if is_multiplex:
+                file.close()
+                return data_type, is_multiplex
+        move_to_next_record(file, current_record_start, record)
+    file.close()
+    if is_multiplex is None:
+        # Decide from the channels seen; a value passed in by the caller
+        # is preserved
+        is_multiplex = len(chans_in_stream) > 1
+    return data_type, is_multiplex
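With these changes, `detect_data_type` returns a `(data_type, is_multiplex)` tuple instead of a bare string, and signals problems by raising. A small sketch of a caller, with hypothetical folder paths:

```python
from sohstationviewer.controller.processing import detect_data_type

list_of_dir = ['/data/STA01', '/data/STA02']  # hypothetical folders
try:
    data_type, is_multiplex = detect_data_type(list_of_dir)
except Exception as e:
    # Raised when no channel is found at all, when the folders mix
    # unrelated data types, or when nothing recognizable is detected.
    print(f"Detection failed: {e}")
else:
    print(f"data_type={data_type}, is_multiplex={is_multiplex}")
```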
diff --git a/sohstationviewer/controller/util.py b/sohstationviewer/controller/util.py
index c309427c4710c30b8752252ef8840d183f9c9d2a..b3f542680b332492d011e27c33e87cc6cea8df46 100644
--- a/sohstationviewer/controller/util.py
+++ b/sohstationviewer/controller/util.py
@@ -48,25 +48,6 @@ def validate_dir(path: str):
         raise Exception(f"Skip info folder: {path}")
 
 
-def get_total_files(list_of_dir: Path) -> int:
-    """
-    Get total number of valid files in valid directories from the list_of_dir
-    :param list_of_dir:
-    :return total: total number of valid files
-    """
-    total = 0
-    for folder in list_of_dir:
-        for path, _, files in os.walk(folder):
-            try:
-                validate_dir(path)
-            except Exception:
-                continue
-            total += len([f for f in files
-                         if validate_file(Path(path).joinpath(f), f)])
-
-    return total
-
-
 @QtCore.Slot()
 def display_tracking_info(tracking_box: QTextBrowser, text: str,
                           type: LogType = LogType.INFO):
@@ -84,19 +65,20 @@ def display_tracking_info(tracking_box: QTextBrowser, text: str,
     msg = {'text': text}
     if type == LogType.ERROR:
         msg['color'] = 'white'
-        msg['bgcolor'] = '#e46269'
+        msg['bgcolor'] = '#c45259'
     elif type == LogType.WARNING:
-        msg['color'] = '#ffd966'
-        msg['bgcolor'] = 'orange'
+        msg['color'] = 'white'
+        msg['bgcolor'] = '#c4a347'
     else:
         msg['color'] = 'blue'
         msg['bgcolor'] = 'white'
     html_text = """<body>
-        <div style='color:%(color)s; background-color:%(bgcolor)s'>
-            %(text)s
+        <div style='color:%(color)s'>
+            <strong>%(text)s</strong>
         </div>
         </body>"""
     tracking_box.setHtml(html_text % msg)
+    tracking_box.setStyleSheet(f"background-color: {msg['bgcolor']}")
     # parent.update()
     tracking_box.repaint()
 
@@ -329,3 +311,21 @@ def check_data_sdata(root_dir: str) -> bool:
     dir_list = [d for d in os.listdir(root_dir)
                 if os.path.isdir(os.path.join(root_dir, d))]
     return 'data' in dir_list and 'sdata' in dir_list
+
+
+def get_total_files(list_of_dir: list) -> int:
+    """
+    Get total number of valid files in valid directories from the list_of_dir
+    :param list_of_dir: list of directories to look for valid files in
+    :return total: total number of valid files
+    """
+    total = 0
+    for folder in list_of_dir:
+        for path, _, files in os.walk(folder):
+            try:
+                validate_dir(path)
+            except Exception:
+                continue
+            total += len([f for f in files
+                         if validate_file(Path(path).joinpath(f), f)])
+    return total
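The tracking-box change moves the background color from the message's `<div>` to a stylesheet on the widget itself, so the whole box is tinted rather than just the text block. A minimal sketch exercising it; the standalone window setup is only for illustration:

```python
from PySide2 import QtWidgets

from sohstationviewer.controller.util import display_tracking_info
from sohstationviewer.view.util.enums import LogType

app = QtWidgets.QApplication([])
tracking_box = QtWidgets.QTextBrowser()

# A warning now renders as bold white text on a muted gold (#c4a347) box.
display_tracking_info(tracking_box, "Checked data streams will be ignored.",
                      LogType.WARNING)
tracking_box.show()
app.exec_()
```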
diff --git a/sohstationviewer/database/extract_data.py b/sohstationviewer/database/extract_data.py
index bfabf3aaa99accf25eb5505c2a5bb6cea9cf3468..cf0ab6208f841629f618bd28260d617c2aa15fd2 100755
--- a/sohstationviewer/database/extract_data.py
+++ b/sohstationviewer/database/extract_data.py
@@ -5,7 +5,7 @@ from sohstationviewer.database.process_db import execute_db_dict, execute_db
 from sohstationviewer.conf.dbSettings import dbConf
 
 
-def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
+def get_chan_plot_info(org_chan_id: str, data_type: str,
                        color_mode: ColorMode = 'B') -> Dict:
     """
     Given chanID read from raw data file and detected dataType
@@ -24,10 +24,10 @@ def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
         chan = 'VM?'
     if org_chan_id.startswith('MassPos'):
         chan = 'MassPos?'
+    if org_chan_id.startswith('DS'):
+        chan = 'SEISMIC'
     if org_chan_id.startswith('Event DS'):
         chan = 'Event DS?'
-    if org_chan_id.startswith('DS') and 'DSP' not in org_chan_id:
-        chan = 'DS?'
     if org_chan_id.startswith('Disk Usage'):
         chan = 'Disk Usage?'
     if dbConf['seisRE'].match(chan):
@@ -46,17 +46,13 @@ def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
         sql = (f"{o_sql} WHERE channel='{chan}' and C.param=P.param"
                f" and dataType='{data_type}'")
     chan_db_info = execute_db_dict(sql)
-
+    seismic_label = None
     if len(chan_db_info) == 0:
         chan_db_info = execute_db_dict(
             f"{o_sql} WHERE channel='DEFAULT' and C.param=P.param")
     else:
         if chan_db_info[0]['channel'] == 'SEISMIC':
-            try:
-                chan_db_info[0]['label'] = dbConf['seisLabel'][org_chan_id[-1]]
-            except KeyError:
-                chan_db_info[0]['label'] = str(chan_info['samplerate'])
-
+            seismic_label = get_seismic_chan_label(org_chan_id)
         chan_db_info[0]['channel'] = org_chan_id
 
     chan_db_info[0]['label'] = (
@@ -68,6 +64,8 @@ def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
         else chan_db_info[0]['fixPoint'])
     if chan_db_info[0]['label'].strip() == '':
         chan_db_info[0]['label'] = chan_db_info[0]['channel']
+    elif seismic_label is not None:
+        chan_db_info[0]['label'] = seismic_label
     else:
         chan_db_info[0]['label'] = '-'.join([chan_db_info[0]['channel'],
                                             chan_db_info[0]['label']])
@@ -76,30 +74,23 @@ def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
     return chan_db_info[0]
 
 
-def get_wf_plot_info(org_chan_id: str, *args, **kwargs) -> Dict:
-    """
-    :param org_chan_id: channel name read from data source
-    :param chan_info: to be compliant with get_chan_plot_info()
-    :param data_type: to be compliant with get_chan_plot_info()
-    :param color_mode: to be compliant with get_chan_plot_info()
-    :return info of channel read from DB which is used for plotting
-    """
-    # Waveform plot's color is fixed to NULL in the database, so we do not need
-    # to get the valueColors columns from the database.
-    chan_info = execute_db_dict(
-        "SELECT param, plotType, height "
-        "FROM Parameters WHERE param='Seismic data'")
-    # The plotting API still requires that the key 'valueColors' is mapped to
-    # something, so we are setting it to None.
-    chan_info[0]['valueColors'] = None
-    chan_info[0]['label'] = get_chan_label(org_chan_id)
-    chan_info[0]['unit'] = ''
-    chan_info[0]['channel'] = 'SEISMIC'
-    chan_info[0]['convertFactor'] = 1
-    return chan_info[0]
+def get_convert_factor(chan_id: str, data_type: str):
+    """
+    Get the convert factor of the given channel for the given data type from
+        the Channels table.
+    :param chan_id: name of channel
+    :param data_type: data type of the data set
+    :return: the convert factor if one is found, otherwise None
+    """
+    sql = f"SELECT convertFactor FROM Channels WHERE channel='{chan_id}' " \
+          f"AND dataType='{data_type}'"
+    ret = execute_db(sql)
+    if ret:
+        return ret[0][0]
+    else:
+        return None
 
 
-def get_chan_label(chan_id):
+def get_seismic_chan_label(chan_id):
+    """
+    Get the label for chan_id. A data stream channel can use chan_id itself
+        as the label, while other seismic channels need the coordinate added
+        to chan_id to form the label
+    :param chan_id: name of channel
+    :return label: label to put in front of the plot of the channel
+    """
     if chan_id.startswith("DS"):
         label = chan_id
     else:
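To make the relabeled helpers concrete, a short usage sketch; the channel and data-type names are examples only:

```python
from sohstationviewer.database.extract_data import (
    get_convert_factor, get_seismic_chan_label)

# Data stream channels keep their own id as the label (per the DS branch).
print(get_seismic_chan_label('DS2'))  # DS2

# get_convert_factor returns None when no matching row exists.
factor = get_convert_factor('LHZ', 'Q330')  # hypothetical channel/data type
print(factor)
```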
diff --git a/sohstationviewer/database/soh.db b/sohstationviewer/database/soh.db
index e705fa6ba04f1b7c745e28769bc6dc5dccb82a53..c3c01eeeb559a14690a70d0fce1428347a54c664 100755
Binary files a/sohstationviewer/database/soh.db and b/sohstationviewer/database/soh.db differ
diff --git a/sohstationviewer/model/data_loader.py b/sohstationviewer/model/data_loader.py
index f449ec720d5676268438b361170fbe647596034a..d2d5ef59abdfc135d6a6f1f0a2b0b122981be0d7 100644
--- a/sohstationviewer/model/data_loader.py
+++ b/sohstationviewer/model/data_loader.py
@@ -9,8 +9,8 @@ from PySide2 import QtCore, QtWidgets
 
 from sohstationviewer.conf import constants
 from sohstationviewer.controller.util import display_tracking_info
-from sohstationviewer.model.data_type_model import (
-    DataTypeModel, ThreadStopped, ProcessingDataError)
+from sohstationviewer.model.general_data.general_data import (
+    GeneralData, ThreadStopped, ProcessingDataError)
 from sohstationviewer.view.util.enums import LogType
 
 
@@ -18,7 +18,7 @@ class DataLoaderWorker(QtCore.QObject):
     """
     The worker class that executes the code to load the data.
     """
-    finished = QtCore.Signal(DataTypeModel)
+    finished = QtCore.Signal(GeneralData)
     failed = QtCore.Signal()
     stopped = QtCore.Signal()
     notification = QtCore.Signal(QtWidgets.QTextBrowser, str, str)
@@ -26,19 +26,23 @@ class DataLoaderWorker(QtCore.QObject):
     button_chosen = QtCore.Signal(int)
 
     def __init__(self, data_type: str, tracking_box: QtWidgets.QTextBrowser,
+                 is_multiplex: Optional[bool],
                  list_of_dir: List[Path], list_of_rt130_paths: List[Path],
                  req_wf_chans: Union[List[str], List[int]] = [],
                  req_soh_chans: List[str] = [], read_start: float = 0,
+                 gap_minimum: Optional[float] = None,
                  read_end: float = constants.HIGHEST_INT,
                  include_mp123: bool = False, include_mp456: bool = False,
                  rt130_waveform_data_req: bool = False, parent_thread=None):
         super().__init__()
         self.data_type = data_type
         self.tracking_box = tracking_box
+        self.is_multiplex = is_multiplex
         self.list_of_dir = list_of_dir
         self.list_of_rt130_paths = list_of_rt130_paths
         self.req_wf_chans = req_wf_chans
         self.req_soh_chans = req_soh_chans
+        self.gap_minimum = gap_minimum
         self.read_start = read_start
         self.read_end = read_end
         self.include_mp123 = include_mp123
@@ -61,7 +65,7 @@ class DataLoaderWorker(QtCore.QObject):
                 from sohstationviewer.model.reftek.reftek import RT130
                 object_type = RT130
             else:
-                from sohstationviewer.model.mseed.mseed import MSeed
+                from sohstationviewer.model.mseed_data.mseed import MSeed
                 object_type = MSeed
             # Create data object without loading any data in order to connect
             # its unpause slot to the loader's unpause signal
@@ -69,9 +73,11 @@ class DataLoaderWorker(QtCore.QObject):
             self.button_chosen.connect(data_object.receive_pause_response,
                                        type=QtCore.Qt.DirectConnection)
             data_object.__init__(
-                self.data_type, self.tracking_box, self.list_of_dir,
+                self.data_type, self.tracking_box,
+                self.is_multiplex, self.list_of_dir,
                 self.list_of_rt130_paths, req_wf_chans=self.req_wf_chans,
-                req_soh_chans=self.req_soh_chans,
+                req_soh_chans=self.req_soh_chans, gap_minimum=self.gap_minimum,
                 read_start=self.read_start, read_end=self.read_end,
                 include_mp123zne=self.include_mp123,
                 include_mp456uvw=self.include_mp456,
@@ -112,11 +118,15 @@ class DataLoader(QtCore.QObject):
         self.thread: Optional[QtCore.QThread] = None
         self.worker: Optional[DataLoaderWorker] = None
 
-    def init_loader(self, data_type: str, tracking_box: QtWidgets.QTextBrowser,
+    def init_loader(self, data_type: str,
+                    tracking_box: QtWidgets.QTextBrowser,
+                    is_multiplex: bool,
                     list_of_dir: List[Union[str, Path]],
                     list_of_rt130_paths: List[Union[str, Path]],
                     req_wf_chans: Union[List[str], List[int]] = [],
-                    req_soh_chans: List[str] = [], read_start: float = 0,
+                    req_soh_chans: List[str] = [],
+                    gap_minimum: Optional[float] = None,
+                    read_start: float = 0,
                     read_end: float = constants.HIGHEST_INT,
                     include_mp123: bool = False,
                     include_mp456: bool = False,
@@ -148,10 +158,12 @@ class DataLoader(QtCore.QObject):
         self.worker = DataLoaderWorker(
             data_type,
             tracking_box,
+            is_multiplex,
             list_of_dir,
             list_of_rt130_paths,
             req_wf_chans=req_wf_chans,
             req_soh_chans=req_soh_chans,
+            gap_minimum=gap_minimum,
             read_start=read_start,
             read_end=read_end,
             include_mp123=include_mp123,
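Call sites of `init_loader` now have to thread through the two new arguments. A minimal sketch of the updated call, combining it with `detect_data_type` from the controller; the folder paths and `gap_minimum` value are illustrative:

```python
from typing import List

from PySide2 import QtWidgets

from sohstationviewer.controller.processing import detect_data_type
from sohstationviewer.model.data_loader import DataLoader


def start_loading(tracking_box: QtWidgets.QTextBrowser,
                  list_of_dir: List[str]) -> DataLoader:
    # detect_data_type now also reports whether the files are multiplexed
    data_type, is_multiplex = detect_data_type(list_of_dir)
    loader = DataLoader()
    loader.init_loader(
        data_type, tracking_box,
        is_multiplex,          # new argument
        list_of_dir, [],       # no RT130 paths in this sketch
        gap_minimum=0.1,       # new argument: gap threshold in seconds
    )
    return loader
```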
diff --git a/tests/test_controller/__init__.py b/sohstationviewer/model/general_data/__init__.py
similarity index 100%
rename from tests/test_controller/__init__.py
rename to sohstationviewer/model/general_data/__init__.py
diff --git a/sohstationviewer/model/general_data/data_structures.MD b/sohstationviewer/model/general_data/data_structures.MD
new file mode 100644
index 0000000000000000000000000000000000000000..f9433a985e680bf75d1e1b22579eafc74d4e6b47
--- /dev/null
+++ b/sohstationviewer/model/general_data/data_structures.MD
@@ -0,0 +1,44 @@
+## Log data:
+Info from log channels, SOH messages, and text files in a dict:
+{'TEXT': [str,], key: {chan_id: [str,],},}
+in which 'TEXT' is the chan_id given by sohview for text-only files which have
+no station or channel associated with them.
+Note: log_data for an RT130 dataset has only one channel: SOH
+
+## data_dict:
+{set_key: {
+    chan_id (str): {
+        'file_path' (str): path of file to keep track of file changes in MSeedReader
+        'chanID' (str): name of channel
+        'samplerate' (float): Sampling rate of the data
+        'startTmEpoch' (float): start epoch time of channel
+        'endTmEpoch' (float): end epoch time of channel
+        'size' (int): size of channel data
+        'tracesInfo': [{
+            'startTmEpoch': Start epoch time of the trace - float
+            'endTmEpoch': End epoch time of the trace - float
+            'times': time of channel's trace: List[float] in mseed_reader but changed to ndarray in combine_data()
+            'data': data of channel's trace: List[float] in mseed_reader but changed to ndarray in combine_data()
+            }]
+        'tps_data': list of lists of the mean square of every 5 minutes of data in each day
+        'times' (np.array): times that have been trimmed and down-sampled for plotting
+        'data' (np.array): data that has been trimmed and down-sampled for plotting
+        'chan_db_info' (dict): the plotting parameters retrieved from the
+            database for this channel
+        ax: axes to draw the channel in PlottingWidget
+        ax_wf (matplotlib.axes.Axes): axes to draw the channel in WaveformWidget
+    }
+}
+
+Use both ax and ax_wf because mass position channels are plotted in both widgets, while
+SOH channels are plotted in PlottingWidget and waveform channels are plotted in WaveformWidget.
+tps_data is created in TimePowerSquaredWidget only and applies to waveform_data only.
+
+## tps_data: data that isn't separated into traces
+{set_key - str or (str, str): {
+    chan_id - str: {
+        times: np.array,
+        data: np.array,
+        }
+    }
+}
\ No newline at end of file
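For illustration, one station's entry in the data_dict described above might look like the following (all values invented, 'times'/'data' truncated):

```python
data_dict = {
    'STA01': {                       # set_key: station_id for mseed data
        'LHZ': {
            'file_path': '/tmp/data/STA01.LHZ.mseed',
            'chanID': 'LHZ',
            'samplerate': 1.0,
            'startTmEpoch': 1577836800.0,
            'endTmEpoch': 1577923200.0,
            'size': 86400,
            'tracesInfo': [{
                'startTmEpoch': 1577836800.0,
                'endTmEpoch': 1577923200.0,
                'times': [1577836800.0, 1577836801.0],   # truncated
                'data': [105.0, 103.0],                  # truncated
            }],
        },
    },
}
```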
diff --git a/sohstationviewer/model/general_data/general_data.py b/sohstationviewer/model/general_data/general_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..d391170f36e49c070181df861f2600afbfc0d7f2
--- /dev/null
+++ b/sohstationviewer/model/general_data/general_data.py
@@ -0,0 +1,438 @@
+from __future__ import annotations
+
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from typing import Optional, Union, List, Tuple, Dict
+
+from obspy import UTCDateTime
+
+from PySide2 import QtCore
+from PySide2 import QtWidgets
+
+from sohstationviewer.controller.util import \
+    display_tracking_info, get_total_files
+from sohstationviewer.view.plotting.gps_plot.gps_point import GPSPoint
+from sohstationviewer.view.util.enums import LogType
+from sohstationviewer.database.process_db import execute_db
+from sohstationviewer.model.general_data.general_data_helper import \
+    retrieve_data_time_from_data_dict, retrieve_gaps_from_data_dict, \
+    combine_data, sort_data, squash_gaps, apply_convert_factor_to_data_dict, \
+    reset_data
+
+
+class ProcessingDataError(Exception):
+    def __init__(self, msg):
+        self.message = msg
+
+
+class ThreadStopped(Exception):
+    """
+    An exception that is raised when the user requests for the data loader
+    thread to be stopped.
+    """
+    def __init__(self, *args, **kwargs):
+        self.args = (args, kwargs)
+
+
+class GeneralData:
+    def __init__(self, data_type,
+                 tracking_box: Optional[QtWidgets.QTextBrowser] = None,
+                 is_multiplex: bool = False, list_of_dir: List[str] = [],
+                 list_of_rt130_paths: List[Path] = [],
+                 req_wf_chans: Union[List[str], List[int]] = [],
+                 req_soh_chans: List[str] = [],
+                 gap_minimum: Optional[float] = None,
+                 read_start: Optional[float] = UTCDateTime(0).timestamp,
+                 read_end: Optional[float] = UTCDateTime().timestamp,
+                 include_mp123zne: bool = False,
+                 include_mp456uvw: bool = False,
+                 rt130_waveform_data_req: bool = False,
+                 creator_thread: Optional[QtCore.QThread] = None,
+                 notification_signal: Optional[QtCore.Signal] = None,
+                 pause_signal: Optional[QtCore.Signal] = None,
+                 on_unittest: bool = False,
+                 *args, **kwargs):
+        """
+        CHANGED FROM data_type_model.DataTypeModel.__init__:
+            + add self.is_multiplex, self.on_unittest, self.gap_minimum,
+                self.keys
+            + remove docstring for self.log_data, self.soh_data,
+                self.mass_pos_data,
+                self.waveform_data, self.gaps_by_key_chan,
+                self.stream_header_by_key_chan
+
+        Super class for different data type to process data from data files
+
+        :param data_type: type of the object
+        :param tracking_box: widget to display tracking info
+        :param is_multiplex: flag showing if a file has more than one channel
+        :param list_of_dir: list of paths to the folders of data
+        :param list_of_rt130_paths: path to the folders of RT130 data
+        :param req_wf_chans: requested waveform channel list
+        :param req_soh_chans: requested SOH channel list
+        :param read_start: requested start time to read
+        :param read_end: requested end time to read
+        :param include_mp123zne: if mass position channels 1,2,3 are requested
+        :param include_mp456uvw: if mass position channels 4,5,6 are requested
+        :param rt130_waveform_data_req: flag for RT130 to read waveform data
+        :param creator_thread: the thread the current DataTypeModel instance is
+            being created in. If None, the DataTypeModel instance is being
+            created in the main thread
+        :param notification_signal: signal used to send notifications to the
+            main thread. Only not None when creator_thread is not None
+        :param pause_signal: signal used to notify the main thread that the
+            data loader is paused.
+        """
+        self.data_type = data_type
+        self.tracking_box = tracking_box
+        self.is_multiplex = is_multiplex
+        self.list_of_dir = list_of_dir
+        self.list_of_rt130_paths = list_of_rt130_paths
+        self.req_soh_chans = req_soh_chans
+        self.req_wf_chans = req_wf_chans
+        self.gap_minimum = gap_minimum
+        self.read_start = read_start
+        self.read_end = read_end
+        self.include_mp123zne = include_mp123zne
+        self.include_mp456uvw = include_mp456uvw
+        self.rt130_waveform_data_req = rt130_waveform_data_req
+        self.on_unittest = on_unittest
+        if creator_thread is None:
+            err_msg = (
+                'A signal is not None while running in main thread'
+            )
+            assert notification_signal is None, err_msg
+            assert pause_signal is None, err_msg
+            self.creator_thread = QtCore.QThread()
+        else:
+            self.creator_thread = creator_thread
+        self.notification_signal = notification_signal
+        self.pause_signal = pause_signal
+
+        """
+        processing_log: record the progress of processing
+        """
+        self.processing_log: List[Tuple[str, LogType]] = []
+        """
+        keys: list of all keys
+        """
+        self.keys = []
+
+        DataKey = Union[Tuple[str, str], str]
+
+        """
+        log_texts: dictionary of content of text files by filenames
+        """
+        self.log_texts: Dict[str, str] = {}
+        # Look for description in data_structures.MD
+        self.log_data = {'TEXT': []}  # noqa
+        self.waveform_data = {}
+        self.soh_data = {}
+        self.mass_pos_data = {}
+        """
+        data_time: time range of data sets:
+        """
+        self.data_time: Dict[DataKey, List[float]] = {}
+
+        """
+        The given data may include more than one data set, identified by
+        station_id in mseed or (unit_id, exp_no) in reftek. The user is
+        allowed to choose which data set to display.
+        selected_key: str - key of the data set to be displayed
+        """
+        self.selected_key: Optional[str] = None
+        """
+        gaps: gaps info in dict:
+        """
+        self.gaps: Dict[DataKey, List[List[float]]] = {}
+
+        """
+         tmp_dir: dir to keep memmap files. Deleted when object is deleted
+        """
+        self.tmp_dir_obj: TemporaryDirectory = TemporaryDirectory()
+        self.tmp_dir = self.tmp_dir_obj.name
+        if not on_unittest:
+            self.save_temp_data_folder_to_database()
+
+        self._pauser = QtCore.QSemaphore()
+        self.pause_response = None
+
+        self.gps_points: List[GPSPoint] = []
+
+    def select_key(self) -> Union[str, Tuple[str, str]]:
+        """
+        FROM data_type_model.DataTypeModel.select_key
+        Get the key for the data set to process.
+        :return: key of the selected data set
+        """
+        pass
+
+    def processing_data(self):
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.processing_data
+        """
+
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.read_folders()
+        self.selected_key = self.select_key()
+
+        self.fill_empty_data()
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.finalize_data()
+
+    def read_folders(self, folders: Optional[List[str]] = None) -> None:
+        """
+        Read data from the given folders to create the data dicts which are
+            attributes of the current class
+        :param folders: list of folders to read; defaults to self.list_of_dir
+            so that processing_data() can call this method with no arguments
+        """
+        if folders is None:
+            folders = self.list_of_dir
+        count = 0
+        total = get_total_files(folders)
+        for folder in folders:
+            count = self.read_folder(folder, total, count)
+
+    def read_folder(self, folder: str, total: int, count: int) -> int:
+        """
+        Read data from current folder.
+
+        :param folder: folder to read data from
+        :param total: total of all valid files
+        :param count: total of files that have been processed before this
+            folder to keep track of progress
+        :return count: total of files that have been processed after this
+            folder to keep track of progress
+        """
+        pass
+
+    def finalize_data(self):
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.finalize_data
+        This function should be called after all folders finish reading to
+            + Fill an empty dict into stations with no data added to the
+                data_dicts
+            + Sort all data traces in time order
+            + Combine traces in data and split at gaps > gap_minimum
+            + Apply convert_factor so that no flags are needed to prevent
+                applying the convert factor twice when plotting
+            + Check for requested channels that were not found
+            + Retrieve gaps from data_dicts
+            + Retrieve data_time from data_dicts
+            + Replace any data_time whose default values are invalid for
+                plotting with read_start, read_end.
+        """
+        if self.selected_key is None:
+            return
+
+        self.track_info("Finalizing...", LogType.INFO)
+
+        self.sort_all_data()
+        self.combine_all_data()
+        self.apply_convert_factor_to_data_dicts()
+
+        self.retrieve_gaps_from_data_dicts()
+        self.retrieve_data_time_from_data_dicts()
+        if self.selected_key not in self.data_time.keys():
+            self.data_time[self.selected_key] = \
+                [self.read_start, self.read_end]
+
+    def __del__(self):
+        # FROM data_type_model.Data_Type_Model.__del__
+        print("delete dataType Object")
+        try:
+            del self.tmp_dir_obj
+        except OSError as e:
+            self.track_info(
+                "Error deleting %s : %s" % (self.tmp_dir, e.strerror),
+                LogType.ERROR)
+            print("Error deleting %s : %s" % (self.tmp_dir, e.strerror))
+        print("finish deleting")
+
+    def track_info(self, text: str, type: LogType) -> None:
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.track_info:
+
+        Display tracking info in tracking_box.
+        Add all errors/warnings to processing_log.
+        :param text: str - message to display
+        :param type: str - type of message (error/warning/info)
+        """
+        # display_tracking_info updates a QtWidget, which can only be done in
+        # the main thread. So, if we are running in a background thread
+        # (i.e. self.creator_thread is not None), we need to use signal slot
+        # mechanism to ensure that display_tracking_info is run in the main
+        # thread.
+        if self.notification_signal is None:
+            display_tracking_info(self.tracking_box, text, type)
+        else:
+            self.notification_signal.emit(self.tracking_box, text, type)
+        if type != LogType.INFO:
+            self.processing_log.append((text, type))
+
+    def pause(self) -> None:
+        """
+        FROM data_type_model.Data_Type_Model.pause
+        Pause the thread this DataTypeModel instance is in. Works by trying
+        to acquire a semaphore that is not available, which causes the thread
+        to block.
+
+        Note: due to how this is implemented, each call to pause will require
+        a corresponding call to unpause. Thus, it is inadvisable to call this
+        method more than once.
+
+        Caution: not safe to call in the main thread. Unless a background
+        thread releases the semaphore, the whole program will freeze.
+        """
+        self._pauser.acquire()
+
+    @QtCore.Slot()
+    def unpause(self):
+        """
+        FROM data_type_model.Data_Type_Model.unpause
+        Unpause the thread this DataTypeModel instance is in. Works by
+        releasing the semaphore acquired by pause, which unblocks the
+        thread.
+
+        Caution: due to how this is implemented, if unpause is called before
+        pause, the thread will not be paused until another call to pause is
+        made. Also, like pause, each call to unpause must be matched by another
+        call to pause for everything to work.
+        """
+        self._pauser.release()
+
+    @QtCore.Slot()
+    def receive_pause_response(self, response: object):
+        """
+        FROM data_type_model.Data_Type_Model.receive_pause_response
+        Receive a response to a request made to another thread and unpause the
+        calling thread.
+
+        :param response: the response to the request made
+        :type response: object
+        """
+        self.pause_response = response
+        self.unpause()
+
+    @classmethod
+    def get_empty_instance(cls) -> GeneralData:
+        """
+        # FROM data_type_model.Data_Type_Model.get_empty_instance
+        Create an empty data object. Useful if a GeneralData instance is
+        needed, but it is undesirable to load a data set. Basically wraps
+        __new__().
+
+        :return: an empty data object
+        :rtype: GeneralData
+        """
+        return cls.__new__(cls)
+
+    def save_temp_data_folder_to_database(self):
+        # FROM
+        #    data_type_model.Data_Type_Model.save_temp_data_folder_to_database
+        execute_db(f'UPDATE PersistentData SET FieldValue="{self.tmp_dir}" '
+                   f'WHERE FieldName="tempDataDirectory"')
+
+    def check_not_found_soh_channels(self):
+        # FROM data_type_model.Data_Type_Model.check_not_found_soh_channels
+        all_chans_meet_req = (
+                list(self.soh_data[self.selected_key].keys()) +
+                list(self.mass_pos_data[self.selected_key].keys()) +
+                list(self.log_data[self.selected_key].keys()))
+
+        not_found_chans = [c for c in self.req_soh_chans
+                           if c not in all_chans_meet_req]
+        if not_found_chans:
+            msg = (f"No data found for the following channels: "
+                   f"{', '.join(not_found_chans)}")
+            self.processing_log.append((msg, LogType.WARNING))
+
+    def sort_all_data(self):
+        """
+        FROM data_type_model.Data_Type_Model.sort_all_data
+        Sort traces by startTmEpoch on all data: waveform_data, mass_pos_data,
+            soh_data.
+        Reftek's soh_data won't be sorted here. It has been sorted by time
+            because it is created from log data which is sorted in
+            prepare_soh_data_from_log_data()
+        """
+        sort_data(self.waveform_data[self.selected_key])
+        sort_data(self.mass_pos_data[self.selected_key])
+        try:
+            sort_data(self.soh_data[self.selected_key])
+        except KeyError:
+            # Reftek's SOH trace doesn't have startTmEpoch and
+            # actually soh_data consists of only one trace
+            pass
+
+    def combine_all_data(self):
+        combine_data(self.selected_key, self.waveform_data, self.gap_minimum)
+        combine_data(self.selected_key, self.mass_pos_data, self.gap_minimum)
+        try:
+            combine_data(self.selected_key, self.soh_data, self.gap_minimum)
+        except KeyError:
+            # Reftek's SOH trace doesn't have startTmEpoch and
+            # actually soh_data consists of only one trace
+            pass
+
+    def retrieve_gaps_from_data_dicts(self):
+        """
+        Get gaps from each data_dict, then squash all related gaps
+        """
+        self.gaps[self.selected_key] = []
+        retrieve_gaps_from_data_dict(
+            self.selected_key, self.soh_data, self.gaps)
+        retrieve_gaps_from_data_dict(
+            self.selected_key, self.mass_pos_data, self.gaps)
+        retrieve_gaps_from_data_dict(
+            self.selected_key, self.waveform_data, self.gaps)
+
+        self.gaps[self.selected_key] = squash_gaps(
+            self.gaps[self.selected_key])
+
+    def retrieve_data_time_from_data_dicts(self):
+        """
+        Go through each data_dict to update the data_time to be
+            [min of startTmEpoch, max of endTmEpoch] for each station.
+        """
+        retrieve_data_time_from_data_dict(
+            self.selected_key, self.soh_data, self.data_time)
+        retrieve_data_time_from_data_dict(
+            self.selected_key, self.mass_pos_data, self.data_time)
+        retrieve_data_time_from_data_dict(
+            self.selected_key, self.waveform_data, self.data_time)
+
+    def fill_empty_data(self):
+        """
+        Fill an empty dict into stations with no data added to the data_dicts
+        """
+        for key in self.keys:
+            if key not in self.soh_data:
+                self.soh_data[key] = {}
+            if key not in self.waveform_data:
+                self.waveform_data[key] = {}
+            if key not in self.mass_pos_data:
+                self.mass_pos_data[key] = {}
+            if key not in self.log_data:
+                self.log_data[key] = {}
+
+    def apply_convert_factor_to_data_dicts(self):
+        """
+        Apply convert_factor so that no flags are needed to prevent applying
+            the convert factor twice when plotting
+        """
+        apply_convert_factor_to_data_dict(
+            self.selected_key, self.soh_data, self.data_type)
+        apply_convert_factor_to_data_dict(
+            self.selected_key, self.mass_pos_data, self.data_type)
+        apply_convert_factor_to_data_dict(
+            self.selected_key, self.waveform_data, self.data_type)
+
+    def reset_all_selected_data(self):
+        """
+        FROM data_type_model.reset_all_selected_data()
+        Remove all keys created in the plotting process, and change fullData
+        to False. This function is to replace deepcopy which uses more memory.
+        """
+        reset_data(self.selected_key, self.soh_data)
+        reset_data(self.selected_key, self.waveform_data)
+        reset_data(self.selected_key, self.mass_pos_data)
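GeneralData acts as a template: `processing_data` drives the shared pipeline while concrete readers supply `select_key` and `read_folder`. A skeletal subclass, purely illustrative, showing the hooks a reader overrides:

```python
from sohstationviewer.model.general_data.general_data import GeneralData
from sohstationviewer.view.util.enums import LogType


class DummyReader(GeneralData):
    """Hypothetical reader used only to illustrate the hook methods."""

    def select_key(self):
        # Pick the only data set; real readers may ask the user to choose.
        return self.keys[0] if self.keys else None

    def read_folder(self, folder: str, total: int, count: int) -> int:
        # A real reader parses files here, filling self.soh_data,
        # self.mass_pos_data, self.waveform_data, and self.log_data,
        # and reports progress through self.track_info().
        self.track_info(f"Reading {folder}", LogType.INFO)
        return count
```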
diff --git a/sohstationviewer/model/general_data/general_data_helper.py b/sohstationviewer/model/general_data/general_data_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..953f80ad72761c464a2db23aa493368b05681532
--- /dev/null
+++ b/sohstationviewer/model/general_data/general_data_helper.py
@@ -0,0 +1,210 @@
+from typing import List, Dict, Optional, Union, Tuple
+import numpy as np
+
+from sohstationviewer.database.extract_data import get_convert_factor
+
+
+def _check_related_gaps(min1: float, max1: float,
+                        min2: float, max2: float,
+                        index: int, checked_indexes: List[int]):
+    """
+    FROM handling_data.check_related_gaps
+
+    Check if the passed ranges overlap each other and, if so, add the index
+        to checked_indexes.
+
+    :param min1: start of range 1
+    :param max1: end of range 1
+    :param min2: start of range 2
+    :param max2: end of range 2
+    :param index: index of gap being checked
+    :param checked_indexes: list of gaps that have been checked
+
+    :return: True if the two ranges overlap each other, False otherwise
+    """
+    if ((min1 <= min2 <= max1) or (min1 <= max2 <= max1)
+            or (min2 <= min1 <= max2) or (min2 <= max1 <= max2)):
+        # range [min1, max1] and [min2, max2] have some part overlap each other
+        checked_indexes.append(index)
+        return True
+    else:
+        return False
+
+
+def squash_gaps(gaps: List[List[float]]) -> List[List[float]]:
+    """
+    FROM handling_data.squash_gaps
+
+    Compress gaps from different channels that have time ranges related to
+    each other into ones with the outside boundary (min start, max end),
+    or (max start, min end) in the case of overlaps.
+    :param gaps: [[float, float],], [[float, float],] -
+        list of gaps of multiple channels: [[start, end],], [[start, end],]
+    :return: squashed_gaps: [[float, float],] - all related gaps are squashed
+        extending to the outside start and end
+        [[min start, max end], [max start, min end]]
+
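+    Example (an illustrative doctest; values are hypothetical):
+
+        >>> squash_gaps([[10, 15], [12, 20], [30, 35]])
+        [[10, 20], [30, 35]]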
+    """
+    gaps = sorted(gaps, key=lambda x: x[0])
+    squashed_gaps = []
+    checked_indexes = []
+
+    for idx, g in enumerate(gaps):
+        if idx in checked_indexes:
+            continue
+        squashed_gaps.append(g)
+        checked_indexes.append(idx)
+        overlap = g[0] >= g[1]
+        for idx_, g_ in enumerate(gaps):
+            if idx_ in checked_indexes:
+                continue
+            if not overlap:
+                if g_[0] >= g_[1]:
+                    continue
+                if _check_related_gaps(g[0], g[1], g_[0], g_[1],
+                                       idx_, checked_indexes):
+                    squashed_gaps[-1][0] = min(g[0], g_[0])
+                    squashed_gaps[-1][1] = max(g[1], g_[1])
+                else:
+                    break
+            else:
+                if g_[0] < g_[1]:
+                    continue
+                if _check_related_gaps(g[1], g[0], g_[1], g_[0],
+                                       idx_, checked_indexes):
+                    squashed_gaps[-1][0] = max(g[0], g_[0])
+                    squashed_gaps[-1][1] = min(g[1], g_[1])
+
+    return squashed_gaps
+
+
+def sort_data(sta_data_dict: Dict) -> None:
+    """
+    FROM handling_data.sort_data
+
+    Sort traces in 'tracesInfo' of each channel by 'startTmEpoch' order
+    :param sta_data_dict: data of a station
+    """
+    for chan_id in sta_data_dict:
+        traces_info = sta_data_dict[chan_id]['tracesInfo']
+        sta_data_dict[chan_id]['tracesInfo'] = sorted(
+            traces_info, key=lambda i: i['startTmEpoch'])
+
+
+def retrieve_data_time_from_data_dict(
+        selected_key: Union[str, Tuple[str, str]],
+        data_dict: Dict, data_time: Dict[str, List[float]]) -> None:
+    """
+    Go through each channel of the selected station to compute data_time,
+        which is [min of startTmEpoch, max of endTmEpoch] across the
+        station's channels.
+    :param selected_key: the key of the selected data set
+    :param data_dict: the given data dict
+    :param data_time: data time by station key
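+
+    Example (an illustrative doctest; values are hypothetical):
+
+        >>> data_time = {}
+        >>> d = {'STA1': {'CH1': {'startTmEpoch': 10, 'endTmEpoch': 50},
+        ...               'CH2': {'startTmEpoch': 5, 'endTmEpoch': 40}}}
+        >>> retrieve_data_time_from_data_dict('STA1', d, data_time)
+        >>> data_time
+        {'STA1': [5, 50]}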
+    """
+    selected_data_dict = data_dict[selected_key]
+    for c in selected_data_dict:
+        dtime = [selected_data_dict[c]['startTmEpoch'],
+                 selected_data_dict[c]['endTmEpoch']]
+
+        if selected_key in data_time.keys():
+            data_time[selected_key][0] = min(data_time[selected_key][0],
+                                             dtime[0])
+            data_time[selected_key][1] = max(data_time[selected_key][1],
+                                             dtime[1])
+        else:
+            data_time[selected_key] = dtime
+
+
+def retrieve_gaps_from_data_dict(selected_key: Union[str, Tuple[str, str]],
+                                 data_dict: Dict,
+                                 gaps: Dict[str, List[List[float]]]) -> None:
+    """
+    Create each station's gap list by adding all gaps from all of its
+        channels
+    :param selected_key: the key of the selected data set
+    :param data_dict: the given data dict
+    :param gaps: gap list by key; gaps[selected_key] is assumed to be
+        initialized by the caller
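+
+    Example (an illustrative doctest; values are hypothetical):
+
+        >>> gaps = {'STA1': []}
+        >>> d = {'STA1': {'CH1': {'gaps': [[1, 2]]}, 'CH2': {'gaps': []}}}
+        >>> retrieve_gaps_from_data_dict('STA1', d, gaps)
+        >>> gaps
+        {'STA1': [[1, 2]]}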
+    """
+    selected_data_dict = data_dict[selected_key]
+    for c in selected_data_dict.keys():
+        cgaps = selected_data_dict[c]['gaps']
+        if cgaps != []:
+            gaps[selected_key] += cgaps
+
+
+def combine_data(selected_key: Union[str, Tuple[str, str]],
+                 data_dict: Dict, gap_minimum: Optional[float]) -> None:
+    """
+    Traverse traces in each channel; add [end, start] to the gap list when
+        delta >= gap_minimum, where delta is the distance between
+        contiguous traces.
+    Combine sorted traces using np.concatenate, which also changes data to
+        ndarray, and update startTmEpoch and endTmEpoch.
+    :param selected_key: the key of the selected data set
+    :param data_dict: dict of data of a station
+    :param gap_minimum: minimum length of gaps to be detected
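+
+    Example (an illustrative doctest; values are hypothetical, and the
+    channel has no 'gaps' key so no gap detection happens):
+
+        >>> d = {'S': {'CH': {'tracesInfo': [
+        ...     {'startTmEpoch': 0, 'endTmEpoch': 1,
+        ...      'data': [1, 2], 'times': [0, 1]},
+        ...     {'startTmEpoch': 5, 'endTmEpoch': 6,
+        ...      'data': [3], 'times': [5]}]}}}
+        >>> combine_data('S', d, gap_minimum=2)
+        >>> d['S']['CH']['startTmEpoch'], d['S']['CH']['endTmEpoch']
+        (0, 6)
+        >>> d['S']['CH']['tracesInfo'][0]['data']
+        array([1, 2, 3])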
+    """
+    selected_data_dict = data_dict[selected_key]
+    for chan_id in selected_data_dict:
+        channel = selected_data_dict[chan_id]
+        traces_info = channel['tracesInfo']
+        if 'gaps' in channel:
+            # gaps key is for mseed data only
+            for idx in range(len(traces_info) - 1):
+                curr_end_tm = traces_info[idx]['endTmEpoch']
+                next_start_tm = traces_info[idx + 1]['startTmEpoch']
+                delta = abs(curr_end_tm - next_start_tm)
+                if gap_minimum is not None and delta >= gap_minimum:
+                    # add gap
+                    gap = [curr_end_tm, next_start_tm]
+                    selected_data_dict[chan_id]['gaps'].append(gap)
+
+        channel['startTmEpoch'] = min([tr['startTmEpoch']
+                                       for tr in traces_info])
+        channel['endTmEpoch'] = max([tr['endTmEpoch'] for tr in traces_info])
+
+        data_list = [tr['data'] for tr in traces_info]
+        times_list = [tr['times'] for tr in traces_info]
+        channel['tracesInfo'] = [{
+            'startTmEpoch': channel['startTmEpoch'],
+            'endTmEpoch': channel['endTmEpoch'],
+            'data': np.concatenate(data_list),
+            'times': np.concatenate(times_list)
+        }]
+
+
+def apply_convert_factor_to_data_dict(
+        selected_key: Union[str, Tuple[str, str]],
+        data_dict: Dict, data_type: str) -> None:
+    """
+    Traverse traces in each channel and convert the data according to the
+        convert_factor retrieved from the database
+    :param selected_key: the key of the selected data set
+    :param data_dict: dict of data
+    :param data_type: type of data
+    """
+    selected_data_dict = data_dict[selected_key]
+    for chan_id in selected_data_dict:
+        channel = selected_data_dict[chan_id]
+        convert_factor = get_convert_factor(chan_id, data_type)
+        if convert_factor is not None and convert_factor != 1:
+            for tr in channel['tracesInfo']:
+                tr['data'] = convert_factor * tr['data']
+
+
+def reset_data(selected_key: Union[str, Tuple[str, str]], data_dict: Dict):
+    """
+    FROM data_type_model.reset_data()
+    Remove all keys created in the plotting process for the given data dict
+    :param selected_key: the key of the selected data set
+    :param data_dict: the data dict that contains the selected key
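+
+    Example (an illustrative doctest; keys are hypothetical):
+
+        >>> d = {'S': {'CH': {'fullData': True, 'data': [1], 'times': [2]}}}
+        >>> reset_data('S', d)
+        >>> d
+        {'S': {'CH': {'fullData': False}}}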
+    """
+    selected_data_dict = data_dict[selected_key]
+    for chan_id in selected_data_dict:
+        selected_data_dict[chan_id]['fullData'] = False
+        del_keys = ['chan_db_info', 'times', 'data', 'ax', 'ax_wf']
+        for k in del_keys:
+            try:
+                del selected_data_dict[chan_id][k]
+            except KeyError:
+                pass
diff --git a/sohstationviewer/model/mseed/read_mseed_experiment/mseed_reader.py b/sohstationviewer/model/mseed/read_mseed_experiment/mseed_reader.py
deleted file mode 100644
index 120c30965fa4c24e6457413d6395dd913ba0246e..0000000000000000000000000000000000000000
--- a/sohstationviewer/model/mseed/read_mseed_experiment/mseed_reader.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from typing import BinaryIO
-import obspy
-from record_reader import RecordReader
-
-
-class MSeedReader:
-    def __init__(self, file: BinaryIO) -> None:
-        self.file = file
-
-    def read(self):
-        trace = []
-        while 1:
-            # We know that end of file is reached when read() returns an empty
-            # string.
-            is_eof = (self.file.read(1) == b'')
-            if is_eof:
-                break
-            # We need to move the file pointer back to its position after we
-            # do the end of file check. Otherwise, we would be off by one
-            # byte for all the reads afterward.
-            self.file.seek(-1, 1)
-
-            # We save the start of the current record so that after we are
-            # done reading the record, we can move back. This makes moving
-            # to the next record a lot easier, seeing as we can simply move
-            # the file pointer a distance the size of the current record.
-            current_record_start = self.file.tell()
-
-            reader = RecordReader(self.file)
-            trace.append(reader.get_first_data_point())
-            # sample_count = reader.record_metadata.sample_count
-            # sample_rate = reader.record_metadata.sample_rate
-            # record_time_taken = sample_count / sample_rate
-            # record_end_time = (reader.record_metadata.start_time +
-            #                    record_time_taken)
-
-            # MSEED stores the size of a data record as an exponent of a
-            # power of two, so we have to convert that to actual size before
-            # doing anything else.
-            record_length_exp = reader.header_unpacker.unpack(
-                'B', reader.blockette_1000.record_length
-            )[0]
-            record_size = 2 ** record_length_exp
-
-            self.file.seek(current_record_start)
-            self.file.seek(record_size, 1)
-
-
-if __name__ == '__main__':
-    # numpy.set_printoptions(threshold=sys.maxsize)
-    file_path = '/Users/ldam/Documents/GIT/sohstationviewer/tests/test_data/' \
-                'Q330_mixed_traces/XX-3203_4-20221222183011'
-    file = open(file_path, 'rb')
-    stream = obspy.read(file_path)
-    MSeedReader(file).read()
diff --git a/sohstationviewer/model/mseed_data/__init__.py b/sohstationviewer/model/mseed_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sohstationviewer/model/mseed/read_mseed_experiment/decode_mseed.py b/sohstationviewer/model/mseed_data/decode_mseed.py
similarity index 100%
rename from sohstationviewer/model/mseed/read_mseed_experiment/decode_mseed.py
rename to sohstationviewer/model/mseed_data/decode_mseed.py
diff --git a/sohstationviewer/model/mseed_data/mseed.py b/sohstationviewer/model/mseed_data/mseed.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba5a2233f5a05eeddcb380ef53e016c0076300d2
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed.py
@@ -0,0 +1,187 @@
+"""
+MSeed object to hold and process MSeed data
+"""
+import os
+import re
+import traceback
+from pathlib import Path
+from typing import Dict, Optional, Set
+
+from sohstationviewer.controller.util import validate_file, validate_dir
+from sohstationviewer.model.mseed_data.mseed_reader import MSeedReader
+from sohstationviewer.model.general_data.general_data import \
+    GeneralData, ThreadStopped, ProcessingDataError
+from sohstationviewer.view.util.enums import LogType
+
+from sohstationviewer.model.mseed_data.mseed_helper import \
+    retrieve_nets_from_data_dict, read_text
+from sohstationviewer.model.mseed_data.record_reader_helper import \
+    MSeedReadError
+
+
+class MSeed(GeneralData):
+    """
+    Read and process MSeed files into an object whose properties can be
+    used to plot SOH data, mass position data, waveform data, and gaps
+    """
+
+    def __init__(self, *args, **kwargs):
+        # FROM mseed.mseed.MSEED.__init__
+        super().__init__(*args, **kwargs)
+        self.nets_by_sta: Dict[str, Set[str]] = {}
+        self.invalid_blockettes = False
+        self.not_mseed_files = []
+        self.processing_data()
+
+    def finalize_data(self):
+        """
+        CHANGED FROM mseed.mseed.MSEED.finalize_data
+
+        This function should be called after all folders are read to
+            + distribute log texts to stations
+            + get nets_by_sta from the data dicts
+            + do the other tasks in super().finalize_data()
+
+        """
+        self.distribute_log_text_to_station()
+        self.retrieve_nets_from_data_dicts()
+        super().finalize_data()
+
+    def read_folders(self) -> None:
+        """
+        Read data from list_of_dir for soh, mass position and waveform.
+        """
+        super().read_folders(self.list_of_dir)
+        if self.not_mseed_files:
+            self.track_info(
+                f"Not MSeed files: {self.not_mseed_files}", LogType.WARNING)
+        if self.invalid_blockettes:
+            # This check ensures the message is only printed once
+            print("We currently only handle blockettes 500, 1000,"
+                  " and 1001.")
+
+    def read_folder(self, folder: str, total: int, count: int) -> int:
+        """
+        Read data from current folder.
+
+        :param folder: folder to read data from
+        :param total: total of all valid files
+        :param count: total of files that have been processed before this
+            folder to keep track of progress
+        :return count: total of files that have been processed after this
+            folder to keep track of progress
+        """
+        if not os.path.isdir(folder):
+            raise ProcessingDataError(f"Path '{folder}' doesn't exist")
+
+        for path, sub_dirs, files in os.walk(folder):
+            try:
+                validate_dir(path)
+            except Exception as e:
+                # skip Information folder
+                self.track_info(str(e), LogType.WARNING)
+                continue
+            for file_name in files:
+                if self.creator_thread.isInterruptionRequested():
+                    raise ThreadStopped()
+
+                path2file = Path(path).joinpath(file_name)
+
+                if not validate_file(path2file, file_name):
+                    continue
+                count += 1
+                if count % 10 == 0:
+                    self.track_info(
+                        f'Read {count} files/{total}', LogType.INFO)
+                log_text = read_text(path2file)
+                if log_text is not None:
+                    self.log_texts[path2file] = log_text
+                    continue
+                reader = MSeedReader(
+                    path2file,
+                    read_start=self.read_start,
+                    read_end=self.read_end,
+                    is_multiplex=self.is_multiplex,
+                    req_soh_chans=self.req_soh_chans,
+                    req_wf_chans=self.req_wf_chans,
+                    include_mp123zne=self.include_mp123zne,
+                    include_mp456uvw=self.include_mp456uvw,
+                    soh_data=self.soh_data,
+                    mass_pos_data=self.mass_pos_data,
+                    waveform_data=self.waveform_data,
+                    log_data=self.log_data,
+                    gap_minimum=self.gap_minimum)
+                try:
+                    reader.read()
+                    self.invalid_blockettes = (self.invalid_blockettes
+                                               or reader.invalid_blockettes)
+                except MSeedReadError:
+                    self.not_mseed_files.append(file_name)
+                except Exception:
+                    fmt = traceback.format_exc()
+                    self.track_info(f"Skip file {path2file} can't be read "
+                                    f"due to error: {str(fmt)}",
+                                    LogType.WARNING)
+        return count
+
+    def retrieve_nets_from_data_dicts(self):
+        """
+        Go through the stations of each data dict to get all network codes
+            found in all channels of a station and add them to nets_by_sta.
+        """
+        retrieve_nets_from_data_dict(self.soh_data, self.nets_by_sta)
+        retrieve_nets_from_data_dict(self.mass_pos_data, self.nets_by_sta)
+        retrieve_nets_from_data_dict(self.waveform_data, self.nets_by_sta)
+
+    def select_key(self) -> Optional[str]:
+        """
+        CHANGED FROM mseed.mseed.MSEED:
+            + get sta_ids from self.keys
+            + add condition 'not on_unittest' so unit tests can skip the
+                station-selection dialog
+
+        :return selected_sta_id: the selected station id from the available
+            keys.
+            + If there is only one station id, return it.
+            + If there is more than one, show all ids, let user choose one to
+                return.
+        """
+        self.keys = sorted(list(set(
+            list(self.soh_data.keys()) +
+            list(self.mass_pos_data.keys()) +
+            list(self.waveform_data.keys()) +
+            [k for k in list(self.log_data.keys()) if k != 'TEXT']
+        )))
+        sta_ids = self.keys
+
+        if len(sta_ids) == 0:
+            return
+
+        selected_sta_id = sta_ids[0]
+        if not self.on_unittest and len(sta_ids) > 1:
+            msg = ("There are more than one stations in the given data.\n"
+                   "Please select one to display")
+            self.pause_signal.emit(msg, sta_ids)
+            self.pause()
+            selected_sta_id = sta_ids[self.pause_response]
+
+        self.track_info(f'Select Station {selected_sta_id}', LogType.INFO)
+        return selected_sta_id
+
+    def distribute_log_text_to_station(self):
+        """
+        Loop through paths to text files to look for station id in the path.
+            + If there is a station id in the path, add the content to that
+                station id under channel 'TXT'.
+            + If there is no station id in the path, add the content to the
+                key 'TEXT', which means the station for these texts is
+                unknown.
+        """
+        for path2file in self.log_texts:
+            try:
+                file_parts = re.split(rf"{os.sep}|\.", path2file.as_posix())
+                sta = [s for s in self.keys if s in file_parts][0]
+            except IndexError:
+                self.log_data['TEXT'].append(self.log_texts[path2file])
+                continue
+            if 'TXT' not in self.log_data[sta]:
+                self.log_data[sta]['TXT'] = []
+            self.log_data[sta]['TXT'].append(self.log_texts[path2file])
diff --git a/sohstationviewer/model/mseed_data/mseed_helper.py b/sohstationviewer/model/mseed_data/mseed_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..32d237e2ec5a3dc353458691ff4abe5381d33a46
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed_helper.py
@@ -0,0 +1,53 @@
+# Functions that change from handling_data's functions
+import os
+from pathlib import Path
+from typing import Dict, Optional, Set
+
+
+def retrieve_nets_from_data_dict(data_dict: Dict,
+                                 nets_by_sta: Dict[str, Set[str]]) -> None:
+    """
+    Retrieve nets by sta_id from the given data_dict.
+
+    :param data_dict: dict of data by station
+    :param nets_by_sta: nets list by sta_id
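+
+    Example (an illustrative doctest; values are hypothetical):
+
+        >>> nets = {}
+        >>> d = {'STA1': {'CH1': {'nets': {'XX'}},
+        ...               'CH2': {'nets': {'YY'}}}}
+        >>> retrieve_nets_from_data_dict(d, nets)
+        >>> nets['STA1'] == {'XX', 'YY'}
+        True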
+    """
+    for sta_id in data_dict.keys():
+        if sta_id not in nets_by_sta:
+            nets_by_sta[sta_id] = set()
+        for c in data_dict[sta_id]:
+            nets_by_sta[sta_id].update(
+                data_dict[sta_id][c]['nets'])
+
+
+def read_text(path2file: Path) -> Optional[str]:
+    """
+    CHANGED FROM handling_data.read_text:
+        + No need to check for binary content because a caught
+            UnicodeDecodeError means the file is binary
+
+    Read a text file and return its content as a log string.
+        + Return None if the file isn't a text file
+        + Remove empty lines in the content
+    :param path2file: absolute path to the text file
+    :return: the log string, or None if the file can't be decoded as text
+    """
+    try:
+        with open(path2file, 'r') as file:
+            content = file.read().strip()
+    except UnicodeDecodeError:
+        return
+
+    if content != '':
+        # skip empty lines
+        no_empty_line_list = [
+            line for line in content.splitlines() if line]
+        no_empty_line_content = os.linesep.join(no_empty_line_list)
+
+        log_text = "\n\n** STATE OF HEALTH: %s\n" % path2file.name
+        log_text += no_empty_line_content
+    else:
+        log_text = ''
+    return log_text
diff --git a/sohstationviewer/model/mseed_data/mseed_reader.py b/sohstationviewer/model/mseed_data/mseed_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f2eb366b52d3c03f6af451890d9c1607d520fae
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed_reader.py
@@ -0,0 +1,286 @@
+from numbers import Real
+from typing import BinaryIO, Optional, Dict, Union, List
+from pathlib import Path
+from obspy import UTCDateTime
+
+from sohstationviewer.model.mseed_data.record_reader import RecordReader
+from sohstationviewer.model.mseed_data.record_reader_helper import \
+    RecordMetadata
+
+from sohstationviewer.model.mseed_data.mseed_reader_helper import check_chan
+
+
+def move_to_next_record(file, current_record_start: int,
+                        record: RecordReader):
+    """
+    Move the current position of file to the next record
+
+    :param file: the file object being read
+    :param current_record_start: the start position of the current record
+    :param record: the record that has just been read
+    """
+    # MSEED stores the size of a data record as an exponent of a
+    # power of two, so we have to convert that to actual size before
+    # doing anything else.
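+    # e.g. a stored exponent of 12 corresponds to a 4096-byte record.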
+    record_length_exp = record.header_unpacker.unpack(
+        'B', record.blockette_1000.record_length
+    )[0]
+    record_size = 2 ** record_length_exp
+
+    file.seek(current_record_start)
+    file.seek(record_size, 1)
+
+
+class MSeedReader:
+    def __init__(self, file_path: Path,
+                 read_start: float = UTCDateTime(0).timestamp,
+                 read_end: float = UTCDateTime().timestamp,
+                 is_multiplex: Optional[bool] = None,
+                 req_soh_chans: List[str] = [],
+                 req_wf_chans: List[str] = [],
+                 include_mp123zne: bool = False,
+                 include_mp456uvw: bool = False,
+                 soh_data: Dict = {},
+                 mass_pos_data: Dict = {},
+                 waveform_data: Dict = {},
+                 log_data: Dict[str, Union[List[str],
+                                           Dict[str, List[str]]]] = {},
+                 gap_minimum: Optional[float] = None
+                 ) -> None:
+        """
+        The purpose of this class is to read data from the given file and
+            add it to the given data dicts if the data meets the
+            requirements.
+        If data_type is not multiplex, all records of a file belong to the
+            same channel; the info found in the first record can be used to
+            decide whether to keep reading when that record doesn't meet the
+            channel requirements.
+        If data_type is multiplex, every record has to be examined.
+        All data_dicts' definition can be found in data_dict_structures.MD
+
+        :param file_path: Absolute path to data file
+        :param read_start: time that is required to start reading
+        :param read_end: time that is required to end reading
+        :param is_multiplex: multiplex status of the file's data_type
+        :param req_soh_chans: requested SOH channel list
+        :param req_wf_chans: requested waveform channel list
+        :param include_mp123zne: if mass position channels 1,2,3 are requested
+        :param include_mp456uvw: if mass position channels 4,5,6 are requested
+        :param soh_data: data dict of SOH
+        :param mass_pos_data: data dict of mass position
+        :param waveform_data: data dict of waveform
+        :param log_data: data dict of log_data
+        :param gap_minimum: minimum length of gaps required to detect
+            from record
+        """
+        self.read_start = read_start
+        self.read_end = read_end
+        self.is_multiplex = is_multiplex
+        self.gap_minimum = gap_minimum
+        self.req_soh_chans = req_soh_chans
+        self.req_wf_chans = req_wf_chans
+        self.include_mp123zne = include_mp123zne
+        self.include_mp456uvw = include_mp456uvw
+        self.soh_data = soh_data
+        self.mass_pos_data = mass_pos_data
+        self.waveform_data = waveform_data
+        self.log_data = log_data
+        self.file_path = file_path
+        self.file: BinaryIO = open(file_path, 'rb')
+
+        self.invalid_blockettes = False
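+
+    # Typical usage (an illustrative sketch; the file path and channel list
+    # below are hypothetical):
+    #     reader = MSeedReader(Path('/data/XX.STA1.msd'),
+    #                          req_soh_chans=['VKI'], gap_minimum=60)
+    #     reader.read()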
+
+    def get_data_dict(self, metadata: RecordMetadata) -> Optional[Dict]:
+        """
+        Find which data_dict to add data to based on req_soh_chans,
+            req_wf_chans, include_mp123zne, include_mp456uvw, and the
+            sample rate
+        :param metadata: record's metadata
+        :return: the data_dict to add data to, or None if the channel isn't
+            requested
+        """
+        chan_id = metadata.channel
+        sample_rate = metadata.sample_rate
+        chan_type = check_chan(chan_id, self.req_soh_chans, self.req_wf_chans,
+                               self.include_mp123zne, self.include_mp456uvw)
+        if chan_type == 'SOH':
+            if self.req_soh_chans == [] and sample_rate > 1:
+                # If 'All chans' is selected for SOH, channel with samplerate>1
+                # will be skipped by default to improve performance.
+                # Note: If user intentionally added channels with samplerate>1
+                # using SOH Channel Preferences dialog, they are still read.
+                return
+            return self.soh_data
+        if chan_type == 'MP':
+            return self.mass_pos_data
+        if chan_type == 'WF':
+            return self.waveform_data
+
+    def check_time(self, record: RecordReader) -> bool:
+        """
+        Check if the record's time range intersects the time range the user
+            requested to read
+
+        :param record: the record read from file
+        :return: True if the record's time range intersects the requested
+            range
+        """
+        meta = record.record_metadata
+        if self.read_start > meta.end_time or self.read_end < meta.start_time:
+            return False
+        return True
+
+    def append_log(self, record: RecordReader) -> None:
+        """
+        Add all text info retrieved from record to log_data
+
+        :param record: the record read from file
+        """
+        logs = [record.ascii_text] + record.other_blockettes
+        log_str = "===========\n".join(logs)
+        if log_str == "":
+            return
+        meta = record.record_metadata
+        log_str = "\n\nSTATE OF HEALTH: " + \
+                  f"From:{meta.start_time}  To:{meta.end_time}\n" + log_str
+        sta_id = meta.station
+        chan_id = meta.channel
+        if sta_id not in self.log_data.keys():
+            self.log_data[sta_id] = {}
+        if chan_id not in self.log_data[sta_id]:
+            self.log_data[sta_id][chan_id] = []
+        self.log_data[sta_id][chan_id].append(log_str)
+
+    def append_data(self, data_dict: dict,
+                    record: RecordReader,
+                    data_point: Real) -> None:
+        """
+        Append data point to the given data_dict
+
+        :param data_dict: the data dict to add the record's data to
+        :param record: the record read from file
+        :param data_point: the first sample of the record frame
+        """
+        if data_point is None:
+            return
+        meta = record.record_metadata
+        sta_id = meta.station
+        if sta_id not in data_dict.keys():
+            data_dict[sta_id] = {}
+        station = data_dict[sta_id]
+        self.add_chan_data(station, meta, data_point)
+
+    def _add_new_trace(self, channel: Dict, metadata: RecordMetadata,
+                       data_point: Real) -> None:
+        """
+        Start a new trace to channel['tracesInfo'] with data_point as
+            the first data value and metadata's start_time as first time value
+
+        :param channel: dict of channel's info
+        :param metadata: record's meta data
+        :param data_point: the first sample of the record frame
+        """
+        channel['tracesInfo'].append({
+            'startTmEpoch': metadata.start_time,
+            'data': [data_point],
+            'times': [metadata.start_time]
+        })
+
+    def _append_trace(self, channel: Dict, metadata: RecordMetadata,
+                      data_point: Real) -> None:
+        """
+        Append data_point to the latest trace of channel['tracesInfo']
+
+        :param channel: dict of channel's info
+        :param metadata: record's meta data
+        :param data_point: the first sample of the record frame
+        """
+        channel['tracesInfo'][-1]['data'].append(data_point)
+        channel['tracesInfo'][-1]['times'].append(metadata.start_time)
+
+    def add_chan_data(self, station: dict, metadata: RecordMetadata,
+                      data_point: Real) -> None:
+        """
+        Add new channel to the passed station and append data_point to the
+            channel if there's no gap/overlap or start a new trace of data
+            when there's a gap.
+        If the gap/overlap is >= gap_minimum, add it to the gaps list.
+
+        :param station: dict of chan by id of a station
+        :param metadata: an Object of metadata from the record
+        :param data_point: the first sample of the record frame
+        """
+        meta = metadata
+        chan_id = metadata.channel
+        if chan_id not in station.keys():
+            station[chan_id] = {
+                'file_path': self.file_path,
+                'chanID': chan_id,
+                'samplerate': meta.sample_rate,
+                'startTmEpoch': meta.start_time,
+                'endTmEpoch': meta.end_time,
+                'size': meta.sample_count,
+                'nets': {meta.network},
+                'gaps': [],
+                'tracesInfo': [{
+                    'startTmEpoch': meta.start_time,
+                    'endTmEpoch': meta.end_time,
+                    'data': [data_point],
+                    'times': [meta.start_time]
+                }]
+            }
+        else:
+            channel = station[chan_id]
+            record_start_time = meta.start_time
+            previous_end_time = channel['endTmEpoch']
+            delta = abs(record_start_time - previous_end_time)
+            if channel['file_path'] != self.file_path:
+                # Start new trace for each file to reorder trace and
+                # combine traces again later
+                channel['file_path'] = self.file_path
+                self._add_new_trace(channel, meta, data_point)
+            else:
+                if self.gap_minimum is not None and delta >= self.gap_minimum:
+                    gap = [previous_end_time, record_start_time]
+                    channel['gaps'].append(gap)
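+                    # e.g. with gap_minimum=60, a record starting 90 seconds
+                    # after the previous end time yields the gap
+                    # [previous_end_time, record_start_time]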
+                # appending data
+                self._append_trace(channel, meta, data_point)
+
+            channel['tracesInfo'][-1]['endTmEpoch'] = meta.end_time
+            # update channel's metadata
+            channel['endTmEpoch'] = meta.end_time
+            channel['size'] += meta.sample_count
+            channel['nets'].add(meta.network)
+
+    def read(self):
+        while True:
+            # We know that end of file is reached when read() returns an empty
+            # string.
+            is_eof = (self.file.read(1) == b'')
+            if is_eof:
+                break
+            # We need to move the file pointer back to its position after we
+            # do the end of file check. Otherwise, we would be off by one
+            # byte for all the reads afterward.
+            self.file.seek(-1, 1)
+
+            # We save the start of the current record so that after we are
+            # done reading the record, we can move back. This makes moving
+            # to the next record a lot easier, seeing as we can simply move
+            # the file pointer a distance the size of the current record.
+            current_record_start = self.file.tell()
+
+            record = RecordReader(self.file)
+            if record.invalid_blockettes:
+                self.invalid_blockettes = True
+            if not self.check_time(record):
+                move_to_next_record(
+                    self.file, current_record_start, record)
+                continue
+            data_dict = self.get_data_dict(record.record_metadata)
+            if data_dict is None:
+                if self.is_multiplex:
+                    move_to_next_record(
+                        self.file, current_record_start, record)
+                    continue
+                else:
+                    break
+            first_data_point = record.get_first_data_point()
+            self.append_data(data_dict, record, first_data_point)
+            self.append_log(record)
+
+            move_to_next_record(self.file, current_record_start, record)
+        self.file.close()
diff --git a/sohstationviewer/model/mseed_data/mseed_reader_helper.py b/sohstationviewer/model/mseed_data/mseed_reader_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..7275f53321f9460bf41102245ff809d4396db5ba
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed_reader_helper.py
@@ -0,0 +1,93 @@
+# ALL FUNCTIONS IN THIS FILE ARE FROM HANDLING DATA. NO NEED TO REVIEW
+
+import re
+from typing import Tuple, List, Union
+
+from sohstationviewer.conf.dbSettings import dbConf
+
+
+def check_chan(chan_id: str, req_soh_chans: List[str], req_wf_chans: List[str],
+               include_mp123zne: bool, include_mp456uvw: bool) \
+        -> Union[str, bool]:
+    """
+    Check if chanID is a requested channel.
+    :param chan_id: str - channel ID
+    :param req_soh_chans: list of str - requested SOH channels
+    :param req_wf_chans: list of str - requested waveform channels
+    :param include_mp123zne: if mass position channels 1,2,3 are requested
+    :param include_mp456uvw: if mass position channels 4,5,6 are requested
+
+    :return: str/bool -
+        'WF' if chanID is a requested waveform channel,
+        'SOH' if chanID is a requested SOH channel,
+        'MP' if chanID is a requested mass position channel
+        False otherwise.
+    """
+    if chan_id.startswith('VM'):
+        if (not include_mp123zne and
+                chan_id[-1] in ['1', '2', '3', 'Z', 'N', 'E']):
+            return False
+        if (not include_mp456uvw
+                and chan_id[-1] in ['4', '5', '6', 'U', 'V', 'W']):
+            return False
+        return 'MP'
+
+    ret = check_wf_chan(chan_id, req_wf_chans)
+    if ret[0] == 'WF':
+        if ret[1]:
+            return "WF"
+        else:
+            return False
+    if check_soh_chan(chan_id, req_soh_chans):
+        return "SOH"
+    return False
+
+
+def check_soh_chan(chan_id: str, req_soh_chans: List[str]) -> bool:
+    """
+    Check if chan_id is a requested SOH channel.
+    Mass position is always included.
+    This function is used for mseed only, so mass position channels start
+        with 'VM'.
+    If req_soh_chans is empty, all SOH channels are considered requested
+    :param chan_id: str - channel ID
+    :param req_soh_chans: list of str - requested SOH channels
+    :return: bool - True if chan_id is a requested SOH channel. False otherwise
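+
+    Example (an illustrative doctest; channel names are hypothetical):
+
+        >>> check_soh_chan('VKI', [])
+        True
+        >>> check_soh_chan('EX1', ['EX?'])
+        True
+        >>> check_soh_chan('LHZ', ['VKI'])
+        False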
+    """
+    if req_soh_chans == []:
+        return True
+    if chan_id in req_soh_chans:
+        return True
+    if 'EX?' in req_soh_chans and chan_id.startswith('EX'):
+        if chan_id[2] in ['1', '2', '3']:
+            return True
+    # TODO: remove mass position channels from SOH
+    if chan_id.startswith('VM'):
+        if chan_id[2] in ['0', '1', '2', '3', '4', '5', '6']:
+            return True
+    return False
+
+
+def check_wf_chan(chan_id: str, req_wf_chans: List[str]) -> Tuple[str, bool]:
+    """
+    Check if chanID is a waveform channel and is requested by user
+    :param chan_id: str - channel ID
+    :param req_wf_chans: list of str - requested waveform channels
+    :return wf: str - '' if chan_id is not a waveform channel.
+                      'WF' if chan_id is a waveform channel.
+    :return has_chan: bool - True if chan_id is a requested waveform channel.
+    """
+    if not dbConf['seisRE'].match(chan_id):
+        return '', False
+
+    for req in req_wf_chans:
+        if len(req) == 1:
+            req = req.replace('*', '...')
+        elif len(req) == 2:
+            req = req.replace('*', '..')
+        elif len(req) == 3:
+            req = req.replace('*', '.')
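+        # e.g. '*' expands to the regex '...' and 'H*' to 'H..', so a
+        # 3-character channel such as 'HHZ' matches both patterns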
+
+        if re.compile(f'^{req}$').match(chan_id):
+            return 'WF', True
+
+    return 'WF', False
diff --git a/sohstationviewer/model/mseed/read_mseed_experiment/record_reader.py b/sohstationviewer/model/mseed_data/record_reader.py
similarity index 90%
rename from sohstationviewer/model/mseed/read_mseed_experiment/record_reader.py
rename to sohstationviewer/model/mseed_data/record_reader.py
index 5b3af30c01f0e938989fdd7d388164d2e82338d0..40db266dd7377510ea1ff5c173d266ae22f55403 100644
--- a/sohstationviewer/model/mseed/read_mseed_experiment/record_reader.py
+++ b/sohstationviewer/model/mseed_data/record_reader.py
@@ -4,11 +4,11 @@ from typing import BinaryIO, Optional, List
 
 from obspy import UTCDateTime
 
-from decode_mseed import (
+from sohstationviewer.model.mseed_data.decode_mseed import (
     decode_ieee_float, decode_ieee_double, decode_steim, decode_int16,
     decode_int24, decode_int32,
 )
-from mseed_helper import (
+from sohstationviewer.model.mseed_data.record_reader_helper import (
     FixedHeader, Blockette1000, get_data_endianness, Unpacker,
     get_record_metadata, get_header_endianness, RecordMetadata,
     EncodingFormat,
@@ -37,7 +37,8 @@ class RecordReader:
 
         self.data_unpacker: Unpacker = Unpacker()
         self.record_metadata: Optional[RecordMetadata] = None
-
+        self.invalid_blockettes = False
+        self.ascii_text: str = ''
         self.read_header()
 
     def read_header(self) -> None:
@@ -220,8 +221,7 @@ class RecordReader:
                 'H', next_blockette_type
             )[0]
             if next_blockette_type not in (500, 1000, 1001):
-                print('We currently only handle blockettes 500, 1000, and'
-                      '1001.')
+                self.invalid_blockettes = True
                 continue
             if next_blockette_type == 500:
                 self.read_blockette_500()
@@ -230,7 +230,27 @@ class RecordReader:
             elif next_blockette_type == 2000:
                 self.read_blockette_2000()
 
-    def get_first_data_point(self) -> Real:
+    def decode_ascii_data(self, data_start: int):
+        """
+        Read the ASCII string from the data portion of the record and strip
+            the null-byte padding
+
+        :param data_start: byte number where the data portion starts
+        """
+        # We want to read everything in the record if the encoding is
+        # ASCII.
+        record_length_exp = self.header_unpacker.unpack(
+            'B', self.blockette_1000.record_length
+        )[0]
+        record_size = 2 ** record_length_exp
+        data_block = self.file.read(record_size - data_start)
+        single_padding = b'\x00'.decode()
+        try:
+            self.ascii_text = data_block.decode().rstrip(single_padding)
+        except UnicodeDecodeError:
+            pass
+
+    def get_first_data_point(self) -> Optional[Real]:
         """
         Get the first data point of the current data record.
         :return: the first data point of the current data record, whose type is
@@ -251,17 +271,8 @@ class RecordReader:
         encoding_format = EncodingFormat(encoding_format)
 
         if encoding_format == EncodingFormat.ASCII:
-            # We want to read everything in the record if the encoding is
-            # ASCII.
-            record_length_exp = self.header_unpacker.unpack(
-                'B', self.blockette_1000.record_length
-            )[0]
-            record_size = 2 ** record_length_exp
-            # This name does not make much sense with what we are doing here,
-            # but it will have to do for now.
-            # The size of the record includes the header, so we have to account
-            # for that when grabbing the data.
-            first_data_point = self.file.read(record_size - data_start)
+            self.decode_ascii_data(data_start)
+            first_data_point = None
         else:
 
             # Currently, we are extracting only the first data point in each
diff --git a/sohstationviewer/model/mseed/read_mseed_experiment/mseed_helper.py b/sohstationviewer/model/mseed_data/record_reader_helper.py
similarity index 70%
rename from sohstationviewer/model/mseed/read_mseed_experiment/mseed_helper.py
rename to sohstationviewer/model/mseed_data/record_reader_helper.py
index 28f0c228b713cc14d9adbaf243a052f0995c0f63..c9fa6ace53751c1487fd34ed678fda5cec38c862 100644
--- a/sohstationviewer/model/mseed/read_mseed_experiment/mseed_helper.py
+++ b/sohstationviewer/model/mseed_data/record_reader_helper.py
@@ -5,6 +5,11 @@ from enum import Enum
 from obspy import UTCDateTime
 
 
+class MSeedReadError(Exception):
+    def __init__(self, msg):
+        super().__init__(msg)
+        self.message = msg
+
+
 class Unpacker:
     """
     A wrapper around struct.unpack() to unpack binary data without having to
@@ -79,7 +84,8 @@ class RecordMetadata:
     location: str
     channel: str
     network: str
-    start_time: UTCDateTime
+    start_time: float
+    end_time: float
     sample_count: int
     sample_rate: float
 
@@ -95,6 +101,21 @@ class EncodingFormat(Enum):
     STEIM_2 = 11
 
 
+def check_time_from_time_string(endian, time_string):
+    """
+    Check whether the raw record start time bytes look sane when unpacked
+        with the given byte order.
+
+    :param endian: '>' or '<' - the byte order to try
+    :param time_string: the raw record start time bytes from the fixed header
+    :return: True if the unpacked year and day fall in a sane range
+    :raise MSeedReadError: if the bytes can't be unpacked at all
+    """
+    try:
+        record_start_time_tuple = struct.unpack(f'{endian}hhbbbbh',
+                                                time_string)
+    except struct.error:
+        raise MSeedReadError("Not an MSeed file.")
+    # libmseed uses 1900 to 2100 as the sane year range. We follow their
+    # example here.
+    year_is_good = (1900 <= record_start_time_tuple[0] <= 2100)
+    # The upper range is 366 to account for leap years.
+    day_is_good = (1 <= record_start_time_tuple[1] <= 366)
+    return year_is_good and day_is_good
+
+
 def get_header_endianness(header: FixedHeader):
     """
     Determine the endianness of the fixed header of a data record. Works by
@@ -117,15 +138,15 @@ def get_header_endianness(header: FixedHeader):
     endianness of header
     """
     record_start_time_string = header.record_start_time
-    record_start_time_tuple = struct.unpack('>hhbbbbh',
-                                            record_start_time_string)
-    # libmseed uses 1900 to 2100 as the sane year range. We follow their
-    # example here.
-    year_is_good = (1900 <= record_start_time_tuple[0] <= 2100)
-    # The upper range is 366 to account for leap years.
-    day_is_good = (1 <= record_start_time_tuple[1] <= 366)
-
-    endianness = 'big' if year_is_good and day_is_good else 'little'
+    good_time = check_time_from_time_string('>', record_start_time_string)
+    if good_time:
+        endianness = 'big'
+    else:
+        good_time = check_time_from_time_string('<', record_start_time_string)
+        if good_time:
+            endianness = 'little'
+        else:
+            raise MSeedReadError("Not an MSeed file.")
     return endianness
 
 
@@ -178,32 +199,41 @@ def get_record_metadata(header: FixedHeader, header_unpacker: Unpacker):
         needed so that the correct byte order can be used
     :return: the extract record metadata
     """
-    station = header.station.decode('utf-8').rstrip()
-    location = header.location.decode('utf-8').rstrip()
-    channel = header.channel.decode('utf-8').rstrip()
-    network = header.net_code.decode('utf-8').rstrip()
-
-    record_start_time_string = header.record_start_time
-    record_start_time_tuple = header_unpacker.unpack('HHBBBBH',
-                                                     record_start_time_string)
-    record_start_time = UTCDateTime(year=record_start_time_tuple[0],
-                                    julday=record_start_time_tuple[1],
-                                    hour=record_start_time_tuple[2],
-                                    minute=record_start_time_tuple[3],
-                                    second=record_start_time_tuple[4],
-                                    microsecond=record_start_time_tuple[
-                                                    6] * 100)
-
-    sample_count = header_unpacker.unpack('H', header.sample_count)[0]
-
-    sample_rate_factor = header_unpacker.unpack(
-        'h', header.sample_rate_factor
-    )[0]
-    sample_rate_multiplier = header_unpacker.unpack(
-        'h', header.sample_rate_multiplier
-    )[0]
+    try:
+        station = header.station.decode('utf-8').rstrip()
+        location = header.location.decode('utf-8').rstrip()
+        channel = header.channel.decode('utf-8').rstrip()
+        network = header.net_code.decode('utf-8').rstrip()
+
+        record_start_time_string = header.record_start_time
+        record_start_time_tuple = header_unpacker.unpack(
+            'HHBBBBH', record_start_time_string)
+        record_start_time = UTCDateTime(year=record_start_time_tuple[0],
+                                        julday=record_start_time_tuple[1],
+                                        hour=record_start_time_tuple[2],
+                                        minute=record_start_time_tuple[3],
+                                        second=record_start_time_tuple[4],
+                                        microsecond=record_start_time_tuple[
+                                                        6] * 100).timestamp
+
+        sample_count = header_unpacker.unpack('H', header.sample_count)[0]
+
+        sample_rate_factor = header_unpacker.unpack(
+            'h', header.sample_rate_factor
+        )[0]
+        sample_rate_multiplier = header_unpacker.unpack(
+            'h', header.sample_rate_multiplier
+        )[0]
+    except ValueError:
+        raise MSeedReadError("Not an MSeed file.")
     sample_rate = calculate_sample_rate(sample_rate_factor,
                                         sample_rate_multiplier)
+    if sample_rate == 0:
+        record_end_time = record_start_time
+    else:
+        record_time_taken = sample_count / sample_rate
+        record_end_time = record_start_time + record_time_taken
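+        # e.g. 100 samples at 40 Hz put the end time 2.5 s after the start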
 
     return RecordMetadata(station, location, channel, network,
-                          record_start_time, sample_count, sample_rate)
+                          record_start_time, record_end_time,
+                          sample_count, sample_rate)
diff --git a/sohstationviewer/model/reftek/reftek.py b/sohstationviewer/model/reftek/reftek.py
index 29200b9b5aafdc54eb70d4aaea49e289b7daf452..16cdfd565fff830f5cad72508ccd55a77dd2a691 100755
--- a/sohstationviewer/model/reftek/reftek.py
+++ b/sohstationviewer/model/reftek/reftek.py
@@ -4,6 +4,8 @@ RT130 object to hold and process RefTek data
 from pathlib import Path
 from typing import Tuple, List, Union
 import numpy as np
+import os
+import traceback
 
 from sohstationviewer.model.reftek.from_rt2ms import (
     core, soh_packet, packet)
@@ -15,6 +17,7 @@ from sohstationviewer.model.handling_data_reftek import (
     check_reftek_header, read_reftek_stream)
 from sohstationviewer.conf import constants
 from sohstationviewer.view.util.enums import LogType
+from sohstationviewer.controller.util import validate_file
 
 
 class RT130(DataTypeModel):
@@ -75,15 +78,42 @@ class RT130(DataTypeModel):
             folders = self.list_of_dir
         super().read_folders(folders)
 
-    def read_data_file(self, path2file, file_name):
+    def read_folder(self, folder: str, total: int, count: int) -> int:
         """
-        Read data from path <path2ffile>, with name <file_name>
+        Read data from current folder.
 
-        :param path2file: absolute path to data file
-        :param file_name: name of data file
+        :param folder: folder to read data from
+        :param total: total of all valid files
+        :param count: total of files that have been processed before this
+            folder to keep track of progress
+        :return count: total of files that have been processed after this
+            folder to keep track of progress
         """
-        if not self.read_reftek_130(path2file):
-            read_text(path2file, file_name, self.log_data['TEXT'])
+
+        if self.list_of_rt130_paths != []:
+            folders = self.list_of_rt130_paths
+            for folder in folders:
+                total += sum([len(files) for _, _, files in os.walk(folder)])
+        else:
+            folders = [self.dir]
+            total = sum([len(files) for _, _, files in os.walk(self.dir)])
+
+        for folder in folders:
+            for path, subdirs, files in os.walk(folder):
+                for file_name in files:
+                    if self.creator_thread.isInterruptionRequested():
+                        raise ThreadStopped()
+                    path2file = Path(path).joinpath(file_name)
+                    if not validate_file(path2file, file_name):
+                        continue
+                    if not self.read_reftek_130(path2file):
+                        read_text(path2file, file_name, self.log_data['TEXT'])
+                    count += 1
+                    if count % 50 == 0:
+                        self.track_info(
+                            f"Read {count} files/ {total}", LogType.INFO)
+
+        return count
 
     def select_key(self) -> Tuple[str, str]:
         """
diff --git a/sohstationviewer/view/db_config/param_dialog.py b/sohstationviewer/view/db_config/param_dialog.py
index 2fc8c8ad99d312e01857c2d4514062aeb49b4e10..21ecf7bcca7316e30a6b5e7253d7f1ce19ef400b 100755
--- a/sohstationviewer/view/db_config/param_dialog.py
+++ b/sohstationviewer/view/db_config/param_dialog.py
@@ -47,7 +47,7 @@ class ParamDialog(UiDBInfoDialog):
         color_mode_label = QtWidgets.QLabel('Color mode:')
         color_selector = QComboBox()
         color_selector.insertItem(0, initial_color_mode)
-        other_color_modes = ALL_COLOR_MODES - {initial_color_mode}
+        other_color_modes = set(ALL_COLOR_MODES.keys()) - {initial_color_mode}
         color_selector.insertItems(1, other_color_modes)
         color_selector.setFixedWidth(100)
         color_selector.currentTextChanged.connect(self.on_color_mode_changed)
diff --git a/sohstationviewer/view/file_information/get_file_information.py b/sohstationviewer/view/file_information/get_file_information.py
index 96fbac28b823341cc67a391a8b7aa53365243a02..01eb4a54535bd536862e8f8e84f08f206b75fe1a 100644
--- a/sohstationviewer/view/file_information/get_file_information.py
+++ b/sohstationviewer/view/file_information/get_file_information.py
@@ -1,13 +1,13 @@
 from typing import Union, Dict, List, Set, Tuple
 
 from sohstationviewer.controller.plotting_data import format_time
-from sohstationviewer.model.data_type_model import DataTypeModel
-from sohstationviewer.model.mseed.mseed import MSeed
+from sohstationviewer.model.general_data.general_data import GeneralData
+from sohstationviewer.model.mseed_data.mseed import MSeed
 from sohstationviewer.model.reftek.reftek import RT130
 from sohstationviewer.view.util.functions import extract_netcodes
 
 
-def extract_data_set_info(data_obj: Union[DataTypeModel, RT130, MSeed],
+def extract_data_set_info(data_obj: Union[GeneralData, RT130, MSeed],
                           date_format: str
                           ) -> Dict[str, Union[str, List[str]]]:
     """
@@ -48,7 +48,7 @@ def extract_data_set_info(data_obj: Union[DataTypeModel, RT130, MSeed],
             f"\n\t\tTo: {end_time_str}")
     data_set_info['Time ranges'] = '\n\t'.join(time_range_info_list)
 
-    key_sets = data_obj.stream_header_by_key_chan.keys()
+    key_sets = data_obj.keys
     if data_type == 'RT130':
         das_serials = list({key[0] for key in key_sets})
         experiment_numbers = list({key[1] for key in key_sets})
diff --git a/sohstationviewer/view/main_window.py b/sohstationviewer/view/main_window.py
index 84aec880ff9b1e0200e22040b07f6531ab6a9619..f2f448c6dc3752e1d050cd628ecaaa121aa697bf 100755
--- a/sohstationviewer/view/main_window.py
+++ b/sohstationviewer/view/main_window.py
@@ -10,9 +10,9 @@ from PySide2.QtCore import QSize
 from PySide2.QtGui import QFont, QPalette, QColor
 from PySide2.QtWidgets import QFrame, QListWidgetItem, QMessageBox
 
-from sohstationviewer.conf import constants
 from sohstationviewer.model.data_loader import DataLoader
-from sohstationviewer.model.data_type_model import DataTypeModel
+from sohstationviewer.model.general_data.general_data import \
+    GeneralData
 
 from sohstationviewer.view.calendar.calendar_dialog import CalendarDialog
 from sohstationviewer.view.db_config.channel_dialog import ChannelDialog
@@ -41,8 +41,7 @@ from sohstationviewer.view.channel_prefer_dialog import ChannelPreferDialog
 
 from sohstationviewer.controller.processing import detect_data_type
 from sohstationviewer.controller.util import (
-    display_tracking_info, rt130_find_cf_dass, check_data_sdata,
-    get_dir_size
+    display_tracking_info, rt130_find_cf_dass, check_data_sdata
 )
 
 from sohstationviewer.database.process_db import execute_db_dict, execute_db
@@ -63,9 +62,17 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         """
         self.list_of_dir: List[Path] = []
         """
-        current_dir: str - the current main data directory
+        current_dir: the current main data directory
         """
-        self.current_dir = ''
+        self.current_dir: str = ''
+        """
+        save_plot_dir: directory to save plot
+        """
+        self.save_plot_dir: str = ''
+        """
+        save_plot_format: format to save plot
+        """
+        self.save_plot_format: str = 'SVG'
         """
         rt130_das_dict: dict by rt130 for data paths, so user can choose
             dasses to assign list of data paths to selected_rt130_paths
@@ -81,6 +88,11 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         """
         self.data_type: str = 'Unknown'
         """
+        is_multiplex: flag showing if the data set is multiplex (more than
+            one channel in a file)
+        """
+        self.is_multiplex = None
+        """
         color_mode: str - the current color mode of the plot; can be either 'B'
             or 'W'
         """
@@ -117,11 +129,11 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         """
         data_object: Object that keep data read from data set for plotting
         """
-        self.data_object: Union[DataTypeModel, None] = None
+        self.data_object: Union[GeneralData, None] = None
         """
-        min_gap: minimum minutes of gap length to be display on gap bar
+        gap_minimum: minimum gap length to be displayed on the gap bar;
+            entered in minutes on the GUI and stored in seconds
         """
-        self.min_gap: Union[float, None] = None
+        self.gap_minimum: Union[float, None] = None
         """
         pref_soh_list_name: name of selected preferred channels list
         """
@@ -185,6 +197,10 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         self.validate_config()
         self.apply_config()
 
+    @QtCore.Slot()
+    def save_plot(self):
+        self.plotting_widget.save_plot('SOH-Plot')
+
     @QtCore.Slot()
     def open_data_type(self) -> None:
         """
@@ -386,31 +402,41 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         :rtype: List[str, int]
         """
         req_wf_chans = []
-
-        if self.all_wf_chans_check_box.isChecked():
-            req_mseed_wildcards = ['*']
-            req_dss = ['*']      # all data stream
-        else:
-            req_dss = []
-            req_mseed_wildcards = []
-            for idx, ds_checkbox in enumerate(self.ds_check_boxes):
-                if ds_checkbox.isChecked():
-                    req_dss.append(idx + 1)
-            if self.mseed_wildcard_edit.text().strip() != "":
-                req_mseed_wildcards = self.mseed_wildcard_edit.text(
-                    ).split(",")
-
-        if self.data_type == 'RT130':
-            req_wf_chans = req_dss
-            if req_dss != ['*'] and req_mseed_wildcards != []:
-                msg = 'MSeed Wildcards will be ignored for RT130.'
-                self.processing_log.append((msg, LogType.WARNING))
-        else:
-            req_wf_chans = req_mseed_wildcards
-            if req_mseed_wildcards != ['*'] and req_dss != []:
-                msg = ('Checked data streams will be ignored for '
-                       'none-RT130 data type.')
-                self.processing_log.append((msg, LogType.WARNING))
+        if (self.data_type != 'RT130' and
+                (self.all_wf_chans_check_box.isChecked()
+                 or self.mseed_wildcard_edit.text().strip() != "")
+                and not self.tps_check_box.isChecked()
+                and not self.raw_check_box.isChecked()):
+            raise Exception(
+                "Waveform channels have been selected but neither the TPS "
+                "nor the RAW checkbox is checked.\nPlease clear the waveform "
+                "selection if you don't want to display the data.")
+
+        if self.tps_check_box.isChecked() or self.raw_check_box.isChecked():
+            if self.all_wf_chans_check_box.isChecked():
+                req_mseed_wildcards = ['*']
+                req_dss = ['*']      # all data streams
+            else:
+                req_dss = []
+                req_mseed_wildcards = []
+                for idx, ds_checkbox in enumerate(self.ds_check_boxes):
+                    if ds_checkbox.isChecked():
+                        req_dss.append(idx + 1)
+                if self.mseed_wildcard_edit.text().strip() != "":
+                    req_mseed_wildcards = self.mseed_wildcard_edit.text(
+                        ).split(",")
+
+            if self.data_type == 'RT130':
+                req_wf_chans = req_dss
+                if req_dss != ['*'] and req_mseed_wildcards != []:
+                    msg = 'MSeed Wildcards will be ignored for RT130.'
+                    self.processing_log.append((msg, LogType.WARNING))
+            else:
+                req_wf_chans = req_mseed_wildcards
+                if req_mseed_wildcards != ['*'] and req_dss != []:
+                    msg = ('Checked data streams will be ignored for '
+                           'non-RT130 data types.')
+                    self.processing_log.append((msg, LogType.WARNING))
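+            # Illustrative outcomes (editor's sketch; example values are
+            # assumed, not from the original code):
+            #   RT130, streams 1 & 3 checked -> req_wf_chans == [1, 3]
+            #   MSeed, wildcards "LH*,HH*" -> req_wf_chans == ['LH*', 'HH*']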
         return req_wf_chans
 
     def get_requested_soh_chan(self):
@@ -483,20 +509,28 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
                 raise Exception(msg)
 
         if self.rt130_das_dict == {}:
-            self.data_type = detect_data_type(self.list_of_dir)
+            self.data_type, self.is_multiplex = detect_data_type(
+                self.list_of_dir)
 
     def clear_plots(self):
         self.plotting_widget.clear()
         self.waveform_dlg.plotting_widget.clear()
         self.tps_dlg.plotting_widget.clear()
 
+    def cancel_loading(self):
+        display_tracking_info(self.tracking_info_text_browser,
+                              "Loading cancelled",
+                              LogType.WARNING)
+
     @QtCore.Slot()
     def read_selected_files(self):
         """
         Read data from selected files/directories, process and plot channels
             read from those according to current options set on the GUI
         """
-
+        display_tracking_info(self.tracking_info_text_browser,
+                              "Loading started",
+                              LogType.INFO)
         self.clear_plots()
         start_tm_str = self.time_from_date_edit.date().toString(
             QtCore.Qt.ISODate)
@@ -506,6 +540,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         if self.end_tm <= self.start_tm:
             msg = "To Date must be greater than From Date."
             QtWidgets.QMessageBox.warning(self, "Wrong Date Given", msg)
+            self.cancel_loading()
             return
         self.info_list_widget.clear()
         is_working = (self.is_loading_data or self.is_plotting_soh or
@@ -518,22 +553,23 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
 
         if self.gap_len_line_edit.text().strip() != '':
             try:
-                self.min_gap = float(
-                    self.gap_len_line_edit.text())
+                # convert from minute to second
+                self.gap_minimum = float(
+                    self.gap_len_line_edit.text()) * 60
             except ValueError:
                 msg = "Minimum Gap must be a number."
+                QtWidgets.QMessageBox.warning(
+                    self, "Invalid Minimum Gap request", msg)
+                self.cancel_loading()
+                return
+            if self.gap_minimum < 0.1 * 60:  # gap_minimum is in seconds
+                msg = "Minimum Gap must be greater than 0.1 minute to be " \
+                      "detected."
                 QtWidgets.QMessageBox.warning(
                     self, "Invalid Minimum Gap request", msg)
                 return
         else:
-            self.min_gap = None
-
-        # if waveform channels are selected, Event DS will be read from EH/ET
-        # header
-        # rt130_waveform_data_req is to read data for wave form data
-        rt130_waveform_data_req = False
-        if self.raw_check_box.isChecked() or self.tps_check_box.isChecked():
-            rt130_waveform_data_req = True
+            self.gap_minimum = None
 
         if self.mseed_wildcard_edit.text().strip() != '':
             try:
@@ -541,6 +577,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             except Exception as e:
                 QtWidgets.QMessageBox.warning(
                     self, "Incorrect Wildcard", str(e))
+                self.cancel_loading()
                 return
 
         try:
@@ -550,19 +587,30 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         except AttributeError:
             pass
 
-        self.req_soh_chans = (self.pref_soh_list
-                              if not self.all_soh_chans_check_box.isChecked()
-                              else [])
-
         try:
             self.get_file_list()
         except Exception as e:
-            QtWidgets.QMessageBox.warning(
-                self, "Directories",
-                f"Error in get_file_list:\n\n{str(e)}")
-            return
+            if 'no known data detected' in str(e):
+                msgbox = QtWidgets.QMessageBox()
+                msgbox.setWindowTitle('Do you want to continue?')
+                msgbox.setText(str(e))
+                msgbox.addButton(QtWidgets.QMessageBox.Cancel)
+                msgbox.addButton('Continue', QtWidgets.QMessageBox.YesRole)
+                result = msgbox.exec_()
+                if result == QtWidgets.QMessageBox.Cancel:
+                    self.cancel_loading()
+                    return
+                self.data_type = 'Unknown'
+            else:
+                fmt = traceback.format_exc()
+                QtWidgets.QMessageBox.warning(
+                    self, "Select directory", str(fmt))
+                self.cancel_loading()
+                return
 
-        dir_size = sum(get_dir_size(str(dir))[0] for dir in self.list_of_dir)
+        """
+        Temporarily skip check_size because it takes too long.
+        dir_size = sum(get_dir_size(str(dir))[0] for dir in self.dir_names)
         if dir_size > constants.BIG_FILE_SIZE:
             data_too_big_dialog = QMessageBox()
             data_too_big_dialog.setText('Chosen data set is very big. It '
@@ -575,13 +623,15 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             data_too_big_dialog.setIcon(QMessageBox.Question)
             ret = data_too_big_dialog.exec_()
             if ret == QMessageBox.Abort:
+                self.cancel_loading()
                 return
-
+        """
         self.req_soh_chans = self.get_requested_soh_chan()
         try:
             self.req_wf_chans = self.get_requested_wf_chans()
         except Exception as e:
             QMessageBox.information(self, "Waveform Selection", str(e))
+            self.cancel_loading()
             return
 
         start_tm_str = self.time_from_date_edit.date().toString(
@@ -598,15 +648,16 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         self.data_loader.init_loader(
             self.data_type,
             self.tracking_info_text_browser,
+            self.is_multiplex,
             self.list_of_dir,
             self.selected_rt130_paths,
             req_wf_chans=self.req_wf_chans,
             req_soh_chans=self.req_soh_chans,
+            gap_minimum=self.gap_minimum,
             read_start=self.start_tm,
             read_end=self.end_tm,
             include_mp123=self.mass_pos_123zne_check_box.isChecked(),
-            include_mp456=self.mass_pos_456uvw_check_box.isChecked(),
-            rt130_waveform_data_req=rt130_waveform_data_req
+            include_mp456=self.mass_pos_456uvw_check_box.isChecked()
         )
 
         self.data_loader.worker.finished.connect(self.data_loaded)
@@ -677,13 +728,19 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             self.is_stopping = False
 
     @QtCore.Slot()
-    def data_loaded(self, data_obj: DataTypeModel):
+    def data_loaded(self, data_obj: GeneralData):
         """
         Process the loaded data.
         :param data_obj: the data object that contains the loaded data.
         """
         self.is_loading_data = False
         self.data_object = data_obj
+        if (self.data_type == 'Q330' and
+                'LOG' not in data_obj.log_data[data_obj.selected_key]):
+            log_message = ("Channel 'LOG' is required to get file info and "
+                           "gps info for Q330", LogType.WARNING)
+            self.processing_log.append(log_message)
+            return
         try:
             self.gps_dialog.gps_points = extract_gps_data(data_obj)
         except ValueError as e:
@@ -718,8 +775,11 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         """
         if self.has_problem:
             return
-        self.clear_plots()
         self.is_plotting_soh = True
+        self.plotting_widget.set_colors(self.color_mode)
+        self.waveform_dlg.plotting_widget.set_colors(self.color_mode)
+        self.tps_dlg.plotting_widget.set_colors(self.color_mode)
+        self.gps_dialog.set_colors(self.color_mode)
 
         d_obj = self.data_object
 
@@ -727,7 +787,6 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
 
         sel_key = d_obj.selected_key
         d_obj.reset_all_selected_data()
-        d_obj.reset_need_process_for_mass_pos()
         try:
             check_masspos(d_obj.mass_pos_data[sel_key], sel_key,
                           self.mass_pos_123zne_check_box.isChecked(),
@@ -842,6 +901,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         # current directory
         self.current_directory_changed.emit(path)
         self.current_dir = path
+        self.save_plot_dir = path
         execute_db(f'UPDATE PersistentData SET FieldValue="{path}" WHERE '
                    'FieldName="currentDirectory"')
         self.set_open_files_list_texts()
@@ -1060,10 +1120,6 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         if not checked:
             return
         self.color_mode = color_mode
-        self.plotting_widget.set_colors(color_mode)
-        self.waveform_dlg.plotting_widget.set_colors(color_mode)
-        self.tps_dlg.plotting_widget.set_colors(color_mode)
-        self.gps_dialog.set_colors(color_mode)
 
     @QtCore.Slot()
     def clear_file_search(self):
diff --git a/sohstationviewer/view/plotting/gps_plot/extract_gps_data.py b/sohstationviewer/view/plotting/gps_plot/extract_gps_data.py
index cd23e0b10b5c8b71d14676dc1019c31984db4aef..9a876211798221f824298ca880f39f94a1e7f734 100644
--- a/sohstationviewer/view/plotting/gps_plot/extract_gps_data.py
+++ b/sohstationviewer/view/plotting/gps_plot/extract_gps_data.py
@@ -6,8 +6,7 @@ from typing import List, Optional, Dict, NoReturn
 import numpy as np
 from obspy import UTCDateTime
 
-from sohstationviewer.controller.processing import detect_data_type
-from sohstationviewer.model.mseed.mseed import MSeed
+from sohstationviewer.model.mseed_data.mseed import MSeed
 from sohstationviewer.model.reftek.reftek import RT130
 from sohstationviewer.view.plotting.gps_plot.gps_point import GPSPoint
 from sohstationviewer.view.util.enums import LogType
@@ -184,9 +183,10 @@ def get_gps_channel_prefix(data_obj: MSeed, data_type: str) -> Optional[str]:
 
         # Determine the GPS channels by checking if the current data set
         # has all the GPS channels of a data type.
-        if pegasus_gps_channels & data_obj.channels == pegasus_gps_channels:
+        channels = set(data_obj.soh_data[data_obj.selected_key].keys())
+        if pegasus_gps_channels & channels == pegasus_gps_channels:
             gps_prefix = 'V'
-        elif centaur_gps_channels & data_obj.channels == centaur_gps_channels:
+        elif centaur_gps_channels & channels == centaur_gps_channels:
             gps_prefix = 'G'
         else:
             msg = "Can't detect GPS channels."
@@ -234,7 +234,9 @@ def extract_gps_data_pegasus_centaur(data_obj: MSeed, data_type: str
     gps_prefix = get_gps_channel_prefix(data_obj, data_type)
     gps_chans = {gps_prefix + 'NS', gps_prefix + 'LA', gps_prefix + 'LO',
                  gps_prefix + 'EL'}
-    channels = data_obj.stream_header_by_key_chan[data_obj.selected_key].keys()
+    if data_obj.selected_key is None:
+        return []
+    channels = data_obj.soh_data[data_obj.selected_key].keys()
     if not gps_chans.issubset(channels):
         missing_gps_chans = gps_chans - channels
         missing_gps_chans_string = ', '.join(missing_gps_chans)
@@ -434,8 +436,23 @@ def gps_data_rt130(data_obj: RT130) -> List[GPSPoint]:
 
 @extract_gps_data.register(MSeed)
 def gps_data_mseed(data_obj: MSeed) -> List[GPSPoint]:
-    data_type = detect_data_type(data_obj.list_of_dir)
+    try:
+        data_type = data_obj.data_type
+    except Exception:
+        data_type = 'Unknown'
+
     if data_type == 'Q330':
         return extract_gps_data_q330(data_obj)
     elif data_type == 'Centaur' or data_type == 'Pegasus':
         return extract_gps_data_pegasus_centaur(data_obj, data_type)
+    else:
+        # data_type = "Unknown"
+        try:
+            gps_data = extract_gps_data_q330(data_obj)
+        except KeyError:
+            try:
+                gps_data = extract_gps_data_pegasus_centaur(
+                    data_obj, data_type)
+            except AttributeError:
+                return []
+        return gps_data
diff --git a/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py b/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
index 229544d77488a53b07c49a8a6b7254d969a92e22..2ef180480b0b8f98ad7c8661aeb0f0f71747fc0c 100644
--- a/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
+++ b/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
@@ -1,6 +1,6 @@
 # Define functions to call processor
 
-from typing import Tuple, Union, Dict, Callable, List, Optional
+from typing import Tuple, Union, Dict, List
 
 from PySide2 import QtCore
 
@@ -105,20 +105,17 @@ class MultiThreadedPlottingWidget(PlottingWidget):
             return True
 
     def create_plotting_channel_processors(
-            self, plotting_data: Dict,
-            get_plot_info: Optional[Callable[[str, Dict, str], Dict]]) -> None:
+            self, plotting_data: Dict, need_db_info: bool = False) -> None:
         """
         Create a data processor for each channel data.
 
         :param plotting_data: dict of data by chan_id
-        :param get_plot_info: function to get plotting info from database
+        :param need_db_info: flag to get db info
         """
         for chan_id in plotting_data:
-            if get_plot_info is not None:
-                chan_db_info = get_plot_info(chan_id,
-                                             plotting_data[chan_id],
-                                             self.parent.data_type,
-                                             self.c_mode)
+            if need_db_info:
+                chan_db_info = get_chan_plot_info(
+                    chan_id, self.parent.data_type, self.c_mode)
                 if chan_db_info['height'] == 0:
                     # not draw
                     continue
@@ -196,16 +193,10 @@ class MultiThreadedPlottingWidget(PlottingWidget):
                 self.clean_up()
                 self.finished.emit()
                 return
-            self.create_plotting_channel_processors(
-                self.plotting_data1, self.get_plot_info)
-            self.create_plotting_channel_processors(
-                self.plotting_data2, get_chan_plot_info)
+            self.create_plotting_channel_processors(self.plotting_data1, True)
+            self.create_plotting_channel_processors(self.plotting_data2, True)
             self.process_channel()
 
-    def get_plot_info(self, *args, **kwargs):
-        # function to get database info for channels in self.plotting_data1
-        pass
-
     @QtCore.Slot()
     def process_channel(self, channel_data=None, channel_id=None):
         """
@@ -347,6 +338,6 @@ class MultiThreadedPlottingWidget(PlottingWidget):
             self.is_working = True
             start_msg = 'Zooming in...'
             display_tracking_info(self.tracking_box, start_msg, 'info')
-            self.create_plotting_channel_processors(self.plotting_data1, None)
-            self.create_plotting_channel_processors(self.plotting_data2, None)
+            self.create_plotting_channel_processors(self.plotting_data1)
+            self.create_plotting_channel_processors(self.plotting_data2)
             self.process_channel()
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py b/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
index 30e5abe461d93f8927ea9363023004150618e25c..50d8f93fe9bf18374d20e504607d28372843cb2d 100644
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
@@ -9,7 +9,7 @@ from matplotlib.backends.backend_qt5agg import (
     FigureCanvasQTAgg as Canvas)
 
 from sohstationviewer.controller.plotting_data import (
-    get_gaps, get_time_ticks, get_unit_bitweight)
+    get_time_ticks, get_unit_bitweight)
 
 from sohstationviewer.conf import constants
 from sohstationviewer.view.util.color import clr
@@ -78,6 +78,7 @@ class PlottingAxes:
             labelbottom = False
         else:
             labelbottom = True
+            self.parent.plotting_bot -= 0.007       # space for ticks
         timestamp_bar.tick_params(which='major', length=7, width=2,
                                   direction='inout',
                                   colors=self.parent.display_color['basic'],
@@ -90,7 +91,8 @@ class PlottingAxes:
                                  fontweight='bold',
                                  fontsize=self.parent.font_size,
                                  rotation=0,
-                                 labelpad=constants.HOUR_TO_TMBAR_D,
+                                 labelpad=constants.HOUR_TO_TMBAR_D *
+                                 self.parent.ratio_w,
                                  ha='left',
                                  color=self.parent.display_color['basic'])
         # not show any y ticks
@@ -112,7 +114,8 @@ class PlottingAxes:
         timestamp_bar.set_xticks(times, minor=True)
         timestamp_bar.set_xticks(major_times)
         timestamp_bar.set_xticklabels(major_time_labels,
-                                      fontsize=self.parent.font_size + 2)
+                                      fontsize=self.parent.font_size +
+                                      2 * self.parent.ratio_w)
         timestamp_bar.set_xlim(self.parent.min_x, self.parent.max_x)
 
     def create_axes(self, plot_b, plot_h, has_min_max_lines=True):
@@ -221,7 +224,7 @@ class PlottingAxes:
                 rotation='horizontal',
                 transform=ax.transAxes,
                 color=color,
-                size=self.parent.font_size + 2
+                size=self.parent.font_size + 2 * self.parent.ratio_w
             )
 
         # set samples' total on right side
@@ -322,15 +325,15 @@ class PlottingAxes:
 
         :param gaps: [[float, float], ] - list of [min, max] of gaps
         """
-        if self.main_window.min_gap is None:
+        if self.main_window.gap_minimum is None:
             return
-        self.gaps = gaps = get_gaps(gaps, self.main_window.min_gap)
+        self.gaps = gaps
         self.parent.plotting_bot -= 0.003
         self.parent.gap_bar = self.create_axes(self.parent.plotting_bot,
                                                0.001,
                                                has_min_max_lines=False)
 
-        gap_label = f"GAP({self.main_window.min_gap}min)"
+        gap_label = f"GAP({self.main_window.gap_minimum}sec)"
         h = 0.001  # height of rectangle represent gap
         self.set_axes_info(self.parent.gap_bar, [len(gaps)],
                            label=gap_label)
@@ -409,4 +412,4 @@ class PlottingAxes:
                       horizontalalignment='left',
                       transform=self.parent.timestamp_bar_top.transAxes,
                       color=self.parent.display_color['basic'],
-                      size=self.parent.font_size)
+                      size=self.parent.font_size + 2 * self.parent.ratio_w)
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_processor.py b/sohstationviewer/view/plotting/plotting_widget/plotting_processor.py
index b56e09a2e4431dc95513c39340526543a3779912..764369320011cf6b6df691b599b165a937a54d4f 100644
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_processor.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_processor.py
@@ -1,16 +1,8 @@
-from typing import List, Dict
-
 from PySide2 import QtCore
-from obspy import UTCDateTime
-from obspy.core import Trace
 
 from sohstationviewer.conf import constants as const
-import numpy as np
-
-# from sohstationviewer.model.decimator import Decimator
-from sohstationviewer.model.downsampler import Downsampler
-from sohstationviewer.model.handling_data import \
-    trim_downsample_chan_with_spr_less_or_equal_1
+from sohstationviewer.view.plotting.plotting_widget.plotting_processor_helper\
+    import downsample
 
 
 class PlottingChannelProcessorSignals(QtCore.QObject):
@@ -33,10 +25,6 @@ class PlottingChannelProcessor(QtCore.QRunnable):
 
         self.stop_requested = False
 
-        self.downsampler = Downsampler()
-        # self.downsampler = Decimator()
-        self.decimator = self.downsampler
-
         self.channel_data: dict = channel_data
         self.channel_id = channel_id
 
@@ -44,288 +32,27 @@ class PlottingChannelProcessor(QtCore.QRunnable):
         self.end_time = end_time
         self.first_time = first_time
 
-        self.trimmed_trace_list = None
-
-        self.downsampled_times_list = []
-        self.downsampled_data_list = []
-        self.downsampled_list_lock = QtCore.QMutex()
-
-    def trim_plotting_data(self) -> List[Dict]:
-        """
-        Trim off plotting traces whose times do not intersect the closed
-        interval [self.start_time, self.end_time]. Store the traces that are
-        not removed in self.trimmed_trace_list.
-        """
-        data_start_time = self.channel_data['tracesInfo'][0]['startTmEpoch']
-        data_end_time = self.channel_data['tracesInfo'][-1]['endTmEpoch']
-        if (self.start_time > data_end_time
-                or self.end_time < data_start_time):
-            return []
-
-        good_start_indices = [index
-                              for index, tr
-                              in enumerate(self.channel_data['tracesInfo'])
-                              if tr['startTmEpoch'] > self.start_time]
-        if good_start_indices:
-            start_idx = good_start_indices[0]
-            if start_idx > 0:
-                start_idx -= 1  # start_time in middle of trace
-        else:
-            start_idx = 0
-
-        good_end_indices = [idx
-                            for idx, tr
-                            in enumerate(self.channel_data['tracesInfo'])
-                            if tr['endTmEpoch'] <= self.end_time]
-        if good_end_indices:
-            end_idx = good_end_indices[-1]
-            if end_idx < len(self.channel_data['tracesInfo']) - 1:
-                end_idx += 1  # end_time in middle of trace
-        else:
-            end_idx = 0
-        end_idx += 1  # a[x:y+1] = [a[x], ...a[y]]
-
-        good_indices = slice(start_idx, end_idx)
-        self.trimmed_trace_list = self.channel_data['tracesInfo'][good_indices]
-
-    def init_downsampler_(self):
-        """
-        Initialize the downsampler by loading the memmapped traces' data  into
-        Obsby Trace and creating a downsampler worker for each loaded trace
-        which use Obspy's decimate for downsampling
-
-        Currently using decimate from obspy is slower than using downsample.
-        Besides, decimate taking sample constantly while downsample which using
-        chunckminmax, taking min, max of each part, is better in detecting
-        spike of signal.
-
-        We decide to not use this function but leave this here as reference
-        to compare with the result of other method.
-        """
-        decimate_factor = int(self.channel_size / const.CHAN_SIZE_LIMIT)
-        if decimate_factor > 16:
-            decimate_factor = 16
-        do_decimate = decimate_factor > 1
-
-        for tr in self.trimmed_trace_list:
-            if not self.stop_requested:
-                trace = Trace(data=np.memmap(tr['data_f'], dtype='int64',
-                                             mode='r', shape=tr['size']))
-                trace.stats.starttime = UTCDateTime(tr['startTmEpoch'])
-                trace.stats.sampling_rate = tr['samplerate']
-                worker = self.decimator.add_worker(
-                    trace, decimate_factor, do_decimate
-                )
-                # We need these connections to run in the background thread.
-                # However, their owner (the channel processor) is in the main
-                # thread, so the default connection type would make them
-                # run in the main thread. Instead, we have to use a direct
-                # connection to make these slots run in the background thread.
-                worker.signals.finished.connect(
-                    self.decimator_trace_processed,
-                    type=QtCore.Qt.DirectConnection
-                )
-                worker.signals.stopped.connect(
-                    self.stopped,
-                    type=QtCore.Qt.DirectConnection
-                )
-
-    def init_downsampler(self):
-        """
-        Initialize the downsampler by loading the memmapped traces' data and
-        creating a downsampler worker for each loaded trace.
-        """
-        # Calculate the number of requested_points
-        total_size = sum([tr['size'] for tr in self.trimmed_trace_list])
-        requested_points = 0
-        if total_size > const.CHAN_SIZE_LIMIT:
-            requested_points = int(
-                const.CHAN_SIZE_LIMIT / len(self.trimmed_trace_list)
-            )
-
-        # Downsample the data
-        for tr_idx, tr in enumerate(self.trimmed_trace_list):
-            if not self.stop_requested:
-                times = np.linspace(tr['startTmEpoch'], tr['endTmEpoch'],
-                                    tr['size'])
-                data = np.memmap(tr['data_f'],
-                                 dtype='int64', mode='r',
-                                 shape=tr['size'])
-                indexes = np.where((self.start_time <= times) &
-                                   (times <= self.end_time))
-                times = times[indexes]
-                data = data[indexes]
-                do_downsample = (requested_points != 0)
-                worker = self.downsampler.add_worker(
-                    times, data, rq_points=requested_points,
-                    do_downsample=do_downsample
-                )
-                # We need these connections to run in the background thread.
-                # However, their owner (the channel processor) is in the main
-                # thread, so the default connection type would make them
-                # run in the main thread. Instead, we have to use a direct
-                # connection to make these slots run in the background thread.
-                worker.signals.finished.connect(
-                    self.trace_processed, type=QtCore.Qt.DirectConnection
-                )
-                worker.signals.stopped.connect(
-                    self.stopped, type=QtCore.Qt.DirectConnection
-                )
-
-    @QtCore.Slot()
-    def trace_processed(self, times, data):
-        """
-        The slot called when the downsampler worker of a plotting trace
-        finishes its job. Add the downsampled data to the appropriate list.
-
-        If the worker that emitted the signal is the last one, combine and
-        store the processed data in self.channel_data but not combine when
-        there is an overlap and then emit the finished signal of this class.
-
-        :param times: the downsampled array of time data.
-        :param data: the downsampled array of plotting data.
-        """
-        self.downsampled_list_lock.lock()
-        self.downsampled_times_list.append(times)
-        self.downsampled_data_list.append(data)
-        self.downsampled_list_lock.unlock()
-        if len(self.downsampled_times_list) == len(self.trimmed_trace_list):
-            times_list = []
-            data_list = []
-            last_end_time = 0
-            current_times = []
-            current_data = []
-            for idx, tr in enumerate(self.trimmed_trace_list):
-                # combine traces together but split at overlap
-                if tr['startTmEpoch'] > last_end_time:
-                    current_times.append(self.downsampled_times_list[idx])
-                    current_data.append(self.downsampled_data_list[idx])
-                else:
-                    if len(current_times) > 0:
-                        times_list.append(np.hstack(current_times))
-                        data_list.append(np.hstack(current_data))
-                    current_times = [self.downsampled_times_list[idx]]
-                    current_data = [self.downsampled_data_list[idx]]
-                last_end_time = tr['endTmEpoch']
-
-            times_list.append(np.hstack(current_times))
-            data_list.append(np.hstack(current_data))
-            self.channel_data['times'] = times_list
-            self.channel_data['data'] = data_list
-            self.signals.finished.emit(self.channel_data, self.channel_id)
-
-    @QtCore.Slot()
-    def decimator_trace_processed(self, trace: Trace):
-        """
-        The slot called when the decimator worker of a plotting trace
-        finishes its job. Add the decimated trace.data to the appropriate list,
-        construct time using np.linspace and add to the appropriate list.
-
-        If the worker that emitted the signal is the last one, combine and
-        store the processed data in self.channel_data but not combine when
-        there is an overlap and then emit the finished signal of this class.
-
-        :param trace: the decimated trace.
-        """
-        self.downsampled_list_lock.lock()
-        self.downsampled_times_list.append(
-            np.linspace(trace.stats.starttime.timestamp,
-                        trace.stats.endtime.timestamp,
-                        trace.stats.npts)
-        )
-        self.downsampled_data_list.append(trace.data)
-        self.downsampled_list_lock.unlock()
-        if len(self.downsampled_times_list) == len(self.trimmed_trace_list):
-            times_list = []
-            data_list = []
-            last_end_time = 0
-            current_times = []
-            current_data = []
-            for idx, tr in enumerate(self.trimmed_trace_list):
-                # combine traces together but split at overlap
-                if tr['startTmEpoch'] > last_end_time:
-                    current_times.append(self.downsampled_times_list[idx])
-                    current_data.append(self.downsampled_data_list[idx])
-                else:
-                    if len(current_times) > 0:
-                        times_list.append(np.hstack(current_times))
-                        data_list.append(np.hstack(current_data))
-                    current_times = [self.downsampled_times_list[idx]]
-                    current_data = [self.downsampled_data_list[idx]]
-                last_end_time = tr['endTmEpoch']
-
-            times_list.append(np.hstack(current_times))
-            data_list.append(np.hstack(current_data))
-            self.channel_data['times'] = times_list
-            self.channel_data['data'] = data_list
-            self.signals.finished.emit(self.channel_data, self.channel_id)
-
     def run(self):
         """
-        The main method of this class. First check that the channel is not
-        already small enough after the first trim that there is no need for
-        further processing. Then, trim the plotting data based on
-        self.start_time and self.end_time. Afterwards, do some checks to
-        determine if there is a need to downsample the data. If yes, initialize
-        and start the downsampler.
+        Because the reader now loads only the requested data instead of all
+        data in the files, each channel has only one trace. We can assign
+        that trace's times and data directly to the channel's times and
+        data, so trimming is no longer necessary.
         """
 
-        if 'needProcess' in self.channel_data:
-            # refer to DataTypeModel.reset_need_process_for_mass_pos
-            # for needProcess
-            if not self.channel_data['needProcess']:
-                self.finished.emit(self.channel_data, self.channel_id)
-                return
-            else:
-                # put needProcess flag down
-                self.channel_data['needProcess'] = False
-
-        if self.channel_data['fullData']:
-            # Data is small, already has full in the first trim.
-            self.finished.emit(self.channel_data, self.channel_id)
-            return
-
-        self.trim_plotting_data()
-
-        if not self.trimmed_trace_list:
-            self.channel_data['fullData'] = True
-            self.channel_data['times'] = np.array([])
-            self.channel_data['data'] = np.array([])
-            self.finished.emit(self.channel_data, self.channel_id)
-            return False
-
-        if self.channel_data['samplerate'] <= 1:
-            self.channel_data['needConvert'] = True
-            self.channel_data['times'] = [
-                tr['times'] for tr in self.trimmed_trace_list]
-            self.channel_data['data'] = [
-                tr['data'] for tr in self.trimmed_trace_list]
-            trim_downsample_chan_with_spr_less_or_equal_1(
-                self.channel_data, self.start_time, self.end_time)
-            self.finished.emit(self.channel_data, self.channel_id)
-            return
-
-        self.channel_size = sum(
-            [tr['size'] for tr in self.trimmed_trace_list])
-
-        total_size = sum([tr['size'] for tr in self.trimmed_trace_list])
-        if not self.first_time and total_size > const.RECAL_SIZE_LIMIT:
-            # The data is so big that processing it would not make it any
-            # easier to understand the result plot.
-            self.finished.emit(self.channel_data, self.channel_id)
-            return
-        if total_size <= const.CHAN_SIZE_LIMIT and self.first_time:
-            self.channel_data['fullData'] = True
-
-        try:
-            del self.channel_data['times']
-            del self.channel_data['data']
-        except Exception:
-            pass
+        tr = self.channel_data['tracesInfo'][0]
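+        # 'logIdx' (SOH message line indexes) is only present for channels
+        # whose data points link to log lines; downsample it together with
+        # times and data so the indexes stay aligned.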
+        if 'logIdx' in tr.keys():
+            tr_times, tr_data, tr_logidx = downsample(
+                tr['times'], tr['data'], tr['logIdx'],
+                rq_points=const.CHAN_SIZE_LIMIT)
+            self.channel_data['logIdx'] = [tr_logidx]
+        else:
+            tr_times, tr_data, _ = downsample(
+                tr['times'], tr['data'], rq_points=const.CHAN_SIZE_LIMIT)
+        self.channel_data['times'] = [tr_times]
+        self.channel_data['data'] = [tr_data]
 
-        self.channel_data['needConvert'] = True
-        self.init_downsampler()
-        self.downsampler.start()
+        self.finished.emit(self.channel_data, self.channel_id)
 
     def request_stop(self):
         """
@@ -333,4 +60,3 @@ class PlottingChannelProcessor(QtCore.QRunnable):
         running.
         """
         self.stop_requested = True
-        self.downsampler.request_stop()
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_processor_helper.py b/sohstationviewer/view/plotting/plotting_widget/plotting_processor_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..049e295116bbc65b5273282f0e4f4cc09f78fa9c
--- /dev/null
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_processor_helper.py
@@ -0,0 +1,129 @@
+import numpy as np
+import math
+
+from sohstationviewer.conf import constants as const
+
+
+def downsample(times, data, log_indexes=None, rq_points=0):
+    """
+    Reduce the sample rate of times and data so that the returned arrays
+        have a size close to rq_points.
+    Since the functions used for downsampling (chunk_minmax()/constant_rate)
+        are very slow, the data values close to the mean (within
+        CUT_FROM_MEAN_FACTOR of the mean-to-peak range) will be removed
+        first. If the size still exceeds rq_points, continue to downsample.
+    :param times: numpy array - of a waveform channel's times
+    :param data: numpy array - of a waveform channel's data
+    :param log_indexes: numpy array - of a waveform channel's soh message line
+        index
+    :param rq_points: int - requested size to return.
+    :return np.array, np.array,(np.array) - new times and new data (and new
+        log_indexes) with the requested size
+    """
+    # Create a dummy array for log_indexes. However, this may slow down
+    # waveform downsampling because waveform channels are large and have no
+    # log_indexes.
+
+    if times.size <= rq_points:
+        return times, data, log_indexes
+    if log_indexes is None:
+        log_indexes = np.empty_like(times)
+    data_max = max(abs(data.max()), abs(data.min()))
+    data_mean = abs(data.mean())
+    indexes = np.where(
+        abs(data - data.mean()) >
+        (data_max - data_mean) * const.CUT_FROM_MEAN_FACTOR)
+    times = times[indexes]
+    data = data[indexes]
+    log_indexes = log_indexes[indexes]
+
+    if times.size <= rq_points:
+        return times, data, log_indexes
+
+    return chunk_minmax(times, data, log_indexes, rq_points)
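+
+# A minimal usage sketch for downsample() (editor's note; the numbers are
+# illustrative assumptions, not values from this project):
+#
+#   times = np.arange(100_000, dtype=float)
+#   data = np.sin(times / 50.0)
+#   new_times, new_data, _ = downsample(times, data, rq_points=1000)
+#   # new_times.size is close to 1000; each chunk keeps its min and max,
+#   # so spikes stay visible in the plot.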
+
+
+def chunk_minmax(times, data, log_indexes, rq_points):
+    """
+    Split data into different chunks, take the min, max of each chunk to add
+        to the data return
+    :param times: numpy array - of a channel's times
+    :param data: numpy array - of a channel's data
+    :param log_indexes: numpy array - of a channel's log_indexes
+    :param rq_points: int - requested size to return.
+    :return times, data: np.array, np.array - new times and new data with the
+        requested size
+    """
+    if times.size <= rq_points:
+        return times, data, log_indexes
+
+    if rq_points < 2:
+        return np.empty((1, 0)), np.empty((1, 0)), np.empty((1, 0))
+
+    # Since we grab the min and max from each chunk, we need to divide the
+    # requested number of points by 2.
+    chunk_size = rq_points // 2
+    chunk_count = math.ceil(times.size / chunk_size)
+
+    if chunk_count * chunk_size > times.size:
+        chunk_count -= 1
+        # Length of the trace is not divisible by the number of requested
+        # points. So split into an array that is divisible by the requested
+        # size, and an array that contains the excess. Downsample both,
+        # and combine. This case gives slightly more samples than
+        # the requested sample size, but not by much.
+        times_0 = times[:chunk_count * chunk_size]
+        data_0 = data[:chunk_count * chunk_size]
+        log_indexes_0 = log_indexes[:chunk_count * chunk_size]
+
+        excess_times = times[chunk_count * chunk_size:]
+        excess_data = data[chunk_count * chunk_size:]
+        excess_log_indexes = log_indexes[chunk_count * chunk_size:]
+
+        new_times_0, new_data_0, new_log_indexes_0 = downsample(
+            times_0, data_0, log_indexes_0, rq_points=rq_points
+        )
+
+        # right-most subarray is always smaller than
+        # the initially requested number of points.
+        excess_times, excess_data, excess_log_indexes = downsample(
+            excess_times, excess_data, excess_log_indexes,
+            rq_points=chunk_count
+        )
+
+        new_times = np.zeros(new_times_0.size + excess_times.size)
+        new_data = np.zeros(new_data_0.size + excess_data.size)
+        new_log_indexes = np.zeros(
+            new_log_indexes_0.size + excess_log_indexes.size
+        )
+
+        new_times[:new_times_0.size] = new_times_0
+        new_data[:new_data_0.size] = new_data_0
+        new_log_indexes[:new_log_indexes_0.size] = new_log_indexes_0
+
+        new_times[new_times_0.size:] = excess_times
+        new_data[new_data_0.size:] = excess_data
+        new_log_indexes[new_log_indexes_0.size:] = excess_log_indexes
+
+        return new_times, new_data, new_log_indexes
+
+    new_times = times.reshape(chunk_size, chunk_count)
+    new_data = data.reshape(chunk_size, chunk_count)
+    new_log_indexes = log_indexes.reshape(chunk_size, chunk_count)
+
+    min_data_idx = np.argmin(new_data, axis=1)
+    max_data_idx = np.argmax(new_data, axis=1)
+
+    rows = np.arange(chunk_size)
+
+    mask = np.zeros(shape=(chunk_size, chunk_count), dtype=bool)
+    mask[rows, min_data_idx] = True
+    mask[rows, max_data_idx] = True
+
+    new_times = new_times[mask]
+    new_data = new_data[mask]
+    new_log_indexes = new_log_indexes[mask]
+    return new_times, new_data, new_log_indexes
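+
+
+if __name__ == '__main__':
+    # Editor's sketch, not part of the original module: downsample a noisy
+    # series and report how much it is reduced. The largest-magnitude sample
+    # survives both the mean cut and the per-chunk min/max selection.
+    rng = np.random.default_rng(0)
+    t = np.arange(50_000, dtype=float)
+    d = rng.normal(size=t.size)
+    new_t, new_d, _ = downsample(t, d, rq_points=1_000)
+    print(f'{t.size} points -> {new_t.size} points')
+    print(f'largest |value| kept: {np.abs(new_d).max() == np.abs(d).max()}')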
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py b/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
index 9cc7a78fbcbd701deedda58b3b3f1b1d912900aa..77a60ce7172299433665c51d96590a7722aa2634 100755
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
@@ -2,10 +2,10 @@
 Class of which object is used to plot data
 """
 from typing import List, Optional, Union
-
 import matplotlib.text
-from PySide2.QtCore import QTimer, Qt
 from matplotlib import pyplot as pl
+from matplotlib.transforms import Bbox
+from PySide2.QtCore import QTimer, Qt
 from PySide2 import QtCore, QtWidgets
 from PySide2.QtWidgets import QWidget, QApplication, QTextBrowser
 
@@ -18,6 +18,7 @@ from sohstationviewer.view.plotting.plotting_widget.plotting_axes import (
     PlottingAxes
 )
 from sohstationviewer.view.plotting.plotting_widget.plotting import Plotting
+from sohstationviewer.view.save_plot_dialog import SavePlotDialog
 
 from sohstationviewer.controller.plotting_data import format_time
 from sohstationviewer.controller.util import display_tracking_info
@@ -110,6 +111,7 @@ class PlottingWidget(QtWidgets.QScrollArea):
         font_size: float - font size on plot. With some require bigger font,
             +2 to the font_size
         """
+        self.base_font_size = 7
         self.font_size = 7
         """
         bottom: float - y position of the bottom edge of all plots in self.axes
@@ -243,6 +245,7 @@ class PlottingWidget(QtWidgets.QScrollArea):
         # set view size fit with the scroll's view port size
         self.main_widget.setFixedWidth(geo.width())
         self.ratio_w = geo.width() / self.width_base_px
+        self.font_size = self.ratio_w * self.base_font_size
         self.plotting_w = self.ratio_w * self.width_base
         self.plotting_l = self.ratio_w * self.plotting_l_base
         if self.plot_total == 0:
@@ -366,12 +369,6 @@ class PlottingWidget(QtWidgets.QScrollArea):
             # tps_t was assigned in TPS Widget
             xdata = self.tps_t
         else:
-            if (modifiers == QtCore.Qt.ShiftModifier and
-                    self.zoom_marker1_shown):
-                # When start zooming, need to reset mass position for processor
-                # to decide to calculate mass position channel or not
-                self.data_object.reset_need_process_for_mass_pos()
-
             xdata = self.get_timestamp(event)
 
         # We only want to remove the text on the ruler when we start zooming in
@@ -652,6 +649,57 @@ class PlottingWidget(QtWidgets.QScrollArea):
         """
         self.peer_plotting_widgets = widgets
 
+    def save_plot(self, default_name='plot'):
+        if self.c_mode != self.main_window.color_mode:
+            main_color = constants.ALL_COLOR_MODES[self.main_window.color_mode]
+            curr_color = constants.ALL_COLOR_MODES[self.c_mode]
+            msg = (f"Main window's color mode is {main_color}"
+                   f" but the mode haven't been applied to plotting.\n\n"
+                   f"Do you want to cancel to apply {main_color} mode "
+                   f"by clicking RePlot?\n"
+                   f"Or continue with {curr_color}?")
+            msgbox = QtWidgets.QMessageBox()
+            msgbox.setWindowTitle("Color Mode Conflict")
+            msgbox.setText(msg)
+            msgbox.addButton(QtWidgets.QMessageBox.Cancel)
+            msgbox.addButton('Continue', QtWidgets.QMessageBox.YesRole)
+            result = msgbox.exec_()
+            if result == QtWidgets.QMessageBox.Cancel:
+                return
+            self.main_window.color_mode = self.c_mode
+            if self.c_mode == 'B':
+                self.main_window.background_black_radio_button.setChecked(True)
+            else:
+                self.main_window.background_white_radio_button.setChecked(True)
+        if self.c_mode == 'B':
+            msg = ("The current background mode is black.\n"
+                   "Do you want to cancel to change the background mode "
+                   "before saving the plots to file?")
+            msgbox = QtWidgets.QMessageBox()
+            msgbox.setWindowTitle("Background Mode Confirmation")
+            msgbox.setText(msg)
+            msgbox.addButton(QtWidgets.QMessageBox.Cancel)
+            msgbox.addButton('Continue', QtWidgets.QMessageBox.YesRole)
+            result = msgbox.exec_()
+            if result == QtWidgets.QMessageBox.Cancel:
+                return
+        save_plot_dlg = SavePlotDialog(
+            self.parent, self.main_window, default_name)
+        save_plot_dlg.exec_()
+        save_file_path = save_plot_dlg.save_file_path
+        if save_file_path is None:
+            return
+        dpi = save_plot_dlg.dpi
+
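+        # Crop the saved image to the plotted region. The Bbox below mixes
+        # plotting_bot (a 0-1 figure fraction) scaled by 100 with a width
+        # scaled by ratio_w; this matches the figure geometry this widget
+        # sets up (editor's reading of the code, not a documented contract).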
+        self.plotting_axes.fig.savefig(
+            save_file_path,
+            bbox_inches=Bbox([[0, self.plotting_bot*100],
+                              [self.ratio_w*15.5, 100]]),
+            dpi=dpi
+        )
+        msg = f"Graph is saved at {save_file_path}"
+        display_tracking_info(self.tracking_box, msg)
+
     def clear(self):
         self.plotting_axes.fig.clear()
         self.axes = []
diff --git a/sohstationviewer/view/plotting/state_of_health_widget.py b/sohstationviewer/view/plotting/state_of_health_widget.py
index 4269c0e292b59538526941741200f679efbd0d19..acb00711d666b1c595aa2f9553a50eb1bf41f1b2 100644
--- a/sohstationviewer/view/plotting/state_of_health_widget.py
+++ b/sohstationviewer/view/plotting/state_of_health_widget.py
@@ -4,12 +4,8 @@ from typing import Tuple, Union, Dict
 
 from sohstationviewer.view.util.plot_func_names import plot_functions
 
-from sohstationviewer.controller.util import apply_convert_factor
-
 from sohstationviewer.model.data_type_model import DataTypeModel
 
-from sohstationviewer.database.extract_data import get_chan_plot_info
-
 from sohstationviewer.view.util.enums import LogType
 from sohstationviewer.view.plotting.plotting_widget.\
     multi_threaded_plotting_widget import MultiThreadedPlottingWidget
@@ -35,10 +31,10 @@ class SOHWidget(MultiThreadedPlottingWidget):
         :param time_ticks_total: max number of tick to show on time bar
         """
         self.data_object = d_obj
-        self.plotting_data1 = d_obj.soh_data[key]
-        self.plotting_data2 = d_obj.mass_pos_data[key]
-        channel_list = d_obj.soh_data[key].keys()
-        data_time = d_obj.data_time[key]
+        self.plotting_data1 = d_obj.soh_data[key] if key else {}
+        self.plotting_data2 = d_obj.mass_pos_data[key] if key else {}
+        channel_list = d_obj.soh_data[key].keys() if key else []
+        data_time = d_obj.data_time[key] if key else [0, 1]
         ret = super().init_plot(d_obj, data_time, key, start_tm, end_tm,
                                 time_ticks_total, is_waveform=False)
         if not ret:
@@ -52,10 +48,6 @@ class SOHWidget(MultiThreadedPlottingWidget):
             self.processing_log.append((msg, LogType.WARNING))
         return True
 
-    def get_plot_info(self, *args, **kwargs):
-        # function to get database info for soh channels in self.plotting_data1
-        return get_chan_plot_info(*args, **kwargs)
-
     def plot_single_channel(self, c_data: Dict, chan_id: str):
         """
         Plot the channel chan_id.
@@ -70,7 +62,6 @@ class SOHWidget(MultiThreadedPlottingWidget):
             return
         chan_db_info = c_data['chan_db_info']
         plot_type = chan_db_info['plotType']
-        apply_convert_factor(c_data, chan_db_info['convertFactor'])
 
         linked_ax = None
         if chan_db_info['linkedChan'] not in [None, 'None', '']:
diff --git a/sohstationviewer/view/plotting/time_power_squared_dialog.py b/sohstationviewer/view/plotting/time_power_squared_dialog.py
index 77c7c713d1ed6ad4808a47435d0e1aabba8e9ecf..f27f3c4362b8d0cf30d521808810b3da6fc5856d 100755
--- a/sohstationviewer/view/plotting/time_power_squared_dialog.py
+++ b/sohstationviewer/view/plotting/time_power_squared_dialog.py
@@ -13,7 +13,7 @@ from sohstationviewer.controller.util import (
     display_tracking_info, add_thousand_separator,
 )
 from sohstationviewer.database.extract_data import (
-    get_color_def, get_color_ranges, get_chan_label,
+    get_color_def, get_color_ranges, get_seismic_chan_label,
 )
 from sohstationviewer.model.data_type_model import DataTypeModel
 from sohstationviewer.model.handling_data import (
@@ -222,12 +222,12 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
 
         total_days = c_data['tps_data'].shape[0]
         plot_h = self.plotting_axes.get_height(
-            total_days/2, bw_plots_distance=0.003, pixel_height=12.1)
+            total_days/1.5, bw_plots_distance=0.003, pixel_height=12.1)
         ax = self.create_axes(self.plotting_bot, plot_h)
         ax.spines[['right', 'left', 'top', 'bottom']].set_visible(False)
         ax.text(
             -0.12, 1,
-            f"{get_chan_label(chan_id)} {c_data['samplerate']}sps",
+            f"{get_seismic_chan_label(chan_id)} {c_data['samplerate']}sps",
             horizontalalignment='left',
             verticalalignment='top',
             rotation='horizontal',
@@ -471,6 +471,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
         with new color range selected.
         """
         self.clear()
+        self.set_colors(self.main_window.color_mode)
         self.plotting_bot = const.BOTTOM
         title = get_title(self.set_key, self.min_x, self.max_x, self.date_mode)
         self.timestamp_bar_top = self.plotting_axes.add_timestamp_bar(0.)
@@ -559,6 +560,7 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         """
         self.color_range_choice = QtWidgets.QComboBox(self)
         self.color_range_choice.addItems(self.color_ranges)
+
         self.color_range_choice.setCurrentText('High')
         color_layout.addWidget(self.color_range_choice)
         # ##################### Replot button ########################
@@ -566,8 +568,8 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         buttons_layout.addWidget(self.replot_button)
 
         # ##################### Save button ##########################
-        self.save_button = QtWidgets.QPushButton('Save', self)
-        buttons_layout.addWidget(self.save_button)
+        self.save_plot_button = QtWidgets.QPushButton('Save Plot', self)
+        buttons_layout.addWidget(self.save_plot_button)
 
         self.info_text_browser.setFixedHeight(60)
         bottom_layout.addWidget(self.info_text_browser)
@@ -600,7 +602,7 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         """
         Connect functions to widgets
         """
-        self.save_button.clicked.connect(self.save)
+        self.save_plot_button.clicked.connect(self.save_plot)
         self.replot_button.clicked.connect(self.plotting_widget.replot)
         self.color_range_choice.currentTextChanged.connect(
             self.color_range_changed)
@@ -617,8 +619,8 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         self.sel_col_labels = self.color_label[cr_index]
 
     @QtCore.Slot()
-    def save(self):
+    def save_plot(self):
         """
         Save the plotting to a file
         """
-        print("save")
+        self.plotting_widget.save_plot('TPS-Plot')
diff --git a/sohstationviewer/view/plotting/time_power_squared_helper.py b/sohstationviewer/view/plotting/time_power_squared_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..b927a17c365b6f3808d04d3e3eb6f8fdd59580fa
--- /dev/null
+++ b/sohstationviewer/view/plotting/time_power_squared_helper.py
@@ -0,0 +1,218 @@
+import numpy as np
+from typing import Dict, Tuple, List
+
+from sohstationviewer.conf import constants as const
+
+
+def get_start_5mins_of_diff_days(start_tm: float, end_tm: float) -> np.ndarray:
+    """
+    FROM handling_data.get_start_5mins_of_diff_days()
+
+    Get the start times of all five-minute blocks for each day, starting
+        from the day of start_tm and ending at the day of end_tm.
+    :param start_tm: float - start time
+    :param end_tm: float - end time
+    :return start_5mins_of_diff_days: [[288 floats], ] - the start times of
+        all five-minute blocks of the days spanned by start_tm and end_tm,
+        in which each day has 288 five-minute blocks.
+    """
+    exact_day_tm = (start_tm // const.SEC_DAY) * const.SEC_DAY
+    exact_day_tm_list = []
+
+    if start_tm < exact_day_tm:
+        exact_day_tm_list = [exact_day_tm - const.SEC_DAY]
+
+    while exact_day_tm < end_tm:
+        exact_day_tm_list.append(exact_day_tm)
+        exact_day_tm += const.SEC_DAY
+
+    # list of start/end 5m in each day: start_5mins_of_diff_days
+    for idx, start_day_tm in enumerate(exact_day_tm_list):
+        start_5mins_of_day = np.arange(start_day_tm,
+                                       start_day_tm + const.SEC_DAY,
+                                       const.SEC_5M)
+        if idx == 0:
+            start_5mins_of_diff_days = np.array([start_5mins_of_day])
+        else:
+            start_5mins_of_diff_days = np.vstack(
+                (start_5mins_of_diff_days, start_5mins_of_day))
+    return start_5mins_of_diff_days
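+
+# Shape sketch (editor's note; epoch times simplified to small numbers):
+#   starts = get_start_5mins_of_diff_days(0.0, 90_000.0)
+#   # spans two days -> starts.shape == (2, 288)
+#   # starts[0][0] == 0.0 and starts[0][1] == 300.0 (const.SEC_5M)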
+
+
+def find_tps_tm_idx(
+        given_tm: float, start_5mins_of_diff_days: List[List[float]]) \
+        -> Tuple[int, int]:
+    """
+    FROM handling_data.find_tps_tm_idx()
+
+    Find the position of the given time (given_tm) in time-power-squared plot
+    :param given_tm: float - given time
+    :param start_5mins_of_diff_days: [[288 floats], ] - the start times of
+        all five-minute blocks of the given days, in which each day has 288
+        five-minute blocks.
+    :return x_idx: int - index of the 5m section
+    :return y_idx: int - index of the day the given time belongs to in the
+        plot
+    """
+    x_idx = None
+    y_idx = None
+    for day_idx, a_day_5mins in enumerate(start_5mins_of_diff_days):
+        for start_5m_idx, start_5m in enumerate(a_day_5mins):
+            if start_5m > given_tm:
+                # day indexes run from 0 downward (negative) because days
+                # are plotted from top to bottom
+                y_idx = - day_idx
+                x_idx = start_5m_idx - 1
+                if start_5m_idx == 0:
+                    # if start_5m_idx == 0, the given time belongs to the
+                    # last 5m of the previous day
+                    y_idx = -(day_idx - 1)
+                    x_idx = const.NO_5M_DAY - 1
+                break
+        if x_idx is not None:
+            break
+
+    if x_idx is None:
+        # x_idx is None when the given time falls into the last 5m of the
+        # last day. Although the time 24:00 of a day belongs to the next day
+        # in other cases, there are no more days to plot here, so it is
+        # harmless to place it in the last 5m of the last day.
+        x_idx = const.NO_5M_DAY - 1
+        y_idx = - (len(start_5mins_of_diff_days) - 1)
+
+    return x_idx, y_idx
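+
+# Sketch (editor's note), continuing the two-day example above: 86_700.0 is
+# 00:05 of the second day, i.e. five-minute block 1 of day 1, and days are
+# indexed downward for plotting, so:
+#   find_tps_tm_idx(86_700.0, starts) == (1, -1)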
+
+
+def get_tps_for_discontinuous_data(
+        channel_data: Dict,
+        start_5mins_of_diff_days: List[List[float]]) -> np.ndarray:
+    """
+    First loop: look in times for the indexes of each 5m block of each day.
+        Because the data is discontinuous, some blocks might have no data
+        points.
+    Second loop: for each 5m block, calculate the mean of the squares of all
+        data in that block (mean_square). For blocks that have no data
+        points, use the mean of the squares of the data in the previous and
+        next blocks if both have data; otherwise mean_square will be zero.
+
+    :param channel_data: dictionary that keeps data of a waveform channel
+    :param start_5mins_of_diff_days: the start times of all five-minute
+        intervals of the days, in which each day has 288 five-minute
+        intervals.
+    :return: array of the mean squares of the five-minute data, separated
+        into days
+    """
+    times = channel_data['tracesInfo'][0]['times']
+    data = channel_data['tracesInfo'][0]['data']
+    # create lists of indexes of the data points in each 5m block, separated
+    # into different days
+    tps_idxs = []
+    for start5m_of_a_day in start_5mins_of_diff_days:
+        tps_idxs.append([])
+        for start5m in start5m_of_a_day:
+            end5m = start5m + const.SEC_5M
+            indexes = np.where((start5m <= times) & (times < end5m))[0]
+            tps_idxs[-1].append(indexes)
+
+    # based on tps_idxs, calculate the mean square for each 5m block,
+    # separated into different days
+    tps_data = []
+    for day_idx in range(len(tps_idxs)):
+        tps_data.append([])
+        for idx_5m in range(len(tps_idxs[day_idx])):
+            try:
+                indexes = tps_idxs[day_idx][idx_5m]
+                if len(indexes) == 0:
+                    # No data points: check both sides; if both have data
+                    # points, calculate the mean square over them
+                    prev_indexes = tps_idxs[day_idx][idx_5m - 1]
+                    if idx_5m < len(tps_idxs[day_idx]) - 1:
+                        next_indexes = tps_idxs[day_idx][idx_5m + 1]
+                    else:
+                        # the current 5m block is the last one; the right
+                        # side is the first 5m block of the next day
+                        next_indexes = tps_idxs[day_idx + 1][0]
+
+                    if len(prev_indexes) != 0 and len(next_indexes) != 0:
+                        indexes = np.hstack((prev_indexes, next_indexes))
+                if len(indexes) == 0:
+                    mean_square = 0
+                else:
+                    data5m = data[indexes]
+                    mean_square = np.mean(np.square(data5m))
+            except IndexError:
+                mean_square = 0
+            tps_data[-1].append(mean_square)
+
+    return np.array(tps_data)
+
+
+def get_tps_for_continuous_data(channel_data: Dict,
+                                start_5mins_of_diff_days: List[List[float]],
+                                start_time: float, end_time: float
+                                ) -> np.ndarray:
+    """
+    Different from soh_data, where times and data are each in one np.array,
+    in waveform_data times and data are each kept in a list of np.memmap
+    files along with startTmEpoch and endTmEpoch.
+    channel_data['startIdx'] and channel_data['endIdx'] will be used to
+    exclude np.memmap files that aren't in the zoom time range
+    (start_time, end_time). Data in np.memmap will be trimmed according to
+    times, then the time-power-squared value for each 5 minutes will be
+    calculated as np.mean(np.square(5m data)) and returned.
+    """
+
+    # preset all 0 for all 5 minutes for each day
+    tps_data = np.zeros((len(start_5mins_of_diff_days),
+                         const.NO_5M_DAY))
+
+    spr = channel_data['samplerate']
+    channel_data['tps_data'] = []
+
+    start_tps_tm = 0
+    acc_data_list = []
+
+    for tr_idx, tr in enumerate(channel_data['tracesInfo']):
+        if 'data_f' in tr:
+            times = np.linspace(tr['startTmEpoch'], tr['endTmEpoch'],
+                                tr['size'])
+            data = np.memmap(tr['data_f'],
+                             dtype='int64', mode='r',
+                             shape=tr['size'])
+        else:
+            times = tr['times']
+            data = tr['data']
+        start_index = 0
+        if tr_idx == 0:
+            # get the index of times with the closest value to start_time
+            start_index = np.abs(times - start_time).argmin()
+            start_tps_tm = times[start_index]
+
+        # identify index in case of overlaps or gaps
+        index = np.where(
+            (start_5mins_of_diff_days <= times[start_index]) &
+            (start_5mins_of_diff_days + const.SEC_5M > times[start_index])
+        )
+        curr_row = index[0][0]
+        curr_col = index[1][0]
+        next_tps_tm = start_tps_tm + const.SEC_5M
+        while end_time >= next_tps_tm:
+            next_index = int(start_index + spr * const.SEC_5M)
+            if next_index >= tr['size']:
+                acc_data_list.append(data[start_index:tr['size']])
+                break
+            else:
+                acc_data_list.append(
+                    np.square(data[start_index:next_index]))
+                acc_data = np.hstack(acc_data_list)
+                if acc_data.size == 0:
+                    tps_data[curr_row, curr_col] = 0
+                else:
+                    tps_data[curr_row, curr_col] = np.mean(acc_data)
+
+                start_index = next_index
+                curr_col += 1
+                acc_data_list = []
+                if curr_col == const.NO_5M_DAY:
+                    curr_col = 0
+                    curr_row += 1
+                    next_tps_tm += const.SEC_5M
+    return tps_data
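For orientation, the following is a minimal, self-contained sketch of the day-by-5-minute grid these helpers operate on, assuming const.SEC_DAY = 86400, const.SEC_5M = 300, and const.NO_5M_DAY = 288 (the values implied by the code above). The index arithmetic reproduces what find_tps_tm_idx computes by scanning.

```python
import numpy as np

SEC_DAY = 86400                # assumed value of const.SEC_DAY
SEC_5M = 300                   # assumed value of const.SEC_5M
NO_5M_DAY = SEC_DAY // SEC_5M  # 288 five-minute bins per day

# Build the grid for a two-day window starting at midnight of the day
# containing start_tm: one row per day, one column per 5m interval.
start_tm, end_tm = 0.0, 2 * SEC_DAY
first_day = (start_tm // SEC_DAY) * SEC_DAY
days = np.arange(first_day, end_tm, SEC_DAY)
grid = np.vstack([np.arange(d, d + SEC_DAY, SEC_5M) for d in days])
print(grid.shape)  # (2, 288)

# A time 10 minutes into the second day lands in column 2 of day 1;
# find_tps_tm_idx reports x_idx=2, y_idx=-1 (days are plotted downward).
given_tm = SEC_DAY + 600
day_idx, sec_into_day = divmod(int(given_tm - first_day), SEC_DAY)
print(sec_into_day // SEC_5M, -day_idx)  # 2 -1
```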
diff --git a/sohstationviewer/view/plotting/time_power_squared_processor.py b/sohstationviewer/view/plotting/time_power_squared_processor.py
index 37700edbeb282af0b2b69b522bb1f26516995e85..c554c6867417f25344fcbb5387a1f9c74faccdb9 100644
--- a/sohstationviewer/view/plotting/time_power_squared_processor.py
+++ b/sohstationviewer/view/plotting/time_power_squared_processor.py
@@ -3,7 +3,8 @@ from typing import Dict, Optional, List
 import numpy as np
 from PySide2 import QtCore
 
-from sohstationviewer.conf import constants as const
+from sohstationviewer.view.plotting.time_power_squared_helper import \
+    get_tps_for_discontinuous_data
 
 
 class TimePowerSquaredProcessorSignal(QtCore.QObject):
@@ -76,75 +77,9 @@ class TimePowerSquaredProcessor(QtCore.QRunnable):
         saved in channel_data['tps-data']: np.mean(np.square(5m data))
 
         """
-        trimmed_traces_list = self.trim_waveform_data()
+        self.channel_data['tps_data'] = get_tps_for_discontinuous_data(
+            self.channel_data, self.start_5mins_of_diff_days)
 
-        # preset all 0 for all 5 minutes for each day
-        tps_data = np.zeros((len(self.start_5mins_of_diff_days),
-                             const.NO_5M_DAY))
-
-        spr = self.channel_data['samplerate']
-        self.channel_data['tps_data'] = []
-
-        start_tps_tm = 0
-        acc_data_list = []
-
-        for tr_idx, tr in enumerate(trimmed_traces_list):
-            self.stop_lock.lock()
-            if self.stop:
-                self.stop_lock.unlock()
-                return self.signals.stopped.emit('')
-            self.stop_lock.unlock()
-            if 'data_f' in tr:
-                times = np.linspace(tr['startTmEpoch'], tr['endTmEpoch'],
-                                    tr['size'])
-                data = np.memmap(tr['data_f'],
-                                 dtype='int64', mode='r',
-                                 shape=tr['size'])
-            else:
-                times = tr['times']
-                data = tr['data']
-            start_index = 0
-            if tr_idx == 0:
-                # get index of times with closet value to startTm
-                start_index = np.abs(times - self.start_time).argmin()
-                start_tps_tm = times[start_index]
-
-            # identify index in case of overlaps or gaps
-            index = np.where(
-                (self.start_5mins_of_diff_days <= times[start_index]) &
-                (self.start_5mins_of_diff_days + const.SEC_5M > times[start_index])  # noqa: E501
-            )
-            curr_row = index[0][0]
-            curr_col = index[1][0]
-            next_tps_tm = start_tps_tm + const.SEC_5M
-            while self.end_time >= next_tps_tm:
-                self.stop_lock.lock()
-                if self.stop:
-                    self.stop_lock.unlock()
-                    return self.signals.stopped.emit('')
-                self.stop_lock.unlock()
-
-                next_index = int(start_index + spr * const.SEC_5M)
-                if next_index >= tr['size']:
-                    acc_data_list.append(data[start_index:tr['size']])
-                    break
-                else:
-                    acc_data_list.append(
-                        np.square(data[start_index:next_index]))
-                    acc_data = np.hstack(acc_data_list)
-                    if acc_data.size == 0:
-                        tps_data[curr_row, curr_col] = 0
-                    else:
-                        tps_data[curr_row, curr_col] = np.mean(acc_data)
-
-                    start_index = next_index
-                    curr_col += 1
-                    acc_data_list = []
-                    if curr_col == const.NO_5M_DAY:
-                        curr_col = 0
-                        curr_row += 1
-                        next_tps_tm += const.SEC_5M
-        self.channel_data['tps_data'] = tps_data
         self.signals.finished.emit(self.channel_id)
 
     def request_stop(self):
diff --git a/sohstationviewer/view/plotting/waveform_dialog.py b/sohstationviewer/view/plotting/waveform_dialog.py
index ba9a2a2cd66f18d3658bacd72751b834901ff404..ffcc0eac5983498c58d5ea1e48ab4c89dbd535e6 100755
--- a/sohstationviewer/view/plotting/waveform_dialog.py
+++ b/sohstationviewer/view/plotting/waveform_dialog.py
@@ -9,10 +9,6 @@ from sohstationviewer.view.util.plot_func_names import plot_functions
 from sohstationviewer.view.plotting.plotting_widget.\
     multi_threaded_plotting_widget import MultiThreadedPlottingWidget
 
-from sohstationviewer.controller.util import apply_convert_factor
-
-from sohstationviewer.database.extract_data import get_wf_plot_info
-
 
 class WaveformWidget(MultiThreadedPlottingWidget):
     """
@@ -33,16 +29,12 @@ class WaveformWidget(MultiThreadedPlottingWidget):
         :param time_ticks_total: max number of tick to show on time bar
         """
         self.data_object = d_obj
-        self.plotting_data1 = d_obj.waveform_data[key]
-        self.plotting_data2 = d_obj.mass_pos_data[key]
-        data_time = d_obj.data_time[key]
+        self.plotting_data1 = d_obj.waveform_data[key] if key else {}
+        self.plotting_data2 = d_obj.mass_pos_data[key] if key else {}
+        data_time = d_obj.data_time[key] if key else [0, 1]
         return super().init_plot(d_obj, data_time, key, start_tm, end_tm,
                                  time_ticks_total, is_waveform=True)
 
-    def get_plot_info(self, *args, **kwargs):
-        # function to get database info for wf channels in self.plotting_data1
-        return get_wf_plot_info(*args, **kwargs)
-
     def plot_single_channel(self, c_data: Dict, chan_id: str):
         """
         Plot the channel chan_id.
@@ -57,7 +49,7 @@ class WaveformWidget(MultiThreadedPlottingWidget):
             return
         chan_db_info = c_data['chan_db_info']
         plot_type = chan_db_info['plotType']
-        apply_convert_factor(c_data, chan_db_info['convertFactor'])
+
         # refer to doc string for mass_pos_data to know the reason for 'ax_wf'
         if 'ax_wf' not in c_data:
             ax = getattr(self.plotting, plot_functions[plot_type][1])(
@@ -93,7 +85,7 @@ class WaveformDialog(QtWidgets.QWidget):
         data_type: str - type of data being plotted
         """
         self.data_type = None
-        self.setGeometry(300, 300, 1200, 700)
+        self.setGeometry(50, 10, 1600, 700)
         self.setWindowTitle("Raw Data Plot")
 
         main_layout = QtWidgets.QVBoxLayout()
@@ -118,11 +110,11 @@ class WaveformDialog(QtWidgets.QWidget):
         bottom_layout = QtWidgets.QHBoxLayout()
         main_layout.addLayout(bottom_layout)
         """
-        save_button: save plot in plotting_widget to file
+        save_plot_button: save plot in plotting_widget to file
         """
-        self.save_button = QtWidgets.QPushButton('Save', self)
-        self.save_button.clicked.connect(self.save)
-        bottom_layout.addWidget(self.save_button)
+        self.save_plot_button = QtWidgets.QPushButton('Save Plot', self)
+        self.save_plot_button.clicked.connect(self.save_plot)
+        bottom_layout.addWidget(self.save_plot_button)
         self.info_text_browser.setFixedHeight(60)
         bottom_layout.addWidget(self.info_text_browser)
 
@@ -148,11 +140,11 @@ class WaveformDialog(QtWidgets.QWidget):
         self.plotting_widget.init_size()
 
     @QtCore.Slot()
-    def save(self):
+    def save_plot(self):
         """
         Save the plotting to a file
         """
-        print("save")
+        self.plotting_widget.save_plot('Waveform-Plot')
 
     def plot_finished(self):
         self.parent.is_plotting_waveform = False
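The Save Plot changes above follow the standard Qt signal/slot pattern: the renamed button's clicked signal is connected to a slot that delegates to the plotting widget. A minimal, self-contained sketch of that wiring, with a hypothetical stand-in for the real plotting widget:

```python
import sys

from PySide2 import QtCore, QtWidgets


class PlottingWidgetStub(QtWidgets.QWidget):
    """Hypothetical stand-in for the real plotting widget."""
    def save_plot(self, default_name: str) -> None:
        print(f"would save the current plot as '{default_name}'")


class DialogSketch(QtWidgets.QWidget):
    def __init__(self):
        super().__init__()
        self.plotting_widget = PlottingWidgetStub(self)
        self.save_plot_button = QtWidgets.QPushButton('Save Plot', self)
        # route the button's clicked signal to the save_plot slot
        self.save_plot_button.clicked.connect(self.save_plot)

    @QtCore.Slot()
    def save_plot(self):
        self.plotting_widget.save_plot('Waveform-Plot')


if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    dialog = DialogSketch()
    dialog.show()
    sys.exit(app.exec_())
```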
diff --git a/sohstationviewer/view/save_plot_dialog.py b/sohstationviewer/view/save_plot_dialog.py
new file mode 100644
index 0000000000000000000000000000000000000000..77a988f25a6679ac7ecd3bd4f916ca625d6a97d1
--- /dev/null
+++ b/sohstationviewer/view/save_plot_dialog.py
@@ -0,0 +1,139 @@
+import sys
+import platform
+import os
+from pathlib import Path
+from typing import Union, Optional
+
+from PySide2 import QtWidgets, QtCore, QtGui
+from PySide2.QtWidgets import QApplication, QWidget, QDialog
+
+from sohstationviewer.conf import constants
+
+
+class SavePlotDialog(QDialog):
+    def __init__(self, parent: Union[QWidget, QApplication],
+                 main_window: QApplication,
+                 default_name: str):
+        """
+        Dialog that allows choosing a file format and opens a file dialog
+            to save the file as
+
+        :param parent: the parent widget
+        :param main_window: the main window, which keeps the path to save
+            the file to
+        :param default_name: default name for the graph file to be saved as
+        """
+        super(SavePlotDialog, self).__init__(parent)
+        self.main_window = main_window
+        """
+        save_file_path: path to save file
+        """
+        self.save_file_path: Optional[Path] = None
+        """
+        save_dir_path: path to save dir
+        """
+        self.save_dir_path: Path = main_window.save_plot_dir
+        """
+        dpi: resolution for png format
+        """
+        self.dpi: int = 100
+
+        self.save_dir_btn = QtWidgets.QPushButton("Save Directory", self)
+        self.save_dir_textbox = QtWidgets.QLineEdit(self.save_dir_path)
+        self.save_filename_textbox = QtWidgets.QLineEdit(default_name)
+
+        self.dpi_line_edit = QtWidgets.QSpinBox(self)
+        self.format_radio_btns = {}
+        for fmt in constants.IMG_FORMAT:
+            self.format_radio_btns[fmt] = QtWidgets.QRadioButton(fmt, self)
+            if fmt == self.main_window.save_plot_format:
+                self.format_radio_btns[fmt].setChecked(True)
+        self.cancel_btn = QtWidgets.QPushButton('CANCEL', self)
+        self.continue_btn = QtWidgets.QPushButton('SAVE PLOT', self)
+
+        self.setup_ui()
+        self.connect_signals()
+
+    def setup_ui(self) -> None:
+        self.setWindowTitle("Save Plot")
+
+        main_layout = QtWidgets.QGridLayout()
+        self.setLayout(main_layout)
+
+        main_layout.addWidget(self.save_dir_btn, 0, 0, 1, 1)
+        self.save_dir_textbox.setFixedWidth(500)
+        main_layout.addWidget(self.save_dir_textbox, 0, 1, 1, 5)
+        main_layout.addWidget(QtWidgets.QLabel('Save Filename'),
+                              1, 0, 1, 1)
+        main_layout.addWidget(self.save_filename_textbox, 1, 1, 1, 5)
+
+        main_layout.addWidget(QtWidgets.QLabel('DPI'),
+                              2, 2, 1, 1, QtGui.Qt.AlignRight)
+        self.dpi_line_edit.setRange(50, 300)
+        self.dpi_line_edit.setValue(100)
+        main_layout.addWidget(self.dpi_line_edit, 2, 3, 1, 1)
+        rowidx = 2
+        for fmt in self.format_radio_btns:
+            main_layout.addWidget(self.format_radio_btns[fmt], rowidx, 1, 1, 1)
+            rowidx += 1
+
+        main_layout.addWidget(self.cancel_btn, rowidx, 1, 1, 1)
+        main_layout.addWidget(self.continue_btn, rowidx, 3, 1, 1)
+
+    def connect_signals(self) -> None:
+        self.save_dir_btn.clicked.connect(self.change_save_directory)
+        self.cancel_btn.clicked.connect(self.close)
+        self.continue_btn.clicked.connect(self.on_continue)
+
+    @QtCore.Slot()
+    def change_save_directory(self) -> None:
+        """
+        Show a file selection window and change the plot save directory
+        based on the folder selected by the user.
+        """
+        fd = QtWidgets.QFileDialog(self)
+        fd.setFileMode(QtWidgets.QFileDialog.Directory)
+        fd.setDirectory(self.save_dir_textbox.text())
+        if fd.exec_() != QtWidgets.QDialog.Accepted:
+            return
+        new_path = fd.selectedFiles()[0]
+        self.save_dir_textbox.setText(new_path)
+        self.save_dir_path = new_path
+        self.main_window.save_plot_dir = new_path
+
+    @QtCore.Slot()
+    def on_continue(self):
+        if self.save_dir_textbox.text().strip() == '':
+            QtWidgets.QMessageBox.warning(
+                self, "Add Directory",
+                "A directory need to be given before continue.")
+            return
+
+        if self.save_filename_textbox.text().strip() == '':
+            QtWidgets.QMessageBox.warning(
+                self, "Add Filename",
+                "A file name need to be given before continue.")
+            return
+
+        for img_format in self.format_radio_btns:
+            if self.format_radio_btns[img_format].isChecked():
+                save_format = img_format
+                self.main_window.save_plot_format = img_format
+                break
+
+        self.save_file_path = Path(self.save_dir_path).joinpath(
+            f"{self.save_filename_textbox.text()}.{save_format}")
+        self.dpi = self.dpi_line_edit.value()
+        self.close()
+
+
+if __name__ == '__main__':
+    os_name, version, *_ = platform.platform().split('-')
+    if os_name == 'macOS':
+        os.environ['QT_MAC_WANTS_LAYER'] = '1'
+    app = QtWidgets.QApplication(sys.argv)
+    save_path = '/Users/ldam/Documents/GIT/sohstationviewer/tests/test_data/Q330-sample'  # noqa: E501
+
+    class MainWindowStub:
+        # minimal stand-in: SavePlotDialog only needs save_plot_dir and
+        # save_plot_format from the main window in this manual test
+        save_plot_dir = save_path
+        save_plot_format = next(iter(constants.IMG_FORMAT))
+
+    test = SavePlotDialog(None, MainWindowStub(), 'test_plot')
+    test.exec_()
+    print("dpi:", test.dpi)
+    print("save file path:", test.save_file_path)
+    sys.exit(app.exec_())
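A hedged sketch of how a caller might consume the dialog's results once exec_ returns: save_file_path stays None when the user cancels, otherwise it carries the chosen path, and dpi carries the chosen resolution. matplotlib's savefig is used purely for illustration; the real plotting widget's save path may differ.

```python
import matplotlib.pyplot as plt

from sohstationviewer.view.save_plot_dialog import SavePlotDialog


def save_current_figure(parent, main_window, fig: plt.Figure) -> None:
    dialog = SavePlotDialog(parent, main_window, 'Waveform-Plot')
    dialog.exec_()
    if dialog.save_file_path is None:
        return  # the user cancelled
    # dpi only affects raster formats such as png
    fig.savefig(dialog.save_file_path, dpi=dialog.dpi)
```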
diff --git a/sohstationviewer/view/ui/main_ui.py b/sohstationviewer/view/ui/main_ui.py
index 005029668262238706fc02f0bf176aa25995df5e..194b23483bc13cbd916c0d77a737d556eee6a313 100755
--- a/sohstationviewer/view/ui/main_ui.py
+++ b/sohstationviewer/view/ui/main_ui.py
@@ -793,6 +793,8 @@ class UIMainWindow(object):
 
         self.stop_button.clicked.connect(main_window.stop)
 
+        self.save_plot_button.clicked.connect(main_window.save_plot)
+
     def read_config(self):
         self.config = configparser.ConfigParser()
         config_path = Path('sohstationviewer/conf/read_settings.ini')
diff --git a/sohstationviewer/view/util/functions.py b/sohstationviewer/view/util/functions.py
index 2927cae8c88a35dbe38423b4448c5c615c469da9..254f32030c796164cd0399d3e7d938174df27c8d 100644
--- a/sohstationviewer/view/util/functions.py
+++ b/sohstationviewer/view/util/functions.py
@@ -96,6 +96,9 @@ def create_table_of_content_file(base_path: Path) -> None:
         "this software.\n\n"
         "On the left-hand side you will find a list of currently available"
         " help topics.\n\n"
+        "If the links of the Table of Contents are broken, click on Recreate "
+        "Table of Content <img src='recreate_table_contents.png' height=30 /> "
+        "to rebuild it.\n\n"
         "The home button can be used to return to this page at any time.\n\n"
         "# Table of Contents\n\n")
     links = ""
diff --git a/tests/controller/__init__.py b/tests/controller/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/test_controller/test_plotting_data.py b/tests/controller/test_plotting_data.py
similarity index 100%
rename from tests/test_controller/test_plotting_data.py
rename to tests/controller/test_plotting_data.py
diff --git a/tests/test_controller/test_processing.py b/tests/controller/test_processing.py
similarity index 59%
rename from tests/test_controller/test_processing.py
rename to tests/controller/test_processing.py
index 74751b31bec8a04c43f6a063599452a6ee5ff5eb..a4cdf4a0f3be4b01ad5d3f0d2b109827bd842328 100644
--- a/tests/test_controller/test_processing.py
+++ b/tests/controller/test_processing.py
@@ -3,29 +3,25 @@ from pathlib import Path
 
 from unittest import TestCase
 from unittest.mock import patch
-from contextlib import redirect_stdout
-import io
 
 from sohstationviewer.controller.processing import (
-    load_data,
     read_mseed_channels,
     detect_data_type,
     get_data_type_from_file
 )
 from sohstationviewer.database.extract_data import get_signature_channels
 from PySide2 import QtWidgets
-from sohstationviewer.model.mseed.mseed import MSeed
-from sohstationviewer.model.reftek.reftek import RT130
+
 
 TEST_DATA_DIR = Path(__file__).resolve().parent.parent.joinpath('test_data')
 rt130_dir = TEST_DATA_DIR.joinpath('RT130-sample/2017149.92EB/2017150')
 q330_dir = TEST_DATA_DIR.joinpath('Q330-sample/day_vols_AX08')
 centaur_dir = TEST_DATA_DIR.joinpath('Centaur-sample/SOH')
 pegasus_dir = TEST_DATA_DIR.joinpath('Pegasus-sample/Pegasus_SVC4/soh')
-mix_traces_dir = TEST_DATA_DIR.joinpath('Q330_mixed_traces')
+multiplex_dir = TEST_DATA_DIR.joinpath('Q330_multiplex')
 
 
-class TestLoadDataAndReadChannels(TestCase):
+class TestReadChannels(TestCase):
     """Test suite for load_data and read_mseed_channels."""
 
     def setUp(self) -> None:
@@ -39,140 +35,6 @@ class TestLoadDataAndReadChannels(TestCase):
         # though, so we are setting it to a stub value.
         self.mseed_dtype = 'MSeed'
 
-    def test_load_data_rt130_good_dir(self):
-        """
-        Test basic functionality of load_data - the given directory can be
-        loaded without issues. Test RT130.
-        """
-        self.assertIsInstance(
-            load_data('RT130', self.widget_stub, [rt130_dir], []),
-            RT130
-        )
-
-    def test_load_data_rt130_used(self):
-        with self.subTest("R130, no dir_list"):
-            self.assertIsInstance(
-                load_data('RT130', self.widget_stub, [], [rt130_dir]),
-                RT130
-            )
-        with self.subTest("R130, any dir_list"):
-            # should ignore dir_list
-            self.assertIsInstance(
-                load_data('RT130', self.widget_stub, ['_'], [rt130_dir]),
-                RT130
-            )
-
-        with self.subTest("R130, bad dir_list"):
-            self.assertIsNone(
-                load_data('RT130', self.widget_stub, [], ['_'])
-            )
-
-        with self.subTest("Q330"):
-            self.assertIsNone(
-                load_data('Q330', self.widget_stub, [], [rt130_dir])
-            )
-
-    def test_load_data_mseed_q330_good_data_dir(self):
-        """
-        Test basic functionality of load_data - the given directory can be
-        loaded without issues. Test MSeed.
-        """
-        self.assertIsInstance(
-            load_data(self.mseed_dtype, self.widget_stub, [q330_dir], []),
-            MSeed
-        )
-        self.assertIsInstance(
-            load_data(self.mseed_dtype, self.widget_stub, [centaur_dir], []),
-            MSeed
-        )
-        self.assertIsInstance(
-            load_data(self.mseed_dtype, self.widget_stub, [pegasus_dir], []),
-            MSeed
-        )
-
-    def test_load_data_no_dir(self):
-        """Test basic functionality of load_data - no directory was given."""
-        no_dir_given = []
-        self.assertIsNone(load_data(
-            'RT130', self.widget_stub, no_dir_given, []))
-        self.assertIsNone(
-            load_data(
-                self.mseed_dtype, self.widget_stub, no_dir_given, []))
-
-    def test_load_data_dir_does_not_exist(self):
-        """
-        Test basic functionality of load_data - the given directory does not
-        exist.
-        """
-        empty_name_dir = ['']
-        non_existent_dir = ['dir_that_does_not_exist']
-
-        self.assertIsNone(
-            load_data('RT130', self.widget_stub, empty_name_dir, []))
-        self.assertIsNone(
-            load_data('RT130', self.widget_stub, non_existent_dir, []))
-
-        self.assertIsNone(
-            load_data(self.mseed_dtype, self.widget_stub, empty_name_dir, []))
-        self.assertIsNone(
-            load_data(
-                self.mseed_dtype, self.widget_stub, non_existent_dir, []))
-
-    def test_load_data_empty_dir(self):
-        """
-        Test basic functionality of load_data - the given directory is empty.
-        """
-        with TemporaryDirectory() as empty_dir:
-            self.assertIsNone(
-                load_data('RT130', self.widget_stub, [empty_dir], []))
-            self.assertIsNone(
-                load_data(self.mseed_dtype, self.widget_stub, [empty_dir], []))
-
-    def test_load_data_empty_data_dir(self):
-        """
-        Test basic functionality of load_data - the given directory
-        contains a data folder but no data file.
-        """
-        with TemporaryDirectory() as outer_dir:
-            with TemporaryDirectory(dir=outer_dir) as data_dir:
-                self.assertIsNone(
-                    load_data('RT130', self.widget_stub, [data_dir], []))
-                self.assertIsNone(
-                    load_data(
-                        self.mseed_dtype, self.widget_stub, [outer_dir], []))
-
-    def test_load_data_data_type_mismatch(self):
-        """
-        Test basic functionality of load_data - the data type given does not
-        match the type of the data contained in the given directory.
-        """
-        self.assertIsNone(
-            load_data('RT130', self.widget_stub, [q330_dir], []))
-        self.assertIsNone(
-            load_data(self.mseed_dtype, self.widget_stub, [rt130_dir], []))
-
-    def test_load_data_data_traceback_error(self):
-        """
-        Test basic functionality of load_data - when there is an error
-        on loading data, the traceback info will be printed out
-        """
-        f = io.StringIO()
-        with redirect_stdout(f):
-            self.assertIsNone(load_data('RT130', None, [q330_dir], []))
-        output = f.getvalue()
-        self.assertIn(
-            "Data can't be read due to error: Traceback",
-            output
-        )
-        with redirect_stdout(f):
-            self.assertIsNone(
-                load_data(self.mseed_dtype, None, [rt130_dir], []))
-        output = f.getvalue()
-        self.assertIn(
-            "Data can't be read due to error: Traceback",
-            output
-        )
-
     def test_read_channels_mseed_dir(self):
         """
         Test basic functionality of load_data - the given directory contains
@@ -210,21 +72,21 @@ class TestLoadDataAndReadChannels(TestCase):
         self.assertListEqual(ret[2], pegasus_wf_channels)
         self.assertListEqual(ret[3], pegasus_spr_gt_1)
 
-        mix_traces_soh_channels = ['LOG']
-        mix_traces_mass_pos_channels = []
-        mix_traces_wf_channels = sorted(
+        multiplex_soh_channels = ['LOG']
+        multiplex_mass_pos_channels = []
+        multiplex_wf_channels = sorted(
             ['BH1', 'BH2', 'BH3', 'BH4', 'BH5', 'BH6',
              'EL1', 'EL2', 'EL4', 'EL5', 'EL6', 'ELZ'])
-        mix_traces_spr_gt_1 = sorted(
+        multiplex_spr_gt_1 = sorted(
             ['BS1', 'BS2', 'BS3', 'BS4', 'BS5', 'BS6',
              'ES1', 'ES2', 'ES3', 'ES4', 'ES5', 'ES6',
              'LS1', 'LS2', 'LS3', 'LS4', 'LS5', 'LS6',
              'SS1', 'SS2', 'SS3', 'SS4', 'SS5', 'SS6'])
-        ret = read_mseed_channels(self.widget_stub, [mix_traces_dir], True)
-        self.assertListEqual(ret[0], mix_traces_soh_channels)
-        self.assertListEqual(ret[1], mix_traces_mass_pos_channels)
-        self.assertListEqual(ret[2], mix_traces_wf_channels)
-        self.assertListEqual(ret[3], mix_traces_spr_gt_1)
+        ret = read_mseed_channels(self.widget_stub, [multiplex_dir], True)
+        self.assertListEqual(ret[0], multiplex_soh_channels)
+        self.assertListEqual(ret[1], multiplex_mass_pos_channels)
+        self.assertListEqual(ret[2], multiplex_wf_channels)
+        self.assertListEqual(ret[3], multiplex_spr_gt_1)
 
     def test_read_channels_rt130_dir(self):
         """
@@ -304,40 +166,40 @@ class TestDetectDataType(TestCase):
         Test basic functionality of detect_data_type - only one directory was
         given and the data type it contains can be detected.
         """
-        expected_data_type = ('RT130', '_')
+        expected_data_type = ('RT130', False)
         self.mock_get_data_type_from_file.return_value = expected_data_type
 
         self.assertEqual(
             detect_data_type([self.dir1.name]),
-            expected_data_type[0]
+            expected_data_type
         )
 
-    def test_same_data_type_and_channel(self):
+    def test_same_data_type_not_multiplex(self):
         """
         Test basic functionality of detect_data_type - the given directories
         contain the same data type and the data type was detected using the
         same channel.
         """
-        expected_data_type = ('RT130', '_')
+        expected_data_type = ('RT130', False)
         self.mock_get_data_type_from_file.return_value = expected_data_type
 
         self.assertEqual(
             detect_data_type([self.dir1.name, self.dir2.name]),
-            expected_data_type[0]
+            expected_data_type
         )
 
-    def test_same_data_type_different_channel(self):
+    def test_same_data_type_multiplex(self):
         """
         Test basic functionality of detect_data_type - the given directories
         contain the same data type but the data type was detected using
         different channels.
         """
-        returned_data_types = [('Q330', 'OCF'), ('Q330', 'VEP')]
+        returned_data_types = [('Q330', True), ('Q330', True)]
         self.mock_get_data_type_from_file.side_effect = returned_data_types
 
         self.assertEqual(
             detect_data_type([self.dir1.name, self.dir2.name]),
-            returned_data_types[0][0]
+            returned_data_types[0]
         )
 
     def test_different_data_types(self):
@@ -345,30 +207,45 @@ class TestDetectDataType(TestCase):
         Test basic functionality of detect_data_type - the given directories
         contain different data types.
         """
-        returned_data_types = [('RT130', '_'), ('Q330', 'VEP')]
+        returned_data_types = [('RT130', False), ('Q330', False)]
         self.mock_get_data_type_from_file.side_effect = returned_data_types
 
         with self.assertRaises(Exception) as context:
             detect_data_type([self.dir1.name, self.dir2.name])
         self.assertEqual(
             str(context.exception),
-            "There are more than one types of data detected:\n"
-            "Q330, RT130\n\n"
-            "Please have only one data type for each loading.")
+            f"There are more than one types of data detected:\n"
+            f"{self.dir1.name}: RT130, "
+            f"{self.dir2.name}: Q330\n\n"
+            f"Please have only data that related to each other.")
 
     def test_unknown_data_type(self):
         """
         Test basic functionality of detect_data_type - can't detect any data
         type.
         """
-        unknown_data_type = ('Unknown', '_')
+        unknown_data_type = ('Unknown', False)
+        self.mock_get_data_type_from_file.return_value = unknown_data_type
+        with self.assertRaises(Exception) as context:
+            detect_data_type([self.dir1.name])
+        self.assertEqual(
+            str(context.exception),
+            "There are no known data detected.\n\n"
+            "Do you want to cancel to select different folder(s)\n"
+            "Or continue to read any available mseed file?")
+
+    def test_multiplex_none(self):
+        """
+        Test basic functionality of detect_data_type - no channel was found
+        for the data set (is_multiplex is None).
+        """
+        unknown_data_type = ('Unknown', None)
         self.mock_get_data_type_from_file.return_value = unknown_data_type
         with self.assertRaises(Exception) as context:
             detect_data_type([self.dir1.name])
         self.assertEqual(
             str(context.exception),
-            "There are no known data detected.\n"
-            "Please select different folder(s).")
+            "No channel found for the data set")
 
 
 class TestGetDataTypeFromFile(TestCase):
@@ -380,7 +257,7 @@ class TestGetDataTypeFromFile(TestCase):
         """
         rt130_file = Path(rt130_dir).joinpath(
             '92EB/0/000000000_00000000')
-        expected_data_type = ('RT130', '_')
+        expected_data_type = ('RT130', False)
         self.assertTupleEqual(
             get_data_type_from_file(rt130_file, get_signature_channels()),
             expected_data_type
@@ -392,8 +269,9 @@ class TestGetDataTypeFromFile(TestCase):
         data type contained in given file.
         """
         test_file = NamedTemporaryFile()
-        self.assertIsNone(
-            get_data_type_from_file(test_file.name, get_signature_channels()))
+        ret = get_data_type_from_file(
+            Path(test_file.name), get_signature_channels())
+        self.assertEqual(ret, (None, False))
 
     def test_mseed_data(self):
         """
@@ -405,9 +283,9 @@ class TestGetDataTypeFromFile(TestCase):
             'XX.3734.SOH.centaur-3_3734..20180817_000000.miniseed.miniseed')
         pegasus_file = pegasus_dir.joinpath(
             '2020/XX/KC01/VE1.D/XX.KC01..VE1.D.2020.129')
-        q330_data_type = ('Q330', 'VKI')
-        centaur_data_type = ('Centaur', 'GEL')
-        pegasus_data_type = ('Pegasus', 'VE1')
+        q330_data_type = ('Q330', False)
+        centaur_data_type = ('Centaur', True)
+        pegasus_data_type = ('Pegasus', False)
 
         sig_chan = get_signature_channels()
 
@@ -423,10 +301,16 @@ class TestGetDataTypeFromFile(TestCase):
         Test basic functionality of get_data_type_from_file - given file does
         not exist.
         """
-        empty_name_file = ''
-        non_existent_file = 'non_existent_dir'
-        with self.assertRaises(FileNotFoundError):
+        empty_name_file = Path('')
+        non_existent_file = Path('non_existent_dir')
+        with self.assertRaises(IsADirectoryError):
             get_data_type_from_file(empty_name_file, get_signature_channels())
         with self.assertRaises(FileNotFoundError):
             get_data_type_from_file(non_existent_file,
                                     get_signature_channels())
+
+    def test_non_data_binary_file(self):
+        binary_file = Path(__file__).resolve().parent.parent.parent.joinpath(
+            'images', 'home.png')
+        ret = get_data_type_from_file(binary_file, get_signature_channels())
+        self.assertIsNone(ret)
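These test changes reflect a new contract: get_data_type_from_file now returns a (data_type, is_multiplex) tuple instead of a (data_type, channel) pair, and detect_data_type passes the whole tuple through rather than just the type. A hedged sketch of consuming it, patching get_data_type_from_file exactly as TestDetectDataType does and assuming, as the tests do, that detect_data_type resolves each directory's type through that function:

```python
from tempfile import TemporaryDirectory
from unittest.mock import patch

from sohstationviewer.controller.processing import detect_data_type

with TemporaryDirectory() as dir1:
    with patch('sohstationviewer.controller.processing.'
               'get_data_type_from_file') as mock_get:
        mock_get.return_value = ('Q330', True)
        data_type, is_multiplex = detect_data_type([dir1])
        print(data_type, is_multiplex)  # Q330 True
```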
diff --git a/tests/test_controller/test_util.py b/tests/controller/test_util.py
similarity index 99%
rename from tests/test_controller/test_util.py
rename to tests/controller/test_util.py
index 83bfa68850f1544a045e60123add9037b238c30d..6a95958998265ceeb8b2d117dbb81b6b57fb5c18 100644
--- a/tests/test_controller/test_util.py
+++ b/tests/controller/test_util.py
@@ -11,7 +11,6 @@ import string
 from sohstationviewer.controller.util import (
     validate_file,
     validate_dir,
-    get_total_files,
     get_dir_size,
     get_time_6,
     get_time_6_2y,
@@ -19,7 +18,8 @@ from sohstationviewer.controller.util import (
     get_time_4,
     get_val,
     rtn_pattern,
-    add_thousand_separator
+    add_thousand_separator,
+    get_total_files
 )
 
 TEST_DATA_DIR = os.path.realpath(os.path.join(
@@ -203,14 +203,6 @@ class TestValidateDir(TestCase):
             self.fail()
 
 
-class TestGetTotalFiles(TestCase):
-    def test_get_total_files(self):
-        list_of_dir = [os.path.join(TEST_DATA_DIR, 'Centaur-sample'),
-                       os.path.join(TEST_DATA_DIR, 'Q330-sample')]
-        total_files = get_total_files(list_of_dir)
-        self.assertEqual(total_files, 6)
-
-
 class TestGetDirSize(TestCase):
     """Test suite for get_dir_size."""
     def test_files_have_size_zero(self):
@@ -376,3 +368,11 @@ class TestFmti(TestCase):
             val = -62362.32523
             expected = '-62,362'
             self.assertEqual(add_thousand_separator(val), expected)
+
+
+class TestGetTotalFiles(TestCase):
+    def test_get_total_files(self):
+        list_of_dir = [os.path.join(TEST_DATA_DIR, 'Centaur-sample'),
+                       os.path.join(TEST_DATA_DIR, 'Q330-sample')]
+        total_files = get_total_files(list_of_dir)
+        self.assertEqual(total_files, 6)
diff --git a/tests/model/__init__.py b/tests/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/general_data/__init__.py b/tests/model/general_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/general_data/test_general_data_helper.py b/tests/model/general_data/test_general_data_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..c82dc0ce7c5a4106dbb28bab625f570fb17fb326
--- /dev/null
+++ b/tests/model/general_data/test_general_data_helper.py
@@ -0,0 +1,303 @@
+import numpy as np
+from unittest import TestCase
+from unittest.mock import patch
+
+from sohstationviewer.model.general_data.general_data_helper import (
+    _check_related_gaps, squash_gaps, sort_data,
+    retrieve_data_time_from_data_dict, retrieve_gaps_from_data_dict,
+    combine_data, apply_convert_factor_to_data_dict
+)
+
+
+class TestCheckRelatedGaps(TestCase):
+    # FROM test_handling_data_rearrange_data.TestCheckRelatedGaps
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.checked_indexes = []
+
+    def test_minmax1_inside_minmax2(self):
+        self.assertTrue(
+            _check_related_gaps(3, 4, 1, 5, 1, self.checked_indexes))
+        self.assertIn(1, self.checked_indexes)
+
+    def test_minmax2_inside_minmax1(self):
+        self.assertTrue(
+            _check_related_gaps(1, 5, 3, 4, 2, self.checked_indexes))
+        self.assertIn(2, self.checked_indexes)
+
+    def test_end_minmax1_overlap_start_minmax2(self):
+        self.assertTrue(
+            _check_related_gaps(1, 4, 3, 5, 3, self.checked_indexes))
+        self.assertIn(3, self.checked_indexes)
+
+    def test_end_minmax2_overlap_start_minmax1(self):
+        self.assertTrue(
+            _check_related_gaps(3, 5, 1, 4, 4, self.checked_indexes))
+        self.assertIn(4, self.checked_indexes)
+
+    def test_minmax1_less_than_minmax2(self):
+        self.assertFalse(
+            _check_related_gaps(1, 3, 4, 6, 5, self.checked_indexes))
+        self.assertNotIn(5, self.checked_indexes)
+
+    def test_minmax1_greater_than_minmax2(self):
+        self.assertFalse(
+            _check_related_gaps(6, 6, 1, 3, 5, self.checked_indexes))
+        self.assertNotIn(5, self.checked_indexes)
+
+
+class TestSquashGaps(TestCase):
+    # FROM test_handling_data_rearrange_data.TestSquashGaps
+    def setUp(self) -> None:
+        self.normal_gaps = [[4, 7], [4, 6], [5, 6], [3, 7], [5, 8]]
+        self.overlap_gaps = [[17, 14], [16, 14], [16, 15], [17, 13], [18, 15]]
+        self.mixed_gaps = []
+        for i in range(len(self.normal_gaps)):
+            self.mixed_gaps.append(self.normal_gaps[i])
+            self.mixed_gaps.append(self.overlap_gaps[i])
+
+    def test_normal_gaps(self):
+        gaps = squash_gaps(self.normal_gaps)
+        self.assertEqual(gaps, [[3, 8]])
+
+    def test_overlap_gaps(self):
+        gaps = squash_gaps(self.overlap_gaps)
+        self.assertEqual(gaps, [[18, 13]])
+
+    def test_mixed_gaps(self):
+        gaps = squash_gaps(self.mixed_gaps)
+        self.assertEqual(gaps, [[3, 8], [18, 13]])
+
+
+class TestSortData(TestCase):
+    # FROM test_handling_data_rearrange_data.TestSortData
+    def setUp(self) -> None:
+        self.station_data_dict = {
+            'CH1': {'tracesInfo': [{'startTmEpoch': 7},
+                                   {'startTmEpoch': 1},
+                                   {'startTmEpoch': 5},
+                                   {'startTmEpoch': 3}]},
+            'CH2': {'tracesInfo': [{'startTmEpoch': 2},
+                                   {'startTmEpoch': 8},
+                                   {'startTmEpoch': 6},
+                                   {'startTmEpoch': 4}]}
+        }
+
+    def test_sort_data(self):
+        sort_data(self.station_data_dict)
+        self.assertEqual(
+            self.station_data_dict,
+            {'CH1': {'tracesInfo': [{'startTmEpoch': 1}, {'startTmEpoch': 3},
+                                    {'startTmEpoch': 5}, {'startTmEpoch': 7}]},
+             'CH2': {'tracesInfo': [{'startTmEpoch': 2}, {'startTmEpoch': 4},
+                                    {'startTmEpoch': 6}, {'startTmEpoch': 8}]}}
+        )
+
+
+class TestRetrieveDataTimeFromDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {'CH1': {'startTmEpoch': 4, 'endTmEpoch': 6},
+                     'CH2': {'startTmEpoch': 5, 'endTmEpoch': 9}
+                     },
+            'STA2': {'CH1': {'startTmEpoch': 2, 'endTmEpoch': 4},
+                     'CH2': {'startTmEpoch': 6, 'endTmEpoch': 8}
+                     }
+            }
+        self.data_time = {}
+        self.expected_data_time = {'STA1': [4, 9], 'STA2': [2, 8]}
+
+    def test_retrieve_data_time(self):
+        retrieve_data_time_from_data_dict(
+            'STA1', self.data_dict, self.data_time)
+        self.assertEqual(self.data_time,
+                         {'STA1': self.expected_data_time['STA1']})
+        retrieve_data_time_from_data_dict(
+            'STA2', self.data_dict, self.data_time)
+        self.assertEqual(self.data_time,
+                         self.expected_data_time)
+
+
+class TestRetrieveGapsFromDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {'CH1': {'gaps': [[1, 2], [4, 3]]},
+                     'CH2': {'gaps': []}
+                     },
+            'STA2': {'CH1': {'gaps': [[1, 2], [4, 3], [2, 3]]},
+                     'CH2': {'gaps': [[1, 3], [3, 2]]}
+                     },
+            }
+        self.gaps = {}
+        self.expected_gaps = {'STA1': [[1, 2], [4, 3]],
+                              'STA2': [[1, 2], [4, 3], [2, 3], [1, 3], [3, 2]]}
+
+    def test_retrieve_gaps(self):
+        self.gaps['STA1'] = []
+        retrieve_gaps_from_data_dict('STA1', self.data_dict, self.gaps)
+        self.assertEqual(self.gaps,
+                         {'STA1': self.expected_gaps['STA1']})
+
+        self.gaps['STA2'] = []
+        retrieve_gaps_from_data_dict('STA2', self.data_dict, self.gaps)
+        self.assertEqual(self.gaps,
+                         self.expected_gaps)
+
+
+class TestCombineData(TestCase):
+    def test_overlap_lt_gap_minimum(self):
+        # combine; not add to gap list
+        data_dict = {'STA1': {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 13,     # delta = 2 < 10
+                     'endTmEpoch': 20,
+                     'data': [1, -2, 1, 1],
+                     'times': [13, 16, 18, 20]}
+                ]}
+            }}
+        gap_minimum = 10
+        combine_data('STA1', data_dict, gap_minimum)
+        self.assertEqual(data_dict['STA1']['CH1']['gaps'], [])
+
+        self.assertEqual(
+            len(data_dict['STA1']['CH1']['tracesInfo']),
+            1)
+        self.assertEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['endTmEpoch'],
+            20)
+        self.assertListEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 13, 16, 18, 20])
+
+    def test_overlap_gt_or_equal_gap_minimum(self):
+        # combine; add to gap list
+        data_dict = {'STA1': {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 5,     # delta = 10 >= 10
+                     'endTmEpoch': 20,
+                     'data': [1, -2, 1, 1],
+                     'times': [5, 11, 15, 20]}
+                ]}
+            }}
+        gap_minimum = 10
+        combine_data('STA1', data_dict, gap_minimum)
+        self.assertEqual(data_dict['STA1']['CH1']['gaps'], [[15, 5]])
+
+        self.assertEqual(
+            len(data_dict['STA1']['CH1']['tracesInfo']),
+            1)
+        self.assertEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['endTmEpoch'],
+            20)
+        self.assertListEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 5, 11, 15, 20])
+
+    def test_lt_gap_minimum(self):
+        # not combine; not add to gap list
+        data_dict = {'STA1': {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 22,    # delta = 7 > 6, < 10
+                     'endTmEpoch': 34,
+                     'data': [1, -2, 1, 1],
+                     'times': [22, 26, 30, 34]}
+                ]}
+        }}
+        gap_minimum = 10
+        combine_data('STA1', data_dict, gap_minimum)
+        self.assertEqual(data_dict['STA1']['CH1']['gaps'], [])
+
+        self.assertEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['endTmEpoch'],
+            34)
+        self.assertListEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 22, 26, 30, 34])
+
+    def test_gap_gt_or_equal_gap_minimum(self):
+        # not combine; add to gap list
+        data_dict = {'STA1': {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 25,        # delta = 10 >= 10
+                     'endTmEpoch': 40,
+                     'data': [1, -2, 1, 1],
+                     'times': [25, 29, 33, 36, 40]}
+                ]}
+            }}
+        gap_minimum = 10
+        combine_data('STA1', data_dict, gap_minimum)
+        self.assertEqual(data_dict['STA1']['CH1']['gaps'], [[15, 25]])
+
+        self.assertEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['endTmEpoch'],
+            40)
+        self.assertListEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            data_dict['STA1']['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 25, 29, 33, 36, 40])
+
+
+class TestApplyConvertFactorToDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {
+                'CH1': {'tracesInfo': [{'data': np.array([1, 2, 2, -1])}]}
+            }
+        }
+        self.expected_data = [0.1, 0.2, 0.2, -0.1]
+
+    @patch('sohstationviewer.model.general_data.general_data_helper.'
+           'get_convert_factor')
+    def test_convert_factor(self, mock_get_convert_factor):
+        mock_get_convert_factor.return_value = 0.1
+        apply_convert_factor_to_data_dict('STA1', self.data_dict, 'Q330')
+        self.assertEqual(
+            self.data_dict['STA1']['CH1']['tracesInfo'][0]['data'].tolist(),
+            self.expected_data)
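Taken together, the four TestCombineData cases pin down a simple rule: adjacent traces are always merged, and a [previous_end, next_start] entry is appended to the gap list only when the gap or overlap is at least gap_minimum. The following illustrative distillation (not the real implementation) reproduces exactly what the tests assert:

```python
from typing import List, Optional


def gap_entry(prev_end: float, next_start: float,
              gap_minimum: float) -> Optional[List[float]]:
    # a gap entry is recorded when the gap (next_start > prev_end) or
    # overlap (next_start < prev_end) is at least gap_minimum; the traces
    # themselves are merged either way
    if abs(next_start - prev_end) >= gap_minimum:
        return [prev_end, next_start]
    return None


assert gap_entry(15, 13, 10) is None      # overlap 2  -> no entry
assert gap_entry(15, 5, 10) == [15, 5]    # overlap 10 -> [15, 5]
assert gap_entry(15, 22, 10) is None      # gap 7      -> no entry
assert gap_entry(15, 25, 10) == [15, 25]  # gap 10     -> [15, 25]
```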
diff --git a/tests/model/mseed_data/__init__.py b/tests/model/mseed_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/mseed_data/test_mseed.py b/tests/model/mseed_data/test_mseed.py
new file mode 100644
index 0000000000000000000000000000000000000000..2afa9b80c646d2cddd90d93b56ef33c2c9f4325c
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed.py
@@ -0,0 +1,360 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed import MSeed
+from sohstationviewer.model.general_data.general_data import \
+    ProcessingDataError
+
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+pegasus_data = TEST_DATA_DIR.joinpath("Pegasus-sample")
+q330_data = TEST_DATA_DIR.joinpath("Q330-sample")
+blockettes_data = TEST_DATA_DIR.joinpath("Q330_unimplemented_ascii_block")
+multiplex_data = TEST_DATA_DIR.joinpath("Q330_multiplex")
+centaur_data = TEST_DATA_DIR.joinpath("Centaur-sample")
+
+
+class TestMSeed(TestCase):
+    def test_path_not_exist(self):
+        # raise an exception when the path does not exist
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'list_of_dir': ['_'],
+            'on_unittest': True
+        }
+        with self.assertRaises(ProcessingDataError) as context:
+            MSeed(**args)
+        self.assertEqual(
+            str(context.exception),
+            "Path '_' not exist"
+        )
+
+    def test_read_text_only(self):
+        # No station is recognized; the text is added to the key 'TEXT' in
+        # log_data
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'list_of_dir': [pegasus_data],
+            'req_soh_chans': ['_'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT'])
+        self.assertEqual(len(obj.log_data['TEXT']), 2)
+        self.assertEqual(
+            obj.log_data['TEXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['TEXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_text_with_soh(self):
+        # the text gets its station from the SOH data and is added to
+        # log_data under the channel TXT
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'list_of_dir': [pegasus_data],
+            'req_soh_chans': ['VE1'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'KC01'])
+        self.assertEqual(len(obj.log_data['TEXT']), 0)
+        self.assertEqual(list(obj.log_data['KC01'].keys()), ['TXT'])
+        self.assertEqual(len(obj.log_data['KC01']['TXT']), 2)
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_text_with_waveform(self):
+        # the text gets its station from the waveform data and is added to
+        # log_data under the channel TXT
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'list_of_dir': [pegasus_data],
+            'req_wf_chans': ['HH1'],
+            'req_soh_chans': ['_'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'KC01'])
+        self.assertEqual(len(obj.log_data['TEXT']), 0)
+        self.assertEqual(list(obj.log_data['KC01'].keys()), ['TXT'])
+        self.assertEqual(len(obj.log_data['KC01']['TXT']), 2)
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_ascii(self):
+        # the info is text wrapped in the MSEED format
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'list_of_dir': [q330_data],
+            'req_soh_chans': ['LOG'],
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'AX08'])
+        self.assertEqual(list(obj.log_data['AX08'].keys()), ['LOG'])
+        self.assertEqual(obj.log_data['TEXT'], [])
+        self.assertEqual(len(obj.log_data['AX08']['LOG']), 16)
+        self.assertEqual(
+            obj.log_data['AX08']['LOG'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1625456260.12  To:1625456260.12\n\r'
+            '\nQuanterra Packet Baler Model 14 Restart. V'
+        )
+        self.assertEqual(
+            obj.log_data['AX08']['LOG'][1][:100],
+            '\n\nSTATE OF HEALTH: From:1625456366.62  To:1625456366.62'
+            '\nReducing Status Polling Interval\r\n[2021-07-0'
+        )
+
+    def test_read_blockettes_info(self):
+        # info in blockette 500
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'list_of_dir': [blockettes_data],
+            'req_soh_chans': ['ACE'],
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', '3203'])
+        self.assertEqual(list(obj.log_data['3203'].keys()), ['ACE'])
+        self.assertEqual(obj.log_data['TEXT'], [])
+        self.assertEqual(len(obj.log_data['3203']['ACE']), 1)
+        self.assertEqual(
+            obj.log_data['3203']['ACE'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1671729287.00014  To:1671729287.0'
+            '\n===========\nVCO correction: 53.7109375\nTim'
+        )
+
+    def test_not_is_multiplex_read_channel(self):
+        # is_multiplex = False => stop when reaching a channel that doesn't
+        # match the requested channels, so 'EL1' is read but not finished
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'list_of_dir': [multiplex_data],
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL1']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(obj.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730013.805)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['size'], 1932)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_is_multiplex_read_channel(self):
+        # is_multiplex = True => read every record
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'list_of_dir': [multiplex_data],
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL1']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(obj.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730720.4348998)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['size'], 143258)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_not_is_multiplex_selected_channel_in_middle(self):
+        # the selected channel is never reached because an earlier record
+        # doesn't meet the requirement when is_multiplex = False
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'list_of_dir': [multiplex_data],
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL2']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), [])
+
+    def test_is_multiplex_selected_channel_in_middle(self):
+        # is_multiplex = True => the selected channel will be read
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'list_of_dir': [multiplex_data],
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL2']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL2'])
+        self.assertEqual(obj.waveform_data['3203']['EL2']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['startTmEpoch'],
+                         1671730004.3100293)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['endTmEpoch'],
+                         1671730720.5549)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['size'], 143249)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL2']['tracesInfo']),
+                         1)
+
+    def test_existing_time_range(self):
+        # check that data_time comes from the given range; the end time may
+        # be slightly greater than read_end depending on the record's end time
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'list_of_dir': [q330_data],
+            'req_soh_chans': [],
+            'read_start': 1625456018.0,
+            'read_end': 1625505627.9998999
+        }
+        obj = MSeed(**args)
+        self.assertEqual(obj.keys, ['AX08'])
+        self.assertEqual(list(obj.soh_data['AX08'].keys()), ['VKI'])
+        self.assertEqual(list(obj.mass_pos_data['AX08'].keys()), [])
+        self.assertEqual(list(obj.waveform_data['AX08'].keys()), [])
+        self.assertEqual(obj.data_time['AX08'], [1625446018.0, 1625510338.0])
+
+    def test_non_existing_time_range(self):
+        # no station is created if the given range is outside the data time
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'list_of_dir': [q330_data],
+            'req_soh_chans': [],
+            'read_start': 1625356018.0,
+            'read_end': 1625405627.9998999
+        }
+        obj = MSeed(**args)
+        self.assertEqual(obj.keys, [])
+        self.assertEqual(obj.soh_data, {})
+        self.assertEqual(obj.mass_pos_data, {})
+        self.assertEqual(obj.waveform_data, {})
+        self.assertEqual(obj.data_time, {})
+
+    def test_read_waveform(self):
+        # TPS data is similar to waveform data but is not separated at gaps
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'list_of_dir': [q330_data],
+            'req_soh_chans': [],
+            'req_wf_chans': ['LHE']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['AX08'])
+        self.assertEqual(list(obj.waveform_data['AX08'].keys()), ['LHE'])
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['samplerate'], 1)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['startTmEpoch'],
+                         1625445156.000001)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['endTmEpoch'],
+                         1625532950.0)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['size'], 87794)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['AX08']['LHE']['tracesInfo']),
+                         1)
+
+    def test_read_mass_pos_channel(self):
+        # mass position channels will be read if one or both of the
+        # include_mpxxxxxx flags are True
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'list_of_dir': [q330_data],
+            'req_soh_chans': [],
+            'req_wf_chans': [],
+            'include_mp123zne': True
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.mass_pos_data.keys()), ['AX08'])
+        self.assertEqual(list(obj.mass_pos_data['AX08'].keys()), ['VM1'])
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['samplerate'], 0.1)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['startTmEpoch'],
+                         1625444970.0)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['endTmEpoch'],
+                         1625574580.0)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['size'], 12961)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['gaps'], [])
+        self.assertEqual(len(obj.mass_pos_data['AX08']['VM1']['tracesInfo']),
+                         1)
+
+    def test_gap(self):
+        # gaps will be detected when gap_minimum is set
+        args = {
+            'data_type': 'Centaur',
+            'is_multiplex': True,
+            'list_of_dir': [centaur_data],
+            'req_soh_chans': [],
+            'gap_minimum': 60
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(obj.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(obj.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(obj.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(obj.gaps['3734'], [[1534521420.0, 1534524000.0]])
+
+    def test_not_detect_gap(self):
+        # if gap_minimum isn't set but gaps exist, the data is still
+        # separated, but the gaps won't be added to the gap list
+        args = {
+            'data_type': 'Centaur',
+            'is_multiplex': True,
+            'list_of_dir': [centaur_data],
+            'req_soh_chans': [],
+            'gap_minimum': None
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(obj.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(obj.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(obj.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(obj.gaps['3734'], [])  # no gaps
diff --git a/tests/model/mseed_data/test_mseed_helper.py b/tests/model/mseed_data/test_mseed_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..938092c629f7115bd2623971a58a7aa5e7b047fe
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed_helper.py
@@ -0,0 +1,48 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed_helper import (
+    retrieve_nets_from_data_dict, read_text
+)
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+text_file = TEST_DATA_DIR.joinpath(
+    "Pegasus-sample/Pegasus_SVC4/logs/2020/XX/KC01/XX.KC01...D.2020.129")
+binary_file = TEST_DATA_DIR.joinpath(
+    "Pegasus-sample/Pegasus_SVC4/soh/2020/XX/KC01/VDT.D/"
+    "XX.KC01..VDT.D.2020.129")
+
+
+class TestReadText(TestCase):
+    def test_text_file(self):
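+        # a plain text file is read and returned as a string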
+        ret = read_text(text_file)
+        expected_ret = (
+            "\n\n** STATE OF HEALTH: XX.KC01...D.2020.129"
+            "\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware")
+        self.assertEqual(ret[:100], expected_ret)
+
+    def test_binary_file(self):
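+        # a binary file can't be decoded as text, so None is returned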
+        ret = read_text(binary_file)
+        self.assertIsNone(ret)
+
+
+class TestRetrieveNetsFromDataDict(TestCase):
+    def setUp(self):
+        self.nets_by_sta = {}
+        self.data_dict = {
+            'STA1': {'CHA1': {'nets': {'NET1', 'NET2'}},
+                     'CHA2': {'nets': {'NET2', 'NET3'}}
+                     },
+            'STA2': {'CHA1': {'nets': {'NET1'}},
+                     'CHA2': {'nets': {'NET1'}}
+                     }
+        }
+
+    def test_retrieve_nets(self):
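+        # nets from all channels of a station are merged into one set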
+        retrieve_nets_from_data_dict(self.data_dict, self.nets_by_sta)
+        self.assertEqual(list(self.nets_by_sta.keys()), ['STA1', 'STA2'])
+        self.assertEqual(sorted(list(self.nets_by_sta['STA1'])),
+                         ['NET1', 'NET2', 'NET3'])
+        self.assertEqual(sorted(list(self.nets_by_sta['STA2'])), ['NET1'])
diff --git a/tests/model/mseed_data/test_mseed_reader.py b/tests/model/mseed_data/test_mseed_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcdbe513272a07e763b8a90a8f3a662e6ebdb26a
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed_reader.py
@@ -0,0 +1,316 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed_reader import MSeedReader
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+ascii_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..LOG.2021.186")
+blockettes_files = TEST_DATA_DIR.joinpath(
+    "Q330_unimplemented_ascii_block/XX-3203_4-20221222190255")
+multiplex_file = TEST_DATA_DIR.joinpath(
+    "Q330_multiplex/XX-3203_4-20221222183011")
+soh_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..VKI.2021.186")
+waveform_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..LHE.2021.186")
+mass_pos_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..VM1.2021.186")
+gap_file = TEST_DATA_DIR.joinpath(
+    "Centaur-sample/SOH/"
+    "XX.3734.SOH.centaur-3_3734..20180817_000000.miniseed.miniseed")
+
+
+class TestMSeedReader(TestCase):
+    def setUp(self) -> None:
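+        # fresh data dicts for MSeedReader to fill in each test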
+        self.soh_data = {}
+        self.mass_pos_data = {}
+        self.waveform_data = {}
+        self.log_data = {}
+
+    def test_read_ascii(self):
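+        # info is text wrapped in the MSEED format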
+        args = {
+            'file_path': ascii_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['LOG'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.log_data.keys()), ['AX08'])
+        self.assertEqual(list(self.log_data['AX08'].keys()), ['LOG'])
+        self.assertEqual(len(self.log_data['AX08']['LOG']), 16)
+        self.assertEqual(
+            self.log_data['AX08']['LOG'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1625456260.12  To:1625456260.12\n\r'
+            '\nQuanterra Packet Baler Model 14 Restart. V'
+        )
+        self.assertEqual(
+            self.log_data['AX08']['LOG'][1][:100],
+            '\n\nSTATE OF HEALTH: From:1625456366.62  To:1625456366.62'
+            '\nReducing Status Polling Interval\r\n[2021-07-0'
+        )
+
+    def test_read_blockettes_info(self):
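+        # info is stored in blockette 500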
+        args = {
+            'file_path': blockettes_files,
+            'is_multiplex': True,
+            'req_soh_chans': ['ACE'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.log_data.keys()), ['3203'])
+        self.assertEqual(list(self.log_data['3203'].keys()), ['ACE'])
+        self.assertEqual(len(self.log_data['3203']['ACE']), 1)
+        self.assertEqual(
+            self.log_data['3203']['ACE'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1671729287.00014  To:1671729287.0'
+            '\n===========\nVCO correction: 53.7109375\nTim'
+        )
+
+    def test_not_is_multiplex_read_channel(self):
+        # is_multiplex = False => stop at the first record whose channel
+        # doesn't match the request, so 'EL1' is read but not finished
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['EL1'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(self.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(self.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730013.805)
+        self.assertEqual(self.waveform_data['3203']['EL1']['size'], 1932)
+        self.assertEqual(self.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_is_multiplex_read_channel(self):
+        # is_multiplex = True => read every record
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': True,
+            'req_wf_chans': ['EL1'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(self.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(self.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730720.4348998)
+        self.assertEqual(self.waveform_data['3203']['EL1']['size'], 143258)
+        self.assertEqual(self.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_not_is_multiplex_selected_channel_in_middle(self):
+        # the selected channel is never reached because an earlier record
+        # doesn't meet the requirement when is_multiplex = False
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['EL2'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), [])
+
+    def test_is_multiplex_selected_channel_in_middle(self):
+        # is_multiplex = True => the selected channel will be read
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': True,
+            'req_wf_chans': ['EL2'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL2'])
+        self.assertEqual(self.waveform_data['3203']['EL2']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL2']['startTmEpoch'],
+                         1671730004.3100293)
+        self.assertEqual(self.waveform_data['3203']['EL2']['endTmEpoch'],
+                         1671730720.5549)
+        self.assertEqual(self.waveform_data['3203']['EL2']['size'], 143249)
+        self.assertEqual(self.waveform_data['3203']['EL2']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL2']['tracesInfo']),
+                         1)
+
+    def test_existing_time_range(self):
+        # check that data_time comes from the given range; the end time may
+        # be slightly greater than read_end depending on the record's end time
+        args = {
+            'file_path': soh_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['VKI'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'read_start': 1625456018.0,
+            'read_end': 1625505627.9998999
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data['AX08'].keys()), ['VKI'])
+        self.assertEqual(self.soh_data['AX08']['VKI']['startTmEpoch'],
+                         1625446018.0)
+        self.assertEqual(self.soh_data['AX08']['VKI']['endTmEpoch'],
+                         1625510338.0)
+
+    def test_non_existing_time_range(self):
+        # no station is created if the given range is outside the data time
+        args = {
+            'file_path': soh_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['VKI'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'read_start': 1625356018.0,
+            'read_end': 1625405627.9998999
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(self.soh_data, {})
+        self.assertEqual(self.mass_pos_data, {})
+        self.assertEqual(self.waveform_data, {})
+
+    def test_read_waveform(self):
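+        # the requested waveform channel is read into waveform_data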
+        args = {
+            'file_path': waveform_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['LHE'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['AX08'])
+        self.assertEqual(list(self.waveform_data['AX08'].keys()), ['LHE'])
+        self.assertEqual(self.waveform_data['AX08']['LHE']['samplerate'], 1)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['startTmEpoch'],
+                         1625445156.000001)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['endTmEpoch'],
+                         1625532950.0)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['size'], 87794)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['AX08']['LHE']['tracesInfo']),
+                         1)
+
+    def test_read_mass_pos_channel(self):
+        # mass position channels will be read if one or both of the
+        # include_mpxxxxxx flags are True
+        args = {
+            'file_path': mass_pos_file,
+            'is_multiplex': False,
+            'include_mp123zne': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.mass_pos_data.keys()), ['AX08'])
+        self.assertEqual(list(self.mass_pos_data['AX08'].keys()), ['VM1'])
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['samplerate'], 0.1)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['startTmEpoch'],
+                         1625444970.0)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['endTmEpoch'],
+                         1625574580.0)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['size'], 12961)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['gaps'], [])
+        self.assertEqual(len(self.mass_pos_data['AX08']['VM1']['tracesInfo']),
+                         1)
+
+    def test_gap(self):
+        # gaps will be detected when gap_minimum is set
+        args = {
+            'file_path': gap_file,
+            'is_multiplex': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'gap_minimum': 60
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(self.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(self.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(self.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(self.soh_data['3734']['EX1']['gaps'],
+                         [[1534522200.0, 1534523940.0]])
+
+    def test_not_detect_gap(self):
+        # if gap_minimum isn't set but gaps exist, the data is still
+        # separated, but the gaps won't be added to the gap list
+        args = {
+            'file_path': gap_file,
+            'is_multiplex': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'gap_minimum': None
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(self.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(self.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(self.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(self.soh_data['3734']['EX1']['gaps'], [])  # no gaps
diff --git a/tests/test_data/Q330_mixed_traces/XX-3203_4-20221222183011 b/tests/test_data/Q330_multiplex/XX-3203_4-20221222183011
similarity index 100%
rename from tests/test_data/Q330_mixed_traces/XX-3203_4-20221222183011
rename to tests/test_data/Q330_multiplex/XX-3203_4-20221222183011
diff --git a/tests/test_database/test_extract_data.py b/tests/test_database/test_extract_data.py
index 64e7e1da1358b02d0a133d401597af071962b794..6f8abbb0f4166c7aa1b40a2f27b64bba27fe61d4 100644
--- a/tests/test_database/test_extract_data.py
+++ b/tests/test_database/test_extract_data.py
@@ -2,8 +2,7 @@ import unittest
 
 from sohstationviewer.database.extract_data import (
     get_chan_plot_info,
-    get_wf_plot_info,
-    get_chan_label,
+    get_seismic_chan_label,
     get_signature_channels,
     get_color_def,
     get_color_ranges,
@@ -11,7 +10,7 @@ from sohstationviewer.database.extract_data import (
 
 
 class TestExtractData(unittest.TestCase):
-    def test_get_chan_plot_info_good_channel_and_data_type(self):
+    def test_get_chan_plot_info_good_soh_channel_and_data_type(self):
         """
         Test basic functionality of get_chan_plot_info - channel and data type
         combination exists in database table `Channels`
@@ -25,9 +24,62 @@ class TestExtractData(unittest.TestCase):
                            'label': 'SOH/Data Def',
                            'fixPoint': 0,
                            'valueColors': '0:W|1:C'}
-        self.assertDictEqual(
-            get_chan_plot_info('SOH/Data Def', {'samplerate': 10}, 'RT130'),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('SOH/Data Def', 'RT130'),
+                             expected_result)
+
+    def test_get_chan_plot_info_masspos_channel(self):
+        with self.subTest("Mass position 'VM'"):
+            expected_result = {'channel': 'VM1',
+                               'plotType': 'linesMasspos',
+                               'height': 4,
+                               'unit': 'V',
+                               'linkedChan': None,
+                               'convertFactor': 0.1,
+                               'label': 'VM1-MassPos',
+                               'fixPoint': 1,
+                               'valueColors': None}
+            self.assertDictEqual(get_chan_plot_info('VM1', 'Q330'),
+                                 expected_result)
+
+        with self.subTest("Mass position 'MassPos'"):
+            expected_result = {'channel': 'MassPos1',
+                               'plotType': 'linesMasspos',
+                               'height': 4,
+                               'unit': 'V',
+                               'linkedChan': None,
+                               'convertFactor': 1,
+                               'label': 'MassPos1',
+                               'fixPoint': 1,
+                               'valueColors': None}
+            self.assertDictEqual(get_chan_plot_info('MassPos1', 'RT130'),
+                                 expected_result)
+
+    def test_get_chan_plot_info_seismic_channel(self):
+        with self.subTest("RT130 Seismic"):
+            expected_result = {'channel': 'DS2',
+                               'plotType': 'linesSRate',
+                               'height': 8,
+                               'unit': '',
+                               'linkedChan': None,
+                               'convertFactor': 1,
+                               'label': 'DS2',
+                               'fixPoint': 0,
+                               'valueColors': None}
+            self.assertDictEqual(get_chan_plot_info('DS2', 'RT130'),
+                                 expected_result)
+
+        with self.subTest("MSeed Seismic"):
+            expected_result = {'channel': 'LHE',
+                               'plotType': 'linesSRate',
+                               'height': 8,
+                               'unit': '',
+                               'linkedChan': None,
+                               'convertFactor': 1,
+                               'label': 'LHE-EW',
+                               'fixPoint': 0,
+                               'valueColors': None}
+            self.assertDictEqual(get_chan_plot_info('LHE', 'Q330'),
+                                 expected_result)
 
     def test_get_chan_plot_info_data_type_is_unknown(self):
         """
@@ -44,10 +96,8 @@ class TestExtractData(unittest.TestCase):
                            'label': 'DEFAULT-Bad Channel ID',
                            'fixPoint': 0,
                            'valueColors': None}
-        self.assertDictEqual(
-            get_chan_plot_info('Bad Channel ID',
-                               {'samplerate': 10}, 'Unknown'),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('Bad Channel ID', 'Unknown'),
+                             expected_result)
 
         # Channel exist in database
         expected_result = {'channel': 'LCE',
@@ -59,12 +109,8 @@ class TestExtractData(unittest.TestCase):
                            'label': 'LCE-PhaseError',
                            'fixPoint': 0,
                            'valueColors': 'L:W|D:Y'}
-        self.assertDictEqual(
-            get_chan_plot_info('LCE', {'samplerate': 10}, 'Unknown'),
-            expected_result)
-        self.assertDictEqual(
-            get_chan_plot_info('LCE', {'samplerate': 10}, 'Unknown'),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('LCE', 'Unknown'),
+                             expected_result)
 
     def test_get_chan_plot_info_bad_channel_or_data_type(self):
         """
@@ -86,69 +132,54 @@ class TestExtractData(unittest.TestCase):
         # Data type has None value. None value comes from
         # controller.processing.detect_data_type.
         expected_result['label'] = 'DEFAULT-SOH/Data Def'
-        self.assertDictEqual(
-            get_chan_plot_info('SOH/Data Def', {'samplerate': 10}, None),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('SOH/Data Def', None),
+                             expected_result)
 
         # Channel and data type are empty strings
         expected_result['label'] = 'DEFAULT-'
-        self.assertDictEqual(
-            get_chan_plot_info('', {'samplerate': 10}, ''),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('', ''),
+                             expected_result)
 
         # Channel exists in database but data type does not
         expected_result['label'] = 'DEFAULT-SOH/Data Def'
         self.assertDictEqual(
-            get_chan_plot_info('SOH/Data Def',
-                               {'samplerate': 10}, 'Bad Data Type'),
+            get_chan_plot_info('SOH/Data Def', 'Bad Data Type'),
             expected_result
         )
 
         # Data type exists in database but channel does not
         expected_result['label'] = 'DEFAULT-Bad Channel ID'
-        self.assertDictEqual(
-            get_chan_plot_info('Bad Channel ID',
-                               {'samplerate': 10}, 'RT130'),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('Bad Channel ID', 'RT130'),
+                             expected_result)
 
         # Both channel and data type exists in database but not their
         # combination
         expected_result['label'] = 'DEFAULT-SOH/Data Def'
-        self.assertDictEqual(
-            get_chan_plot_info('SOH/Data Def', {'samplerate': 10}, 'Q330'),
-            expected_result)
-
-    def test_get_wf_plot_info(self):
-        """
-        Test basic functionality of get_wf_plot_info - ensures returned
-        dictionary contains all the needed key. Bad channel IDs cases are
-        handled in tests for get_chan_label.
-        """
-        result = get_wf_plot_info('CH1')
-        expected_keys = {'param', 'plotType', 'valueColors', 'height',
-                         'label', 'unit', 'channel', 'convertFactor'}
-        self.assertSetEqual(set(result.keys()), expected_keys)
+        self.assertDictEqual(get_chan_plot_info('SOH/Data Def', 'Q330'),
+                             expected_result)
 
-    def test_get_chan_label_good_channel_id(self):
+    def test_get_seismic_chan_label_good_channel_id(self):
         """
-        Test basic functionality of get_chan_label - channel ID ends in one
-        of the keys in conf.dbSettings.dbConf['seisLabel'] or starts with 'DS'
+        Test basic functionality of get_seismic_chan_label - channel ID ends
+        in one of the keys in conf.dbSettings.dbConf['seisLabel'] or
+        starts with 'DS'
         """
         # Channel ID does not start with 'DS'
-        self.assertEqual(get_chan_label('CH1'), 'CH1-NS')
-        self.assertEqual(get_chan_label('CH2'), 'CH2-EW')
-        self.assertEqual(get_chan_label('CHG'), 'CHG')
+        self.assertEqual(get_seismic_chan_label('CH1'), 'CH1-NS')
+        self.assertEqual(get_seismic_chan_label('CH2'), 'CH2-EW')
+        self.assertEqual(get_seismic_chan_label('CHG'), 'CHG')
 
         # Channel ID starts with 'DS'
-        self.assertEqual(get_chan_label('DS-TEST-CHANNEL'), 'DS-TEST-CHANNEL')
+        self.assertEqual(get_seismic_chan_label('DS-TEST-CHANNEL'),
+                         'DS-TEST-CHANNEL')
 
     def test_get_chan_label_bad_channel_id(self):
         """
-        Test basic functionality of get_chan_label - channel ID does not end in
-        one of the keys in conf.dbSettings.dbConf['seisLabel'] or is the empty
-        string.
+        Test basic functionality of get_seismic_chan_label - channel ID does
+        not end in one of the keys in conf.dbSettings.dbConf['seisLabel'] or
+        is the empty string.
         """
-        self.assertRaises(IndexError, get_chan_label, '')
+        self.assertRaises(IndexError, get_seismic_chan_label, '')
 
     def test_get_signature_channels(self):
         """Test basic functionality of get_signature_channels"""
diff --git a/tests/test_model/test_handling_data_trim_downsample.py b/tests/test_model/test_handling_data_trim_downsample.py
index fd79ecbd82b5c13d8801ef5641b1ec88949b7dbc..bb26c2c54bb114e1a223f0385ba8e997da45cc52 100644
--- a/tests/test_model/test_handling_data_trim_downsample.py
+++ b/tests/test_model/test_handling_data_trim_downsample.py
@@ -1,6 +1,6 @@
 from pathlib import Path
 from tempfile import TemporaryDirectory
-from typing import Optional, Dict, Union, List
+from typing import Dict, Union, List
 
 from unittest import TestCase
 from unittest.mock import patch
@@ -8,17 +8,12 @@ from unittest.mock import patch
 from obspy.core import UTCDateTime
 import numpy as np
 
-import sohstationviewer.view.plotting.time_power_squared_processor
 from sohstationviewer.conf import constants as const
 from sohstationviewer.model.handling_data import (
     trim_downsample_chan_with_spr_less_or_equal_1,
     trim_downsample_wf_chan,
     trim_waveform_data,
     downsample_waveform_data,
-    get_start_5mins_of_diff_days,
-)
-from sohstationviewer.view.plotting.time_power_squared_processor import (
-    TimePowerSquaredProcessor,
 )
 from sohstationviewer.model.downsampler import downsample, chunk_minmax
 
@@ -610,337 +605,3 @@ class TestTrimDownsampleWfChan(TestCase):
                                 self.end_time, False)
         self.assertTrue(mock_trim.called)
         self.assertTrue(mock_downsample.called)
-
-
-class TestGetTrimTpsData(TestCase):
-    def no_file_memmap(self, file_path: Path, *args, **kwargs):
-        """
-        A mock of numpy.memmap. Reduce test run time significantly by making
-        sure that data access happens in memory and not on disk.
-
-        This method does not actually load the data stored on disk. Instead, it
-        constructs the array of data using the name of the given file. To do
-        so, this method requires the file name to be in the format
-        <prefix>_<index>. This method then constructs an array of
-        self.trace_size consecutive integers starting at
-        <index> * self.trace_size.
-
-        :param file_path: the path to a file used to construct the data array.
-        :param args: dummy arguments to make the API similar to numpy.memmap.
-        :param kwargs: dummy arguments to make the API similar to numpy.memmap.
-        :return: a numpy array constructed using file_path's name.
-        """
-        file_idx = int(file_path.name.split('_')[-1])
-        start = file_idx * self.trace_size
-        end = start + self.trace_size
-        return np.arange(start, end)
-
-    def add_trace(self, start_time: float, idx: Optional[int] = None):
-        """
-        Add a trace to the stored list of traces.
-
-        :param start_time: the start time of the trace to be added.
-        :param idx: the index to insert the trace into. If None, the new trace
-            will be appended to the list of traces
-        """
-        trace = {}
-        trace['startTmEpoch'] = start_time
-        trace['endTmEpoch'] = start_time + self.trace_size - 1
-        trace['size'] = self.trace_size
-
-        file_idx = start_time // self.trace_size
-        times_file_name = Path(self.data_folder.name) / f'times_{file_idx}'
-        trace['times_f'] = times_file_name
-
-        data_file_name = Path(self.data_folder.name) / f'data_{file_idx}'
-        trace['data_f'] = data_file_name
-
-        if idx is not None:
-            self.traces_info.insert(idx, trace)
-        else:
-            self.traces_info.append(trace)
-
-    def setUp(self) -> None:
-        """Set up text fixtures."""
-        memmap_patcher = patch.object(np, 'memmap',
-                                      side_effect=self.no_file_memmap)
-        self.addCleanup(memmap_patcher.stop)
-        memmap_patcher.start()
-
-        # Channel ID is only used when communicating with the main window.
-        # Seeing as we are testing the processing step here, we don't really
-        # need it.
-        channel_id = ''
-
-        self.channel_data: ChannelData = {'samplerate': 1}
-        self.traces_info = []
-        self.channel_data['tracesInfo'] = self.traces_info
-        self.data_folder = TemporaryDirectory()
-        self.trace_size = 1000
-        for i in range(100):
-            start_time = i * self.trace_size
-            self.add_trace(start_time)
-        self.start_time = 25000
-        self.end_time = 75000
-        self.start_5mins_of_diff_days = get_start_5mins_of_diff_days(
-            self.start_time, self.end_time)
-        self.tps_processor = TimePowerSquaredProcessor(
-            channel_id, self.channel_data, self.start_time, self.end_time,
-            self.start_5mins_of_diff_days
-        )
-
-    local_TimePowerSquaredProcessor = (sohstationviewer.view.plotting.
-                                       time_power_squared_processor.
-                                       TimePowerSquaredProcessor)
-
-    # If object obj is instance of class A, then the method call obj.method1()
-    # translate to A.method1(obj) for Python. So, in order to mock method1 for
-    # obj, we mock it for the class A.
-    @patch.object(local_TimePowerSquaredProcessor, 'trim_waveform_data')
-    def test_data_is_trimmed(self, mock_trim_waveform_data):
-        """Test that the data is trimmed."""
-        self.tps_processor.run()
-        self.assertTrue(mock_trim_waveform_data.called)
-
-    def test_appropriate_amount_of_5_mins_skipped(self):
-        """Test that the trimmed part of the data is skipped over."""
-        self.tps_processor.run()
-        with self.subTest('test_skip_before_start_time'):
-            first_unskipped_idx = 83
-            skipped_tps_arr = (
-                self.channel_data['tps_data'][0][:first_unskipped_idx]
-            )
-            self.assertTrue((skipped_tps_arr == 0).all())
-        with self.subTest('test_skip_after_end_time'):
-            last_unskipped_idx = 252
-            skipped_tps_arr = (
-                self.channel_data['tps_data'][0][last_unskipped_idx + 1:]
-            )
-            self.assertTrue((skipped_tps_arr == 0).all())
-
-    def test_result_is_stored(self):
-        """Test that the result of the TPS calculation is stored."""
-        self.tps_processor.run()
-        self.assertTrue('tps_data' in self.channel_data)
-
-    def test_formula_is_correct(self):
-        """Test that the TPS calculation uses the correct formula."""
-        self.tps_processor.start_time = 50000
-        self.tps_processor.end_time = 52000
-        self.tps_processor.run()
-        first_unskipped_idx = 166
-        last_unskipped_idx = 175
-        tps_data = self.channel_data['tps_data'][0]
-        unskipped_tps_arr = (
-            tps_data[first_unskipped_idx:last_unskipped_idx + 1]
-        )
-        expected = np.array([
-            2.51497985e+09, 2.54515955e+09, 2.57551925e+09, 0.00000000e+00,
-            1.96222188e+09, 2.64705855e+09, 2.67801825e+09, 2.03969638e+09,
-            2.75095755e+09, 2.78251725e+09
-        ])
-        self.assertTrue(np.allclose(unskipped_tps_arr, expected))
-
-    def test_one_tps_array_for_each_day_one_day_of_data(self):
-        """
-        Test that there is one TPS array for each day of data.
-
-        Test the case where there is only one day of data.
-        """
-        self.tps_processor.run()
-        self.assertEqual(len(self.channel_data['tps_data']), 1)
-
-    def test_one_tps_array_for_each_day_multiple_days_of_data(self):
-        """
-        Test that there is one TPS array for each dat of data.
-
-        Test the case where there are more than one day of data.
-        """
-        # Currently, the data time goes from 0 to 100000, which is enough to
-        # cover two days (the start of the second positive day in epoch time is
-        # 86400). Thus, we only have to set the end time to the data end time
-        # to have two days of data.
-        self.tps_processor.end_time = 100000
-        self.tps_processor.start_5mins_of_diff_days = \
-            get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                         self.tps_processor.end_time)
-        self.tps_processor.run()
-        self.assertEqual(len(self.channel_data['tps_data']), 2)
-
-    def test_data_has_gap_to_the_right_data_same_day_before_gap(self):
-        """
-        Test that gaps in the data are skipped in TPS calculation by checking
-        that the elements in the TPS array corresponding to the gaps are
-        0.
-
-        Test the case where there are gaps to the right of the data and the
-        traces directly next to the gaps are in the same day.
-        """
-        # Remove traces that go from 1000 to 24999 (traces 2 to 25) in order to
-        # create a gap on the right side of the data.
-        self.traces_info = [trace
-                            for i, trace in enumerate(self.traces_info)
-                            if not 0 < i < 25]
-        self.channel_data['tracesInfo'] = self.traces_info
-
-        with self.subTest('test_start_time_in_gap'):
-            self.tps_processor.start_time = 15000
-            self.tps_processor.start_5mins_of_diff_days =  \
-                get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                             self.tps_processor.end_time)
-            self.tps_processor.run()
-            self.assertEqual(len(self.channel_data['tps_data']), 1)
-            tps_gap = slice(0, 50)
-            tps_data_in_gap = self.channel_data['tps_data'][0][tps_gap]
-            tps_data_in_gap_contains_zero = np.allclose(
-                tps_data_in_gap, np.zeros(tps_data_in_gap.size)
-            )
-            self.assertTrue(tps_data_in_gap_contains_zero)
-
-        with self.subTest('test_start_time_cover_all_traces'):
-            self.tps_processor.start_time = 500
-            self.tps_processor.start_5mins_of_diff_days =  \
-                get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                             self.tps_processor.end_time)
-            self.tps_processor.run()
-            self.assertEqual(len(self.channel_data['tps_data']), 1)
-            tps_gap = slice(2, 83)
-            tps_data_in_gap = self.channel_data['tps_data'][0][tps_gap]
-            tps_data_in_gap_contains_zero = np.allclose(
-                tps_data_in_gap, np.zeros(tps_data_in_gap.size)
-            )
-            self.assertTrue(tps_data_in_gap_contains_zero)
-
-    def test_data_has_gap_to_the_left_data_same_day_after_gap(self):
-        """
-        Test that gaps in the data are skipped in TPS calculation by checking
-        that the elements in the TPS array corresponding to the gaps are
-        0.
-
-        Test the case where there are gaps to the left of the data and the
-        traces directly next to the gaps are in the same day.
-        """
-        # Data end time is 100000, so we want a trace that starts after 100001
-        trace_start_time = 125000
-        self.add_trace(trace_start_time)
-
-        with self.subTest('test_end_time_in_gap'):
-            # Subject to change after Issue #37 is fixed
-            self.tps_processor.end_time = 110000
-            self.tps_processor.start_5mins_of_diff_days =  \
-                get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                             self.tps_processor.end_time)
-            self.tps_processor.run()
-            self.assertEqual(len(self.channel_data['tps_data']), 2)
-            tps_gaps = (slice(45, 128), slice(131, None))
-            tps_data_in_gaps = np.concatenate(
-                [self.channel_data['tps_data'][1][gap] for gap in tps_gaps]
-            )
-            tps_data_in_gaps_contains_zero = np.allclose(
-                tps_data_in_gaps, np.zeros(tps_data_in_gaps.size)
-            )
-            self.assertTrue(tps_data_in_gaps_contains_zero)
-
-        with self.subTest('test_end_time_cover_all_traces'):
-            self.tps_processor.end_time = trace_start_time + 50
-            self.tps_processor.start_5mins_of_diff_days =  \
-                get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                             self.tps_processor.end_time)
-            self.tps_processor.run()
-            self.assertEqual(len(self.channel_data['tps_data']), 2)
-            tps_gaps = (slice(45, 128), slice(131, None))
-            tps_data_in_gaps = np.concatenate(
-                [self.channel_data['tps_data'][1][gap] for gap in tps_gaps]
-            )
-            tps_data_in_gaps_contains_zero = np.allclose(
-                tps_data_in_gaps, np.zeros(tps_data_in_gaps.size)
-            )
-            self.assertTrue(tps_data_in_gaps_contains_zero)
-
-    def test_data_has_gap_to_the_right_data_different_day_before_gap(self):
-        """
-        Test that gaps in the data are skipped in TPS calculation by checking
-        that the elements in the TPS array corresponding to the gaps are
-        0.
-
-        Test the case where there are gaps to the right of the data and the
-        traces directly next to the gaps are in different days.
-        """
-        trace_start_time = -50000
-        self.add_trace(trace_start_time, idx=0)
-
-        with self.subTest('test_start_time_in_gap'):
-            self.tps_processor.start_time = -25000
-            self.tps_processor.start_5mins_of_diff_days =  \
-                get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                             self.tps_processor.end_time)
-            self.tps_processor.run()
-            self.assertEqual(len(self.channel_data['tps_data']), 2)
-            tps_gap = slice(const.NO_5M_DAY)
-            tps_data_in_gap = self.channel_data['tps_data'][0][tps_gap]
-            tps_data_in_gap_contains_zero = np.allclose(
-                tps_data_in_gap, np.zeros(tps_data_in_gap.size)
-            )
-            self.assertTrue(tps_data_in_gap_contains_zero)
-
-        with self.subTest('test_start_time_cover_all_traces'):
-            self.tps_processor.start_time = -60000
-            self.tps_processor.start_5mins_of_diff_days = \
-                get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                             self.tps_processor.end_time)
-            self.tps_processor.run()
-            self.assertEqual(len(self.channel_data['tps_data']), 2)
-            tps_gaps = (slice(0, 121), slice(124, None))
-            tps_data_in_gaps = np.concatenate(
-                [self.channel_data['tps_data'][0][gap] for gap in tps_gaps]
-            )
-            tps_data_in_gaps_contains_zero = np.allclose(
-                tps_data_in_gaps, np.zeros(tps_data_in_gaps.size)
-            )
-            self.assertTrue(tps_data_in_gaps_contains_zero)
-
-    def test_data_has_gap_to_the_left_data_different_day_after_gap(self):
-        """
-        Test that gaps in the data are skipped in TPS calculation by checking
-        that the elements in the TPS array corresponding to the gaps are
-        0.
-
-        Test the case where there are gaps to the left of the data and the
-        traces directly next to the gaps are in different days.
-        """
-        # The setup portion of this test suite only create traces in the first
-        # positive day in epoch time. So, in order to guarantee there is a gap
-        # in the TPS array, we skip the second positive day. The start of the
-        # third positive day in epoch time is 172800, so we want a trace that
-        # starts after 172801.
-        trace_start_time = 173100
-        self.add_trace(trace_start_time)
-
-        with self.subTest('test_end_time_same_day_as_second_to_last_trace'):
-            # Subject to change after Issue #37 is fixed
-            self.tps_processor.end_time = 125000
-            self.tps_processor.start_5mins_of_diff_days =  \
-                get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                             self.tps_processor.end_time)
-            with self.assertRaises(IndexError):
-                self.tps_processor.run()
-
-        with self.subTest('test_end_time_cover_all_traces'):
-            self.tps_processor.end_time = trace_start_time + 50
-            self.tps_processor.start_5mins_of_diff_days =  \
-                get_start_5mins_of_diff_days(self.tps_processor.start_time,
-                                             self.tps_processor.end_time)
-            self.tps_processor.run()
-            self.assertEqual(len(self.channel_data['tps_data']), 3)
-            tps_gap_day_2 = slice(45, None)
-            tps_gap_day_3 = slice(4, None)
-            tps_data_in_gaps = np.hstack(
-                (
-                    self.channel_data['tps_data'][1][tps_gap_day_2],
-                    self.channel_data['tps_data'][2][tps_gap_day_3]
-                )
-            )
-            tps_data_in_gaps_contains_zero = np.allclose(
-                tps_data_in_gaps, np.zeros(tps_data_in_gaps.size)
-            )
-            self.assertTrue(tps_data_in_gaps_contains_zero)
diff --git a/tests/test_model/test_mseed/test_gps.py b/tests/test_model/test_mseed/test_gps.py
index 1d09b21dfc1d9efb257e7e4f9f6f88a7acf57d37..8fd114d0cd8561f1cd757d1bbff6d28f20206e90 100644
--- a/tests/test_model/test_mseed/test_gps.py
+++ b/tests/test_model/test_mseed/test_gps.py
@@ -223,7 +223,8 @@ class MockMSeed(MSeed):
 class TestGetGPSChannelPrefix(TestCase):
     def setUp(self) -> None:
         self.mseed_obj = MockMSeed()
-        self.mseed_obj.channels = set()
+        self.mseed_obj.selected_key = 'STA'
+        self.mseed_obj.soh_data = {'STA': {}}
 
     def test_pegasus_data_type(self):
         data_type = 'Pegasus'
@@ -239,14 +240,16 @@ class TestGetGPSChannelPrefix(TestCase):
 
     def test_unknown_data_type_pegasus_gps_channels(self):
         data_type = 'Unknown'
-        self.mseed_obj.channels = {'VNS', 'VLA', 'VLO', 'VEL'}
+        self.mseed_obj.soh_data = {
+            'STA': {'VNS': {}, 'VLA': {}, 'VEL': {}, 'VLO': {}}}
         expected = 'V'
         result = get_gps_channel_prefix(self.mseed_obj, data_type)
         self.assertEqual(expected, result)
 
     def test_unknown_data_type_centaur_gps_channels(self):
         data_type = 'Unknown'
-        self.mseed_obj.channels = {'GNS', 'GLA', 'GLO', 'GEL'}
+        self.mseed_obj.soh_data = {
+            'STA': {'GNS': {}, 'GLA': {}, 'GEL': {}, 'GLO': {}}}
         expected = 'G'
         result = get_gps_channel_prefix(self.mseed_obj, data_type)
         self.assertEqual(expected, result)
diff --git a/tests/test_model/test_reftek/test_gps.py b/tests/test_model/test_reftek/test_gps.py
index 20d38d0a3895715657b5d0d52e4af03d0dcaf2a3..381f1f6c542daf33e6c40bbed733cc2dcfccaf8c 100644
--- a/tests/test_model/test_reftek/test_gps.py
+++ b/tests/test_model/test_reftek/test_gps.py
@@ -97,7 +97,6 @@ class TestParseGpsPoint(unittest.TestCase):
             gps_point = parse_gps_point_rt130(self.good_gps_line,
                                               self.gps_year)
             result = gps_point.longitude
-            print(result)
             expected = -106.92038611111111
             self.assertTrue(math.isclose(result, expected))
 
diff --git a/tests/view/plotting/plotting_widget/__init__.py b/tests/view/plotting/plotting_widget/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/view/plotting/plotting_widget/test_plotting_processor_helper.py b/tests/view/plotting/plotting_widget/test_plotting_processor_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..748e36169a93e7c1caa063444ed6ce436f8a11e4
--- /dev/null
+++ b/tests/view/plotting/plotting_widget/test_plotting_processor_helper.py
@@ -0,0 +1,150 @@
+from unittest import TestCase
+from unittest.mock import patch
+
+from obspy.core import UTCDateTime
+import numpy as np
+
+from sohstationviewer.view.plotting.plotting_widget.plotting_processor_helper \
+    import downsample, chunk_minmax
+
+ZERO_EPOCH_TIME = UTCDateTime(1970, 1, 1, 0, 0, 0).timestamp
+
+
+class TestDownsample(TestCase):
+    # FROM test_handling_data_trim_downsample.TestDownsample
+    def setUp(self) -> None:
+        patcher = patch('sohstationviewer.view.plotting.plotting_widget.'
+                        'plotting_processor_helper.chunk_minmax')
+        self.addCleanup(patcher.stop)
+        self.mock_chunk_minmax = patcher.start()
+        self.times = np.arange(1000)
+        self.data = np.arange(1000)
+        self.log_idx = np.arange(1000)
+
+    def test_first_downsample_step_remove_enough_points(self):
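+        # the first downsample step alone is enough, so chunk_minmax
+        # is never called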
+        req_points = 999
+        downsample(self.times, self.data, rq_points=req_points)
+        self.assertFalse(self.mock_chunk_minmax.called)
+
+    def test_first_downsample_step_remove_enough_points_with_logidx(self):
+        req_points = 999
+        downsample(self.times, self.data, self.log_idx, rq_points=req_points)
+        self.assertFalse(self.mock_chunk_minmax.called)
+
+    def test_second_downsample_step_required(self):
+        req_points = 1
+        downsample(self.times, self.data, rq_points=req_points)
+        self.assertTrue(self.mock_chunk_minmax.called)
+        times, data, _, rq_points = self.mock_chunk_minmax.call_args[0]
+        self.assertIsNot(times, self.times)
+        self.assertIsNot(data, self.data)
+        self.assertEqual(rq_points, req_points)
+
+    def test_second_downsample_step_required_with_logidx(self):
+        req_points = 1
+        downsample(self.times, self.data, self.log_idx, rq_points=req_points)
+        self.assertTrue(self.mock_chunk_minmax.called)
+        times, data, log_idx, rq_points = self.mock_chunk_minmax.call_args[0]
+        self.assertIsNot(times, self.times)
+        self.assertIsNot(data, self.data)
+        self.assertIsNot(log_idx, self.log_idx)
+        self.assertEqual(rq_points, req_points)
+
+    def test_requested_points_greater_than_data_size(self):
+        req_points = 10000
+        times, data, _ = downsample(
+            self.times, self.data, rq_points=req_points)
+        self.assertFalse(self.mock_chunk_minmax.called)
+        # Check that we did not do any processing on the times and data arrays.
+        # This ensures that we don't do two unneeded copy operations.
+        self.assertIs(times, self.times)
+        self.assertIs(data, self.data)
+
+    def test_requested_points_greater_than_data_size_with_logidx(self):
+        req_points = 10000
+        times, data, log_idx = downsample(
+            self.times, self.data, self.log_idx, rq_points=req_points)
+        self.assertFalse(self.mock_chunk_minmax.called)
+        # Check that we did not do any processing on the times, data, and
+        # log_idx arrays. This ensures that we don't do any unneeded copy
+        # operations.
+        self.assertIs(times, self.times)
+        self.assertIs(data, self.data)
+        self.assertIs(log_idx, self.log_idx)
+
+    def test_requested_points_is_zero(self):
+        req_points = 0
+        downsample(self.times, self.data, rq_points=req_points)
+        self.assertTrue(self.mock_chunk_minmax.called)
+        times, data, _, rq_points = self.mock_chunk_minmax.call_args[0]
+        self.assertIsNot(times, self.times)
+        self.assertIsNot(data, self.data)
+        self.assertEqual(rq_points, req_points)
+
+    def test_requested_points_is_zero_with_logidx(self):
+        req_points = 0
+        downsample(self.times, self.data, self.log_idx, rq_points=req_points)
+        self.assertTrue(self.mock_chunk_minmax.called)
+        times, data, log_idx, rq_points = self.mock_chunk_minmax.call_args[0]
+        self.assertIsNot(times, self.times)
+        self.assertIsNot(data, self.data)
+        self.assertIsNot(log_idx, self.log_idx)
+        self.assertEqual(rq_points, req_points)
+
+    def test_empty_times_and_data(self):
+        req_points = 1000
+        self.times = np.empty((0, 0))
+        self.data = np.empty((0, 0))
+        times, data, _ = downsample(
+            self.times, self.data, rq_points=req_points)
+        self.assertFalse(self.mock_chunk_minmax.called)
+        # Check that we did not do any processing on the times and data arrays.
+        # This ensures that we don't do two unneeded copy operations.
+        self.assertIs(times, self.times)
+        self.assertIs(data, self.data)
+
+    def test_empty_times_and_data_with_logidx(self):
+        req_points = 1000
+        self.times = np.empty((0, 0))
+        self.data = np.empty((0, 0))
+        self.log_idx = np.empty((0, 0))
+        times, data, log_idx = downsample(
+            self.times, self.data, self.log_idx, rq_points=req_points)
+        self.assertFalse(self.mock_chunk_minmax.called)
+        # Check that we did not do any processing on the times, data, and
+        # log_idx arrays. This ensures that we don't do any unneeded copy
+        # operations.
+        self.assertIs(times, self.times)
+        self.assertIs(data, self.data)
+        self.assertIs(log_idx, self.log_idx)
+
+
+class TestChunkMinmax(TestCase):
+    # FROM test_handling_data_trim_downsample.TestChunkMinmax
+    def setUp(self):
+        self.times = np.arange(1000)
+        self.data = np.arange(1000)
+        self.log_idx = np.arange(1000)
+
+    def test_data_size_is_multiple_of_requested_points(self):
+        req_points = 100
+        times, data, log_idx = chunk_minmax(
+            self.times, self.data, self.log_idx, req_points)
+        self.assertEqual(times.size, req_points)
+        self.assertEqual(data.size, req_points)
+        self.assertEqual(log_idx.size, req_points)
+
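+    # chunk_minmax appears to implement min/max decimation: the arrays are
+    # cut into chunks and the extrema of each chunk are kept, so that peaks
+    # survive downsampling. When data.size is an exact multiple of rq_points
+    # the output sizes match rq_points exactly; otherwise, as the next test
+    # suggests, chunk_minmax delegates back to downsample to even things out.
+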
+    @patch('sohstationviewer.model.downsampler.downsample', wraps=downsample)
+    def test_data_size_is_not_multiple_of_requested_points(
+            self, mock_downsample):
+        req_points = 102
+        chunk_minmax(self.times, self.data, self.log_idx, req_points)
+        self.assertTrue(mock_downsample.called)
+
+    def test_requested_points_too_small(self):
+        small_req_points_list = [0, 1]
+        for req_points in small_req_points_list:
+            with self.subTest(f'test_requested_points_is_{req_points}'):
+                times, data, log_idx = chunk_minmax(
+                    self.times, self.data, self.log_idx, rq_points=req_points)
+                self.assertEqual(times.size, 0)
+                self.assertEqual(data.size, 0)
+                self.assertEqual(log_idx.size, 0)
diff --git a/tests/test_model/test_handling_data_calc_time.py b/tests/view/plotting/test_time_power_square_helper.py
similarity index 56%
rename from tests/test_model/test_handling_data_calc_time.py
rename to tests/view/plotting/test_time_power_square_helper.py
index 30509774eb5ca919ebe7ebbb511ef3ed1a98a2a2..16ed6d7ff2c73063bb11cb57f3bddfed68522b7a 100644
--- a/tests/test_model/test_handling_data_calc_time.py
+++ b/tests/view/plotting/test_time_power_square_helper.py
@@ -1,12 +1,17 @@
+import math
 from unittest import TestCase
-
+import numpy as np
 from obspy import UTCDateTime
-from sohstationviewer.model.handling_data import (
-    get_start_5mins_of_diff_days, find_tps_tm_idx
+
+from sohstationviewer.view.plotting.time_power_squared_helper import (
+    get_start_5mins_of_diff_days, find_tps_tm_idx,
+    get_tps_for_discontinuous_data
 )
+from sohstationviewer.conf import constants as const
 
 
 class TestGetEachDay5MinList(TestCase):
+    # FROM handling_data_calc_time
     def test_start_in_midle_end_exact(self):
         """
         Start in the middle of a day and end at the exact end of a day
@@ -55,6 +60,7 @@ class TestGetEachDay5MinList(TestCase):
 
 
 class TestFindTPSTmIdx(TestCase):
+    # FROM handling_data_calc_time
     @classmethod
     def setUpClass(cls) -> None:
         start = UTCDateTime("2012-09-07T12:15:00").timestamp
@@ -83,3 +89,53 @@ class TestFindTPSTmIdx(TestCase):
         tm = UTCDateTime("2012-09-09T00:00:00").timestamp
         start_tps_tm_idx = find_tps_tm_idx(tm, self.start_5mins_of_diff_days)
         self.assertEqual(start_tps_tm_idx, (287, -1))
+
+
+class TestGetTPSForDiscontinuousData(TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.day_begin = UTCDateTime("2021-07-05T00:00:00").timestamp
+        cls.start = UTCDateTime("2021-07-05T22:59:28.340").timestamp
+        cls.end = UTCDateTime("2021-07-06T3:59:51.870").timestamp
+        cls.start_5mins_of_diff_days = get_start_5mins_of_diff_days(
+            cls.start, cls.end
+        )
+
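+    # A TPS array presumably holds one value per five-minute block of a UTC
+    # day: const.SEC_5M seconds (300) per block, const.NO_5M_DAY blocks
+    # (288) per day. The block index of a timestamp t within its day is
+    # then math.ceil((t - day_begin) / const.SEC_5M) - 1, which is how
+    # expected_first_index is computed in the tests below.
+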
+    def test_more_than_10_minute_apart(self):
+        # check for empty blocks between the widely spaced TPS data points
+        times = np.arange(self.start, self.end, 60*60)    # 60m apart
+        data = np.random.uniform(-1000, 1000, times.size)
+        channel_data = {'tracesInfo': [{'times': times, 'data': data}]}
+        tps = get_tps_for_discontinuous_data(
+            channel_data, self.start_5mins_of_diff_days)
+        self.assertEqual(len(tps), 2)
+        expected_first_index = \
+            math.ceil((self.start - self.day_begin)/const.SEC_5M) - 1
+        day0_indexes = np.where(tps[0] != 0)[0]
+        day1_indexes = np.where(tps[1] != 0)[0]
+
+        self.assertEqual(day0_indexes[0], expected_first_index)
+
+        # consecutive non-empty blocks are 60/5 = 12 blocks apart
+        self.assertTrue(np.all(np.diff(day0_indexes) == 60/5))
+        self.assertTrue(np.all(np.diff(day1_indexes) == 60/5))
+
+    def test_less_than_10_minute_apart(self):
+        # the data points are spaced apart, but by less than 10 minutes,
+        # so the function fills in the empty blocks between them
+        times = np.arange(self.start, self.end, 9*60)    # 9m apart
+        data = np.random.uniform(-1000, 1000, times.size)
+        channel_data = {'tracesInfo': [{'times': times, 'data': data}]}
+        tps = get_tps_for_discontinuous_data(
+            channel_data, self.start_5mins_of_diff_days)
+        self.assertEqual(len(tps), 2)
+        expected_first_index = \
+            math.ceil((self.start - self.day_begin)/const.SEC_5M) - 1
+        day0_indexes = np.where(tps[0] != 0)[0]
+        day1_indexes = np.where(tps[1] != 0)[0]
+        self.assertEqual(day0_indexes[0], expected_first_index)
+        # no gaps between consecutive non-empty blocks
+        self.assertTrue(np.all(np.diff(day0_indexes) == 1))
+        self.assertTrue(np.all(np.diff(day1_indexes) == 1))
+        # the last block of day0 contains data
+        self.assertIn(const.NO_5M_DAY - 1, day0_indexes)