diff --git a/documentation/01 _ Table of Contents.help.md b/documentation/01 _ Table of Contents.help.md
index f7e158e1eb463f7b419d7f220879acbe094331ae..60e076d08b46d31af4e52b8bcd509a2c73aaefde 100644
--- a/documentation/01 _ Table of Contents.help.md	
+++ b/documentation/01 _ Table of Contents.help.md	
@@ -4,6 +4,8 @@ Welcome to the SOH Station Viewer documentation. Here you will find usage guides
 
 On the left-hand side you will find a list of currently available help topics.
 
+If the links in the Table of Contents are broken, click Recreate Table of Contents <img src='recreate_table_contents.png' height=30 style='margin: 3px 0px 0px 0px;'/> to rebuild it.
+
 The home button can be used to return to this page at any time.
 
 # Table of Contents
@@ -14,19 +16,23 @@ The home button can be used to return to this page at any time.
 
 + [How to Use Help](03%20_%20How%20to%20Use%20Help.help.md)
 
-+ [Search SOH n LOG](04%20_%20Search%20SOH%20n%20LOG.help.md)
++ [Search List of Directories](04%20_%20Search%20List%20of%20Directories.help.md)
+
++ [Read from Data Card](05%20_%20Read%20from%20Data%20Card.help.md)
+
++ [Select SOH](06%20_%20Select%20SOH.help.md)
 
-+ [Search List of Directories](05%20_%20Search%20List%20of%20Directories.help.md)
++ [Select Mass Position](07%20_%20Select%20Mass%20Position.help.md)
 
-+ [Read from Data Card](06%20_%20Read%20from%20Data%20Card.help.md)
++ [Select Waveforms](08%20_%20Select%20Waveforms.help.md)
 
-+ [Select SOH](07%20_%20Select%20SOH.help.md)
++ [Gap Display](09%20_%20Gap%20Display.help.md)
 
-+ [Select Mass Position](08%20_%20Select%20Mass%20Position.help.md)
++ [Change TPS Color Range](10%20_%20Change%20TPS%20Color%20Range.help.md)
 
-+ [Select Waveforms](09%20_%20Select%20Waveforms.help.md)
++ [Save Plots](11%20_%20Save%20Plots.help.md)
 
-+ [Gap Display](10%20_%20Gap%20Display.help.md)
++ [Search SOH n LOG](12%20_%20Search%20SOH%20n%20LOG.help.md)
 
 + [GPS Dialog](20%20_%20GPS%20Dialog.help.md)
 
diff --git a/documentation/05 _ Search List of Directories.help.md b/documentation/04 _ Search List of Directories.help.md
similarity index 100%
rename from documentation/05 _ Search List of Directories.help.md
rename to documentation/04 _ Search List of Directories.help.md
diff --git a/documentation/06 _ Read from Data Card.help.md b/documentation/05 _ Read from Data Card.help.md
similarity index 100%
rename from documentation/06 _ Read from Data Card.help.md
rename to documentation/05 _ Read from Data Card.help.md
diff --git a/documentation/07 _ Select SOH.help.md b/documentation/06 _ Select SOH.help.md
similarity index 100%
rename from documentation/07 _ Select SOH.help.md
rename to documentation/06 _ Select SOH.help.md
diff --git a/documentation/08 _ Select Mass Position.help.md b/documentation/07 _ Select Mass Position.help.md
similarity index 100%
rename from documentation/08 _ Select Mass Position.help.md
rename to documentation/07 _ Select Mass Position.help.md
diff --git a/documentation/09 _ Select Waveforms.help.md b/documentation/08 _ Select Waveforms.help.md
similarity index 100%
rename from documentation/09 _ Select Waveforms.help.md
rename to documentation/08 _ Select Waveforms.help.md
diff --git a/documentation/11 _ Gap Display.help.md b/documentation/09 _ Gap Display.help.md
similarity index 100%
rename from documentation/11 _ Gap Display.help.md
rename to documentation/09 _ Gap Display.help.md
diff --git a/documentation/11 _ Save Plots.help.md b/documentation/11 _ Save Plots.help.md
new file mode 100644
index 0000000000000000000000000000000000000000..0027b76db29eeb97aa0adf7cbe68dc7fa5126b09
--- /dev/null
+++ b/documentation/11 _ Save Plots.help.md	
@@ -0,0 +1,60 @@
+# Save Plots
+
+---------------------------
+---------------------------
+
+## Step 1: Click 'Save Plot'
+The Main Window, the Raw Data Plot, and the TPS Plot each have a button
+labeled 'Save Plot'.
+
+Click this button to save the plots in the corresponding window.
+
+* Saving State-of-Health plots
+<br />
+<img alt="Save SOH" src="images/save_plots/save_button_soh.png" height="30" />
+<br />
+* Saving Raw data plots
+<br />
+<img alt="Save Waveform" src="images/save_plots/save_button_wf.png" height="60" />
+<br />
+* Saving Time-Power-Squared plots
+<br />
+<img alt="Save TPS" src="images/save_plots/save_button_tps.png" height="80" />
+<br />
+<br />
+<br />
+
+If the current color mode is black, you will be asked whether to continue or
+to cancel so that you can change the color mode before saving the image.
+
+<br />
+<br />
+<img alt="Want to change color mode?" src="images/save_plots/question_on_changing_black_mode.png" height="150" />
+<br />
+
+* If you click 'Cancel', saving is canceled so that you can change the color
+mode before saving the plots again.
+* If you click 'Continue', saving proceeds and the image is saved in black
+mode.
+<br />
+
+---------------------------
+## Step 2: Edit the file path and select the image format
+After you click the 'Save Plot' button, the 'Save Plot' dialog pops up.
+
+<br />
+<br />
+<img alt="Select Image Format dialog" src="images/save_plots/save_file_dialog.png" height="200" />
+<br />
+
++ The default path for the image file is preset in text box (1). To change
+the path, click the 'Save Directory' button to open a file dialog.
++ The default filename for the image is preset in text box (2). You can
+change the name in this box.
++ Inside oval (3) are the radio buttons for selecting the image format.
++ For the 'PNG' format, you can change the DPI, which sets the resolution of
+the image. The other formats are vector formats, which do not require a
+resolution.
+
+Then click 'CANCEL' to cancel saving, or click 'SAVE PLOT' to save the
+current plots to a file.
\ No newline at end of file
diff --git a/documentation/04 _ Search SOH n LOG.help.md b/documentation/12 _ Search SOH n LOG.help.md
similarity index 100%
rename from documentation/04 _ Search SOH n LOG.help.md
rename to documentation/12 _ Search SOH n LOG.help.md
diff --git a/documentation/99 _ test.md b/documentation/99 _ test.md
index 7ef0655b760ac6880ab28c7b87f54ad34c2bb4ae..84fbede232f89c3fc5c6e9c03a105021552adb20 100644
--- a/documentation/99 _ test.md	
+++ b/documentation/99 _ test.md	
@@ -39,7 +39,7 @@ printf("%s\n", syntaxHighlighting.doesItWork ? "Success!" : "Oof.");
 ^ This is a horizontal line
 
 v This is an image
-![An Image?](images/image.jpg)
+![An Image?](recreate_table_contents.png)
 
 ---
 Another horizontal line
diff --git a/documentation/images/save_plots/question_on_changing_black_mode.png b/documentation/images/save_plots/question_on_changing_black_mode.png
new file mode 100644
index 0000000000000000000000000000000000000000..7424afda3387e8cbcad71a7fba63903072d2f23d
Binary files /dev/null and b/documentation/images/save_plots/question_on_changing_black_mode.png differ
diff --git a/documentation/images/save_plots/save_button_soh.png b/documentation/images/save_plots/save_button_soh.png
new file mode 100644
index 0000000000000000000000000000000000000000..588e20ca07de4e9dfde974de414107bb855ac1c8
Binary files /dev/null and b/documentation/images/save_plots/save_button_soh.png differ
diff --git a/documentation/images/save_plots/save_button_tps.png b/documentation/images/save_plots/save_button_tps.png
new file mode 100644
index 0000000000000000000000000000000000000000..1bfe4977370d6b904ff3d63a79bb6a4fbfe67266
Binary files /dev/null and b/documentation/images/save_plots/save_button_tps.png differ
diff --git a/documentation/images/save_plots/save_button_wf.png b/documentation/images/save_plots/save_button_wf.png
new file mode 100644
index 0000000000000000000000000000000000000000..f65ac57c793dd9b43cfd4814e56604eb3f3f3c80
Binary files /dev/null and b/documentation/images/save_plots/save_button_wf.png differ
diff --git a/documentation/images/save_plots/save_file_dialog.png b/documentation/images/save_plots/save_file_dialog.png
new file mode 100644
index 0000000000000000000000000000000000000000..ddb40fe65456a44943792bd94933a88a64556111
Binary files /dev/null and b/documentation/images/save_plots/save_file_dialog.png differ
diff --git a/documentation/img.png b/documentation/img.png
deleted file mode 100644
index 5d8c5a2165cf11862b70318e57343665de6e1a77..0000000000000000000000000000000000000000
Binary files a/documentation/img.png and /dev/null differ
diff --git a/documentation/recreate_table_contents.png b/documentation/recreate_table_contents.png
new file mode 100644
index 0000000000000000000000000000000000000000..34ab02a858eb4da3d62325cff47e1bd56dc90186
Binary files /dev/null and b/documentation/recreate_table_contents.png differ
diff --git a/sohstationviewer/conf/constants.py b/sohstationviewer/conf/constants.py
index 8bd00e091e0c436c87c64027c626cfa716dab02f..d060a1f8a3ac0865a719cddd898f39a6d55dd97e 100644
--- a/sohstationviewer/conf/constants.py
+++ b/sohstationviewer/conf/constants.py
@@ -50,8 +50,11 @@ TABLE_CONTENTS = "01 _ Table of Contents.help.md"
 SEARCH_RESULTS = "Search Results.md"
 
 # the list of all color modes
-ALL_COLOR_MODES = {'B', 'W'}
+ALL_COLOR_MODES = {'B': 'black', 'W': 'white'}
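+# e.g. ALL_COLOR_MODES['B'] == 'black'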
 
+# List of image formats. PNG has to be first so that it lines up with the
+# DPI option in the Save Plot dialog
+IMG_FORMAT = ['PNG', 'PDF', 'EPS', 'SVG']
 # ================================================================= #
 #                      PLOTTING CONSTANT
 # ================================================================= #
diff --git a/sohstationviewer/controller/processing.py b/sohstationviewer/controller/processing.py
index 7eaa504cb3dca8afc1501aad5cd3aa0c6d4ee284..13715a429c60def6c1a4796b3acc3f6541f8abdc 100644
--- a/sohstationviewer/controller/processing.py
+++ b/sohstationviewer/controller/processing.py
@@ -139,7 +139,7 @@ def read_mseed_channels(tracking_box: QTextBrowser, list_of_dir: List[str],
                 spr_gr_1_chan_ids.update(ret[3])
     if not on_unittest:
         QApplication.restoreOverrideCursor()
-    return sorted(list(soh_chan_ids)), sorted(list(mass_pos_chan_ids)),\
+    return sorted(list(soh_chan_ids)), sorted(list(mass_pos_chan_ids)), \
         sorted(list(wf_chan_ids)), sorted(list(spr_gr_1_chan_ids))
 
 
diff --git a/sohstationviewer/controller/util.py b/sohstationviewer/controller/util.py
index 85c8203f7736126bd843d98012a0a82a9be40b2a..0e46a24ab1b673918022c15dacbacce654c11bcf 100644
--- a/sohstationviewer/controller/util.py
+++ b/sohstationviewer/controller/util.py
@@ -66,19 +66,20 @@ def display_tracking_info(tracking_box: QTextBrowser, text: str,
     msg = {'text': text}
     if type == LogType.ERROR:
         msg['color'] = 'white'
-        msg['bgcolor'] = '#e46269'
+        msg['bgcolor'] = '#c45259'
     elif type == LogType.WARNING:
-        msg['color'] = '#ffd966'
-        msg['bgcolor'] = 'orange'
+        msg['color'] = 'white'
+        msg['bgcolor'] = '#c4a347'
     else:
         msg['color'] = 'blue'
         msg['bgcolor'] = 'white'
     html_text = """<body>
-        <div style='color:%(color)s; background-color:%(bgcolor)s'>
-            %(text)s
+        <div style='color:%(color)s'>
+            <strong>%(text)s</strong>
         </div>
         </body>"""
     tracking_box.setHtml(html_text % msg)
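+    # Apply the background color to the whole tracking box via its
+    # stylesheet; the div above only sets the text color.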
+    tracking_box.setStyleSheet(f"background-color: {msg['bgcolor']}")
     # parent.update()
     tracking_box.repaint()
 
diff --git a/sohstationviewer/database/extract_data.py b/sohstationviewer/database/extract_data.py
index dcb518d2ab21b8353fab8c1a9274c38d955e9d3d..cf0ab6208f841629f618bd28260d617c2aa15fd2 100755
--- a/sohstationviewer/database/extract_data.py
+++ b/sohstationviewer/database/extract_data.py
@@ -74,6 +74,16 @@ def get_chan_plot_info(org_chan_id: str, data_type: str,
     return chan_db_info[0]
 
 
+def get_convert_factor(chan_id, data_type):
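+    """
+    Get the conversion factor for the given channel and data type from the
+        database.
+
+    :param chan_id: ID of the channel
+    :param data_type: data type of the data set
+    :return: the conversion factor, or None if no match is found
+    """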
+    sql = f"SELECT convertFactor FROM Channels WHERE channel='{chan_id}' " \
+          f"AND dataType='{data_type}'"
+    ret = execute_db(sql)
+    if ret:
+        return ret[0][0]
+    else:
+        return None
+
+
 def get_seismic_chan_label(chan_id):
     """
     Get label for chan_id in which data stream can use chan_id for label while
diff --git a/sohstationviewer/model/general_data/__init__.py b/sohstationviewer/model/general_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sohstationviewer/model/general_data/data_structures.MD b/sohstationviewer/model/general_data/data_structures.MD
new file mode 100644
index 0000000000000000000000000000000000000000..f9433a985e680bf75d1e1b22579eafc74d4e6b47
--- /dev/null
+++ b/sohstationviewer/model/general_data/data_structures.MD
@@ -0,0 +1,44 @@
+## Log data:
+Info from log channels, SOH messages, and text files, kept in a dict:
+{'TEXT': [str,], key: {chan_id: [str,],},}
+Here 'TEXT' is the chan_id given by sohview for text-only files that have
+no station or channel associated with them.
+Note: log_data for RT130's dataset has only one channel: SOH
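+
+A minimal sketch of the shape (station and messages are hypothetical):
+{'TEXT': ['content of a text-only file'],
+ 'STA01': {'SOH': ['soh message 1', 'soh message 2']}}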
+
+## data_dict:
+{set_key: {
+    chan_id (str): {
+        'file_path' (str): path of file to keep track of file changes in MSeedReader
+        'chanID' (str): name of channel
+        'samplerate' (float): Sampling rate of the data
+        'startTmEpoch' (float): start epoch time of channel
+        'endTmEpoch' (float): end epoch time of channel
+        'size' (int): size of channel data
+        'tracesInfo': [{
+            'startTmEpoch': Start epoch time of the trace - float
+            'endTmEpoch': End epoch time of the trace - float
+            'times': time of channel's trace: List[float] in mseed_reader but changed to ndarray in combine_data()
+            'data': data of channel's trace: List[float] in mseed_reader but changed to ndarray in combine_data()
+            }]
+        'tps_data': list of lists of the mean of squares of every 5 minutes of data in each day
+        'times' (np.array): times that has been trimmed and down-sampled for plotting
+        'data' (np.array): data that has been trimmed and down-sampled for plotting
+        'chan_db_info' (dict): the plotting parameters retrieved from the
+            database for this channel
+        'ax' (matplotlib.axes.Axes): axes to draw the channel in PlottingWidget
+        ax_wf (matplotlib.axes.Axes): axes to draw the channel in WaveformWidget
+    }
+}
+
+Both ax and ax_wf are used because mass position channels are plotted in both
+widgets, while SOH channels are plotted in PlottingWidget and waveform channels
+are plotted in WaveformWidget.
+tps_data is created in TimePowerSquaredWidget only and applies to waveform_data
+only.
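+
+As an illustrative sketch (station and channel names are hypothetical), the
+combined trace of a channel can be accessed as:
+data_dict['STA01']['VM1']['tracesInfo'][0]['data']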
+
+## tps_data: data that aren't separated into traces
+{set_key - str or (str, str): {
+    chan_id - str: {
+        times: np.array,
+        data: np.array,
+        }
+    }
+}
\ No newline at end of file
diff --git a/sohstationviewer/model/general_data/general_data.py b/sohstationviewer/model/general_data/general_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..405518eed4ef7f6c28bc951ae2d92614509967a3
--- /dev/null
+++ b/sohstationviewer/model/general_data/general_data.py
@@ -0,0 +1,404 @@
+from __future__ import annotations
+
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from typing import Optional, Union, List, Tuple, Dict
+
+from obspy import UTCDateTime
+
+from PySide2 import QtCore
+from PySide2 import QtWidgets
+
+from sohstationviewer.controller.util import display_tracking_info
+from sohstationviewer.view.plotting.gps_plot.gps_point import GPSPoint
+from sohstationviewer.view.util.enums import LogType
+from sohstationviewer.database.process_db import execute_db
+from sohstationviewer.model.general_data.general_data_helper import \
+    retrieve_data_time_from_data_dict, retrieve_gaps_from_data_dict, \
+    combine_data, sort_data, squash_gaps, apply_convert_factor_to_data_dict
+
+
+class ProcessingDataError(Exception):
+    def __init__(self, msg):
+        self.message = msg
+
+
+class ThreadStopped(Exception):
+    """
+    An exception that is raised when the user requests for the data loader
+    thread to be stopped.
+    """
+    def __init__(self, *args, **kwargs):
+        self.args = (args, kwargs)
+
+
+class GeneralData:
+    def __init__(self, data_type,
+                 tracking_box: Optional[QtWidgets.QTextBrowser] = None,
+                 is_multiplex: bool = False, folder: str = '.',
+                 list_of_rt130_paths: List[Path] = [],
+                 req_wf_chans: Union[List[str], List[int]] = [],
+                 req_soh_chans: List[str] = [],
+                 gap_minimum: float = None,
+                 read_start: Optional[float] = UTCDateTime(0).timestamp,
+                 read_end: Optional[float] = UTCDateTime().timestamp,
+                 include_mp123zne: bool = False,
+                 include_mp456uvw: bool = False,
+                 rt130_waveform_data_req: bool = False,
+                 creator_thread: Optional[QtCore.QThread] = None,
+                 notification_signal: Optional[QtCore.Signal] = None,
+                 pause_signal: Optional[QtCore.Signal] = None,
+                 on_unittest: bool = False,
+                 *args, **kwargs):
+        """
+        CHANGED FROM data_type_model.DataTypeModel.__init__:
+            + add self.is_multiplex, self.on_unittest, self.gap_minimum,
+                self.keys
+            + remove docstring for self.log_data, self.soh_data,
+                self.mass_pos_data,
+                self.waveform_data, self.gaps_by_key_chan,
+                self.stream_header_by_key_chan
+
+        Superclass for different data types to process data from data files
+
+        :param data_type: type of the object
+        :param tracking_box: widget to display tracking info
+        :param folder: path to the folder of data
+        :param list_of_rt130_paths: path to the folders of RT130 data
+        :param req_wf_chans: requested waveform channel list
+        :param req_soh_chans: requested SOH channel list
+        :param read_start: requested start time to read
+        :param read_end: requested end time to read
+        :param include_mp123zne: if mass position channels 1,2,3 are requested
+        :param include_mp456uvw: if mass position channels 4,5,6 are requested
+        :param rt130_waveform_data_req: flag for RT130 to read waveform data
+        :param creator_thread: the thread the current DataTypeModel instance is
+            being created in. If None, the DataTypeModel instance is being
+            created in the main thread
+        :param notification_signal: signal used to send notifications to the
+            main thread. Only not None when creator_thread is not None
+        :param pause_signal: signal used to notify the main thread that the
+            data loader is paused.
+        """
+        self.data_type = data_type
+        self.is_multiplex = is_multiplex
+        self.tracking_box = tracking_box
+        self.dir = folder
+        self.list_of_rt130_paths = list_of_rt130_paths
+        self.req_soh_chans = req_soh_chans
+        self.req_wf_chans = req_wf_chans
+        self.gap_minimum = gap_minimum
+        self.read_start = read_start
+        self.read_end = read_end
+        self.include_mp123zne = include_mp123zne
+        self.include_mp456uvw = include_mp456uvw
+        self.rt130_waveform_data_req = rt130_waveform_data_req
+        self.on_unittest = on_unittest
+
+        if creator_thread is None:
+            err_msg = (
+                'A signal is not None while running in main thread'
+            )
+            assert notification_signal is None, err_msg
+            assert pause_signal is None, err_msg
+            self.creator_thread = QtCore.QThread()
+        else:
+            self.creator_thread = creator_thread
+        self.notification_signal = notification_signal
+        self.pause_signal = pause_signal
+
+        """
+        processing_log: record the progress of processing
+        """
+        self.processing_log: List[Tuple[str, LogType]] = []
+        """
+        keys: list of all keys
+        """
+        self.keys = []
+
+        DataKey = Union[Tuple[str, str], str]
+
+        """
+        log_texts: dictionary of content of text files by filenames
+        """
+        self.log_texts: Dict[str, str] = {}
+        # Look for description in data_structures.MD
+        self.log_data = {'TEXT': []}  # noqa
+        self.waveform_data = {}
+        self.soh_data = {}
+        self.mass_pos_data = {}
+        """
+        data_time: time range of data sets:
+        """
+        self.data_time: Dict[DataKey, List[float]] = {}
+
+        """
+        The given data may include more than one data set, keyed by station_id
+        in mseed or (unit_id, exp_no) in reftek. The user is allowed to choose
+        which data set to display.
+        selected_key: str - key of the data set to be displayed
+        """
+        self.selected_key: Optional[str] = None
+
+        """
+        gaps: gaps info in dict:
+        """
+        self.gaps: Dict[DataKey, List[List[float]]] = {}
+
+        """
+        tmp_dir: dir to keep memmap files. Deleted when the object is deleted
+        """
+        self.tmp_dir_obj: TemporaryDirectory = TemporaryDirectory()
+        self.tmp_dir = self.tmp_dir_obj.name
+        if not on_unittest:
+            self.save_temp_data_folder_to_database()
+
+        self._pauser = QtCore.QSemaphore()
+        self.pause_response = None
+
+        self.gps_points: List[GPSPoint] = []
+
+    def read_folder(self, folder: str) -> None:
+        """
+        FROM data_type_model.DataTypeModel.read_folder
+        Read data from given folder
+        :param folder: path to folder to read data
+        Subclasses store the results in the instance's data dicts.
+        """
+        pass
+
+    def select_key(self) -> Union[str, Tuple[str, str]]:
+        """
+        FROM data_type_model.DataTypeModel.select_key
+        Get the key for the data set to process.
+        :return: key of the selected data set
+        """
+        pass
+
+    def processing_data(self):
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.processing_data
+        """
+
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.read_folder(self.dir)
+
+        self.selected_key = self.select_key()
+
+        self.fill_empty_data()
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.finalize_data()
+
+    def finalize_data(self):
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.finalize_data
+        This function should be called after all folders finish reading to
+            + Fill an empty dict into stations that have no data in the
+                data_dicts
+            + Sort all data traces in time order
+            + Combine traces in data and split at gaps > gap_minimum
+            + Apply convert_factor to avoid needing flags to prevent the
+                convert factor from being applied twice when plotting
+            + Check for requested channels that were not found
+            + Retrieve gaps from data_dicts
+            + Retrieve data_time from data_dicts
+            + Replace data_time default values that are invalid for plotting
+                with read_start, read_end.
+        """
+        if self.selected_key is None:
+            return
+
+        self.track_info("Finalizing...", LogType.INFO)
+
+        self.sort_all_data()
+        self.combine_all_data()
+        self.apply_convert_factor_to_data_dicts()
+        self.check_not_found_soh_channels()
+
+        self.retrieve_gaps_from_data_dicts()
+        self.retrieve_data_time_from_data_dicts()
+        for key in self.keys:
+            if key not in self.data_time.keys():
+                self.data_time[key] = [self.read_start, self.read_end]
+
+    def __del__(self):
+        # FROM data_type_model.Data_Type_Model.__del__
+        print("delete dataType Object")
+        try:
+            del self.tmp_dir_obj
+        except OSError as e:
+            self.track_info(
+                "Error deleting %s : %s" % (self.tmp_dir, e.strerror),
+                LogType.ERROR)
+            print("Error deleting %s : %s" % (self.tmp_dir, e.strerror))
+        print("finish deleting")
+
+    def track_info(self, text: str, type: LogType) -> None:
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.track_info:
+
+        Display tracking info in tracking_box.
+        Add all errors/warnings to processing_log.
+        :param text: message to display
+        :param type: LogType - type of message (error/warning/info)
+        """
+        # display_tracking_info updates a QtWidget, which can only be done in
+        # the main thread. So, if we are running in a background thread
+        # (i.e. self.creator_thread is not None), we need to use signal slot
+        # mechanism to ensure that display_tracking_info is run in the main
+        # thread.
+        if self.notification_signal is None:
+            display_tracking_info(self.tracking_box, text, type)
+        else:
+            self.notification_signal.emit(self.tracking_box, text, type)
+        if type != LogType.INFO:
+            self.processing_log.append((text, type))
+
+    def pause(self) -> None:
+        """
+        FROM data_type_model.Data_Type_Model.pause
+        Pause the thread this DataTypeModel instance is in. Works by trying
+        to acquire a semaphore that is not available, which causes the thread
+        to block.
+
+        Note: due to how this is implemented, each call to pause will require
+        a corresponding call to unpause. Thus, it is inadvisable to call this
+        method more than once.
+
+        Caution: not safe to call in the main thread. Unless a background
+        thread releases the semaphore, the whole program will freeze.
+        """
+        self._pauser.acquire()
+
+    @QtCore.Slot()
+    def unpause(self):
+        """
+        FROM data_type_model.Data_Type_Model.unpause
+        Unpause the thread this DataTypeModel instance is in. Works by
+        releasing the semaphore acquired in pause, which unblocks the
+        thread.
+
+        Caution: due to how this is implemented, if unpause is called before
+        pause, the thread will not be paused until another call to pause is
+        made. Also, like pause, each call to unpause must be matched by another
+        call to pause for everything to work.
+        """
+        self._pauser.release()
+
+    @QtCore.Slot()
+    def receive_pause_response(self, response: object):
+        """
+        FROM data_type_model.Data_Type_Model.receive_pause_response
+        Receive a response to a request made to another thread and unpause the
+        calling thread.
+
+        :param response: the response to the request made
+        :type response: object
+        """
+        self.pause_response = response
+        self.unpause()
+
+    @classmethod
+    def get_empty_instance(cls) -> GeneralData:
+        """
+        # FROM data_type_model.Data_Type_Model.get_empty_instance
+        Create an empty data object. Useful if a DataTypeModel instance is
+        needed, but it is undesirable to load a data set. Basically wraps
+        __new__().
+
+        :return: an empty data object
+        :rtype: DataTypeModel
+        """
+        return cls.__new__(cls)
+
+    def save_temp_data_folder_to_database(self):
+        # FROM
+        #    data_type_model.Data_Type_Model.save_temp_data_folder_to_database
+        execute_db(f'UPDATE PersistentData SET FieldValue="{self.tmp_dir}" '
+                   f'WHERE FieldName="tempDataDirectory"')
+
+    def check_not_found_soh_channels(self):
+        # FROM data_type_model.Data_Type_Model.check_not_found_soh_channels
+        all_chans_meet_req = (
+                list(self.soh_data[self.selected_key].keys()) +
+                list(self.mass_pos_data[self.selected_key].keys()) +
+                list(self.log_data[self.selected_key].keys()))
+
+        not_found_chans = [c for c in self.req_soh_chans
+                           if c not in all_chans_meet_req]
+        if not_found_chans:
+            msg = (f"No data found for the following channels: "
+                   f"{', '.join(not_found_chans)}")
+            self.processing_log.append((msg, LogType.WARNING))
+
+    def sort_all_data(self):
+        """
+        FROM data_type_model.Data_Type_Model.sort_all_data
+        Sort traces by startTmEpoch on all data: waveform_data, mass_pos_data,
+            soh_data.
+        Reftek's soh_data won't be sorted here. It has been sorted by time
+            because it is created from log data which is sorted in
+            prepare_soh_data_from_log_data()
+        """
+        sort_data(self.waveform_data[self.selected_key])
+        sort_data(self.mass_pos_data[self.selected_key])
+        try:
+            sort_data(self.soh_data[self.selected_key])
+        except KeyError:
+            # Reftek's SOH trace doesn't have startTmEpoch and
+            # actually soh_data consists of only one trace
+            pass
+
+    def combine_all_data(self):
+        combine_data(self.waveform_data[self.selected_key], self.gap_minimum)
+        combine_data(self.mass_pos_data[self.selected_key], self.gap_minimum)
+        try:
+            combine_data(self.soh_data[self.selected_key], self.gap_minimum)
+        except KeyError:
+            # Reftek's SOH trace doesn't have startTmEpoch and
+            # actually soh_data consists of only one trace
+            pass
+
+    def retrieve_gaps_from_data_dicts(self):
+        """
+        Get gaps from each data_dict, then squash all related gaps
+        """
+        retrieve_gaps_from_data_dict(self.soh_data, self.gaps)
+        retrieve_gaps_from_data_dict(self.mass_pos_data, self.gaps)
+        retrieve_gaps_from_data_dict(self.waveform_data, self.gaps)
+        for sta_id in self.gaps:
+            self.gaps[sta_id] = squash_gaps(self.gaps[sta_id])
+
+    def retrieve_data_time_from_data_dicts(self):
+        """
+        Go through each data_dict to update the data_time to be
+            [min of startTmEpoch, max of endTmEpoch] for each station.
+        """
+        retrieve_data_time_from_data_dict(self.soh_data, self.data_time)
+        retrieve_data_time_from_data_dict(self.mass_pos_data, self.data_time)
+        retrieve_data_time_from_data_dict(self.waveform_data, self.data_time)
+
+    def fill_empty_data(self):
+        """
+        Fill an empty dict into any station that has no data in the data_dicts
+        """
+        for key in self.keys:
+            if key not in self.soh_data:
+                self.soh_data[key] = {}
+            if key not in self.waveform_data:
+                self.waveform_data[key] = {}
+            if key not in self.mass_pos_data:
+                self.mass_pos_data[key] = {}
+            if key not in self.log_data:
+                self.log_data[key] = {}
+
+    def apply_convert_factor_to_data_dicts(self):
+        """
+        Apply convert_factor here to avoid needing flags to prevent the
+            convert factor from being applied twice when plotting
+        """
+        apply_convert_factor_to_data_dict(self.soh_data, self.data_type)
+        apply_convert_factor_to_data_dict(self.mass_pos_data, self.data_type)
+        apply_convert_factor_to_data_dict(self.waveform_data, self.data_type)
diff --git a/sohstationviewer/model/general_data/general_data_helper.py b/sohstationviewer/model/general_data/general_data_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..d859dbb39f562fa72e42febaf0f40531a37395b2
--- /dev/null
+++ b/sohstationviewer/model/general_data/general_data_helper.py
@@ -0,0 +1,184 @@
+from typing import List, Dict, Optional
+import numpy as np
+from sohstationviewer.database.extract_data import get_convert_factor
+
+
+def _check_related_gaps(min1: float, max1: float,
+                        min2: float, max2: float,
+                        index: int, checked_indexes: List[int]):
+    """
+    FROM handling_data.check_related_gaps
+
+    Check if the passed ranges overlap each other and add the index to
+        checked_indexes.
+
+    :param min1: start of range 1
+    :param max1: end of range 1
+    :param min2: start of range 2
+    :param max2: end of range 2
+    :param index: index of gap being checked
+    :param checked_indexes: list of gaps that have been checked
+
+    :return: True if the two ranges overlap each other, False otherwise
+    """
+    if ((min1 <= min2 <= max1) or (min1 <= max2 <= max1)
+            or (min2 <= min1 <= max2) or (min2 <= max1 <= max2)):
+        # ranges [min1, max1] and [min2, max2] partly overlap each other
+        checked_indexes.append(index)
+        return True
+    else:
+        return False
+
+
+def squash_gaps(gaps: List[List[float]]) -> List[List[float]]:
+    """
+    FROM handling_data.squash_gaps
+
+    Compress gaps from different channels whose time ranges are related to
+    each other into single gaps with the outside boundary (min start, max end),
+    or (max start, min end) in case of overlap.
+    :param gaps: [[float, float],], [[float, float],] -
+        list of gaps of multiple channels: [[start, end],], [[start, end],]
+    :return: squashed_gaps: [[float, float],] - all related gaps are squashed
+        extending to the outside start and end
+        [[min start, max end], [max start, min end]]
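+
+    Example (hypothetical values): [[5., 9.], [8., 12.], [20., 22.]] squashes
+        to [[5., 12.], [20., 22.]] because the first two gaps overlap.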
+
+    """
+    gaps = sorted(gaps, key=lambda x: x[0])
+    squashed_gaps = []
+    checked_indexes = []
+
+    for idx, g in enumerate(gaps):
+        if idx in checked_indexes:
+            continue
+        squashed_gaps.append(g)
+        checked_indexes.append(idx)
+        overlap = g[0] >= g[1]
+        for idx_, g_ in enumerate(gaps):
+            if idx_ in checked_indexes:
+                continue
+            if not overlap:
+                if g_[0] >= g_[1]:
+                    continue
+                if _check_related_gaps(g[0], g[1], g_[0], g_[1],
+                                       idx_, checked_indexes):
+                    squashed_gaps[-1][0] = min(g[0], g_[0])
+                    squashed_gaps[-1][1] = max(g[1], g_[1])
+                else:
+                    break
+            else:
+                if g_[0] < g_[1]:
+                    continue
+                if _check_related_gaps(g[1], g[0], g_[1], g_[0],
+                                       idx_, checked_indexes):
+                    squashed_gaps[-1][0] = max(g[0], g_[0])
+                    squashed_gaps[-1][1] = min(g[1], g_[1])
+
+    return squashed_gaps
+
+
+def sort_data(sta_data_dict: Dict) -> None:
+    """
+    FROM handling_data.sort_data
+
+    Sort data in 'tracesInfo' of each channel in 'startTmEpoch' order
+    :param sta_data_dict: data of a station
+    """
+    for chan_id in sta_data_dict:
+        traces_info = sta_data_dict[chan_id]['tracesInfo']
+        sta_data_dict[chan_id]['tracesInfo'] = sorted(
+            traces_info, key=lambda i: i['startTmEpoch'])
+
+
+def retrieve_data_time_from_data_dict(
+        data_dict: Dict, data_time: Dict[str, List[float]]) -> None:
+    """
+    Go through each channel in each station to get data_time for each
+        station, which is [min of startTmEpoch, max of endTmEpoch] among
+        the station's channels.
+
+    :param data_dict: the given data_dict
+    :param data_time: data by sta_id
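+
+    Example (hypothetical times): two channels spanning [10., 50.] and
+        [20., 80.] give data_time[sta_id] == [10., 80.].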
+    """
+    for sta_id in data_dict.keys():
+        for c in data_dict[sta_id]:
+            dtime = [data_dict[sta_id][c]['startTmEpoch'],
+                     data_dict[sta_id][c]['endTmEpoch']]
+
+            if sta_id in data_time.keys():
+                data_time[sta_id][0] = min(data_time[sta_id][0], dtime[0])
+                data_time[sta_id][1] = max(data_time[sta_id][1], dtime[1])
+            else:
+                data_time[sta_id] = dtime
+
+
+def retrieve_gaps_from_data_dict(data_dict: Dict,
+                                 gaps: Dict[str, List[List[float]]]) -> None:
+    """
+    Create each station's gaps by adding all gaps from all channels
+
+    :param data_dict: the given data_dict
+    :param gaps: gaps list by key
+    """
+    for key in data_dict.keys():
+        if key not in gaps:
+            gaps[key] = []
+        for c in data_dict[key].keys():
+            cgaps = data_dict[key][c]['gaps']
+            if cgaps != []:
+                gaps[key] += cgaps
+
+
+def combine_data(station_data_dict: Dict, gap_minimum: Optional[float]) \
+        -> None:
+    """
+    Traverse the traces in each channel and add a gap to the gap list if
+        delta >= gap_minimum, where delta is the distance between
+        contiguous traces.
+    Combine sorted data using concatenate, which also changes data to ndarray,
+        and update startTmEpoch and endTmEpoch.
+
+    :param station_data_dict: dict of data of a station
+    :param gap_minimum: minimum length of gaps to be detected
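+
+    Example (hypothetical times): with gap_minimum=10, a trace ending at
+        epoch 100 followed by a trace starting at epoch 130 adds the gap
+        [100, 130] to the channel's 'gaps' list.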
+    """
+    for chan_id in station_data_dict:
+        channel = station_data_dict[chan_id]
+        traces_info = channel['tracesInfo']
+
+        for idx in range(len(traces_info) - 1):
+            curr_end_tm = traces_info[idx]['endTmEpoch']
+            next_start_tm = traces_info[idx+1]['startTmEpoch']
+            delta = abs(curr_end_tm - next_start_tm)
+            if gap_minimum is not None and delta >= gap_minimum:
+                # add gap
+                gap = [curr_end_tm, next_start_tm]
+                station_data_dict[chan_id]['gaps'].append(gap)
+        channel['startTmEpoch'] = min([tr['startTmEpoch']
+                                       for tr in traces_info])
+        channel['endTmEpoch'] = max([tr['endTmEpoch'] for tr in traces_info])
+
+        data_list = [tr['data'] for tr in traces_info]
+        times_list = [tr['times'] for tr in traces_info]
+        channel['tracesInfo'] = [{
+            'startTmEpoch': channel['startTmEpoch'],
+            'endTmEpoch': channel['endTmEpoch'],
+            'data': np.concatenate(data_list),
+            'times': np.concatenate(times_list)
+        }]
+
+
+def apply_convert_factor_to_data_dict(data_dict: Dict, data_type: str):
+    """
+    Traverse the traces in each channel to convert the data according to
+        the convert_factor retrieved from the DB
+
+    :param data_dict: dict of data
+    :param data_type: type of data
+    """
+    for key in data_dict:
+        for chan_id in data_dict[key]:
+            channel = data_dict[key][chan_id]
+            convert_factor = get_convert_factor(chan_id, data_type)
+            if convert_factor is not None and convert_factor != 1:
+                for tr in channel['tracesInfo']:
+                    tr['data'] = convert_factor * tr['data']
diff --git a/sohstationviewer/model/mseed/read_mseed_experiment/mseed_reader.py b/sohstationviewer/model/mseed/read_mseed_experiment/mseed_reader.py
deleted file mode 100644
index 120c30965fa4c24e6457413d6395dd913ba0246e..0000000000000000000000000000000000000000
--- a/sohstationviewer/model/mseed/read_mseed_experiment/mseed_reader.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from typing import BinaryIO
-import obspy
-from record_reader import RecordReader
-
-
-class MSeedReader:
-    def __init__(self, file: BinaryIO) -> None:
-        self.file = file
-
-    def read(self):
-        trace = []
-        while 1:
-            # We know that end of file is reached when read() returns an empty
-            # string.
-            is_eof = (self.file.read(1) == b'')
-            if is_eof:
-                break
-            # We need to move the file pointer back to its position after we
-            # do the end of file check. Otherwise, we would be off by one
-            # byte for all the reads afterward.
-            self.file.seek(-1, 1)
-
-            # We save the start of the current record so that after we are
-            # done reading the record, we can move back. This makes moving
-            # to the next record a lot easier, seeing as we can simply move
-            # the file pointer a distance the size of the current record.
-            current_record_start = self.file.tell()
-
-            reader = RecordReader(self.file)
-            trace.append(reader.get_first_data_point())
-            # sample_count = reader.record_metadata.sample_count
-            # sample_rate = reader.record_metadata.sample_rate
-            # record_time_taken = sample_count / sample_rate
-            # record_end_time = (reader.record_metadata.start_time +
-            #                    record_time_taken)
-
-            # MSEED stores the size of a data record as an exponent of a
-            # power of two, so we have to convert that to actual size before
-            # doing anything else.
-            record_length_exp = reader.header_unpacker.unpack(
-                'B', reader.blockette_1000.record_length
-            )[0]
-            record_size = 2 ** record_length_exp
-
-            self.file.seek(current_record_start)
-            self.file.seek(record_size, 1)
-
-
-if __name__ == '__main__':
-    # numpy.set_printoptions(threshold=sys.maxsize)
-    file_path = '/Users/ldam/Documents/GIT/sohstationviewer/tests/test_data/' \
-                'Q330_mixed_traces/XX-3203_4-20221222183011'
-    file = open(file_path, 'rb')
-    stream = obspy.read(file_path)
-    MSeedReader(file).read()
diff --git a/sohstationviewer/model/mseed_data/__init__.py b/sohstationviewer/model/mseed_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sohstationviewer/model/mseed/read_mseed_experiment/decode_mseed.py b/sohstationviewer/model/mseed_data/decode_mseed.py
similarity index 100%
rename from sohstationviewer/model/mseed/read_mseed_experiment/decode_mseed.py
rename to sohstationviewer/model/mseed_data/decode_mseed.py
diff --git a/sohstationviewer/model/mseed_data/mseed.py b/sohstationviewer/model/mseed_data/mseed.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd3641a8941a95a20c3aa51547caaf26e23ce61b
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed.py
@@ -0,0 +1,186 @@
+"""
+MSeed object to hold and process MSeed data
+"""
+import os
+import re
+import traceback
+from pathlib import Path
+from typing import Dict, List, Optional
+
+from sohstationviewer.controller.util import validate_file, validate_dir
+from sohstationviewer.model.mseed_data.mseed_reader import MSeedReader
+from sohstationviewer.model.general_data.general_data import \
+    GeneralData, ThreadStopped, ProcessingDataError
+from sohstationviewer.view.util.enums import LogType
+
+from sohstationviewer.model.mseed_data.mseed_helper import \
+    retrieve_nets_from_data_dict, read_text
+from sohstationviewer.model.mseed_data.record_reader_helper import \
+    MSeedReadError
+
+
+class MSeed(GeneralData):
+    """
+    Read and process mseed files into an object whose properties can be used
+    to plot SOH data, mass position data, waveform data and gaps
+    """
+
+    def __init__(self, *args, **kwargs):
+        # FROM mseed.mseed.MSEED.__init__
+        super().__init__(*args, **kwargs)
+        self.nets_by_sta: Dict[str, List[str]] = {}
+        self.processing_data()
+
+    def finalize_data(self):
+        """
+        CHANGED FROM mseed.mseed.MSEED.finalize_data
+
+        This function should be called after all folders finish reading to
+            + distribute log texts to stations
+            + get nets_by_sta from the data dicts
+            + other tasks in super().finalize_data()
+
+        """
+        self.distribute_log_text_to_station()
+        self.retrieve_nets_from_data_dicts()
+        super().finalize_data()
+
+    def read_folder(self, folder: str) -> None:
+        """
+        CHANGED FROM mseed.mseed.MSEED.read_folder
+
+        Read data streams for soh, mass position and waveform. The data read
+        is stored in this object's data dicts (soh_data, mass_pos_data,
+        waveform_data, log_data) rather than returned.
+
+        :param folder: absolute path to data set folder
+        """
+        if not os.path.isdir(folder):
+            raise ProcessingDataError(f"Path '{folder}' doesn't exist")
+        count = 0
+
+        total = sum([len(files) for _, _, files in os.walk(folder)])
+        invalid_blockettes = False
+        not_mseed_files = []
+        for path, sub_dirs, files in os.walk(folder):
+            try:
+                validate_dir(path)
+            except Exception as e:
+                # skip Information folder
+                self.track_info(str(e), LogType.WARNING)
+                continue
+            for file_name in files:
+
+                if self.creator_thread.isInterruptionRequested():
+                    raise ThreadStopped()
+
+                path2file = Path(path).joinpath(file_name)
+
+                if not validate_file(path2file, file_name):
+                    continue
+                count += 1
+                if count % 10 == 0:
+                    self.track_info(
+                        f'Read {count} files/{total}', LogType.INFO)
+                log_text = read_text(path2file)
+                if log_text is not None:
+                    self.log_texts[path2file] = log_text
+                    continue
+                reader = MSeedReader(
+                    path2file,
+                    read_start=self.read_start,
+                    read_end=self.read_end,
+                    is_multiplex=self.is_multiplex,
+                    req_soh_chans=self.req_soh_chans,
+                    req_wf_chans=self.req_wf_chans,
+                    include_mp123zne=self.include_mp123zne,
+                    include_mp456uvw=self.include_mp456uvw,
+                    soh_data=self.soh_data,
+                    mass_pos_data=self.mass_pos_data,
+                    waveform_data=self.waveform_data,
+                    log_data=self.log_data,
+                    gap_minimum=self.gap_minimum)
+                try:
+                    reader.read()
+                    invalid_blockettes = (invalid_blockettes
+                                          or reader.invalid_blockettes)
+                except MSeedReadError:
+                    not_mseed_files.append(file_name)
+                except Exception:
+                    fmt = traceback.format_exc()
+                    self.track_info(f"Skipping file {path2file}; it can't "
+                                    f"be read due to error: {str(fmt)}",
+                                    LogType.WARNING)
+        if not_mseed_files:
+            self.track_info(
+                f"Not MSeed files: {not_mseed_files}", LogType.WARNING)
+        if invalid_blockettes:
+            # This check ensures the message is only printed once
+            print("We currently only handle blockettes 500, 1000,"
+                  " and 1001.")
+        self.track_info(
+            f'Skipped {total - count} invalid files.', LogType.INFO)
+
+    def retrieve_nets_from_data_dicts(self):
+        """
+        Going through stations of each data_dict to get all network codes found
+            in all channel of a station to add to nets_by_station.
+        """
+        retrieve_nets_from_data_dict(self.soh_data, self.nets_by_sta)
+        retrieve_nets_from_data_dict(self.mass_pos_data, self.nets_by_sta)
+        retrieve_nets_from_data_dict(self.waveform_data, self.nets_by_sta)
+
+    def select_key(self) -> Optional[str]:
+        """
+        CHANGED FROM mseed.mseed.MSEED:
+            + get sta_ids from self.keys
+            + add condition if not on_unittest to create unittest for mseed
+
+        :return selected_sta_id: the selected station id from the available
+            keys.
+            + If there is only one station id, return it.
+            + If there is more than one, show all ids, let user choose one to
+                return.
+        """
+        self.keys = sorted(list(set(
+            list(self.soh_data.keys()) +
+            list(self.mass_pos_data.keys()) +
+            list(self.waveform_data.keys()) +
+            [k for k in list(self.log_data.keys()) if k != 'TEXT']
+        )))
+        sta_ids = self.keys
+
+        if len(sta_ids) == 0:
+            return
+
+        selected_sta_id = sta_ids[0]
+        if not self.on_unittest and len(sta_ids) > 1:
+            msg = ("There are more than one stations in the given data.\n"
+                   "Please select one to display")
+            self.pause_signal.emit(msg, sta_ids)
+            self.pause()
+            selected_sta_id = sta_ids[self.pause_response]
+
+        self.track_info(f'Select Station {selected_sta_id}', LogType.INFO)
+        return selected_sta_id
+
+    def distribute_log_text_to_station(self):
+        """
+        Loop through paths to text files to look for station id in the path.
+            + If there is station id in the path, add the content to the
+                station id with channel 'TXT'.
+            + If the station id is not in the path, add the content to the
+                key 'TEXT', which means the station for these texts is unknown.
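+        For example (hypothetical path), '/data/STA01/log1.txt' is assigned
+            to station 'STA01' when 'STA01' is one of self.keys.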
+        """
+        for path2file in self.log_texts:
+            try:
+                file_parts = re.split(rf"{os.sep}|\.", path2file.as_posix())
+                sta = [s for s in self.keys if s in file_parts][0]
+            except IndexError:
+                self.log_data['TEXT'].append(self.log_texts[path2file])
+                continue
+            if 'TXT' not in self.log_data[sta]:
+                self.log_data[sta]['TXT'] = []
+            self.log_data[sta]['TXT'].append(self.log_texts[path2file])
diff --git a/sohstationviewer/model/mseed_data/mseed_helper.py b/sohstationviewer/model/mseed_data/mseed_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..32d237e2ec5a3dc353458691ff4abe5381d33a46
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed_helper.py
@@ -0,0 +1,53 @@
+# Functions changed from handling_data's functions
+import os
+from pathlib import Path
+from typing import Dict, List, Optional
+
+
+def retrieve_nets_from_data_dict(data_dict: Dict,
+                                 nets_by_sta: Dict[str, List[str]]) -> None:
+    """
+    Retrieve nets by sta_id from the given data_dict.
+
+    :param data_dict: dict of data by station
+    :param nets_by_sta: nets list by sta_id
+    """
+    for sta_id in data_dict.keys():
+        if sta_id not in nets_by_sta:
+            nets_by_sta[sta_id] = set()
+        for c in data_dict[sta_id]:
+            nets_by_sta[sta_id].update(
+                data_dict[sta_id][c]['nets'])
+
+
+def read_text(path2file: Path) -> Optional[str]:
+    """
+    CHANGED FROM handling_data.read_text:
+        + Don't need to check binary because UnicodeDecodeError caught means
+            the file is binary
+
+    Read a text file and return its content as a log string.
+        + Return None if the file isn't a text file (UnicodeDecodeError)
+        + Remove empty lines in content
+    :param path2file: absolute path to text file
+    :return: the formatted log string, or None if the file is binary
+    """
+    try:
+        with open(path2file, 'r') as file:
+            content = file.read().strip()
+    except UnicodeDecodeError:
+        return
+
+    if content != '':
+        # skip empty lines
+        no_empty_line_list = [
+            line for line in content.splitlines() if line]
+        no_empty_line_content = os.linesep.join(no_empty_line_list)
+
+        log_text = "\n\n** STATE OF HEALTH: %s\n" % path2file.name
+        log_text += no_empty_line_content
+    else:
+        log_text = ''
+    return log_text
diff --git a/sohstationviewer/model/mseed_data/mseed_reader.py b/sohstationviewer/model/mseed_data/mseed_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..24c153c2aec65978cd48c3282b6e76cc4f79db8b
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed_reader.py
@@ -0,0 +1,283 @@
+from numbers import Real
+from typing import BinaryIO, Optional, Dict, Union, List
+from pathlib import Path
+from obspy import UTCDateTime
+
+from sohstationviewer.model.mseed_data.record_reader import RecordReader
+from sohstationviewer.model.mseed_data.record_reader_helper import \
+    RecordMetadata
+
+from sohstationviewer.model.handling_data import check_chan
+
+
+class MSeedReader:
+    def __init__(self, file_path: Path,
+                 read_start: float = UTCDateTime(0).timestamp,
+                 read_end: float = UTCDateTime().timestamp,
+                 is_multiplex: Optional[bool] = None,
+                 req_soh_chans: List[str] = [],
+                 req_wf_chans: List[str] = [],
+                 include_mp123zne: bool = False,
+                 include_mp456uvw: bool = False,
+                 soh_data: Dict = {},
+                 mass_pos_data: Dict = {},
+                 waveform_data: Dict = {},
+                 log_data: Dict[str, Union[List[str],
+                                           Dict[str, List[str]]]] = {},
+                 gap_minimum: Optional[float] = None
+                 ) -> None:
+        """
+        The purpose of this class is to read data from the given file and add
+            it to the given data dicts if the requirements are met.
+        If data_type is not multiplex, all records of a file belong to the
+            same channel; the info found in the first record can
+            be used to decide whether to keep reading if that record doesn't
+            meet the channel's requirements.
+        If data_type is multiplex, every record has to be examined.
+        The definitions of all data_dicts can be found in data_structures.MD
+
+        :param file_path: Absolute path to data file
+        :param read_start: time that is required to start reading
+        :param read_end: time that is required to end reading
+        :param is_multiplex: multiplex status of the file's data_type
+        :param req_soh_chans: requested SOH channel list
+        :param req_wf_chans: requested waveform channel list
+        :param include_mp123zne: if mass position channels 1,2,3 are requested
+        :param include_mp456uvw: if mass position channels 4,5,6 are requested
+        :param soh_data: data dict of SOH
+        :param mass_pos_data: data dict of mass position
+        :param waveform_data: data dict of waveform
+        :param log_data: data dict of log_data
+        :param gap_minimum: minimum length of gaps required to detect
+            from record
+        """
+        self.read_start = read_start
+        self.read_end = read_end
+        self.is_multiplex = is_multiplex
+        self.gap_minimum = gap_minimum
+        self.req_soh_chans = req_soh_chans
+        self.req_wf_chans = req_wf_chans
+        self.include_mp123zne = include_mp123zne
+        self.include_mp456uvw = include_mp456uvw
+        self.soh_data = soh_data
+        self.mass_pos_data = mass_pos_data
+        self.waveform_data = waveform_data
+        self.log_data = log_data
+        self.file_path = file_path
+        self.file: BinaryIO = open(file_path, 'rb')
+
+        self.invalid_blockettes = False
+
+    def get_data_dict(self, metadata: RecordMetadata) -> Optional[Dict]:
+        """
+        Find which data_dict to add data to from req_soh_chans, req_wf_chans,
+            include_mp123zne, include_mp456uvw, samplerate
+        :param metadata: record's metadata
+        :return: data_dict to add data
+        """
+        chan_id = metadata.channel
+        sample_rate = metadata.sample_rate
+        chan_type = check_chan(chan_id, self.req_soh_chans, self.req_wf_chans,
+                               self.include_mp123zne, self.include_mp456uvw)
+        if chan_type == 'SOH':
+            if self.req_soh_chans == [] and sample_rate > 1:
+                # If 'All chans' is selected for SOH, channel with samplerate>1
+                # will be skipped by default to improve performance.
+                # Note: If user intentionally added channels with samplerate>1
+                # using SOH Channel Preferences dialog, they are still read.
+                return
+            return self.soh_data
+        if chan_type == 'MP':
+            return self.mass_pos_data
+        if chan_type == 'WF':
+            return self.waveform_data
+
+    def check_time(self, record: RecordReader) -> bool:
+        """
+        Check if the record's time range overlaps the time range the user
+            requested to read
+
+        :param record: the record read from file
+        :return: True when the record's time satisfies the requirement
+        """
+        meta = record.record_metadata
+        if self.read_start > meta.end_time or self.read_end < meta.start_time:
+            return False
+        return True
+
+    def append_log(self, record: RecordReader) -> None:
+        """
+        Add all text info retrieved from record to log_data
+
+        :param record: the record read from file
+        """
+        logs = [record.ascii_text] + record.other_blockettes
+        log_str = "===========\n".join(logs)
+        if log_str == "":
+            return
+        meta = record.record_metadata
+        log_str = "\n\nSTATE OF HEALTH: " + \
+                  f"From:{meta.start_time}  To:{meta.end_time}\n" + log_str
+        sta_id = meta.station
+        chan_id = meta.channel
+        if sta_id not in self.log_data.keys():
+            self.log_data[sta_id] = {}
+        if chan_id not in self.log_data[sta_id]:
+            self.log_data[sta_id][chan_id] = []
+        self.log_data[sta_id][chan_id].append(log_str)
+
+    def append_data(self, data_dict: dict,
+                    record: RecordReader,
+                    data_point: Real) -> None:
+        """
+        Append data point to the given data_dict
+
+        :param data_dict: the data dict to which the record's data is added
+        :param record: the record read from file
+        :param data_point: the first sample of the record frame
+        """
+        if data_point is None:
+            return
+        meta = record.record_metadata
+        sta_id = meta.station
+        if sta_id not in data_dict.keys():
+            data_dict[sta_id] = {}
+        station = data_dict[sta_id]
+        self.add_chan_data(station, meta, data_point)
+
+    def _add_new_trace(self, channel: Dict, metadata: RecordMetadata,
+                       data_point: Real) -> None:
+        """
+        Start a new trace in channel['tracesInfo'] with data_point as the
+            first data value and metadata's start_time as the first time
+            value
+
+        :param channel: dict of channel's info
+        :param metadata: record's meta data
+        :param data_point: the first sample of the record frame
+        """
+        channel['tracesInfo'].append({
+            'startTmEpoch': metadata.start_time,
+            'data': [data_point],
+            'times': [metadata.start_time]
+        })
+
+    def _append_trace(self, channel: Dict, metadata: RecordMetadata,
+                      data_point: Real) -> None:
+        """
+        Append data_point to the latest trace of channel['tracesInfo']
+
+        :param channel: dict of channel's info
+        :param metadata: record's meta data
+        :param data_point: the first sample of the record frame
+        """
+        channel['tracesInfo'][-1]['data'].append(data_point)
+        channel['tracesInfo'][-1]['times'].append(metadata.start_time)
+
+    def add_chan_data(self, station: dict, metadata: RecordMetadata,
+                      data_point: Real) -> None:
+        """
+        Add a new channel to the passed station if not there yet, then
+            append data_point to the channel's latest trace; a new trace
+            is started when the record comes from a different file.
+        If gap/overlap >= gap_minimum, add it to the channel's gaps list.
+
+        :param station: dict of chan by id of a station
+        :param metadata: an Object of metadata from the record
+        :param data_point: the first sample of the record frame
+        """
+        meta = metadata
+        chan_id = metadata.channel
+        if chan_id not in station.keys():
+            station[chan_id] = {
+                'file_path': self.file_path,
+                'chanID': chan_id,
+                'samplerate': meta.sample_rate,
+                'startTmEpoch': meta.start_time,
+                'endTmEpoch': meta.end_time,
+                'size': meta.sample_count,
+                'nets': {meta.network},
+                'gaps': [],
+                'tracesInfo': [{
+                    'startTmEpoch': meta.start_time,
+                    'endTmEpoch': meta.end_time,
+                    'data': [data_point],
+                    'times': [meta.start_time]
+                }]
+            }
+        else:
+            channel = station[chan_id]
+            record_start_time = meta.start_time
+            previous_end_time = channel['endTmEpoch']
+            delta = abs(record_start_time - previous_end_time)
+            if channel['file_path'] != self.file_path:
+                # Start new trace for each file to reorder trace and
+                # combine traces again later
+                channel['file_path'] = self.file_path
+                self._add_new_trace(channel, meta, data_point)
+            else:
+                if self.gap_minimum is not None and delta >= self.gap_minimum:
+                    gap = [previous_end_time, record_start_time]
+                    channel['gaps'].append(gap)
+                # appending data
+                self._append_trace(channel, meta, data_point)
+
+            channel['tracesInfo'][-1]['endTmEpoch'] = meta.end_time
+            # update channel's metadata
+            channel['endTmEpoch'] = meta.end_time
+            channel['size'] += meta.sample_count
+            channel['nets'].add(meta.network)
+
+    def get_ready_for_next_read(self, current_record_start: int,
+                                record: RecordReader):
+        """
+        Move the current position of the file to the start of the next
+            record.
+
+        :param current_record_start: the start position of the current record
+        :param record: the record that is reading
+        """
+        # MSEED stores the size of a data record as an exponent of a
+        # power of two, so we have to convert that to actual size before
+        # doing anything else.
+        record_length_exp = record.header_unpacker.unpack(
+            'B', record.blockette_1000.record_length
+        )[0]
+        record_size = 2 ** record_length_exp
+
+        self.file.seek(current_record_start + record_size)
+
+    def read(self):
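+        """
+        Read the file record by record until EOF; for each record in the
+            requested time range, add its first data point to the right
+            data dict and its text info to log_data, then close the file.
+        """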
+        while True:
+            # We know that end of file is reached when read() returns an empty
+            # string.
+            is_eof = (self.file.read(1) == b'')
+            if is_eof:
+                break
+            # We need to move the file pointer back to its position after we
+            # do the end of file check. Otherwise, we would be off by one
+            # byte for all the reads afterward.
+            self.file.seek(-1, 1)
+
+            # We save the start of the current record so that after we are
+            # done reading the record, we can move back. This makes moving
+            # to the next record a lot easier, seeing as we can simply move
+            # the file pointer a distance the size of the current record.
+            current_record_start = self.file.tell()
+
+            record = RecordReader(self.file)
+            if record.invalid_blockettes:
+                self.invalid_blockettes = True
+            if not self.check_time(record):
+                self.get_ready_for_next_read(current_record_start, record)
+                continue
+            data_dict = self.get_data_dict(record.record_metadata)
+            if data_dict is None:
+                if self.is_multiplex:
+                    self.get_ready_for_next_read(current_record_start, record)
+                    continue
+                else:
+                    break
+            first_data_point = record.get_first_data_point()
+            self.append_data(data_dict, record, first_data_point)
+            self.append_log(record)
+
+            self.get_ready_for_next_read(current_record_start, record)
+        self.file.close()
diff --git a/sohstationviewer/model/mseed/read_mseed_experiment/record_reader.py b/sohstationviewer/model/mseed_data/record_reader.py
similarity index 90%
rename from sohstationviewer/model/mseed/read_mseed_experiment/record_reader.py
rename to sohstationviewer/model/mseed_data/record_reader.py
index 5b3af30c01f0e938989fdd7d388164d2e82338d0..40db266dd7377510ea1ff5c173d266ae22f55403 100644
--- a/sohstationviewer/model/mseed/read_mseed_experiment/record_reader.py
+++ b/sohstationviewer/model/mseed_data/record_reader.py
@@ -4,11 +4,11 @@ from typing import BinaryIO, Optional, List
 
 from obspy import UTCDateTime
 
-from decode_mseed import (
+from sohstationviewer.model.mseed_data.decode_mseed import (
     decode_ieee_float, decode_ieee_double, decode_steim, decode_int16,
     decode_int24, decode_int32,
 )
-from mseed_helper import (
+from sohstationviewer.model.mseed_data.record_reader_helper import (
     FixedHeader, Blockette1000, get_data_endianness, Unpacker,
     get_record_metadata, get_header_endianness, RecordMetadata,
     EncodingFormat,
@@ -37,7 +37,8 @@ class RecordReader:
 
         self.data_unpacker: Unpacker = Unpacker()
         self.record_metadata: Optional[RecordMetadata] = None
-
+        self.invalid_blockettes = False
+        self.ascii_text: str = ''
         self.read_header()
 
     def read_header(self) -> None:
@@ -220,8 +221,7 @@ class RecordReader:
                 'H', next_blockette_type
             )[0]
             if next_blockette_type not in (500, 1000, 1001):
-                print('We currently only handle blockettes 500, 1000, and'
-                      '1001.')
+                self.invalid_blockettes = True
                 continue
             if next_blockette_type == 500:
                 self.read_blockette_500()
@@ -230,7 +230,27 @@ class RecordReader:
             elif next_blockette_type == 2000:
                 self.read_blockette_2000()
 
-    def get_first_data_point(self) -> Real:
+    def decode_ascii_data(self, data_start: int) -> None:
+        """
+        Read the ASCII string from the data portion of the record and
+            strip the padding
+
+        :param data_start: byte number where the data starts
+        """
+        # We want to read everything in the record if the encoding is
+        # ASCII.
+        record_length_exp = self.header_unpacker.unpack(
+            'B', self.blockette_1000.record_length
+        )[0]
+        record_size = 2 ** record_length_exp
+        data_block = self.file.read(record_size - data_start)
+        single_padding = b'\x00'.decode()
+        try:
+            self.ascii_text = data_block.decode().rstrip(single_padding)
+        except UnicodeDecodeError:
+            pass
+
+    def get_first_data_point(self) -> Optional[Real]:
         """
         Get the first data point of the current data record.
         :return: the first data point of the current data record, whose type is
@@ -251,17 +271,8 @@ class RecordReader:
         encoding_format = EncodingFormat(encoding_format)
 
         if encoding_format == EncodingFormat.ASCII:
-            # We want to read everything in the record if the encoding is
-            # ASCII.
-            record_length_exp = self.header_unpacker.unpack(
-                'B', self.blockette_1000.record_length
-            )[0]
-            record_size = 2 ** record_length_exp
-            # This name does not make much sense with what we are doing here,
-            # but it will have to do for now.
-            # The size of the record includes the header, so we have to account
-            # for that when grabbing the data.
-            first_data_point = self.file.read(record_size - data_start)
+            self.decode_ascii_data(data_start)
+            first_data_point = None
         else:
 
             # Currently, we are extracting only the first data point in each
diff --git a/sohstationviewer/model/mseed/read_mseed_experiment/mseed_helper.py b/sohstationviewer/model/mseed_data/record_reader_helper.py
similarity index 70%
rename from sohstationviewer/model/mseed/read_mseed_experiment/mseed_helper.py
rename to sohstationviewer/model/mseed_data/record_reader_helper.py
index 28f0c228b713cc14d9adbaf243a052f0995c0f63..c9fa6ace53751c1487fd34ed678fda5cec38c862 100644
--- a/sohstationviewer/model/mseed/read_mseed_experiment/mseed_helper.py
+++ b/sohstationviewer/model/mseed_data/record_reader_helper.py
@@ -5,6 +5,11 @@ from enum import Enum
 from obspy import UTCDateTime
 
 
+class MSeedReadError(Exception):
+    def __init__(self, msg):
+        super().__init__(msg)
+        self.message = msg
+
+
 class Unpacker:
     """
     A wrapper around struct.unpack() to unpack binary data without having to
@@ -79,7 +84,8 @@ class RecordMetadata:
     location: str
     channel: str
     network: str
-    start_time: UTCDateTime
+    start_time: float
+    end_time: float
     sample_count: int
     sample_rate: float
 
@@ -95,6 +101,21 @@ class EncodingFormat(Enum):
     STEIM_2 = 11
 
 
+def check_time_from_time_string(endian: str, time_string: bytes) -> bool:
+    """
+    Check whether the record start time bytes unpack with the given byte
+        order into a sane year and day of year.
+
+    :param endian: '>' for big-endian or '<' for little-endian
+    :param time_string: raw record start time bytes from the fixed header
+    :return: True if both the year and the day of year are in sane ranges
+    """
+    try:
+        record_start_time_tuple = struct.unpack(f'{endian}hhbbbbh',
+                                                time_string)
+    except struct.error:
+        raise MSeedReadError("Not an MSeed file.")
+    # libmseed uses 1900 to 2100 as the sane year range. We follow their
+    # example here.
+    year_is_good = (1900 <= record_start_time_tuple[0] <= 2100)
+    # The upper range is 366 to account for leap years.
+    day_is_good = (1 <= record_start_time_tuple[1] <= 366)
+    return year_is_good and day_is_good
+
+
 def get_header_endianness(header: FixedHeader):
     """
     Determine the endianness of the fixed header of a data record. Works by
@@ -117,15 +138,15 @@ def get_header_endianness(header: FixedHeader):
     endianness of header
     """
     record_start_time_string = header.record_start_time
-    record_start_time_tuple = struct.unpack('>hhbbbbh',
-                                            record_start_time_string)
-    # libmseed uses 1900 to 2100 as the sane year range. We follow their
-    # example here.
-    year_is_good = (1900 <= record_start_time_tuple[0] <= 2100)
-    # The upper range is 366 to account for leap years.
-    day_is_good = (1 <= record_start_time_tuple[1] <= 366)
-
-    endianness = 'big' if year_is_good and day_is_good else 'little'
+    good_time = check_time_from_time_string('>', record_start_time_string)
+    if good_time:
+        endianness = 'big'
+    else:
+        good_time = check_time_from_time_string('<', record_start_time_string)
+        if good_time:
+            endianness = 'little'
+        else:
+            raise MSeedReadError("Not an MSeed file.")
     return endianness
 
 
@@ -178,32 +199,41 @@ def get_record_metadata(header: FixedHeader, header_unpacker: Unpacker):
         needed so that the correct byte order can be used
     :return: the extract record metadata
     """
-    station = header.station.decode('utf-8').rstrip()
-    location = header.location.decode('utf-8').rstrip()
-    channel = header.channel.decode('utf-8').rstrip()
-    network = header.net_code.decode('utf-8').rstrip()
-
-    record_start_time_string = header.record_start_time
-    record_start_time_tuple = header_unpacker.unpack('HHBBBBH',
-                                                     record_start_time_string)
-    record_start_time = UTCDateTime(year=record_start_time_tuple[0],
-                                    julday=record_start_time_tuple[1],
-                                    hour=record_start_time_tuple[2],
-                                    minute=record_start_time_tuple[3],
-                                    second=record_start_time_tuple[4],
-                                    microsecond=record_start_time_tuple[
-                                                    6] * 100)
-
-    sample_count = header_unpacker.unpack('H', header.sample_count)[0]
-
-    sample_rate_factor = header_unpacker.unpack(
-        'h', header.sample_rate_factor
-    )[0]
-    sample_rate_multiplier = header_unpacker.unpack(
-        'h', header.sample_rate_multiplier
-    )[0]
+    try:
+        station = header.station.decode('utf-8').rstrip()
+        location = header.location.decode('utf-8').rstrip()
+        channel = header.channel.decode('utf-8').rstrip()
+        network = header.net_code.decode('utf-8').rstrip()
+
+        record_start_time_string = header.record_start_time
+        record_start_time_tuple = header_unpacker.unpack(
+            'HHBBBBH', record_start_time_string)
+        record_start_time = UTCDateTime(year=record_start_time_tuple[0],
+                                        julday=record_start_time_tuple[1],
+                                        hour=record_start_time_tuple[2],
+                                        minute=record_start_time_tuple[3],
+                                        second=record_start_time_tuple[4],
+                                        microsecond=record_start_time_tuple[
+                                                        6] * 100).timestamp
+
+        sample_count = header_unpacker.unpack('H', header.sample_count)[0]
+
+        sample_rate_factor = header_unpacker.unpack(
+            'h', header.sample_rate_factor
+        )[0]
+        sample_rate_multiplier = header_unpacker.unpack(
+            'h', header.sample_rate_multiplier
+        )[0]
+    except ValueError:
+        raise MSeedReadError("Not an MSeed file.")
     sample_rate = calculate_sample_rate(sample_rate_factor,
                                         sample_rate_multiplier)
+    if sample_rate == 0:
+        record_end_time = record_start_time
+    else:
+        record_time_taken = sample_count / sample_rate
+        record_end_time = record_start_time + record_time_taken
 
     return RecordMetadata(station, location, channel, network,
-                          record_start_time, sample_count, sample_rate)
+                          record_start_time, record_end_time,
+                          sample_count, sample_rate)
diff --git a/sohstationviewer/view/db_config/param_dialog.py b/sohstationviewer/view/db_config/param_dialog.py
index 2fc8c8ad99d312e01857c2d4514062aeb49b4e10..21ecf7bcca7316e30a6b5e7253d7f1ce19ef400b 100755
--- a/sohstationviewer/view/db_config/param_dialog.py
+++ b/sohstationviewer/view/db_config/param_dialog.py
@@ -47,7 +47,7 @@ class ParamDialog(UiDBInfoDialog):
         color_mode_label = QtWidgets.QLabel('Color mode:')
         color_selector = QComboBox()
         color_selector.insertItem(0, initial_color_mode)
-        other_color_modes = ALL_COLOR_MODES - {initial_color_mode}
+        other_color_modes = set(ALL_COLOR_MODES.keys()) - {initial_color_mode}
         color_selector.insertItems(1, other_color_modes)
         color_selector.setFixedWidth(100)
         color_selector.currentTextChanged.connect(self.on_color_mode_changed)
diff --git a/sohstationviewer/view/main_window.py b/sohstationviewer/view/main_window.py
index 324a558f42115fffafe9e4cfcd1eadbbfcd77593..688d1846098ef3aceefaa5474dc06c1522909a22 100755
--- a/sohstationviewer/view/main_window.py
+++ b/sohstationviewer/view/main_window.py
@@ -63,9 +63,17 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         """
         self.dir_names: List[Path] = []
         """
-        current_dir: str - the current main data directory
+        current_dir: the current main data directory
         """
-        self.current_dir = ''
+        self.current_dir: str = ''
+        """
+        save_plot_dir: directory to save plot
+        """
+        self.save_plot_dir: str = ''
+        """
+        save_plot_format: format to save plot
+        """
+        self.save_plot_format: str = 'SVG'
         """
         rt130_das_dict: dict by rt130 for data paths, so user can choose
             dasses to assign list of data paths to selected_rt130_paths
@@ -185,6 +193,10 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         self.validate_config()
         self.apply_config()
 
+    @QtCore.Slot()
+    def save_plot(self):
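+        """
+        Save the SOH plot in the main window to an image file.
+        """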
+        self.plotting_widget.save_plot('SOH-Plot')
+
     @QtCore.Slot()
     def open_data_type(self) -> None:
         """
@@ -492,13 +504,20 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         self.waveform_dlg.plotting_widget.clear()
         self.tps_dlg.plotting_widget.clear()
 
+    def cancel_loading(self):
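+        """
+        Notify the user that loading has been cancelled, either explicitly
+        or because of an invalid input.
+        """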
+        display_tracking_info(self.tracking_info_text_browser,
+                              "Loading cancelled",
+                              LogType.WARNING)
+
     @QtCore.Slot()
     def read_selected_files(self):
         """
         Read data from selected files/directories, process and plot channels
             read from those according to current options set on the GUI
         """
-
+        display_tracking_info(self.tracking_info_text_browser,
+                              "Loading started",
+                              LogType.INFO)
         self.clear_plots()
         start_tm_str = self.time_from_date_edit.date().toString(
             QtCore.Qt.ISODate)
@@ -508,6 +527,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         if self.end_tm <= self.start_tm:
             msg = "To Date must be greater than From Date."
             QtWidgets.QMessageBox.warning(self, "Wrong Date Given", msg)
+            self.cancel_loading()
             return
         self.info_list_widget.clear()
         is_working = (self.is_loading_data or self.is_plotting_soh or
@@ -526,6 +546,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
                 msg = "Minimum Gap must be a number."
                 QtWidgets.QMessageBox.warning(
                     self, "Invalid Minimum Gap request", msg)
+                self.cancel_loading()
                 return
         else:
             self.min_gap = None
@@ -543,6 +564,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             except Exception as e:
                 QtWidgets.QMessageBox.warning(
                     self, "Incorrect Wildcard", str(e))
+                self.cancel_loading()
                 return
 
         try:
@@ -560,6 +582,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             self.read_from_file_list()
         except Exception as e:
             QtWidgets.QMessageBox.warning(self, "Select directory", str(e))
+            self.cancel_loading()
             return
 
         dir_size = sum(get_dir_size(str(dir))[0] for dir in self.dir_names)
@@ -575,6 +598,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             data_too_big_dialog.setIcon(QMessageBox.Question)
             ret = data_too_big_dialog.exec_()
             if ret == QMessageBox.Abort:
+                self.cancel_loading()
                 return
 
         self.req_soh_chans = self.get_requested_soh_chan()
@@ -582,6 +606,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             self.req_wf_chans = self.get_requested_wf_chans()
         except Exception as e:
             QMessageBox.information(self, "Waveform Selection", str(e))
+            self.cancel_loading()
             return
 
         start_tm_str = self.time_from_date_edit.date().toString(
@@ -720,6 +745,10 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             return
         self.clear_plots()
         self.is_plotting_soh = True
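+        # Apply the currently selected color mode to all plotting widgets
+        # at plot time instead of immediately when the radio button toggles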
+        self.plotting_widget.set_colors(self.color_mode)
+        self.waveform_dlg.plotting_widget.set_colors(self.color_mode)
+        self.tps_dlg.plotting_widget.set_colors(self.color_mode)
+        self.gps_dialog.set_colors(self.color_mode)
 
         d_obj = self.data_object
 
@@ -842,6 +871,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         # current directory
         self.current_directory_changed.emit(path)
         self.current_dir = path
+        self.save_plot_dir = path
         execute_db(f'UPDATE PersistentData SET FieldValue="{path}" WHERE '
                    'FieldName="currentDirectory"')
         self.set_open_files_list_texts()
@@ -1060,10 +1090,6 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         if not checked:
             return
         self.color_mode = color_mode
-        self.plotting_widget.set_colors(color_mode)
-        self.waveform_dlg.plotting_widget.set_colors(color_mode)
-        self.tps_dlg.plotting_widget.set_colors(color_mode)
-        self.gps_dialog.set_colors(color_mode)
 
     @QtCore.Slot()
     def clear_file_search(self):
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py b/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
index 30e5abe461d93f8927ea9363023004150618e25c..002e0dd6fc613ba135b3df1a919a5894401e2583 100644
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
@@ -78,6 +78,7 @@ class PlottingAxes:
             labelbottom = False
         else:
             labelbottom = True
+            self.parent.plotting_bot -= 0.007       # space for ticks
         timestamp_bar.tick_params(which='major', length=7, width=2,
                                   direction='inout',
                                   colors=self.parent.display_color['basic'],
@@ -90,7 +91,8 @@ class PlottingAxes:
                                  fontweight='bold',
                                  fontsize=self.parent.font_size,
                                  rotation=0,
-                                 labelpad=constants.HOUR_TO_TMBAR_D,
+                                 labelpad=constants.HOUR_TO_TMBAR_D *
+                                 self.parent.ratio_w,
                                  ha='left',
                                  color=self.parent.display_color['basic'])
         # not show any y ticks
@@ -112,7 +114,8 @@ class PlottingAxes:
         timestamp_bar.set_xticks(times, minor=True)
         timestamp_bar.set_xticks(major_times)
         timestamp_bar.set_xticklabels(major_time_labels,
-                                      fontsize=self.parent.font_size + 2)
+                                      fontsize=self.parent.font_size +
+                                      2 * self.parent.ratio_w)
         timestamp_bar.set_xlim(self.parent.min_x, self.parent.max_x)
 
     def create_axes(self, plot_b, plot_h, has_min_max_lines=True):
@@ -221,7 +224,7 @@ class PlottingAxes:
                 rotation='horizontal',
                 transform=ax.transAxes,
                 color=color,
-                size=self.parent.font_size + 2
+                size=self.parent.font_size + 2 * self.parent.ratio_w
             )
 
         # set samples' total on right side
@@ -409,4 +412,4 @@ class PlottingAxes:
                       horizontalalignment='left',
                       transform=self.parent.timestamp_bar_top.transAxes,
                       color=self.parent.display_color['basic'],
-                      size=self.parent.font_size)
+                      size=self.parent.font_size + 2 * self.parent.ratio_w)
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py b/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
index 9cc7a78fbcbd701deedda58b3b3f1b1d912900aa..20a8d99105e4b0c73d33577f34776d8d96b93db9 100755
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
@@ -2,10 +2,10 @@
 Class of which object is used to plot data
 """
 from typing import List, Optional, Union
-
 import matplotlib.text
-from PySide2.QtCore import QTimer, Qt
 from matplotlib import pyplot as pl
+from matplotlib.transforms import Bbox
+from PySide2.QtCore import QTimer, Qt
 from PySide2 import QtCore, QtWidgets
 from PySide2.QtWidgets import QWidget, QApplication, QTextBrowser
 
@@ -18,6 +18,7 @@ from sohstationviewer.view.plotting.plotting_widget.plotting_axes import (
     PlottingAxes
 )
 from sohstationviewer.view.plotting.plotting_widget.plotting import Plotting
+from sohstationviewer.view.save_plot_dialog import SavePlotDialog
 
 from sohstationviewer.controller.plotting_data import format_time
 from sohstationviewer.controller.util import display_tracking_info
@@ -110,6 +111,7 @@ class PlottingWidget(QtWidgets.QScrollArea):
         font_size: float - font size on plot. With some require bigger font,
             +2 to the font_size
         """
+        self.base_font_size = 7
         self.font_size = 7
         """
         bottom: float - y position of the bottom edge of all plots in self.axes
@@ -243,6 +245,7 @@ class PlottingWidget(QtWidgets.QScrollArea):
         # set view size fit with the scroll's view port size
         self.main_widget.setFixedWidth(geo.width())
         self.ratio_w = geo.width() / self.width_base_px
+        self.font_size = self.ratio_w * self.base_font_size
         self.plotting_w = self.ratio_w * self.width_base
         self.plotting_l = self.ratio_w * self.plotting_l_base
         if self.plot_total == 0:
@@ -652,6 +655,57 @@ class PlottingWidget(QtWidgets.QScrollArea):
         """
         self.peer_plotting_widgets = widgets
 
+    def save_plot(self, default_name='plot'):
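+        """
+        Check for a color mode conflict with the main window and confirm
+            a black background, then open SavePlotDialog to get the file
+            path, format, and DPI, and save the figure to file.
+
+        :param default_name: default name for the saved plot file
+        """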
+        if self.c_mode != self.main_window.color_mode:
+            main_color = constants.ALL_COLOR_MODES[self.main_window.color_mode]
+            curr_color = constants.ALL_COLOR_MODES[self.c_mode]
+            msg = (f"Main window's color mode is {main_color}"
+                   f" but the mode haven't been applied to plotting.\n\n"
+                   f"Do you want to cancel to apply {main_color} mode "
+                   f"by clicking RePlot?\n"
+                   f"Or continue with {curr_color}?")
+            msgbox = QtWidgets.QMessageBox()
+            msgbox.setWindowTitle("Color Mode Conflict")
+            msgbox.setText(msg)
+            msgbox.addButton(QtWidgets.QMessageBox.Cancel)
+            msgbox.addButton('Continue', QtWidgets.QMessageBox.YesRole)
+            result = msgbox.exec_()
+            if result == QtWidgets.QMessageBox.Cancel:
+                return
+            self.main_window.color_mode = self.c_mode
+            if self.c_mode == 'B':
+                self.main_window.background_black_radio_button.setChecked(True)
+            else:
+                self.main_window.background_white_radio_button.setChecked(True)
+        if self.c_mode == 'B':
+            msg = ("The current background mode is black.\n"
+                   "Do you want to cancel to change the background mode "
+                   "before saving the plots to file?")
+            msgbox = QtWidgets.QMessageBox()
+            msgbox.setWindowTitle("Background Mode Confirmation")
+            msgbox.setText(msg)
+            msgbox.addButton(QtWidgets.QMessageBox.Cancel)
+            msgbox.addButton('Continue', QtWidgets.QMessageBox.YesRole)
+            result = msgbox.exec_()
+            if result == QtWidgets.QMessageBox.Cancel:
+                return
+        save_plot_dlg = SavePlotDialog(
+            self.parent, self.main_window, default_name)
+        save_plot_dlg.exec_()
+        save_file_path = save_plot_dlg.save_file_path
+        if save_file_path is None:
+            return
+        dpi = save_plot_dlg.dpi
+
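+        # bbox_inches takes a Bbox in inches; this one crops the saved
+        # image to the plotted area of the figure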
+        self.plotting_axes.fig.savefig(
+            save_file_path,
+            bbox_inches=Bbox([[0, self.plotting_bot*100],
+                              [self.ratio_w*15.5, 100]]),
+            dpi=dpi
+        )
+        msg = f"Graph is saved at {save_file_path}"
+        display_tracking_info(self.tracking_box, msg)
+
     def clear(self):
         self.plotting_axes.fig.clear()
         self.axes = []
diff --git a/sohstationviewer/view/plotting/time_power_squared_dialog.py b/sohstationviewer/view/plotting/time_power_squared_dialog.py
index 60a27bbe4f7e04311c5b48e03e14a0f24f8eed29..f27f3c4362b8d0cf30d521808810b3da6fc5856d 100755
--- a/sohstationviewer/view/plotting/time_power_squared_dialog.py
+++ b/sohstationviewer/view/plotting/time_power_squared_dialog.py
@@ -222,7 +222,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
 
         total_days = c_data['tps_data'].shape[0]
         plot_h = self.plotting_axes.get_height(
-            total_days/2, bw_plots_distance=0.003, pixel_height=12.1)
+            total_days/1.5, bw_plots_distance=0.003, pixel_height=12.1)
         ax = self.create_axes(self.plotting_bot, plot_h)
         ax.spines[['right', 'left', 'top', 'bottom']].set_visible(False)
         ax.text(
@@ -471,6 +471,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
         with new color range selected.
         """
         self.clear()
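+        # Re-apply the main window's current color mode before replotting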
+        self.set_colors(self.main_window.color_mode)
         self.plotting_bot = const.BOTTOM
         title = get_title(self.set_key, self.min_x, self.max_x, self.date_mode)
         self.timestamp_bar_top = self.plotting_axes.add_timestamp_bar(0.)
@@ -559,6 +560,7 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         """
         self.color_range_choice = QtWidgets.QComboBox(self)
         self.color_range_choice.addItems(self.color_ranges)
+
         self.color_range_choice.setCurrentText('High')
         color_layout.addWidget(self.color_range_choice)
         # ##################### Replot button ########################
@@ -566,8 +568,8 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         buttons_layout.addWidget(self.replot_button)
 
         # ##################### Save button ##########################
-        self.save_button = QtWidgets.QPushButton('Save', self)
-        buttons_layout.addWidget(self.save_button)
+        self.save_plot_button = QtWidgets.QPushButton('Save Plot', self)
+        buttons_layout.addWidget(self.save_plot_button)
 
         self.info_text_browser.setFixedHeight(60)
         bottom_layout.addWidget(self.info_text_browser)
@@ -600,7 +602,7 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         """
         Connect functions to widgets
         """
-        self.save_button.clicked.connect(self.save)
+        self.save_plot_button.clicked.connect(self.save_plot)
         self.replot_button.clicked.connect(self.plotting_widget.replot)
         self.color_range_choice.currentTextChanged.connect(
             self.color_range_changed)
@@ -617,8 +619,8 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         self.sel_col_labels = self.color_label[cr_index]
 
     @QtCore.Slot()
-    def save(self):
+    def save_plot(self):
         """
         Save the plotting to a file
         """
-        print("save")
+        self.plotting_widget.save_plot('TPS-Plot')
diff --git a/sohstationviewer/view/plotting/waveform_dialog.py b/sohstationviewer/view/plotting/waveform_dialog.py
index d80e8014c9e759121d2077d157eed0fa214fb027..11d07c2625521381cc2c951e3fa1cf047eb584f7 100755
--- a/sohstationviewer/view/plotting/waveform_dialog.py
+++ b/sohstationviewer/view/plotting/waveform_dialog.py
@@ -112,11 +112,11 @@ class WaveformDialog(QtWidgets.QWidget):
         bottom_layout = QtWidgets.QHBoxLayout()
         main_layout.addLayout(bottom_layout)
         """
-        save_button: save plot in plotting_widget to file
+        save_plot_button: save plot in plotting_widget to file
         """
-        self.save_button = QtWidgets.QPushButton('Save', self)
-        self.save_button.clicked.connect(self.save)
-        bottom_layout.addWidget(self.save_button)
+        self.save_plot_button = QtWidgets.QPushButton('Save Plot', self)
+        self.save_plot_button.clicked.connect(self.save_plot)
+        bottom_layout.addWidget(self.save_plot_button)
         self.info_text_browser.setFixedHeight(60)
         bottom_layout.addWidget(self.info_text_browser)
 
@@ -142,11 +142,11 @@ class WaveformDialog(QtWidgets.QWidget):
         self.plotting_widget.init_size()
 
     @QtCore.Slot()
-    def save(self):
+    def save_plot(self):
         """
         Save the plotting to a file
         """
-        print("save")
+        self.plotting_widget.save_plot('Waveform-Plot')
 
     def plot_finished(self):
         self.parent.is_plotting_waveform = False
diff --git a/sohstationviewer/view/save_plot_dialog.py b/sohstationviewer/view/save_plot_dialog.py
new file mode 100644
index 0000000000000000000000000000000000000000..77a988f25a6679ac7ecd3bd4f916ca625d6a97d1
--- /dev/null
+++ b/sohstationviewer/view/save_plot_dialog.py
@@ -0,0 +1,139 @@
+import sys
+import platform
+import os
+from pathlib import Path
+from typing import Union, Optional
+
+from PySide2 import QtWidgets, QtCore, QtGui
+from PySide2.QtWidgets import QApplication, QWidget, QDialog
+
+from sohstationviewer.conf import constants
+
+
+class SavePlotDialog(QDialog):
+    def __init__(self, parent: Union[QWidget, QApplication],
+                 main_window: QApplication,
+                 default_name: str):
+        """
+        Dialog that allows choosing a file format and opens a file dialog
+            to pick where the plot file is saved
+
+        :param parent: the parent widget
+        :param main_window: the main window, which keeps the save directory
+            and format between saves
+        :param default_name: default name for the graph file to be saved as
+        """
+        super(SavePlotDialog, self).__init__(parent)
+        self.main_window = main_window
+        """
+        save_file_path: path to save file
+        """
+        self.save_file_path: Optional[Path] = None
+        """
+        save_dir_path: path to save dir
+        """
+        self.save_dir_path: str = main_window.save_plot_dir
+        """
+        dpi: resolution for png format
+        """
+        self.dpi: int = 100
+
+        self.save_dir_btn = QtWidgets.QPushButton("Save Directory", self)
+        self.save_dir_textbox = QtWidgets.QLineEdit(self.save_dir_path)
+        self.save_filename_textbox = QtWidgets.QLineEdit(default_name)
+
+        self.dpi_line_edit = QtWidgets.QSpinBox(self)
+        self.format_radio_btns = {}
+        for fmt in constants.IMG_FORMAT:
+            self.format_radio_btns[fmt] = QtWidgets.QRadioButton(fmt, self)
+            if fmt == self.main_window.save_plot_format:
+                self.format_radio_btns[fmt].setChecked(True)
+        self.cancel_btn = QtWidgets.QPushButton('CANCEL', self)
+        self.continue_btn = QtWidgets.QPushButton('SAVE PLOT', self)
+
+        self.setup_ui()
+        self.connect_signals()
+
+    def setup_ui(self) -> None:
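+        """
+        Lay out the directory, filename, DPI, and format controls and the
+        CANCEL/SAVE PLOT buttons in a grid.
+        """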
+        self.setWindowTitle("Save Plot")
+
+        main_layout = QtWidgets.QGridLayout()
+        self.setLayout(main_layout)
+
+        main_layout.addWidget(self.save_dir_btn, 0, 0, 1, 1)
+        self.save_dir_textbox.setFixedWidth(500)
+        main_layout.addWidget(self.save_dir_textbox, 0, 1, 1, 5)
+        main_layout.addWidget(QtWidgets.QLabel('Save Filename'),
+                              1, 0, 1, 1)
+        main_layout.addWidget(self.save_filename_textbox, 1, 1, 1, 5)
+
+        main_layout.addWidget(QtWidgets.QLabel('DPI'),
+                              2, 2, 1, 1, QtGui.Qt.AlignRight)
+        self.dpi_line_edit.setRange(50, 300)
+        self.dpi_line_edit.setValue(100)
+        main_layout.addWidget(self.dpi_line_edit, 2, 3, 1, 1)
+        rowidx = 2
+        for fmt in self.format_radio_btns:
+            main_layout.addWidget(self.format_radio_btns[fmt], rowidx, 1, 1, 1)
+            rowidx += 1
+
+        main_layout.addWidget(self.cancel_btn, rowidx, 1, 1, 1)
+        main_layout.addWidget(self.continue_btn, rowidx, 3, 1, 1)
+
+    def connect_signals(self) -> None:
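+        """
+        Connect the buttons' clicked signals to their handlers.
+        """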
+        self.save_dir_btn.clicked.connect(self.change_save_directory)
+        self.cancel_btn.clicked.connect(self.close)
+        self.continue_btn.clicked.connect(self.on_continue)
+
+    @QtCore.Slot()
+    def change_save_directory(self) -> None:
+        """
+        Show a folder selection window and change the plot save directory
+        based on the folder selected by the user.
+        """
+        fd = QtWidgets.QFileDialog(self)
+        fd.setFileMode(QtWidgets.QFileDialog.Directory)
+        fd.setDirectory(self.save_dir_textbox.text())
+        if fd.exec_() != QtWidgets.QDialog.Accepted:
+            return
+        new_path = fd.selectedFiles()[0]
+        self.save_dir_textbox.setText(new_path)
+        self.save_dir_path = new_path
+        self.main_window.save_plot_dir = new_path
+
+    @QtCore.Slot()
+    def on_continue(self):
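+        """
+        Validate the directory and filename, record the chosen format and
+            DPI, build save_file_path, then close the dialog.
+        """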
+        if self.save_dir_textbox.text().strip() == '':
+            QtWidgets.QMessageBox.warning(
+                self, "Add Directory",
+                "A directory need to be given before continue.")
+            return
+
+        if self.save_filename_textbox.text().strip() == '':
+            QtWidgets.QMessageBox.warning(
+                self, "Add Filename",
+                "A file name need to be given before continue.")
+            return
+
+        for img_format in self.format_radio_btns:
+            if self.format_radio_btns[img_format].isChecked():
+                save_format = img_format
+                self.main_window.save_plot_format = img_format
+                break
+
+        self.save_file_path = Path(self.save_dir_path).joinpath(
+            f"{self.save_filename_textbox.text()}.{save_format}")
+        self.dpi = self.dpi_line_edit.value()
+        self.close()
+
+
+if __name__ == '__main__':
+    os_name, version, *_ = platform.platform().split('-')
+    if os_name == 'macOS':
+        os.environ['QT_MAC_WANTS_LAYER'] = '1'
+    app = QtWidgets.QApplication(sys.argv)
+    save_path = '/Users/ldam/Documents/GIT/sohstationviewer/tests/test_data/Q330-sample'  # noqa: E501
+
+    class FakeMainWindow(QtWidgets.QMainWindow):
+        # minimal stand-in providing the two attributes SavePlotDialog uses
+        save_plot_dir = save_path
+        save_plot_format = 'SVG'
+
+    test = SavePlotDialog(None, FakeMainWindow(), 'test_plot')
+    test.exec_()
+    print("dpi:", test.dpi)
+    print("save file path:", test.save_file_path)
+    sys.exit(app.exec_())
diff --git a/sohstationviewer/view/ui/main_ui.py b/sohstationviewer/view/ui/main_ui.py
index 005029668262238706fc02f0bf176aa25995df5e..194b23483bc13cbd916c0d77a737d556eee6a313 100755
--- a/sohstationviewer/view/ui/main_ui.py
+++ b/sohstationviewer/view/ui/main_ui.py
@@ -793,6 +793,8 @@ class UIMainWindow(object):
 
         self.stop_button.clicked.connect(main_window.stop)
 
+        self.save_plot_button.clicked.connect(main_window.save_plot)
+
     def read_config(self):
         self.config = configparser.ConfigParser()
         config_path = Path('sohstationviewer/conf/read_settings.ini')
diff --git a/sohstationviewer/view/util/functions.py b/sohstationviewer/view/util/functions.py
index 2927cae8c88a35dbe38423b4448c5c615c469da9..254f32030c796164cd0399d3e7d938174df27c8d 100644
--- a/sohstationviewer/view/util/functions.py
+++ b/sohstationviewer/view/util/functions.py
@@ -96,6 +96,9 @@ def create_table_of_content_file(base_path: Path) -> None:
         "this software.\n\n"
         "On the left-hand side you will find a list of currently available"
         " help topics.\n\n"
+        "If the links of the Table of Contents are broken, click on Recreate "
+        "Table of Content <img src='recreate_table_contents.png' height=30 /> "
+        "to rebuild it.\n\n"
         "The home button can be used to return to this page at any time.\n\n"
         "# Table of Contents\n\n")
     links = ""
diff --git a/tests/model/__init__.py b/tests/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/general_data/__init__.py b/tests/model/general_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/general_data/test_general_data_helper.py b/tests/model/general_data/test_general_data_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..4bd91761805bc6632a8e47e08a793fc00c6a7fa5
--- /dev/null
+++ b/tests/model/general_data/test_general_data_helper.py
@@ -0,0 +1,292 @@
+import numpy as np
+from unittest import TestCase
+from unittest.mock import patch
+
+from sohstationviewer.model.general_data.general_data_helper import (
+    _check_related_gaps, squash_gaps, sort_data,
+    retrieve_data_time_from_data_dict, retrieve_gaps_from_data_dict,
+    combine_data, apply_convert_factor_to_data_dict
+)
+
+
+class TestCheckRelatedGaps(TestCase):
+    # FROM test_handling_data_rearrange_data.TestCheckRelatedGaps
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.checked_indexes = []
+
+    def test_minmax1_inside_minmax2(self):
+        self.assertTrue(
+            _check_related_gaps(3, 4, 1, 5, 1, self.checked_indexes))
+        self.assertIn(1, self.checked_indexes)
+
+    def test_minmax2_inside_minmax1(self):
+        self.assertTrue(
+            _check_related_gaps(1, 5, 3, 4, 2, self.checked_indexes))
+        self.assertIn(2, self.checked_indexes)
+
+    def test_end_minmax1_overlap_start_minmax2(self):
+        self.assertTrue(
+            _check_related_gaps(1, 4, 3, 5, 3, self.checked_indexes))
+        self.assertIn(3, self.checked_indexes)
+
+    def test_end_minmax2_overlap_start_minmax1(self):
+        self.assertTrue(
+            _check_related_gaps(3, 5, 1, 4, 4, self.checked_indexes))
+        self.assertIn(4, self.checked_indexes)
+
+    def test_minmax1_less_than_minmax2(self):
+        self.assertFalse(
+            _check_related_gaps(1, 3, 4, 6, 5, self.checked_indexes))
+        self.assertNotIn(5, self.checked_indexes)
+
+    def test_minmax1_greater_than_minmax2(self):
+        self.assertFalse(
+            _check_related_gaps(6, 6, 1, 3, 5, self.checked_indexes))
+        self.assertNotIn(5, self.checked_indexes)
+
+
+class TestSquashGaps(TestCase):
+    # FROM test_handling_data_rearrange_data.TestSquashGaps
+    def setUp(self) -> None:
+        self.normal_gaps = [[4, 7], [4, 6], [5, 6], [3, 7], [5, 8]]
+        self.overlap_gaps = [[17, 14], [16, 14], [16, 15], [17, 13], [18, 15]]
+        self.mixed_gaps = []
+        for i in range(len(self.normal_gaps)):
+            self.mixed_gaps.append(self.normal_gaps[i])
+            self.mixed_gaps.append(self.overlap_gaps[i])
+
+    def test_normal_gaps(self):
+        gaps = squash_gaps(self.normal_gaps)
+        self.assertEqual(gaps, [[3, 8]])
+
+    def test_overlap_gaps(self):
+        gaps = squash_gaps(self.overlap_gaps)
+        self.assertEqual(gaps, [[18, 13]])
+
+    def test_mixed_gaps(self):
+        gaps = squash_gaps(self.mixed_gaps)
+        self.assertEqual(gaps, [[3, 8], [18, 13]])
+
+
+class TestSortData(TestCase):
+    # FROM test_handling_data_rearrange_data.TestSortData
+    def setUp(self) -> None:
+        self.station_data_dict = {
+            'CH1': {'tracesInfo': [{'startTmEpoch': 7},
+                                   {'startTmEpoch': 1},
+                                   {'startTmEpoch': 5},
+                                   {'startTmEpoch': 3}]},
+            'CH2': {'tracesInfo': [{'startTmEpoch': 2},
+                                   {'startTmEpoch': 8},
+                                   {'startTmEpoch': 6},
+                                   {'startTmEpoch': 4}]}
+        }
+
+    def test_sort_data(self):
+        sort_data(self.station_data_dict)
+        self.assertEqual(
+            self.station_data_dict,
+            {'CH1': {'tracesInfo': [{'startTmEpoch': 1}, {'startTmEpoch': 3},
+                                    {'startTmEpoch': 5}, {'startTmEpoch': 7}]},
+             'CH2': {'tracesInfo': [{'startTmEpoch': 2}, {'startTmEpoch': 4},
+                                    {'startTmEpoch': 6}, {'startTmEpoch': 8}]}}
+        )
+
+
+class TestRetrieveDataTimeFromDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {'CH1': {'startTmEpoch': 4, 'endTmEpoch': 6},
+                     'CH2': {'startTmEpoch': 5, 'endTmEpoch': 9}
+                     },
+            'STA2': {'CH1': {'startTmEpoch': 2, 'endTmEpoch': 4},
+                     'CH2': {'startTmEpoch': 6, 'endTmEpoch': 8}
+                     }
+            }
+        self.data_time = {}
+        self.expected_data_time = {'STA1': [4, 9], 'STA2': [2, 8]}
+
+    def test_retrieve_data_time(self):
+        retrieve_data_time_from_data_dict(self.data_dict, self.data_time)
+        self.assertEqual(self.data_time,
+                         self.expected_data_time)
+
+
+class TestRetrieveGapsFromDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {'CH1': {'gaps': [[1, 2], [4, 3]]},
+                     'CH2': {'gaps': []}
+                     },
+            'STA2': {'CH1': {'gaps': [[1, 2], [4, 3], [2, 3]]},
+                     'CH2': {'gaps': [[1, 3], [3, 2]]}
+                     },
+            }
+        self.gaps = {}
+        self.expected_gaps = {'STA1': [[1, 2], [4, 3]],
+                              'STA2': [[1, 2], [4, 3], [2, 3], [1, 3], [3, 2]]}
+
+    def test_retrieve_gaps(self):
+        retrieve_gaps_from_data_dict(self.data_dict, self.gaps)
+        self.assertEqual(self.gaps,
+                         self.expected_gaps)
+
+
+class TestCombineData(TestCase):
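+    # Each test sets up two traces in one channel with a known delta
+    # between them and combines them with gap_minimum = 10.
+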
+    def test_overlap_lt_gap_minimum(self):
+        # combine; not add to gap list
+        station_data_dict = {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 13,     # delta = 2 < 10
+                     'endTmEpoch': 20,
+                     'data': [1, -2, 1, 1],
+                     'times': [13, 16, 18, 20]}
+                ]}
+            }
+        gap_minimum = 10
+        combine_data(station_data_dict, gap_minimum)
+        self.assertEqual(station_data_dict['CH1']['gaps'], [])
+
+        self.assertEqual(
+            len(station_data_dict['CH1']['tracesInfo']),
+            1)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['endTmEpoch'],
+            20)
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 13, 16, 18, 20])
+
+    def test_overlap_gt_or_equal_gap_minimum(self):
+        # combine; add to gap list
+        station_data_dict = {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 5,     # delta = 10 >= 10
+                     'endTmEpoch': 20,
+                     'data': [1, -2, 1, 1],
+                     'times': [5, 11, 15, 20]}
+                ]}
+            }
+        gap_minimum = 10
+        combine_data(station_data_dict, gap_minimum)
+        self.assertEqual(station_data_dict['CH1']['gaps'], [[15, 5]])
+
+        self.assertEqual(
+            len(station_data_dict['CH1']['tracesInfo']),
+            1)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['endTmEpoch'],
+            20)
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 5, 11, 15, 20])
+
+    def test_lt_gap_minimum(self):
+        # not combine; not add to gap list
+        station_data_dict = {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 22,    # delta = 7 > 6, < 10
+                     'endTmEpoch': 34,
+                     'data': [1, -2, 1, 1],
+                     'times': [22, 26, 30, 34]}
+                ]}
+        }
+        gap_minimum = 10
+        combine_data(station_data_dict, gap_minimum)
+        self.assertEqual(station_data_dict['CH1']['gaps'], [])
+
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['endTmEpoch'],
+            34)
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 22, 26, 30, 34])
+
+    def test_gap_gt_or_equal_gap_minimum(self):
+        # not combine; add to gap list
+        station_data_dict = {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 25,        # delta = 10 >= 10
+                     'endTmEpoch': 40,
+                     'data': [1, -2, 1, 1],
+                     'times': [25, 29, 33, 36, 40]}
+                ]}
+            }
+        gap_minimum = 10
+        combine_data(station_data_dict, gap_minimum)
+        self.assertEqual(station_data_dict['CH1']['gaps'], [[15, 25]])
+
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['endTmEpoch'],
+            40)
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 25, 29, 33, 36, 40])
+
+
+class TestApplyConvertFactorToDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {
+                'CH1': {'tracesInfo': [{'data': np.array([1, 2, 2, -1])}]}
+            }
+        }
+        self.expected_data = [0.1, 0.2, 0.2, -0.1]
+
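+    # get_convert_factor is patched so the conversion factor is a known
+    # constant (0.1) instead of a looked-up value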
+    @patch('sohstationviewer.model.general_data.general_data_helper.'
+           'get_convert_factor')
+    def test_convert_factor(self, mock_get_convert_factor):
+        mock_get_convert_factor.return_value = 0.1
+        apply_convert_factor_to_data_dict(self.data_dict, 'Q330')
+        self.assertEqual(
+            self.data_dict['STA1']['CH1']['tracesInfo'][0]['data'].tolist(),
+            self.expected_data)
diff --git a/tests/model/mseed_data/__init__.py b/tests/model/mseed_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/mseed_data/test_mseed.py b/tests/model/mseed_data/test_mseed.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ac2551379537c384038c2f263870d0630733bca
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed.py
@@ -0,0 +1,362 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed import MSeed
+from sohstationviewer.model.general_data.general_data import \
+    ProcessingDataError
+
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+pegasus_data = TEST_DATA_DIR.joinpath("Pegasus-sample")
+q330_data = TEST_DATA_DIR.joinpath("Q330-sample")
+blockettes_data = TEST_DATA_DIR.joinpath("Q330_unimplemented_ascii_block")
+multiplex_data = TEST_DATA_DIR.joinpath("Q330_multiplex")
+centaur_data = TEST_DATA_DIR.joinpath("Centaur-sample")
+
+
+class TestMSeed(TestCase):
+    def test_path_not_exist(self):
+        # raise exception when path not exist
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': '_',
+            'on_unittest': True
+        }
+        with self.assertRaises(ProcessingDataError) as context:
+            MSeed(**args)
+        self.assertEqual(
+            str(context.exception),
+            "Path '_' not exist"
+        )
+
+    def test_read_text_only(self):
+        # no station is recognized, so text is added to key 'TEXT'
+        # in log_data
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'folder': pegasus_data,
+            'req_soh_chans': ['_'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT'])
+        self.assertEqual(len(obj.log_data['TEXT']), 2)
+        self.assertEqual(
+            obj.log_data['TEXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['TEXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_text_with_soh(self):
+        # the text takes its station from the SOH data and is added to
+        # log_data under the channel 'TXT'
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'folder': pegasus_data,
+            'req_soh_chans': ['VE1'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'KC01'])
+        self.assertEqual(len(obj.log_data['TEXT']), 0)
+        self.assertEqual(list(obj.log_data['KC01'].keys()), ['TXT'])
+        self.assertEqual(len(obj.log_data['KC01']['TXT']), 2)
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_text_with_waveform(self):
+        # the text takes its station from the waveform data and is added to
+        # log_data under the channel 'TXT'
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'folder': pegasus_data,
+            'req_wf_chans': ['HH1'],
+            'req_soh_chans': ['_'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'KC01'])
+        self.assertEqual(len(obj.log_data['TEXT']), 0)
+        self.assertEqual(list(obj.log_data['KC01'].keys()), ['TXT'])
+        self.assertEqual(len(obj.log_data['KC01']['TXT']), 2)
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_ascii(self):
+        # the info is text wrapped in miniSEED format
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': q330_data,
+            'req_soh_chans': ['LOG'],
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'AX08'])
+        self.assertEqual(list(obj.log_data['AX08'].keys()), ['LOG'])
+        self.assertEqual(obj.log_data['TEXT'], [])
+        self.assertEqual(len(obj.log_data['AX08']['LOG']), 16)
+        self.assertEqual(
+            obj.log_data['AX08']['LOG'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1625456260.12  To:1625456260.12\n\r'
+            '\nQuanterra Packet Baler Model 14 Restart. V'
+        )
+        self.assertEqual(
+            obj.log_data['AX08']['LOG'][1][:100],
+            '\n\nSTATE OF HEALTH: From:1625456366.62  To:1625456366.62'
+            '\nReducing Status Polling Interval\r\n[2021-07-0'
+        )
+
+    def test_read_blockettes_info(self):
+        # the info is stored in blockette 500
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'folder': blockettes_data,
+            'req_soh_chans': ['ACE'],
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', '3203'])
+        self.assertEqual(list(obj.log_data['3203'].keys()), ['ACE'])
+        self.assertEqual(obj.log_data['TEXT'], [])
+        self.assertEqual(len(obj.log_data['3203']['ACE']), 1)
+        self.assertEqual(
+            obj.log_data['3203']['ACE'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1671729287.00014  To:1671729287.0'
+            '\n===========\nVCO correction: 53.7109375\nTim'
+        )
+
+    def test_not_is_multiplex_read_channel(self):
+        # is_multiplex = False => reading stops at the first record whose
+        # channel doesn't match the requested ones, so channel 'EL1' is
+        # read but not to completion
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': multiplex_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL1']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(obj.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730013.805)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['size'], 1932)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_is_multiplex_read_channel(self):
+        # is_multiplex = True => read every record
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'folder': multiplex_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL1']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(obj.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730720.4348998)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['size'], 143258)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_not_is_multiplex_selected_channel_in_middle(self):
+        # with is_multiplex = False the selected channel is never reached
+        # because an earlier record doesn't meet the requirements
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': multiplex_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL2']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), [])
+
+    def test_is_multiplex_selected_channel_in_middle(self):
+        # is_multiplex = True => the selected channel will be read
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'folder': multiplex_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL2']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL2'])
+        self.assertEqual(obj.waveform_data['3203']['EL2']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['startTmEpoch'],
+                         1671730004.3100293)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['endTmEpoch'],
+                         1671730720.5549)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['size'], 143249)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL2']['tracesInfo']),
+                         1)
+
+    def test_existing_time_range(self):
+        # check that data_time comes from the given range; the end time may
+        # be a little greater than read_end according to the record's end
+        # time
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': q330_data,
+            'req_soh_chans': [],
+            'read_start': 1625456018.0,
+            'read_end': 1625505627.9998999
+        }
+        obj = MSeed(**args)
+        self.assertEqual(obj.keys, ['AX08'])
+        self.assertEqual(list(obj.soh_data['AX08'].keys()), ['VKI'])
+        self.assertEqual(list(obj.mass_pos_data['AX08'].keys()), [])
+        self.assertEqual(list(obj.waveform_data['AX08'].keys()), [])
+        self.assertEqual(obj.data_time['AX08'], [1625446018.0, 1625510338.0])
+
+    def test_non_existing_time_range(self):
+        # if the given time range is outside the data time, no station will
+        # be created
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': q330_data,
+            'req_soh_chans': [],
+            'read_start': 1625356018.0,
+            'read_end': 1625405627.9998999
+        }
+        obj = MSeed(**args)
+        self.assertEqual(obj.keys, [])
+        self.assertEqual(obj.soh_data, {})
+        self.assertEqual(obj.mass_pos_data, {})
+        self.assertEqual(obj.waveform_data, {})
+        self.assertEqual(obj.data_time, {})
+
+    def test_read_waveform(self):
+        # data for TPS is similar to waveform data but is not separated
+        # at gaps
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': q330_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['LHE']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['AX08'])
+        self.assertEqual(list(obj.waveform_data['AX08'].keys()), ['LHE'])
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['samplerate'], 1)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['startTmEpoch'],
+                         1625445156.000001)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['endTmEpoch'],
+                         1625532950.0)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['size'], 87794)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['AX08']['LHE']['tracesInfo']),
+                         1)
+
+    def test_read_mass_pos_channel(self):
+        # mass-position channels are read when one or both include_mpxxxxxx
+        # flags are True
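+        # (include_mp123zne presumably covers the 1/2/3 (Z/N/E)
+        # mass-position channels; VM1 is the one present in this sample)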
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'folder': q330_data,
+            'req_soh_chans': [],
+            'req_wf_chans': [],
+            'include_mp123zne': True
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.mass_pos_data.keys()), ['AX08'])
+        self.assertEqual(list(obj.mass_pos_data['AX08'].keys()), ['VM1'])
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['samplerate'], 0.1)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['startTmEpoch'],
+                         1625444970.0)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['endTmEpoch'],
+                         1625574580.0)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['size'], 12961)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['gaps'], [])
+        self.assertEqual(len(obj.mass_pos_data['AX08']['VM1']['tracesInfo']),
+                         1)
+
+    def test_gap(self):
+        # gaps will be detected when gap_minimum is set
+        args = {
+            'data_type': 'Centaur',
+            'is_multiplex': True,
+            'folder': centaur_data,
+            'req_soh_chans': [],
+            'gap_minimum': 60
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(obj.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(obj.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(obj.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(obj.gaps['3734'], [[1534521420.0, 1534524000.0]])
+
+    def test_not_detect_gap(self):
+        # if gap_minimum isn't set but gaps exist, the data is still
+        # separated, but the gaps won't be added to the gap list
+        args = {
+            'data_type': 'Centaur',
+            'is_multiplex': True,
+            'folder': centaur_data,
+            'req_soh_chans': [],
+            'gap_minimum': None
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(obj.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(obj.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(obj.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(obj.gaps['3734'], [])  # no gaps
diff --git a/tests/model/mseed_data/test_mseed_helper.py b/tests/model/mseed_data/test_mseed_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..938092c629f7115bd2623971a58a7aa5e7b047fe
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed_helper.py
@@ -0,0 +1,48 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed_helper import (
+    retrieve_nets_from_data_dict, read_text
+)
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+text_file = TEST_DATA_DIR.joinpath(
+    "Pegasus-sample/Pegasus_SVC4/logs/2020/XX/KC01/XX.KC01...D.2020.129")
+binary_file = TEST_DATA_DIR.joinpath(
+    "Pegasus-sample/Pegasus_SVC4/soh/2020/XX/KC01/VDT.D/"
+    "XX.KC01..VDT.D.2020.129")
+
+
+class TestReadText(TestCase):
+    def test_text_file(self):
+        ret = read_text(text_file)
+        expected_ret = (
+            "\n\n** STATE OF HEALTH: XX.KC01...D.2020.129"
+            "\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware")
+        self.assertEqual(ret[:100], expected_ret)
+
+    def test_binary_file(self):
+        ret = read_text(binary_file)
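+        # read_text returns None for files that aren't plain text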
+        self.assertIsNone(ret)
+
+
+class TestRetrieveNetsFromDataDict(TestCase):
+    def setUp(self):
+        self.nets_by_sta = {}
+        self.data_dict = {
+            'STA1': {'CHA1': {'nets': {'NET1', 'NET2'}},
+                     'CHA2': {'nets': {'NET2', 'NET3'}}
+                     },
+            'STA2': {'CHA1': {'nets': {'NET1'}},
+                     'CHA2': {'nets': {'NET1'}}
+                     }
+            }
+
+    def test_retrieve_nets(self):
+        retrieve_nets_from_data_dict(self.data_dict, self.nets_by_sta)
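+        # the nets of all channels are unioned under each station key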
+        self.assertEqual(list(self.nets_by_sta.keys()), ['STA1', 'STA2'])
+        self.assertEqual(sorted(list(self.nets_by_sta['STA1'])),
+                         ['NET1', 'NET2', 'NET3'])
+        self.assertEqual(sorted(list(self.nets_by_sta['STA2'])), ['NET1'])
diff --git a/tests/model/mseed_data/test_mseed_reader.py b/tests/model/mseed_data/test_mseed_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcdbe513272a07e763b8a90a8f3a662e6ebdb26a
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed_reader.py
@@ -0,0 +1,316 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed_reader import MSeedReader
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+ascii_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..LOG.2021.186")
+blockettes_files = TEST_DATA_DIR.joinpath(
+    "Q330_unimplemented_ascii_block/XX-3203_4-20221222190255")
+multiplex_file = TEST_DATA_DIR.joinpath(
+    "Q330_multiplex/XX-3203_4-20221222183011")
+soh_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..VKI.2021.186")
+waveform_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..LHE.2021.186")
+mass_pos_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..VM1.2021.186")
+gap_file = TEST_DATA_DIR.joinpath(
+    "Centaur-sample/SOH/"
+    "XX.3734.SOH.centaur-3_3734..20180817_000000.miniseed.miniseed")
+
+
+class TestMSeedReader(TestCase):
+    def setUp(self) -> None:
+        self.soh_data = {}
+        self.mass_pos_data = {}
+        self.waveform_data = {}
+        self.log_data = {}
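+        # fresh containers handed to MSeedReader, which fills them in place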
+
+    def test_read_ascii(self):
+        args = {
+            'file_path': ascii_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['LOG'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.log_data.keys()), ['AX08'])
+        self.assertEqual(list(self.log_data['AX08'].keys()), ['LOG'])
+        self.assertEqual(len(self.log_data['AX08']['LOG']), 16)
+        self.assertEqual(
+            self.log_data['AX08']['LOG'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1625456260.12  To:1625456260.12\n\r'
+            '\nQuanterra Packet Baler Model 14 Restart. V'
+        )
+        self.assertEqual(
+            self.log_data['AX08']['LOG'][1][:100],
+            '\n\nSTATE OF HEALTH: From:1625456366.62  To:1625456366.62'
+            '\nReducing Status Polling Interval\r\n[2021-07-0'
+        )
+
+    def test_read_blockettes_info(self):
+        args = {
+            'file_path': blockettes_files,
+            'is_multiplex': True,
+            'req_soh_chans': ['ACE'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.log_data.keys()), ['3203'])
+        self.assertEqual(list(self.log_data['3203'].keys()), ['ACE'])
+        self.assertEqual(len(self.log_data['3203']['ACE']), 1)
+        self.assertEqual(
+            self.log_data['3203']['ACE'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1671729287.00014  To:1671729287.0'
+            '\n===========\nVCO correction: 53.7109375\nTim'
+        )
+
+    def test_not_is_multiplex_read_channel(self):
+        # is_multiplex = False => reading stops at the first record whose
+        # channel doesn't match the requested ones, so channel 'EL1' is
+        # read but not to completion
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['EL1'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(self.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(self.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730013.805)
+        self.assertEqual(self.waveform_data['3203']['EL1']['size'], 1932)
+        self.assertEqual(self.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_is_multiplex_read_channel(self):
+        # is_multiplex = True => read every record
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': True,
+            'req_wf_chans': ['EL1'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(self.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(self.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730720.4348998)
+        self.assertEqual(self.waveform_data['3203']['EL1']['size'], 143258)
+        self.assertEqual(self.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_not_is_multiplex_selected_channel_in_middle(self):
+        # with is_multiplex = False the selected channel is never reached
+        # because an earlier record doesn't meet the requirements
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['EL2'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), [])
+
+    def test_is_multiplex_selected_channel_in_middle(self):
+        # is_multiplex = True => the selected channel will be read
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': True,
+            'req_wf_chans': ['EL2'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL2'])
+        self.assertEqual(self.waveform_data['3203']['EL2']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL2']['startTmEpoch'],
+                         1671730004.3100293)
+        self.assertEqual(self.waveform_data['3203']['EL2']['endTmEpoch'],
+                         1671730720.5549)
+        self.assertEqual(self.waveform_data['3203']['EL2']['size'], 143249)
+        self.assertEqual(self.waveform_data['3203']['EL2']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL2']['tracesInfo']),
+                         1)
+
+    def test_existing_time_range(self):
+        # check that data_time comes from the given range; the end time may
+        # be a little greater than read_end according to the record's end
+        # time
+        args = {
+            'file_path': soh_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['VKI'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'read_start': 1625456018.0,
+            'read_end': 1625505627.9998999
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data['AX08'].keys()), ['VKI'])
+        self.assertEqual(self.soh_data['AX08']['VKI']['startTmEpoch'],
+                         1625446018.0)
+        self.assertEqual(self.soh_data['AX08']['VKI']['endTmEpoch'],
+                         1625510338.0)
+
+    def test_non_existing_time_range(self):
+        # if the given time range is outside the data time, no station will
+        # be created
+        args = {
+            'file_path': soh_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['VKI'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'read_start': 1625356018.0,
+            'read_end': 1625405627.9998999
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(self.soh_data, {})
+        self.assertEqual(self.mass_pos_data, {})
+        self.assertEqual(self.waveform_data, {})
+
+    def test_read_waveform(self):
+        args = {
+            'file_path': waveform_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['LHE'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['AX08'])
+        self.assertEqual(list(self.waveform_data['AX08'].keys()), ['LHE'])
+        self.assertEqual(self.waveform_data['AX08']['LHE']['samplerate'], 1)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['startTmEpoch'],
+                         1625445156.000001)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['endTmEpoch'],
+                         1625532950.0)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['size'], 87794)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['AX08']['LHE']['tracesInfo']),
+                         1)
+
+    def test_read_mass_pos_channel(self):
+        # mass-position channels are read when one or both include_mpxxxxxx
+        # flags are True
+        args = {
+            'file_path': mass_pos_file,
+            'is_multiplex': False,
+            'include_mp123zne': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.mass_pos_data.keys()), ['AX08'])
+        self.assertEqual(list(self.mass_pos_data['AX08'].keys()), ['VM1'])
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['samplerate'], 0.1)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['startTmEpoch'],
+                         1625444970.0)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['endTmEpoch'],
+                         1625574580.0)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['size'], 12961)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['gaps'], [])
+        self.assertEqual(len(self.mass_pos_data['AX08']['VM1']['tracesInfo']),
+                         1)
+
+    def test_gap(self):
+        # gaps will be detected when gap_minimum is set
+        args = {
+            'file_path': gap_file,
+            'is_multiplex': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'gap_minimum': 60
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(self.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(self.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(self.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(self.soh_data['3734']['EX1']['gaps'],
+                         [[1534522200.0, 1534523940.0]])
+
+    def test_not_detect_gap(self):
+        # if gap_minimum isn't set but gaps exist, the data is still
+        # separated, but the gaps won't be added to the gap list
+        args = {
+            'file_path': gap_file,
+            'is_multiplex': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'gap_minimum': None
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(self.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(self.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(self.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(self.soh_data['3734']['EX1']['gaps'], [])  # no gaps
diff --git a/tests/test_controller/test_processing.py b/tests/test_controller/test_processing.py
index 289eb5bbdfc516f1a3b6d925e15c507415359b75..0fa881ff8e3cc5f2f8bf0652d21c31342c3fbed3 100644
--- a/tests/test_controller/test_processing.py
+++ b/tests/test_controller/test_processing.py
@@ -22,7 +22,7 @@ rt130_dir = TEST_DATA_DIR.joinpath('RT130-sample/2017149.92EB/2017150')
 q330_dir = TEST_DATA_DIR.joinpath('Q330-sample/day_vols_AX08')
 centaur_dir = TEST_DATA_DIR.joinpath('Centaur-sample/SOH')
 pegasus_dir = TEST_DATA_DIR.joinpath('Pegasus-sample/Pegasus_SVC4/soh')
-mix_traces_dir = TEST_DATA_DIR.joinpath('Q330_mixed_traces')
+multiplex_dir = TEST_DATA_DIR.joinpath('Q330_multiplex')
 
 
 class TestLoadDataAndReadChannels(TestCase):
@@ -212,21 +212,21 @@ class TestLoadDataAndReadChannels(TestCase):
         self.assertListEqual(ret[2], pegasus_wf_channels)
         self.assertListEqual(ret[3], pegasus_spr_gt_1)
 
-        mix_traces_soh_channels = ['LOG']
-        mix_traces_mass_pos_channels = []
-        mix_traces_wf_channels = sorted(
+        multiplex_soh_channels = ['LOG']
+        multiplex_mass_pos_channels = []
+        multiplex_wf_channels = sorted(
             ['BH1', 'BH2', 'BH3', 'BH4', 'BH5', 'BH6',
              'EL1', 'EL2', 'EL4', 'EL5', 'EL6', 'ELZ'])
-        mix_traces_spr_gt_1 = sorted(
+        multiplex_spr_gt_1 = sorted(
             ['BS1', 'BS2', 'BS3', 'BS4', 'BS5', 'BS6',
              'ES1', 'ES2', 'ES3', 'ES4', 'ES5', 'ES6',
              'LS1', 'LS2', 'LS3', 'LS4', 'LS5', 'LS6',
              'SS1', 'SS2', 'SS3', 'SS4', 'SS5', 'SS6'])
-        ret = read_mseed_channels(self.widget_stub, [mix_traces_dir], True)
-        self.assertListEqual(ret[0], mix_traces_soh_channels)
-        self.assertListEqual(ret[1], mix_traces_mass_pos_channels)
-        self.assertListEqual(ret[2], mix_traces_wf_channels)
-        self.assertListEqual(ret[3], mix_traces_spr_gt_1)
+        ret = read_mseed_channels(self.widget_stub, [multiplex_dir], True)
+        self.assertListEqual(ret[0], multiplex_soh_channels)
+        self.assertListEqual(ret[1], multiplex_mass_pos_channels)
+        self.assertListEqual(ret[2], multiplex_wf_channels)
+        self.assertListEqual(ret[3], multiplex_spr_gt_1)
 
     def test_read_channels_rt130_dir(self):
         """
diff --git a/tests/test_data/Q330_mixed_traces/XX-3203_4-20221222183011 b/tests/test_data/Q330_multiplex/XX-3203_4-20221222183011
similarity index 100%
rename from tests/test_data/Q330_mixed_traces/XX-3203_4-20221222183011
rename to tests/test_data/Q330_multiplex/XX-3203_4-20221222183011