diff --git a/documentation/01 _ Table of Contents.help.md b/documentation/01 _ Table of Contents.help.md
index f7e158e1eb463f7b419d7f220879acbe094331ae..60e076d08b46d31af4e52b8bcd509a2c73aaefde 100644
--- a/documentation/01 _ Table of Contents.help.md	
+++ b/documentation/01 _ Table of Contents.help.md	
@@ -4,6 +4,8 @@ Welcome to the SOH Station Viewer documentation. Here you will find usage guides
 
 On the left-hand side you will find a list of currently available help topics.
 
+If the links in the Table of Contents are broken, click Recreate Table of Contents <img src='recreate_table_contents.png' height=30 style='margin: 3px 0px 0px 0px;'/> to rebuild it.
+
 The home button can be used to return to this page at any time.
 
 # Table of Contents
@@ -14,19 +16,23 @@ The home button can be used to return to this page at any time.
 
 + [How to Use Help](03%20_%20How%20to%20Use%20Help.help.md)
 
-+ [Search SOH n LOG](04%20_%20Search%20SOH%20n%20LOG.help.md)
++ [Search List of Directories](04%20_%20Search%20List%20of%20Directories.help.md)
+
++ [Read from Data Card](05%20_%20Read%20from%20Data%20Card.help.md)
+
++ [Select SOH](06%20_%20Select%20SOH.help.md)
 
-+ [Search List of Directories](05%20_%20Search%20List%20of%20Directories.help.md)
++ [Select Mass Position](07%20_%20Select%20Mass%20Position.help.md)
 
-+ [Read from Data Card](06%20_%20Read%20from%20Data%20Card.help.md)
++ [Select Waveforms](08%20_%20Select%20Waveforms.help.md)
 
-+ [Select SOH](07%20_%20Select%20SOH.help.md)
++ [Gap Display](09%20_%20Gap%20Display.help.md)
 
-+ [Select Mass Position](08%20_%20Select%20Mass%20Position.help.md)
++ [Change TPS Color Range](10%20_%20Change%20TPS%20Color%20Range.help.md)
 
-+ [Select Waveforms](09%20_%20Select%20Waveforms.help.md)
++ [Save Plots](11%20_%20Save%20Plots.help.md)
 
-+ [Gap Display](10%20_%20Gap%20Display.help.md)
++ [Search SOH n LOG](12%20_%20Search%20SOH%20n%20LOG.help.md)
 
 + [GPS Dialog](20%20_%20GPS%20Dialog.help.md)
 
diff --git a/documentation/05 _ Search List of Directories.help.md b/documentation/04 _ Search List of Directories.help.md
similarity index 100%
rename from documentation/05 _ Search List of Directories.help.md
rename to documentation/04 _ Search List of Directories.help.md
diff --git a/documentation/06 _ Read from Data Card.help.md b/documentation/05 _ Read from Data Card.help.md
similarity index 100%
rename from documentation/06 _ Read from Data Card.help.md
rename to documentation/05 _ Read from Data Card.help.md
diff --git a/documentation/07 _ Select SOH.help.md b/documentation/06 _ Select SOH.help.md
similarity index 100%
rename from documentation/07 _ Select SOH.help.md
rename to documentation/06 _ Select SOH.help.md
diff --git a/documentation/08 _ Select Mass Position.help.md b/documentation/07 _ Select Mass Position.help.md
similarity index 100%
rename from documentation/08 _ Select Mass Position.help.md
rename to documentation/07 _ Select Mass Position.help.md
diff --git a/documentation/09 _ Select Waveforms.help.md b/documentation/08 _ Select Waveforms.help.md
similarity index 100%
rename from documentation/09 _ Select Waveforms.help.md
rename to documentation/08 _ Select Waveforms.help.md
diff --git a/documentation/11 _ Gap Display.help.md b/documentation/09 _ Gap Display.help.md
similarity index 100%
rename from documentation/11 _ Gap Display.help.md
rename to documentation/09 _ Gap Display.help.md
diff --git a/documentation/11 _ Save Plots.help.md b/documentation/11 _ Save Plots.help.md
new file mode 100644
index 0000000000000000000000000000000000000000..0027b76db29eeb97aa0adf7cbe68dc7fa5126b09
--- /dev/null
+++ b/documentation/11 _ Save Plots.help.md	
@@ -0,0 +1,60 @@
+# Save Plots
+
+---------------------------
+---------------------------
+
+## Step 1: Click 'Save Plot'
+The Main Window, the Raw Data Plot, and the TPS Plot each have a button labeled 'Save Plot'.
+
+Click the button in a window to save that window's plots.
+
+* Saving State-of-Health plots
+<br />
+<img alt="Save SOH" src="images/save_plots/save_button_soh.png" height="30" />
+<br />
+* Saving Raw data plots
+<br />
+<img alt="Save Waveform" src="images/save_plots/save_button_wf.png" height="60" />
+<br />
+* Saving Time-Power-Squared plots
+<br />
+<img alt="Save TPS" src="images/save_plots/save_button_tps.png" height="80" />
+<br />
+<br />
+<br />
+
+If the current color mode is black, the user is asked whether to continue or to
+cancel and change the color mode before saving the image.
+
+<br />
+<br />
+<img alt="Want to change color mode?" src="images/save_plots/question_on_changing_black_mode.png" height="150" />
+<br />
+
+* If the user clicks 'Cancel', saving is canceled so the user can change the
+color mode before saving the plots again.
+* If the user clicks 'Continue', saving proceeds and the image is saved in
+black mode.
+<br />
+
+---------------------------
+## Step 2: Edit the file path and select the image format
+After clicking the 'Save Plot' button, the 'Save Plot' dialog pops up.
+
+<br />
+<br />
+<img alt="Select Image Format dialog" src="images/save_plots/save_file_dialog.png" height="200" />
+<br />
+
++ The default path for the image file is preset in text box (1). To change the
+path, click the 'Save Directory' button to open a file dialog.
++ The default filename for the image is preset in text box (2). The user can
+change the name in this box.
++ Inside oval (3) are the radio buttons for selecting the image format.
++ For the 'PNG' format, the user can change the DPI, which sets the resolution
+of the image. The other formats are vector formats, which don't require a
+resolution.
+
+Then click 'CANCEL' to cancel saving, or click 'SAVE PLOT' to save the current
+plots to file.
\ No newline at end of file
diff --git a/documentation/04 _ Search SOH n LOG.help.md b/documentation/12 _ Search SOH n LOG.help.md
similarity index 100%
rename from documentation/04 _ Search SOH n LOG.help.md
rename to documentation/12 _ Search SOH n LOG.help.md
diff --git a/documentation/99 _ test.md b/documentation/99 _ test.md
index 7ef0655b760ac6880ab28c7b87f54ad34c2bb4ae..84fbede232f89c3fc5c6e9c03a105021552adb20 100644
--- a/documentation/99 _ test.md	
+++ b/documentation/99 _ test.md	
@@ -39,7 +39,7 @@ printf("%s\n", syntaxHighlighting.doesItWork ? "Success!" : "Oof.");
 ^ This is a horizontal line
 
 v This is an image
-![An Image?](images/image.jpg)
+![An Image?](recreate_table_contents.png)
 
 ---
 Another horizontal line
diff --git a/documentation/images/save_plots/question_on_changing_black_mode.png b/documentation/images/save_plots/question_on_changing_black_mode.png
new file mode 100644
index 0000000000000000000000000000000000000000..7424afda3387e8cbcad71a7fba63903072d2f23d
Binary files /dev/null and b/documentation/images/save_plots/question_on_changing_black_mode.png differ
diff --git a/documentation/images/save_plots/save_button_soh.png b/documentation/images/save_plots/save_button_soh.png
new file mode 100644
index 0000000000000000000000000000000000000000..588e20ca07de4e9dfde974de414107bb855ac1c8
Binary files /dev/null and b/documentation/images/save_plots/save_button_soh.png differ
diff --git a/documentation/images/save_plots/save_button_tps.png b/documentation/images/save_plots/save_button_tps.png
new file mode 100644
index 0000000000000000000000000000000000000000..1bfe4977370d6b904ff3d63a79bb6a4fbfe67266
Binary files /dev/null and b/documentation/images/save_plots/save_button_tps.png differ
diff --git a/documentation/images/save_plots/save_button_wf.png b/documentation/images/save_plots/save_button_wf.png
new file mode 100644
index 0000000000000000000000000000000000000000..f65ac57c793dd9b43cfd4814e56604eb3f3f3c80
Binary files /dev/null and b/documentation/images/save_plots/save_button_wf.png differ
diff --git a/documentation/images/save_plots/save_file_dialog.png b/documentation/images/save_plots/save_file_dialog.png
new file mode 100644
index 0000000000000000000000000000000000000000..ddb40fe65456a44943792bd94933a88a64556111
Binary files /dev/null and b/documentation/images/save_plots/save_file_dialog.png differ
diff --git a/documentation/img.png b/documentation/img.png
deleted file mode 100644
index 5d8c5a2165cf11862b70318e57343665de6e1a77..0000000000000000000000000000000000000000
Binary files a/documentation/img.png and /dev/null differ
diff --git a/documentation/recreate_table_contents.png b/documentation/recreate_table_contents.png
new file mode 100644
index 0000000000000000000000000000000000000000..34ab02a858eb4da3d62325cff47e1bd56dc90186
Binary files /dev/null and b/documentation/recreate_table_contents.png differ
diff --git a/sohstationviewer/conf/constants.py b/sohstationviewer/conf/constants.py
index 8bd00e091e0c436c87c64027c626cfa716dab02f..d060a1f8a3ac0865a719cddd898f39a6d55dd97e 100644
--- a/sohstationviewer/conf/constants.py
+++ b/sohstationviewer/conf/constants.py
@@ -50,8 +50,11 @@ TABLE_CONTENTS = "01 _ Table of Contents.help.md"
 SEARCH_RESULTS = "Search Results.md"
 
 # the list of all color modes
-ALL_COLOR_MODES = {'B', 'W'}
+ALL_COLOR_MODES = {'B': 'black', 'W': 'white'}
 
+# List of supported image formats. PNG must be first so that it lines up
+# with the DPI option in the dialog
+IMG_FORMAT = ['PNG', 'PDF', 'EPS', 'SVG']
 # ================================================================= #
 #                      PLOTTING CONSTANT
 # ================================================================= #
diff --git a/sohstationviewer/controller/processing.py b/sohstationviewer/controller/processing.py
index 7eaa504cb3dca8afc1501aad5cd3aa0c6d4ee284..13715a429c60def6c1a4796b3acc3f6541f8abdc 100644
--- a/sohstationviewer/controller/processing.py
+++ b/sohstationviewer/controller/processing.py
@@ -139,7 +139,7 @@ def read_mseed_channels(tracking_box: QTextBrowser, list_of_dir: List[str],
                 spr_gr_1_chan_ids.update(ret[3])
     if not on_unittest:
         QApplication.restoreOverrideCursor()
-    return sorted(list(soh_chan_ids)), sorted(list(mass_pos_chan_ids)),\
+    return sorted(list(soh_chan_ids)), sorted(list(mass_pos_chan_ids)), \
         sorted(list(wf_chan_ids)), sorted(list(spr_gr_1_chan_ids))
 
 
diff --git a/sohstationviewer/controller/util.py b/sohstationviewer/controller/util.py
index 85c8203f7736126bd843d98012a0a82a9be40b2a..0e46a24ab1b673918022c15dacbacce654c11bcf 100644
--- a/sohstationviewer/controller/util.py
+++ b/sohstationviewer/controller/util.py
@@ -66,19 +66,20 @@ def display_tracking_info(tracking_box: QTextBrowser, text: str,
     msg = {'text': text}
     if type == LogType.ERROR:
         msg['color'] = 'white'
-        msg['bgcolor'] = '#e46269'
+        msg['bgcolor'] = '#c45259'
     elif type == LogType.WARNING:
-        msg['color'] = '#ffd966'
-        msg['bgcolor'] = 'orange'
+        msg['color'] = 'white'
+        msg['bgcolor'] = '#c4a347'
     else:
         msg['color'] = 'blue'
         msg['bgcolor'] = 'white'
     html_text = """<body>
-        <div style='color:%(color)s; background-color:%(bgcolor)s'>
-            %(text)s
+        <div style='color:%(color)s'>
+            <strong>%(text)s</strong>
         </div>
         </body>"""
     tracking_box.setHtml(html_text % msg)
+    tracking_box.setStyleSheet(f"background-color: {msg['bgcolor']}")
     # parent.update()
     tracking_box.repaint()
 
diff --git a/sohstationviewer/database/extract_data.py b/sohstationviewer/database/extract_data.py
index c6cf6581b84373601f8515046c48be5098b86e67..cf0ab6208f841629f618bd28260d617c2aa15fd2 100755
--- a/sohstationviewer/database/extract_data.py
+++ b/sohstationviewer/database/extract_data.py
@@ -5,7 +5,7 @@ from sohstationviewer.database.process_db import execute_db_dict, execute_db
 from sohstationviewer.conf.dbSettings import dbConf
 
 
-def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
+def get_chan_plot_info(org_chan_id: str, data_type: str,
                        color_mode: ColorMode = 'B') -> Dict:
     """
     Given chanID read from raw data file and detected dataType
@@ -24,10 +24,10 @@ def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
         chan = 'VM?'
     if org_chan_id.startswith('MassPos'):
         chan = 'MassPos?'
+    if org_chan_id.startswith('DS'):
+        chan = 'SEISMIC'
     if org_chan_id.startswith('Event DS'):
         chan = 'Event DS?'
-    if org_chan_id.startswith('DS'):
-        chan = 'DS?'
     if org_chan_id.startswith('Disk Usage'):
         chan = 'Disk Usage?'
     if dbConf['seisRE'].match(chan):
@@ -46,17 +46,13 @@ def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
         sql = (f"{o_sql} WHERE channel='{chan}' and C.param=P.param"
                f" and dataType='{data_type}'")
     chan_db_info = execute_db_dict(sql)
-
+    seismic_label = None
     if len(chan_db_info) == 0:
         chan_db_info = execute_db_dict(
             f"{o_sql} WHERE channel='DEFAULT' and C.param=P.param")
     else:
         if chan_db_info[0]['channel'] == 'SEISMIC':
-            try:
-                chan_db_info[0]['label'] = dbConf['seisLabel'][org_chan_id[-1]]
-            except KeyError:
-                chan_db_info[0]['label'] = str(chan_info['samplerate'])
-
+            seismic_label = get_seismic_chan_label(org_chan_id)
         chan_db_info[0]['channel'] = org_chan_id
 
     chan_db_info[0]['label'] = (
@@ -68,6 +64,8 @@ def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
         else chan_db_info[0]['fixPoint'])
     if chan_db_info[0]['label'].strip() == '':
         chan_db_info[0]['label'] = chan_db_info[0]['channel']
+    elif seismic_label is not None:
+        chan_db_info[0]['label'] = seismic_label
     else:
         chan_db_info[0]['label'] = '-'.join([chan_db_info[0]['channel'],
                                             chan_db_info[0]['label']])
@@ -76,30 +74,23 @@ def get_chan_plot_info(org_chan_id: str, chan_info: Dict, data_type: str,
     return chan_db_info[0]
 
 
-def get_wf_plot_info(org_chan_id: str, *args, **kwargs) -> Dict:
-    """
-    :param org_chan_id: channel name read from data source
-    :param chan_info: to be compliant with get_chan_plot_info()
-    :param data_type: to be compliant with get_chan_plot_info()
-    :param color_mode: to be compliant with get_chan_plot_info()
-    :return info of channel read from DB which is used for plotting
-    """
-    # Waveform plot's color is fixed to NULL in the database, so we do not need
-    # to get the valueColors columns from the database.
-    chan_info = execute_db_dict(
-        "SELECT param, plotType, height "
-        "FROM Parameters WHERE param='Seismic data'")
-    # The plotting API still requires that the key 'valueColors' is mapped to
-    # something, so we are setting it to None.
-    chan_info[0]['valueColors'] = None
-    chan_info[0]['label'] = get_chan_label(org_chan_id)
-    chan_info[0]['unit'] = ''
-    chan_info[0]['channel'] = 'SEISMIC'
-    chan_info[0]['convertFactor'] = 1
-    return chan_info[0]
+def get_convert_factor(chan_id, data_type):
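+    """
+    Get the convertFactor of the given channel and data type from the
+        Channels table.
+    :param chan_id: name of channel
+    :param data_type: type of data
+    :return: the convert factor if one is found, otherwise None
+    """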
+    sql = f"SELECT convertFactor FROM Channels WHERE channel='{chan_id}' " \
+          f"AND dataType='{data_type}'"
+    ret = execute_db(sql)
+    if ret:
+        return ret[0][0]
+    else:
+        return None
 
 
-def get_chan_label(chan_id):
+def get_seismic_chan_label(chan_id):
+    """
+    Get the label for chan_id. A data stream channel can use chan_id directly
+        as its label, while other seismic channels need their coordinate added
+        to chan_id to form the label.
+    :param chan_id: name of channel
+    :return label: label to put in front of the plot of the channel
+    """
     if chan_id.startswith("DS"):
         label = chan_id
     else:
diff --git a/sohstationviewer/database/soh.db b/sohstationviewer/database/soh.db
index 2b1a06758c874e7b77d26c3dbe9b4873b0010ed5..288518a08e4dca688498faf5752ed00a9e618d6d 100755
Binary files a/sohstationviewer/database/soh.db and b/sohstationviewer/database/soh.db differ
diff --git a/sohstationviewer/model/data_loader.py b/sohstationviewer/model/data_loader.py
index f6a5e0db3402f807af4152969147791e39ea8154..63320fe6bddae1c55b37962d350dec4173f1a32f 100644
--- a/sohstationviewer/model/data_loader.py
+++ b/sohstationviewer/model/data_loader.py
@@ -31,7 +31,7 @@ class DataLoaderWorker(QtCore.QObject):
                  req_soh_chans: List[str] = [], read_start: float = 0,
                  read_end: float = constants.HIGHEST_INT,
                  include_mp123: bool = False, include_mp456: bool = False,
-                 parent_thread=None):
+                 rt130_waveform_data_req: bool = False, parent_thread=None):
         super().__init__()
         self.data_type = data_type
         self.tracking_box = tracking_box
@@ -43,6 +43,7 @@ class DataLoaderWorker(QtCore.QObject):
         self.read_end = read_end
         self.include_mp123 = include_mp123
         self.include_mp456 = include_mp456
+        self.rt130_waveform_data_req = rt130_waveform_data_req
         self.parent_thread = parent_thread
         # display_tracking_info updates a QtWidget, which can only be done in
         # the read. Since self.run runs in a background thread, we need to use
@@ -71,6 +72,7 @@ class DataLoaderWorker(QtCore.QObject):
                 read_start=self.read_start, read_end=self.read_end,
                 include_mp123zne=self.include_mp123,
                 include_mp456uvw=self.include_mp456,
+                rt130_waveform_data_req=self.rt130_waveform_data_req,
                 creator_thread=self.parent_thread,
                 notification_signal=self.notification,
                 pause_signal=self.button_dialog
@@ -114,7 +116,8 @@ class DataLoader(QtCore.QObject):
                     req_soh_chans: List[str] = [], read_start: float = 0,
                     read_end: float = constants.HIGHEST_INT,
                     include_mp123: bool = False,
-                    include_mp456: bool = False):
+                    include_mp456: bool = False,
+                    rt130_waveform_data_req: bool = False):
         """
         Initialize the data loader. Construct the thread and worker and connect
         them together. Separated from the actual loading of the data to allow
@@ -150,6 +153,7 @@ class DataLoader(QtCore.QObject):
             read_end=read_end,
             include_mp123=include_mp123,
             include_mp456=include_mp456,
+            rt130_waveform_data_req=rt130_waveform_data_req,
             parent_thread=self.thread
         )
 
diff --git a/sohstationviewer/model/data_type_model.py b/sohstationviewer/model/data_type_model.py
index f89db45b8ada5db2f5c38a27ac191c87d0101306..2f8c6a5dd21e56315dc7a9c6a6527002d3f5fd75 100644
--- a/sohstationviewer/model/data_type_model.py
+++ b/sohstationviewer/model/data_type_model.py
@@ -43,6 +43,7 @@ class DataTypeModel():
                  read_end: Optional[float] = UTCDateTime().timestamp,
                  include_mp123zne: bool = False,
                  include_mp456uvw: bool = False,
+                 rt130_waveform_data_req: bool = False,
                  creator_thread: Optional[QtCore.QThread] = None,
                  notification_signal: Optional[QtCore.Signal] = None,
                  pause_signal: Optional[QtCore.Signal] = None,
@@ -60,6 +61,7 @@ class DataTypeModel():
         :param read_end: requested end time to read
         :param include_mp123zne: if mass position channels 1,2,3 are requested
         :param include_mp456uvw: if mass position channels 4,5,6 are requested
+        :param rt130_waveform_data_req: flag for RT130 to read waveform data
         :param creator_thread: the thread the current DataTypeModel instance is
             being created in. If None, the DataTypeModel instance is being
             created in the main thread
@@ -78,6 +80,7 @@ class DataTypeModel():
         self.read_end = read_end
         self.include_mp123zne = include_mp123zne
         self.include_mp456uvw = include_mp456uvw
+        self.rt130_waveform_data_req = rt130_waveform_data_req
         if creator_thread is None:
             err_msg = (
                 'A signal is not None while running in main thread'
@@ -356,7 +359,8 @@ class DataTypeModel():
                            list_of_rt130_paths,
                            req_wf_chans=[], req_soh_chans=[],
                            read_start=0, read_end=constants.HIGHEST_INT,
-                           include_mp123=False, include_mp456=False):
+                           include_mp123=False, include_mp456=False,
+                           rt130_waveform_data_req=False):
         """
         Create a DataTypeModel object, with the concrete class being based on
         data_type. Run on the same thread as its caller, and so will block the
@@ -382,7 +386,9 @@ class DataTypeModel():
                 data_type, tracking_box, folder, list_of_rt130_paths,
                 reqWFChans=req_wf_chans, reqSOHChans=req_soh_chans,
                 readStart=read_start, readEnd=read_end,
-                include_mp123=include_mp123, include_mp456=include_mp456)
+                include_mp123=include_mp123, include_mp456=include_mp456,
+                rt130_waveform_data_req=rt130_waveform_data_req
+            )
         else:
             from sohstationviewer.model.mseed.mseed import MSeed
             data_object = MSeed(
diff --git a/sohstationviewer/model/general_data/__init__.py b/sohstationviewer/model/general_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sohstationviewer/model/general_data/data_structures.MD b/sohstationviewer/model/general_data/data_structures.MD
new file mode 100644
index 0000000000000000000000000000000000000000..f9433a985e680bf75d1e1b22579eafc74d4e6b47
--- /dev/null
+++ b/sohstationviewer/model/general_data/data_structures.MD
@@ -0,0 +1,44 @@
+## Log data:
+Info from log channels, SOH messages, and text files, kept in a dict:
+{'TEXT': [str,], key: {chan_id: [str,],},}
+Here 'TEXT' is the chan_id given by sohview to text-only files, which have
+no station or channel associated with them.
+Note: log_data for an RT130 dataset has only one channel: SOH
+
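+A minimal illustrative example (station and channel names are made up):
+{'TEXT': ['<content of a text-only file>'],
+ 'STA01': {'SOH': ['<SOH message line>', '<SOH message line>']}}
+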
+## data_dict:
+{set_key: {
+    chan_id (str): {
+        'file_path' (str): path of file to keep track of file changes in MSeedReader
+        'chanID' (str): name of channel
+        'samplerate' (float): Sampling rate of the data
+        'startTmEpoch' (float): start epoch time of channel
+        'endTmEpoch' (float): end epoch time of channel
+        'size' (int): size of channel data
+        'tracesInfo': [{
+            'startTmEpoch': Start epoch time of the trace - float
+            'endTmEpoch': End epoch time of the trace - float
+            'times': time of channel's trace: List[float] in mseed_reader but changed to ndarray in combine_data()
+            'data': data of channel's trace: List[float] in mseed_reader but changed to ndarray in combine_data()
+            }]
+        'tps_data': list of lists of the mean of squares of every 5 minutes of data in each day
+        'times' (np.array): times that has been trimmed and down-sampled for plotting
+        'data' (np.array): data that has been trimmed and down-sampled for plotting
+        'chan_db_info' (dict): the plotting parameters got from database
+            for this channel - dict,
+        ax: axes to draw the channel in PlottingWidget
+        ax_wf (matplotlib.axes.Axes): axes to draw the channel in WaveformWidget
+    }
+}
+
+Both ax and ax_wf are used because mass position channels are plotted in both
+widgets, while SOH channels are plotted in PlottingWidget and waveform channels
+are plotted in WaveformWidget.
+tps_data is created in TimePowerSquaredWidget only and applies to waveform_data
+only.
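+
+For example (illustrative, with a made-up station 'STA01' and channel 'VM1'),
+the trimmed arrays used for plotting are reached as
+data_dict['STA01']['VM1']['times'] and data_dict['STA01']['VM1']['data'].
+With 5-minute windows, each day contributes one row of 288 values
+(24 hours x 12 windows) to 'tps_data'.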
+
+## tps_data: data that isn't separated into traces
+{set_key - str or (str, str): {
+    chan_id - str: {
+        times: np.array,
+        data: np.array,
+        }
+    }
+}
\ No newline at end of file
diff --git a/sohstationviewer/model/general_data/general_data.py b/sohstationviewer/model/general_data/general_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..405518eed4ef7f6c28bc951ae2d92614509967a3
--- /dev/null
+++ b/sohstationviewer/model/general_data/general_data.py
@@ -0,0 +1,404 @@
+from __future__ import annotations
+
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from typing import Optional, Union, List, Tuple, Dict
+
+from obspy import UTCDateTime
+
+from PySide2 import QtCore
+from PySide2 import QtWidgets
+
+from sohstationviewer.controller.util import display_tracking_info
+from sohstationviewer.view.plotting.gps_plot.gps_point import GPSPoint
+from sohstationviewer.view.util.enums import LogType
+from sohstationviewer.database.process_db import execute_db
+from sohstationviewer.model.general_data.general_data_helper import \
+    retrieve_data_time_from_data_dict, retrieve_gaps_from_data_dict, \
+    combine_data, sort_data, squash_gaps, apply_convert_factor_to_data_dict
+
+
+class ProcessingDataError(Exception):
+    def __init__(self, msg):
+        self.message = msg
+
+
+class ThreadStopped(Exception):
+    """
+    An exception that is raised when the user requests that the data loader
+    thread be stopped.
+    """
+    def __init__(self, *args, **kwargs):
+        self.args = (args, kwargs)
+
+
+class GeneralData():
+    def __init__(self, data_type,
+                 tracking_box: Optional[QtWidgets.QTextBrowser] = None,
+                 is_multiplex: bool = False, folder: str = '.',
+                 list_of_rt130_paths: List[Path] = [],
+                 req_wf_chans: Union[List[str], List[int]] = [],
+                 req_soh_chans: List[str] = [],
+                 gap_minimum: float = None,
+                 read_start: Optional[float] = UTCDateTime(0).timestamp,
+                 read_end: Optional[float] = UTCDateTime().timestamp,
+                 include_mp123zne: bool = False,
+                 include_mp456uvw: bool = False,
+                 rt130_waveform_data_req: bool = False,
+                 creator_thread: Optional[QtCore.QThread] = None,
+                 notification_signal: Optional[QtCore.Signal] = None,
+                 pause_signal: Optional[QtCore.Signal] = None,
+                 on_unittest: bool = False,
+                 *args, **kwargs):
+        """
+        CHANGED FROM data_type_model.DataTypeModel.__init__:
+            + add self.is_multiplex, self.on_unittest, self.gap_minimum,
+                self.keys
+            + remove docstring for self.log_data, self.soh_data,
+                self.mass_pos_data,
+                self.waveform_data, self.gaps_by_key_chan,
+                self.stream_header_by_key_chan
+
+        Super class for different data type to process data from data files
+
+        :param data_type: type of the object
+        :param tracking_box: widget to display tracking info
+        :param folder: path to the folder of data
+        :param list_of_rt130_paths: path to the folders of RT130 data
+        :param req_wf_chans: requested waveform channel list
+        :param req_soh_chans: requested SOH channel list
+        :param read_start: requested start time to read
+        :param read_end: requested end time to read
+        :param include_mp123zne: if mass position channels 1,2,3 are requested
+        :param include_mp456uvw: if mass position channels 4,5,6 are requested
+        :param rt130_waveform_data_req: flag for RT130 to read waveform data
+        :param creator_thread: the thread the current DataTypeModel instance is
+            being created in. If None, the DataTypeModel instance is being
+            created in the main thread
+        :param notification_signal: signal used to send notifications to the
+            main thread. Only not None when creator_thread is not None
+        :param pause_signal: signal used to notify the main thread that the
+            data loader is paused.
+        """
+        self.data_type = data_type
+        self.is_multiplex = is_multiplex
+        self.tracking_box = tracking_box
+        self.dir = folder
+        self.list_of_rt130_paths = list_of_rt130_paths
+        self.req_soh_chans = req_soh_chans
+        self.req_wf_chans = req_wf_chans
+        self.gap_minimum = gap_minimum
+        self.read_start = read_start
+        self.read_end = read_end
+        self.include_mp123zne = include_mp123zne
+        self.include_mp456uvw = include_mp456uvw
+        self.rt130_waveform_data_req = rt130_waveform_data_req
+        self.on_unittest = on_unittest
+
+        if creator_thread is None:
+            err_msg = (
+                'A signal is not None while running in main thread'
+            )
+            assert notification_signal is None, err_msg
+            assert pause_signal is None, err_msg
+            self.creator_thread = QtCore.QThread()
+        else:
+            self.creator_thread = creator_thread
+        self.notification_signal = notification_signal
+        self.pause_signal = pause_signal
+
+        """
+        processing_log: record the progress of processing
+        """
+        self.processing_log: List[Tuple[str, LogType]] = []
+        """
+        keys: list of all keys
+        """
+        self.keys = []
+
+        DataKey = Union[Tuple[str, str], str]
+
+        """
+        log_texts: dictionary of content of text files by filenames
+        """
+        self.log_texts: Dict[str, str] = {}
+        # Look for description in data_structures.MD
+        self.log_data = {'TEXT': []}  # noqa
+        self.waveform_data = {}
+        self.soh_data = {}
+        self.mass_pos_data = {}
+        """
+        data_time: time range of data sets:
+        """
+        self.data_time: Dict[DataKey, List[float]] = {}
+
+        """
+        The given data may include more than one data set, keyed by station_id
+        in mseed or (unit_id, exp_no) in reftek. The user is allowed to choose
+        which data set to be displayed.
+        selected_key: str - key of the data set to be displayed
+        """
+        self.selected_key: Optional[str] = None
+
+        """
+        gaps: gaps info in dict:
+        """
+        self.gaps: Dict[DataKey, List[List[float]]] = {}
+
+        """
+        tmp_dir: dir to keep memmap files. Deleted when object is deleted
+        """
+        self.tmp_dir_obj: TemporaryDirectory = TemporaryDirectory()
+        self.tmp_dir = self.tmp_dir_obj.name
+        if not on_unittest:
+            self.save_temp_data_folder_to_database()
+
+        self._pauser = QtCore.QSemaphore()
+        self.pause_response = None
+
+        self.gps_points: List[GPSPoint] = []
+
+    def read_folder(self, folder: str) -> Tuple[Dict]:
+        """
+        FROM data_type_model.DataTypeModel.read_folder
+        Read data from given folder
+        :param folder: path to folder to read data
+        :return: Tuple of different data dicts
+        """
+        pass
+
+    def select_key(self) -> Union[str, Tuple[str, str]]:
+        """
+        FROM data_type_model.DataTypeModel.select_key
+        Get the key for the data set to process.
+        :return: key of the selected data set
+        """
+        pass
+
+    def processing_data(self):
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.processing_data
+        """
+
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.read_folder(self.dir)
+
+        self.selected_key = self.select_key()
+
+        self.fill_empty_data()
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.finalize_data()
+
+    def finalize_data(self):
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.finalize_data
+        This function should be called after all folders finish reading to
+            + Fill an empty dict into stations that have no data added in
+                data_dicts
+            + Sort all data traces in time order
+            + Combine traces in data and split at gaps > gap_minimum
+            + Apply convert_factor, to avoid needing flags to prevent the
+                convert factor from being applied twice when plotting
+            + Check for requested channels that weren't found
+            + Retrieve gaps from data_dicts
+            + Retrieve data_time from data_dicts
+            + Replace data_time values that are invalid for plotting with
+                read_start, read_end.
+        """
+        if self.selected_key is None:
+            return
+
+        self.track_info("Finalizing...", LogType.INFO)
+
+        self.sort_all_data()
+        self.combine_all_data()
+        self.apply_convert_factor_to_data_dicts()
+        self.check_not_found_soh_channels()
+
+        self.retrieve_gaps_from_data_dicts()
+        self.retrieve_data_time_from_data_dicts()
+        for key in self.keys:
+            if key not in self.data_time.keys():
+                self.data_time[key] = [self.read_start, self.read_end]
+
+    def __del__(self):
+        # FROM data_type_model.Data_Type_Model.__del__
+        print("delete dataType Object")
+        try:
+            del self.tmp_dir_obj
+        except OSError as e:
+            self.track_info(
+                "Error deleting %s : %s" % (self.tmp_dir, e.strerror),
+                LogType.ERROR)
+            print("Error deleting %s : %s" % (self.tmp_dir, e.strerror))
+        print("finish deleting")
+
+    def track_info(self, text: str, type: LogType) -> None:
+        """
+        CHANGED FROM data_type_model.Data_Type_Model.track_info:
+
+        Display tracking info in tracking_box.
+        Add all errors/warnings to processing_log.
+        :param text: str - message to display
+        :param type: str - type of message (error/warning/info)
+        """
+        # display_tracking_info updates a QtWidget, which can only be done in
+        # the main thread. So, if we are running in a background thread
+        # (i.e. self.creator_thread is not None), we need to use signal slot
+        # mechanism to ensure that display_tracking_info is run in the main
+        # thread.
+        if self.notification_signal is None:
+            display_tracking_info(self.tracking_box, text, type)
+        else:
+            self.notification_signal.emit(self.tracking_box, text, type)
+        if type != LogType.INFO:
+            self.processing_log.append((text, type))
+
+    def pause(self) -> None:
+        """
+        FROM data_type_model.Data_Type_Model.pause
+        Pause the thread this DataTypeModel instance is in. Works by trying
+        to acquire a semaphore that is not available, which causes the thread
+        to block.
+
+        Note: due to how this is implemented, each call to pause will require
+        a corresponding call to unpause. Thus, it is inadvisable to call this
+        method more than once.
+
+        Caution: not safe to call in the main thread. Unless a background
+        thread releases the semaphore, the whole program will freeze.
+        """
+        self._pauser.acquire()
+
+    @QtCore.Slot()
+    def unpause(self):
+        """
+        FROM data_type_model.Data_Type_Model.unpause
+        Unpause the thread this DataTypeModel instance is in. Works by trying
+        to acquire a semaphore that is not available, which causes the thread
+        to block.
+
+        Caution: due to how this is implemented, if unpause is called before
+        pause, the thread will not be paused until another call to pause is
+        made. Also, like pause, each call to unpause must be matched by another
+        call to pause for everything to work.
+        """
+        self._pauser.release()
+
+    @QtCore.Slot()
+    def receive_pause_response(self, response: object):
+        """
+        FROM data_type_model.Data_Type_Model.receive_pause_response
+        Receive a response to a request made to another thread and unpause the
+        calling thread.
+
+        :param response: the response to the request made
+        :type response: object
+        """
+        self.pause_response = response
+        self.unpause()
+
+    @classmethod
+    def get_empty_instance(cls) -> GeneralData:
+        """
+        # FROM data_type_model.Data_Type_Model.get_empty_instance
+        Create an empty data object. Useful if a DataTypeModel instance is
+        needed, but it is undesirable to load a data set. Basically wraps
+        __new__().
+
+        :return: an empty data object
+        :rtype: DataTypeModel
+        """
+        return cls.__new__(cls)
+
+    def save_temp_data_folder_to_database(self):
+        # FROM
+        #    data_type_model.Data_Type_Model.save_temp_data_folder_to_database
+        execute_db(f'UPDATE PersistentData SET FieldValue="{self.tmp_dir}" '
+                   f'WHERE FieldName="tempDataDirectory"')
+
+    def check_not_found_soh_channels(self):
+        # FROM data_type_model.Data_Type_Model.check_not_found_soh_channels
+        all_chans_meet_req = (
+                list(self.soh_data[self.selected_key].keys()) +
+                list(self.mass_pos_data[self.selected_key].keys()) +
+                list(self.log_data[self.selected_key].keys()))
+
+        not_found_chans = [c for c in self.req_soh_chans
+                           if c not in all_chans_meet_req]
+        if not_found_chans != []:
+            msg = (f"No data found for the following channels: "
+                   f"{', '.join(not_found_chans)}")
+            self.processing_log.append((msg, LogType.WARNING))
+
+    def sort_all_data(self):
+        """
+        FROM data_type_model.Data_Type_Model.sort_all_data
+        Sort traces by startTmEpoch on all data: waveform_data, mass_pos_data,
+            soh_data.
+        Reftek's soh_data won't be sorted here. It has been sorted by time
+            because it is created from log data which is sorted in
+            prepare_soh_data_from_log_data()
+        """
+        sort_data(self.waveform_data[self.selected_key])
+        sort_data(self.mass_pos_data[self.selected_key])
+        try:
+            sort_data(self.soh_data[self.selected_key])
+        except KeyError:
+            # Reftek's SOH trace doesn't have startTmEpoch and
+            # actually soh_data consists of only one trace
+            pass
+
+    def combine_all_data(self):
+        combine_data(self.waveform_data[self.selected_key], self.gap_minimum)
+        combine_data(self.mass_pos_data[self.selected_key], self.gap_minimum)
+        try:
+            combine_data(self.soh_data[self.selected_key], self.gap_minimum)
+        except KeyError:
+            # Reftek's SOH trace doesn't have startTmEpoch and
+            # actually soh_data consists of only one trace
+            pass
+
+    def retrieve_gaps_from_data_dicts(self):
+        """
+        Get gaps from each data_dict, then squash all related gaps
+        """
+        retrieve_gaps_from_data_dict(self.soh_data, self.gaps)
+        retrieve_gaps_from_data_dict(self.mass_pos_data, self.gaps)
+        retrieve_gaps_from_data_dict(self.waveform_data, self.gaps)
+        for sta_id in self.gaps:
+            self.gaps[sta_id] = squash_gaps(self.gaps[sta_id])
+
+    def retrieve_data_time_from_data_dicts(self):
+        """
+        Go through each data_dict to update data_time to be
+            [min of startTimeEpoch, max of endTimeEpoch] for each station.
+        """
+        retrieve_data_time_from_data_dict(self.soh_data, self.data_time)
+        retrieve_data_time_from_data_dict(self.mass_pos_data, self.data_time)
+        retrieve_data_time_from_data_dict(self.waveform_data, self.data_time)
+
+    def fill_empty_data(self):
+        """
+        Fill an empty dict into stations that have no data added in the
+            data_dicts
+        """
+        for key in self.keys:
+            if key not in self.soh_data:
+                self.soh_data[key] = {}
+            if key not in self.waveform_data:
+                self.waveform_data[key] = {}
+            if key not in self.mass_pos_data:
+                self.mass_pos_data[key] = {}
+            if key not in self.log_data:
+                self.log_data[key] = {}
+
+    def apply_convert_factor_to_data_dicts(self):
+        """
+        Apply convert_factor here to avoid needing flags to prevent the
+            convert factor from being applied twice when plotting
+        """
+        apply_convert_factor_to_data_dict(self.soh_data, self.data_type)
+        apply_convert_factor_to_data_dict(self.mass_pos_data, self.data_type)
+        apply_convert_factor_to_data_dict(self.waveform_data, self.data_type)
diff --git a/sohstationviewer/model/general_data/general_data_helper.py b/sohstationviewer/model/general_data/general_data_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..d859dbb39f562fa72e42febaf0f40531a37395b2
--- /dev/null
+++ b/sohstationviewer/model/general_data/general_data_helper.py
@@ -0,0 +1,184 @@
+from typing import List, Dict, Optional
+import numpy as np
+from sohstationviewer.database.extract_data import get_convert_factor
+
+
+def _check_related_gaps(min1: float, max1: float,
+                        min2: float, max2: float,
+                        index: int, checked_indexes: List[int]):
+    """
+    FROM handling_data.check_related_gaps
+
+    Check if the given ranges overlap each other and, if so, add the gap's
+        index to checked_indexes.
+
+    :param min1: start of range 1
+    :param max1: end of range 1
+    :param min2: start of range 2
+    :param max2: end of range 2
+    :param index: index of gap being checked
+    :param checked_indexes: list of gaps that have been checked
+
+    :return: True if the two ranges overlap each other, False otherwise
+    """
+    if ((min1 <= min2 <= max1) or (min1 <= max2 <= max1)
+            or (min2 <= min1 <= max2) or (min2 <= max1 <= max2)):
+        # ranges [min1, max1] and [min2, max2] partly overlap each other
+        checked_indexes.append(index)
+        return True
+    else:
+        return False
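+
+# Example (illustrative): the ranges [1, 5] and [4, 8] overlap, so
+# _check_related_gaps(1, 5, 4, 8, 0, checked) returns True and appends 0 to
+# checked.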
+
+
+def squash_gaps(gaps: List[List[float]]) -> List[List[float]]:
+    """
+    FROM handling_data.squash_gaps
+
+    Compress gaps from different channels whose time ranges are related to
+    each other into a single gap with the outermost boundary (min start,
+    max end), or (max start, min end) in case of overlap.
+    :param gaps: [[float, float],], [[float, float],] -
+        list of gaps of multiple channels: [[start, end],], [[start, end],]
+    :return: squashed_gaps: [[float, float],] - all related gaps are squashed
+        extending to the outside start and end
+        [[min start, max end], [max start, min end]]
+
+    """
+    gaps = sorted(gaps, key=lambda x: x[0])
+    squashed_gaps = []
+    checked_indexes = []
+
+    for idx, g in enumerate(gaps):
+        if idx in checked_indexes:
+            continue
+        squashed_gaps.append(g)
+        checked_indexes.append(idx)
+        overlap = g[0] >= g[1]
+        for idx_, g_ in enumerate(gaps):
+            if idx_ in checked_indexes:
+                continue
+            if not overlap:
+                if g_[0] >= g_[1]:
+                    continue
+                if _check_related_gaps(g[0], g[1], g_[0], g_[1],
+                                       idx_, checked_indexes):
+                    squashed_gaps[-1][0] = min(g[0], g_[0])
+                    squashed_gaps[-1][1] = max(g[1], g_[1])
+                else:
+                    break
+            else:
+                if g_[0] < g_[1]:
+                    continue
+                if _check_related_gaps(g[1], g[0], g_[1], g_[0],
+                                       idx_, checked_indexes):
+                    squashed_gaps[-1][0] = max(g[0], g_[0])
+                    squashed_gaps[-1][1] = min(g[1], g_[1])
+
+    return squashed_gaps
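+
+# Example (illustrative): squash_gaps([[10., 15.], [12., 18.], [30., 40.]])
+# returns [[10., 18.], [30., 40.]] because the first two gaps overlap in time.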
+
+
+def sort_data(sta_data_dict: Dict) -> None:
+    """
+    FROM handling_data.sort_data
+
+    Sort data in 'traces_info' of each channel by 'startTmEpoch' order
+    :param sta_data_dict: data of a station
+    """
+    for chan_id in sta_data_dict:
+        traces_info = sta_data_dict[chan_id]['tracesInfo']
+        sta_data_dict[chan_id]['tracesInfo'] = sorted(
+            traces_info, key=lambda i: i['startTmEpoch'])
+
+
+def retrieve_data_time_from_data_dict(
+        data_dict: Dict, data_time: Dict[str, List[float]]) -> None:
+    """
+    Going through each channel in each station to get data_time for each
+        station which is [min of startTimeEpoch, max of endTimeEpoch] among
+        the station's channels.
+
+    :param data_dict: the given data_dict
+    :param data_time: time ranges by sta_id
+    """
+    for sta_id in data_dict.keys():
+        for c in data_dict[sta_id]:
+            dtime = [data_dict[sta_id][c]['startTmEpoch'],
+                     data_dict[sta_id][c]['endTmEpoch']]
+
+            if sta_id in data_time.keys():
+                data_time[sta_id][0] = min(data_time[sta_id][0], dtime[0])
+                data_time[sta_id][1] = max(data_time[sta_id][1], dtime[1])
+            else:
+                data_time[sta_id] = dtime
+
+
+def retrieve_gaps_from_data_dict(data_dict: Dict,
+                                 gaps: Dict[str, List[List[float]]]) -> None:
+    """
+    Create each station's gaps by adding all gaps from all channels
+
+    :param data_dict: the given data_dict
+    :param gaps: gaps list by key
+    """
+    for key in data_dict.keys():
+        if key not in gaps:
+            gaps[key] = []
+        for c in data_dict[key].keys():
+            cgaps = data_dict[key][c]['gaps']
+            if cgaps != []:
+                gaps[key] += cgaps
+
+
+def combine_data(station_data_dict: Dict, gap_minimum: Optional[float]) \
+        -> None:
+    """
+    Traverse the traces in each channel and add a gap to the gap list when
+        delta >= gap_minimum, where delta is the distance between
+        contiguous traces.
+    Combine sorted data using concatenate, which also changes data to ndarray,
+        and update startTmEpoch and endTmEpoch.
+
+    :param station_data_dict: dict of data of a station
+    :param gap_minimum: minimum length of gaps to be detected
+    """
+    for chan_id in station_data_dict:
+        channel = station_data_dict[chan_id]
+        traces_info = channel['tracesInfo']
+
+        for idx in range(len(traces_info) - 1):
+            curr_end_tm = traces_info[idx]['endTmEpoch']
+            next_start_tm = traces_info[idx+1]['startTmEpoch']
+            delta = abs(curr_end_tm - next_start_tm)
+            if gap_minimum is not None and delta >= gap_minimum:
+                # add gap
+                gap = [curr_end_tm, next_start_tm]
+                station_data_dict[chan_id]['gaps'].append(gap)
+        channel['startTmEpoch'] = min([tr['startTmEpoch']
+                                       for tr in traces_info])
+        channel['endTmEpoch'] = max([tr['endTmEpoch'] for tr in traces_info])
+
+        data_list = [tr['data'] for tr in traces_info]
+        times_list = [tr['times'] for tr in traces_info]
+        channel['tracesInfo'] = [{
+            'startTmEpoch': channel['startTmEpoch'],
+            'endTmEpoch': channel['endTmEpoch'],
+            'data': np.concatenate(data_list),
+            'times': np.concatenate(times_list)
+        }]
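+
+# Example (illustrative): two traces covering [0, 10] and [25, 30] with
+# gap_minimum=10 yield channel['gaps'] == [[10, 25]] and a single combined
+# trace spanning [0, 30].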
+
+
+def apply_convert_factor_to_data_dict(data_dict: Dict, data_type: str):
+    """
+    Traverse through traces in each channel to convert data according to
+        convert_factor got from DB
+
+    :param data_dict: dict of data
+    :param data_type: type of data
+    """
+    for key in data_dict:
+        for chan_id in data_dict[key]:
+            channel = data_dict[key][chan_id]
+            convert_factor = get_convert_factor(chan_id, data_type)
+            if convert_factor is not None and convert_factor != 1:
+                for tr in channel['tracesInfo']:
+                    tr['data'] = convert_factor * tr['data']
diff --git a/sohstationviewer/model/handling_data_reftek.py b/sohstationviewer/model/handling_data_reftek.py
index f7312c86dfe6b8977999403c6cb00ade3aca2dac..33016a02a1c241a9e222a73312059f6232534f16 100644
--- a/sohstationviewer/model/handling_data_reftek.py
+++ b/sohstationviewer/model/handling_data_reftek.py
@@ -47,7 +47,10 @@ def check_reftek_header(
         if chan_id not in cur_data_dict:
             cur_data_dict[chan_id] = {'tracesInfo': [],
                                       'samplerate': samplerate}
-
+        if trace.stats.npts == 0:
+            # skip this trace to prevent a bug when creating a memmap
+            # with no data
+            continue
         if (starttime <= trace.stats['starttime'] <= endtime or
                 starttime <= trace.stats['endtime'] <= endtime):
             avail_trace_indexes.append(index)
diff --git a/sohstationviewer/model/mseed_data/__init__.py b/sohstationviewer/model/mseed_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sohstationviewer/model/mseed_data/decode_mseed.py b/sohstationviewer/model/mseed_data/decode_mseed.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc4d85396b81ef2962d365cad33d8f0a8acb2b0e
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/decode_mseed.py
@@ -0,0 +1,35 @@
+def decode_int16(buffer, unpacker):
+    requested_bytes = buffer[:2]
+    return unpacker.unpack('h', requested_bytes)[0]
+
+
+def decode_int24(buffer, unpacker):
+    requested_bytes = buffer[:3]
+    byte_order = 'big' if unpacker.byte_order_char == '>' else 'little'
+    # We delegate to int.from_bytes() because it takes a lot of work to make
+    # struct.unpack() handle signed 24-bits integers.
+    # See: https://stackoverflow.com/questions/3783677/how-to-read-integers-from-a-file-that-are-24bit-and-little-endian-using-python  # noqa
+    return int.from_bytes(requested_bytes, byte_order)
+
+
+def decode_int32(buffer, unpacker):
+    requested_bytes = buffer[:4]
+    return unpacker.unpack('i', requested_bytes)[0]
+
+
+def decode_ieee_float(buffer, unpacker):
+    requested_bytes = buffer[:4]
+    return unpacker.unpack('f', requested_bytes)[0]
+
+
+def decode_ieee_double(buffer, unpacker):
+    requested_bytes = buffer[:8]
+    return unpacker.unpack('d', requested_bytes)[0]
+
+
+def decode_steim(buffer, unpacker):
+    # The first 4 bytes in a Steim frame are metadata for the record. Since we
+    # aren't decompressing the data, we skip them. The next 4 bytes contain
+    # the first data point of the MSEED data record, which is what we need.
+    requested_bytes = buffer[4:8]
+    return unpacker.unpack('i', requested_bytes)[0]
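+
+
+# A minimal illustrative sketch (not used by the reader itself) of how these
+# decoders could be dispatched by the standard SEED encoding code stored in
+# blockette 1000; the actual record reader may wire this up differently:
+#
+#     DECODERS = {
+#         1: decode_int16,
+#         2: decode_int24,
+#         3: decode_int32,
+#         4: decode_ieee_float,
+#         5: decode_ieee_double,
+#         10: decode_steim,  # Steim-1
+#         11: decode_steim,  # Steim-2
+#     }
+#     first_data_point = DECODERS[encoding](record_buffer, unpacker)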
diff --git a/sohstationviewer/model/mseed_data/mseed.py b/sohstationviewer/model/mseed_data/mseed.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd3641a8941a95a20c3aa51547caaf26e23ce61b
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed.py
@@ -0,0 +1,186 @@
+"""
+MSeed object to hold and process MSeed data
+"""
+import os
+import re
+import traceback
+from pathlib import Path
+from typing import Dict, Tuple, List
+
+from sohstationviewer.controller.util import validate_file, validate_dir
+from sohstationviewer.model.mseed_data.mseed_reader import MSeedReader
+from sohstationviewer.model.general_data.general_data import \
+    GeneralData, ThreadStopped, ProcessingDataError
+from sohstationviewer.view.util.enums import LogType
+
+from sohstationviewer.model.mseed_data.mseed_helper import \
+    retrieve_nets_from_data_dict, read_text
+from sohstationviewer.model.mseed_data.record_reader_helper import \
+    MSeedReadError
+
+
+class MSeed(GeneralData):
+    """
+    read and process mseed file into object with properties can be used to
+    plot SOH data, mass position data, waveform data and gaps
+    """
+
+    def __init__(self, *args, **kwargs):
+        # FROM mseed.mseed.MSEED.__init__
+        super().__init__(*args, **kwargs)
+        self.nets_by_sta: Dict[str, List[str]] = {}
+        self.processing_data()
+
+    def finalize_data(self):
+        """
+        CHANGED FROM mseed.mseed.MSEED.finalize_data
+
+        This function should be called after all folders finish reading to
+            + get nets_by_sta from stream_header_by_key_chan
+            + other tasks in super().finalize_data()
+
+        """
+        self.distribute_log_text_to_station()
+        self.retrieve_nets_from_data_dicts()
+        super().finalize_data()
+
+    def read_folder(self, folder: str) -> Tuple[Dict]:
+        """
+        CHANGED FROM mseed.mseed.MSEED.read_folder
+
+        Read data streams for soh, mass position and waveform.
+
+        :param folder: absolute path to data set folder
+        :return waveform_data: waveform data by station
+        :return soh_data: soh data by station
+        :return mass_pos_data: mass position data by station
+        :return gaps: gap list by station
+        :return nets_by_sta: netcodes list by station
+        """
+        if not os.path.isdir(folder):
+            raise ProcessingDataError(f"Path '{folder}' does not exist")
+        count = 0
+
+        total = sum([len(files) for _, _, files in os.walk(folder)])
+        invalid_blockettes = False
+        not_mseed_files = []
+        for path, sub_dirs, files in os.walk(folder):
+            try:
+                validate_dir(path)
+            except Exception as e:
+                # skip Information folder
+                self.track_info(str(e), LogType.WARNING)
+                continue
+            for file_name in files:
+
+                if self.creator_thread.isInterruptionRequested():
+                    raise ThreadStopped()
+
+                path2file = Path(path).joinpath(file_name)
+
+                if not validate_file(path2file, file_name):
+                    continue
+                count += 1
+                if count % 10 == 0:
+                    self.track_info(
+                        f'Read {count} files/{total}', LogType.INFO)
+                log_text = read_text(path2file)
+                if log_text is not None:
+                    self.log_texts[path2file] = log_text
+                    continue
+                reader = MSeedReader(
+                    path2file,
+                    read_start=self.read_start,
+                    read_end=self.read_end,
+                    is_multiplex=self.is_multiplex,
+                    req_soh_chans=self.req_soh_chans,
+                    req_wf_chans=self.req_wf_chans,
+                    include_mp123zne=self.include_mp123zne,
+                    include_mp456uvw=self.include_mp456uvw,
+                    soh_data=self.soh_data,
+                    mass_pos_data=self.mass_pos_data,
+                    waveform_data=self.waveform_data,
+                    log_data=self.log_data,
+                    gap_minimum=self.gap_minimum)
+                try:
+                    reader.read()
+                    invalid_blockettes = (invalid_blockettes
+                                          or reader.invalid_blockettes)
+                except MSeedReadError:
+                    not_mseed_files.append(file_name)
+                except Exception:
+                    fmt = traceback.format_exc()
+                    self.track_info(f"Skipped file {path2file}: can't be read "
+                                    f"due to error: {str(fmt)}",
+                                    LogType.WARNING)
+        if not_mseed_files:
+            self.track_info(
+                f"Not MSeed files: {not_mseed_files}", LogType.WARNING)
+        if invalid_blockettes:
+            # This check ensures the message is printed only once
+            print("We currently only handle blockettes 500, 1000,"
+                  " and 1001.")
+        self.track_info(
+            f'Skipped {total - count} invalid files.', LogType.INFO)
+
+    def retrieve_nets_from_data_dicts(self):
+        """
+        Go through the stations of each data_dict to get all network codes
+            found in all channels of a station and add them to nets_by_sta.
+        """
+        retrieve_nets_from_data_dict(self.soh_data, self.nets_by_sta)
+        retrieve_nets_from_data_dict(self.mass_pos_data, self.nets_by_sta)
+        retrieve_nets_from_data_dict(self.waveform_data, self.nets_by_sta)
+
+    def select_key(self) -> str:
+        """
+        CHANGED FROM mseed.mseed.MSEED:
+            + get sta_ids from self.keys
+            + add condition "if not on_unittest" to support unit tests for mseed
+
+        :return selected_sta_id: the selected station id from available
+            key of stream header.
+            + If there is only one station id, return it.
+            + If there is more than one, show all ids, let user choose one to
+                return.
+        """
+        self.keys = sorted(list(set(
+            list(self.soh_data.keys()) +
+            list(self.mass_pos_data.keys()) +
+            list(self.waveform_data.keys()) +
+            [k for k in list(self.log_data.keys()) if k != 'TEXT']
+        )))
+        sta_ids = self.keys
+
+        if len(sta_ids) == 0:
+            return
+
+        selected_sta_id = sta_ids[0]
+        if not self.on_unittest and len(sta_ids) > 1:
+            msg = ("There are more than one stations in the given data.\n"
+                   "Please select one to display")
+            self.pause_signal.emit(msg, sta_ids)
+            self.pause()
+            selected_sta_id = sta_ids[self.pause_response]
+
+        self.track_info(f'Select Station {selected_sta_id}', LogType.INFO)
+        return selected_sta_id
+
+    def distribute_log_text_to_station(self):
+        """
+        Loop through the paths to the text files to look for a station id
+            in each path.
+            + If a station id is found in the path, add the content to that
+                station id under channel 'TXT'.
+            + If no station id is found, add the content to the key 'TEXT',
+                which means the station for these texts is unknown.
+        """
+        for path2file in self.log_texts:
+            try:
+                # as_posix() always uses '/', so split on '/' and '.'
+                file_parts = re.split(r"/|\.", path2file.as_posix())
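+                # e.g. the hypothetical path '/data/STA1/log.txt' splits
+                # into ['', 'data', 'STA1', 'log', 'txt'], so a station id
+                # can be matched against the parts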
+                sta = [s for s in self.keys if s in file_parts][0]
+            except IndexError:
+                self.log_data['TEXT'].append(self.log_texts[path2file])
+                continue
+            if 'TXT' not in self.log_data[sta]:
+                self.log_data[sta]['TXT'] = []
+            self.log_data[sta]['TXT'].append(self.log_texts[path2file])
diff --git a/sohstationviewer/model/mseed_data/mseed_helper.py b/sohstationviewer/model/mseed_data/mseed_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..32d237e2ec5a3dc353458691ff4abe5381d33a46
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed_helper.py
@@ -0,0 +1,53 @@
+# Functions that change from handling_data's functions
+import os
+from pathlib import Path
+from typing import List, Dict, Optional
+
+
+def retrieve_nets_from_data_dict(data_dict: Dict,
+                                 nets_by_sta: Dict[str, List[str]]) -> None:
+    """
+    Retrieve nets by sta_id from the given data_dict.
+
+    :param data_dict: dict of data by station
+    :param nets_by_sta: nets list by sta_id
+    """
+    for sta_id in data_dict.keys():
+        if sta_id not in nets_by_sta:
+            nets_by_sta[sta_id] = set()
+        for c in data_dict[sta_id]:
+            nets_by_sta[sta_id].update(
+                data_dict[sta_id][c]['nets'])
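+
+# Illustrative call (assumed data_dict shape; see data_dict_structures.MD):
+#     nets_by_sta = {}
+#     retrieve_nets_from_data_dict(
+#         {'STA1': {'VKI': {'nets': {'XX'}}}}, nets_by_sta)
+#     # nets_by_sta is now {'STA1': {'XX'}}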
+
+
+def read_text(path2file: Path) -> Optional[str]:
+    """
+    CHANGED FROM handling_data.read_text:
+        + No need to check for a binary file explicitly, because a caught
+            UnicodeDecodeError means the file is binary
+
+    Read a text file and format its content to be stored as log text.
+        + Return None if the file isn't a text file
+        + Remove empty lines from the content
+    :param path2file: absolute path to the text file
+    :return: the formatted content of the file, or None if the file is
+        binary
+    """
+    try:
+        with open(path2file, 'r') as file:
+            content = file.read().strip()
+    except UnicodeDecodeError:
+        return
+
+    if content != '':
+        # skip empty lines
+        no_empty_line_list = [
+            line for line in content.splitlines() if line]
+        no_empty_line_content = os.linesep.join(no_empty_line_list)
+
+        log_text = "\n\n** STATE OF HEALTH: %s\n" % path2file.name
+        log_text += no_empty_line_content
+    else:
+        log_text = ''
+    return log_text
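+
+# Illustrative use (hypothetical path): read_text(Path('/data/STA1/log.txt'))
+# returns the file's content under a '** STATE OF HEALTH:' header with empty
+# lines removed, or None if the file is binary.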
diff --git a/sohstationviewer/model/mseed_data/mseed_reader.py b/sohstationviewer/model/mseed_data/mseed_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..24c153c2aec65978cd48c3282b6e76cc4f79db8b
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/mseed_reader.py
@@ -0,0 +1,283 @@
+from numbers import Real
+from typing import BinaryIO, Optional, Dict, Union, List
+from pathlib import Path
+from obspy import UTCDateTime
+
+from sohstationviewer.model.mseed_data.record_reader import RecordReader
+from sohstationviewer.model.mseed_data.record_reader_helper import \
+    RecordMetadata
+
+from sohstationviewer.model.handling_data import check_chan
+
+
+class MSeedReader:
+    def __init__(self, file_path: Path,
+                 read_start: float = UTCDateTime(0).timestamp,
+                 read_end: float = UTCDateTime().timestamp,
+                 is_multiplex: Optional[bool] = None,
+                 req_soh_chans: List[str] = [],
+                 req_wf_chans: List[str] = [],
+                 include_mp123zne: bool = False,
+                 include_mp456uvw: bool = False,
+                 soh_data: Dict = {},
+                 mass_pos_data: Dict = {},
+                 waveform_data: Dict = {},
+                 log_data: Dict[str, Union[List[str],
+                                           Dict[str, List[str]]]] = {},
+                 gap_minimum: Optional[float] = None
+                 ) -> None:
+        """
+        The purpose of this class is to read data from the given file and
+            add it to the given data dicts if it meets the requirements.
+        If data_type is not multiplex, all records of a file belong to the
+            same channel; the info found in the first record can be used to
+            decide whether to keep reading when that record doesn't meet
+            the channel requirements.
+        If data_type is multiplex, every record has to be examined.
+        All data_dicts' definitions can be found in data_dict_structures.MD
+
+        :param file_path: Absolute path to data file
+        :param read_start: time that is required to start reading
+        :param read_end: time that is required to end reading
+        :param is_multiplex: multiplex status of the file's data_type
+        :param req_soh_chans: requested SOH channel list
+        :param req_wf_chans: requested waveform channel list
+        :param include_mp123zne: if mass position channels 1,2,3 are requested
+        :param include_mp456uvw: if mass position channels 4,5,6 are requested
+        :param soh_data: data dict of SOH
+        :param mass_pos_data: data dict of mass position
+        :param waveform_data: data dict of waveform
+        :param log_data: data dict of log_data
+        :param gap_minimum: minimum length of gaps required to detect
+            from record
+        """
+        self.read_start = read_start
+        self.read_end = read_end
+        self.is_multiplex = is_multiplex
+        self.gap_minimum = gap_minimum
+        self.req_soh_chans = req_soh_chans
+        self.req_wf_chans = req_wf_chans
+        self.include_mp123zne = include_mp123zne
+        self.include_mp456uvw = include_mp456uvw
+        self.soh_data = soh_data
+        self.mass_pos_data = mass_pos_data
+        self.waveform_data = waveform_data
+        self.log_data = log_data
+        self.file_path = file_path
+        self.file: BinaryIO = open(file_path, 'rb')
+
+        self.invalid_blockettes = False
+
+    def get_data_dict(self, metadata: RecordMetadata) -> Optional[Dict]:
+        """
+        Find which data_dict to add data to based on req_soh_chans,
+            req_wf_chans, include_mp123zne, include_mp456uvw, and the
+            sample rate.
+        :param metadata: record's metadata
+        :return: the data_dict to add data to, or None if the channel isn't
+            requested
+        """
+        chan_id = metadata.channel
+        sample_rate = metadata.sample_rate
+        chan_type = check_chan(chan_id, self.req_soh_chans, self.req_wf_chans,
+                               self.include_mp123zne, self.include_mp456uvw)
+        if chan_type == 'SOH':
+            if self.req_soh_chans == [] and sample_rate > 1:
+                # If 'All chans' is selected for SOH, channel with samplerate>1
+                # will be skipped by default to improve performance.
+                # Note: If user intentionally added channels with samplerate>1
+                # using SOH Channel Preferences dialog, they are still read.
+                return
+            return self.soh_data
+        if chan_type == 'MP':
+            return self.mass_pos_data
+        if chan_type == 'WF':
+            return self.waveform_data
+
+    def check_time(self, record: RecordReader) -> bool:
+        """
+        Check that the record's time range overlaps the time range the
+            user requested to read
+
+        :param record: the record read from file
+        :return: True if the record's time satisfies the requirement
+        """
+        meta = record.record_metadata
+        if self.read_start > meta.end_time or self.read_end < meta.start_time:
+            return False
+        return True
+
+    def append_log(self, record: RecordReader) -> None:
+        """
+        Add all text info retrieved from record to log_data
+
+        :param record: the record read from file
+        """
+        logs = [record.ascii_text] + record.other_blockettes
+        log_str = "===========\n".join(logs)
+        if log_str == "":
+            return
+        meta = record.record_metadata
+        log_str = "\n\nSTATE OF HEALTH: " + \
+                  f"From:{meta.start_time}  To:{meta.end_time}\n" + log_str
+        sta_id = meta.station
+        chan_id = meta.channel
+        if sta_id not in self.log_data.keys():
+            self.log_data[sta_id] = {}
+        if chan_id not in self.log_data[sta_id]:
+            self.log_data[sta_id][chan_id] = []
+        self.log_data[sta_id][chan_id].append(log_str)
+
+    def append_data(self, data_dict: dict,
+                    record: RecordReader,
+                    data_point: Real) -> None:
+        """
+        Append data point to the given data_dict
+
+        :param data_dict: the data dict to which data from the record is
+            added
+        :param record: the record read from file
+        :param data_point: the first sample of the record frame
+        """
+        if data_point is None:
+            return
+        meta = record.record_metadata
+        sta_id = meta.station
+        if sta_id not in data_dict.keys():
+            data_dict[sta_id] = {}
+        station = data_dict[sta_id]
+        self.add_chan_data(station, meta, data_point)
+
+    def _add_new_trace(self, channel: Dict, metadata: RecordMetadata,
+                       data_point: Real) -> None:
+        """
+        Start a new trace in channel['tracesInfo'] with data_point as
+            the first data value and metadata's start_time as first time value
+
+        :param channel: dict of channel's info
+        :param metadata: record's meta data
+        :param data_point: the first sample of the record frame
+        """
+        channel['tracesInfo'].append({
+            'startTmEpoch': metadata.start_time,
+            'data': [data_point],
+            'times': [metadata.start_time]
+        })
+
+    def _append_trace(self, channel: Dict, metadata: RecordMetadata,
+                      data_point: Real) -> None:
+        """
+        Append data_point to the latest trace of channel['tracesInfo']
+
+        :param channel: dict of channel's info
+        :param metadata: record's meta data
+        :param data_point: the first sample of the record frame
+        """
+        channel['tracesInfo'][-1]['data'].append(data_point)
+        channel['tracesInfo'][-1]['times'].append(metadata.start_time)
+
+    def add_chan_data(self, station: dict, metadata: RecordMetadata,
+                      data_point: Real) -> None:
+        """
+        Add a new channel to the given station if it isn't there yet, then
+            append data_point to the channel's latest trace; a new trace is
+            started when the record comes from a different file.
+        If gap/overlap >= gap_minimum, add it to the gaps list.
+
+        :param station: dict of chan by id of a station
+        :param metadata: an Object of metadata from the record
+        :param data_point: the first sample of the record frame
+        """
+        meta = metadata
+        chan_id = metadata.channel
+        if chan_id not in station.keys():
+            station[chan_id] = {
+                'file_path': self.file_path,
+                'chanID': chan_id,
+                'samplerate': meta.sample_rate,
+                'startTmEpoch': meta.start_time,
+                'endTmEpoch': meta.end_time,
+                'size': meta.sample_count,
+                'nets': {meta.network},
+                'gaps': [],
+                'tracesInfo': [{
+                    'startTmEpoch': meta.start_time,
+                    'endTmEpoch': meta.end_time,
+                    'data': [data_point],
+                    'times': [meta.start_time]
+                }]
+            }
+        else:
+            channel = station[chan_id]
+            record_start_time = meta.start_time
+            previous_end_time = channel['endTmEpoch']
+            delta = abs(record_start_time - previous_end_time)
+            if channel['file_path'] != self.file_path:
+                # Start new trace for each file to reorder trace and
+                # combine traces again later
+                channel['file_path'] = self.file_path
+                self._add_new_trace(channel, meta, data_point)
+            else:
+                if self.gap_minimum is not None and delta >= self.gap_minimum:
+                    gap = [previous_end_time, record_start_time]
+                    channel['gaps'].append(gap)
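+                    # e.g. with gap_minimum = 0.1 s, a record starting
+                    # 0.5 s after the previous end time stores the gap
+                    # [previous_end_time, record_start_time]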
+                # appending data
+                self._append_trace(channel, meta, data_point)
+
+            channel['tracesInfo'][-1]['endTmEpoch'] = meta.end_time
+            # update channel's metadata
+            channel['endTmEpoch'] = meta.end_time
+            channel['size'] += meta.sample_count
+            channel['nets'].add(meta.network)
+
+    def get_ready_for_next_read(self, current_record_start: int,
+                                record: RecordReader):
+        """
+        Move the file's current position to the start of the next record.
+
+        :param current_record_start: the start position of the current record
+        :param record: the record currently being read
+        """
+        # MSEED stores the size of a data record as an exponent of a
+        # power of two, so we have to convert that to actual size before
+        # doing anything else.
+        record_length_exp = record.header_unpacker.unpack(
+            'B', record.blockette_1000.record_length
+        )[0]
+        record_size = 2 ** record_length_exp
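+        # e.g. a record length exponent of 12 gives a 4096-byte record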
+
+        self.file.seek(current_record_start)
+        self.file.seek(record_size, 1)
+
+    def read(self):
+        while True:
+            # We know that end of file is reached when read() returns an empty
+            # string.
+            is_eof = (self.file.read(1) == b'')
+            if is_eof:
+                break
+            # We need to move the file pointer back to its position after we
+            # do the end of file check. Otherwise, we would be off by one
+            # byte for all the reads afterward.
+            self.file.seek(-1, 1)
+
+            # We save the start of the current record so that after we are
+            # done reading the record, we can move back. This makes moving
+            # to the next record a lot easier, seeing as we can simply move
+            # the file pointer a distance the size of the current record.
+            current_record_start = self.file.tell()
+
+            record = RecordReader(self.file)
+            if record.invalid_blockettes:
+                self.invalid_blockettes = True
+            if not self.check_time(record):
+                self.get_ready_for_next_read(current_record_start, record)
+                continue
+            data_dict = self.get_data_dict(record.record_metadata)
+            if data_dict is None:
+                if self.is_multiplex:
+                    self.get_ready_for_next_read(current_record_start, record)
+                    continue
+                else:
+                    break
+            first_data_point = record.get_first_data_point()
+            self.append_data(data_dict, record, first_data_point)
+            self.append_log(record)
+
+            self.get_ready_for_next_read(current_record_start, record)
+        self.file.close()
diff --git a/sohstationviewer/model/mseed_data/record_reader.py b/sohstationviewer/model/mseed_data/record_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..40db266dd7377510ea1ff5c173d266ae22f55403
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/record_reader.py
@@ -0,0 +1,299 @@
+from numbers import Real
+from typing import BinaryIO, Optional, List
+
+
+from obspy import UTCDateTime
+
+from sohstationviewer.model.mseed_data.decode_mseed import (
+    decode_ieee_float, decode_ieee_double, decode_steim, decode_int16,
+    decode_int24, decode_int32,
+)
+from sohstationviewer.model.mseed_data.record_reader_helper import (
+    FixedHeader, Blockette1000, get_data_endianness, Unpacker,
+    get_record_metadata, get_header_endianness, RecordMetadata,
+    EncodingFormat,
+)
+
+
+class RecordReader:
+    """
+    This class reads one data record from an MSEED file.
+    """
+
+    def __init__(self, file: BinaryIO) -> None:
+        # The MSEED file object to read from. The file pointer needs to be
+        # located at the start of a data record.
+        self.file = file
+
+        self.fixed_header: Optional[FixedHeader] = None
+        self.blockette_1000: Optional[Blockette1000] = None
+        self.other_blockettes: List[str] = []
+        # Utility object that helps unpack byte strings in the header (the
+        # fixed header and the blockettes).
+        # Separate from the one for data in case the header has a different
+        # byte order.
+        # TODO: change blockettes to use this unpacker as well.
+        self.header_unpacker: Unpacker = Unpacker()
+
+        self.data_unpacker: Unpacker = Unpacker()
+        self.record_metadata: Optional[RecordMetadata] = None
+        self.invalid_blockettes = False
+        self.ascii_text: str = ''
+        self.read_header()
+
+    def read_header(self) -> None:
+        """
+        Read the header of the current data record. The header includes the
+        fixed portion, blockette 1000, and any blockettes that follow.
+        """
+        # Save the start of the record so that we can go back after reading the
+        # header.
+        record_start = self.file.tell()
+
+        self.read_fixed_header()
+        self.read_blockette_1000()
+
+        header_endianness = get_header_endianness(self.fixed_header)
+        if header_endianness == 'little':
+            self.header_unpacker.byte_order_char = '<'
+        else:
+            self.header_unpacker.byte_order_char = '>'
+
+        data_endianness = get_data_endianness(self.blockette_1000)
+        if data_endianness == 'little':
+            self.data_unpacker.byte_order_char = '<'
+        else:
+            self.data_unpacker.byte_order_char = '>'
+
+        self.record_metadata = get_record_metadata(self.fixed_header,
+                                                   self.header_unpacker)
+
+        self.apply_time_correction()
+        self.read_blockettes()
+        self.file.seek(record_start)
+
+    def read_fixed_header(self) -> None:
+        """
+        Read the fixed header of the current data record and store it in
+        self.fixed_header.
+        """
+        byte_counts = [6, 1, 1, 5, 2, 3, 2, 10, 2, 2, 2, 1, 1, 1, 1, 4, 2, 2]
+
+        fixed_header_sections_values = []
+        for byte_count in byte_counts:
+            fixed_header_sections_values.append(self.file.read(byte_count))
+        self.fixed_header = FixedHeader(*fixed_header_sections_values)
+
+    def read_blockette_500(self) -> None:
+        """
+        Read blockette 500 and format its content. The result is stored for
+        future uses. Assumes that the file pointer is at the start of the
+        blockette.
+        """
+        blockette_content = {}
+        # Skip the first four bytes because they contain meta-information about
+        # the blockettes.
+        self.file.read(4)
+
+        vco_correction = self.file.read(4)
+        blockette_content['VCO correction'] = self.header_unpacker.unpack(
+            'f', vco_correction
+        )[0]
+
+        exception_time_bytes = self.file.read(10)
+        exception_time_tuple = self.header_unpacker.unpack(
+            'HHBBBBH', exception_time_bytes)
+        exception_time = UTCDateTime(year=exception_time_tuple[0],
+                                     julday=exception_time_tuple[1],
+                                     hour=exception_time_tuple[2],
+                                     minute=exception_time_tuple[3],
+                                     second=exception_time_tuple[4],
+                                     microsecond=exception_time_tuple[6] * 100)
+        blockette_content['Time of exception'] = exception_time.strftime(
+            '%Y:%j:%H:%M:%S:%f'
+        )
+
+        microsecond = self.file.read(1)
+        microsecond = self.header_unpacker.unpack('B', microsecond)[0]
+        start_time_adjustment = microsecond / (10 ** 6)
+        self.record_metadata.start_time += start_time_adjustment
+        blockette_content['Micro sec'] = microsecond
+
+        reception_quality = self.file.read(1)
+        blockette_content['Reception Quality'] = self.header_unpacker.unpack(
+            'B', reception_quality
+        )[0]
+
+        exception_count = self.file.read(4)
+        blockette_content['Exception Count'] = self.header_unpacker.unpack(
+            'I', exception_count
+        )[0]
+
+        exception_type = self.file.read(16)
+        blockette_content['Exception Type'] = self.header_unpacker.unpack(
+            '16s', exception_type
+        )[0].decode('utf-8').strip()
+
+        clock_model = self.file.read(32)
+        blockette_content['Clock Model'] = self.header_unpacker.unpack(
+            '32s', clock_model
+        )[0].decode('utf-8').strip()
+
+        clock_status = self.file.read(128)
+        blockette_content['Clock Status'] = self.header_unpacker.unpack(
+            '128s', clock_status
+        )[0].decode('utf-8').strip()
+
+        formatted_blockette = '\n'.join([f'{key}: {value}'
+                                         for key, value
+                                         in blockette_content.items()])
+        self.other_blockettes.append(formatted_blockette)
+
+    def read_blockette_1000(self) -> None:
+        """
+        Read blockette 1000 of the current data record and store it in
+        self.blockette_1000.
+        """
+        blockette_1000_section_lengths = [2, 2, 1, 1, 1, 1]
+        blockette_1000_values = []
+        for section_length in blockette_1000_section_lengths:
+            blockette_1000_values.append(self.file.read(section_length))
+
+        self.blockette_1000 = Blockette1000(*blockette_1000_values)
+
+    def read_blockette_1001(self) -> None:
+        """
+        Read blockette 1001. The only valuable thing in this blockette is the
+        more precise start time. Assumes that the file pointer is at the start
+        of the blockette.
+        """
+        self.file.read(5)
+        start_time_microsecond = self.file.read(1)
+        start_time_microsecond = self.header_unpacker.unpack(
+            'b', start_time_microsecond
+        )[0]
+        # Convert from microsecond to second so that UTCDateTime can handle it.
+        start_time_microsecond /= (10 ** 6)
+        self.record_metadata.start_time += start_time_microsecond
+        self.file.read(2)
+
+    def read_blockette_2000(self) -> None:
+        # Placeholder: blockette 2000 is currently flagged as invalid in
+        # read_blockettes() and is not parsed.
+        pass
+
+    def apply_time_correction(self) -> None:
+        """
+        Apply the time correction found in the fixed header to the start time.
+        """
+        # format() is used here instead of bin() because we need to pad the
+        # resulting bit string with 0 to the left.
+        activity_flags = format(
+            self.header_unpacker.unpack(
+                'B', self.fixed_header.activity_flags)[0],
+            '0>8b'
+        )
+        is_time_correction_applied = int(activity_flags[1])
+        if is_time_correction_applied:
+            return
+
+        time_correction = self.header_unpacker.unpack(
+            'L', self.fixed_header.time_correction
+        )[0]
+        # We need to convert the unit from 0.0001 seconds to seconds
+        time_correction *= 0.0001
+        self.record_metadata.start_time += time_correction
+
+    def read_blockettes(self) -> None:
+        """
+        Read all the blockettes in the current data record aside from
+        blockette 1000, which has been read previously. Currently, only
+        blockettes 500 and 1001 are handled; any other blockette type is
+        flagged as invalid.
+        """
+        blockette_count = self.header_unpacker.unpack(
+            'B', self.fixed_header.blockette_count
+        )[0]
+        for i in range(1, blockette_count):
+            # All blockettes store their type in the first two bytes, so we
+            # read that to determine what to do
+            next_blockette_type = self.file.read(2)
+            # Move file pointer back to start of blockette
+            self.file.seek(-2, 1)
+            next_blockette_type = self.header_unpacker.unpack(
+                'H', next_blockette_type
+            )[0]
+            if next_blockette_type not in (500, 1000, 1001):
+                self.invalid_blockettes = True
+                continue
+            if next_blockette_type == 500:
+                self.read_blockette_500()
+            elif next_blockette_type == 1001:
+                self.read_blockette_1001()
+
+    def decode_ascii_data(self, data_start: int):
+        """
+        Read ASCII string from data portion of the record but remove the
+            padding
+
+        :param data_start: byte number where the data starts
+        """
+        # We want to read everything in the record if the encoding is
+        # ASCII.
+        record_length_exp = self.header_unpacker.unpack(
+            'B', self.blockette_1000.record_length
+        )[0]
+        record_size = 2 ** record_length_exp
+        data_block = self.file.read(record_size - data_start)
+        single_padding = b'\x00'.decode()
+        try:
+            self.ascii_text = data_block.decode().rstrip(single_padding)
+        except UnicodeDecodeError:
+            pass
+
+    def get_first_data_point(self) -> Optional[Real]:
+        """
+        Get the first data point of the current data record.
+        :return: the first data point of the current data record, whose type is
+            determined based on the encoding type stored in blockette 1000.
+        """
+        record_start = self.file.tell()
+        data_start = self.header_unpacker.unpack(
+            'H', self.fixed_header.data_offset
+        )[0]
+        # The data start byte is defined as an offset from the start of the
+        # data record. Seeing as we should be at the start of the data record
+        # by seeking there at the end of every major step, we can simply seek
+        # to the start of the data.
+        self.file.seek(data_start, 1)
+
+        encoding_format = self.blockette_1000.encoding_format
+        encoding_format = self.header_unpacker.unpack('b', encoding_format)[0]
+        encoding_format = EncodingFormat(encoding_format)
+
+        if encoding_format == EncodingFormat.ASCII:
+            self.decode_ascii_data(data_start)
+            first_data_point = None
+        else:
+
+            # Currently, we are extracting only the first data point in each
+            # record. The smallest possible amount of bytes we can extract
+            # while guaranteeing that we get the first data point in the
+            # record is 8, with Steim encodings and IEEE double precision
+            # float needing to use the whole buffer.
+            buffer = self.file.read(8)
+            encoding_to_decoder = {
+                EncodingFormat.INT_16_BIT: decode_int16,
+                EncodingFormat.INT_24_BIT: decode_int24,
+                EncodingFormat.INT_32_BIT: decode_int32,
+                EncodingFormat.IEEE_FLOAT_32_BIT: decode_ieee_float,
+                EncodingFormat.IEEE_FLOAT_64_BIT: decode_ieee_double,
+                EncodingFormat.STEIM_1: decode_steim,
+                EncodingFormat.STEIM_2: decode_steim,
+            }
+            first_data_point = encoding_to_decoder[encoding_format](
+                buffer, self.data_unpacker
+            )
+        # Seek back to the start of the record so we can call this method again
+        # if needed.
+        self.file.seek(record_start)
+        return first_data_point
diff --git a/sohstationviewer/model/mseed_data/record_reader_helper.py b/sohstationviewer/model/mseed_data/record_reader_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9fa6ace53751c1487fd34ed678fda5cec38c862
--- /dev/null
+++ b/sohstationviewer/model/mseed_data/record_reader_helper.py
@@ -0,0 +1,239 @@
+from dataclasses import dataclass
+import struct
+from enum import Enum
+
+from obspy import UTCDateTime
+
+
+class MSeedReadError(Exception):
+    def __init__(self, msg):
+        super().__init__(msg)
+        self.message = msg
+
+
+class Unpacker:
+    """
+    A wrapper around struct.unpack() to unpack binary data without having to
+    explicitly define the byte order in the format string. Also restrict the
+    type of format to str and buffer to bytes.
+    """
+    def __init__(self, byte_order_char: str = '') -> None:
+        self.byte_order_char = byte_order_char
+
+    def unpack(self, format: str, buffer: bytes):
+        """
+        Unpack a string of bytes into a tuple of values based on the given
+        format
+        :param format: the format used to unpack the byte string
+        :param buffer: the byte string
+        :return: a tuple containing the unpacked values.
+        """
+        default_byte_order_chars = ('@', '=', '>', '<', '!')
+        if format.startswith(default_byte_order_chars):
+            # Swap the explicit byte-order character for the stored one.
+            format = self.byte_order_char + format[1:]
+        else:
+            format = self.byte_order_char + format
+        return struct.unpack(format, buffer)
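+
+# A minimal usage sketch (illustrative, not part of this change): the same
+# format string works for either byte order by setting byte_order_char.
+#     unpacker = Unpacker('>')
+#     unpacker.unpack('H', b'\x00\x01')   # -> (1,)
+#     unpacker.byte_order_char = '<'
+#     unpacker.unpack('H', b'\x00\x01')   # -> (256,)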
+
+
+@dataclass
+class FixedHeader:
+    """
+    The fixed portion of the header of a data record. All fields are stored as
+    bytes to minimize time wasted on decoding unused values.
+    """
+    sequence_number: bytes
+    data_header_quality_indicator: bytes
+    reserved: bytes
+    station: bytes
+    location: bytes
+    channel: bytes
+    net_code: bytes
+    record_start_time: bytes
+    sample_count: bytes
+    sample_rate_factor: bytes
+    sample_rate_multiplier: bytes
+    activity_flags: bytes
+    io_and_clock_flags: bytes
+    data_quality_flags: bytes
+    blockette_count: bytes
+    time_correction: bytes
+    data_offset: bytes
+    first_blockette_offset: bytes
+
+
+@dataclass
+class Blockette1000:
+    """
+    Blockette 1000 of a data record. All fields are stored as bytes to
+    minimize time wasted on decoding unused values.
+    """
+    blockette_type: bytes
+    next_blockette_offset: bytes
+    encoding_format: bytes
+    byte_order: bytes
+    record_length: bytes
+    reserved_byte: bytes
+
+
+@dataclass
+class RecordMetadata:
+    """
+    The metadata of a data record.
+    """
+    station: str
+    location: str
+    channel: str
+    network: str
+    start_time: float
+    end_time: float
+    sample_count: int
+    sample_rate: float
+
+
+class EncodingFormat(Enum):
+    ASCII = 0
+    INT_16_BIT = 1
+    INT_24_BIT = 2
+    INT_32_BIT = 3
+    IEEE_FLOAT_32_BIT = 4
+    IEEE_FLOAT_64_BIT = 5
+    STEIM_1 = 10
+    STEIM_2 = 11
+
+
+def check_time_from_time_string(endian: str, time_string: bytes) -> bool:
+    """
+    Check whether the record start time decodes to a sane value with the
+    given byte order.
+
+    :param endian: the byte order character to try, '>' or '<'
+    :param time_string: the raw record start time from the fixed header
+    :return: True if both the year and the day of year are in a sane range
+    """
+    try:
+        record_start_time_tuple = struct.unpack(f'{endian}hhbbbbh',
+                                                time_string)
+    except struct.error:
+        raise MSeedReadError("Not an MSeed file.")
+    # libmseed uses 1900 to 2100 as the sane year range. We follow their
+    # example here.
+    year_is_good = (1900 <= record_start_time_tuple[0] <= 2100)
+    # The upper range is 366 to account for leap years.
+    day_is_good = (1 <= record_start_time_tuple[1] <= 366)
+    return year_is_good and day_is_good
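+
+# Illustrative check (assumed values): struct.pack('>hhbbbbh', 2023, 100,
+# 0, 0, 0, 0, 0) passes with endian '>' (year 2023, day 100) but fails
+# with '<', where the year bytes decode to -6393.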
+
+
+def get_header_endianness(header: FixedHeader):
+    """
+    Determine the endianness of the fixed header of a data record. Works by
+    checking if the decoded record start time has a sane value if the header
+    is assumed to be big-endian.
+
+    WARNING: This check fails on three dates: 2056-1, 2056-256, and 2056-257.
+    2056 is a palindrome when encoded as a pair of octets, so endianness does
+    not affect it. Similarly, 257 is also 2-octet-palindromic. Meanwhile, 1 and
+    256 are counterparts when encoded as pairs of octets. Because they are both
+    valid values for day of year, it is impossible to make a conclusion about
+    endianness based on day of year if it is either 1 or 256 in big-endian.
+    These facts combined means that we cannot determine the endianness of the
+    header whose record starts on the aforementioned dates. The way this
+    function was written, the endianness will be recorded as big in these
+    cases. This problem is also recorded in libmseed.
+
+    :param header: the fixed header of the data record
+    :return: either the string 'big' or 'little', depending on the detected
+        endianness of the header
+    """
+    record_start_time_string = header.record_start_time
+    good_time = check_time_from_time_string('>', record_start_time_string)
+    if good_time:
+        endianness = 'big'
+    else:
+        good_time = check_time_from_time_string('<', record_start_time_string)
+        if good_time:
+            endianness = 'little'
+        else:
+            raise MSeedReadError("Not an MSeed file.")
+    return endianness
+
+
+def get_data_endianness(blockette_1000: Blockette1000):
+    """
+    Get endianness of a data record by examining blockette 1000.
+
+    :param blockette_1000: the blockette 1000 of the data record
+    :return: either 'big' or 'little', depending on the byte order recorded
+        in blockette 1000
+    """
+    # The byte order is only one byte so using big or little endian does not
+    # matter.
+    blockette_1000_endianness = int.from_bytes(
+        blockette_1000.byte_order, 'big'
+    )
+    if blockette_1000_endianness:
+        return 'big'
+    else:
+        return 'little'
+
+
+def calculate_sample_rate(factor: int, multiplier: int) -> float:
+    """
+    Calculate the sample rate using the sample rate factor and multiplier. This
+    algorithm is described around the start of Chapter 8 in the SEED manual.
+
+    :param factor: the sample rate factor
+    :param multiplier: the sample rate multiplier
+    :return: the nominal sample rate
+    """
+    sample_rate = 0
+    if factor == 0:
+        sample_rate = 0
+    elif factor > 0 and multiplier > 0:
+        sample_rate = factor * multiplier
+    elif factor > 0 and multiplier < 0:
+        sample_rate = -(factor / multiplier)
+    elif factor < 0 and multiplier > 0:
+        sample_rate = -(multiplier / factor)
+    elif factor < 0 and multiplier < 0:
+        sample_rate = 1 / (factor * multiplier)
+    return sample_rate
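+
+# Illustrative values: a factor of 40 with multiplier 1 gives 40.0 Hz; a
+# factor of -10 with multiplier 1 gives 0.1 Hz (one sample every 10 s).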
+
+
+def get_record_metadata(header: FixedHeader, header_unpacker: Unpacker):
+    """
+    Extract and parse the metadata of a data record from its fixed header.
+
+    :param header: the fixed header of the data record
+    :param header_unpacker: the unpacker corresponding to the data record;
+        needed so that the correct byte order can be used
+    :return: the extracted record metadata
+    """
+    try:
+        station = header.station.decode('utf-8').rstrip()
+        location = header.location.decode('utf-8').rstrip()
+        channel = header.channel.decode('utf-8').rstrip()
+        network = header.net_code.decode('utf-8').rstrip()
+
+        record_start_time_string = header.record_start_time
+        record_start_time_tuple = header_unpacker.unpack(
+            'HHBBBBH', record_start_time_string)
+        record_start_time = UTCDateTime(year=record_start_time_tuple[0],
+                                        julday=record_start_time_tuple[1],
+                                        hour=record_start_time_tuple[2],
+                                        minute=record_start_time_tuple[3],
+                                        second=record_start_time_tuple[4],
+                                        microsecond=record_start_time_tuple[
+                                                        6] * 100).timestamp
+
+        sample_count = header_unpacker.unpack('H', header.sample_count)[0]
+
+        sample_rate_factor = header_unpacker.unpack(
+            'h', header.sample_rate_factor
+        )[0]
+        sample_rate_multiplier = header_unpacker.unpack(
+            'h', header.sample_rate_multiplier
+        )[0]
+    except ValueError:
+        raise MSeedReadError("Not an MSeed file.")
+    sample_rate = calculate_sample_rate(sample_rate_factor,
+                                        sample_rate_multiplier)
+    if sample_rate == 0:
+        record_end_time = record_start_time
+    else:
+        record_time_taken = sample_count / sample_rate
+        record_end_time = record_start_time + record_time_taken
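+        # e.g. 512 samples at 40.0 Hz span 12.8 s, so the end time is
+        # record_start_time + 12.8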
+
+    return RecordMetadata(station, location, channel, network,
+                          record_start_time, record_end_time,
+                          sample_count, sample_rate)
diff --git a/sohstationviewer/model/reftek/log_info.py b/sohstationviewer/model/reftek/log_info.py
index c242f822a38809a42128e9ff43a257bfe9142dd0..2ce79fef548c265d8b4c371fe938e4a3b6379c33 100644
--- a/sohstationviewer/model/reftek/log_info.py
+++ b/sohstationviewer/model/reftek/log_info.py
@@ -68,21 +68,23 @@ class LogInfo():
         #       TT =2001:253:15:13:59:768 NS: 144005 SPS: 40 ETO: 0
         parts = line.split()
         data_stream = int(parts[5])
-        if data_stream not in self.parent.req_data_streams:
-            return (0, 0)
-        try:
-            if parts[8].startswith("00:000"):
-                if parts[11].startswith("00:000"):
-                    return -1, 0
-                epoch, _ = get_time_6(parts[11])
+        if (self.req_data_streams == ['*'] or
+                data_stream in self.req_data_streams):
+            try:
+                if parts[8].startswith("00:000"):
+                    if parts[11].startswith("00:000"):
+                        return -1, 0
+                    epoch, _ = get_time_6(parts[11])
+                else:
+                    epoch, _ = get_time_6(parts[8])
+            except AttributeError:
+                self.parent.processing_log.append((line, LogType.ERROR))
+                return False
+            if epoch > 0:
+                self.min_epoch = min(epoch, self.min_epoch)
+                self.max_epoch = max(epoch, self.max_epoch)
             else:
-                epoch, _ = get_time_6(parts[8])
-        except AttributeError:
-            self.parent.processing_log.append(line, LogType.ERROR)
-            return False
-        if epoch > 0:
-            self.min_epoch = min(epoch, self.min_epoch)
-            self.max_epoch = max(epoch, self.max_epoch)
+                return 0, 0
         else:
             return 0, 0
         return epoch, data_stream
@@ -203,10 +205,10 @@ class LogInfo():
             return False
         return epoch, disk, val
 
-    def read_dps_clock_diff(self, line: str
+    def read_dsp_clock_diff(self, line: str
                             ) -> Union[bool, Tuple[float, float]]:
         """
-        Read DPS clock difference
+        Read DSP clock difference
         :param line: str - a line of evt message
         :return epoch: float - time when info is recorded
         :return total: float - total difference time in milliseconds
@@ -347,18 +349,17 @@ class LogInfo():
             line = line.upper()
             if 'FST' in line:
                 ret = self.read_evt(line)
-                if ret:
+                if ret is not False:
                     epoch, data_stream = ret
-                    if data_stream in self.req_data_streams:
-                        if epoch > 0:
-                            chan_name = 'Event DS%s' % data_stream
-                            self.add_chan_info(chan_name, epoch, 1, idx)
-                        elif epoch == 0:
-                            self.parent.processing_log.append(
-                                line, LogType.WARNING)
-                        else:
-                            self.parent.processing_log.append(
-                                line, LogType.ERROR)
+                    if epoch > 0:
+                        chan_name = 'Event DS%s' % data_stream
+                        self.add_chan_info(chan_name, epoch, 1, idx)
+                    elif epoch == 0:
+                        self.parent.processing_log.append(
+                            (line, LogType.WARNING))
+                    else:
+                        self.parent.processing_log.append(
+                            (line, LogType.ERROR))
 
             elif line.startswith("STATE OF HEALTH"):
                 epoch = self.read_sh_header(line)
@@ -415,11 +416,11 @@ class LogInfo():
                 if epoch:
                     self.add_chan_info('Jerks/DSP Sets', epoch, 0, idx)
 
-            elif "DPS clock diff" in line:
-                ret = self.read_dps_clock_diff()
+            elif "DSP CLOCK DIFFERENCE" in line:
+                ret = self.read_dsp_clock_diff(line)
                 if ret:
                     epoch, total = ret
-                    self.add_chan_info('DPS Clock Diff', epoch, total, idx)
+                    self.add_chan_info('DSP Clock Diff', epoch, total, idx)
 
             elif "ACQUISITION STARTED" in line:
                 epoch = self.simple_read(line)[1]
@@ -457,7 +458,7 @@ class LogInfo():
             elif "EXTERNAL CLOCK IS UNLOCKED" in line:
                 epoch = self.simple_read(line)[1]
                 if epoch:
-                    self.add_chan_info('GPS Lk/Unlk', epoch, 0, idx)
+                    self.add_chan_info('GPS Lk/Unlk', epoch, -1, idx)
             elif "EXTERNAL CLOCK IS LOCKED" in line:
                 epoch = self.simple_read(line)[1]
                 if epoch:
diff --git a/sohstationviewer/model/reftek/reftek.py b/sohstationviewer/model/reftek/reftek.py
index f7fa193d4ca40066cef2afd711a233ac5b5b99fd..083cfe794949fabbf5bf91fb3c8460ac9b6a8204 100755
--- a/sohstationviewer/model/reftek/reftek.py
+++ b/sohstationviewer/model/reftek/reftek.py
@@ -4,7 +4,7 @@ RT130 object to hold and process RefTek data
 import os
 from pathlib import Path
 from typing import Tuple, List, Union
-
+import traceback
 import numpy as np
 
 from sohstationviewer.model.reftek.from_rt2ms import (
@@ -35,6 +35,11 @@ class RT130(DataTypeModel):
         """
         self.req_data_streams: List[Union[int, str]] = self.req_wf_chans
         """
+        rt130_waveform_data_req: flag indicating whether waveform data
+            should be read for the requested data streams
+        """
+        self.rt130_waveform_data_req: bool = kwarg['rt130_waveform_data_req']
+        """
         found_data_streams: list of data streams found to help inform user
             why the selected data streams don't show up
         """
@@ -89,8 +94,15 @@ class RT130(DataTypeModel):
                     path2file = Path(path).joinpath(file_name)
                     if not validate_file(path2file, file_name):
                         continue
-                    if not self.read_reftek_130(path2file):
-                        read_text(path2file, file_name, self.log_data['TEXT'])
+                    try:
+                        if not self.read_reftek_130(path2file):
+                            read_text(path2file, self.log_data['TEXT'])
+                    except Exception:
+                        fmt = traceback.format_exc()
+                        self.track_info(f"Skip file {path2file} can't be read "
+                                        f"due to error: {str(fmt)}",
+                                        LogType.WARNING)
+
                     count += 1
                     if count % 50 == 0:
                         self.track_info(
@@ -133,7 +145,13 @@ class RT130(DataTypeModel):
 
         :param path2file: absolute path to file
         """
-        rt130 = core.Reftek130.from_file(path2file)
+        try:
+            rt130 = core.Reftek130.from_file(path2file)
+        except Exception:
+            fmt = traceback.format_exc()
+            self.track_info(f"Skip file {path2file} can't be read "
+                            f"due to error: {str(fmt)}", LogType.WARNING)
+            return
         unique, counts = np.unique(rt130._data["packet_type"],
                                    return_counts=True)
         nbr_packet_type = dict(zip(unique, counts))
@@ -189,7 +207,9 @@ class RT130(DataTypeModel):
         cur_key = (rt130._data[0]['unit_id'].decode(),
                    f"{rt130._data[0]['experiment_number']}")
         self.populate_cur_key_for_all_data(cur_key)
-        self.get_ehet_in_log_data(rt130, cur_key)
+        if data_stream != 9:
+            # don't get event info for mass position
+            self.get_ehet_in_log_data(rt130, cur_key)
         self.get_mass_pos_data_and_waveform_data(rt130, data_stream, cur_key)
 
     def get_ehet_in_log_data(self, rt130: core.Reftek130,
@@ -230,8 +250,10 @@ class RT130(DataTypeModel):
         """
         if data_stream == 9:
             cur_data_dict = self.mass_pos_data[cur_key]
-        else:
+        elif self.rt130_waveform_data_req:
             cur_data_dict = self.waveform_data[cur_key]
+        else:
+            return
 
         avail_trace_indexes = check_reftek_header(
             rt130, cur_key, self.read_start, self.read_end,
diff --git a/sohstationviewer/view/db_config/param_dialog.py b/sohstationviewer/view/db_config/param_dialog.py
index 2fc8c8ad99d312e01857c2d4514062aeb49b4e10..21ecf7bcca7316e30a6b5e7253d7f1ce19ef400b 100755
--- a/sohstationviewer/view/db_config/param_dialog.py
+++ b/sohstationviewer/view/db_config/param_dialog.py
@@ -47,7 +47,7 @@ class ParamDialog(UiDBInfoDialog):
         color_mode_label = QtWidgets.QLabel('Color mode:')
         color_selector = QComboBox()
         color_selector.insertItem(0, initial_color_mode)
-        other_color_modes = ALL_COLOR_MODES - {initial_color_mode}
+        other_color_modes = set(ALL_COLOR_MODES.keys()) - {initial_color_mode}
         color_selector.insertItems(1, other_color_modes)
         color_selector.setFixedWidth(100)
         color_selector.currentTextChanged.connect(self.on_color_mode_changed)
diff --git a/sohstationviewer/view/main_window.py b/sohstationviewer/view/main_window.py
index 6a3dcdb057857adf157b102f5ae11c46f42affed..358aa0fe71c861b5c5a8d71b2bf7b6fc3fb9629a 100755
--- a/sohstationviewer/view/main_window.py
+++ b/sohstationviewer/view/main_window.py
@@ -63,9 +63,17 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         """
         self.dir_names: List[Path] = []
         """
-        current_dir: str - the current main data directory
+        current_dir: the current main data directory
         """
-        self.current_dir = ''
+        self.current_dir: str = ''
+        """
+        save_plot_dir: directory in which to save plots
+        """
+        self.save_plot_dir: str = ''
+        """
+        save_plot_format: file format in which to save plots
+        """
+        self.save_plot_format: str = 'SVG'
         """
         rt130_das_dict: dict by rt130 for data paths, so user can choose
             dasses to assign list of data paths to selected_rt130_paths
@@ -185,6 +193,10 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         self.validate_config()
         self.apply_config()
 
+    @QtCore.Slot()
+    def save_plot(self):
+        self.plotting_widget.save_plot('SOH-Plot')
+
     @QtCore.Slot()
     def open_data_type(self) -> None:
         """
@@ -386,41 +398,31 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         :rtype: List[str, int]
         """
         req_wf_chans = []
-        if ((self.all_wf_chans_check_box.isChecked()
-                or [ds for ds in self.ds_check_boxes if ds.isChecked()] != []
-                or self.mseed_wildcard_edit.text().strip() != "")
-                and not self.tps_check_box.isChecked()
-                and not self.raw_check_box.isChecked()):
-            raise Exception(
-                "Waveform channels have been selected but there are none of "
-                "TPS or RAW checkboxes checked.\nPlease clear the "
-                "selection of waveform if you don't want to display the data.")
-
-        if self.tps_check_box.isChecked() or self.raw_check_box.isChecked():
-            if self.all_wf_chans_check_box.isChecked():
-                req_mseed_wildcards = ['*']
-                req_dss = ['*']      # all data stream
-            else:
-                req_dss = []
-                req_mseed_wildcards = []
-                for idx, ds_checkbox in enumerate(self.ds_check_boxes):
-                    if ds_checkbox.isChecked():
-                        req_dss.append(idx + 1)
-                if self.mseed_wildcard_edit.text().strip() != "":
-                    req_mseed_wildcards = self.mseed_wildcard_edit.text(
-                        ).split(",")
-
-            if self.data_type == 'RT130':
-                req_wf_chans = req_dss
-                if req_dss != ['*'] and req_mseed_wildcards != []:
-                    msg = 'MSeed Wildcards will be ignored for RT130.'
-                    self.processing_log.append((msg, LogType.WARNING))
-            else:
-                req_wf_chans = req_mseed_wildcards
-                if req_mseed_wildcards != ['*'] and req_dss != []:
-                    msg = ('Checked data streams will be ignored for '
-                           'none-RT130 data type.')
-                    self.processing_log.append((msg, LogType.WARNING))
+
+        if self.all_wf_chans_check_box.isChecked():
+            req_mseed_wildcards = ['*']
+            req_dss = ['*']      # all data stream
+        else:
+            req_dss = []
+            req_mseed_wildcards = []
+            for idx, ds_checkbox in enumerate(self.ds_check_boxes):
+                if ds_checkbox.isChecked():
+                    req_dss.append(idx + 1)
+            if self.mseed_wildcard_edit.text().strip() != "":
+                req_mseed_wildcards = self.mseed_wildcard_edit.text(
+                    ).split(",")
+
+        if self.data_type == 'RT130':
+            req_wf_chans = req_dss
+            if req_dss != ['*'] and req_mseed_wildcards != []:
+                msg = 'MSeed Wildcards will be ignored for RT130.'
+                self.processing_log.append((msg, LogType.WARNING))
+        else:
+            req_wf_chans = req_mseed_wildcards
+            if req_mseed_wildcards != ['*'] and req_dss != []:
+                msg = ('Checked data streams will be ignored for '
+                       'non-RT130 data types.')
+                self.processing_log.append((msg, LogType.WARNING))
         return req_wf_chans
 
     def get_requested_soh_chan(self):
@@ -502,13 +504,20 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         self.waveform_dlg.plotting_widget.clear()
         self.tps_dlg.plotting_widget.clear()
 
+    def cancel_loading(self):
+        display_tracking_info(self.tracking_info_text_browser,
+                              "Loading cancelled",
+                              LogType.WARNING)
+
     @QtCore.Slot()
     def read_selected_files(self):
         """
         Read data from selected files/directories, process and plot channels
             read from those according to current options set on the GUI
         """
-
+        display_tracking_info(self.tracking_info_text_browser,
+                              "Loading started",
+                              LogType.INFO)
         self.clear_plots()
         start_tm_str = self.time_from_date_edit.date().toString(
             QtCore.Qt.ISODate)
@@ -518,6 +527,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         if self.end_tm <= self.start_tm:
             msg = "To Date must be greater than From Date."
             QtWidgets.QMessageBox.warning(self, "Wrong Date Given", msg)
+            self.cancel_loading()
             return
         self.info_list_widget.clear()
         is_working = (self.is_loading_data or self.is_plotting_soh or
@@ -536,16 +546,25 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
                 msg = "Minimum Gap must be a number."
                 QtWidgets.QMessageBox.warning(
                     self, "Invalid Minimum Gap request", msg)
+                self.cancel_loading()
                 return
         else:
             self.min_gap = None
 
+        # If waveform channels are selected, Event DS info will be read
+        # from the EH/ET headers.
+        # rt130_waveform_data_req flags whether RT130 waveform data should
+        # be read.
+        rt130_waveform_data_req = (self.raw_check_box.isChecked() or
+                                   self.tps_check_box.isChecked())
+
         if self.mseed_wildcard_edit.text().strip() != '':
             try:
                 check_chan_wildcards_format(self.mseed_wildcard_edit.text())
             except Exception as e:
                 QtWidgets.QMessageBox.warning(
                     self, "Incorrect Wildcard", str(e))
+                self.cancel_loading()
                 return
 
         try:
@@ -563,6 +582,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             self.read_from_file_list()
         except Exception as e:
             QtWidgets.QMessageBox.warning(self, "Select directory", str(e))
+            self.cancel_loading()
             return
 
         dir_size = sum(get_dir_size(str(dir))[0] for dir in self.dir_names)
@@ -578,6 +598,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             data_too_big_dialog.setIcon(QMessageBox.Question)
             ret = data_too_big_dialog.exec_()
             if ret == QMessageBox.Abort:
+                self.cancel_loading()
                 return
 
         self.req_soh_chans = self.get_requested_soh_chan()
@@ -585,6 +606,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             self.req_wf_chans = self.get_requested_wf_chans()
         except Exception as e:
             QMessageBox.information(self, "Waveform Selection", str(e))
+            self.cancel_loading()
             return
 
         start_tm_str = self.time_from_date_edit.date().toString(
@@ -608,7 +630,8 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             read_start=self.start_tm,
             read_end=self.end_tm,
             include_mp123=self.mass_pos_123zne_check_box.isChecked(),
-            include_mp456=self.mass_pos_456uvw_check_box.isChecked()
+            include_mp456=self.mass_pos_456uvw_check_box.isChecked(),
+            rt130_waveform_data_req=rt130_waveform_data_req
         )
 
         self.data_loader.worker.finished.connect(self.data_loaded)
@@ -722,6 +745,10 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             return
         self.clear_plots()
         self.is_plotting_soh = True
+        self.plotting_widget.set_colors(self.color_mode)
+        self.waveform_dlg.plotting_widget.set_colors(self.color_mode)
+        self.tps_dlg.plotting_widget.set_colors(self.color_mode)
+        self.gps_dialog.set_colors(self.color_mode)
 
         d_obj = self.data_object
 
@@ -845,6 +872,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         # current directory
         self.current_directory_changed.emit(path)
         self.current_dir = path
+        self.save_plot_dir = path
         execute_db(f'UPDATE PersistentData SET FieldValue="{path}" WHERE '
                    'FieldName="currentDirectory"')
         self.set_open_files_list_texts()
@@ -1064,10 +1092,6 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         if not checked:
             return
         self.color_mode = color_mode
-        self.plotting_widget.set_colors(color_mode)
-        self.waveform_dlg.plotting_widget.set_colors(color_mode)
-        self.tps_dlg.plotting_widget.set_colors(color_mode)
-        self.gps_dialog.set_colors(color_mode)
 
     @QtCore.Slot()
     def clear_file_search(self):
diff --git a/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py b/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
index 7e52995e0291415225e0c5661b4deac3967391f4..e8afaff4a988dfde74b707f356a81f197de432ca 100644
--- a/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
+++ b/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
@@ -1,6 +1,6 @@
 # Define functions to call processor
 
-from typing import Tuple, Union, Dict, Callable, List, Optional
+from typing import Tuple, Union, Dict, List, Optional, Callable
 
 from PySide2 import QtCore
 
@@ -210,15 +210,13 @@ class MultiThreadedPlottingWidget(PlottingWidget):
                 self.clean_up()
                 self.finished.emit()
                 return
+
             self.create_plotting_channel_processors(
                 self.plotting_data1, self.get_plot_info, pref_soh_order)
             self.create_plotting_channel_processors(
                 self.plotting_data2, get_chan_plot_info)
-            self.process_channel()
 
-    def get_plot_info(self, *args, **kwargs):
-        # function to get database info for channels in self.plotting_data1
-        pass
+            self.process_channel()
 
     @QtCore.Slot()
     def process_channel(self, channel_data=None, channel_id=None):
@@ -361,6 +359,6 @@ class MultiThreadedPlottingWidget(PlottingWidget):
             self.is_working = True
             start_msg = 'Zooming in...'
             display_tracking_info(self.tracking_box, start_msg, 'info')
-            self.create_plotting_channel_processors(self.plotting_data1, None)
-            self.create_plotting_channel_processors(self.plotting_data2, None)
+            self.create_plotting_channel_processors(self.plotting_data1)
+            self.create_plotting_channel_processors(self.plotting_data2)
             self.process_channel()
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting.py b/sohstationviewer/view/plotting/plotting_widget/plotting.py
index ec2ab8e8a7b6c12a7cddc176caf44bb4e6df2271..9e28f8d144e50fcd587ae5a7d2a8e41c310709d5 100644
--- a/sohstationviewer/view/plotting/plotting_widget/plotting.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting.py
@@ -1,4 +1,6 @@
 # class with all plotting functions
+import numpy as np
+
 from sohstationviewer.controller.util import get_val
 from sohstationviewer.controller.plotting_data import get_masspos_value_colors
 
@@ -75,8 +77,10 @@ class Plotting:
         if chan_db_info['valueColors'] in [None, 'None', '']:
             chan_db_info['valueColors'] = '*:W'
         value_colors = chan_db_info['valueColors'].split('|')
+        colors = []
         for vc in value_colors:
             v, c = vc.split(':')
+            colors.append(c)
             val = get_val(v)
             if c == '_':
                 prev_val = val
@@ -104,9 +108,14 @@ class Plotting:
         total_samples = len(x)
 
         x = sorted(x)
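+        # color the sample-count label white unless exactly one value color
+        # is defined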
+        if len(colors) != 1:
+            sample_no_colors = [clr['W']]
+        else:
+            sample_no_colors = [clr[colors[0]]]
+
         self.plotting_axes.set_axes_info(
-            ax, [total_samples], chan_db_info=chan_db_info,
-            linked_ax=linked_ax)
+            ax, [total_samples], sample_no_colors=sample_no_colors,
+            chan_db_info=chan_db_info, linked_ax=linked_ax)
         if linked_ax is None:
             ax.x = x
         else:
@@ -168,6 +177,8 @@ class Plotting:
         ax.set_ylim(-2, 2)
         self.plotting_axes.set_axes_info(
             ax, [len(points_list[0]), len(points_list[1])],
+            sample_no_colors=[clr[colors[0]], clr[colors[1]]],
+            sample_no_pos=[0.25, 0.75],
             chan_db_info=chan_db_info, linked_ax=linked_ax)
         if linked_ax is None:
             ax.x = x
@@ -203,7 +214,8 @@ class Plotting:
         x_list = c_data['times']
         total_x = sum([len(x) for x in x_list])
         self.plotting_axes.set_axes_info(
-            ax, [total_x], chan_db_info=chan_db_info, linked_ax=linked_ax)
+            ax, [total_x], sample_no_colors=[clr[color]],
+            chan_db_info=chan_db_info, linked_ax=linked_ax)
 
         for x in x_list:
             ax.plot(x, [0] * len(x), marker='s', markersize=1.5,
@@ -250,10 +262,7 @@ class Plotting:
                 self.parent.plotting_bot, plot_h)
 
         x_list, y_list = c_data['times'], c_data['data']
-        total_x = sum([len(x) for x in x_list])
-        self.plotting_axes.set_axes_info(
-            ax, [total_x], chan_db_info=chan_db_info,
-            info=info, y_list=y_list, linked_ax=linked_ax)
+
         colors = {}
         if chan_db_info['valueColors'] not in [None, 'None', '']:
             color_parts = chan_db_info['valueColors'].split('|')
@@ -261,12 +270,27 @@ class Plotting:
                 obj, c = cStr.split(':')
                 colors[obj] = c
         l_color = 'G'
+        d_color = 'W'
         has_dot = False
         if 'L' in colors:
             l_color = colors['L']
         if 'D' in colors:
             d_color = colors['D']
             has_dot = True
+
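+        # GPS Lk/Unlk displays two sample counts: one for unlocked (-1)
+        # points and one for locked (1) points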
+        if chan_id == 'GPS Lk/Unlk':
+            sample_no_list = []
+            sample_no_list.append(np.where(y_list[0] == -1)[0].size)
+            sample_no_list.append(np.where(y_list[0] == 1)[0].size)
+            sample_no_colors = [clr[d_color], clr[d_color]]
+        else:
+            sample_no_list = [sum([len(x) for x in x_list])]
+            sample_no_colors = [clr[d_color]]
+        self.plotting_axes.set_axes_info(
+            ax, sample_no_list, sample_no_colors=sample_no_colors,
+            chan_db_info=chan_db_info,
+            info=info, y_list=y_list, linked_ax=linked_ax)
+
         for x, y in zip(x_list, y_list):
             if not has_dot:
                 # set marker to be able to click point for info
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py b/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
index 940110dc3391f55ec682ff855975c427a9e40d01..002e0dd6fc613ba135b3df1a919a5894401e2583 100644
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
@@ -1,5 +1,7 @@
-from typing import List
+from typing import List, Optional, Dict
 
+import numpy as np
+from matplotlib.axes import Axes
 from matplotlib.patches import ConnectionPatch, Rectangle
 from matplotlib.ticker import AutoMinorLocator
 from matplotlib import pyplot as pl
@@ -10,6 +12,7 @@ from sohstationviewer.controller.plotting_data import (
     get_gaps, get_time_ticks, get_unit_bitweight)
 
 from sohstationviewer.conf import constants
+from sohstationviewer.view.util.color import clr
 
 
 class PlottingAxes:
@@ -75,6 +78,7 @@ class PlottingAxes:
             labelbottom = False
         else:
             labelbottom = True
+            self.parent.plotting_bot -= 0.007       # space for ticks
         timestamp_bar.tick_params(which='major', length=7, width=2,
                                   direction='inout',
                                   colors=self.parent.display_color['basic'],
@@ -87,7 +91,8 @@ class PlottingAxes:
                                  fontweight='bold',
                                  fontsize=self.parent.font_size,
                                  rotation=0,
-                                 labelpad=constants.HOUR_TO_TMBAR_D,
+                                 labelpad=constants.HOUR_TO_TMBAR_D *
+                                 self.parent.ratio_w,
                                  ha='left',
                                  color=self.parent.display_color['basic'])
         # not show any y ticks
@@ -109,7 +114,8 @@ class PlottingAxes:
         timestamp_bar.set_xticks(times, minor=True)
         timestamp_bar.set_xticks(major_times)
         timestamp_bar.set_xticklabels(major_time_labels,
-                                      fontsize=self.parent.font_size + 2)
+                                      fontsize=self.parent.font_size +
+                                      2 * self.parent.ratio_w)
         timestamp_bar.set_xlim(self.parent.min_x, self.parent.max_x)
 
     def create_axes(self, plot_b, plot_h, has_min_max_lines=True):
@@ -148,24 +154,30 @@ class PlottingAxes:
         ax.patch.set_alpha(0)
         return ax
 
-    def set_axes_info(self, ax, sample_no_list,
-                      label=None, info='', y_list=None, chan_db_info=None,
-                      linked_ax=None):
+    def set_axes_info(self, ax: Axes,
+                      sample_no_list: List[int],
+                      sample_no_colors: List[str] = [clr['W'], clr['W']],
+                      sample_no_pos: List[float] = [0.05, 0.95],
+                      label: Optional[str] = None,
+                      info: str = '',
+                      y_list: Optional[np.ndarray] = None,
+                      chan_db_info: Optional[Dict] = None,
+                      linked_ax: Optional[Axes] = None):
         """
         Draw plot's title, sub title, sample total label, center line, y labels
         for a channel.
 
-        :param ax: matplotlib.axes.Axes - axes of a channel
-        :param sample_no_list: [int,] - list of totals of different sample
-            groups
-        :param label: str/None - title of the plot.
-            If None, show chan_db_info['label']
-        :param info: str - additional info to show in sub title which is
+        :param ax:  axes of a channel
+        :param sample_no_list: list of totals of different sample groups
+        :param sample_no_colors: list of colors used to display the sample
+            numbers
+        :param sample_no_pos: list of vertical positions (bottom/top) at
+            which the sample numbers are displayed
+        :param label: title of the plot. If None, show chan_db_info['label']
+        :param info: additional info to show in sub title which is
             smaller and under title on the left side
-        :param y: numpy.array - y values of the channel, to show min/max labels
-            and min/max lines
-        :param chan_db_info: dict - info of channel from database
-        :param linked_ax: matplotlib.axes.Axes/None -
+        :param y_list: y values of the channel for min/max labels/lines
+        :param chan_db_info: info of channel from database
+        :param linked_ax:
             if linked_ax is None, this is a main channel, label of channel will
                 be displayed with title's format, on top right of plot.
             if linked_ax is not None, this is a channel using main channel's
@@ -181,6 +193,7 @@ class PlottingAxes:
 
         if label is None:
             label = chan_db_info['label']
+
         title_ver_alignment = 'center'
         # set info in subtitle under title
         if linked_ax is not None:
@@ -211,7 +224,7 @@ class PlottingAxes:
                 rotation='horizontal',
                 transform=ax.transAxes,
                 color=color,
-                size=self.parent.font_size + 2
+                size=self.parent.font_size + 2 * self.parent.ratio_w
             )
 
         # set samples' total on right side
@@ -223,7 +236,7 @@ class PlottingAxes:
                 verticalalignment='center',
                 rotation='horizontal',
                 transform=ax.transAxes,
-                color=self.parent.display_color['basic'],
+                color=sample_no_colors[0],
                 size=self.parent.font_size
             )
         else:
@@ -233,30 +246,31 @@ class PlottingAxes:
             # on data created in trim_downsample_chan_with_spr_less_or_equal_1
             # and won't be changed in set_lim, then don't need to assign a
             # variable for it.
-
             # bottom
             ax.text(
-                1.005, 0.25,
+                1.005, sample_no_pos[0],
                 sample_no_list[0],
                 horizontalalignment='left',
                 verticalalignment='center',
                 rotation='horizontal',
                 transform=ax.transAxes,
-                color=self.parent.display_color['basic'],
+                color=sample_no_colors[0],
                 size=self.parent.font_size
             )
             # top
             ax.text(
-                1.005, 0.75,
+                1.005, sample_no_pos[1],
                 sample_no_list[1],
                 horizontalalignment='left',
                 verticalalignment='center',
                 rotation='horizontal',
                 transform=ax.transAxes,
-                color=self.parent.display_color['basic'],
+                color=sample_no_colors[1],
                 size=self.parent.font_size
             )
-
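+        # a linked channel shares the main channel's axes: hide its y ticks
+        # and skip the center line and min/max labels below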
+        if linked_ax is not None:
+            ax.set_yticks([])
+            return
         if y_list is None:
             # draw center line
             ax.plot([self.parent.min_x, self.parent.max_x],
@@ -341,17 +355,23 @@ class PlottingAxes:
                 )
             )
 
-    def get_height(self, ratio, bw_plots_distance=0.0015):
+    def get_height(self, ratio: float, bw_plots_distance: float = 0.0015,
+                   pixel_height: float = 19) -> float:
         """
         Calculate new plot's bottom position and return plot's height.
 
-        :param ratio: float - ratio of the plot height on the BASIC_HEIGHT
-        :param bw_plots_distance: float - distance between plots
-        :return plot_h: float - height of the plot
+        :param ratio: ratio of the plot height to BASIC_HEIGHT
+        :param bw_plots_distance: distance between plots
+        :param pixel_height: height of the plot in pixels
+            (for the TPS plot/TPS legend, the height of each day row)
+
+        :return plot_h: height of the plot
         """
         plot_h = constants.BASIC_HEIGHT * ratio  # ratio with figure height
         self.parent.plotting_bot -= plot_h + bw_plots_distance
-        self.parent.plotting_bot_pixel += 19 * ratio
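+        # convert the between-plots distance from a figure fraction to pixels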
+        bw_plots_distance_pixel = 3000 * bw_plots_distance
+        self.parent.plotting_bot_pixel += (pixel_height * ratio +
+                                           bw_plots_distance_pixel)
         return plot_h
 
     def add_ruler(self, color):
@@ -392,4 +412,4 @@ class PlottingAxes:
                       horizontalalignment='left',
                       transform=self.parent.timestamp_bar_top.transAxes,
                       color=self.parent.display_color['basic'],
-                      size=self.parent.font_size)
+                      size=self.parent.font_size + 2 * self.parent.ratio_w)
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py b/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
index 9cc7a78fbcbd701deedda58b3b3f1b1d912900aa..20a8d99105e4b0c73d33577f34776d8d96b93db9 100755
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
@@ -2,10 +2,10 @@
 Class of which object is used to plot data
 """
 from typing import List, Optional, Union
-
 import matplotlib.text
-from PySide2.QtCore import QTimer, Qt
 from matplotlib import pyplot as pl
+from matplotlib.transforms import Bbox
+from PySide2.QtCore import QTimer, Qt
 from PySide2 import QtCore, QtWidgets
 from PySide2.QtWidgets import QWidget, QApplication, QTextBrowser
 
@@ -18,6 +18,7 @@ from sohstationviewer.view.plotting.plotting_widget.plotting_axes import (
     PlottingAxes
 )
 from sohstationviewer.view.plotting.plotting_widget.plotting import Plotting
+from sohstationviewer.view.save_plot_dialog import SavePlotDialog
 
 from sohstationviewer.controller.plotting_data import format_time
 from sohstationviewer.controller.util import display_tracking_info
@@ -110,6 +111,7 @@ class PlottingWidget(QtWidgets.QScrollArea):
         font_size: float - font size on plot. With some require bigger font,
             +2 to the font_size
         """
+        self.base_font_size = 7
         self.font_size = 7
         """
         bottom: float - y position of the bottom edge of all plots in self.axes
@@ -243,6 +245,7 @@ class PlottingWidget(QtWidgets.QScrollArea):
         # set view size fit with the scroll's view port size
         self.main_widget.setFixedWidth(geo.width())
         self.ratio_w = geo.width() / self.width_base_px
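+        # scale the font size with the widget width so labels keep their
+        # proportion when the window is resized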
+        self.font_size = self.ratio_w * self.base_font_size
         self.plotting_w = self.ratio_w * self.width_base
         self.plotting_l = self.ratio_w * self.plotting_l_base
         if self.plot_total == 0:
@@ -652,6 +655,57 @@ class PlottingWidget(QtWidgets.QScrollArea):
         """
         self.peer_plotting_widgets = widgets
 
+    def save_plot(self, default_name='plot'):
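+        """
+        Save the plots to an image file. Confirm with the user if the color
+        mode is out of sync with the main window or the background is black,
+        then open SavePlotDialog to pick the location, format, and DPI.
+
+        :param default_name: default file name for the saved plot
+        """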
+        if self.c_mode != self.main_window.color_mode:
+            main_color = constants.ALL_COLOR_MODES[self.main_window.color_mode]
+            curr_color = constants.ALL_COLOR_MODES[self.c_mode]
+            msg = (f"Main window's color mode is {main_color}"
+                   f" but the mode haven't been applied to plotting.\n\n"
+                   f"Do you want to cancel to apply {main_color} mode "
+                   f"by clicking RePlot?\n"
+                   f"Or continue with {curr_color}?")
+            msgbox = QtWidgets.QMessageBox()
+            msgbox.setWindowTitle("Color Mode Conflict")
+            msgbox.setText(msg)
+            msgbox.addButton(QtWidgets.QMessageBox.Cancel)
+            msgbox.addButton('Continue', QtWidgets.QMessageBox.YesRole)
+            result = msgbox.exec_()
+            if result == QtWidgets.QMessageBox.Cancel:
+                return
+            self.main_window.color_mode = self.c_mode
+            if self.c_mode == 'B':
+                self.main_window.background_black_radio_button.setChecked(True)
+            else:
+                self.main_window.background_white_radio_button.setChecked(True)
+        if self.c_mode == 'B':
+            msg = ("The current background mode is black.\n"
+                   "Do you want to cancel to change the background mode "
+                   "before saving the plots to file?")
+            msgbox = QtWidgets.QMessageBox()
+            msgbox.setWindowTitle("Background Mode Confirmation")
+            msgbox.setText(msg)
+            msgbox.addButton(QtWidgets.QMessageBox.Cancel)
+            msgbox.addButton('Continue', QtWidgets.QMessageBox.YesRole)
+            result = msgbox.exec_()
+            if result == QtWidgets.QMessageBox.Cancel:
+                return
+        save_plot_dlg = SavePlotDialog(
+            self.parent, self.main_window, default_name)
+        save_plot_dlg.exec_()
+        save_file_path = save_plot_dlg.save_file_path
+        if save_file_path is None:
+            return
+        dpi = save_plot_dlg.dpi
+
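+        # crop the saved image to the plotted area; the bounding box is
+        # given in inches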
+        self.plotting_axes.fig.savefig(
+            save_file_path,
+            bbox_inches=Bbox([[0, self.plotting_bot*100],
+                              [self.ratio_w*15.5, 100]]),
+            dpi=dpi
+        )
+        msg = f"Graph is saved at {save_file_path}"
+        display_tracking_info(self.tracking_box, msg)
+
     def clear(self):
         self.plotting_axes.fig.clear()
         self.axes = []
diff --git a/sohstationviewer/view/plotting/state_of_health_widget.py b/sohstationviewer/view/plotting/state_of_health_widget.py
index 4269c0e292b59538526941741200f679efbd0d19..ae7bfabfe78e40572f33ce6e6b772062ce77d155 100644
--- a/sohstationviewer/view/plotting/state_of_health_widget.py
+++ b/sohstationviewer/view/plotting/state_of_health_widget.py
@@ -8,8 +8,6 @@ from sohstationviewer.controller.util import apply_convert_factor
 
 from sohstationviewer.model.data_type_model import DataTypeModel
 
-from sohstationviewer.database.extract_data import get_chan_plot_info
-
 from sohstationviewer.view.util.enums import LogType
 from sohstationviewer.view.plotting.plotting_widget.\
     multi_threaded_plotting_widget import MultiThreadedPlottingWidget
@@ -52,10 +50,6 @@ class SOHWidget(MultiThreadedPlottingWidget):
             self.processing_log.append((msg, LogType.WARNING))
         return True
 
-    def get_plot_info(self, *args, **kwargs):
-        # function to get database info for soh channels in self.plotting_data1
-        return get_chan_plot_info(*args, **kwargs)
-
     def plot_single_channel(self, c_data: Dict, chan_id: str):
         """
         Plot the channel chan_id.
diff --git a/sohstationviewer/view/plotting/time_power_squared_dialog.py b/sohstationviewer/view/plotting/time_power_squared_dialog.py
index 1c5acdb972716024206009c925052032f57c0538..f27f3c4362b8d0cf30d521808810b3da6fc5856d 100755
--- a/sohstationviewer/view/plotting/time_power_squared_dialog.py
+++ b/sohstationviewer/view/plotting/time_power_squared_dialog.py
@@ -13,7 +13,7 @@ from sohstationviewer.controller.util import (
     display_tracking_info, add_thousand_separator,
 )
 from sohstationviewer.database.extract_data import (
-    get_color_def, get_color_ranges, get_chan_label,
+    get_color_def, get_color_ranges, get_seismic_chan_label,
 )
 from sohstationviewer.model.data_type_model import DataTypeModel
 from sohstationviewer.model.handling_data import (
@@ -89,8 +89,10 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
         self.is_working = True
         self.set_key = key
         self.plotting_data1 = d_obj.waveform_data[key]
+        self.plot_total = len(self.plotting_data1)
 
         self.plotting_bot = const.BOTTOM
+        self.plotting_bot_pixel = const.BOTTOM_PX
         self.processed_channels = []
         self.channels = []
         self.tps_processors = []
@@ -111,7 +113,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
             title = get_title(key, self.min_x, self.max_x, self.date_mode)
 
         self.timestamp_bar_top = self.plotting_axes.add_timestamp_bar(0.)
-        self.plotting_axes.set_title(title, y=0, v_align='bottom')
+        self.plotting_axes.set_title(title, y=5, v_align='bottom')
 
         if self.plotting_data1 == {}:
             self.is_working = False
@@ -220,11 +222,12 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
 
         total_days = c_data['tps_data'].shape[0]
         plot_h = self.plotting_axes.get_height(
-            1.5 * total_days, bw_plots_distance=0.003)
+            total_days/1.5, bw_plots_distance=0.003, pixel_height=12.1)
         ax = self.create_axes(self.plotting_bot, plot_h)
+        ax.spines[['right', 'left', 'top', 'bottom']].set_visible(False)
         ax.text(
-            -0.1, 1.2,
-            f"{get_chan_label(chan_id)} {c_data['samplerate']}",
+            -0.12, 1,
+            f"{get_seismic_chan_label(chan_id)} {c_data['samplerate']}sps",
             horizontalalignment='left',
             verticalalignment='top',
             rotation='horizontal',
@@ -234,17 +237,17 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
         )
 
         zoom_marker1 = ax.plot(
-            [], [], marker='|', markersize=10,
+            [], [], marker='|', markersize=5,
             markeredgecolor=self.display_color['zoom_marker'])[0]
         self.zoom_marker1s.append(zoom_marker1)
 
         zoom_marker2 = ax.plot(
-            [], [], marker='|', markersize=10,
+            [], [], marker='|', markersize=5,
             markeredgecolor=self.display_color['zoom_marker'])[0]
         self.zoom_marker2s.append(zoom_marker2)
 
         ruler = ax.plot(
-            [], [], marker='s', markersize=5,
+            [], [], marker='s', markersize=4,
             markeredgecolor=self.display_color['time_ruler'],
             markerfacecolor='None')[0]
         self.rulers.append(ruler)
@@ -258,8 +261,8 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
             # not draw data out of day range
             color_set = self.get_color_set(y, square_counts, color_codes)
             # (- dayIdx): each day is a line, increase from top to bottom
-            ax.scatter(x, [- dayIdx] * len(x), marker='|',
-                       c=color_set, s=7, alpha=0.8)
+            ax.scatter(x, [- dayIdx] * len(x), marker='s',
+                       c=color_set, s=3)
         # extra to show highlight square
         ax.set_ylim(-(c_data['tps_data'].shape[0] + 1), 1)
 
@@ -274,11 +277,13 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
         ax.legend will create one label for each dot.
         """
         # set height of legend and distance bw legend and upper ax
-        plot_h = self.plotting_axes.get_height(7, bw_plots_distance=0.003)
+        plot_h = self.plotting_axes.get_height(
+            21, bw_plots_distance=0.004, pixel_height=12)
         ax = self.plotting_axes.canvas.figure.add_axes(
             [self.plotting_l, self.plotting_bot, self.plotting_w, plot_h],
             picker=True
         )
+        ax.axis('off')
         ax.patch.set_alpha(0)
         c_labels = self.parent.sel_col_labels
         clrs = self.parent.color_def  # colordef
@@ -466,6 +471,7 @@ class TimePowerSquaredWidget(plotting_widget.PlottingWidget):
         with new color range selected.
         """
         self.clear()
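+        # re-apply the main window's color mode in case it was changed since
+        # the plot was first drawn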
+        self.set_colors(self.main_window.color_mode)
         self.plotting_bot = const.BOTTOM
         title = get_title(self.set_key, self.min_x, self.max_x, self.date_mode)
         self.timestamp_bar_top = self.plotting_axes.add_timestamp_bar(0.)
@@ -554,6 +560,7 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         """
         self.color_range_choice = QtWidgets.QComboBox(self)
         self.color_range_choice.addItems(self.color_ranges)
+
         self.color_range_choice.setCurrentText('High')
         color_layout.addWidget(self.color_range_choice)
         # ##################### Replot button ########################
@@ -561,8 +568,8 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         buttons_layout.addWidget(self.replot_button)
 
         # ##################### Save button ##########################
-        self.save_button = QtWidgets.QPushButton('Save', self)
-        buttons_layout.addWidget(self.save_button)
+        self.save_plot_button = QtWidgets.QPushButton('Save Plot', self)
+        buttons_layout.addWidget(self.save_plot_button)
 
         self.info_text_browser.setFixedHeight(60)
         bottom_layout.addWidget(self.info_text_browser)
@@ -595,7 +602,7 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         """
         Connect functions to widgets
         """
-        self.save_button.clicked.connect(self.save)
+        self.save_plot_button.clicked.connect(self.save_plot)
         self.replot_button.clicked.connect(self.plotting_widget.replot)
         self.color_range_choice.currentTextChanged.connect(
             self.color_range_changed)
@@ -612,8 +619,8 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         self.sel_col_labels = self.color_label[cr_index]
 
     @QtCore.Slot()
-    def save(self):
+    def save_plot(self):
         """
         Save the plotting to a file
         """
-        print("save")
+        self.plotting_widget.save_plot('TPS-Plot')
diff --git a/sohstationviewer/view/plotting/waveform_dialog.py b/sohstationviewer/view/plotting/waveform_dialog.py
index ba9a2a2cd66f18d3658bacd72751b834901ff404..11d07c2625521381cc2c951e3fa1cf047eb584f7 100755
--- a/sohstationviewer/view/plotting/waveform_dialog.py
+++ b/sohstationviewer/view/plotting/waveform_dialog.py
@@ -11,8 +11,6 @@ from sohstationviewer.view.plotting.plotting_widget.\
 
 from sohstationviewer.controller.util import apply_convert_factor
 
-from sohstationviewer.database.extract_data import get_wf_plot_info
-
 
 class WaveformWidget(MultiThreadedPlottingWidget):
     """
@@ -39,10 +37,6 @@ class WaveformWidget(MultiThreadedPlottingWidget):
         return super().init_plot(d_obj, data_time, key, start_tm, end_tm,
                                  time_ticks_total, is_waveform=True)
 
-    def get_plot_info(self, *args, **kwargs):
-        # function to get database info for wf channels in self.plotting_data1
-        return get_wf_plot_info(*args, **kwargs)
-
     def plot_single_channel(self, c_data: Dict, chan_id: str):
         """
         Plot the channel chan_id.
@@ -118,11 +112,11 @@ class WaveformDialog(QtWidgets.QWidget):
         bottom_layout = QtWidgets.QHBoxLayout()
         main_layout.addLayout(bottom_layout)
         """
-        save_button: save plot in plotting_widget to file
+        save_plot_button: save plot in plotting_widget to file
         """
-        self.save_button = QtWidgets.QPushButton('Save', self)
-        self.save_button.clicked.connect(self.save)
-        bottom_layout.addWidget(self.save_button)
+        self.save_plot_button = QtWidgets.QPushButton('Save Plot', self)
+        self.save_plot_button.clicked.connect(self.save_plot)
+        bottom_layout.addWidget(self.save_plot_button)
         self.info_text_browser.setFixedHeight(60)
         bottom_layout.addWidget(self.info_text_browser)
 
@@ -148,11 +142,11 @@ class WaveformDialog(QtWidgets.QWidget):
         self.plotting_widget.init_size()
 
     @QtCore.Slot()
-    def save(self):
+    def save_plot(self):
         """
         Save the plotting to a file
         """
-        print("save")
+        self.plotting_widget.save_plot('Waveform-Plot')
 
     def plot_finished(self):
         self.parent.is_plotting_waveform = False
diff --git a/sohstationviewer/view/save_plot_dialog.py b/sohstationviewer/view/save_plot_dialog.py
new file mode 100644
index 0000000000000000000000000000000000000000..77a988f25a6679ac7ecd3bd4f916ca625d6a97d1
--- /dev/null
+++ b/sohstationviewer/view/save_plot_dialog.py
@@ -0,0 +1,139 @@
+import sys
+import platform
+import os
+from pathlib import Path
+from typing import Union, Optional
+
+from PySide2 import QtWidgets, QtCore, QtGui
+from PySide2.QtWidgets import QApplication, QWidget, QDialog
+
+from sohstationviewer.conf import constants
+
+
+class SavePlotDialog(QDialog):
+    def __init__(self, parent: Union[QWidget, QApplication],
+                 main_window: QApplication,
+                 default_name: str):
+        """
+        Dialog that allows choosing a file format, then opens a file dialog
+            to save the plot file
+
+        :param parent: the parent widget
+        :param main_window: main window that keeps the save path and format
+        :param default_name: default name for graph file to be saved as
+        """
+        super(SavePlotDialog, self).__init__(parent)
+        self.main_window = main_window
+        """
+        save_file_path: path to save file
+        """
+        self.save_file_path: Optional[Path] = None
+        """
+        save_dir_path: path to save dir
+        """
+        self.save_dir_path: Path = main_window.save_plot_dir
+        """
+        dpi: resolution for png format
+        """
+        self.dpi: int = 100
+
+        self.save_dir_btn = QtWidgets.QPushButton("Save Directory", self)
+        self.save_dir_textbox = QtWidgets.QLineEdit(str(self.save_dir_path))
+        self.save_filename_textbox = QtWidgets.QLineEdit(default_name)
+
+        self.dpi_line_edit = QtWidgets.QSpinBox(self)
+        self.format_radio_btns = {}
+        for fmt in constants.IMG_FORMAT:
+            self.format_radio_btns[fmt] = QtWidgets.QRadioButton(fmt, self)
+            if fmt == self.main_window.save_plot_format:
+                self.format_radio_btns[fmt].setChecked(True)
+        self.cancel_btn = QtWidgets.QPushButton('CANCEL', self)
+        self.continue_btn = QtWidgets.QPushButton('SAVE PLOT', self)
+
+        self.setup_ui()
+        self.connect_signals()
+
+    def setup_ui(self) -> None:
+        self.setWindowTitle("Save Plot")
+
+        main_layout = QtWidgets.QGridLayout()
+        self.setLayout(main_layout)
+
+        main_layout.addWidget(self.save_dir_btn, 0, 0, 1, 1)
+        self.save_dir_textbox.setFixedWidth(500)
+        main_layout.addWidget(self.save_dir_textbox, 0, 1, 1, 5)
+        main_layout.addWidget(QtWidgets.QLabel('Save Filename'),
+                              1, 0, 1, 1)
+        main_layout.addWidget(self.save_filename_textbox, 1, 1, 1, 5)
+
+        main_layout.addWidget(QtWidgets.QLabel('DPI'),
+                              2, 2, 1, 1, QtGui.Qt.AlignRight)
+        self.dpi_line_edit.setRange(50, 300)
+        self.dpi_line_edit.setValue(100)
+        main_layout.addWidget(self.dpi_line_edit, 2, 3, 1, 1)
+        rowidx = 2
+        for fmt in self.format_radio_btns:
+            main_layout.addWidget(self.format_radio_btns[fmt], rowidx, 1, 1, 1)
+            rowidx += 1
+
+        main_layout.addWidget(self.cancel_btn, rowidx, 1, 1, 1)
+        main_layout.addWidget(self.continue_btn, rowidx, 3, 1, 1)
+
+    def connect_signals(self) -> None:
+        self.save_dir_btn.clicked.connect(self.change_save_directory)
+        self.cancel_btn.clicked.connect(self.close)
+        self.continue_btn.clicked.connect(self.on_continue)
+
+    @QtCore.Slot()
+    def change_save_directory(self) -> None:
+        """
+        Show a file selection window and change the plot save directory
+        based on the folder selected by the user.
+        """
+        fd = QtWidgets.QFileDialog(self)
+        fd.setFileMode(QtWidgets.QFileDialog.Directory)
+        fd.setDirectory(self.save_dir_textbox.text())
+        if not fd.exec_():
+            return
+        new_path = fd.selectedFiles()[0]
+        self.save_dir_textbox.setText(new_path)
+        self.save_dir_path = new_path
+        self.main_window.save_plot_dir = new_path
+
+    @QtCore.Slot()
+    def on_continue(self):
+        if self.save_dir_textbox.text().strip() == '':
+            QtWidgets.QMessageBox.warning(
+                self, "Add Directory",
+                "A directory need to be given before continue.")
+            return
+
+        if self.save_filename_textbox.text().strip() == '':
+            QtWidgets.QMessageBox.warning(
+                self, "Add Filename",
+                "A file name need to be given before continue.")
+            return
+
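+        # remember the chosen format so it becomes the default for the next
+        # save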
+        for img_format in self.format_radio_btns:
+            if self.format_radio_btns[img_format].isChecked():
+                save_format = img_format
+                self.main_window.save_plot_format = img_format
+                break
+
+        self.save_file_path = Path(self.save_dir_path).joinpath(
+            f"{self.save_filename_textbox.text()}.{save_format}")
+        self.dpi = self.dpi_line_edit.value()
+        self.close()
+
+
+if __name__ == '__main__':
+    os_name, version, *_ = platform.platform().split('-')
+    if os_name == 'macOS':
+        os.environ['QT_MAC_WANTS_LAYER'] = '1'
+    app = QtWidgets.QApplication(sys.argv)
+    save_path = '/Users/ldam/Documents/GIT/sohstationviewer/tests/test_data/Q330-sample'  # noqa: E501
+    class MainWindowStub:
+        # minimal stand-in providing the attributes the dialog expects
+        save_plot_dir = save_path
+        save_plot_format = constants.IMG_FORMAT[0]
+    test = SavePlotDialog(None, MainWindowStub(), 'test_plot')
+    test.exec_()
+    print("dpi:", test.dpi)
+    print("save file path:", test.save_file_path)
+    sys.exit(app.exec_())
diff --git a/sohstationviewer/view/ui/main_ui.py b/sohstationviewer/view/ui/main_ui.py
index 005029668262238706fc02f0bf176aa25995df5e..194b23483bc13cbd916c0d77a737d556eee6a313 100755
--- a/sohstationviewer/view/ui/main_ui.py
+++ b/sohstationviewer/view/ui/main_ui.py
@@ -793,6 +793,8 @@ class UIMainWindow(object):
 
         self.stop_button.clicked.connect(main_window.stop)
 
+        self.save_plot_button.clicked.connect(main_window.save_plot)
+
     def read_config(self):
         self.config = configparser.ConfigParser()
         config_path = Path('sohstationviewer/conf/read_settings.ini')
diff --git a/sohstationviewer/view/util/functions.py b/sohstationviewer/view/util/functions.py
index 68bd436b384ae812e42fc6521d65cc90a8d795a6..2d50ff1c60f20f068682dd0db810544ae2de7537 100644
--- a/sohstationviewer/view/util/functions.py
+++ b/sohstationviewer/view/util/functions.py
@@ -96,6 +96,9 @@ def create_table_of_content_file(base_path: Path) -> None:
         "this software.\n\n"
         "On the left-hand side you will find a list of currently available"
         " help topics.\n\n"
+        "If the links of the Table of Contents are broken, click on Recreate "
+        "Table of Content <img src='recreate_table_contents.png' height=30 /> "
+        "to rebuild it.\n\n"
         "The home button can be used to return to this page at any time.\n\n"
         "# Table of Contents\n\n")
     links = ""
diff --git a/tests/model/__init__.py b/tests/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/general_data/__init__.py b/tests/model/general_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/general_data/test_general_data_helper.py b/tests/model/general_data/test_general_data_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..4bd91761805bc6632a8e47e08a793fc00c6a7fa5
--- /dev/null
+++ b/tests/model/general_data/test_general_data_helper.py
@@ -0,0 +1,292 @@
+import numpy as np
+from unittest import TestCase
+from unittest.mock import patch
+
+from sohstationviewer.model.general_data.general_data_helper import (
+    _check_related_gaps, squash_gaps, sort_data,
+    retrieve_data_time_from_data_dict, retrieve_gaps_from_data_dict,
+    combine_data, apply_convert_factor_to_data_dict
+)
+
+
+class TestCheckRelatedGaps(TestCase):
+    # FROM test_handling_data_rearrange_data.TestCheckRelatedGaps
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.checked_indexes = []
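+        # shared by all tests in this class: _check_related_gaps appends a
+        # gap's index when the two gaps are found to be related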
+
+    def test_minmax1_inside_minmax2(self):
+        self.assertTrue(
+            _check_related_gaps(3, 4, 1, 5, 1, self.checked_indexes))
+        self.assertIn(1, self.checked_indexes)
+
+    def test_minmax2_inside_minmax1(self):
+        self.assertTrue(
+            _check_related_gaps(1, 5, 3, 4, 2, self.checked_indexes))
+        self.assertIn(2, self.checked_indexes)
+
+    def test_end_minmax1_overlap_start_minmax2(self):
+        self.assertTrue(
+            _check_related_gaps(1, 4, 3, 5, 3, self.checked_indexes))
+        self.assertIn(3, self.checked_indexes)
+
+    def test_end_minmax2_overlap_start_minmax1(self):
+        self.assertTrue(
+            _check_related_gaps(3, 5, 1, 4, 4, self.checked_indexes))
+        self.assertIn(4, self.checked_indexes)
+
+    def test_minmax1_less_than_minmax2(self):
+        self.assertFalse(
+            _check_related_gaps(1, 3, 4, 6, 5, self.checked_indexes))
+        self.assertNotIn(5, self.checked_indexes)
+
+    def test_minmax1_greater_than_minmax2(self):
+        self.assertFalse(
+            _check_related_gaps(6, 6, 1, 3, 5, self.checked_indexes))
+        self.assertNotIn(5, self.checked_indexes)
+
+
+class TestSquashGaps(TestCase):
+    # FROM test_handling_data_rearrange_data.TestSquashGaps
+    def setUp(self) -> None:
+        self.normal_gaps = [[4, 7], [4, 6], [5, 6], [3, 7], [5, 8]]
+        self.overlap_gaps = [[17, 14], [16, 14], [16, 15], [17, 13], [18, 15]]
+        self.mixed_gaps = []
+        for i in range(len(self.normal_gaps)):
+            self.mixed_gaps.append(self.normal_gaps[i])
+            self.mixed_gaps.append(self.overlap_gaps[i])
+
+    def test_normal_gaps(self):
+        gaps = squash_gaps(self.normal_gaps)
+        self.assertEqual(gaps, [[3, 8]])
+
+    def test_overlap_gaps(self):
+        gaps = squash_gaps(self.overlap_gaps)
+        self.assertEqual(gaps, [[18, 13]])
+
+    def test_mixed_gaps(self):
+        gaps = squash_gaps(self.mixed_gaps)
+        self.assertEqual(gaps, [[3, 8], [18, 13]])
+
+
+class TestSortData(TestCase):
+    # FROM test_handling_data_rearrange_data.TestSortData
+    def setUp(self) -> None:
+        self.station_data_dict = {
+            'CH1': {'tracesInfo': [{'startTmEpoch': 7},
+                                   {'startTmEpoch': 1},
+                                   {'startTmEpoch': 5},
+                                   {'startTmEpoch': 3}]},
+            'CH2': {'tracesInfo': [{'startTmEpoch': 2},
+                                   {'startTmEpoch': 8},
+                                   {'startTmEpoch': 6},
+                                   {'startTmEpoch': 4}]}
+        }
+
+    def test_sort_data(self):
+        sort_data(self.station_data_dict)
+        self.assertEqual(
+            self.station_data_dict,
+            {'CH1': {'tracesInfo': [{'startTmEpoch': 1}, {'startTmEpoch': 3},
+                                    {'startTmEpoch': 5}, {'startTmEpoch': 7}]},
+             'CH2': {'tracesInfo': [{'startTmEpoch': 2}, {'startTmEpoch': 4},
+                                    {'startTmEpoch': 6}, {'startTmEpoch': 8}]}}
+        )
+
+
+class TestRetrieveDataTimeFromDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {'CH1': {'startTmEpoch': 4, 'endTmEpoch': 6},
+                     'CH2': {'startTmEpoch': 5, 'endTmEpoch': 9}
+                     },
+            'STA2': {'CH1': {'startTmEpoch': 2, 'endTmEpoch': 4},
+                     'CH2': {'startTmEpoch': 6, 'endTmEpoch': 8}
+                     }
+            }
+        self.data_time = {}
+        self.expected_data_time = {'STA1': [4, 9], 'STA2': [2, 8]}
+
+    def test_retrieve_data_time(self):
+        retrieve_data_time_from_data_dict(self.data_dict, self.data_time)
+        self.assertEqual(self.data_time,
+                         self.expected_data_time)
+
+
+class TestRetrieveGapsFromDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {'CH1': {'gaps': [[1, 2], [4, 3]]},
+                     'CH2': {'gaps': []}
+                     },
+            'STA2': {'CH1': {'gaps': [[1, 2], [4, 3], [2, 3]]},
+                     'CH2': {'gaps': [[1, 3], [3, 2]]}
+                     },
+            }
+        self.gaps = {}
+        self.expected_gaps = {'STA1': [[1, 2], [4, 3]],
+                              'STA2': [[1, 2], [4, 3], [2, 3], [1, 3], [3, 2]]}
+
+    def test_retrieve_gaps(self):
+        retrieve_gaps_from_data_dict(self.data_dict, self.gaps)
+        self.assertEqual(self.gaps,
+                         self.expected_gaps)
+
+
+class TestCombineData(TestCase):
+    def test_overlap_lt_gap_minimum(self):
+        # combine; not add to gap list
+        station_data_dict = {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 13,     # delta = 2 < 10
+                     'endTmEpoch': 20,
+                     'data': [1, -2, 1, 1],
+                     'times': [13, 16, 18, 20]}
+                ]}
+            }
+        gap_minimum = 10
+        combine_data(station_data_dict, gap_minimum)
+        self.assertEqual(station_data_dict['CH1']['gaps'], [])
+
+        self.assertEqual(
+            len(station_data_dict['CH1']['tracesInfo']),
+            1)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['endTmEpoch'],
+            20)
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 13, 16, 18, 20])
+
+    def test_overlap_gt_or_equal_gap_minimum(self):
+        # combine; add to gap list
+        station_data_dict = {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 5,     # delta = 10 >= 10
+                     'endTmEpoch': 20,
+                     'data': [1, -2, 1, 1],
+                     'times': [5, 11, 15, 20]}
+                ]}
+            }
+        gap_minimum = 10
+        combine_data(station_data_dict, gap_minimum)
+        self.assertEqual(station_data_dict['CH1']['gaps'], [[15, 5]])
+
+        self.assertEqual(
+            len(station_data_dict['CH1']['tracesInfo']),
+            1)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['endTmEpoch'],
+            20)
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 5, 11, 15, 20])
+
+    def test_lt_gap_minimum(self):
+        # not combine; not add to gap list
+        station_data_dict = {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 22,    # delta = 7 > 6, < 10
+                     'endTmEpoch': 34,
+                     'data': [1, -2, 1, 1],
+                     'times': [22, 26, 30, 34]}
+                ]}
+        }
+        gap_minimum = 10
+        combine_data(station_data_dict, gap_minimum)
+        self.assertEqual(station_data_dict['CH1']['gaps'], [])
+
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['endTmEpoch'],
+            34)
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 22, 26, 30, 34])
+
+    def test_gap_gt_or_equal_gap_minimum(self):
+        # not combine; add to gap list
+        station_data_dict = {
+            'CH1': {
+                'gaps': [],
+                'tracesInfo': [
+                    {'startTmEpoch': 5,
+                     'endTmEpoch': 15,
+                     'data': [1, 2, 2, -1],
+                     'times': [5, 8, 11, 15]},
+                    {'startTmEpoch': 25,        # delta = 10 >= 10
+                     'endTmEpoch': 40,
+                     'data': [1, -2, 1, 1],
+                     'times': [25, 29, 33, 36, 40]}
+                ]}
+            }
+        gap_minimum = 10
+        combine_data(station_data_dict, gap_minimum)
+        self.assertEqual(station_data_dict['CH1']['gaps'], [[15, 25]])
+
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['startTmEpoch'],
+            5)
+        self.assertEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['endTmEpoch'],
+            40)
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['data'].tolist(),
+            [1, 2, 2, -1, 1, -2, 1, 1])
+        self.assertListEqual(
+            station_data_dict['CH1']['tracesInfo'][0]['times'].tolist(),
+            [5, 8, 11, 15, 25, 29, 33, 36, 40])
+
+
+class TestApplyConvertFactorToDataDict(TestCase):
+    def setUp(self) -> None:
+        self.data_dict = {
+            'STA1': {
+                'CH1': {'tracesInfo': [{'data': np.array([1, 2, 2, -1])}]}
+            }
+        }
+        self.expected_data = [0.1, 0.2, 0.2, -0.1]
+
+    @patch('sohstationviewer.model.general_data.general_data_helper.'
+           'get_convert_factor')
+    def test_convert_factor(self, mock_get_convert_factor):
+        mock_get_convert_factor.return_value = 0.1
+        apply_convert_factor_to_data_dict(self.data_dict, 'Q330')
+        self.assertEqual(
+            self.data_dict['STA1']['CH1']['tracesInfo'][0]['data'].tolist(),
+            self.expected_data)
diff --git a/tests/model/mseed_data/__init__.py b/tests/model/mseed_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/mseed_data/test_mseed.py b/tests/model/mseed_data/test_mseed.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ac2551379537c384038c2f263870d0630733bca
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed.py
@@ -0,0 +1,362 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed import MSeed
+from sohstationviewer.model.general_data.general_data import \
+    ProcessingDataError
+
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+pegasus_data = TEST_DATA_DIR.joinpath("Pegasus-sample")
+q330_data = TEST_DATA_DIR.joinpath("Q330-sample")
+blockettes_data = TEST_DATA_DIR.joinpath("Q330_unimplemented_ascii_block")
+multiplex_data = TEST_DATA_DIR.joinpath("Q330_multiplex")
+centaur_data = TEST_DATA_DIR.joinpath("Centaur-sample")
+
+
+class TestMSeed(TestCase):
+    def test_path_not_exist(self):
+        # raise an exception when the path doesn't exist
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': '_',
+            'on_unittest': True
+        }
+        with self.assertRaises(ProcessingDataError) as context:
+            MSeed(**args)
+        self.assertEqual(
+            str(context.exception),
+            "Path '_' not exist"
+        )
+
+    def test_read_text_only(self):
+        # no station is recognized, so the text is added to log_data under
+        # the 'TEXT' key
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'folder': pegasus_data,
+            'req_soh_chans': ['_'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT'])
+        self.assertEqual(len(obj.log_data['TEXT']), 2)
+        self.assertEqual(
+            obj.log_data['TEXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['TEXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_text_with_soh(self):
+        # the text file's station is taken from SOH data; its content is
+        # added to log_data under the 'TXT' channel
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'folder': pegasus_data,
+            'req_soh_chans': ['VE1'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'KC01'])
+        self.assertEqual(len(obj.log_data['TEXT']), 0)
+        self.assertEqual(list(obj.log_data['KC01'].keys()), ['TXT'])
+        self.assertEqual(len(obj.log_data['KC01']['TXT']), 2)
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_text_with_waveform(self):
+        # the text file's station is taken from waveform data; its content
+        # is added to log_data under the 'TXT' channel
+        args = {
+            'data_type': 'Pegasus',
+            'is_multiplex': False,
+            'folder': pegasus_data,
+            'req_wf_chans': ['HH1'],
+            'req_soh_chans': ['_'],
+            'on_unittest': True
+        }
+
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'KC01'])
+        self.assertEqual(len(obj.log_data['TEXT']), 0)
+        self.assertEqual(list(obj.log_data['KC01'].keys()), ['TXT'])
+        self.assertEqual(len(obj.log_data['KC01']['TXT']), 2)
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][0][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.130'
+            '\n2020-05-09 00:00:09.839 UTC: I(TimingThread): timing unce')
+
+        self.assertEqual(
+            obj.log_data['KC01']['TXT'][1][:100],
+            '\n\n** STATE OF HEALTH: XX.KC01...D.2020.129'
+            '\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware')
+
+    def test_read_ascii(self):
+        # the LOG info is plain text wrapped in the MSEED format
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': q330_data,
+            'req_soh_chans': ['LOG'],
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', 'AX08'])
+        self.assertEqual(list(obj.log_data['AX08'].keys()), ['LOG'])
+        self.assertEqual(obj.log_data['TEXT'], [])
+        self.assertEqual(len(obj.log_data['AX08']['LOG']), 16)
+        self.assertEqual(
+            obj.log_data['AX08']['LOG'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1625456260.12  To:1625456260.12\n\r'
+            '\nQuanterra Packet Baler Model 14 Restart. V'
+        )
+        self.assertEqual(
+            obj.log_data['AX08']['LOG'][1][:100],
+            '\n\nSTATE OF HEALTH: From:1625456366.62  To:1625456366.62'
+            '\nReducing Status Polling Interval\r\n[2021-07-0'
+        )
+
+    def test_read_blockettes_info(self):
+        # info in blockette 500
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'folder': blockettes_data,
+            'req_soh_chans': ['ACE'],
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', '3203'])
+        self.assertEqual(list(obj.log_data['3203'].keys()), ['ACE'])
+        self.assertEqual(obj.log_data['TEXT'], [])
+        self.assertEqual(len(obj.log_data['3203']['ACE']), 1)
+        self.assertEqual(
+            obj.log_data['3203']['ACE'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1671729287.00014  To:1671729287.0'
+            '\n===========\nVCO correction: 53.7109375\nTim'
+        )
+
+    def test_not_is_multiplex_read_channel(self):
+        # is_multiplex = False => reading stops at the first channel that
+        # doesn't match the request, so channel 'EL1' is read but not
+        # finished
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': multiplex_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL1']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(obj.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730013.805)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['size'], 1932)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_is_multiplex_read_channel(self):
+        # is_multiplex = True => read every record
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'folder': multiplex_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL1']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(obj.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730720.4348998)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['size'], 143258)
+        self.assertEqual(obj.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_not_is_multiplex_selected_channel_in_middle(self):
+        # the selected channel is never reached because an earlier record
+        # doesn't meet the requirement when is_multiplex = False
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': multiplex_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL2']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), [])
+
+    def test_is_multiplex_selected_channel_in_middle(self):
+        # is_multiplex = True => the selected channel will be read
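+        # ('EL2' records lie between other channels' records in the file;
+        # reading every record picks them up anyway)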
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'folder': multiplex_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['EL2']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(obj.waveform_data['3203'].keys()), ['EL2'])
+        self.assertEqual(obj.waveform_data['3203']['EL2']['samplerate'], 200)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['startTmEpoch'],
+                         1671730004.3100293)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['endTmEpoch'],
+                         1671730720.5549)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['size'], 143249)
+        self.assertEqual(obj.waveform_data['3203']['EL2']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['3203']['EL2']['tracesInfo']),
+                         1)
+
+    def test_existing_time_range(self):
+        # check that data_time comes from the given range; the end time may
+        # be slightly greater than read_end, depending on the record's end
+        # time
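+        # (read_start/read_end are epoch seconds; this range is around
+        # 2021-07-05)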
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': q330_data,
+            'req_soh_chans': [],
+            'read_start': 1625456018.0,
+            'read_end': 1625505627.9998999
+        }
+        obj = MSeed(**args)
+        self.assertEqual(obj.keys, ['AX08'])
+        self.assertEqual(list(obj.soh_data['AX08'].keys()), ['VKI'])
+        self.assertEqual(list(obj.mass_pos_data['AX08'].keys()), [])
+        self.assertEqual(list(obj.waveform_data['AX08'].keys()), [])
+        self.assertEqual(obj.data_time['AX08'], [1625446018.0, 1625510338.0])
+
+    def test_non_existing_time_range(self):
+        # if the given time range lies outside the data's time span, no
+        # station will be created
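+        # (this range ends before the earliest record in the data set)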
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': q330_data,
+            'req_soh_chans': [],
+            'read_start': 1625356018.0,
+            'read_end': 1625405627.9998999
+        }
+        obj = MSeed(**args)
+        self.assertEqual(obj.keys, [])
+        self.assertEqual(obj.soh_data, {})
+        self.assertEqual(obj.mass_pos_data, {})
+        self.assertEqual(obj.waveform_data, {})
+        self.assertEqual(obj.data_time, {})
+
+    def test_read_waveform(self):
+        # data for TPS is similar to waveform data but is not separated
+        # at gaps
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': False,
+            'folder': q330_data,
+            'req_soh_chans': [],
+            'req_wf_chans': ['LHE']
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.waveform_data.keys()), ['AX08'])
+        self.assertEqual(list(obj.waveform_data['AX08'].keys()), ['LHE'])
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['samplerate'], 1)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['startTmEpoch'],
+                         1625445156.000001)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['endTmEpoch'],
+                         1625532950.0)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['size'], 87794)
+        self.assertEqual(obj.waveform_data['AX08']['LHE']['gaps'], [])
+        self.assertEqual(len(obj.waveform_data['AX08']['LHE']['tracesInfo']),
+                         1)
+
+    def test_read_mass_pos_channel(self):
+        # mass position channels will be read if one or both of the
+        # include_mpxxxxxx flags are True
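+        # (with include_mp123zne, channels such as 'VM1' are routed into
+        # mass_pos_data rather than soh_data or waveform_data)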
+        args = {
+            'data_type': 'Q330',
+            'is_multiplex': True,
+            'folder': q330_data,
+            'req_soh_chans': [],
+            'req_wf_chans': [],
+            'include_mp123zne': True
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.mass_pos_data.keys()), ['AX08'])
+        self.assertEqual(list(obj.mass_pos_data['AX08'].keys()), ['VM1'])
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['samplerate'], 0.1)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['startTmEpoch'],
+                         1625444970.0)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['endTmEpoch'],
+                         1625574580.0)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['size'], 12961)
+        self.assertEqual(obj.mass_pos_data['AX08']['VM1']['gaps'], [])
+        self.assertEqual(len(obj.mass_pos_data['AX08']['VM1']['tracesInfo']),
+                         1)
+
+    def test_gap(self):
+        # gaps will be detected when gap_minimum is set
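+        # (gap_minimum is in seconds; the gap asserted below spans
+        # ~43 minutes)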
+        args = {
+            'data_type': 'Centaur',
+            'is_multiplex': True,
+            'folder': centaur_data,
+            'req_soh_chans': [],
+            'gap_minimum': 60
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(obj.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(obj.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(obj.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(obj.gaps['3734'], [[1534521420.0, 1534524000.0]])
+
+    def test_not_detect_gap(self):
+        # if gap_minimum isn't set but gaps exist, the data is still
+        # separated, but the gaps aren't added to the gap list
+        args = {
+            'data_type': 'Centaur',
+            'is_multiplex': True,
+            'folder': centaur_data,
+            'req_soh_chans': [],
+            'gap_minimum': None
+        }
+        obj = MSeed(**args)
+        self.assertEqual(list(obj.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(obj.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(obj.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(obj.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(obj.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(obj.gaps['3734'], [])  # no gaps
diff --git a/tests/model/mseed_data/test_mseed_helper.py b/tests/model/mseed_data/test_mseed_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..938092c629f7115bd2623971a58a7aa5e7b047fe
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed_helper.py
@@ -0,0 +1,48 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed_helper import (
+    retrieve_nets_from_data_dict, read_text
+)
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+text_file = TEST_DATA_DIR.joinpath(
+    "Pegasus-sample/Pegasus_SVC4/logs/2020/XX/KC01/XX.KC01...D.2020.129")
+binary_file = TEST_DATA_DIR.joinpath(
+    "Pegasus-sample/Pegasus_SVC4/soh/2020/XX/KC01/VDT.D/"
+    "XX.KC01..VDT.D.2020.129")
+
+
+class TestReadText(TestCase):
+    def test_text_file(self):
+        ret = read_text(text_file)
+        expected_ret = (
+            "\n\n** STATE OF HEALTH: XX.KC01...D.2020.129"
+            "\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware")
+        self.assertEqual(ret[:100], expected_ret)
+
+    def test_binary_file(self):
+        ret = read_text(binary_file)
+        self.assertIsNone(ret)
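+        # (read_text presumably returns None when the file cannot be decoded
+        # as text, as with this binary miniSEED file)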
+
+
+class TestRetrieveNetsFromDataDict(TestCase):
+    def setUp(self):
+        self.nets_by_sta = {}
+        self.data_dict = {
+            'STA1': {'CHA1': {'nets': {'NET1', 'NET2'}},
+                     'CHA2': {'nets': {'NET2', 'NET3'}}
+                     },
+            'STA2': {'CHA1': {'nets': {'NET1'}},
+                     'CHA2': {'nets': {'NET1'}}
+                     }
+        }
+
+    def test_retrieve_nets(self):
+        retrieve_nets_from_data_dict(self.data_dict, self.nets_by_sta)
+        self.assertEqual(list(self.nets_by_sta.keys()), ['STA1', 'STA2'])
+        self.assertEqual(sorted(list(self.nets_by_sta['STA1'])),
+                         ['NET1', 'NET2', 'NET3'])
+        self.assertEqual(sorted(list(self.nets_by_sta['STA2'])), ['NET1'])
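+        # nets_by_sta maps each station to the union of the 'nets' sets
+        # found across its channels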
diff --git a/tests/model/mseed_data/test_mseed_reader.py b/tests/model/mseed_data/test_mseed_reader.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcdbe513272a07e763b8a90a8f3a662e6ebdb26a
--- /dev/null
+++ b/tests/model/mseed_data/test_mseed_reader.py
@@ -0,0 +1,316 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.mseed_data.mseed_reader import MSeedReader
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+ascii_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..LOG.2021.186")
+blockettes_files = TEST_DATA_DIR.joinpath(
+    "Q330_unimplemented_ascii_block/XX-3203_4-20221222190255")
+multiplex_file = TEST_DATA_DIR.joinpath(
+    "Q330_multiplex/XX-3203_4-20221222183011")
+soh_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..VKI.2021.186")
+waveform_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..LHE.2021.186")
+mass_pos_file = TEST_DATA_DIR.joinpath(
+    "Q330-sample/day_vols_AX08/AX08.XA..VM1.2021.186")
+gap_file = TEST_DATA_DIR.joinpath(
+    "Centaur-sample/SOH/"
+    "XX.3734.SOH.centaur-3_3734..20180817_000000.miniseed.miniseed")
+
+
+class TestMSeedReader(TestCase):
+    def setUp(self) -> None:
+        self.soh_data = {}
+        self.mass_pos_data = {}
+        self.waveform_data = {}
+        self.log_data = {}
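+        # MSeedReader fills these dictionaries in place; each test passes
+        # them in and inspects them after read()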
+
+    def test_read_ascii(self):
+        args = {
+            'file_path': ascii_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['LOG'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.log_data.keys()), ['AX08'])
+        self.assertEqual(list(self.log_data['AX08'].keys()), ['LOG'])
+        self.assertEqual(len(self.log_data['AX08']['LOG']), 16)
+        self.assertEqual(
+            self.log_data['AX08']['LOG'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1625456260.12  To:1625456260.12\n\r'
+            '\nQuanterra Packet Baler Model 14 Restart. V'
+        )
+        self.assertEqual(
+            self.log_data['AX08']['LOG'][1][:100],
+            '\n\nSTATE OF HEALTH: From:1625456366.62  To:1625456366.62'
+            '\nReducing Status Polling Interval\r\n[2021-07-0'
+        )
+
+    def test_read_blockettes_info(self):
+        args = {
+            'file_path': blockettes_files,
+            'is_multiplex': True,
+            'req_soh_chans': ['ACE'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.log_data.keys()), ['3203'])
+        self.assertEqual(list(self.log_data['3203'].keys()), ['ACE'])
+        self.assertEqual(len(self.log_data['3203']['ACE']), 1)
+        self.assertEqual(
+            self.log_data['3203']['ACE'][0][:100],
+            '\n\nSTATE OF HEALTH: From:1671729287.00014  To:1671729287.0'
+            '\n===========\nVCO correction: 53.7109375\nTim'
+        )
+
+    def test_not_is_multiplex_read_channel(self):
+        # is_multiplex = False => reading stops at the first channel that
+        # doesn't match the request, so channel 'EL1' is read but not to
+        # completion
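+        # (compare 'size' below with the multiplexed test that follows)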
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['EL1'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(self.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(self.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730013.805)
+        self.assertEqual(self.waveform_data['3203']['EL1']['size'], 1932)
+        self.assertEqual(self.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_is_multiplex_read_channel(self):
+        # is_multiplex = True => read every record
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': True,
+            'req_wf_chans': ['EL1'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL1'])
+        self.assertEqual(self.waveform_data['3203']['EL1']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL1']['startTmEpoch'],
+                         1671730004.145029)
+        self.assertEqual(self.waveform_data['3203']['EL1']['endTmEpoch'],
+                         1671730720.4348998)
+        self.assertEqual(self.waveform_data['3203']['EL1']['size'], 143258)
+        self.assertEqual(self.waveform_data['3203']['EL1']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL1']['tracesInfo']),
+                         1)
+
+    def test_not_is_multiplex_selected_channel_in_middle(self):
+        # the selected channel is never reached because an earlier record
+        # doesn't meet the requirement when is_multiplex = False
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['EL2'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), [])
+
+    def test_is_multiplex_selected_channel_in_middle(self):
+        # is_multiplex = True => the selected channel will be read
+        args = {
+            'file_path': multiplex_file,
+            'is_multiplex': True,
+            'req_wf_chans': ['EL2'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['3203'])
+        self.assertEqual(list(self.waveform_data['3203'].keys()), ['EL2'])
+        self.assertEqual(self.waveform_data['3203']['EL2']['samplerate'], 200)
+        self.assertEqual(self.waveform_data['3203']['EL2']['startTmEpoch'],
+                         1671730004.3100293)
+        self.assertEqual(self.waveform_data['3203']['EL2']['endTmEpoch'],
+                         1671730720.5549)
+        self.assertEqual(self.waveform_data['3203']['EL2']['size'], 143249)
+        self.assertEqual(self.waveform_data['3203']['EL2']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['3203']['EL2']['tracesInfo']),
+                         1)
+
+    def test_existing_time_range(self):
+        # check that data_time comes from the given range; the end time may
+        # be slightly greater than read_end, depending on the record's end
+        # time
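+        # (read_start/read_end are epoch seconds, around 2021-07-05)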
+        args = {
+            'file_path': soh_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['VKI'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'read_start': 1625456018.0,
+            'read_end': 1625505627.9998999
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data['AX08'].keys()), ['VKI'])
+        self.assertEqual(self.soh_data['AX08']['VKI']['startTmEpoch'],
+                         1625446018.0)
+        self.assertEqual(self.soh_data['AX08']['VKI']['endTmEpoch'],
+                         1625510338.0)
+
+    def test_non_existing_time_range(self):
+        # if the given time range lies outside the data's time span, no
+        # station will be created
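+        # (this range ends before the earliest record in the file)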
+        args = {
+            'file_path': soh_file,
+            'is_multiplex': False,
+            'req_soh_chans': ['VKI'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'read_start': 1625356018.0,
+            'read_end': 1625405627.9998999
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(self.soh_data, {})
+        self.assertEqual(self.mass_pos_data, {})
+        self.assertEqual(self.waveform_data, {})
+
+    def test_read_waveform(self):
+        args = {
+            'file_path': waveform_file,
+            'is_multiplex': False,
+            'req_wf_chans': ['LHE'],
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.waveform_data.keys()), ['AX08'])
+        self.assertEqual(list(self.waveform_data['AX08'].keys()), ['LHE'])
+        self.assertEqual(self.waveform_data['AX08']['LHE']['samplerate'], 1)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['startTmEpoch'],
+                         1625445156.000001)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['endTmEpoch'],
+                         1625532950.0)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['size'], 87794)
+        self.assertEqual(self.waveform_data['AX08']['LHE']['gaps'], [])
+        self.assertEqual(len(self.waveform_data['AX08']['LHE']['tracesInfo']),
+                         1)
+
+    def test_read_mass_pos_channel(self):
+        # mass position channels will be read if one or both of the
+        # include_mpxxxxxx flags are True
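+        # (with include_mp123zne, 'VM1' is routed into mass_pos_data)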
+        args = {
+            'file_path': mass_pos_file,
+            'is_multiplex': False,
+            'include_mp123zne': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.mass_pos_data.keys()), ['AX08'])
+        self.assertEqual(list(self.mass_pos_data['AX08'].keys()), ['VM1'])
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['samplerate'], 0.1)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['startTmEpoch'],
+                         1625444970.0)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['endTmEpoch'],
+                         1625574580.0)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['size'], 12961)
+        self.assertEqual(self.mass_pos_data['AX08']['VM1']['gaps'], [])
+        self.assertEqual(len(self.mass_pos_data['AX08']['VM1']['tracesInfo']),
+                         1)
+
+    def test_gap(self):
+        # gaps will be detected when gap_minimum is set
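+        # (the per-channel 'EX1' gap asserted below spans ~29 minutes)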
+        args = {
+            'file_path': gap_file,
+            'is_multiplex': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'gap_minimum': 60
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(self.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(self.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(self.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(self.soh_data['3734']['EX1']['gaps'],
+                         [[1534522200.0, 1534523940.0]])
+
+    def test_not_detect_gap(self):
+        # if gap_minimum isn't set but gaps exist, the data is still
+        # separated, but the gaps aren't added to the gap list
+        args = {
+            'file_path': gap_file,
+            'is_multiplex': True,
+            'soh_data': self.soh_data,
+            'mass_pos_data': self.mass_pos_data,
+            'waveform_data': self.waveform_data,
+            'log_data': self.log_data,
+            'gap_minimum': None
+        }
+        reader = MSeedReader(**args)
+        reader.read()
+        self.assertEqual(list(self.soh_data.keys()), ['3734'])
+        self.assertEqual(sorted(list(self.soh_data['3734'].keys())),
+                         ['EX1', 'EX2', 'EX3', 'GAN', 'GEL', 'GLA', 'GLO',
+                          'GNS', 'GPL', 'GST', 'LCE', 'LCQ', 'VCO', 'VDT',
+                          'VEC', 'VEI', 'VPB'])
+        self.assertAlmostEqual(self.soh_data['3734']['EX1']['samplerate'],
+                               0.0166, 3)
+        self.assertEqual(self.soh_data['3734']['EX1']['startTmEpoch'],
+                         1534512840.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['endTmEpoch'],
+                         1534550400.0)
+        self.assertEqual(self.soh_data['3734']['EX1']['size'], 597)
+        self.assertEqual(self.soh_data['3734']['EX1']['gaps'], [])  # no gaps
diff --git a/tests/test_controller/test_processing.py b/tests/test_controller/test_processing.py
index 289eb5bbdfc516f1a3b6d925e15c507415359b75..0fa881ff8e3cc5f2f8bf0652d21c31342c3fbed3 100644
--- a/tests/test_controller/test_processing.py
+++ b/tests/test_controller/test_processing.py
@@ -22,7 +22,7 @@ rt130_dir = TEST_DATA_DIR.joinpath('RT130-sample/2017149.92EB/2017150')
 q330_dir = TEST_DATA_DIR.joinpath('Q330-sample/day_vols_AX08')
 centaur_dir = TEST_DATA_DIR.joinpath('Centaur-sample/SOH')
 pegasus_dir = TEST_DATA_DIR.joinpath('Pegasus-sample/Pegasus_SVC4/soh')
-mix_traces_dir = TEST_DATA_DIR.joinpath('Q330_mixed_traces')
+multiplex_dir = TEST_DATA_DIR.joinpath('Q330_multiplex')
 
 
 class TestLoadDataAndReadChannels(TestCase):
@@ -212,21 +212,21 @@ class TestLoadDataAndReadChannels(TestCase):
         self.assertListEqual(ret[2], pegasus_wf_channels)
         self.assertListEqual(ret[3], pegasus_spr_gt_1)
 
-        mix_traces_soh_channels = ['LOG']
-        mix_traces_mass_pos_channels = []
-        mix_traces_wf_channels = sorted(
+        multiplex_soh_channels = ['LOG']
+        multiplex_mass_pos_channels = []
+        multiplex_wf_channels = sorted(
             ['BH1', 'BH2', 'BH3', 'BH4', 'BH5', 'BH6',
              'EL1', 'EL2', 'EL4', 'EL5', 'EL6', 'ELZ'])
-        mix_traces_spr_gt_1 = sorted(
+        multiplex_spr_gt_1 = sorted(
             ['BS1', 'BS2', 'BS3', 'BS4', 'BS5', 'BS6',
              'ES1', 'ES2', 'ES3', 'ES4', 'ES5', 'ES6',
              'LS1', 'LS2', 'LS3', 'LS4', 'LS5', 'LS6',
              'SS1', 'SS2', 'SS3', 'SS4', 'SS5', 'SS6'])
-        ret = read_mseed_channels(self.widget_stub, [mix_traces_dir], True)
-        self.assertListEqual(ret[0], mix_traces_soh_channels)
-        self.assertListEqual(ret[1], mix_traces_mass_pos_channels)
-        self.assertListEqual(ret[2], mix_traces_wf_channels)
-        self.assertListEqual(ret[3], mix_traces_spr_gt_1)
+        ret = read_mseed_channels(self.widget_stub, [multiplex_dir], True)
+        self.assertListEqual(ret[0], multiplex_soh_channels)
+        self.assertListEqual(ret[1], multiplex_mass_pos_channels)
+        self.assertListEqual(ret[2], multiplex_wf_channels)
+        self.assertListEqual(ret[3], multiplex_spr_gt_1)
 
     def test_read_channels_rt130_dir(self):
         """
diff --git a/tests/test_data/Q330_mixed_traces/XX-3203_4-20221222183011 b/tests/test_data/Q330_multiplex/XX-3203_4-20221222183011
similarity index 100%
rename from tests/test_data/Q330_mixed_traces/XX-3203_4-20221222183011
rename to tests/test_data/Q330_multiplex/XX-3203_4-20221222183011
diff --git a/tests/test_database/test_extract_data.py b/tests/test_database/test_extract_data.py
index 64e7e1da1358b02d0a133d401597af071962b794..418ea97751a4167656c6a2ab139937e26d3877a6 100644
--- a/tests/test_database/test_extract_data.py
+++ b/tests/test_database/test_extract_data.py
@@ -2,8 +2,7 @@ import unittest
 
 from sohstationviewer.database.extract_data import (
     get_chan_plot_info,
-    get_wf_plot_info,
-    get_chan_label,
+    get_seismic_chan_label,
     get_signature_channels,
     get_color_def,
     get_color_ranges,
@@ -11,7 +10,7 @@ from sohstationviewer.database.extract_data import (
 
 
 class TestExtractData(unittest.TestCase):
-    def test_get_chan_plot_info_good_channel_and_data_type(self):
+    def test_get_chan_plot_info_good_soh_channel_and_data_type(self):
         """
         Test basic functionality of get_chan_plot_info - channel and data type
         combination exists in database table `Channels`
@@ -25,9 +24,62 @@ class TestExtractData(unittest.TestCase):
                            'label': 'SOH/Data Def',
                            'fixPoint': 0,
                            'valueColors': '0:W|1:C'}
-        self.assertDictEqual(
-            get_chan_plot_info('SOH/Data Def', {'samplerate': 10}, 'RT130'),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('SOH/Data Def', 'RT130'),
+                             expected_result)
+
+    def test_get_chan_plot_info_masspos_channel(self):
+        with self.subTest("Mass position 'VM'"):
+            expected_result = {'channel': 'VM1',
+                               'plotType': 'linesMasspos',
+                               'height': 4,
+                               'unit': 'V',
+                               'linkedChan': None,
+                               'convertFactor': 0.1,
+                               'label': 'VM1-MassPos',
+                               'fixPoint': 1,
+                               'valueColors': None}
+            self.assertDictEqual(get_chan_plot_info('VM1', 'Q330'),
+                                 expected_result)
+
+        with self.subTest("Mass position 'MassPos'"):
+            expected_result = {'channel': 'MassPos1',
+                               'plotType': 'linesMasspos',
+                               'height': 4,
+                               'unit': 'V',
+                               'linkedChan': None,
+                               'convertFactor': 1,
+                               'label': 'MassPos1',
+                               'fixPoint': 1,
+                               'valueColors': None}
+            self.assertDictEqual(get_chan_plot_info('MassPos1', 'RT130'),
+                                 expected_result)
+
+    def test_get_chan_plot_info_seismic_channel(self):
+        with self.subTest("RT130 Seismic"):
+            expected_result = {'channel': 'DS2',
+                               'plotType': 'linesSRate',
+                               'height': 4,
+                               'unit': '',
+                               'linkedChan': None,
+                               'convertFactor': 1,
+                               'label': 'DS2',
+                               'fixPoint': 0,
+                               'valueColors': None}
+            self.assertDictEqual(get_chan_plot_info('DS2', 'RT130'),
+                                 expected_result)
+
+        with self.subTest("MSeed Seismic"):
+            expected_result = {'channel': 'LHE',
+                               'plotType': 'linesSRate',
+                               'height': 4,
+                               'unit': '',
+                               'linkedChan': None,
+                               'convertFactor': 1,
+                               'label': 'LHE-EW',
+                               'fixPoint': 0,
+                               'valueColors': None}
+            self.assertDictEqual(get_chan_plot_info('LHE', 'Q330'),
+                                 expected_result)
 
     def test_get_chan_plot_info_data_type_is_unknown(self):
         """
@@ -44,10 +96,8 @@ class TestExtractData(unittest.TestCase):
                            'label': 'DEFAULT-Bad Channel ID',
                            'fixPoint': 0,
                            'valueColors': None}
-        self.assertDictEqual(
-            get_chan_plot_info('Bad Channel ID',
-                               {'samplerate': 10}, 'Unknown'),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('Bad Channel ID', 'Unknown'),
+                             expected_result)
 
         # Channel exist in database
         expected_result = {'channel': 'LCE',
@@ -59,12 +109,8 @@ class TestExtractData(unittest.TestCase):
                            'label': 'LCE-PhaseError',
                            'fixPoint': 0,
                            'valueColors': 'L:W|D:Y'}
-        self.assertDictEqual(
-            get_chan_plot_info('LCE', {'samplerate': 10}, 'Unknown'),
-            expected_result)
-        self.assertDictEqual(
-            get_chan_plot_info('LCE', {'samplerate': 10}, 'Unknown'),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('LCE', 'Unknown'),
+                             expected_result)
 
     def test_get_chan_plot_info_bad_channel_or_data_type(self):
         """
@@ -86,69 +132,54 @@ class TestExtractData(unittest.TestCase):
         # Data type has None value. None value comes from
         # controller.processing.detect_data_type.
         expected_result['label'] = 'DEFAULT-SOH/Data Def'
-        self.assertDictEqual(
-            get_chan_plot_info('SOH/Data Def', {'samplerate': 10}, None),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('SOH/Data Def', None),
+                             expected_result)
 
         # Channel and data type are empty strings
         expected_result['label'] = 'DEFAULT-'
-        self.assertDictEqual(
-            get_chan_plot_info('', {'samplerate': 10}, ''),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('', ''),
+                             expected_result)
 
         # Channel exists in database but data type does not
         expected_result['label'] = 'DEFAULT-SOH/Data Def'
         self.assertDictEqual(
-            get_chan_plot_info('SOH/Data Def',
-                               {'samplerate': 10}, 'Bad Data Type'),
+            get_chan_plot_info('SOH/Data Def', 'Bad Data Type'),
             expected_result
         )
 
         # Data type exists in database but channel does not
         expected_result['label'] = 'DEFAULT-Bad Channel ID'
-        self.assertDictEqual(
-            get_chan_plot_info('Bad Channel ID',
-                               {'samplerate': 10}, 'RT130'),
-            expected_result)
+        self.assertDictEqual(get_chan_plot_info('Bad Channel ID', 'RT130'),
+                             expected_result)
 
         # Both channel and data type exists in database but not their
         # combination
         expected_result['label'] = 'DEFAULT-SOH/Data Def'
-        self.assertDictEqual(
-            get_chan_plot_info('SOH/Data Def', {'samplerate': 10}, 'Q330'),
-            expected_result)
-
-    def test_get_wf_plot_info(self):
-        """
-        Test basic functionality of get_wf_plot_info - ensures returned
-        dictionary contains all the needed key. Bad channel IDs cases are
-        handled in tests for get_chan_label.
-        """
-        result = get_wf_plot_info('CH1')
-        expected_keys = {'param', 'plotType', 'valueColors', 'height',
-                         'label', 'unit', 'channel', 'convertFactor'}
-        self.assertSetEqual(set(result.keys()), expected_keys)
+        self.assertDictEqual(get_chan_plot_info('SOH/Data Def', 'Q330'),
+                             expected_result)
 
-    def test_get_chan_label_good_channel_id(self):
+    def test_get_seismic_chan_label_good_channel_id(self):
         """
-        Test basic functionality of get_chan_label - channel ID ends in one
-        of the keys in conf.dbSettings.dbConf['seisLabel'] or starts with 'DS'
+        Test basic functionality of get_seismic_chan_label - channel ID ends
+        in one of the keys in conf.dbSettings.dbConf['seisLabel'] or
+        starts with 'DS'
         """
         # Channel ID does not start with 'DS'
-        self.assertEqual(get_chan_label('CH1'), 'CH1-NS')
-        self.assertEqual(get_chan_label('CH2'), 'CH2-EW')
-        self.assertEqual(get_chan_label('CHG'), 'CHG')
+        self.assertEqual(get_seismic_chan_label('CH1'), 'CH1-NS')
+        self.assertEqual(get_seismic_chan_label('CH2'), 'CH2-EW')
+        self.assertEqual(get_seismic_chan_label('CHG'), 'CHG')
 
         # Channel ID starts with 'DS'
-        self.assertEqual(get_chan_label('DS-TEST-CHANNEL'), 'DS-TEST-CHANNEL')
+        self.assertEqual(get_seismic_chan_label('DS-TEST-CHANNEL'),
+                         'DS-TEST-CHANNEL')
 
     def test_get_chan_label_bad_channel_id(self):
         """
-        Test basic functionality of get_chan_label - channel ID does not end in
-        one of the keys in conf.dbSettings.dbConf['seisLabel'] or is the empty
-        string.
+        Test basic functionality of get_seismic_chan_label - channel ID does
+        not end in one of the keys in conf.dbSettings.dbConf['seisLabel']
+        or is the empty string.
         """
-        self.assertRaises(IndexError, get_chan_label, '')
+        self.assertRaises(IndexError, get_seismic_chan_label, '')
 
     def test_get_signature_channels(self):
         """Test basic functionality of get_signature_channels"""