diff --git a/documentation/06 _ Select SOH.help.md b/documentation/06 _ Select SOH.help.md
index 41e6eef89e12b35e57a55a2ebc8040dda98612a6..74ff7ba9b4f1a43488619266b229ce25eacabcc3 100644
--- a/documentation/06 _ Select SOH.help.md	
+++ b/documentation/06 _ Select SOH.help.md	
@@ -72,4 +72,4 @@ channels in the data set.
 + "Save": Save any changes to database
 + "Save - Add to Main": Save any changes to database and add the selected
 Preferred SOH's name to Main Window so that its SOH list will be included when
-reading the data set.
\ No newline at end of file
+reading the data set, and the SOH channels will be plotted in this order.
\ No newline at end of file
diff --git a/sohstationviewer/model/data_loader.py b/sohstationviewer/model/data_loader.py
index f151a07ef31a5497a99d9337dcd1feefe5763ed4..00cb6ff54261895fa1a39053da306a21fad3418b 100644
--- a/sohstationviewer/model/data_loader.py
+++ b/sohstationviewer/model/data_loader.py
@@ -62,7 +62,7 @@ class DataLoaderWorker(QtCore.QObject):
         folders_str = ', '.join([dir.name for dir in folders])
         try:
             if self.data_type == 'RT130':
-                from sohstationviewer.model.reftek.reftek import RT130
+                from sohstationviewer.model.reftek_data.reftek import RT130
                 object_type = RT130
             else:
                 from sohstationviewer.model.mseed_data.mseed import MSeed
diff --git a/sohstationviewer/model/data_type_model.py b/sohstationviewer/model/data_type_model.py
index 4f7b6253f786b0f1c7b51d3347779e44e5a6cecb..2f8c6a5dd21e56315dc7a9c6a6527002d3f5fd75 100644
--- a/sohstationviewer/model/data_type_model.py
+++ b/sohstationviewer/model/data_type_model.py
@@ -309,7 +309,6 @@ class DataTypeModel():
         self.sort_all_data()
         self.track_info("Combine data.", LogType.INFO)
         self.combine_traces_in_all_data()
-        self.check_not_found_soh_channels()
         for key in self.data_time:
             if self.data_time[key] == [constants.HIGHEST_INT, 0]:
                 # this happens when there is text or ascii only in the data
@@ -456,19 +455,6 @@ class DataTypeModel():
         execute_db(f'UPDATE PersistentData SET FieldValue="{self.tmp_dir}" '
                    f'WHERE FieldName="tempDataDirectory"')
 
-    def check_not_found_soh_channels(self):
-        all_chans_meet_req = (
-                list(self.soh_data[self.selected_key].keys()) +
-                list(self.mass_pos_data[self.selected_key].keys()) +
-                list(self.log_data[self.selected_key].keys()))
-
-        not_found_chans = [c for c in self.req_soh_chans
-                           if c not in all_chans_meet_req]
-        if not_found_chans != []:
-            msg = (f"No data found for the following channels: "
-                   f"{', '.join( not_found_chans)}")
-            self.processing_log.append((msg, LogType.WARNING))
-
     def combine_times_data_of_traces_w_spr_less_or_equal_1(
             self, data: Dict[str, Dict], selected_key: Union[(str, str), str],
             data_name: str):
diff --git a/sohstationviewer/model/general_data/general_data_helper.py b/sohstationviewer/model/general_data/general_data_helper.py
index 953f80ad72761c464a2db23aa493368b05681532..b689a6e97dda433c59a2ab8ddff4ebbd3469fc2a 100644
--- a/sohstationviewer/model/general_data/general_data_helper.py
+++ b/sohstationviewer/model/general_data/general_data_helper.py
@@ -1,5 +1,7 @@
 from typing import List, Dict, Optional, Union, Tuple
 import numpy as np
+import os
+from pathlib import Path
 
 from sohstationviewer.database.extract_data import get_convert_factor
 
@@ -208,3 +210,36 @@ def reset_data(selected_key: Union[str, Tuple[str, str]], data_dict: Dict):
                 del selected_data_dict[chan_id][k]
             except KeyError:
                 pass
+
+
+def read_text(path2file: Path) -> Optional[str]:
+    """
+    CHANGED FROM handling_data.read_text:
+        + No need to check whether the file is binary beforehand; a caught
+            UnicodeDecodeError means the file is binary
+
+    Read a text file whose content will be added to log_data under channel
+        TEXT.
+        + Return None if the file isn't a text file
+        + Remove empty lines in content
+    :param path2file: absolute path to the text file
+    :return: the file content with empty lines removed, None if the file is
+        binary, or '' if the file is empty
+    """
+    try:
+        with open(path2file, 'r') as file:
+            content = file.read().strip()
+    except UnicodeDecodeError:
+        return
+
+    if content != '':
+        # skip empty lines
+        no_empty_line_list = [
+            line for line in content.splitlines() if line]
+        no_empty_line_content = os.linesep.join(no_empty_line_list)
+
+        log_text = "\n\n** STATE OF HEALTH: %s\n" % path2file.name
+        log_text += no_empty_line_content
+    else:
+        log_text = ''
+    return log_text
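
A minimal usage sketch of the relocated read_text(); the file path below is hypothetical:

```python
from pathlib import Path

from sohstationviewer.model.general_data.general_data_helper import read_text

log_content = read_text(Path('/tmp/station.log'))  # hypothetical path
if log_content is None:
    print('binary file, skipped')
elif log_content:
    # content starts with "\n\n** STATE OF HEALTH: station.log\n"
    print(log_content.lstrip().splitlines()[0])
```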
diff --git a/sohstationviewer/model/general_data/general_record_helper.py b/sohstationviewer/model/general_data/general_record_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1f9289d4d481bb3bf7ce41ec2d2d29ad666d32f
--- /dev/null
+++ b/sohstationviewer/model/general_data/general_record_helper.py
@@ -0,0 +1,26 @@
+import struct
+
+
+class Unpacker:
+    """
+    A wrapper around struct.unpack() to unpack binary data without having to
+    explicitly define the byte order in the format string. Also restrict the
+    type of format to str and buffer to bytes.
+    """
+    def __init__(self, byte_order_char: str = '') -> None:
+        self.byte_order_char = byte_order_char
+
+    def unpack(self, format: str, buffer: bytes):
+        """
+        Unpack a string of bytes into a tuple of values based on the given
+        format
+        :param format: the format used to unpack the byte string
+        :param buffer: the byte string
+        :return: a tuple containing the unpacked values.
+        """
+        default_byte_order_chars = ('@', '=', '>', '<', '!')
+        if format.startswith(default_byte_order_chars):
+            format = self.byte_order_char + format[1:]
+        else:
+            format = self.byte_order_char + format
+        return struct.unpack(format, buffer)
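
A minimal sketch of the Unpacker contract: the byte-order character is supplied once at construction instead of in every format string (RT130 data is big-endian):

```python
from sohstationviewer.model.general_data.general_record_helper import Unpacker

big_endian_unpacker = Unpacker('>')
print(big_endian_unpacker.unpack('H', b'\x01\x02'))   # (258,)
# An explicit byte-order character is overridden by the instance's own:
print(big_endian_unpacker.unpack('>H', b'\x01\x02'))  # (258,)
```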
diff --git a/sohstationviewer/model/handling_data_reftek.py b/sohstationviewer/model/handling_data_reftek.py
index 33016a02a1c241a9e222a73312059f6232534f16..8524de7c6398b08f69cc78c9f3666dbbd4881414 100644
--- a/sohstationviewer/model/handling_data_reftek.py
+++ b/sohstationviewer/model/handling_data_reftek.py
@@ -3,7 +3,7 @@ from typing import Tuple, List, Dict
 from obspy.core import Stream
 from obspy import UTCDateTime
 
-from sohstationviewer.model.reftek.from_rt2ms import core
+from sohstationviewer.model.reftek_data.reftek_reader import core
 from sohstationviewer.model.handling_data import read_mseed_trace
 
 
diff --git a/sohstationviewer/model/mseed_data/mseed.py b/sohstationviewer/model/mseed_data/mseed.py
index ba5a2233f5a05eeddcb380ef53e016c0076300d2..ad0dbda765186b581e14e946c52e94bb5039b01b 100644
--- a/sohstationviewer/model/mseed_data/mseed.py
+++ b/sohstationviewer/model/mseed_data/mseed.py
@@ -8,15 +8,17 @@ from pathlib import Path
 from typing import Dict, List
 
 from sohstationviewer.controller.util import validate_file, validate_dir
-from sohstationviewer.model.mseed_data.mseed_reader import MSeedReader
+from sohstationviewer.view.util.enums import LogType
+
 from sohstationviewer.model.general_data.general_data import \
     GeneralData, ThreadStopped, ProcessingDataError
-from sohstationviewer.view.util.enums import LogType
+from sohstationviewer.model.general_data.general_data_helper import read_text
 
 from sohstationviewer.model.mseed_data.mseed_helper import \
-    retrieve_nets_from_data_dict, read_text
+    retrieve_nets_from_data_dict
 from sohstationviewer.model.mseed_data.record_reader_helper import \
     MSeedReadError
+from sohstationviewer.model.mseed_data.mseed_reader import MSeedReader
 
 
 class MSeed(GeneralData):
diff --git a/sohstationviewer/model/mseed_data/mseed_helper.py b/sohstationviewer/model/mseed_data/mseed_helper.py
index 32d237e2ec5a3dc353458691ff4abe5381d33a46..2d5b1eb300d83e39a2be5a9331c5732bf9b1b977 100644
--- a/sohstationviewer/model/mseed_data/mseed_helper.py
+++ b/sohstationviewer/model/mseed_data/mseed_helper.py
@@ -1,7 +1,4 @@
-# Functions that change from handling_data's functions
-import os
-from pathlib import Path
-from typing import Union, List, Dict
+from typing import List, Dict
 
 
 def retrieve_nets_from_data_dict(data_dict: Dict,
@@ -18,36 +15,3 @@ def retrieve_nets_from_data_dict(data_dict: Dict,
         for c in data_dict[sta_id]:
             nets_by_sta[sta_id].update(
                 data_dict[sta_id][c]['nets'])
-
-
-def read_text(path2file: Path) -> Union[bool, str]:
-    """
-    CHANGED FROM handling_data.read_text:
-        + Don't need to check binary because UnicodeDecodeError caught means
-            the file is binary
-
-    Read text file and add to log_data under channel TEXT.
-        + Raise exception if the file isn't a text file
-        + Remove empty lines in content
-    :param path2file: str - absolute path to text file
-    :param file_name: str - name of text file
-    :param text_logs: holder to keep log string, refer to
-        DataTypeModel.__init__.log_data['TEXT']
-    """
-    try:
-        with open(path2file, 'r') as file:
-            content = file.read().strip()
-    except UnicodeDecodeError:
-        return
-
-    if content != '':
-        # skip empty lines
-        no_empty_line_list = [
-            line for line in content.splitlines() if line]
-        no_empty_line_content = os.linesep.join(no_empty_line_list)
-
-        log_text = "\n\n** STATE OF HEALTH: %s\n" % path2file.name
-        log_text += no_empty_line_content
-    else:
-        log_text = ''
-    return log_text
diff --git a/sohstationviewer/model/mseed_data/record_reader_helper.py b/sohstationviewer/model/mseed_data/record_reader_helper.py
index c9fa6ace53751c1487fd34ed678fda5cec38c862..4028d497cb3f48b426ed0371402eaa972bead41b 100644
--- a/sohstationviewer/model/mseed_data/record_reader_helper.py
+++ b/sohstationviewer/model/mseed_data/record_reader_helper.py
@@ -4,37 +4,14 @@ from enum import Enum
 
 from obspy import UTCDateTime
 
+from sohstationviewer.model.general_data.general_record_helper import Unpacker
+
 
 class MSeedReadError(Exception):
     def __init__(self, msg):
         self.message = msg
 
 
-class Unpacker:
-    """
-    A wrapper around struct.unpack() to unpack binary data without having to
-    explicitly define the byte order in the format string. Also restrict the
-    type of format to str and buffer to bytes.
-    """
-    def __init__(self, byte_order_char: str = '') -> None:
-        self.byte_order_char = byte_order_char
-
-    def unpack(self, format: str, buffer: bytes):
-        """
-        Unpack a string of bytes into a tuple of values based on the given
-        format
-        :param format: the format used to unpack the byte string
-        :param buffer: the byte string
-        :return: a tuple containing the unpacked values.
-        """
-        default_byte_order_chars = ('@', '=', '>', '<', '!')
-        if format.startswith(default_byte_order_chars):
-            format = self.byte_order_char + format[:1]
-        else:
-            format = self.byte_order_char + format
-        return struct.unpack(format, buffer)
-
-
 @dataclass
 class FixedHeader:
     """
diff --git a/sohstationviewer/model/reftek/__init__.py b/sohstationviewer/model/reftek_data/__init__.py
similarity index 100%
rename from sohstationviewer/model/reftek/__init__.py
rename to sohstationviewer/model/reftek_data/__init__.py
diff --git a/sohstationviewer/model/reftek/log_info.py b/sohstationviewer/model/reftek_data/log_info.py
similarity index 99%
rename from sohstationviewer/model/reftek/log_info.py
rename to sohstationviewer/model/reftek_data/log_info.py
index 54756cdfee58af8b33f9529cd854bc7582b55609..70e5a111fd5a5a8d2ec43c25d69c4b56f0a4a0ff 100644
--- a/sohstationviewer/model/reftek/log_info.py
+++ b/sohstationviewer/model/reftek_data/log_info.py
@@ -7,7 +7,7 @@ from sohstationviewer.controller.util import (
 from sohstationviewer.view.util.enums import LogType
 
 if TYPE_CHECKING:
-    from sohstationviewer.model.reftek.reftek import RT130
+    from sohstationviewer.model.reftek_data.reftek import RT130
 
 
 class LogInfo():
diff --git a/sohstationviewer/model/reftek/reftek.py b/sohstationviewer/model/reftek_data/reftek.py
similarity index 79%
rename from sohstationviewer/model/reftek/reftek.py
rename to sohstationviewer/model/reftek_data/reftek.py
index 7c3171a749fc005f103b93c537a1b09b12f9bea0..0a0275ea83e1dd5e9cc199fb55824b205dfd7c0a 100755
--- a/sohstationviewer/model/reftek/reftek.py
+++ b/sohstationviewer/model/reftek_data/reftek.py
@@ -2,25 +2,28 @@
 RT130 object to hold and process RefTek data
 """
 from pathlib import Path
-from typing import Tuple, List, Union
-import numpy as np
 import os
+from typing import Union, List, Tuple, Dict
 import traceback
+import numpy as np
+from obspy.core import Stream
 
-from sohstationviewer.model.reftek.from_rt2ms import (
-    core, soh_packet, packet)
-from sohstationviewer.model.reftek.log_info import LogInfo
-from sohstationviewer.model.data_type_model import (
-    DataTypeModel, ThreadStopped, ProcessingDataError)
-from sohstationviewer.model.handling_data import read_text
-from sohstationviewer.model.handling_data_reftek import (
-    check_reftek_header, read_reftek_stream)
 from sohstationviewer.conf import constants
 from sohstationviewer.view.util.enums import LogType
 from sohstationviewer.controller.util import validate_file
 
+from sohstationviewer.model.general_data.general_data import \
+    GeneralData, ThreadStopped, ProcessingDataError
+from sohstationviewer.model.general_data.general_data_helper import read_text
+
+from sohstationviewer.model.reftek_data.reftek_helper import (
+    check_reftek_header, read_reftek_stream,
+    retrieve_gaps_from_stream_header)
+from sohstationviewer.model.reftek_data.reftek_reader import core, soh_packet
+from sohstationviewer.model.reftek_data.log_info import LogInfo
 
-class RT130(DataTypeModel):
+
+class RT130(GeneralData):
     """
     read and process reftek file into object with properties can be used to
     plot SOH data, mass position data, waveform data and gaps
@@ -40,12 +43,39 @@ class RT130(DataTypeModel):
         """
         self.rt130_waveform_data_req: bool = kwarg['rt130_waveform_data_req']
         """
+        stream_header_by_key_chan: stream headers by key and chan_id, used
+            later to retrieve gaps for each key
+        """
+        self.stream_header_by_key_chan: Dict[str, Dict[str, Stream]] = {}
+        """
+        gaps_by_key_chan: gap list for each key/chan_id to separate data at
+            gaps, overlaps
+        """
+        self.gaps_by_key_chan: Dict[Union[str, Tuple[str, str]],
+                                    Dict[str, List[List[int]]]] = {}
+        """
         found_data_streams: list of data streams found to help inform user
             why the selected data streams don't show up
         """
         self.found_data_streams: List[int] = []
+
         self.processing_data()
 
+    def processing_data(self):
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.read_folder(self.dir)
+
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.selected_key = self.select_key()
+        if self.selected_key is None:
+            raise ThreadStopped()
+
+        if self.creator_thread.isInterruptionRequested():
+            raise ThreadStopped()
+        self.finalize_data()
+
     def finalize_data(self):
         """
         This function should be called after all folders finish reading to
@@ -53,6 +83,7 @@ class RT130(DataTypeModel):
             + check not found data stream to give user a warning if needed
             + other tasks in super().finalize_data()
         """
+        self.track_info("Finalizing...", LogType.INFO)
         self.track_info(
             "Prepare SOH data from log data", LogType.INFO)
         self.prepare_soh_data_from_log_data()
@@ -65,7 +96,18 @@ class RT130(DataTypeModel):
                        f"{', '.join(map(str, not_found_data_streams))}")
                 self.processing_log.append((msg, LogType.WARNING))
 
-        super().finalize_data()
+        self.sort_all_data()
+        self.combine_all_data()
+        self.apply_convert_factor_to_data_dicts()
+
+        retrieve_gaps_from_stream_header(
+            self.stream_header_by_key_chan, self.gaps_by_key_chan,
+            self.gaps, self.gap_minimum, self.read_start, self.read_end)
+
+        for key in self.data_time:
+            if self.data_time[key] == [constants.HIGHEST_INT, 0]:
+                # this happens when there is text or ascii only in the data
+                self.data_time[key] = [self.read_start, self.read_end]
 
     def read_folders(self) -> None:
         """
@@ -99,6 +141,8 @@ class RT130(DataTypeModel):
             total = sum([len(files) for _, _, files in os.walk(self.dir)])
 
         for folder in folders:
+            if not os.path.isdir(folder):
+                raise ProcessingDataError(f"Path '{folder}' not exist")
             for path, subdirs, files in os.walk(folder):
                 for file_name in files:
                     if self.creator_thread.isInterruptionRequested():
@@ -130,7 +174,11 @@ class RT130(DataTypeModel):
         + If there is more than one, show all keys, let user choose one to
         return.
         """
-        keys = sorted(list(self.stream_header_by_key_chan.keys()))
+        self.keys = sorted(list(set(
+            list(self.soh_data.keys()) +
+            list(self.mass_pos_data.keys()) +
+            list(self.waveform_data.keys()))))
+        keys = self.keys
         if len(keys) == 0:
             msg = 'No required data found for the data set.'
             raise ProcessingDataError(msg)
@@ -186,7 +234,7 @@ class RT130(DataTypeModel):
             cur_key = (d['unit_id'].decode(),
                        f"{d['experiment_number']}")
             self.populate_cur_key_for_all_data(cur_key)
-            logs = soh_packet.Packet.from_data(d).__str__()
+            logs = soh_packet.SOHPacket.from_data(d).__str__()
             if 'SOH' not in self.log_data[cur_key]:
                 self.log_data[cur_key]['SOH'] = []
             self.log_data[cur_key]['SOH'].append((d['time'], logs))
@@ -217,9 +265,8 @@ class RT130(DataTypeModel):
         cur_key = (rt130._data[0]['unit_id'].decode(),
                    f"{rt130._data[0]['experiment_number']}")
         self.populate_cur_key_for_all_data(cur_key)
-        if data_stream != 9:
-            # don't get event info for mass position
-            self.get_ehet_in_log_data(rt130, cur_key)
+
+        self.get_ehet_in_log_data(rt130, cur_key)
         self.get_mass_pos_data_and_waveform_data(rt130, data_stream, cur_key)
 
     def get_ehet_in_log_data(self, rt130: core.Reftek130,
@@ -240,7 +287,7 @@ class RT130(DataTypeModel):
 
         for index in ind_ehet:
             d = rt130._data[index]
-            logs = packet.EHPacket(d).eh_et_info(nbr_dt_samples)
+            logs = core.EHPacket(d).eh_et_info(nbr_dt_samples)
             if 'EHET' not in self.log_data[cur_key]:
                 self.log_data[cur_key]['EHET'] = []
             self.log_data[cur_key]['EHET'].append((d['time'], logs))
@@ -322,3 +369,18 @@ class RT130(DataTypeModel):
                     'endTmEpoch': self.data_time[k][1]
                 }
                 self.soh_data[k][c_name]['tracesInfo'] = [tr]
+
+    def populate_cur_key_for_all_data(self, cur_key: Tuple[str, str]) -> None:
+        """
+        Set up new data set's key for all data
+
+        :param cur_key: current processing key: DAS SN, experiment number
+        """
+        if cur_key not in self.log_data:
+            self.log_data[cur_key] = {}
+            self.soh_data[cur_key] = {}
+            self.mass_pos_data[cur_key] = {}
+            self.waveform_data[cur_key] = {}
+            self.gaps[cur_key] = []
+            self.data_time[cur_key] = [constants.HIGHEST_INT, 0]
+            self.stream_header_by_key_chan[cur_key] = {}
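
The [constants.HIGHEST_INT, 0] sentinel set up in populate_cur_key_for_all_data() and checked in finalize_data() reads as "no time-series data seen yet for this key"; a self-contained sketch of the fallback, with made-up key and values:

```python
# HIGHEST_INT and the key stand in for constants.HIGHEST_INT and a real
# (DAS serial number, experiment number) key.
HIGHEST_INT = 2 ** 31 - 1
read_start, read_end = 1672531200.0, 1672617600.0
data_time = {('92EB', '0'): [HIGHEST_INT, 0]}  # nothing read yet

for key in data_time:
    if data_time[key] == [HIGHEST_INT, 0]:
        # text or ascii only in the data: fall back to the read range
        data_time[key] = [read_start, read_end]
print(data_time)  # {('92EB', '0'): [1672531200.0, 1672617600.0]}
```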
diff --git a/sohstationviewer/model/reftek_data/reftek_helper.py b/sohstationviewer/model/reftek_data/reftek_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca29e9cbeda98e96564ad2a76f8d53c290725dca
--- /dev/null
+++ b/sohstationviewer/model/reftek_data/reftek_helper.py
@@ -0,0 +1,211 @@
+import numpy as np
+from typing import Tuple, List, Dict, Optional, Union
+
+from obspy.core import Trace
+from obspy.core import Stream
+from obspy import UTCDateTime
+
+from sohstationviewer.model.reftek_data.reftek_reader.core import (
+    DiscontinuousTrace, Reftek130)
+from sohstationviewer.model.general_data.general_data_helper import squash_gaps
+
+
+def check_reftek_header(
+        rt130: Reftek130, cur_key: Tuple[str, str],
+        starttime: UTCDateTime, endtime: UTCDateTime,
+        stream_header_by_key_chan: Dict[str, Dict[str, Stream]],
+        cur_data_dict: Dict, cur_data_time: List[float],
+        include_mp123zne: bool, include_mp456uvw: bool) -> List[int]:
+    """
+    FROM handling_data_reftek.check_reftek_header()
+
+    Read mseed headers of a file from the given rt130 object
+        to check for time, create stream_header for retrieving gaps later.
+        Requested data stream has been checked before passing to this function.
+
+    :param rt130: RT130 object to get data stream from
+    :param cur_key: Tuple of DAS serial number, experiment number of the
+        current file.
+    :param starttime: start of read data to skip reading actual data if not
+        in range
+    :param endtime: end of read data to skip reading actual data if not
+        in range
+    :param stream_header_by_key_chan: dict of stream header by key, chan to get
+        gaps later
+    :param cur_data_dict: waveform_data/mass_pos_data of the current key
+    :param cur_data_time: data_time of the current key
+    :param include_mp123zne: if mass position channels 1,2,3 are requested
+    :param include_mp456uvw: if mass position channels 4,5,6 are requested
+    :return: the indexes of traces that fall within the requested time range
+    """
+    stream = Reftek130.to_stream(
+        rt130,
+        include_mp123=include_mp123zne,
+        include_mp456=include_mp456uvw,
+        headonly=True,
+        verbose=False,
+        sort_permuted_package_sequence=True)
+
+    avail_trace_indexes = []
+    for index, trace in enumerate(stream):
+        chan_id = trace.stats['channel'].strip()
+        samplerate = trace.stats['sampling_rate']
+        if chan_id not in cur_data_dict:
+            cur_data_dict[chan_id] = {'tracesInfo': [],
+                                      'samplerate': samplerate}
+        if trace.stats.npts == 0:
+            # skip this empty trace to prevent a bug when creating a memmap
+            # with no data
+            continue
+        if (starttime <= trace.stats['starttime'] <= endtime or
+                starttime <= trace.stats['endtime'] <= endtime):
+            avail_trace_indexes.append(index)
+
+            if chan_id not in stream_header_by_key_chan[cur_key]:
+                stream_header_by_key_chan[cur_key][chan_id] = Stream()
+            stream_header_by_key_chan[cur_key][chan_id].append(trace)
+
+            cur_data_time[0] = min(
+                trace.stats['starttime'].timestamp, cur_data_time[0])
+            cur_data_time[1] = max(
+                trace.stats['endtime'].timestamp, cur_data_time[1])
+
+    return avail_trace_indexes
+
+
+def read_reftek_stream(
+        rt130: Reftek130, tmp_dir: str, cur_key: Tuple[str, str],
+        avail_trace_indexes: List[int], cur_data_dict: Dict,
+        include_mp123zne: bool, include_mp456uvw: bool):
+    """
+    FROM handling_data_reftek.read_reftek_stream
+    Read traces of a file from the given rt130 object for the index in
+        avail_trace_indexes.
+
+    :param rt130: RT130 object to get data stream from
+    :param tmp_dir: dir to keep memmap files
+    :param cur_key: Tuple of DAS serial number, experiment number of the
+        current file.
+    :param avail_trace_indexes: index of traces to get
+    :param cur_data_dict: waveform_data/mass_pos_data of the current key
+    :param include_mp123zne: if mass position channels 1,2,3 are requested
+    :param include_mp456uvw: if mass position channels 4,5,6 are requested
+    """
+    # TODO: rewrite reftek to read stream with start and end time
+    stream = Reftek130.to_stream(
+        rt130,
+        include_mp123=include_mp123zne,
+        include_mp456=include_mp456uvw,
+        headonly=False,
+        verbose=False,
+        sort_permuted_package_sequence=True)
+    for index in avail_trace_indexes:
+        trace = stream[index]
+        chan_id = trace.stats['channel'].strip()
+        traces_info = cur_data_dict[chan_id]['tracesInfo']
+        tr = read_mseed_trace(trace)
+        traces_info.append(tr)
+
+
+def read_mseed_trace(
+        trace: Union[Trace, DiscontinuousTrace]) -> Dict:
+    """
+    FROM handling_data.read_mseed_trace_spr_less_than_or_equal_1()
+
+    For an mseed trace whose sample rate is <= 1, read and keep all info of
+        the trace, including times and data, in memory.
+    Traces with sample rate <= 1 can be soh, mass position, or waveform
+
+    :param trace: mseed trace
+    :return tr: dict of trace's info in which data and times are kept
+    """
+    tr = {}
+    tr['chanID'] = trace.stats.channel
+    tr['startTmEpoch'] = trace.stats.starttime.timestamp
+    tr['endTmEpoch'] = trace.stats.endtime.timestamp
+    tr['samplerate'] = trace.stats.sampling_rate
+    if hasattr(trace.stats, 'actual_npts'):
+        tr['size'] = trace.stats.actual_npts
+    else:
+        tr['size'] = trace.stats.npts
+    """
+    trace times start at 0 => need to add the epoch starttime
+    times and data have type ndarray
+    """
+    tr['times'] = trace.times() + trace.stats['starttime'].timestamp
+    if trace.stats.channel.startswith('MassPos'):
+        tr['data'] = _convert_reftek_masspos_data(trace.data)
+    else:
+        tr['data'] = trace.data
+    return tr
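
A self-contained sketch of the dict read_mseed_trace() builds, using a synthetic obspy Trace for a non-MassPos channel:

```python
import numpy as np
from obspy import Trace, UTCDateTime

from sohstationviewer.model.reftek_data.reftek_helper import read_mseed_trace

trace = Trace(data=np.arange(5, dtype=np.int32))
trace.stats.channel = 'VM1'
trace.stats.sampling_rate = 1.0
trace.stats.starttime = UTCDateTime(2023, 1, 1)

tr = read_mseed_trace(trace)
print(tr['chanID'], tr['size'])  # VM1 5
print(tr['times'][0])            # 1672531200.0: epoch of the first sample
```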
+
+
+def _convert_reftek_masspos_data(data: np.ndarray) -> np.ndarray:
+    """
+    FROM handling_data.convert_reftek_masspos_data()
+
+    Calculate the real mass position values from the raw trace data.
+
+    :param data: mseed data
+    :return: data converted from 16-bit signed integers, in which
+        32767 = 2**15 - 1 is the highest value of a 16-bit two's complement
+        number. The value is also multiplied by 10 for readable display.
+    (According to 130_theory.pdf: Each channel connects to a 12-bit A/D
+    converter with an input range of +/- 10V. These channels are read
+    once per second as left-justified, 2's-complement, 16 bit values.)
+
+    """
+    return np.round_(data / 32767.0 * 10.0, 1)
+
+
+def retrieve_gaps_from_stream_header(
+        streams: Dict[str, Dict[str, Stream]],
+        gaps_by_key_chan: Dict[Union[str, Tuple[str, str]],
+                               Dict[str, List[List[int]]]],
+        gaps: Dict[str, List[List[float]]],
+        gap_minimum: Optional[float],
+        read_start: Optional[float],
+        read_end: Optional[float]) -> None:
+    """
+    CHANGED FROM handling_data.retrieve_gaps_from_stream_header()
+    Retrieve gaps by sta_id from stream_header_by_key_chan
+
+    :param streams: dict of stream header by sta, chan
+    :param gaps_by_key_chan: gaps list by key and channel id
+    :param gaps: gaps list by key
+    :param gap_minimum: minimum length of gaps to be detected
+    :param read_start: start time of data to be read
+    :param read_end: end time of data to be read
+    """
+    for sta_id in streams:
+        sta_gaps = []
+        gaps_by_key_chan[sta_id] = {}
+        if gap_minimum is None:
+            continue
+        for chan_id in streams[sta_id]:
+            stream = streams[sta_id][chan_id]
+            gaps_in_stream = stream.get_gaps()
+            gaps_by_key_chan[sta_id][chan_id] = stream_gaps = [
+                [g[4].timestamp, g[5].timestamp] for g in gaps_in_stream
+                if _check_gap(g[4], g[5], read_start, read_end, gap_minimum)]
+
+            sta_gaps += stream_gaps
+        gaps[sta_id] = squash_gaps(sta_gaps)
+
+
+def _check_gap(t1: UTCDateTime, t2: UTCDateTime, start: float, end: float,
+               gap_minimum: float) -> bool:
+    """
+    Check if any part of the given gap lies in the given time range and the
+        gap is greater than gap_minimum
+    :param t1: start of the given gap
+    :param t2: end of the given gap
+    :param start: start of the time range
+    :param end: end of the time range
+    :param gap_minimum: minimum length of gaps to be detected
+    :return: True if the check is satisfied, False otherwise
+    """
+    t1 = t1.timestamp
+    t2 = t2.timestamp
+    return (abs(t2 - t1) > gap_minimum and
+            (start <= min(t1, t2) <= end or start <= max(t1, t2) <= end))
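
A quick sketch of _check_gap() semantics with synthetic times: a 10-second gap is reported only when it exceeds gap_minimum and overlaps the read range:

```python
from obspy import UTCDateTime

from sohstationviewer.model.reftek_data.reftek_helper import _check_gap

t1 = UTCDateTime(2023, 1, 1, 0, 0, 0)
t2 = UTCDateTime(2023, 1, 1, 0, 0, 10)
start = UTCDateTime(2022, 12, 31).timestamp
end = UTCDateTime(2023, 1, 2).timestamp
print(_check_gap(t1, t2, start, end, gap_minimum=5))   # True
print(_check_gap(t1, t2, start, end, gap_minimum=60))  # False: too short
```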
diff --git a/sohstationviewer/model/reftek/from_rt2ms/__init__.py b/sohstationviewer/model/reftek_data/reftek_reader/__init__.py
similarity index 100%
rename from sohstationviewer/model/reftek/from_rt2ms/__init__.py
rename to sohstationviewer/model/reftek_data/reftek_reader/__init__.py
diff --git a/sohstationviewer/model/reftek/from_rt2ms/core.py b/sohstationviewer/model/reftek_data/reftek_reader/core.py
similarity index 50%
rename from sohstationviewer/model/reftek/from_rt2ms/core.py
rename to sohstationviewer/model/reftek_data/reftek_reader/core.py
index c9e299d573434d6e95078d3f32b764caa3bfcd16..5406c51e26f5c56c31e9fbda25b0454b1d253000 100644
--- a/sohstationviewer/model/reftek/from_rt2ms/core.py
+++ b/sohstationviewer/model/reftek_data/reftek_reader/core.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+from __future__ import annotations
 
 """
 Suggested updates to obspy.io.reftek.core:
@@ -10,8 +9,10 @@ Suggested updates to obspy.io.reftek.core:
 Maeva Pourpoint IRIS/PASSCAL
 """
 
-
 import copy
+from pathlib import Path
+from typing import Optional, Union
+
 import obspy.io.reftek.core as obspy_rt130_core
 import warnings
 
@@ -19,8 +20,48 @@ import numpy as np
 
 from obspy import Trace, Stream, UTCDateTime
 from obspy.core.util.obspy_types import ObsPyException
-from obspy.io.reftek.packet import _unpack_C0_C2_data
-from sohstationviewer.model.reftek.from_rt2ms.packet import EHPacket
+from obspy.io.reftek.packet import PACKET_FINAL_DTYPE
+
+from sohstationviewer.model.general_data.general_record_helper import Unpacker
+from sohstationviewer.model.reftek_data.reftek_reader.packet import EHPacket
+
+from sohstationviewer.model.reftek_data.reftek_reader.reftek_reader_helper \
+    import (read_rt130_file, convert_packet_to_obspy_format)
+
+
+class DiscontinuousTrace(Trace):
+    """
+    Extension of obspy.Trace that changes the way time data is handled when
+    reading data using the method from logpeek/qpeek.
+    """
+    def __init__(self, *args, times: np.ndarray, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._times = times
+
+    def times(self, type: str = "relative",
+              reftime: Optional[UTCDateTime] = None) -> np.ndarray:
+        """
+        Override Trace.times(). Returns a numpy array of stored times data,
+        modified based on the argument "type".
+        :param type: the type of times data to return. For more information,
+            refer to Trace.times(). Note: this method does not implement
+            types 'utcdatetime' and 'matplotlib' because they are not going
+            to be useful.
+        :param reftime: the time used as a reference point when getting
+            relative time. If None, the start time of the trace is used as
+            the reference point.
+        :return: the requested array of time data, modified based on the type
+            requested.
+        """
+        if type == 'utcdatetime' or type == 'matplotlib':
+            raise NotImplementedError
+        elif type == 'relative':
+            if reftime is None:
+                return self._times - self.stats.starttime.timestamp
+            else:
+                return self._times - reftime.timestamp
+        elif type == 'timestamp':
+            return self._times
 
 
 class Reftek130Exception(ObsPyException):
@@ -28,18 +69,41 @@ class Reftek130Exception(ObsPyException):
 
 
 class Reftek130(obspy_rt130_core.Reftek130):
+    """
+    Child class of obspy's Reftek130 that reads waveform data in a way
+    similar to logpeek for better performance.
+    """
+    @staticmethod
+    def from_file(file: Union[str, Path]) -> Reftek130:
+        """
+        Read data from an RT130 file and save it in a Reftek130 object.
+        :param file: the RT130 file to read
+        :return: a Reftek130 object that stores the data in file
+        """
+        # RT130 data is all big-endian
+        rt130_unpacker = Unpacker('>')
+        rt = Reftek130()
+        rt._filename = file
+        packets_in_file = read_rt130_file(file, rt130_unpacker)
+        converted_packets = []
+        for packet in packets_in_file:
+            converted_packets.append(
+                convert_packet_to_obspy_format(packet, rt130_unpacker))
+        rt._data = np.array(converted_packets, dtype=PACKET_FINAL_DTYPE)
+        return rt
 
-    def to_stream(self, network="", location="", component_codes=None,
-                  include_mp123=False, include_mp456=False,
-                  headonly=False, verbose=False,
-                  sort_permuted_package_sequence=False):
+    def to_stream(self, network: str = "", location: str = "",
+                  include_mp123: bool = False, include_mp456: bool = False,
+                  headonly: bool = False, verbose: bool = False,
+                  sort_permuted_package_sequence: bool = False) -> Stream:
         """
+        Create an obspy.Stream object that holds the data stored in this
+            Reftek130 object.
+
         :type headonly: bool
         :param headonly: Determines whether or not to unpack the data or just
             read the headers.
         """
-        if verbose:
-            print(self)
         if not len(self._data):
             msg = "No packet data in Reftek130 object (file: {})"
             raise Reftek130Exception(msg.format(self._filename))
@@ -81,20 +145,6 @@ class Reftek130(obspy_rt130_core.Reftek130):
                 eh = EHPacket(eh_packets[0])
             else:
                 eh = EHPacket(et_packets[0])
-            # only C0, C2, 16, 32 encodings supported right now
-            if eh.data_format == b"C0":
-                encoding = 'C0'
-            elif eh.data_format == b"C2":
-                encoding = 'C2'
-            elif eh.data_format == b"16":
-                encoding = '16'
-            elif eh.data_format == b"32":
-                encoding = '32'
-            else:
-                msg = ("Reftek data encoding '{}' not implemented yet. Please "
-                       "open an issue on GitHub and provide a small (< 50kb) "
-                       "test file.").format(eh.data_format)
-                raise NotImplementedError(msg)
             header = {
                 "unit_id": self._data['unit_id'][0],
                 "experiment_number": self._data['experiment_number'][0],
@@ -140,74 +190,34 @@ class Reftek130(obspy_rt130_core.Reftek130):
                         sample_data = np.array([], dtype=np.int32)
                         npts = packets_["number_of_samples"].sum()
                     else:
-                        if encoding in ('C0', 'C2'):
-                            sample_data = _unpack_C0_C2_data(packets_,
-                                                             encoding)
-                        elif encoding in ('16', '32'):
-                            # rt130 stores in big endian
-                            dtype = {'16': '>i2', '32': '>i4'}[encoding]
-                            # just fix endianness and use correct dtype
-                            sample_data = np.require(
-                                packets_['payload'],
-                                requirements=['C_CONTIGUOUS'])
-                            # either int16 or int32
-                            sample_data = sample_data.view(dtype)
-                            # account for number of samples, i.e. some packets
-                            # might not use the full payload size but have
-                            # empty parts at the end that need to be cut away
-                            number_of_samples_max = sample_data.shape[1]
-                            sample_data = sample_data.flatten()
-                            # go through packets starting at the back,
-                            # otherwise indices of later packets would change
-                            # while looping
-                            for ind, num_samps in reversed([
-                                    (ind, num_samps) for ind, num_samps in
-                                    enumerate(packets_["number_of_samples"])
-                                    if num_samps != number_of_samples_max]):
-                                # looping backwards we can easily find the
-                                # start of each packet, since the earlier
-                                # packets are still untouched and at maximum
-                                # sample length in our big array with all
-                                # packets
-                                start_of_packet = ind * number_of_samples_max
-                                start_empty_part = start_of_packet + num_samps
-                                end_empty_part = (start_of_packet +
-                                                  number_of_samples_max)
-                                sample_data = np.delete(
-                                    sample_data,
-                                    slice(start_empty_part, end_empty_part))
-                        npts = len(sample_data)
-
-                    tr = Trace(data=sample_data, header=copy.deepcopy(header))
+                        # The payload stores the first data point of each
+                        # packet, encoded as a numpy array of 4 1-byte numbers.
+                        # Due to the way the payload is encoded during the
+                        # reading process and a quirk of two's complement
+                        # binary numbers (namely, extending a negative number
+                        # with leading 1s does not change its value), we do
+                        # not have to care about the actual encoding type of
+                        # the stored packets.
+                        sample_data = np.ascontiguousarray(
+                            packets_['payload'][:, :4])
+                        sample_data = sample_data.view(np.dtype('>i4'))
+                        sample_data = sample_data.squeeze(axis=-1)
+                        npts = sample_data.size
+                    tr = DiscontinuousTrace(
+                        data=sample_data, header=copy.deepcopy(header),
+                        times=(packets_['time'] / 10**9).round(3)
+                    )
+                    # The plotting process needs to know about the number of
+                    # points stored in the trace. However, tr.stats uses the
+                    # stored npts to calculate some other metadata, so we can't
+                    # store that information there. As a compromise, we keep
+                    # tr.stats.npts the same, while storing the actual number
+                    # of data points in the trace in another part of tr.stats.
+                    tr.stats.npts = packets_['number_of_samples'].sum()
+                    tr.stats.actual_npts = npts
                     # channel number is not included in the EH/ET packet
                     # payload, so add it to stats as well..
                     tr.stats.reftek130['channel_number'] = channel_number
-                    if headonly:
-                        tr.stats.npts = npts
                     tr.stats.starttime = UTCDateTime(ns=starttime)
-                    """
-                    if component codes were explicitly provided, use them
-                    together with the stream label
-                    if component_codes is not None:
-                        tr.stats.channel = (eh.stream_name.strip() +
-                                            component_codes[channel_number])
-                    # otherwise check if channel code is set for the given
-                    # channel (seems to be not the case usually)
-                    elif eh.channel_code[channel_number] is not None:
-                        tr.stats.channel = eh.channel_code[channel_number]
-                    # otherwise fall back to using the stream label together
-                    # with the number of the channel in the file (starting with
-                    # 0, as Z-1-2 is common use for data streams not oriented
-                    # against North)
-                    else:
-                        msg = ("No channel code specified in the data file "
-                               "and no component codes specified. Using "
-                               "stream label and number of channel in file as "
-                               "channel codes.")
-                        warnings.warn(msg)
-                        tr.stats.channel = (
-                            eh.stream_name.strip() + str(channel_number))
-                    """
                     DS = self._data['data_stream_number'][0] + 1
                     if DS != 9:
                         tr.stats.channel = "DS%s-%s" % (DS, channel_number + 1)
@@ -218,22 +228,5 @@ class Reftek130(obspy_rt130_core.Reftek130):
                             continue
                         tr.stats.channel = "MassPos%s" % (channel_number + 1)
                         # check if endtime of trace is consistent
-                    t_last = packets_[-1]['time']
-                    npts_last = packets_[-1]['number_of_samples']
-                    try:
-                        if not headonly:
-                            assert npts == len(sample_data)
-                        if npts_last:
-                            assert tr.stats.endtime == UTCDateTime(
-                                ns=t_last) + (npts_last - 1) * delta
-                        if npts:
-                            assert tr.stats.endtime == (
-                                tr.stats.starttime + (npts - 1) * delta)
-                    except AssertionError:
-                        msg = ("Reftek file has a trace with an inconsistent "
-                               "endtime or number of samples. Please open an "
-                               "issue on GitHub and provide your file for"
-                               "testing.")
-                        raise Reftek130Exception(msg)
                     st += tr
         return st
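
A hedged sketch of how the overridden DiscontinuousTrace.times() behaves; the data and timestamps are synthetic (the reader normally supplies times from packet headers):

```python
import numpy as np
from obspy import UTCDateTime

from sohstationviewer.model.reftek_data.reftek_reader.core import (
    DiscontinuousTrace)

start = UTCDateTime(2023, 1, 1)
stamps = np.array([start.timestamp, start.timestamp + 60.0])  # gap inside
tr = DiscontinuousTrace(data=np.array([1, 2], dtype=np.int32), times=stamps)
tr.stats.starttime = start
print(tr.times('timestamp'))  # the stored absolute timestamps
print(tr.times('relative'))   # [ 0. 60.]: offsets from starttime
```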
diff --git a/sohstationviewer/model/reftek_data/reftek_reader/header.py b/sohstationviewer/model/reftek_data/reftek_reader/header.py
new file mode 100644
index 0000000000000000000000000000000000000000..dafb944fb11d57f1ae11bc043e170a26fe4b91ee
--- /dev/null
+++ b/sohstationviewer/model/reftek_data/reftek_reader/header.py
@@ -0,0 +1,96 @@
+import dataclasses
+
+from obspy import UTCDateTime
+
+
+class NotRT130FileError(Exception):
+    """
+    Error to raise when there is a problem with parsing RT130 data.
+    """
+    pass
+
+
+@dataclasses.dataclass
+class PacketHeader:
+    """
+    The decoded header of an RT130 packet.
+    """
+    packet_type: str
+    experiment_number: int
+    unit_id: str
+    time: UTCDateTime
+    byte_count: int
+    packet_sequence: int
+
+
+def parse_rt130_time(year: int, time_bytes: bytes) -> UTCDateTime:
+    """
+    Convert BCD-encoded RT130 time into UTCDateTime.
+    :param year: the year of the time. RT130 headers store the year separately
+        from the time, so we have to pass it as an argument.
+    :param time_bytes: the BCD-encoded time.
+    :return: an UTCDateTime object that stores the decoded time.
+    """
+    time_string = time_bytes.hex()
+    # The time string has the format of DDDHHMMSSTTT, where
+    # D = day of year
+    # H = hour
+    # M = minute
+    # S = second
+    # T = millisecond
+    day_of_year, hour, minute, second, millisecond = (
+        int(time_string[0:3]),
+        int(time_string[3:5]),
+        int(time_string[5:7]),
+        int(time_string[7:9]),
+        int(time_string[9:12])
+    )
+    # RT130 only stores the last two digits of the year. Because the
+    # documentation for RT130 does not define a way to retrieve the full year,
+    # we use Obspy's method. Accordingly, we convert 0-49 to 2000-2049 and
+    # 50-99 to 1950-1999.
+    if 0 <= year <= 49:
+        year += 2000
+    elif 50 <= year <= 99:
+        year += 1900
+    converted_time = UTCDateTime(year=year, julday=day_of_year, hour=hour,
+                                 minute=minute, second=second,
+                                 microsecond=millisecond * 1000)
+    return converted_time
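
A worked example of the BCD layout: six bytes hex-decode to the string DDDHHMMSSTTT, so day 032 at 13:59:07.125 of year 23 becomes 2023-02-01:

```python
from sohstationviewer.model.reftek_data.reftek_reader.header import (
    parse_rt130_time)

time_bytes = bytes.fromhex('032135907125')  # DDD=032 HH=13 MM=59 SS=07 TTT=125
print(parse_rt130_time(23, time_bytes))     # 2023-02-01T13:59:07.125000Z
```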
+
+
+def get_rt130_packet_header(rt130_packet: bytes) -> PacketHeader:
+    """
+    Get the packet header stored in the first 16 bytes of an RT130 packet.
+
+    :param rt130_packet: the RT130 packet to process
+    :return: a PacketHeader object containing the header of rt130_packet
+    """
+    try:
+        # Because RT130 data is always big-endian, it is more convenient to
+        # use str.decode() than the unpacker.
+        packet_type = rt130_packet[:2].decode('ASCII')
+    except UnicodeError:
+        print('Cannot decode packet type.')
+        print('The given file does not appear to be a valid RT130 file.')
+        raise NotRT130FileError
+    valid_packet_types = ['AD', 'CD', 'DS', 'DT', 'EH', 'ET', 'OM', 'SH', 'SC',
+                          'FD']
+    if packet_type not in valid_packet_types:
+        print(f'Invalid packet type found: {packet_type}')
+        print('The given file does not appear to be a valid RT130 file.')
+        raise NotRT130FileError
+
+    experiment_number = int(rt130_packet[2:3].hex())
+    year = int(rt130_packet[3:4].hex())
+    # A call to str.upper() is needed because bytes.hex() makes any
+    # hexadecimal letter (i.e. ABCDEF) lowercase, while we want them to be
+    # uppercase for display purpose.
+    unit_id = rt130_packet[4:6].hex().upper()
+    time_bytes = rt130_packet[6:12]
+    packet_time = parse_rt130_time(year, time_bytes)
+    byte_count = int(rt130_packet[12:14].hex())
+    packet_sequence = int(rt130_packet[14:16].hex())
+
+    return PacketHeader(packet_type, experiment_number, unit_id, packet_time,
+                        byte_count, packet_sequence)
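
A hedged sketch of get_rt130_packet_header() on a hand-built 16-byte header; every field value is synthetic:

```python
from sohstationviewer.model.reftek_data.reftek_reader.header import (
    get_rt130_packet_header)

header = (b'SH'                            # packet type
          + bytes.fromhex('01')            # experiment number 1
          + bytes.fromhex('23')            # year 23 -> 2023
          + bytes.fromhex('92EB')          # unit id '92EB'
          + bytes.fromhex('032135907125')  # day 032, 13:59:07.125
          + bytes.fromhex('1024')          # byte count 1024
          + bytes.fromhex('0005'))         # packet sequence 5
ph = get_rt130_packet_header(header)
print(ph.packet_type, ph.unit_id, ph.byte_count)  # SH 92EB 1024
```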
diff --git a/sohstationviewer/model/reftek/from_rt2ms/packet.py b/sohstationviewer/model/reftek_data/reftek_reader/packet.py
similarity index 59%
rename from sohstationviewer/model/reftek/from_rt2ms/packet.py
rename to sohstationviewer/model/reftek_data/reftek_reader/packet.py
index c3ddb8865c877ef54805ca1bf72277623fe88f44..d7bd0c52a6d1febac2327221b747d87ca934a767 100644
--- a/sohstationviewer/model/reftek/from_rt2ms/packet.py
+++ b/sohstationviewer/model/reftek_data/reftek_reader/packet.py
@@ -9,24 +9,47 @@ Suggested updates to obspy.io.reftek.packet:
 
 Maeva Pourpoint IRIS/PASSCAL
 """
+from typing import List
 
+import numpy
 import obspy.io.reftek.packet as obspy_rt130_packet
 
 from obspy import UTCDateTime
-from obspy.io.reftek.util import (_decode_ascii,
-                                  _parse_long_time,
-                                  _16_tuple_ascii,
-                                  _16_tuple_int,
-                                  _16_tuple_float)
-from sohstationviewer.model.reftek.from_rt2ms.soh_packet import Packet
+from obspy.io.reftek.util import (
+    _decode_ascii, _parse_long_time, _16_tuple_ascii, _16_tuple_float,
+    _16_tuple_int,
+)
+from sohstationviewer.model.reftek_data.reftek_reader.soh_packet import (
+    SOHPacket)
 
 
 class Reftek130UnpackPacketError(ValueError):
     pass
 
 
+eh_et_payload_last_field_start = 88
+eh_et_payload_last_field_size = 16
+
+# The field offsets above are relative to the start of the payload, so we
+# have to add 24 to compensate for the size of the header and extended header.
+eh_et_payload_end_in_packet = (
+        eh_et_payload_last_field_start + eh_et_payload_last_field_size + 24
+)
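
A quick check of the offset arithmetic above: the last required EH/ET field spans payload bytes 88-104, and the 24 header bytes shift the cut-off to byte 128 of the whole packet:

```python
eh_et_payload_last_field_start = 88
eh_et_payload_last_field_size = 16
header_size = 24
assert (eh_et_payload_last_field_start + eh_et_payload_last_field_size
        + header_size) == 128  # == eh_et_payload_end_in_packet
```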
+
 # name, offset, length (bytes) and converter routine for EH/ET packet payload
+# Trimmed to only include the parts used elsewhere for the sake of better
+# performance.
 EH_PAYLOAD = {
+    "station_name_extension": (35, 1, _decode_ascii),
+    "station_name": (36, 4, _decode_ascii),
+    "sampling_rate": (64, 4, float),
+    "trigger_time": (72, 16, _parse_long_time),
+    "first_sample_time": (
+        eh_et_payload_last_field_start, eh_et_payload_last_field_size,
+        _parse_long_time),
+}
+
+obspy_rt130_packet.EH_PAYLOAD = {
     "trigger_time_message": (0, 33, _decode_ascii),
     "time_source": (33, 1, _decode_ascii),
     "time_quality": (34, 1, _decode_ascii),
@@ -57,21 +80,37 @@ EH_PAYLOAD = {
     "position": (894, 26, _decode_ascii),
     "reftek_120": (920, 80, None)}
 
-obspy_rt130_packet.EH_PAYLOAD = EH_PAYLOAD
-
 
 class EHPacket(obspy_rt130_packet.EHPacket):
-    def __str__(self, compact=False):
+    def __init__(self, data: numpy.ndarray) -> None:
+        """
+        Reimplement __init__ to use a different value for EH_PAYLOAD.
+        This should be the cleanest way to do it, seeing as every other way
+        I can think of modifies EH_PAYLOAD in the original file, which can
+        have consequences that are not readily apparent.
+
+        :param data: the data of an EH packet. For more information, refer to
+            obspy.io.reftek.packet.PACKET_FINAL_DTYPE.
+        """
+        self._data = data
+        payload = self._data["payload"].tobytes()
+        for name, (start, length, converter) in EH_PAYLOAD.items():
+            data = payload[start:start + length]
+            if converter is not None:
+                data = converter(data)
+            setattr(self, name, data)
+
+    def __str__(self, compact: bool = False) -> str:
         if compact:
             sta = (self.station_name.strip() +
                    self.station_name_extension.strip())
             info = ("{:04d} {:2s} {:4s} {:2d} {:4d} {:4d} {:2d} {:2s} "
                     "{:5s}  {:4s}        {!s}").format(
-                        self.packet_sequence, self.type.decode(),
-                        self.unit_id.decode(), self.experiment_number,
-                        self.byte_count, self.event_number,
-                        self.data_stream_number, self.data_format.decode(),
-                        sta, str(self.sampling_rate)[:4], self.time)
+                self.packet_sequence, self.type.decode(),
+                self.unit_id.decode(), self.experiment_number,
+                self.byte_count, self.event_number,
+                self.data_stream_number, self.data_format.decode(),
+                sta, str(self.sampling_rate)[:4], self.time)
         else:
             info = []
             for key in self._headers:
@@ -91,40 +130,16 @@ class EHPacket(obspy_rt130_packet.EHPacket):
                                             "\n\t".join(info))
         return info
 
-    def eh_et_info(self, nbr_DT_samples):
+    def eh_et_info(self, nbr_DT_samples: int) -> List[str]:
         """
         Compile EH and ET info to write to log file.
         Returns list of strings.
         Formatting of strings is based on earlier version of rt2ms.
         """
         info = []
-        # packet_tagline1 = ("\n\n{:s} exp {:02d} bytes {:04d} {:s} ID: {:s} "
-        #                    "seq {:04d}".format(self.type.decode(),
-        #                                        self.experiment_number,
-        #                                        self.byte_count,
-        #                                        Packet.time_tag(self.time),
-        #                                        self.unit_id.decode(),
-        #                                        self.packet_sequence))
-        # info.append(packet_tagline1)
-        # if self.type.decode('ASCII') == 'EH':
-        #     nbr_DT_samples = 0
-        #     info.append("\nEvent Header")
-        # else:
-        #     info.append("\nEvent Trailer")
-        # info.append("\n  event = " + str(self.event_number))
-        # info.append("\n  stream = " + str(self.data_stream_number + 1))
-        # info.append("\n  format = " + self.data_format.decode('ASCII'))
-        # info.append("\n  stream name = " + self.stream_name)
-        # info.append("\n  sample rate = " + str(self.sampling_rate))
-        # info.append("\n  trigger type = " + self.trigger_type)
-        trigger_time = Packet.time_tag(UTCDateTime(ns=self.trigger_time))
-        # info.append("\n  trigger time = " + trigger_time)
-        first_sample_time = Packet.time_tag(UTCDateTime(ns=self.first_sample_time))  # noqa: E501
-        # info.append("\n  first sample = " + first_sample_time)
-        # if self.last_sample_time:
-        #     info.append("\n  last sample = " + Packet.time_tag(UTCDateTime(ns=self.last_sample_time)))  # noqa: E501
-        # info.append("\n  bit weights = " + " ".join([val for val in self.channel_adjusted_nominal_bit_weights if val]))  # noqa: E501
-        # info.append("\n  true weights = " + " ".join([val for val in self.channel_true_bit_weights if val]))  # noqa: E501
+        trigger_time = SOHPacket.time_tag(UTCDateTime(ns=self.trigger_time))
+        first_sample_time = SOHPacket.time_tag(
+            UTCDateTime(ns=self.first_sample_time))
         packet_tagline2 = ("\nDAS: {:s} EV: {:04d} DS: {:d} FST = {:s} TT = "
                            "{:s} NS: {:d} SPS: {:.1f} ETO: 0"
                            .format(self.unit_id.decode(),
diff --git a/sohstationviewer/model/reftek_data/reftek_reader/packet_readers.py b/sohstationviewer/model/reftek_data/reftek_reader/packet_readers.py
new file mode 100644
index 0000000000000000000000000000000000000000..9e1b4f75b74b918dd67ad2f54fd3349d781572b7
--- /dev/null
+++ b/sohstationviewer/model/reftek_data/reftek_reader/packet_readers.py
@@ -0,0 +1,149 @@
+from typing import Tuple, Any
+
+import numpy
+from obspy.io.reftek.util import bcd
+
+from sohstationviewer.model.general_data.general_record_helper import Unpacker
+from sohstationviewer.model.reftek_data.reftek_reader.packet import \
+    eh_et_payload_end_in_packet
+from sohstationviewer.model.reftek_data.reftek_reader.packets import (
+    DTExtendedHeader,
+    EHETExtendedHeader, SOHExtendedHeader,
+)
+
+
+def decode_uncompressed(packet: bytes, data_format: str, unpacker: Unpacker
+                        ) -> int:
+    """
+    Grab the first data point in a packet that contains uncompressed RT130 data
+    (aka packets with data format 16, 32, or 33).
+    :param packet: the bytes that make up the given packet.
+    :param data_format: the data format of the given packet, can be one of 16,
+        32, or 33.
+    :param unpacker: the unpacker to use to decode the data.
+    :return: the first data point in the given packet
+    """
+    data = packet[24:]
+    # For uncompressed RT130 data, the data format is also the size of a data
+    # point in bits (aside from data format 33, which uses the same size as
+    # data format 32).
+    point_size = int(data_format)
+    if point_size == 33:
+        point_size = 32
+    # Convert the size of a data point to byte because the data is stored
+    # as a byte string.
+    point_size = point_size // 8
+
+    # struct.unpack uses different format characters for different point sizes.
+    format_char = {2: 'h', 4: 'i'}[point_size]
+
+    first_data_point = data[:point_size]
+
+    return unpacker.unpack(f'{format_char}', first_data_point)[0]
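
A hedged sketch of decode_uncompressed() on a synthetic packet: 24 header bytes followed by big-endian 16-bit samples (data format '16'):

```python
from sohstationviewer.model.general_data.general_record_helper import Unpacker
from sohstationviewer.model.reftek_data.reftek_reader.packet_readers import (
    decode_uncompressed)

packet = b'\x00' * 24 + b'\x01\x02\xff\xff'  # samples 258 and -1
print(decode_uncompressed(packet, '16', Unpacker('>')))  # 258
```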
+
+
+def decode_compressed(packet: bytes, data_format: str, unpacker: Unpacker
+                      ) -> int:
+    """
+    Grab the stop point in a packet that contains compressed RT130 data (aka
+    packets with data format C0, C1, C2, or C3).
+    We get the stop point in this case because that is what logpeek did. It
+    also looks a lot better than using the start point, so that is a plus.
+    :param packet: the bytes that make up the given packet.
+    :param data_format: the data format of the given packet, can be one of C0,
+        C1, C2, or C3. Exists only so this function has the same signature as
+        decode_uncompressed.
+    :param unpacker: the unpacker to use to decode the data.
+    :return: the stop (last) data point in the given packet
+    """
+    # The data in a compressed data packet starts at byte 64, with bytes
+    # between byte 24 and 64 being fillers.
+    data = packet[64:]
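+    # In the first frame of Steim-compressed data, bytes 4:8 of the data
+    # section hold the forward integration constant (first sample) and bytes
+    # 8:12 hold the reverse integration constant (last sample), which is the
+    # stop point grabbed below.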
+    first_data_point = data[8:12]
+    return unpacker.unpack('i', first_data_point)[0]
+
+
+def read_dt_packet(packet: bytes, unpacker: Unpacker
+                   ) -> Tuple[DTExtendedHeader, Any]:
+    """
+    Process a DT packet and get its extended header and first data point.
+    :param packet: the bytes that make up the given DT packet.
+    :param unpacker: the unpacker to use to decode the data.
+    :return: the extended header and first data point of the given DT packet.
+    """
+    decoders = {
+        **dict.fromkeys(['16', '32', '33'], decode_uncompressed),
+        **dict.fromkeys(['C0', 'C1', 'C2', 'C3'], decode_compressed)
+    }
+
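+    # These fields are stored as packed BCD: reading the hex digits of the
+    # raw bytes as a decimal number decodes them.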
+    event_number = int(packet[16:18].hex())
+    data_stream_number = int(packet[18:19].hex())
+    channel_number = int(packet[19:20].hex())
+    number_of_samples = int(packet[20:22].hex())
+    flags = unpacker.unpack('B', packet[22:23])[0]
+    data_format = packet[23:24].hex().upper()
+
+    extended_header = DTExtendedHeader(event_number, data_stream_number,
+                                       channel_number, number_of_samples,
+                                       flags, data_format)
+    first_data_point = decoders[data_format](packet, data_format, unpacker)
+    return extended_header, first_data_point
+
+
+def read_eh_et_packet(packet: bytes, unpacker: Unpacker
+                      ) -> Tuple[EHETExtendedHeader, bytes]:
+    """
+    Process an EH/ET packet and get its extended header and required part of
+    the payload.
+    :param packet: the bytes that make up the given EH/ET packet.
+    :param unpacker: the unpacker to use to decode the data.
+    :return: the extended header and truncated payload of the given EH/ET
+        packet.
+    """
+    event_number = int(packet[16:18].hex())
+    data_stream_number = int(packet[18:19].hex())
+    flags = unpacker.unpack('B', packet[22:23])[0]
+    data_format = packet[23:24].hex().upper()
+
+    extended_header = EHETExtendedHeader(event_number, data_stream_number,
+                                         flags, data_format)
+    # The largest possible data point has a size of 4 bytes, so we need to
+    # grab at least that much of the payload.
+    payload = packet[24:eh_et_payload_end_in_packet]
+    return extended_header, payload
+
+
+def bcd_16bit_int(_i: numpy.ndarray) -> int:
+    """
+    Reimplement a private function of the same name in obspy. Kept here in case
+    the private function is removed in a future obspy version.
+    :param _i: the byte string to convert into a 16-bit integer
+    :return: a 16-bit integer
+    """
+    _i = bcd(_i)
+    return _i[0] * 100 + _i[1]
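+
+# For instance (illustrative): the BCD bytes b'\x12\x34' decode to the pair
+# (12, 34), which this helper combines into the integer 1234.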
+
+
+def read_soh_packet(packet: bytes, unpacker: Unpacker
+                    ) -> Tuple[SOHExtendedHeader, bytes]:
+    """
+    Process an SOH packet and get its extended header and payload.
+    :param packet: the bytes that make up the given SOH packet.
+    :param unpacker: the unpacker to use to decode the data.
+    :return: the extended header and payload of the given SOH packet.
+    """
+
+    event_number = bcd_16bit_int(numpy.frombuffer(packet[16:18], numpy.uint8))
+    data_stream_number = bcd(numpy.frombuffer(packet[18:19], numpy.uint8))
+    channel_number = bcd(numpy.frombuffer(packet[19:20], numpy.uint8))
+    number_of_samples = bcd_16bit_int(
+        numpy.frombuffer(packet[20:22], numpy.uint8)
+    )
+    flags = unpacker.unpack('B', packet[22:23])[0]
+    data_format = packet[23:24].hex().upper()
+
+    extended_header = SOHExtendedHeader(event_number, data_stream_number,
+                                        channel_number, number_of_samples,
+                                        flags, data_format)
+    payload = packet[24:]
+    return extended_header, payload
diff --git a/sohstationviewer/model/reftek_data/reftek_reader/packets.py b/sohstationviewer/model/reftek_data/reftek_reader/packets.py
new file mode 100644
index 0000000000000000000000000000000000000000..dddd4e68a003ecdfdeac9eb844bd6bd6aa500362
--- /dev/null
+++ b/sohstationviewer/model/reftek_data/reftek_reader/packets.py
@@ -0,0 +1,89 @@
+import dataclasses
+
+from sohstationviewer.model.reftek_data.reftek_reader.header import (
+    PacketHeader)
+
+
+@dataclasses.dataclass
+class DTExtendedHeader:
+    """
+    The extended header of a DT packet.
+    """
+    event_number: int
+    data_stream_number: int
+    channel_number: int
+    number_of_samples: int
+    flags: int
+    data_format: str
+
+
+@dataclasses.dataclass
+class DTPacket:
+    """
+    The decoded data of a DT packet.
+    """
+    header: PacketHeader
+    extended_header: DTExtendedHeader
+    data: int
+
+
+@dataclasses.dataclass
+class EHETExtendedHeader:
+    """
+    A collection of some useful information about an EH/ET packet. Technically,
+    EH/ET packets do not have extended headers. We use this name anyway
+    because of the way obspy.Reftek130 (and consequently, core.Reftek130)
+    stores the data of processed packets. For more information, refer to
+    Reftek130._data.
+    event_number: int
+    data_stream_number: int
+    flags: int
+    data_format: str
+
+    def __post_init__(self):
+        self.channel_number = 0
+        self.number_of_samples = 0
+
+
+@dataclasses.dataclass
+class EHETPacket:
+    """
+    The decoded data of an EH/ET packet. The extended_header field is there to
+    ensure compatibility with DTPacket. EH/ET packets do not have an
+    extended header otherwise.
+    """
+    header: PacketHeader
+    extended_header: EHETExtendedHeader
+    data: bytes
+
+
+@dataclasses.dataclass
+class SOHExtendedHeader:
+    """
+    A collection of dummy data for some information needed so that
+    core.Reftek130 can understand SOH packets.
+
+    core.Reftek130 focuses on reading waveform data, so it wants information
+    available in the waveform packets (EH/ET/DT). However, core.Reftek130 also
+    supports SOH packets, which do not contain the required information. As
+    a result, we need to store dummy data in its place.
+    """
+    event_number: int
+    data_stream_number: int
+    channel_number: int
+    number_of_samples: int
+    flags: int
+    data_format: str
+
+
+@dataclasses.dataclass
+class SOHPacket:
+    """
+    The decoded data of an SOH packet. The extended_header field is there to
+    ensure compatibility with DTPacket. SOH packets do not have an
+    extended header otherwise.
+    """
+    header: PacketHeader
+    extended_header: SOHExtendedHeader
+    data: bytes
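+
+
+# Illustrative note: EHETExtendedHeader fills in dummy channel/sample fields
+# in __post_init__, so e.g. EHETExtendedHeader(1, 2, 0, 'C0').channel_number
+# evaluates to 0, matching the fields DTExtendedHeader provides.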
diff --git a/sohstationviewer/model/reftek_data/reftek_reader/reftek_reader_helper.py b/sohstationviewer/model/reftek_data/reftek_reader/reftek_reader_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..32b53080b94152e85e0af8a4c46102200d8839ef
--- /dev/null
+++ b/sohstationviewer/model/reftek_data/reftek_reader/reftek_reader_helper.py
@@ -0,0 +1,135 @@
+import os
+from typing import Any, Dict, Callable, Union, List, Tuple
+
+import numpy as np
+
+from sohstationviewer.model.general_data.general_record_helper import Unpacker
+from sohstationviewer.model.reftek_data.reftek_reader.packet import \
+    eh_et_payload_end_in_packet
+from sohstationviewer.model.reftek_data.reftek_reader.packet_readers import (
+    read_dt_packet, read_eh_et_packet, read_soh_packet,
+)
+from sohstationviewer.model.reftek_data.reftek_reader.packets import (
+    DTPacket, EHETPacket, SOHPacket,
+)
+from sohstationviewer.model.reftek_data.reftek_reader.header import \
+    get_rt130_packet_header
+
+
+def packet_reader_placeholder(*args: Any, **kwargs: Any) -> Tuple[Any, Any]:
+    """
+    Placeholder function to be used in place of an RT130 packet reader
+    function. This function immediately returns a pair of Nones.
+    """
+    return None, None
+
+
+def read_rt130_file(file_name: str, unpacker: Unpacker
+                    ) -> List[Union[EHETPacket, DTPacket, SOHPacket]]:
+    """
+    Read an RT130 file and store the data in a list of RT130 packets.
+    :param file_name: the name of the file to read.
+    :param unpacker: the decoder used to decode the data.
+    :return: a list of processed RT130 packets.
+    """
+    # RT130 data looks to be all big-endian (logpeek assumes this, and it has
+    # been working pretty well), so we don't have to do any endianness check.
+
+    packets = []
+
+    # The packet handler tables are constant, so build them once instead of
+    # rebuilding them on every iteration.
+    waveform_handlers: Dict[str, Callable] = {
+        'EH': read_eh_et_packet,
+        'ET': read_eh_et_packet,
+        'DT': read_dt_packet,
+    }
+    soh_handlers: Dict[str, Callable] = dict.fromkeys(
+        ['AD', 'CD', 'DS', 'FD', 'OM', 'SC', 'SH'],
+        read_soh_packet
+    )
+    packet_handlers = {**waveform_handlers, **soh_handlers}
+
+    with open(file_name, 'rb') as rt130_file:
+        # Each packet is exactly 1024 bytes, so we can rely on that to know
+        # when we have finished reading.
+        for i in range(os.path.getsize(file_name) // 1024):
+            packet = rt130_file.read(1024)
+            packet_header = get_rt130_packet_header(packet)
+
+            packet_handler = packet_handlers.get(
+                packet_header.packet_type, packet_reader_placeholder
+            )
+            return_val = packet_handler(packet, unpacker)
+            if packet_header.packet_type == 'DT':
+                packet_type = DTPacket
+            elif packet_header.packet_type in ['EH', 'ET']:
+                packet_type = EHETPacket
+            else:
+                packet_type = SOHPacket
+
+            extended_header, data = return_val
+            current_packet = packet_type(packet_header, extended_header, data)
+            packets.append(current_packet)
+
+    return packets
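+
+# Hypothetical usage (a sketch; the Unpacker constructor argument is an
+# assumption -- check general_record_helper for the actual signature):
+#
+#     unpacker = Unpacker('>')  # RT130 data is big-endian
+#     packets = read_rt130_file('path/to/rt130/file', unpacker)
+#     dt_packets = [p for p in packets if isinstance(p, DTPacket)]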
+
+
+def convert_packet_to_obspy_format(packet: Union[EHETPacket, DTPacket,
+                                                 SOHPacket],
+                                   unpacker: Unpacker) -> Tuple:
+    """
+    Convert an RT130 packet into a tuple that can be assembled into a numpy
+    array of type PACKET_FINAL_DTYPE.
+    :param packet: an RT130 packet.
+    :param unpacker: the decoder used to decode the data.
+    :return: a tuple that can be converted into an object of type
+        PACKET_FINAL_DTYPE that contains the data stored in packet.
+    """
+    # We want to convert the packet to a tuple. In order to make it easier to
+    # maintain, we first convert the packet to a dictionary. Then, we grab the
+    # values of the dictionary as a tuple to get the final result.
+    converted_packet = {}
+    converted_packet['packet_type'] = packet.header.packet_type
+    converted_packet['experiment_number'] = packet.header.experiment_number
+    # Obspy only stores the last two digits of the year.
+    converted_packet['year'] = packet.header.time.year % 100
+    converted_packet['unit_id'] = packet.header.unit_id
+    converted_packet['time'] = packet.header.time.ns
+    converted_packet['byte_count'] = packet.header.byte_count
+    converted_packet['packet_sequence'] = packet.header.packet_sequence
+    converted_packet['event_number'] = packet.extended_header.event_number
+    converted_packet[
+        'data_stream_number'] = packet.extended_header.data_stream_number
+    converted_packet['channel_number'] = packet.extended_header.channel_number
+    converted_packet[
+        'number_of_samples'] = packet.extended_header.number_of_samples
+    converted_packet['flags'] = packet.extended_header.flags
+    converted_packet['data_format'] = packet.extended_header.data_format
+
+    if converted_packet['packet_type'] == 'DT':
+        # Obspy stores the data as a list of 1-byte integers. We store the
+        # data as an arbitrary-length integer, so we need to do some
+        # conversion. To make encoding and decoding the data point easier, we
+        # store it in 4 bytes no matter what the data format is. This only
+        # has an effect on data with format 16. Thanks to a quirk of
+        # two's-complement binary encoding, however, this does not cause any
+        # problem.
+        data_size = 4
+        format_char = 'B'
+        converted_packet['payload'] = np.empty(1000, np.uint8)
+        packet_data = list(unpacker.unpack(
+            f'{data_size}{format_char}',
+            packet.data.to_bytes(data_size, 'big', signed=True)
+        ))
+        converted_packet['payload'][:4] = packet_data
+    elif converted_packet['packet_type'] in ['EH', 'ET']:
+        eh_et_payload_size = eh_et_payload_end_in_packet - 24
+        converted_packet['payload'] = np.empty(1000, np.uint8)
+        packet_data = np.frombuffer(packet.data, np.uint8)
+        converted_packet['payload'][:eh_et_payload_size] = packet_data
+    else:
+        converted_packet['payload'] = np.frombuffer(packet.data, np.uint8)
+    return tuple(converted_packet.values())
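+
+# A minimal sketch of the two's-complement round trip mentioned above: a
+# negative 16-bit sample widened to 4 big-endian bytes decodes back to the
+# same value, so always storing 4 bytes is safe.
+#
+#     import struct
+#     raw = (-5).to_bytes(4, 'big', signed=True)
+#     struct.unpack('4B', raw)                 # -> (255, 255, 255, 251)
+#     int.from_bytes(raw, 'big', signed=True)  # -> -5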
diff --git a/sohstationviewer/model/reftek/from_rt2ms/soh_packet.py b/sohstationviewer/model/reftek_data/reftek_reader/soh_packet.py
similarity index 95%
rename from sohstationviewer/model/reftek/from_rt2ms/soh_packet.py
rename to sohstationviewer/model/reftek_data/reftek_reader/soh_packet.py
index ff54fe1539c53d352b8efa036bc66ed0f6c7eb0b..7900f68abf02a98e7ea0e572ed132703d2bb11f6 100644
--- a/sohstationviewer/model/reftek/from_rt2ms/soh_packet.py
+++ b/sohstationviewer/model/reftek_data/reftek_reader/soh_packet.py
@@ -1,5 +1,7 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+from __future__ import annotations
+
+from typing import Optional, List
+
 """
 Routines building upon obspy.io.reftek.packet.
 Redefine packet header (PACKET) based on rt130 manual.
@@ -268,14 +270,14 @@ FD_INFO = {
     "_coeff": (984, None)}
 
 
-class Packet(obspy_rt130_packet.Packet):
+class SOHPacket(obspy_rt130_packet.Packet):
     """Class used to define shared tools for the SOH packets"""
 
     _headers = ('experiment_number', 'unit_id', 'byte_count',
                 'packet_sequence', 'time')
 
     @staticmethod
-    def from_data(data):
+    def from_data(data: np.ndarray) -> SOHPacket:
         """
         Checks for valid packet type identifier and returns appropriate
         packet object
@@ -300,7 +302,7 @@ class Packet(obspy_rt130_packet.Packet):
             raise NotImplementedError(msg.format(packet_type))
 
     @staticmethod
-    def time_tag(time, implement_time=None):
+    def time_tag(time: UTCDateTime,
+                 implement_time: Optional[int] = None) -> str:
         if implement_time is not None and time > UTCDateTime(ns=implement_time):  # noqa: E501
             time = UTCDateTime(ns=implement_time)
         return "{:04d}:{:03d}:{:02d}:{:02d}:{:02d}:{:03d}".format(time.year,
@@ -311,20 +313,14 @@ class Packet(obspy_rt130_packet.Packet):
                                                                   time.microsecond)  # noqa: E501
 
     @property
-    def packet_tagline(self):
+    def packet_tagline(self) -> str:
         return "\n"
-        # return "\n\n{:s} exp {:02d} bytes {:04d} {:s} ID: {:s} seq {:04d}".format(self.type.decode(),  # noqa: E501
-        #                                                                           self.experiment_number,  # noqa: E501
-        #                                                                           self.byte_count,  # noqa: E501
-        #                                                                           self.time_tag(self.time),  # noqa: E501
-        #                                                                           self.unit_id.decode(),  # noqa: E501
-        #                                                                           self.packet_sequence)  # noqa: E501
 
 
-class SHPacket(Packet):
+class SHPacket(SOHPacket):
     """Class used to parse and generate string representation for SH packets"""
 
-    def __init__(self, data):
+    def __init__(self, data: np.ndarray) -> None:
         self._data = data
         payload = self._data["payload"].tobytes()
         start_sh = 0
@@ -341,7 +337,7 @@ class SHPacket(Packet):
             setattr(self, name, data)
             start_sh = start_sh + length
 
-    def __str__(self):
+    def __str__(self) -> str:
         info = []
         # info.append(self.packet_tagline)
         packet_soh_string = ("\nState of Health  {:s}   ST: {:s}"
@@ -352,10 +348,10 @@ class SHPacket(Packet):
         return info
 
 
-class SCPacket(Packet):
+class SCPacket(SOHPacket):
     """Class used to parse and generate string representation for SC packets"""
 
-    def __init__(self, data):
+    def __init__(self, data: np.ndarray) -> None:
         # Station/Channel payload
         self._data = data
         payload = self._data["payload"].tobytes()
@@ -389,12 +385,8 @@ class SCPacket(Packet):
                 setattr(self, name, data)
                 start_info = start_info + length
 
-    def __str__(self):
+    def __str__(self) -> str:
         info = []
-        # info.append(self.packet_tagline)
-        # packet_soh_string = ("\nStation Channel Definition  {:s}   ST: {:s}"
-        #                      .format(self.time_tag(self.time, implement_time=self.implement_time),  # noqa: E501
-        #                              self.unit_id.decode()))
         packet_soh_string = ("\nStation Channel Definition  {:s}   ST: {:s}"
                              .format(self.time_tag(self.time),
                                      self.unit_id.decode()))
@@ -430,7 +422,7 @@ class SCPacket(Packet):
                 info.append("\n     Comments - " + getattr(self, 'sc' + str(ind_sc) + '_comments'))  # noqa: E501
         return info
 
-    def get_info(self, infos):
+    def get_info(self, infos: List[List]) -> List[List]:
         """
         Compile relevant information - unit id, reference channel, network
         code, station code, component code, gain and implementation time - for
@@ -461,10 +453,10 @@ class SCPacket(Packet):
         return infos
 
 
-class OMPacket(Packet):
+class OMPacket(SOHPacket):
     """Class used to parse and generate string representation for OM packets"""
 
-    def __init__(self, data):
+    def __init__(self, data: np.ndarray) -> None:
         self._data = data
         payload = self._data["payload"].tobytes()
         start_om = 0
@@ -481,7 +473,7 @@ class OMPacket(Packet):
             setattr(self, name, data)
             start_om = start_om + length
 
-    def __str__(self):
+    def __str__(self) -> str:
         info = []
         # info.append(self.packet_tagline)
         packet_soh_string = ("\nOperating Mode Definition  {:s}   ST: {:s}"
@@ -503,10 +495,10 @@ class OMPacket(Packet):
         return info
 
 
-class DSPacket(Packet):
+class DSPacket(SOHPacket):
     """Class used to parse and generate string representation for DS packets"""
 
-    def __init__(self, data):
+    def __init__(self, data: np.ndarray) -> None:
         # Data Stream payload
         self._data = data
         payload = self._data["payload"].tobytes()
@@ -561,7 +553,7 @@ class DSPacket(Packet):
                 msg = ("Trigger type {:s} not found".format(trigger_type))
                 warnings.warn(msg)
 
-    def __str__(self):
+    def __str__(self) -> str:
         info = []
         info.append(self.packet_tagline)
         packet_soh_string = ("\nData Stream Definition  {:s}   ST: {:s}"
@@ -597,7 +589,7 @@ class DSPacket(Packet):
                                     info.append(" ".join(["\n     Trigger", key, trigger_info]))  # noqa: E501
         return info
 
-    def get_info(self, infos):
+    def get_info(self, infos: List[List]) -> List[List]:
         """
         Compile relevant information - reference data stream, band and
         instrument codes, sample rate and implementation time - for given DS
@@ -624,10 +616,10 @@ class DSPacket(Packet):
         return infos
 
 
-class ADPacket(Packet):
+class ADPacket(SOHPacket):
     """Class used to parse and generate string representation for AD packets"""
 
-    def __init__(self, data):
+    def __init__(self, data: np.ndarray) -> None:
         self._data = data
         payload = self._data["payload"].tobytes()
         start_ad = 0
@@ -644,7 +636,7 @@ class ADPacket(Packet):
             setattr(self, name, data)
             start_ad = start_ad + length
 
-    def __str__(self):
+    def __str__(self) -> str:
         info = []
         # info.append(self.packet_tagline)
         packet_soh_string = ("\nAuxiliary Data Parameter  {:s}   ST: {:s}"
@@ -664,10 +656,10 @@ class ADPacket(Packet):
         return info
 
 
-class CDPacket(Packet):
+class CDPacket(SOHPacket):
     """Class used to parse and generate string representation for CD packets"""
 
-    def __init__(self, data):
+    def __init__(self, data: np.ndarray) -> None:
         # Calibration parameter payload
         self._data = data
         payload = self._data["payload"].tobytes()
@@ -736,7 +728,7 @@ class CDPacket(Packet):
                 setattr(self, name, data)
                 start_info_seq = start_info_seq + length
 
-    def __str__(self):
+    def __str__(self) -> str:
         info = []
         # info.append(self.packet_tagline)
         packet_soh_string = ("\nCalibration Definition  {:s}   ST: {:s}"
@@ -790,10 +782,10 @@ class CDPacket(Packet):
         return info
 
 
-class FDPacket(Packet):
+class FDPacket(SOHPacket):
     """Class used to parse and generate string representation for FD packets"""
 
-    def __init__(self, data):
+    def __init__(self, data: np.ndarray) -> None:
         # Filter description payload
         self._data = data
         payload = self._data["payload"]
@@ -845,7 +837,7 @@ class FDPacket(Packet):
                 setattr(self, name, data)
                 start_info = start_info + length
 
-    def __str__(self):
+    def __str__(self) -> str:
         info = []
         # info.append(self.packet_tagline)
         packet_soh_string = ("\nFilter Description  {:s}   ST: {:s}"
@@ -873,7 +865,7 @@ class FDPacket(Packet):
         return info
 
     @staticmethod
-    def twosCom_bin2dec(bin_, digit):
+    def twosCom_bin2dec(bin_: str, digit: int) -> int:
         while len(bin_) < digit:
             bin_ = '0' + bin_
         if bin_[0] == '0':
@@ -882,7 +874,7 @@ class FDPacket(Packet):
             return -1 * (int(''.join('1' if x == '0' else '0' for x in bin_), 2) + 1)  # noqa: E501
 
     @staticmethod
-    def twosCom_dec2bin(dec, digit):
+    def twosCom_dec2bin(dec: int, digit: int) -> str:
         if dec >= 0:
             bin_ = bin(dec).split("0b")[1]
             while len(bin_) < digit:
@@ -893,7 +885,7 @@ class FDPacket(Packet):
             return bin(dec - pow(2, digit)).split("0b")[1]
 
 
-def _initial_unpack_packets_soh(bytestring):
+def _initial_unpack_packets_soh(bytestring: bytes) -> np.ndarray:
     """
     First unpack data with dtype matching itemsize of storage in the reftek
     file, than allocate result array with dtypes for storage of python
diff --git a/sohstationviewer/view/file_information/get_file_information.py b/sohstationviewer/view/file_information/get_file_information.py
index 01eb4a54535bd536862e8f8e84f08f206b75fe1a..a1dab80d1225d0566b0782b993075c271def926d 100644
--- a/sohstationviewer/view/file_information/get_file_information.py
+++ b/sohstationviewer/view/file_information/get_file_information.py
@@ -3,7 +3,7 @@ from typing import Union, Dict, List, Set, Tuple
 from sohstationviewer.controller.plotting_data import format_time
 from sohstationviewer.model.general_data.general_data import GeneralData
 from sohstationviewer.model.mseed_data.mseed import MSeed
-from sohstationviewer.model.reftek.reftek import RT130
+from sohstationviewer.model.reftek_data.reftek import RT130
 from sohstationviewer.view.util.functions import extract_netcodes
 
 
diff --git a/sohstationviewer/view/main_window.py b/sohstationviewer/view/main_window.py
index 017af842356a56d417be6c447a5b64f6be23e5e4..a7398e53a2e970744304047c773b72b5cc010f68 100755
--- a/sohstationviewer/view/main_window.py
+++ b/sohstationviewer/view/main_window.py
@@ -160,7 +160,6 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         """
         self.bit_weight_opt: str = ''  # currently only need one option
         self.get_channel_prefer()
-        self.yyyy_mm_dd_action.triggered.emit()
 
         """
         waveform_dlg: widget to display waveform channels' plotting
@@ -196,6 +195,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         self.read_config()
         self.validate_config()
         self.apply_config()
+        self.yyyy_mm_dd_action.trigger()
 
     @QtCore.Slot()
     def save_plot(self):
@@ -364,6 +364,8 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         self.time_to_date_edit.setDisplayFormat(qt_format)
         self.time_from_date_edit.setDisplayFormat(qt_format)
         self.date_format = display_format
+        self.tps_dlg.date_format = self.date_format
+        self.waveform_dlg.date_format = self.date_format
 
     @QtCore.Slot()
     def open_files_list_item_double_clicked(self, item: FileListItem):
@@ -547,7 +549,8 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
                       self.is_plotting_waveform or self.is_plotting_tps)
         if is_working:
             msg = 'Already working'
-            display_tracking_info(self.tracking_info_text_browser, msg, 'info')
+            display_tracking_info(self.tracking_info_text_browser,
+                                  msg, LogType.INFO)
             return
         self.has_problem = False
 
@@ -571,6 +574,13 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         else:
             self.gap_minimum = None
 
+        # If waveform channels are selected, event DS information will be
+        # read from the EH/ET headers.
+        # rt130_waveform_data_req flags whether waveform data needs to be
+        # read for RT130 data sets.
+        rt130_waveform_data_req = (self.raw_check_box.isChecked() or
+                                   self.tps_check_box.isChecked())
+
         if self.mseed_wildcard_edit.text().strip() != '':
             try:
                 check_chan_wildcards_format(self.mseed_wildcard_edit.text())
@@ -657,7 +667,8 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
             read_start=self.start_tm,
             read_end=self.end_tm,
             include_mp123=self.mass_pos_123zne_check_box.isChecked(),
-            include_mp456=self.mass_pos_456uvw_check_box.isChecked()
+            include_mp456=self.mass_pos_456uvw_check_box.isChecked(),
+            rt130_waveform_data_req=rt130_waveform_data_req
         )
 
         self.data_loader.worker.finished.connect(self.data_loaded)
@@ -797,7 +808,8 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
 
         try:
             self.plotting_widget.plot_channels(
-                d_obj, sel_key, self.start_tm, self.end_tm, time_tick_total)
+                d_obj, sel_key, self.start_tm, self.end_tm, time_tick_total,
+                self.req_soh_chans)
         except Exception:
             fmt = traceback.format_exc()
             msg = f"Can't plot SOH data due to error: {str(fmt)}"
@@ -952,7 +964,8 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
                                'WHERE current=1')
         if len(rows) > 0:
             self.pref_soh_list_name = rows[0]['name']
-            self.pref_soh_list = [t.strip() for t in rows[0]['IDs'].split(',')]
+            self.pref_soh_list = [t.strip() for t in rows[0]['IDs'].split(',')
+                                  if t.strip() != '']
             self.pref_soh_list_data_type = rows[0]['dataType']
 
     def resizeEvent(self, event):
@@ -983,7 +996,7 @@ class MainWindow(QtWidgets.QMainWindow, UIMainWindow):
         """
         display_tracking_info(self.tracking_info_text_browser,
                               'Cleaning up...',
-                              'info')
+                              LogType.INFO)
         if self.data_loader.running:
             self.data_loader.thread.requestInterruption()
             self.data_loader.thread.quit()
diff --git a/sohstationviewer/view/plotting/gps_plot/extract_gps_data.py b/sohstationviewer/view/plotting/gps_plot/extract_gps_data.py
index 9a876211798221f824298ca880f39f94a1e7f734..0c0a6ecf102b45ef1ca8e0f41857fc853d6ffda9 100644
--- a/sohstationviewer/view/plotting/gps_plot/extract_gps_data.py
+++ b/sohstationviewer/view/plotting/gps_plot/extract_gps_data.py
@@ -7,7 +7,7 @@ import numpy as np
 from obspy import UTCDateTime
 
 from sohstationviewer.model.mseed_data.mseed import MSeed
-from sohstationviewer.model.reftek.reftek import RT130
+from sohstationviewer.model.reftek_data.reftek import RT130
 from sohstationviewer.view.plotting.gps_plot.gps_point import GPSPoint
 from sohstationviewer.view.util.enums import LogType
 
diff --git a/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py b/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
index 7a6caf5ffa6f9d36319f2523c254e9fcac0c2667..0e36e7ab4e08bee2f3caba711566357eec7473f0 100644
--- a/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
+++ b/sohstationviewer/view/plotting/plotting_widget/multi_threaded_plotting_widget.py
@@ -11,6 +11,8 @@ from sohstationviewer.view.plotting.plotting_widget.plotting_processor import (
 from sohstationviewer.view.plotting.plotting_widget.plotting_widget import (
     PlottingWidget)
 from sohstationviewer.view.util.enums import LogType
+from sohstationviewer.view.util.functions import (
+    replace_actual_question_chans, remove_not_found_chans)
 
 from sohstationviewer.controller.util import display_tracking_info
 from sohstationviewer.controller.plotting_data import get_title
@@ -28,7 +30,8 @@ class MultiThreadedPlottingWidget(PlottingWidget):
     def __init__(self, *args, **kwargs):
         PlottingWidget.__init__(self, *args, **kwargs)
         self.data_processors: List[PlottingChannelProcessor] = []
-
+        # pref_order: order of channels to be plotted
+        self.pref_order: List[str] = []
         # Only one data processor can run at a time, so it is not a big problem
         #
         self.thread_pool = QtCore.QThreadPool()
@@ -105,19 +108,33 @@ class MultiThreadedPlottingWidget(PlottingWidget):
             return True
 
     def create_plotting_channel_processors(
-            self, plotting_data: Dict, need_db_info: bool = False) -> None:
+            self, plotting_data: Dict,
+            need_db_info: bool = False) -> None:
         """
-        Create a data processor for each channel data.
+        Create a data processor for each channel's data, following the order
+            given in pref_order. If pref_order isn't given, channels are
+            processed in the sorted order of plotting_data's keys.
 
         :param plotting_data: dict of data by chan_id
         :param need_db_info: flag to get db info
         """
-        for chan_id in plotting_data:
+        chan_order = self.pref_order if self.pref_order \
+            else sorted(list(plotting_data.keys()))
+        chan_order = replace_actual_question_chans(
+            chan_order, list(plotting_data.keys()))
+        chan_order = remove_not_found_chans(
+            chan_order, list(plotting_data.keys()), self.processing_log)
+
+        not_plot_chans = []
+        for chan_id in chan_order:
             if need_db_info:
-                chan_db_info = get_chan_plot_info(
-                    chan_id, self.parent.data_type, self.c_mode)
-                if chan_db_info['height'] == 0:
+                chan_db_info = get_chan_plot_info(chan_id,
+                                                  self.parent.data_type,
+                                                  self.c_mode)
+                if (chan_db_info['height'] == 0 or
+                        chan_db_info['plotType'] == ''):
                     # not draw
+                    not_plot_chans.append(chan_id)
                     continue
                 if 'DEFAULT' in chan_db_info['channel']:
                     msg = (f"Channel {chan_id}'s "
@@ -127,14 +144,16 @@ class MultiThreadedPlottingWidget(PlottingWidget):
                     #  instruction here
                     self.processing_log.append((msg, LogType.WARNING))
 
-                if chan_db_info['plotType'] == '':
-                    continue
-
                 plotting_data[chan_id]['chan_db_info'] = chan_db_info
+        if not_plot_chans:
+            msg = (f"Based on the database settings 'plotType' and 'height', "
+                   f"the following channels won't be plotted: "
+                   f"{', '.join(not_plot_chans)}")
+            self.processing_log.append((msg, LogType.WARNING))
 
         self.move_soh_channels_with_link_to_the_end()
 
-        for chan_id in plotting_data:
+        for chan_id in chan_order:
             if 'chan_db_info' not in plotting_data[chan_id]:
                 continue
             channel_processor = PlottingChannelProcessor(
@@ -165,7 +184,8 @@ class MultiThreadedPlottingWidget(PlottingWidget):
         for channel in channels_to_move:
             self.plotting_data1[channel] = self.plotting_data1.pop(channel)
 
-    def plot_channels(self, d_obj, key, start_tm, end_tm, time_ticks_total):
+    def plot_channels(self, d_obj, key, start_tm, end_tm, time_ticks_total,
+                      pref_order=[]):
         """
         Prepare to plot waveform/SOH/mass-position data by creating a data
         processor for each channel, then, run the processors.
@@ -175,12 +195,14 @@ class MultiThreadedPlottingWidget(PlottingWidget):
         :param start_tm: requested start time to read
         :param end_tm: requested end time to read
         :param time_ticks_total: max number of tick to show on time bar
+        :param pref_order: order of channels to be plotted
         """
+        self.pref_order = pref_order
         if not self.is_working:
             self.reset_widget()
             self.is_working = True
             start_msg = f'Plotting {self.name} data...'
-            display_tracking_info(self.tracking_box, start_msg, 'info')
+            display_tracking_info(self.tracking_box, start_msg, LogType.INFO)
             ret = self.init_plot(d_obj, key, start_tm, end_tm,
                                  time_ticks_total)
             if not ret:
@@ -188,8 +210,10 @@ class MultiThreadedPlottingWidget(PlottingWidget):
                 self.clean_up()
                 self.finished.emit()
                 return
+
             self.create_plotting_channel_processors(self.plotting_data1, True)
             self.create_plotting_channel_processors(self.plotting_data2, True)
+
             self.process_channel()
 
     @QtCore.Slot()
@@ -307,32 +331,6 @@ class MultiThreadedPlottingWidget(PlottingWidget):
         all running background threads.
         """
         display_tracking_info(self.tracking_box,
-                              f'{self.name} plot stopped', 'info')
+                              f'{self.name} plot stopped', LogType.INFO)
         self.is_working = False
         self.stopped.emit()
-
-    def set_lim(self, first_time=False, is_waveform=False):
-        """
-        The set_lim method of the base class PlottingWidget was not designed
-        with multi-threading in mind, so it made some assumption that is
-        difficult to satisfy in a multi-threaded design. While these
-        assumptions do not affect the initial plotting of the data, they make
-        designing a system for zooming more difficult.
-
-        Rather than trying to comply with the design of PlottingWidget.set_lim,
-        we decide to work around. This set_lim method still keeps the
-        functionality of processing the data based on the zoom range. However,
-        it delegates setting the new limit of the x and y axes to
-        PlottingWidget.set_lim.
-
-        :param first_time: flag that indicate whether set_lim is called the
-            fist time for a data set.
-        """
-        self.data_processors = []
-        if not self.is_working:
-            self.is_working = True
-            start_msg = 'Zooming in...'
-            display_tracking_info(self.tracking_box, start_msg, 'info')
-            self.create_plotting_channel_processors(self.plotting_data1)
-            self.create_plotting_channel_processors(self.plotting_data2)
-            self.process_channel()
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting.py b/sohstationviewer/view/plotting/plotting_widget/plotting.py
index 1a9268988a4960b05616b498961e8e3029a54e1a..1a169ac683cf9f2627f65f06bfc4b6cb6a07130b 100644
--- a/sohstationviewer/view/plotting/plotting_widget/plotting.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting.py
@@ -37,6 +37,7 @@ class Plotting:
             self.parent.plotting_bot, plot_h, has_min_max_lines=False)
         ax.x = None
         ax.plot([0], [0], linestyle="")
+        ax.chan_db_info = None
         return ax
 
     def plot_multi_color_dots(self, c_data, chan_db_info, chan_id,
@@ -107,7 +108,6 @@ class Plotting:
 
         total_samples = len(x)
 
-        x = sorted(x)
         if len(colors) != 1:
             sample_no_colors = [clr['W']]
         else:
@@ -116,10 +116,9 @@ class Plotting:
         self.plotting_axes.set_axes_info(
             ax, [total_samples], sample_no_colors=sample_no_colors,
             chan_db_info=chan_db_info, linked_ax=linked_ax)
-        if linked_ax is None:
-            ax.x = x
-        else:
-            ax.linkedX = x
+
+        ax.x_list = c_data['times']
+        ax.chan_db_info = chan_db_info
         return ax
 
     def plot_up_down_dots(self, c_data, chan_db_info, chan_id, ax, linked_ax):
@@ -172,18 +171,20 @@ class Plotting:
         ax.plot(points_list[1], len(points_list[1]) * [0.5], linestyle="",
                 marker='s', markersize=2, zorder=constants.Z_ORDER['DOT'],
                 color=clr[colors[1]], picker=True, pickradius=3)
-        x = points_list[0] + points_list[1]
-        x = sorted(x)
+
         ax.set_ylim(-2, 2)
         self.plotting_axes.set_axes_info(
             ax, [len(points_list[0]), len(points_list[1])],
             sample_no_colors=[clr[colors[0]], clr[colors[1]]],
             sample_no_pos=[0.25, 0.75],
             chan_db_info=chan_db_info, linked_ax=linked_ax)
-        if linked_ax is None:
-            ax.x = x
-        else:
-            ax.linkedX = x
+
+        # x_bottom, x_top are the times of data points to be displayed at
+        # bottom or top of the plot
+        ax.x_bottom = np.array(points_list[0])
+        ax.x_top = np.array(points_list[1])
+
+        ax.chan_db_info = chan_db_info
         return ax
 
     def plot_time_dots(self, c_data, chan_db_info, chan_id, ax, linked_ax):
@@ -222,10 +223,8 @@ class Plotting:
                     linestyle='', zorder=constants.Z_ORDER['LINE'],
                     color=clr[color], picker=True,
                     pickradius=3)
-        if linked_ax is None:
-            ax.x_list = x_list
-        else:
-            ax.linkedX = x_list
+        ax.x_list = x_list
+        ax.chan_db_info = chan_db_info
         return ax
 
     def plot_lines_dots(self, c_data, chan_db_info, chan_id,
@@ -270,13 +269,14 @@ class Plotting:
                 obj, c = cStr.split(':')
                 colors[obj] = c
         l_color = 'G'
-        d_color = 'W'
         has_dot = False
         if 'L' in colors:
             l_color = colors['L']
         if 'D' in colors:
             d_color = colors['D']
             has_dot = True
+        else:
+            d_color = l_color
 
         if chan_id == 'GPS Lk/Unlk':
             sample_no_list = []
@@ -292,7 +292,7 @@ class Plotting:
             info=info, y_list=y_list, linked_ax=linked_ax)
 
         for x, y in zip(x_list, y_list):
-            if not has_dot:
+            if not has_dot and sample_no_list[0] > 1:
                 # set marker to be able to click point for info
                 # but marker's size is small to not show dot.
                 ax.myPlot = ax.plot(x, y, marker='o', markersize=0.01,
@@ -309,12 +309,9 @@ class Plotting:
                                     mec=clr[d_color],
                                     picker=True, pickradius=3)
 
-        if linked_ax is None:
-            ax.x_list = x_list
-            ax.y_list = y_list
-        else:
-            ax.linkedX = x_list
-            ax.linkedY = y_list
+        ax.x_list = x_list
+        ax.y_list = y_list
+        ax.chan_db_info = chan_db_info
         return ax
 
     def plot_lines_s_rate(self, c_data, chan_db_info, chan_id, ax, linked_ax):
@@ -399,4 +396,5 @@ class Plotting:
                        zorder=constants.Z_ORDER['DOT'])
         ax.x_list = x_list
         ax.y_list = y_list
+        ax.chan_db_info = chan_db_info
         return ax
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py b/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
index 9becc2273e3f20a52939135af500c90785bcf8f0..cbe97d14472c1054915a2e7f9f9f32b1ab4a0d11 100644
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_axes.py
@@ -184,13 +184,6 @@ class PlottingAxes:
                 axes, label of channel will be displayed with sub title's
                 format - under main title.
         """
-        if linked_ax is None:
-            # clear all texts before recreated.
-            # But not clear when this is a linked_ax because texts are already
-            # cleared with ax, if clear with linked_ax all info of ax won't be
-            # displayed
-            ax.texts.clear()
-
         if label is None:
             label = chan_db_info['label']
 
@@ -229,7 +222,11 @@ class PlottingAxes:
 
         # set samples' total on right side
         if len(sample_no_list) == 1:
-            ax.sampleLbl = ax.text(
+            # center_total_point_lbl: the label that displays the total number
+            # of data points for plots whose ax has the attribute x_list.
+            # The plotTypes that use this label are linesDot, linesSRate,
+            # linesMassPos, dotForTime, multiColorDot.
+            ax.center_total_point_lbl = ax.text(
                 1.005, 0.5,
                 sample_no_list[0],
                 horizontalalignment='left',
@@ -240,14 +237,13 @@ class PlottingAxes:
                 size=self.parent.font_size
             )
         else:
-            # Each zoom this infor is created again.
-            # Plots that have data separated in two to have text in top and
-            # bottom, sample rate= 1. These numbers completely depends
-            # on data created in trim_downsample_chan_with_spr_less_or_equal_1
-            # and won't be changed in set_lim, then don't need to assign a
-            # variable for it.
-            # bottom
-            ax.text(
+            # bottom_total_point_lbl and top_total_point_lbl are labels that
+            # display the total number of data points split into the top and
+            # bottom of the plot. The ax needs to include the attributes
+            # x_bottom and x_top.
+            # The plotTypes that use these labels are upDownDots (and linesDot
+            # with channel='GPS Lk/Unlk', which will get x_bottom and x_top in
+            # another MR)
+            ax.bottom_total_point_lbl = ax.text(
                 1.005, sample_no_pos[0],
                 sample_no_list[0],
                 horizontalalignment='left',
@@ -258,7 +254,7 @@ class PlottingAxes:
                 size=self.parent.font_size
             )
             # top
-            ax.text(
+            ax.top_total_point_lbl = ax.text(
                 1.005, sample_no_pos[1],
                 sample_no_list[1],
                 horizontalalignment='left',
diff --git a/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py b/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
index 77a60ce7172299433665c51d96590a7722aa2634..cfcfb8a0c63b1837becc4adebd15c423d6625eda 100755
--- a/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
+++ b/sohstationviewer/view/plotting/plotting_widget/plotting_widget.py
@@ -2,6 +2,7 @@
 Class of which object is used to plot data
 """
 from typing import List, Optional, Union
+import numpy as np
 import matplotlib.text
 from matplotlib import pyplot as pl
 from matplotlib.transforms import Bbox
@@ -466,26 +467,6 @@ class PlottingWidget(QtWidgets.QScrollArea):
         ruler.set_visible(True)
         ruler.xy1 = (xdata, 0)
         ruler.xy2 = (xdata, self.bottom)
-        try:
-            if ruler == self.zoom_marker2:
-                # make zoom_marker2 follow mouse.
-                # need to disconnect when state of rulers change
-                self.follower = self.fig.canvas.mpl_connect(
-                    "motion_notify_event", self.zoom_marker2_follow_mouse)
-        except AttributeError:
-            pass
-
-    def zoom_marker2_follow_mouse(self, mouseevent):
-        """
-        Set zoom_marker2's to follow mouse's x.
-
-        :param mouseevent: motion_notify_event - event to help keeping track
-            of mouse move
-        """
-        xdata = self.get_timestamp(mouseevent)
-        self.zoom_marker2.xy1 = (xdata, 0)
-        self.zoom_marker2.xy2 = (xdata, self.bottom)
-        self.draw()
 
     def keyPressEvent(self, event):
         """
@@ -557,7 +538,7 @@ class PlottingWidget(QtWidgets.QScrollArea):
                                                                self.max_x)]
 
                 # reset total of samples on the right
-                self.gap_bar.sampleLbl.set_text(len(new_gaps))
+                self.gap_bar.center_total_point_lbl.set_text(len(new_gaps))
 
         for ax in self.axes:
             if hasattr(ax, 'x') and ax.x is None:
@@ -568,10 +549,25 @@ class PlottingWidget(QtWidgets.QScrollArea):
             if not first_time:
                 new_min_y = None
                 new_max_y = None
+                if hasattr(ax, 'x_top'):
+                    # plot_up_down_dots
+                    new_x_bottom_indexes = np.where(
+                        (ax.x_bottom >= self.min_x) &
+                        (ax.x_bottom <= self.max_x))[0]
+                    ax.bottom_total_point_lbl.set_text(
+                        new_x_bottom_indexes.size)
+                    new_x_top_indexes = np.where(
+                        (ax.x_top >= self.min_x) &
+                        (ax.x_top <= self.max_x))[0]
+                    ax.top_total_point_lbl.set_text(
+                        new_x_top_indexes.size)
                 if hasattr(ax, 'x_list'):
                     if not hasattr(ax, 'y_list'):
-                        # dotForTime plots have attribute 'x_list' but not
-                        # 'y_list'
+                        # plot_time_dots and plot_multi_color_dots
+                        x = ax.x_list[0]
+                        new_x_indexes = np.where(
+                            (x >= self.min_x) & (x <= self.max_x))[0]
+                        ax.center_total_point_lbl.set_text(new_x_indexes.size)
                         continue
                     total_points = 0
                     tr_min_ys = []
@@ -590,21 +586,12 @@ class PlottingWidget(QtWidgets.QScrollArea):
                     if tr_min_ys != []:
                         new_min_y = min(tr_min_ys)
                         new_max_y = max(tr_max_ys)
-                else:
-                    total_points = len(ax.x)
-                    if hasattr(ax, 'y') and len(ax.y) > 0:
-                        new_min_y = min(ax.y)
-                        new_max_y = max(ax.y)
-                try:
-                    ax.sampleLbl.set_text(total_points)
-                except AttributeError:
-                    # for case of having top and bottom total points
-                    # which is for RT130's SOH only, trust in total point
-                    # calculated in set_axes_info
-                    pass
-
-                if new_min_y is not None:
-                    self.plotting_axes.set_axes_ylim(ax, new_min_y, new_max_y)
+                        # if total_points == 1, the y limits shouldn't be set
+                        # again or the plot would collapse into one line
+                        if total_points > 1:
+                            self.plotting_axes.set_axes_ylim(
+                                ax, new_min_y, new_max_y)
+                    ax.center_total_point_lbl.set_text(total_points)
 
     def draw(self):
         """
diff --git a/sohstationviewer/view/plotting/state_of_health_widget.py b/sohstationviewer/view/plotting/state_of_health_widget.py
index acb00711d666b1c595aa2f9553a50eb1bf41f1b2..bc219840a012ad21555f4fe35dcdffc1e2563d98 100644
--- a/sohstationviewer/view/plotting/state_of_health_widget.py
+++ b/sohstationviewer/view/plotting/state_of_health_widget.py
@@ -66,19 +66,8 @@ class SOHWidget(MultiThreadedPlottingWidget):
         linked_ax = None
         if chan_db_info['linkedChan'] not in [None, 'None', '']:
             linked_ax = self.plotting_data1[chan_db_info['linkedChan']]['ax']
-        if 'ax' not in c_data:
-            ax = getattr(self.plotting, plot_functions[plot_type][1])(
-                c_data, chan_db_info, chan_id, None, linked_ax)
-            if ax is None:
-                return
-            c_data['ax'] = ax
-            ax.chan = chan_id
-            self.axes.append(ax)
-        else:
-            for artist in c_data['ax'].lines + c_data['ax'].collections:
-                artist.remove()
-            getattr(self.plotting, plot_functions[plot_type][1])(
-                c_data, chan_db_info, chan_id, c_data['ax'], linked_ax)
-
-    def set_lim(self, first_time=False):
-        super().set_lim(first_time, is_waveform=False)
+        ax = getattr(self.plotting, plot_functions[plot_type][1])(
+            c_data, chan_db_info, chan_id, None, linked_ax)
+        c_data['ax'] = ax
+        ax.chan = chan_id
+        self.axes.append(ax)
diff --git a/sohstationviewer/view/plotting/time_power_squared_dialog.py b/sohstationviewer/view/plotting/time_power_squared_dialog.py
index f27f3c4362b8d0cf30d521808810b3da6fc5856d..5e533b53b591894908a59bca12eb8899c5dde0d3 100755
--- a/sohstationviewer/view/plotting/time_power_squared_dialog.py
+++ b/sohstationviewer/view/plotting/time_power_squared_dialog.py
@@ -498,6 +498,11 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         data_type: str - type of data being plotted
         """
         self.data_type = None
+        """
+        date_format: format for date
+        """
+        self.date_format: str = 'YYYY-MM-DD'
+
         self.setGeometry(50, 50, 1200, 700)
         self.setWindowTitle("TPS Plot")
 
@@ -577,16 +582,16 @@ class TimePowerSquaredDialog(QtWidgets.QWidget):
         self.connect_signals()
         self.color_range_changed()
 
-    def set_data(self, data_type, file_name):
+    def set_data(self, data_type: str, folder_name: str):
         """
         Set data_type and the window's title.
 
-        :param data_type: str - data type of data being plotted
-        :param file_name: str - name of the file/folder of the data set to be
+        :param data_type: data type of data being plotted
+        :param folder_name: name of the folder of the data set to be
             displayed
         """
         self.data_type = data_type
-        self.setWindowTitle("TPS Plot %s - %s" % (data_type, file_name))
+        self.setWindowTitle("TPS Plot %s - %s" % (data_type, folder_name))
 
     def resizeEvent(self, event):
         """
diff --git a/sohstationviewer/view/plotting/waveform_dialog.py b/sohstationviewer/view/plotting/waveform_dialog.py
index ffcc0eac5983498c58d5ea1e48ab4c89dbd535e6..24c9a9a8cfb2256c3f40f1a7ef7bbe7776f4305d 100755
--- a/sohstationviewer/view/plotting/waveform_dialog.py
+++ b/sohstationviewer/view/plotting/waveform_dialog.py
@@ -51,22 +51,11 @@ class WaveformWidget(MultiThreadedPlottingWidget):
         plot_type = chan_db_info['plotType']
 
         # refer to doc string for mass_pos_data to know the reason for 'ax_wf'
-        if 'ax_wf' not in c_data:
-            ax = getattr(self.plotting, plot_functions[plot_type][1])(
-                c_data, chan_db_info, chan_id, None, None)
-            if ax is None:
-                return
-            c_data['ax_wf'] = ax
-            ax.chan = chan_id
-            self.axes.append(ax)
-        else:
-            for artist in c_data['ax_wf'].lines + c_data['ax_wf'].collections:
-                artist.remove()
-            getattr(self.plotting, plot_functions[plot_type][1])(
-                c_data, chan_db_info, chan_id, c_data['ax_wf'], None)
-
-    def set_lim(self, first_time=False):
-        super().set_lim(first_time, is_waveform=True)
+        ax = getattr(self.plotting, plot_functions[plot_type][1])(
+            c_data, chan_db_info, chan_id, None, None)
+        c_data['ax_wf'] = ax
+        ax.chan = chan_id
+        self.axes.append(ax)
 
 
 class WaveformDialog(QtWidgets.QWidget):
@@ -85,6 +74,10 @@ class WaveformDialog(QtWidgets.QWidget):
         data_type: str - type of data being plotted
         """
         self.data_type = None
+        """
+        date_format: format for date
+        """
+        self.date_format: str = 'YYYY-MM-DD'
         self.setGeometry(50, 10, 1600, 700)
         self.setWindowTitle("Raw Data Plot")
 
@@ -118,12 +111,12 @@ class WaveformDialog(QtWidgets.QWidget):
         self.info_text_browser.setFixedHeight(60)
         bottom_layout.addWidget(self.info_text_browser)
 
-    def set_data(self, data_type, folder_name):
+    def set_data(self, data_type: str, folder_name: str):
         """
         Set data_type and the window's title.
 
-        :param data_type: str - data type of data being plotted
-        :param folder_name: str - name of the folder of the data set to be
+        :param data_type: data type of data being plotted
+        :param folder_name: name of the folder of the data set to be
             displayed
         """
         self.data_type = data_type
diff --git a/sohstationviewer/view/ui/main_ui.py b/sohstationviewer/view/ui/main_ui.py
index 194b23483bc13cbd916c0d77a737d556eee6a313..4c98291a6cc6a45533b847dc17f65cb2f97544ac 100755
--- a/sohstationviewer/view/ui/main_ui.py
+++ b/sohstationviewer/view/ui/main_ui.py
@@ -721,7 +721,6 @@ class UIMainWindow(object):
             lambda: main_window.set_date_format('YYYYMMMDD'))
         self.yyyy_doy_action.triggered.connect(
             lambda: main_window.set_date_format('YYYY:DOY'))
-        self.yyyy_mm_dd_action.trigger()
 
         # Database
         self.add_edit_data_type_action.triggered.connect(
diff --git a/sohstationviewer/view/util/functions.py b/sohstationviewer/view/util/functions.py
index 254f32030c796164cd0399d3e7d938174df27c8d..1a70f2fac011c174cbf8113c06ae8b33aae06f2f 100644
--- a/sohstationviewer/view/util/functions.py
+++ b/sohstationviewer/view/util/functions.py
@@ -254,12 +254,8 @@ def get_total_miny_maxy(
     if new_x.size == 0:
         return 0, None, None
 
-    new_min_x = min(new_x)
-    new_max_x = max(new_x)
-
-    new_min_x_index = np.where(x == new_min_x)[0][0]
-    new_max_x_index = np.where(x == new_max_x)[0][0]
-
+    new_min_x_index = min(new_x_indexes)
+    new_max_x_index = max(new_x_indexes)
     new_y = y[new_min_x_index:new_max_x_index + 1]
     new_min_y = min(new_y)
     new_max_y = max(new_y)
@@ -328,5 +324,49 @@ def get_index_from_time(chan_data: List[np.ndarray], tm: float, val: float) \
     return list_idx, section_idx
 
 
+def remove_not_found_chans(
+        chan_order: List[str], actual_chans: List[str],
+        processing_log: List[Tuple[str, LogType]]) -> List[str]:
+    """
+    Remove channels that are not found in actual_chans from chan_order.
+
+    :param chan_order: list of channels in order that user wants to plot
+    :param actual_chans: The actual channel list
+    :param processing_log: the log list that keeps track of not-found channels
+    :return: chan_order from which not found channels have been removed.
+    """
+    not_found_chans = [c for c in chan_order if c not in actual_chans]
+    if not_found_chans:
+        msg = (f"No data found for the following channels: "
+               f"{', '.join(not_found_chans)}")
+        processing_log.append((msg, LogType.WARNING))
+    return [c for c in chan_order if c not in not_found_chans]
+
+
+def replace_actual_question_chans(
+        chan_order: List[str], actual_chans: List[str]) -> List[str]:
+    """
+    Remove channels ending with '?' from chan_order and replace them with the
+        corresponding channels found in actual_chans.
+
+    :param chan_order: The list that may contain channels ending with '?'
+    :param actual_chans: The actual channel list
+    :return: chan_order with channels ending with '?' replaced by the
+        corresponding actual channels.
+    """
+    question_chans = [c for c in chan_order if c.endswith('?')]
+    for qc in question_chans:
+        actual_question_chans = [c for c in actual_chans
+                                 if qc[:-1] == c[:-1]]
+        if actual_question_chans:
+            question_idx = chan_order.index(qc)
+            chan_order.remove(qc)
+            # replace a question channel with the actual channels that it
+            # represents
+            chan_order[question_idx:question_idx] = \
+                sorted(actual_question_chans)
+    return chan_order
+
+
 if __name__ == '__main__':
     create_table_of_content_file(Path('../../../documentation'))
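A minimal usage sketch (hypothetical channel names, not from the test data) of how the two helpers above can be chained to turn a user's preferred channel order into the final plotting order:

```python
from sohstationviewer.view.util.enums import LogType
from sohstationviewer.view.util.functions import (
    remove_not_found_chans, replace_actual_question_chans,
)

chan_order = ['VCO', 'VEP?', 'LCE']       # user's preferred plotting order
actual_chans = ['VEP1', 'VEP2', 'LCE']    # channels actually read from data
processing_log = []

# Expand 'VEP?' into the matching actual channels first, then drop (and
# log) anything for which no data was found.
chan_order = replace_actual_question_chans(chan_order, actual_chans)
chan_order = remove_not_found_chans(chan_order, actual_chans, processing_log)

print(chan_order)        # ['VEP1', 'VEP2', 'LCE']
print(processing_log)
# [('No data found for the following channels: VCO', LogType.WARNING)]
```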
diff --git a/tests/model/general_data/test_general_data_helper.py b/tests/model/general_data/test_general_data_helper.py
index c82dc0ce7c5a4106dbb28bab625f570fb17fb326..275405f26f4e29eed66884b2dc3e744f05af2cab 100644
--- a/tests/model/general_data/test_general_data_helper.py
+++ b/tests/model/general_data/test_general_data_helper.py
@@ -1,13 +1,36 @@
 import numpy as np
+from pathlib import Path
 from unittest import TestCase
 from unittest.mock import patch
 
 from sohstationviewer.model.general_data.general_data_helper import (
     _check_related_gaps, squash_gaps, sort_data,
     retrieve_data_time_from_data_dict, retrieve_gaps_from_data_dict,
-    combine_data, apply_convert_factor_to_data_dict
+    combine_data, apply_convert_factor_to_data_dict, read_text
 )
 
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+text_file = TEST_DATA_DIR.joinpath(
+    "Pegasus-sample/Pegasus_SVC4/logs/2020/XX/KC01/XX.KC01...D.2020.129")
+binary_file = TEST_DATA_DIR.joinpath(
+    "Pegasus-sample/Pegasus_SVC4/soh/2020/XX/KC01/VDT.D/"
+    "XX.KC01..VDT.D.2020.129")
+
+
+class TestReadText(TestCase):
+    def test_text_file(self):
+        ret = read_text(text_file)
+        expected_ret = (
+            "\n\n** STATE OF HEALTH: XX.KC01...D.2020.129"
+            "\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware")
+        self.assertEqual(ret[:100],
+                         expected_ret)
+
+    def test_binary_file(self):
+        ret = read_text(binary_file)
+        self.assertIsNone(ret)
+
 
 class TestCheckRelatedGaps(TestCase):
     # FROM test_handling_data_rearrange_data.TestCheckRelatedGaps
diff --git a/tests/model/mseed_data/test_mseed_helper.py b/tests/model/mseed_data/test_mseed_helper.py
index 938092c629f7115bd2623971a58a7aa5e7b047fe..e695b0edf71f15d78ae6f50072327464430c47d7 100644
--- a/tests/model/mseed_data/test_mseed_helper.py
+++ b/tests/model/mseed_data/test_mseed_helper.py
@@ -1,32 +1,9 @@
 from unittest import TestCase
-from pathlib import Path
 
 from sohstationviewer.model.mseed_data.mseed_helper import (
-    retrieve_nets_from_data_dict, read_text
+    retrieve_nets_from_data_dict
 )
 
-TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
-    'test_data')
-text_file = TEST_DATA_DIR.joinpath(
-    "Pegasus-sample/Pegasus_SVC4/logs/2020/XX/KC01/XX.KC01...D.2020.129")
-binary_file = TEST_DATA_DIR.joinpath(
-    "Pegasus-sample/Pegasus_SVC4/soh/2020/XX/KC01/VDT.D/"
-    "XX.KC01..VDT.D.2020.129")
-
-
-class TestReadText(TestCase):
-    def test_text_file(self):
-        ret = read_text(text_file)
-        expected_ret = (
-            "\n\n** STATE OF HEALTH: XX.KC01...D.2020.129"
-            "\n2020-05-08 22:55:45.390 UTC: I(Initializations): Firmware")
-        self.assertEqual(ret[:100], expected_ret
-                         )
-
-    def test_binary_file(self):
-        ret = read_text(binary_file)
-        self.assertIsNone(ret)
-
 
 class TestRetrieveNetsFromDataDict(TestCase):
     def setUp(self):
diff --git a/tests/model/reftek_data/__init__.py b/tests/model/reftek_data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tests/model/reftek_data/test_reftek.py b/tests/model/reftek_data/test_reftek.py
new file mode 100644
index 0000000000000000000000000000000000000000..8bb2a71749d59d30833d4c98014308fff58060df
--- /dev/null
+++ b/tests/model/reftek_data/test_reftek.py
@@ -0,0 +1,150 @@
+from unittest import TestCase
+from pathlib import Path
+
+from sohstationviewer.model.reftek_data.reftek import RT130
+from sohstationviewer.model.general_data.general_data import \
+    ProcessingDataError
+
+
+TEST_DATA_DIR = Path(__file__).resolve().parent.parent.parent.joinpath(
+    'test_data')
+reftek_data = TEST_DATA_DIR.joinpath("RT130-sample")
+reftek_gap_data = TEST_DATA_DIR.joinpath("RT130-gap")
+
+
+class TestReftek(TestCase):
+    def test_path_not_exist(self):
+        # raise exception when path not exist
+        args = {
+            'data_type': 'RT130',
+            'folder': '_',
+            'rt130_waveform_data_req': False,
+            'on_unittest': True
+        }
+        with self.assertRaises(ProcessingDataError) as context:
+            RT130(**args)
+        self.assertEqual(
+            str(context.exception),
+            "Path '_' not exist"
+        )
+
+    def test_read_soh(self):
+        args = {
+            'data_type': 'RT130',
+            'folder': reftek_data,
+            'req_soh_chans': [],
+            'rt130_waveform_data_req': False,
+            'on_unittest': True
+        }
+        expected_soh = [
+            'SOH/Data Def', 'Battery Volt', 'DAS Temp', 'Backup Volt',
+            'Disk Usage1', 'Disk Usage2', 'Dump Called/Comp', 'GPS On/Off/Err',
+            'GPS Lk/Unlk', 'Clk Phase Err']
+        obj = RT130(**args)
+        self.assertEqual(obj.found_data_streams, [9])
+        self.assertEqual(obj.keys, [('92EB', '25')])
+        self.assertEqual(
+            list(obj.stream_header_by_key_chan[('92EB', '25')].keys()),
+            [])
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', ('92EB', '25')])
+        self.assertEqual(len(obj.log_data['TEXT']), 0)
+        self.assertEqual(list(obj.log_data[('92EB', '25')].keys()), ['SOH'])
+        self.assertEqual(len(obj.log_data[('92EB', '25')]['SOH']), 1)
+        self.assertEqual(
+            obj.log_data[('92EB', '25')]['SOH'][0][:100],
+            '\nState of Health  17:150:00:00:00:000   ST: 92EB'
+            '\n150:00:00:00 REF TEK 130'
+            '\r\n150:00:00:00 CPU SOFTWARE')
+        self.assertEqual(list(obj.soh_data.keys()), [('92EB', '25')])
+        self.assertEqual(list(obj.soh_data[('92EB', '25')].keys()),
+                         expected_soh)
+
+    def test_read_waveform(self):
+        args = {
+            'data_type': 'RT130',
+            'folder': reftek_data,
+            'req_soh_chans': [],
+            'req_wf_chans': [1],
+            'rt130_waveform_data_req': True,
+            'on_unittest': True
+        }
+        expected_waveform = ['DS1-1', 'DS1-2', 'DS1-3']
+        obj = RT130(**args)
+        self.assertEqual(obj.found_data_streams, [9, 1, 1])
+        self.assertEqual(obj.keys, [('92EB', '25')])
+        self.assertEqual(
+            list(obj.stream_header_by_key_chan[('92EB', '25')].keys()),
+            expected_waveform)
+
+        self.assertEqual(list(obj.waveform_data[('92EB', '25')].keys()),
+                         expected_waveform)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', ('92EB', '25')])
+        self.assertIn('Event DS1',
+                      list(obj.soh_data[('92EB', '25')].keys()))
+
+    def test_read_mass_pos(self):
+        args = {
+            'data_type': 'RT130',
+            'folder': reftek_data,
+            'req_soh_chans': ['_'],
+            'include_mp123zne': True,
+            'rt130_waveform_data_req': False,
+            'on_unittest': True
+        }
+        expected_mass_pos = ['MassPos1', 'MassPos2', 'MassPos3']
+        obj = RT130(**args)
+        self.assertEqual(obj.found_data_streams, [9])
+        self.assertEqual(obj.keys, [('92EB', '25')])
+        self.assertEqual(
+            list(obj.stream_header_by_key_chan[('92EB', '25')].keys()),
+            expected_mass_pos)
+        self.assertEqual(list(obj.mass_pos_data[('92EB', '25')].keys()),
+                         expected_mass_pos)
+        self.assertEqual(list(obj.log_data.keys()), ['TEXT', ('92EB', '25')])
+
+    def test_gap(self):
+        expected_waveform = ['DS2-1', 'DS2-2', 'DS2-3']
+        with self.subTest("no gap_minimum set"):
+            args = {
+                'data_type': 'RT130',
+                'folder': reftek_gap_data,
+                'req_soh_chans': [],
+                'req_wf_chans': ['*'],
+                'rt130_waveform_data_req': True,
+                'on_unittest': True
+            }
+            obj = RT130(**args)
+            self.assertEqual(obj.found_data_streams, [2, 2])
+            self.assertEqual(obj.keys, [('98AD', '0')])
+            self.assertEqual(
+                list(obj.stream_header_by_key_chan[('98AD', '0')].keys()),
+                expected_waveform)
+
+            self.assertEqual(list(obj.waveform_data[('98AD', '0')].keys()),
+                             expected_waveform)
+            self.assertEqual(list(obj.log_data.keys()),
+                             ['TEXT', ('98AD', '0')])
+            self.assertEqual(obj.gaps[('98AD', '0')], [])
+
+        with self.subTest("has gap_minimum set"):
+            args = {
+                'data_type': 'RT130',
+                'folder': reftek_gap_data,
+                'req_soh_chans': [],
+                'req_wf_chans': ['*'],
+                'rt130_waveform_data_req': True,
+                'gap_minimum': 60,
+                'on_unittest': True
+            }
+            obj = RT130(**args)
+            self.assertEqual(obj.found_data_streams, [2, 2])
+            self.assertEqual(obj.keys, [('98AD', '0')])
+            self.assertEqual(
+                list(obj.stream_header_by_key_chan[('98AD', '0')].keys()),
+                expected_waveform)
+            self.assertEqual(list(obj.waveform_data[('98AD', '0')].keys()),
+                             expected_waveform)
+            self.assertEqual(list(obj.log_data.keys()),
+                             ['TEXT', ('98AD', '0')])
+            self.assertEqual(obj.gaps[('98AD', '0')],
+                             [[1648493999.64, 1648508400.64]])
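The two subtests pin down the gap rule: with no gap_minimum, the reader reports no gaps; with gap_minimum set, any jump between consecutive times larger than that many seconds becomes a [start, end] pair. A hedged illustration of that rule (hypothetical helper, not the RT130 reader's code):

```python
from typing import List, Optional

def find_gaps(times: List[float],
              gap_minimum: Optional[float]) -> List[List[float]]:
    """Report [start, end] for every jump larger than gap_minimum seconds."""
    if gap_minimum is None:
        return []
    return [[t0, t1] for t0, t1 in zip(times, times[1:])
            if t1 - t0 > gap_minimum]

# Timestamps taken from the expected gap in the test above (~4 h apart).
times = [1648493999.64, 1648508400.64]
print(find_gaps(times, None))  # []
print(find_gaps(times, 60))    # [[1648493999.64, 1648508400.64]]
```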
diff --git a/tests/test_data/RT130-gap/2022087/98AD/2/185104640_00082DC0 b/tests/test_data/RT130-gap/2022087/98AD/2/185104640_00082DC0
new file mode 100755
index 0000000000000000000000000000000000000000..c0373322def9812bb8854988b5bd83b643fee4ba
Binary files /dev/null and b/tests/test_data/RT130-gap/2022087/98AD/2/185104640_00082DC0 differ
diff --git a/tests/test_data/RT130-gap/2022087/98AD/2/230000640_0036EE80 b/tests/test_data/RT130-gap/2022087/98AD/2/230000640_0036EE80
new file mode 100755
index 0000000000000000000000000000000000000000..13d6d171ca1563428281e25b3ddd01c32e803645
Binary files /dev/null and b/tests/test_data/RT130-gap/2022087/98AD/2/230000640_0036EE80 differ
diff --git a/tests/test_model/test_reftek/test_core.py b/tests/test_model/test_reftek/test_core.py
new file mode 100644
index 0000000000000000000000000000000000000000..c485d1fb04bc005525288fc00c32feb7dd24b402
--- /dev/null
+++ b/tests/test_model/test_reftek/test_core.py
@@ -0,0 +1,102 @@
+import os
+import unittest
+from pathlib import Path
+
+import numpy
+import obspy.core
+from numpy.testing import assert_array_equal
+
+from sohstationviewer.model.reftek_data.reftek_reader.core import (
+    DiscontinuousTrace,
+    Reftek130,
+)
+from sohstationviewer.model.reftek_data.reftek_reader.header import \
+    NotRT130FileError
+
+
+class TestDiscontinuousTrace(unittest.TestCase):
+    def setUp(self) -> None:
+        data = numpy.arange(1024)
+        stub_stats = obspy.core.Stats()
+        times = numpy.arange(1024)
+        self.trace = DiscontinuousTrace(data, stub_stats, times=times)
+
+    def test_times_argument_is_stored(self):
+        self.assertTrue(hasattr(self.trace, '_times'))
+
+    def test_times_utcdatetime(self):
+        with self.assertRaises(NotImplementedError):
+            self.trace.times('utcdatetime')
+
+    def test_times_matplotlib(self):
+        with self.assertRaises(NotImplementedError):
+            self.trace.times('matplotlib')
+
+    def test_times_relative(self):
+        with self.subTest('test_relative_to_start_time'):
+            # The default start time of a trace is 0 anyhow, but we write that
+            # down explicitly for clarity.
+            self.trace.stats.starttime = obspy.core.UTCDateTime(0)
+            expected = numpy.arange(1024)
+            assert_array_equal(self.trace.times('relative'), expected)
+
+        with self.subTest('test_relative_to_given_reftime'):
+            reftime = obspy.core.UTCDateTime(0)
+            expected = numpy.arange(1024)
+            assert_array_equal(self.trace.times('relative', reftime),
+                               expected)
+
+            reftime = obspy.core.UTCDateTime(1024)
+            expected = numpy.arange(-1024, 0)
+            assert_array_equal(self.trace.times('relative', reftime),
+                               expected)
+
+            reftime = obspy.core.UTCDateTime(-1024)
+            expected = numpy.arange(1024, 2048)
+            assert_array_equal(self.trace.times('relative', reftime),
+                               expected)
+
+    def test_times_timestamp(self):
+        expected = numpy.arange(1024)
+        assert_array_equal(self.trace.times('timestamp'), expected)
+
+
+class TestReftek130FromFile(unittest.TestCase):
+    def setUp(self) -> None:
+        self.TEST_DATA_DIR = Path(os.getcwd()).joinpath('tests/test_data')
+        self.rt130_dir = self.TEST_DATA_DIR.joinpath(
+            'RT130-sample/2017149.92EB/2017150/92EB'
+        )
+
+    def test_rt130_file(self):
+        file = self.rt130_dir.joinpath('0/000000000_00000000')
+        rt130 = Reftek130.from_file(file)
+        self.assertIsInstance(rt130, Reftek130)
+
+    def test_rt130_soh_file(self):
+        file = self.rt130_dir.joinpath('0/000000000_00000000')
+        rt130 = Reftek130.from_file(file)
+        # The most common SOH packet type looks to be SH, so we check that
+        # it is present.
+        self.assertIn(b'SH', rt130._data['packet_type'])
+
+    def test_rt130_raw_data_file(self):
+        file = self.rt130_dir.joinpath('1/000000015_0036EE80')
+        rt130 = Reftek130.from_file(file)
+        assert_array_equal(
+            numpy.unique(numpy.sort(rt130._data['packet_type'])),
+            numpy.sort([b'EH', b'DT', b'ET'])
+        )
+
+    def test_non_rt130_file(self):
+        with self.subTest('test_file_exist'):
+            test_file = self.TEST_DATA_DIR.joinpath(
+                'Q330-sample/day_vols_AX08/AX08.XA..HHE.2021.186'
+            )
+            with self.assertRaises(NotRT130FileError):
+                Reftek130.from_file(test_file)
+
+        with self.subTest('test_file_does_not_exist'):
+            test_file = ''
+            with self.assertRaises(FileNotFoundError):
+                Reftek130.from_file(test_file)
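A hedged sketch of the times() semantics the suite above pins down, assuming DiscontinuousTrace stores one epoch timestamp per sample in _times: 'timestamp' returns those values unchanged, and 'relative' subtracts the reference time (the trace's start time when no reftime is given):

```python
import numpy

_times = numpy.arange(1024.0)   # per-sample epoch seconds, as in setUp

# 'timestamp': the stored times, unchanged.
print(_times[:3])               # [0. 1. 2.]

# 'relative' with reftime at epoch 1024: every sample shifts by -1024.
print((_times - 1024.0)[:3])    # [-1024. -1023. -1022.]
```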
diff --git a/tests/test_model/test_reftek/test_header.py b/tests/test_model/test_reftek/test_header.py
new file mode 100644
index 0000000000000000000000000000000000000000..24678a26a87f2ae74a295007fc36620d1b5a670d
--- /dev/null
+++ b/tests/test_model/test_reftek/test_header.py
@@ -0,0 +1,72 @@
+import unittest
+
+from sohstationviewer.model.reftek_data.reftek_reader.header import (
+    parse_rt130_time,
+    get_rt130_packet_header, NotRT130FileError,
+)
+
+
+class TestParseRT130Time(unittest.TestCase):
+    def test_time_bytes_parsed_correctly(self):
+        time_bytes = b'\x36\x01\x15\x13\x51\x35'
+        year = 15
+        result = parse_rt130_time(year, time_bytes)
+        self.assertEqual(result.julday, 360)
+        self.assertEqual(result.day, 26)
+        self.assertEqual(result.month, 12)
+        self.assertEqual(result.hour, 11)
+        self.assertEqual(result.minute, 51)
+        self.assertEqual(result.second, 35)
+        self.assertEqual(result.microsecond, 135000)
+        self.assertEqual(result.ns, 1451130695135000000)
+
+    def test_year_1900s(self):
+        time_bytes = b'\x36\x01\x15\x13\x51\x35'
+        year = 71
+        result = parse_rt130_time(year, time_bytes)
+        self.assertEqual(result.year, 1971)
+
+    def test_year_2000s(self):
+        time_bytes = b'\x36\x01\x15\x13\x51\x35'
+        year = 12
+        result = parse_rt130_time(year, time_bytes)
+        self.assertEqual(result.year, 2012)
+
+    def test_year_threshold(self):
+        with self.subTest('test_year_is_49'):
+            time_bytes = b'\x36\x01\x15\x13\x51\x35'
+            year = 49
+            result = parse_rt130_time(year, time_bytes)
+            self.assertEqual(result.year, 2049)
+        with self.subTest('test_year_is_50'):
+            time_bytes = b'\x36\x01\x15\x13\x51\x35'
+            year = 50
+            result = parse_rt130_time(year, time_bytes)
+            self.assertEqual(result.year, 1950)
+
+
+class TestGetRT130PacketHeader(unittest.TestCase):
+    def test_header_extracted_correctly(self):
+        header = b'DT\x12\x15\x98\xe1\x36\x01\x15\x13\x51\x35\x05\x12\x01\x11'
+        packet = header + b' ' * 1008
+        result = get_rt130_packet_header(packet)
+        self.assertEqual(result.packet_type, 'DT')
+        self.assertEqual(result.experiment_number, 12)
+        self.assertEqual(result.unit_id, '98E1')
+        self.assertEqual(result.time.ns, 1451130695135000000)
+        self.assertEqual(result.byte_count, 512)
+        self.assertEqual(result.packet_sequence, 111)
+
+    def test_packet_type_cannot_be_parsed(self):
+        packet_type = b'\x01\x02'
+        header = packet_type + b'\x11' * 14
+        packet = header + b' ' * 1008
+        with self.assertRaises(NotRT130FileError):
+            get_rt130_packet_header(packet)
+
+    def test_packet_type_is_not_valid(self):
+        packet_type = b'AB'
+        header = packet_type + b'\x11' * 14
+        packet = header + b' ' * 1008
+        with self.assertRaises(NotRT130FileError):
+            get_rt130_packet_header(packet)
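The fixtures above are easier to read once the packed-BCD layout is spelled out: each hex nibble of the six time bytes is one decimal digit, giving DDDHHMMSSsss, and the two-digit year pivots at 50. A minimal decoding sketch consistent with these expectations (not the project's parse_rt130_time):

```python
from obspy import UTCDateTime

def decode_bcd_time(year: int, time_bytes: bytes) -> UTCDateTime:
    digits = time_bytes.hex()   # b'\x36\x01\x15\x13\x51\x35' -> '360115135135'
    julday, hour = int(digits[0:3]), int(digits[3:5])
    minute, second = int(digits[5:7]), int(digits[7:9])
    milliseconds = int(digits[9:12])
    # Two-digit years 00-49 fall in the 2000s, 50-99 in the 1900s.
    full_year = (2000 if year < 50 else 1900) + year
    return UTCDateTime(year=full_year, julday=julday, hour=hour,
                       minute=minute, second=second) + milliseconds / 1000

print(decode_bcd_time(15, b'\x36\x01\x15\x13\x51\x35'))
# 2015-12-26T11:51:35.135000Z
```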
diff --git a/tests/test_model/test_reftek/test_packet_readers.py b/tests/test_model/test_reftek/test_packet_readers.py
new file mode 100644
index 0000000000000000000000000000000000000000..6494b416bc37b5af2d82cbaf50c3b3a750abd23b
--- /dev/null
+++ b/tests/test_model/test_reftek/test_packet_readers.py
@@ -0,0 +1,184 @@
+import unittest
+from unittest.mock import patch
+
+from sohstationviewer.model.mseed_data.record_reader_helper import Unpacker
+from sohstationviewer.model.reftek_data.reftek_reader.packet import \
+    eh_et_payload_end_in_packet
+from sohstationviewer.model.reftek_data.reftek_reader.packet_readers import (
+    decode_uncompressed, decode_compressed, read_dt_packet, read_eh_et_packet,
+    read_soh_packet,
+)
+from sohstationviewer.model.reftek_data.reftek_reader.packets import \
+    SOHExtendedHeader
+
+unpacker = Unpacker('>')
+
+
+class TestDecodeFunctions(unittest.TestCase):
+    def setUp(self) -> None:
+        self.header = b' ' * 24
+
+    def test_decode_uncompressed_format_16(self):
+        data_format = '16'
+        with self.subTest('test_positive_number'):
+            first_data_point_byte = b'\x06\x19'
+            data_filler = b' ' * 998
+            packet = self.header + first_data_point_byte + data_filler
+            actual = decode_uncompressed(packet, data_format, unpacker)
+            expected = 1561
+            self.assertEqual(actual, expected)
+        with self.subTest('test_negative_number'):
+            first_data_point_byte = b'\xf9\xe4'
+            data_filler = b' ' * 998
+            packet = self.header + first_data_point_byte + data_filler
+            actual = decode_uncompressed(packet, data_format, unpacker)
+            expected = -1564
+            self.assertEqual(actual, expected)
+
+    def test_decode_uncompressed_format_32(self):
+        data_format = '32'
+        with self.subTest('test_positive_number'):
+            first_data_point_byte = b'\x03\xc5\x4e\x9a'
+            data_filler = b' ' * 996
+            packet = self.header + first_data_point_byte + data_filler
+            actual = decode_uncompressed(packet, data_format, unpacker)
+            expected = 63262362
+            self.assertEqual(actual, expected)
+        with self.subTest('test_negative_number'):
+            first_data_point_byte = b'\xf6\xac\xba\x00'
+            data_filler = b' ' * 996
+            packet = self.header + first_data_point_byte + data_filler
+            actual = decode_uncompressed(packet, data_format, unpacker)
+            expected = -156452352
+            self.assertEqual(actual, expected)
+
+    def test_decode_uncompressed_format_33(self):
+        data_format = '33'
+        with self.subTest('test_positive_number'):
+            first_data_point_byte = b'\x03\xc5\x4e\x9a'
+            data_filler = b' ' * 996
+            packet = self.header + first_data_point_byte + data_filler
+            actual = decode_uncompressed(packet, data_format, unpacker)
+            expected = 63262362
+            self.assertEqual(actual, expected)
+        with self.subTest('test_negative_number'):
+            first_data_point_byte = b'\xf6\xac\xba\x00'
+            data_filler = b' ' * 996
+            packet = self.header + first_data_point_byte + data_filler
+            actual = decode_uncompressed(packet, data_format, unpacker)
+            expected = -156452352
+            self.assertEqual(actual, expected)
+
+    def test_decode_compressed(self):
+        data_format = 'C0'
+        filler = b' ' * 40
+        first_frame_code = b'\x00\x11\x11\x11'
+        start_data_point_byte = b'0000'
+        header = self.header + filler
+        bytes_before_data = header + first_frame_code + start_data_point_byte
+        with self.subTest('test_positive_number'):
+            end_point_byte = b'\x03\xc5\x4e\x9a'
+            data_filler = b' ' * 952
+            packet = bytes_before_data + end_point_byte + data_filler
+            actual = decode_compressed(packet, data_format, unpacker)
+            expected = 63262362
+            self.assertEqual(actual, expected)
+        with self.subTest('test_negative_number'):
+            end_point_byte = b'\xf6\xac\xba\x00'
+            data_filler = b' ' * 952
+            packet = bytes_before_data + end_point_byte + data_filler
+            actual = decode_compressed(packet, data_format, unpacker)
+            expected = -156452352
+            self.assertEqual(actual, expected)
+
+
+class TestReadDTPacket(unittest.TestCase):
+    def setUp(self) -> None:
+        self.header = b' ' * 16
+        # We only test if the correct method is used to extract the data
+        # point, so the data can be anything we want.
+        self.data = b' ' * 1000
+
+        uncompressed_patcher = patch(
+            'sohstationviewer.model.reftek_data.reftek_reader.packet_readers.'
+            'decode_uncompressed'
+        )
+        compressed_patcher = patch(
+            'sohstationviewer.model.reftek_data.reftek_reader.packet_readers.'
+            'decode_compressed'
+        )
+        self.mock_uncompressed = uncompressed_patcher.start()
+        self.mock_compressed = compressed_patcher.start()
+        self.addCleanup(uncompressed_patcher.stop)
+        self.addCleanup(compressed_patcher.stop)
+
+    def test_extended_header_is_extracted_correctly(self):
+        extended_header_bytes = b'\x01\x11\x01\x02\x05\x00\x00\xc0'
+        packet = self.header + extended_header_bytes + self.data
+        extended_header, _ = read_dt_packet(packet, unpacker)
+        self.assertEqual(extended_header.event_number, 111)
+        self.assertEqual(extended_header.data_stream_number, 1)
+        self.assertEqual(extended_header.channel_number, 2)
+        self.assertEqual(extended_header.number_of_samples, 500)
+        self.assertEqual(extended_header.flags, 0)
+        self.assertEqual(extended_header.data_format, 'C0')
+
+    def test_data_point_extracted_with_correct_method(self):
+        with self.subTest('test_uncompressed_packet'):
+            extended_header_bytes = b'\x01\x11\x01\x02\x05\x00\x00\x16'
+            packet = self.header + extended_header_bytes + self.data
+            read_dt_packet(packet, unpacker)
+            self.assertTrue(self.mock_uncompressed.called)
+            self.assertFalse(self.mock_compressed.called)
+
+        self.mock_uncompressed.reset_mock()
+        self.mock_compressed.reset_mock()
+
+        with self.subTest('test_compressed_packet'):
+            extended_header_bytes = b'\x01\x11\x01\x02\x05\x00\x00\xc0'
+            packet = self.header + extended_header_bytes + self.data
+            read_dt_packet(packet, unpacker)
+            self.assertTrue(self.mock_compressed.called)
+            self.assertFalse(self.mock_uncompressed.called)
+
+
+class TestReadEHETPacket(unittest.TestCase):
+    def setUp(self) -> None:
+        header = b' ' * 16
+        extended_header_bytes = b'\x01\x11\x01\x00\x00\x00\x00\xc0'
+        # We only care about the length of the payload (the content is dealt
+        # with somewhere else), and so it can contain dummy data.
+        payload = b' ' * 1000
+        self.packet = header + extended_header_bytes + payload
+
+    def test_extended_header_is_extracted_correctly(self):
+        extended_header, _ = read_eh_et_packet(self.packet, unpacker)
+        self.assertEqual(extended_header.event_number, 111)
+        self.assertEqual(extended_header.data_stream_number, 1)
+        self.assertEqual(extended_header.channel_number, 0)
+        self.assertEqual(extended_header.number_of_samples, 0)
+        self.assertEqual(extended_header.flags, 0)
+        self.assertEqual(extended_header.data_format, 'C0')
+
+    def test_payload_extracted_correctly(self):
+        _, payload = read_eh_et_packet(self.packet, unpacker)
+        self.assertEqual(len(payload), eh_et_payload_end_in_packet - 24)
+
+
+class TestReadSOHPacket(unittest.TestCase):
+    """
+    Test suite for packet_readers.read_soh_packet. We only test that the
+    function has the correct interface, since its intended purpose is to be
+    interface-compatible with packet_readers.read_dt_packet and
+    packet_readers.read_eh_et_packet.
+    """
+    def test_correct_interface(self):
+        packet = b' ' * 1024
+        extended_header, payload = read_soh_packet(packet, unpacker)
+        self.assertIsInstance(extended_header, SOHExtendedHeader)
+        self.assertIsInstance(payload, bytes)
+
+    def test_payload_has_correct_length(self):
+        packet = b' ' * 1024
+        extended_header, payload = read_soh_packet(packet, unpacker)
+        self.assertEqual(len(payload), 1000)
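The expected values in the uncompressed decode tests can be reproduced with plain big-endian struct unpacking: format '16' reads the first sample as a signed 16-bit integer and formats '32'/'33' as a signed 32-bit integer, starting right after the 24-byte header. A quick check (an illustration, not the reader's code):

```python
import struct

# Format '16': signed big-endian 16-bit first data point.
print(struct.unpack('>h', b'\x06\x19')[0])          # 1561
print(struct.unpack('>h', b'\xf9\xe4')[0])          # -1564

# Formats '32'/'33': signed big-endian 32-bit first data point.
print(struct.unpack('>i', b'\x03\xc5\x4e\x9a')[0])  # 63262362
print(struct.unpack('>i', b'\xf6\xac\xba\x00')[0])  # -156452352
```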
diff --git a/tests/test_model/test_reftek/test_reftek_helper.py b/tests/test_model/test_reftek/test_reftek_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..7fe0e02e523f935ac98a63b89bfcd5454960cfe6
--- /dev/null
+++ b/tests/test_model/test_reftek/test_reftek_helper.py
@@ -0,0 +1,107 @@
+import os
+import unittest
+from pathlib import Path
+from unittest.mock import patch
+
+from obspy.io.reftek.packet import PACKET_FINAL_DTYPE
+
+from sohstationviewer.model.mseed_data.record_reader_helper import Unpacker
+from sohstationviewer.model.reftek_data.reftek_reader.header import \
+    NotRT130FileError
+from sohstationviewer.model.reftek_data.reftek_reader.packet_readers import (
+    read_eh_et_packet, read_dt_packet, read_soh_packet,
+)
+from sohstationviewer.model.reftek_data.reftek_reader.packets import (
+    SOHPacket,
+    EHETPacket, DTPacket,
+)
+from sohstationviewer.model.reftek_data.reftek_reader.reftek_reader_helper \
+    import (read_rt130_file, convert_packet_to_obspy_format)
+
+unpacker = Unpacker('>')
+
+
+class TestReadRT130File(unittest.TestCase):
+    def setUp(self) -> None:
+        self.TEST_DATA_DIR = Path(os.getcwd()).joinpath('tests/test_data')
+        self.rt130_dir = self.TEST_DATA_DIR.joinpath(
+            'RT130-sample/2017149.92EB/2017150/92EB'
+        )
+
+        eh_et_patcher = patch(
+            'sohstationviewer.model.reftek_data.reftek_reader.'
+            'reftek_reader_helper.'
+            'read_eh_et_packet',
+            wraps=read_eh_et_packet
+        )
+        self.mock_read_eh_et = eh_et_patcher.start()
+        self.addCleanup(eh_et_patcher.stop)
+
+        dt_patcher = patch(
+            'sohstationviewer.model.reftek_data.reftek_reader.'
+            'reftek_reader_helper.'
+            'read_dt_packet',
+            wraps=read_dt_packet
+        )
+        self.mock_read_dt = dt_patcher.start()
+        self.addCleanup(dt_patcher.stop)
+
+        soh_patcher = patch(
+            'sohstationviewer.model.reftek_data.reftek_reader.'
+            'reftek_reader_helper.'
+            'read_soh_packet',
+            wraps=read_soh_packet
+        )
+        self.mock_read_soh = soh_patcher.start()
+        self.addCleanup(soh_patcher.stop)
+
+    def test_rt130_soh_file(self):
+        file = self.rt130_dir.joinpath('0/000000000_00000000')
+        packets = read_rt130_file(file, unpacker)
+        self.assertTrue(
+            all(isinstance(packet, SOHPacket) for packet in packets)
+        )
+        self.assertTrue(self.mock_read_soh.called)
+        self.assertFalse(self.mock_read_dt.called)
+        self.assertFalse(self.mock_read_eh_et.called)
+
+    def test_rt130_raw_data_file(self):
+        file = self.rt130_dir.joinpath('1/000000015_0036EE80')
+        packets = read_rt130_file(file, unpacker)
+        self.assertTrue(all(
+            isinstance(packet, EHETPacket) or isinstance(packet, DTPacket)
+            for packet in packets)
+        )
+        self.assertFalse(self.mock_read_soh.called)
+        self.assertTrue(self.mock_read_dt.called)
+        self.assertTrue(self.mock_read_eh_et.called)
+
+    def test_non_rt130_file(self):
+        with self.subTest('test_file_exist'):
+            file = self.TEST_DATA_DIR.joinpath(
+                'Q330-sample/day_vols_AX08/AX08.XA..HHE.2021.186'
+            )
+            with self.assertRaises(NotRT130FileError):
+                read_rt130_file(file, unpacker)
+
+        with self.subTest('test_file_does_not_exist'):
+            file = ''
+            with self.assertRaises(FileNotFoundError):
+                read_rt130_file(file, unpacker)
+
+
+class TestConvertPacketToObspyFormat(unittest.TestCase):
+    def setUp(self) -> None:
+        TEST_DATA_DIR = Path(os.getcwd()).joinpath('tests/test_data')
+        rt130_dir = TEST_DATA_DIR.joinpath(
+            'RT130-sample/2017149.92EB/2017150/92EB'
+        )
+        file = rt130_dir.joinpath('1/000000015_0036EE80')
+        self.packet = read_rt130_file(file, unpacker)[0]
+
+    def test_all_needed_fields_are_available(self):
+        converted_packet = convert_packet_to_obspy_format(
+            self.packet, unpacker
+        )
+
+        self.assertEqual(len(converted_packet), len(PACKET_FINAL_DTYPE))
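The setUp above relies on the patch(..., wraps=...) spy pattern: each patched reader function still executes for real, while the mock records whether it was called. A self-contained sketch of the same pattern with a hypothetical helper (run as a script):

```python
from unittest.mock import patch

def double(x):
    return 2 * x

# wraps=double keeps the real behavior; the mock only records the calls.
with patch('__main__.double', wraps=double) as mock_double:
    assert double(21) == 42    # the wrapped function still runs
    assert mock_double.called  # and the call was recorded on the spy
```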
diff --git a/tests/test_view/test_util_functions.py b/tests/test_view/test_util_functions.py
index bc59a41236a024290fa3540f5cf9a7106229de2e..8ad6187487f9eb673f656267f49908fc7ddfced9 100644
--- a/tests/test_view/test_util_functions.py
+++ b/tests/test_view/test_util_functions.py
@@ -9,7 +9,8 @@ from sohstationviewer.view.util.functions import (
     get_soh_messages_for_view, log_str, is_doc_file,
     create_search_results_file, create_table_of_content_file,
     check_chan_wildcards_format, check_masspos, get_total_miny_maxy,
-    extract_netcodes, get_index_from_time
+    extract_netcodes, get_index_from_time, remove_not_found_chans,
+    replace_actual_question_chans
 )
 
 from sohstationviewer.view.util.enums import LogType
@@ -501,3 +502,36 @@ class TestGetIndexFromTime(TestCase):
                 self.plotting_data['CH2'], 3, 4)
             self.assertEqual(list_idx, 1)
             self.assertEqual(section_idx, 0)
+
+
+class TestRemoveNotFoundChans(TestCase):
+    def test_remove_not_found_chans(self):
+        chan_order = ['A', 'B', 'C', 'D']
+        actual_chans = ['C', 'D', 'E', 'F']
+        processing_log = []
+        expected_new_chan_order = ['C', 'D']
+        expected_processing_log = [
+            ("No data found for the following channels: A, B",
+             LogType.WARNING)]
+
+        ret = remove_not_found_chans(chan_order, actual_chans, processing_log)
+        self.assertListEqual(ret, expected_new_chan_order)
+        self.assertEqual(processing_log, expected_processing_log)
+
+
+class TestReplaceActualQuestionChans(TestCase):
+    def test_question_chans_in_actual_chans(self):
+        chan_order = ['A', 'B', 'C?', 'D']
+        actual_chans = ['C1', 'C3', 'C2', 'D', 'E', 'F']
+        expected_new_chan_order = ['A', 'B', 'C1', 'C2', 'C3', 'D']
+
+        ret = replace_actual_question_chans(chan_order, actual_chans)
+        self.assertListEqual(ret, expected_new_chan_order)
+
+    def test_question_chans_not_in_actual_chans(self):
+        chan_order = ['A?', 'B', 'C', 'D']
+        actual_chans = ['C', 'D', 'E', 'F']
+        expected_new_chan_order = ['A?', 'B', 'C', 'D']
+
+        ret = replace_actual_question_chans(chan_order, actual_chans)
+        self.assertListEqual(ret, expected_new_chan_order)