fmtorderstr = self.setEndian(endianess)
fmtstr = fmtorderstr + "HHHHBBBBHBBIf3sBI12s12s8s"
(blkette, nextblk, Year, Day, Hour, Min, Sec, junk, Micro,
res, flags, dura, ptop, chan, res2, refamp, coup, rolloff,
noise) = inblk
flags = self.FlagStrtoInt(flags)
pack_blk = self.spack(fmtstr, blkette, nextblk, Year, Day, Hour,
Min, Sec, junk, Micro, res, flags, dura,
ptop, chan, res2, refamp, coup, rolloff,
noise)
self.infilewrite(pack_blk)
return 64
print("error writing blockette 320")
return 0
#########################################################
def blk390(self, seekval=0):
"""
        Generic Calibration Blockette 390 (28 bytes)
Returns tuple
blk
Blockette type (UWORD, 2)
Next blockette's byte offset relative to fixed section of header
(UWORD, 2)
Beginning of calibration time (BTime expanded, 10)
Reserved (UBYTE, 1)
Calibrations flags (UBYTE, 1)
Calibration duration (ULONG, 4)
Calibration signal amplitude (FLOAT, 4)
Channel with calibration input (CHAR*3)
Reserved (UBYTE, 1)
"""
self.infileseek(seekval)
fmtstr = self.fmt_order + "HH"
blk1 = self.sunpack(fmtstr, self.infileread(4))
fmtstr = self.fmt_order + "HHBBBBH"
timeblk = self.sunpack(fmtstr, self.infileread(10))
fmtstr = self.fmt_order + "BBIf3sB"
blk2 = self.sunpack(fmtstr, self.infileread(14))
# incorporate timeblk tuple into blk list
tmpblk = list(blk1)
tmpblk.append(timeblk)
tmpblk = tmpblk + list(blk2)
# apply bit mask to Ubyte
blk = self.UbytetoStr(tmpblk, 4)
return blk
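    # Example of the returned shape (field values are hypothetical): a read
    # of a blockette 390 typically yields something like
    #   [390, 0, (2015, 123, 4, 5, 6, 0, 0), 0, "00000100", 300, 1.0,
    #    b'BHZ', 0]
    # i.e. the expanded BTime is kept as a nested tuple and the calibration
    # flags byte (offset 4) has been expanded to a bit string by UbytetoStr.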
#########################################################
def Writeblk390(self, inblk, seekval=0, endianess=""):
"""
        writes Generic Calibration Blockette 390 (28 bytes)
        requires tuple inblk
        inblk=(blkette, nextblk, Year, Day, Hour, Min, Sec, junk, Micro,
               res, flags, dura, amp, chan, res2)
"""
try:
self.infileseek(seekval)
fmtorderstr = self.setEndian(endianess)
fmtstr = fmtorderstr + "HHHHBBBBHBBIf3sB"
(blkette, nextblk, Year, Day, Hour, Min, Sec, junk, Micro, res,
flags, dura, amp, chan, res2) = inblk
flags = self.FlagStrtoInt(flags)
pack_blk = self.spack(fmtstr, blkette, nextblk, Year, Day, Hour,
Min, Sec, junk, Micro, res, flags, dura,
amp, chan, res2)
self.infilewrite(pack_blk)
return 28
print("error writing blockette 390")
return 0
#########################################################
def blk395(self, seekval=0):
"""
        Calibration Abort Blockette 395 (16 bytes)
Returns tuple
blk
Blockette type (UWORD, 2)
Next blockette's byte offset relative to fixed section of header
(UWORD, 2)
End of calibration time (BTime expanded, 10)
Reserved (UBYTE, 2)
"""
self.infileseek(seekval)
fmtstr = self.fmt_order + "HHHHBBBBH2s"
        blk = self.sunpack(fmtstr, self.infileread(16))
        return blk
#########################################################
def Writeblk395(self, inblk, seekval=0, endianess=""):
"""
        writes Calibration Abort Blockette 395 (16 bytes)
        requires tuple inblk
inblk=(blkette, nextblk, Year, Day, Hour, Min, Sec, junk, Micro, res)
"""
try:
self.infileseek(seekval)
fmtorderstr = self.setEndian(endianess)
fmtstr = fmtorderstr + "HHHHBBBBH2s"
(blkette, nextblk, Year, Day, Hour, Min, Sec, junk, Micro,
res) = inblk
pack_blk = self.spack(fmtstr, blkette, nextblk, Year, Day, Hour,
Min, Sec, junk, Micro, res)
self.infilewrite(pack_blk)
return 16
print("error writing blockette 395")
return 0
#########################################################
def blk400(self, seekval=0):
"""
Beam Blockette 400 (16 bytes)
Returns tuple
blk
Blockette type (UWORD, 2)
Next blockette's byte offset relative to fixed section of header
(UWORD, 2)
Beam azimuth (degrees) (FLOAT, 4)
Beam slowness (sec/degree) (FLOAT, 4)
Beam configuration (UWORD, 2)
Reserved (UBYTE, 2)
"""
self.infileseek(seekval)
fmtstr = self.fmt_order + "HHffH2B"
        blk = self.sunpack(fmtstr, self.infileread(16))
        return blk
#########################################################
def Writeblk400(self, inblk, seekval=0, endianess=""):
"""
writes Beam Blockette 400 (16 bytes)
        requires tuple inblk
        inblk=(blkette, nextblk, baz, bslw, bconf, res0, res1)
"""
try:
self.infileseek(seekval)
fmtorderstr = self.setEndian(endianess)
fmtstr = fmtorderstr + "HHffH2B"
(blkette, nextblk, baz, bslw, bconf, res0, res1) = inblk
pack_blk = self.spack(fmtstr, blkette, nextblk, baz, bslw, bconf,
res0, res1)
self.infilewrite(pack_blk)
return 16
print("error writing blockette 400")
return 0
#########################################################
def blk405(self, seekval=0):
"""
Beam Delay Blockette 405 (6 bytes)
Returns tuple
blk
Blockette type (UWORD, 2)
Next blockette's byte offset relative to fixed section of header
(UWORD, 2)
Array of delay values (UWORD, 2)
"""
self.infileseek(seekval)
fmtstr = self.fmt_order + "HHH"
        blk = self.sunpack(fmtstr, self.infileread(6))
        return blk
#########################################################
def Writeblk405(self, inblk, seekval=0, endianess=""):
"""
writes Beam Delay Blockette 405 (6 bytes)
        requires tuple inblk
inblk=(blkette, nextblk, delay)
"""
try:
self.infileseek(seekval)
fmtorderstr = self.setEndian(endianess)
fmtstr = fmtorderstr + "HHH"
(blkette, nextblk, delay) = inblk
pack_blk = self.spack(fmtstr, blkette, nextblk, delay)
self.infilewrite(pack_blk)
return 6
print("error writing blockette 405")
return 0
#########################################################
def blk500(self, seekval=0):
"""
Timing Blockette 500 (200 bytes)
Returns tuple
blk
Blockette type (UWORD, 2)
Next blockette's byte offset relative to fixed section of header
(UWORD, 2)
VCO correction (FLOAT, 4)
Time of exception (BTime expanded, 10)
microsec (UBYTE, 1)
Reception quality (UBYTE, 1)
Exception count (ULONG, 4)
Exception type (CHAR*16)
Clock model (CHAR*32)
Clock status (CHAR*128)
"""
self.infileseek(seekval)
fmtstr = self.fmt_order + "HHf"
blk1 = self.sunpack(fmtstr, self.infileread(8))
fmtstr = self.fmt_order + "HHBBBBH"
timeblk = self.sunpack(fmtstr, self.infileread(10))
fmtstr = self.fmt_order + "BBI16s32s128s"
blk2 = self.sunpack(fmtstr, self.infileread(182))
# incorporate timeblk tuple into blk list
blk = list(blk1)
blk.append(timeblk)
blk = blk + list(blk2)
return blk
#########################################################
def Writeblk500(self, inblk, seekval=0, endianess=""):
"""
writes Timing Blockette 500 (200 bytes)
        requires tuple inblk
        inblk=(blkette, nextblk, vcocorr, Year, Day, Hour, Min, Sec, junk,
               Micro, Micro2, qual, cnt, type, mod, stat)
"""
try:
self.infileseek(seekval)
fmtorderstr = self.setEndian(endianess)
fmtstr = fmtorderstr + "HHfHHBBBBHBBI16s32s128s"
(blkette, nextblk, vcocorr, Year, Day, Hour, Min, Sec, junk,
Micro, Micro2, qual, cnt, type, mod, stat) = inblk
pack_blk = self.spack(fmtstr, blkette, nextblk, vcocorr, Year, Day,
Hour, Min, Sec, junk, Micro, Micro2, qual,
cnt, type, mod, stat)
self.infilewrite(pack_blk)
return 200
print("error writing blockette 500")
return 0
#########################################################
def blk1000(self, seekval=0):
"""
Data Only SEED Blockette 1000 (8 bytes)
Returns tuple
blk
Blockette type (UWORD, 2)
Next blockette's byte offset relative to fixed section of header
(UWORD, 2)
Encoding Format (BYTE, 1)
Word Order (UBYTE, 1)
Data Record Length (UBYTE, 1)
Reserved (UBYTE, 1)
"""
self.infileseek(seekval)
fmtstr = self.fmt_order + "HHbBBB"
        blk = self.sunpack(fmtstr, self.infileread(8))
        return blk
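    # Interpretation example (values are hypothetical): a blockette 1000
    # unpacked as (1000, 0, 10, 1, 12, 0) describes Steim-1 encoded data
    # (encoding format 10), big endian word order (1), and a data record
    # length given as a power of two, here 2**12 = 4096 bytes.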
#########################################################
def Writeblk1000(self, inblk, seekval=0, endianess=""):
"""
writes Data Only SEED Blockette 1000 (8 bytes)
        requires tuple inblk
        inblk=(blkette, nextblk, fmt, order, length, res)
"""
try:
self.infileseek(seekval)
fmtorderstr = self.setEndian(endianess)
fmtstr = fmtorderstr + "HHbBBB"
(blkette, nextblk, fmt, order, length, res) = inblk
pack_blk = self.spack(fmtstr, blkette, nextblk, fmt, order,
length, res)
self.infilewrite(pack_blk)
return 8
print("error writing blockette 1000")
return 0
#########################################################
def blk1001(self, seekval=0):
"""
Data Extension Blockette 1001 (8 bytes)
Returns tuple
blk
Blockette type (UWORD, 2)
Next blockette's byte offset relative to fixed section of header
(UWORD, 2)
Timing Quality (UBYTE, 1)
microsec (UBYTE, 1)
Reserved (UBYTE, 1)
Frame count (UBYTE, 1)
"""
self.infileseek(seekval)
fmtstr = self.fmt_order + "HHBBBB"
        blk = self.sunpack(fmtstr, self.infileread(8))
        return blk
#########################################################
def Writeblk1001(self, inblk, seekval=0, endianess=""):
"""
writes Data Extension Blockette 1001 (8 bytes)
        requires tuple inblk
        inblk=(blkette, nextblk, tqual, micro, res, fcnt)
"""
try:
self.infileseek(seekval)
fmtorderstr = self.setEndian(endianess)
fmtstr = fmtorderstr + "HHBBBB"
(blkette, nextblk, tqual, micro, res, fcnt) = inblk
pack_blk = self.spack(fmtstr, blkette, nextblk,
tqual, micro, res, fcnt)
self.infilewrite(pack_blk)
return 8
print("error writing blockette 1001")
return 0
#########################################################
def blk2000(self, seekval=0):
"""
        Variable Length Opaque Data Blockette 2000
        Returns list
        blk
Blockette type (UWORD, 2)
Next blockette's byte offset relative to fixed section of header.
Use 0 if no more blockettes will follow. (UWORD, 2)
        Total Blockette length in bytes (UWORD, 2)
Offset to Opaque Data (UWORD, 2)
Record number (ULONG, 4)
Data Word order (UBYTE, 1)
0 = little endian (VAX or 80x86 byte order).
1 = big endian (68000 or SPARC byte order).
Opaque Data flags (UBYTE, 1)
[bit 0] Opaque blockette orientation.
0 = record oriented.
1 = stream oriented.
[bit 1] Packaging bit.
0 = Blockette 2000s from multiple SEED data records with
different timetags may be packaged into a single SEED data
record. The exact original timetag in each SEED Fixed Data
Header is not required for each blockette 2000.
1= Blockette 2000s from multiple SEED data records with
differing timetags may NOT be repackaged into a single SEED
data record. Set this bit if the timetag in the SEED Fixed
Data Header is required to properly interpret the opaque data.
[bits 2-3] Opaque blockette fragmentation flags.
00 = opaque record identified by record number is completely
contained in this opaque blockette.
01 = first opaque blockette for record spanning multiple
blockettes.
11 = continuation blockette 2...N-1 of record spanning N
blockettes.
10 = final blockette for record spanning N blockettes.
[bits 4-5] File blockette information.
00 = not file oriented.
01 = first blockette of file.
10 = continuation of file.
11 = last blockette of file.
Number of Opaque Header fields (UBYTE, 1)
Opaque Data Header fields (VAR)
a Record type
b Vendor type
c Model type
d Software
e Firmware
        Opaque Data - bytes of opaque data. Total length of the opaque data
            in bytes is blockette_length - 15 -
            length(opaque_data_header_string) (Opaque)
"""
self.infileseek(seekval)
fmtstr = self.fmt_order + "HHHHIBBB"
blk = self.sunpack(fmtstr, self.infileread(15))
# decompose tmpblk[6] into bit fields, create blk
tmpblk = self.UbytetoStr(blk, 6)
blk = tmpblk # recast blk as list
        # Now decipher the Opaque Data Header fields ("~" terminated strings)
        charlist = []
        char = ""
        length_data_string = 0
        NumOpaqueHeaders = int(blk[7])
        fmtstr = self.fmt_order + "s"
        for i in range(NumOpaqueHeaders):
            tupchar = self.sunpack(fmtstr, self.infileread(1))
            # struct returns bytes for "s"; decode so we can compare with "~"
            tmpchar = tupchar[0].decode()
            if tmpchar != "~":
                char = char + tmpchar
                length_data_string += 1
        charlist.append(char)
        blk.append(charlist)
        return blk
# opaque = ""
# rdbyte = int(tmpblk[2]) - 15 - length_data_string
# fmt = "=%ds" % rdbyte
# opaque=self.sunpack(fmt, self.infileread(rdbyte))
#
# print(opaque)
#
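    # Interpretation example (hypothetical value): if the Opaque Data flags
    # byte expands to the bit string "00000011", then bit 0 (the last
    # character) is 1, meaning the opaque blockette is stream oriented, and
    # bit 1 is also 1, meaning its blockette 2000s may not be repackaged
    # into a single SEED data record; see the field descriptions in the
    # docstring above.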
#########################################################
def UbytetoStr(self, tup, offset):
"""
        converts an unsigned byte at the given offset into a bit-flag string
"""
strval = ""
# mask bit fields and build string
for i in range(8):
mask = 2**i
if tup[offset] & mask:
strval = "1" + strval
else:
strval = "0" + strval
        # build a new list with the decomposed bit string substituted at
        # the requested offset
        newlist = []
        for i in range(len(tup)):
            if i == offset:
                newlist.append(strval)
            else:
                newlist.append(tup[i])
        return newlist
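    # Example (illustrative): UbytetoStr((1000, 0, 10, 1, 12, 5), 5) returns
    # [1000, 0, 10, 1, 12, "00000101"], i.e. only the element at the given
    # offset is replaced by its bit string; bit 0 is the last character.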
#########################################################
def FlagStrtoInt(self, flagstr):
"""
converts Flag String to Integer
"""
        flagint = 0
        n = len(flagstr) - 1
        for a in flagstr:
            if int(a):
                flagint = flagint + 2**n
            n -= 1
        return flagint
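    # Example (illustrative): FlagStrtoInt("00100100") returns 36
    # (2**5 + 2**2); the string is read most significant bit first, matching
    # the bit strings produced by UbytetoStr.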
#########################################################
def Pad(self, var, num):
"""
pad header values to specified length with blank space
right justified
"""
        varlength = len(var)
        if varlength == num:
            return var
        elif varlength < num:
            pad = num - varlength
            newvar = var + " " * pad
            return newvar
        return var
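    # Example (illustrative): Pad("BHZ", 5) returns "BHZ  " (padded to five
    # characters with trailing blanks) and Pad("LOC1", 4) returns "LOC1"
    # unchanged.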
#########################################################
#########################################################
class Segy(futils):
    def __init__(self, infile):
        self.type = self.serial = self.chan = self.time = self.rate = None
        futils.__init__(self, infile)
# self.byteorder: byte order of input file.
# "big"
# "little"
# or "unknown"
self.byteorder = self.ByteOrder()
# self.fmt_order: companion to self.byteorder. Format string for
        # struct.pack
# ">" big endian
# "<" little endian
#########################################################
"""
determines if processed file is segy or unknown type
"""
# if we don't know byteorder it must not be segy
if self.byteorder == "unknown":
return 0
# otherwise it might be segy
(Sserial, Schan, Srate, Syear, Sday, Shour, Smin, Ssec) = self.idread()
# we have already tested for a valid time in self.ByteOrder, this test
# is to ensure that we were able to read a complete idhdr header
if (Sserial == Schan == Srate == Syear == Sday == Shour ==
Smin == Ssec is None):
return 0
# if we're this far it must be segy
self.type = "segy"
if Sserial < 36864:
self.serial = Sserial
else:
self.serial = self.to_hex(Sserial)
# self.time = string.join(list(map(str, (Syear, Sday, Shour, Smin,
# Ssec))), ":")
templist = list(map(str, (Syear, Sday, Shour, Smin, Ssec)))
self.time = ":".join(templist)
self.chan = Schan
self.rate = Srate
return 1
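    # Minimal usage sketch (the file name is hypothetical):
    #   sgyfile = Segy("trace.sgy")
    #   if sgyfile.isSegy():
    #       (type, serial, chan, time, rate) = sgyfile.idhdr()
    #       seconds = sgyfile.tracelength()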
#########################################################
def idhdr(self):
return self.type, self.serial, self.chan, self.time, self.rate
#########################################################
def tracelength(self):
"""
returns trace length in seconds
"""
self.infile.seek(228)
fmtstr = self.fmt_order + "L"
numsamp = struct.unpack(fmtstr, self.infile.read(4))[0]
        length = numsamp / self.rate
        return length
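    # Example (hypothetical values): 12000 samples at self.rate = 40
    # samples per second gives a trace length of 300.0 seconds.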
#########################################################
"""
Read file as if SEGY file trying to extract
channel, time block, sample rate, and serial number
"""
SH = self.infile.read(240)
# self.infile.seek(12)
fmtstr = self.fmt_order + "L"
# chan = struct.unpack(fmtstr, self.infile.read(4))[0]
# print(chan)
chan = struct.unpack(fmtstr, SH[12:16])[0]
# self.infile.seek(156)
# timeblock=self.infile.read(10)
fmtstr = self.fmt_order + "HHHHH"
# (year,day,hour,min,sec)=struct.unpack(fmtstr, timeblock)
(year, day, hour, min, sec) = struct.unpack(fmtstr, SH[156:166])
fmtstr = self.fmt_order + "L"
# samp=struct.unpack(fmtstr, self.infile.read(4))[0]
# print(samp)
samp = struct.unpack(fmtstr, SH[200:204])[0]
rate = int(1 / (samp / 1e6))
fmtstr = self.fmt_order + "H"
# serial = struct.unpack(fmtstr, self.infile.read(2))[0]
serial = struct.unpack(fmtstr, SH[224:226])[0]
            return serial, chan, rate, year, day, hour, min, sec
        except Exception:
            return None, None, None, None, None, None, None, None
#########################################################
def ByteOrder(self, seekval=156):
"""
read file as if it is segy just pulling time info
from header and determine if it makes sense unpacked
as big endian or little endian
"""
Order = "unknown"
try:
            # seek to timeblock and read
            self.infile.seek(seekval)
            timeblock = self.infile.read(10)
# assume big endian
(Year, Day, Hour, Min, Sec) =\
struct.unpack('>HHHHH', timeblock)
# test if big endian read makes sense
if (1950 <= Year <= 2050 and 1 <= Day <= 366 and
0 <= Hour <= 23 and 0 <= Min <= 59 and 0 <= Sec <= 59):
Order = "big"
self.fmt_order = ">"
else:
# try little endian read
(Year, Day, Hour, Min, Sec) =\
struct.unpack('<HHHHH', timeblock)
# test if little endian read makes sense
if (1950 <= Year <= 2050 and 1 <= Day <= 366 and
0 <= Hour <= 23 and 0 <= Min <= 59 and 0 <= Sec <= 59):
Order = "little"
                    self.fmt_order = "<"
        except Exception:
            # could not read or unpack a time block; leave Order as "unknown"
            pass
        return Order
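    # Example (illustrative): the two byte year 2015 is stored as 0x07DF by
    # a big endian writer; unpacking those bytes as '>H' yields 2015 (a
    # plausible year) while '<H' yields 57095 (rejected), so Order is set
    # to "big".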
#########################################################
def to_int(self, input):
"""
conversion routine from hex to integer
"""
HEXCHAR = ["0", "1", "2", "3", "4", "5", "6",
"7", "8", "9", "A", "B", "C", "D", "E", "F"]
hexnum = str(input)
retval = 0
input_length = len(hexnum)
for i in range(input_length):
for index in range(len(HEXCHAR)):
                if hexnum[i].upper() == HEXCHAR[index]:
                    retval = retval + index * (16**(input_length - (1 + i)))
        return retval
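    # Example (illustrative): to_int("1A") returns 26 and to_int("ff")
    # returns 255; case does not matter because each digit is upper cased
    # before the table lookup.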
#########################################################
def to_hex(self, number):
"""
conversion routine from integer to hex
"""
        hexnum = hex(number)
        return hexnum
#########################################################
#########################################################
if __name__ == "__main__":
filecnt = 0
# based on traverse routine in "python standard library", Lundh pg 34
def GetTrace(DataDir):
global filecnt
stack = []
rwError = 0
for k in range(len(DataDir)):
stack.append(DataDir[k])
files = []
file_list = {}
errcnt = 0
while stack:
directory = stack.pop()
            if not os.path.isdir(directory):
                print("\n***WARNING*** Directory \"%s\" not found.\n" %
                      directory)
                continue
for file in os.listdir(directory):
fullname = os.path.join(directory, file)
if not os.access(fullname, 6):
if os.path.isfile(fullname):
filecnt += 1
try:
newfile = Segy(fullname)
if newfile.isSegy():
(type, serial, chan, time, rate) = newfile.idhdr()
length = newfile.tracelength()
# length = "NULL"
file_list[fullname] = (type, serial, chan, time,
rate, length,
newfile.ByteOrder())
except Exception:
rwError += 1
                        try:
                            newfile = Mseed(fullname)
if newfile.isMseed():
try:
# simple test to determine if correct size
# file
filesize = newfile.filesize
blksize = newfile.blksize
(numblocks, odd_size) = divmod(
filesize, blksize)
except Exception:
rwError += 1
type = newfile.type
                                serial = newfile.FH.Stat.strip()
                                chan = newfile.FH.Chan.strip()
                                loc = newfile.FH.Loc.strip()
                                net = newfile.FH.Net.strip()
time = newfile.time
rate = newfile.rate
length = newfile.tracelength()
# length = "NULL"
file_list[fullname] = (type, serial, chan,
time, rate, length,
newfile.ByteOrder())
except Exception:
rwError += 1
# if os.path.isdir(fullname) and not os.path.islink(fullname) :
if (os.path.isdir(fullname) or (os.path.islink(fullname) and
not os.path.isfile(fullname))):
stack.append(fullname)
return file_list, rwError
if len(sys.argv) > 1:
if sys.argv[1] == "-#":
print(VERSION)
sys.exit(1)
print("Unknown arguement %s" % sys.argv[1:])
sys.exit(1)
segynum = 0
mseednum = 0
unknownnum = 0
Dirs = []
# Dirs.append("/Users/bruce/data/uniq")
(file_list, rwError) = GetTrace(Dirs)
for file in list(file_list.keys()):
        file_type = file_list[file][0]
        if file_type == "segy":
            segynum += 1
            # print("\n SEGY \n")
            # print("segy: ", file_list[file][1], file_list[file][2],
            #       file_list[file][3], file_list[file][4], file_list[file][5],
            #       file_list[file][6])
        elif file_type == "mseed":
            mseednum += 1
# print("\n MSEED \n")
# print("mseed: ", file_list[file][1], file_list[file][2],
# file_list[file][3], file_list[file][4], file_list[file][5],
# file_list[file][6])
# else :
# unknownnum += 1
# print("\n Format Not Recognized \n")
print("\nTotal Files Found = %i " % filecnt)
print("SEGY Files Processed = %i \nMSEED Files Processed = %i" %
(segynum, mseednum))
print("Total RWErrors = %i " % rwError)
print("Total Files Identified = %i" % len(file_list))