Skip to content

Commit 07ac092

Browse files
Author: Kazuki Suzuki Przyborowski (committed)
Commit message: Update pyarchivefile.py
Commit 07ac092 (1 parent: bc04c03)

1 file changed: 65 additions & 18 deletions

File tree

pyarchivefile.py

Lines changed: 65 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -2088,7 +2088,7 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
20882088
fjsonsize = int(HeaderOut[27], 16)
20892089
fjsoncontent = {}
20902090
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
2091-
if(len(fjsoncontent) > 0):
2091+
if(fjsonsize > 0):
20922092
try:
20932093
fjsoncontent = json.loads(base64.b64decode(fprejsoncontent).decode("UTF-8"))
20942094
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
@@ -2750,7 +2750,7 @@ def ReadFileDataWithContentToList(fp, seekstart=0, seekend=0, listonly=False, co
27502750
prefsize = int(preheaderdata[5], 16)
27512751
prefcompression = preheaderdata[12]
27522752
prefcsize = int(preheaderdata[13], 16)
2753-
prefseeknextfile = HeaderOut[25]
2753+
prefseeknextfile = preheaderdata[25]
27542754
prenewfcs = GetHeaderChecksum(
27552755
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
27562756
prefcs = preheaderdata[-2]
@@ -6960,14 +6960,23 @@ def ArchiveFileSeekToFileName(infile, fmttype="auto", seekfile=None, listonly=Fa
69606960
prefdev_minor = int(preheaderdata[24], 16)
69616961
prefdev_major = int(preheaderdata[25], 16)
69626962
prefseeknextfile = preheaderdata[26]
6963-
prefextrasize = int(preheaderdata[27], 16)
6964-
prefextrafields = int(preheaderdata[28], 16)
6965-
extrastart = 29
6963+
prefsize = int(preheaderdata[7], 16)
6964+
prefcompression = preheaderdata[14]
6965+
prefcsize = int(preheaderdata[15], 16)
6966+
prefseeknextfile = preheaderdata[26]
6967+
prefjsonsize = int(preheaderdata[27], 16)
6968+
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
6969+
if(prefjsonsize <= 0):
6970+
prefjoutfprejsoncontent = "".encode()
6971+
fp.seek(len(formatspecs['format_delimiter']), 1)
6972+
prefextrasize = int(preheaderdata[28], 16)
6973+
prefextrafields = int(preheaderdata[29], 16)
6974+
extrastart = 30
69666975
extraend = extrastart + prefextrafields
69676976
prefcs = preheaderdata[-2].lower()
69686977
prenewfcs = preheaderdata[-1].lower()
69696978
prenewfcs = GetHeaderChecksum(
6970-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
6979+
preheaderdata[:-2] + [prefjoutfprejsoncontent], preheaderdata[-4].lower(), True, formatspecs)
69716980
if(prefcs != prenewfcs and not skipchecksum):
69726981
VerbosePrintOut("File Header Checksum Error with file " +
69736982
prefname + " at offset " + str(prefhstart))
@@ -7267,14 +7276,31 @@ def ArchiveFileValidate(infile, fmttype="auto", formatspecs=__file_format_multi_
72677276
outfdev_minor = int(inheaderdata[24], 16)
72687277
outfdev_major = int(inheaderdata[25], 16)
72697278
outfseeknextfile = inheaderdata[26]
7270-
outfextrasize = int(inheaderdata[27], 16)
7271-
outfextrafields = int(inheaderdata[28], 16)
7272-
extrastart = 29
7279+
outfjsonsize = int(inheaderdata[27], 16)
7280+
outfjsoncontent = {}
7281+
outfprejsoncontent = fp.read(outfjsonsize).decode("UTF-8")
7282+
if(outfjsonsize > 0):
7283+
try:
7284+
outfjsoncontent = json.loads(base64.b64decode(outfprejsoncontent).decode("UTF-8"))
7285+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
7286+
try:
7287+
outfjsoncontent = json.loads(outfprejsoncontent.decode("UTF-8"))
7288+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
7289+
outfprejsoncontent = ""
7290+
outfjsoncontent = {}
7291+
else:
7292+
outfprejsoncontent = ""
7293+
outfjsoncontent = {}
7294+
fp.seek(len(formatspecs['format_delimiter']), 1)
7295+
outfextrasize = int(inheaderdata[28], 16)
7296+
outfextrafields = int(inheaderdata[29], 16)
7297+
extrafieldslist = []
7298+
extrastart = 30
72737299
extraend = extrastart + outfextrafields
72747300
outfcs = inheaderdata[-2].lower()
72757301
outfccs = inheaderdata[-1].lower()
72767302
infcs = GetHeaderChecksum(
7277-
inheaderdata[:-2], inheaderdata[-4].lower(), True, formatspecs)
7303+
inheaderdata[:-2] + [outfprejsoncontent.encode()], inheaderdata[-4].lower(), True, formatspecs)
72787304
if(verbose):
72797305
VerbosePrintOut(outfname)
72807306
VerbosePrintOut("Record Number " + str(il) + "; File ID " +
@@ -7576,14 +7602,19 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
75767602
prefcompression = preheaderdata[14]
75777603
prefcsize = int(preheaderdata[15], 16)
75787604
prefseeknextfile = preheaderdata[26]
7579-
prefextrasize = int(preheaderdata[27], 16)
7580-
prefextrafields = int(preheaderdata[28], 16)
7581-
extrastart = 29
7605+
prefjsonsize = int(preheaderdata[27], 16)
7606+
prefjoutfprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
7607+
if(prefjsonsize <= 0):
7608+
prefjoutfprejsoncontent = "".encode()
7609+
fp.seek(len(formatspecs['format_delimiter']), 1)
7610+
prefextrasize = int(preheaderdata[28], 16)
7611+
prefextrafields = int(preheaderdata[29], 16)
7612+
extrastart = 30
75827613
extraend = extrastart + prefextrafields
75837614
prefcs = preheaderdata[-2].lower()
75847615
prenewfcs = preheaderdata[-1].lower()
75857616
prenewfcs = GetHeaderChecksum(
7586-
preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
7617+
preheaderdata[:-2] + [prefjoutfprejsoncontent], preheaderdata[-4].lower(), True, formatspecs)
75877618
if(prefcs != prenewfcs and not skipchecksum):
75887619
VerbosePrintOut("File Header Checksum Error with file " +
75897620
prefname + " at offset " + str(prefhstart))
@@ -7673,10 +7704,26 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
76737704
outfdev_minor = int(inheaderdata[24], 16)
76747705
outfdev_major = int(inheaderdata[25], 16)
76757706
outfseeknextfile = inheaderdata[26]
7676-
outfextrasize = int(inheaderdata[27], 16)
7677-
outfextrafields = int(inheaderdata[28], 16)
7707+
outfjsonsize = int(inheaderdata[27], 16)
7708+
outfjsoncontent = {}
7709+
outfprejsoncontent = fp.read(outfjsonsize).decode("UTF-8")
7710+
if(outfjsonsize > 0):
7711+
try:
7712+
outfjsoncontent = json.loads(base64.b64decode(outfprejsoncontent).decode("UTF-8"))
7713+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
7714+
try:
7715+
outfjsoncontent = json.loads(outfprejsoncontent.decode("UTF-8"))
7716+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
7717+
outfprejsoncontent = ""
7718+
outfjsoncontent = {}
7719+
else:
7720+
outfprejsoncontent = ""
7721+
outfjsoncontent = {}
7722+
fp.seek(len(formatspecs['format_delimiter']), 1)
7723+
outfextrasize = int(inheaderdata[28], 16)
7724+
outfextrafields = int(inheaderdata[29], 16)
76787725
extrafieldslist = []
7679-
extrastart = 29
7726+
extrastart = 30
76807727
extraend = extrastart + outfextrafields
76817728
while(extrastart < extraend):
76827729
extrafieldslist.append(inheaderdata[extrastart])
@@ -7760,7 +7807,7 @@ def ArchiveFileToArray(infile, fmttype="auto", seekstart=0, seekend=0, listonly=
77607807
outfcontents.seek(0, 0)
77617808
if(not contentasfile):
77627809
outfcontents = outfcontents.read()
7763-
outlist['ffilelist'].append({'fid': realidnum, 'fidalt': fileidnum, 'fheadersize': outfheadsize, 'fhstart': outfhstart, 'fhend': outfhend, 'ftype': outftype, 'fencoding': outfencoding, 'fcencoding': outfcencoding, 'fname': outfname, 'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize, 'fatime': outfatime, 'fmtime': outfmtime, 'fctime': outfctime, 'fbtime': outfbtime, 'fmode': outfmode, 'fchmode': outfchmode, 'ftypemod': outftypemod, 'fwinattributes': outfwinattributes, 'fcompression': outfcompression, 'fcsize': outfcsize, 'fuid': outfuid, 'funame': outfuname, 'fgid': outfgid, 'fgname': outfgname, 'finode': outfinode, 'flinkcount': outflinkcount, 'fdev': outfdev, 'fminor': outfdev_minor, 'fmajor': outfdev_major, 'fseeknextfile': outfseeknextfile, 'fheaderchecksumtype': inheaderdata[-4], 'fcontentchecksumtype': inheaderdata[-3], 'fnumfields': outfnumfields + 2, 'frawheader': inheaderdata, 'fextrafields': outfextrafields, 'fextrafieldsize': outfextrasize, 'fextralist': extrafieldslist, 'fheaderchecksum': outfcs, 'fcontentchecksum': outfccs, 'fhascontents': pyhascontents, 'fcontentstart': outfcontentstart, 'fcontentend': outfcontentend, 'fcontentasfile': contentasfile, 'fcontents': outfcontents})
7810+
outlist['ffilelist'].append({'fid': realidnum, 'fidalt': fileidnum, 'fheadersize': outfheadsize, 'fhstart': outfhstart, 'fhend': outfhend, 'ftype': outftype, 'fencoding': outfencoding, 'fcencoding': outfcencoding, 'fname': outfname, 'fbasedir': outfbasedir, 'flinkname': outflinkname, 'fsize': outfsize, 'fatime': outfatime, 'fmtime': outfmtime, 'fctime': outfctime, 'fbtime': outfbtime, 'fmode': outfmode, 'fchmode': outfchmode, 'ftypemod': outftypemod, 'fwinattributes': outfwinattributes, 'fcompression': outfcompression, 'fcsize': outfcsize, 'fuid': outfuid, 'funame': outfuname, 'fgid': outfgid, 'fgname': outfgname, 'finode': outfinode, 'flinkcount': outflinkcount, 'fdev': outfdev, 'fminor': outfdev_minor, 'fmajor': outfdev_major, 'fseeknextfile': outfseeknextfile, 'fheaderchecksumtype': inheaderdata[-4], 'fcontentchecksumtype': inheaderdata[-3], 'fnumfields': outfnumfields + 2, 'frawheader': inheaderdata, 'fextrafields': outfextrafields, 'fextrafieldsize': outfextrasize, 'fextralist': extrafieldslist, 'jsondata': outfjsoncontent, 'fheaderchecksum': outfcs, 'fcontentchecksum': outfccs, 'fhascontents': pyhascontents, 'fcontentstart': outfcontentstart, 'fcontentend': outfcontentend, 'fcontentasfile': contentasfile, 'fcontents': outfcontents})
77647811
fileidnum = fileidnum + 1
77657812
realidnum = realidnum + 1
77667813
if(returnfp):

0 commit comments

Comments (0)