Commit 489fc2e8 by heffnercj

Fixed python2.6 bugs

parent 37fbbcfa
...@@ -139,11 +139,11 @@ class BlockFile(io.FileIO): ...@@ -139,11 +139,11 @@ class BlockFile(io.FileIO):
io.FileIO.__init__(self, fname, mode) io.FileIO.__init__(self, fname, mode)
# Work around for python 2.6 where FileIO.name raises an exception # Work around for python 2.6 where FileIO._name is not defined
try: try:
self.name self.name
except AttributeError: except AttributeError:
self.name = fname self._name = fname
self.seek(self.offset) self.seek(self.offset)
......
...@@ -148,7 +148,7 @@ class CompressionEntropyAnalyzer(object): ...@@ -148,7 +148,7 @@ class CompressionEntropyAnalyzer(object):
Returns None. Returns None.
''' '''
self.fp = common.BlockFile(fname, 'rb', offset=start, length=length) self.fp = common.BlockFile(fname, 'r', offset=start, length=length)
# Read block size must be at least as large as our analysis block size # Read block size must be at least as large as our analysis block size
if self.fp.READ_BLOCK_SIZE < self.BLOCK_SIZE: if self.fp.READ_BLOCK_SIZE < self.BLOCK_SIZE:
......
...@@ -135,7 +135,7 @@ class FileEntropy(object): ...@@ -135,7 +135,7 @@ class FileEntropy(object):
if not self.block: if not self.block:
self.block = self.DEFAULT_BLOCK_SIZE self.block = self.DEFAULT_BLOCK_SIZE
self.fd = common.BlockFile(file_name, 'rb', offset=self.start, length=self.length) self.fd = common.BlockFile(file_name, 'r', offset=self.start, length=self.length)
self.start = self.fd.offset self.start = self.fd.offset
self.fd.MAX_TRAILING_SIZE = 0 self.fd.MAX_TRAILING_SIZE = 0
if self.fd.READ_BLOCK_SIZE < self.block: if self.fd.READ_BLOCK_SIZE < self.block:
......
...@@ -428,16 +428,16 @@ class Extractor: ...@@ -428,16 +428,16 @@ class Extractor:
try: try:
# Open the target file and seek to the offset # Open the target file and seek to the offset
fdin = BlockFile(file_name, "rb", length=size) fdin = BlockFile(file_name, 'r', length=size)
fdin.seek(offset) fdin.seek(offset)
# Open the output file # Open the output file
try: try:
fdout = BlockFile(fname, "wb") fdout = BlockFile(fname, 'w')
except Exception as e: except Exception as e:
# Fall back to the default name if the requested name fails # Fall back to the default name if the requested name fails
fname = unique_file_name(default_bname, extension) fname = unique_file_name(default_bname, extension)
fdout = BlockFile(fname, "wb") fdout = BlockFile(fname, 'w')
while total_size < size: while total_size < size:
(data, dlen) = fdin.read_block() (data, dlen) = fdin.read_block()
......
...@@ -112,7 +112,7 @@ class HexDiff(object): ...@@ -112,7 +112,7 @@ class HexDiff(object):
read_block_size = common.BlockFile.READ_BLOCK_SIZE read_block_size = common.BlockFile.READ_BLOCK_SIZE
for f in files: for f in files:
fp = common.BlockFile(f, 'rb', length=size, offset=offset) fp = common.BlockFile(f, 'r', length=size, offset=offset)
fp.READ_BLOCK_SIZE = read_block_size fp.READ_BLOCK_SIZE = read_block_size
fp.MAX_TRAILING_SIZE = 0 fp.MAX_TRAILING_SIZE = 0
fps.append(fp) fps.append(fp)
......
...@@ -75,7 +75,7 @@ class FileStrings(object): ...@@ -75,7 +75,7 @@ class FileStrings(object):
# the entropy analysis, block offsets won't line up. # the entropy analysis, block offsets won't line up.
self.start -= (self.start % self.block) self.start -= (self.start % self.block)
self.fd = common.BlockFile(file_name, 'rb', length=length, offset=self.start) self.fd = common.BlockFile(file_name, 'r', length=length, offset=self.start)
# TODO: This is not optimal. We should read in larger chunks and process it into self.block chunks. # TODO: This is not optimal. We should read in larger chunks and process it into self.block chunks.
self.fd.READ_BLOCK_SIZE = self.block self.fd.READ_BLOCK_SIZE = self.block
self.fd.MAX_TRAILING_SIZE = 0 self.fd.MAX_TRAILING_SIZE = 0
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment