Use the 'with' context manager when dealing with files
commit 52e855e4a4
parent bfde7268c3
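
The change throughout this commit is the same: manual open()/close() pairs, sometimes guarded by try/finally, are replaced with the with statement, which closes the file on every exit path, including when the body raises. A minimal sketch of the equivalence, independent of the code in the diff below:

    # Explicit form: the caller has to remember to close on every exit path.
    def read_explicit(path):
        f = open(path, "rb")
        try:
            return f.read()
        finally:
            f.close()

    # Context-manager form: file objects implement __enter__/__exit__,
    # so the file is closed automatically, even if f.read() raises.
    def read_with(path):
        with open(path, "rb") as f:
            return f.read()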
@@ -164,9 +164,8 @@ def readLWFN(path, onlyHeader=False):
             elif code in [3, 5]:
                 break
             elif code == 4:
-                f = open(path, "rb")
-                data.append(f.read())
-                f.close()
+                with open(path, "rb") as f:
+                    data.append(f.read())
             elif code == 0:
                 pass # comment, ignore
             else:
@@ -179,8 +178,8 @@ def readLWFN(path, onlyHeader=False):
 
 def readPFB(path, onlyHeader=False):
     """reads a PFB font file, returns raw data"""
-    f = open(path, "rb")
     data = []
+    with open(path, "rb") as f:
         while True:
             if f.read(1) != bytechr(128):
                 raise T1Error('corrupt PFB file')
@@ -196,18 +195,15 @@ def readPFB(path, onlyHeader=False):
                 raise T1Error('bad chunk code: ' + repr(code))
             if onlyHeader:
                 break
-    f.close()
     data = bytesjoin(data)
     assertType1(data)
     return data
 
 def readOther(path):
     """reads any (font) file, returns raw data"""
-    f = open(path, "rb")
+    with open(path, "rb") as f:
         data = f.read()
-    f.close()
     assertType1(data)
-
     chunks = findEncryptedChunks(data)
     data = []
     for isEncrypted, chunk in chunks:
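
For orientation, here is a hypothetical, much simplified PFB segment reader in the spirit of readPFB above; read_pfb_segments, its use of struct, and its error messages are illustrative assumptions, not the fontTools API:

    import struct

    def read_pfb_segments(path):
        segments = []
        with open(path, "rb") as f:                  # closed on every exit path
            while True:
                if f.read(1) != b"\x80":             # each segment starts with 0x80
                    raise ValueError("corrupt PFB file")
                code = f.read(1)[0]
                if code == 3:                        # 3 marks end of file
                    break
                if code not in (1, 2):               # 1 = ASCII text, 2 = binary
                    raise ValueError("bad chunk code: %r" % code)
                (length,) = struct.unpack("<I", f.read(4))   # little-endian length
                segments.append((code, f.read(length)))
        return segments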
@@ -244,8 +240,7 @@ def writeLWFN(path, data):
 
 def writePFB(path, data):
     chunks = findEncryptedChunks(data)
-    f = open(path, "wb")
-    try:
+    with open(path, "wb") as f:
         for isEncrypted, chunk in chunks:
             if isEncrypted:
                 code = 2
@@ -255,13 +250,10 @@ def writePFB(path, data):
             f.write(longToString(len(chunk)))
             f.write(chunk)
         f.write(bytechr(128) + bytechr(3))
-    finally:
-        f.close()
 
 def writeOther(path, data, dohex=False):
     chunks = findEncryptedChunks(data)
-    f = open(path, "wb")
-    try:
+    with open(path, "wb") as f:
         hexlinelen = HEXLINELENGTH // 2
         for isEncrypted, chunk in chunks:
             if isEncrypted:
@@ -275,8 +267,6 @@ def writeOther(path, data, dohex=False):
                     chunk = chunk[hexlinelen:]
         else:
             f.write(chunk)
-    finally:
-        f.close()
 
 
 # decryption tools
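
The write side follows the same shape: the with block takes over what the removed try/finally: f.close() did. A hedged counterpart to the reader sketch above; write_pfb_segments is an assumed name, not the writePFB function itself:

    import struct

    def write_pfb_segments(path, segments):
        with open(path, "wb") as f:                  # replaces try/finally: f.close()
            for code, payload in segments:
                f.write(b"\x80" + bytes([code]))     # segment marker and type
                f.write(struct.pack("<I", len(payload)))
                f.write(payload)
            f.write(b"\x80\x03")                     # end-of-file marker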
@@ -293,11 +293,11 @@ def ttCompile(input, output, options):
 def guessFileType(fileName):
     base, ext = os.path.splitext(fileName)
     try:
-        f = open(fileName, "rb")
+        with open(fileName, "rb") as f:
+            header = f.read(256)
     except IOError:
         return None
-    header = f.read(256)
-    f.close()
+
     if header.startswith(b'\xef\xbb\xbf<?xml'):
         header = header.lstrip(b'\xef\xbb\xbf')
     cr, tp = getMacCreatorAndType(fileName)
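
Note that the read moves inside the with block: the file is closed as soon as the block exits, so f.read(256) can no longer follow the except clause. A minimal sketch of the resulting open-inside-try pattern; sniff_header is a hypothetical name:

    def sniff_header(fileName, size=256):
        try:
            with open(fileName, "rb") as f:
                return f.read(size)      # must happen while the block is active
        except IOError:
            return None                  # unreadable or missing file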
@@ -18,8 +18,11 @@ class _UnicodeCustom(object):
 
     def __init__(self, f):
         if isinstance(f, basestring):
-            f = open(f)
-        self.codes = _makeunicodes(f)
+            with open(f) as fd:
+                codes = _makeunicodes(fd)
+        else:
+            codes = _makeunicodes(f)
+        self.codes = codes
 
     def __getitem__(self, charCode):
         try:
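
This hunk also shows the "close only what you open" rule: the argument may be a path or an already-open file object, and only the path case is wrapped in with, leaving caller-owned file objects alone. A small sketch under that assumption, with load_codes and parse as illustrative placeholders:

    def load_codes(source, parse):
        if isinstance(source, str):      # basestring in the Python 2 original
            with open(source) as fd:     # we opened it, so we close it
                return parse(fd)
        return parse(source)             # caller-owned object: leave it open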
@@ -74,7 +74,7 @@ def main(args):
     if not files:
         usage()
 
-    report = open("report.txt", "a+")
+    with open("report.txt", "a+") as report:
         options = ttx.Options(rawOptions, len(files))
         for ttFile in files:
             try:
@@ -90,7 +90,6 @@ def main(args):
                     report.write(" \"%s\"\n" % ttFile)
                     traceback.print_exc(file=report)
                     report.write("-------------------------------------------------------------\n")
-    report.close()
 
 
 main(sys.argv[1:])
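
Here the entire processing loop is wrapped in the with block, so the report file stays open for the whole run and is still closed if the loop is interrupted. A rough sketch of that shape; process_all and handle are assumed names, not the script's API:

    import traceback

    def process_all(paths, handle):
        with open("report.txt", "a+") as report:   # append mode, as in the diff
            for path in paths:
                try:
                    handle(path)
                except Exception:
                    report.write("failed: %s\n" % path)
                    traceback.print_exc(file=report)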
@@ -235,12 +235,10 @@ def test_unicodes(tmpdir):
     new.read(testDocPath)
     new.write(testDocPath2)
     # compare the file contents
-    f1 = open(testDocPath, 'r', encoding='utf-8')
+    with open(testDocPath, 'r', encoding='utf-8') as f1:
         t1 = f1.read()
-    f1.close()
-    f2 = open(testDocPath2, 'r', encoding='utf-8')
+    with open(testDocPath2, 'r', encoding='utf-8') as f2:
         t2 = f2.read()
-    f2.close()
     assert t1 == t2
     # check the unicode values read from the document
     assert new.instances[0].glyphs['arrow']['unicodes'] == [100,200,300]
@@ -335,12 +333,10 @@ def test_localisedNames(tmpdir):
     new = DesignSpaceDocument()
     new.read(testDocPath)
     new.write(testDocPath2)
-    f1 = open(testDocPath, 'r', encoding='utf-8')
+    with open(testDocPath, 'r', encoding='utf-8') as f1:
         t1 = f1.read()
-    f1.close()
-    f2 = open(testDocPath2, 'r', encoding='utf-8')
+    with open(testDocPath2, 'r', encoding='utf-8') as f2:
         t2 = f2.read()
-    f2.close()
     assert t1 == t2
 
 
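
Both tests read the two documents back with consecutive with blocks; a single with statement managing both files would work equally well. A sketch, with files_equal as an illustrative helper:

    def files_equal(path1, path2):
        with open(path1, 'r', encoding='utf-8') as f1, \
             open(path2, 'r', encoding='utf-8') as f2:
            return f1.read() == f2.read()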
@@ -759,14 +755,12 @@ def _addUnwrappedCondition(path):
     # only for testing, so we can make an invalid designspace file
     # older designspace files may have conditions that are not wrapped in a conditionset
     # These can be read into a new conditionset.
-    f = open(path, 'r', encoding='utf-8')
+    with open(path, 'r', encoding='utf-8') as f:
         d = f.read()
     print(d)
-    f.close()
     d = d.replace('<rule name="named.rule.1">', '<rule name="named.rule.1">\n\t<condition maximum="22" minimum="33" name="axisName_a" />')
-    f = open(path, 'w', encoding='utf-8')
+    with open(path, 'w', encoding='utf-8') as f:
         f.write(d)
-    f.close()
 
 def test_documentLib(tmpdir):
     # roundtrip test of the document lib with some nested data
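
Read-modify-write of the same path needs two separate with blocks, as above: the first closes the read handle before the truncating 'w' open reuses the path. A sketch of that pattern, with patch_file as a hypothetical name:

    def patch_file(path, old, new):
        with open(path, 'r', encoding='utf-8') as f:
            text = f.read()
        with open(path, 'w', encoding='utf-8') as f:
            f.write(text.replace(old, new))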
|
Loading…
x
Reference in New Issue
Block a user