Some (more) Python3 fixes #3

Draft: wants to merge 6 commits into base: main
4 changes: 2 additions & 2 deletions bin/geometry_json_to_obj.py
@@ -99,8 +99,8 @@ def write_obj(mesh, outf_name, use_color):
t3 += 1
tmpf.write("f %d %d %d\n" % (t1, t2, t3))

-with file(outf_name, 'wb') as outf:
-    outf.write(tmpf.getvalue())
+with open(outf_name, 'wb') as outf:
+    outf.write(tmpf.getvalue().encode('utf-8'))


def main():
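For context on this hunk, a minimal standalone sketch (not part of the PR; the buffer and file name are made up) of why the `.encode('utf-8')` is needed: in Python 3 a file opened in `'wb'` mode only accepts `bytes`, while `io.StringIO.getvalue()` returns `str`.

```python
import io

buf = io.StringIO()
buf.write("f 1 2 3\n")  # OBJ-style face line accumulated as text

with open("example.obj", "wb") as outf:         # binary mode: write() expects bytes
    # outf.write(buf.getvalue())                # would raise TypeError: a bytes-like object is required
    outf.write(buf.getvalue().encode("utf-8"))  # encode str -> bytes first
```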
2 changes: 1 addition & 1 deletion bin/tilt_to_strokes_dae.py
@@ -130,7 +130,7 @@ def make_id(self, prefix='ID'):
def write(self, filename):
header = '<?xml version="1.0" encoding="UTF-8"?>\n'
_indent(self.root)
-with file(filename, 'wb') as outf:
+with open(filename, 'wb') as outf:
outf.write(header)
self.tree.write(outf)

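One detail worth double-checking in this hunk, sketched below under the assumption that `header` is still a plain `str` (the tree and file name here are stand-ins, not repo code): a `str` written to a handle opened in `'wb'` mode raises `TypeError` in Python 3, whereas `ElementTree.write` emits bytes and is fine with a binary handle.

```python
import xml.etree.ElementTree as ET

root = ET.Element("COLLADA")   # stand-in element; the real tree comes from the exporter
tree = ET.ElementTree(root)
header = '<?xml version="1.0" encoding="UTF-8"?>\n'

with open("strokes.dae", "wb") as outf:
    outf.write(header.encode("utf-8"))  # encode first; outf.write(header) would raise TypeError
    tree.write(outf)                    # ElementTree writes bytes by default
```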
9 changes: 5 additions & 4 deletions openbrush/export.py
@@ -24,7 +24,7 @@ class TiltBrushMesh"""
from itertools import zip_longest
from uuid import UUID

-SINGLE_SIDED_FLAT_BRUSH = set([
+SINGLE_SIDED_FLAT_BRUSH = {
UUID("cb92b597-94ca-4255-b017-0e3f42f12f9e"), # Fire
UUID("cf019139-d41c-4eb0-a1d0-5cf54b0a42f3"), # Highlighter
UUID("e8ef32b1-baa8-460a-9c2c-9cf8506794f5"), # Hypercolor
@@ -33,7 +33,7 @@ class TiltBrushMesh"""
UUID("ad1ad437-76e2-450d-a23a-e17f8310b960"), # Rainbow
UUID("44bb800a-fbc3-4592-8426-94ecb05ddec3"), # Streamers
UUID("d229d335-c334-495a-a801-660ac8a87360"), # Velvet Ink
-])
+}


def _grouper(n, iterable, fillvalue=None):
@@ -44,7 +44,8 @@ def _grouper(n, iterable, fillvalue=None):

def iter_meshes(filename):
"""Given a Tilt Brush .json export, yields TiltBrushMesh instances."""
-obj = json.load(file(filename, 'rb'))
+with open(filename, 'rb') as f:
+    obj = json.load(f)
lookup = obj['brushes']
for dct in lookup:
dct['guid'] = UUID(dct['guid'])
@@ -165,7 +166,7 @@ def collapse_verts(self, ignore=None):
Put triangle indices into a canonical order, with lowest index first.
*ignore* is a list of attribute names to ignore when comparing."""
# Convert from SOA to AOS
-compare = set(('n', 'uv0', 'uv1', 'c', 't'))
+compare = {'n', 'uv0', 'uv1', 'c', 't'}
if ignore is not None:
compare -= set(ignore)
compare = sorted(compare)
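As a quick aside on the two patterns in this file, a standalone sketch (not repo code; `load_export` is a made-up name): a set literal builds the same set as `set([...])` without the throwaway list, and wrapping `json.load` in `with open(...)` makes the handle's lifetime explicit, which also avoids the `ResourceWarning` Python 3 can emit for never-closed files.

```python
import json
from uuid import UUID

# Equivalent constructions; the literal skips building an intermediate list.
assert {UUID(int=1), UUID(int=2)} == set([UUID(int=1), UUID(int=2)])

def load_export(filename):
    # json.load accepts a binary file object on Python 3.6+; the with-block
    # guarantees the handle is closed even if parsing raises.
    with open(filename, 'rb') as f:
        return json.load(f)
```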
26 changes: 13 additions & 13 deletions openbrush/tilt.py
@@ -20,7 +20,7 @@
import os
import struct
import uuid
-from io import StringIO
+from io import BytesIO

__all__ = ('Tilt', 'Sketch', 'Stroke', 'ControlPoint',
'BadTilt', 'BadMetadata', 'MissingKey')
@@ -37,7 +37,7 @@
}
STROKE_EXTENSION_BY_NAME = dict(
(info[0], (bit, info[1]))
-for (bit, info) in STROKE_EXTENSION_BITS.items()
+for (bit, info) in list(STROKE_EXTENSION_BITS.items())
if bit != 'unknown'
)

@@ -232,7 +232,7 @@ def write_sketch(self):
@contextlib.contextmanager
def subfile_reader(self, subfile):
if os.path.isdir(self.filename):
-with file(os.path.join(self.filename, subfile), 'rb') as inf:
+with open(os.path.join(self.filename, subfile), 'rb') as inf:
yield inf
else:
from zipfile import ZipFile
@@ -244,7 +244,7 @@ def subfile_reader(self, subfile):
def subfile_writer(self, subfile):
# Kind of a large hammer, but it works
if os.path.isdir(self.filename):
-with file(os.path.join(self.filename, subfile), 'wb') as outf:
+with open(os.path.join(self.filename, subfile), 'wb') as outf:
yield outf
else:
with Tilt.as_directory(self.filename) as tilt2:
@@ -264,7 +264,7 @@ def mutable_metadata(self):
# Copy into self.metadata, preserving topmost reference
for k in list(self.metadata.keys()):
del self.metadata[k]
-for k, v in mutable_dct.items():
+for k, v in list(mutable_dct.items()):
self.metadata[k] = copy.deepcopy(v)

new_contents = json.dumps(
@@ -360,12 +360,12 @@ def __init__(self, source):
self._parse(binfile(source))
else:
self.filename = source
-with file(source, 'rb') as inf:
+with open(source, 'rb') as inf:
self._parse(binfile(inf))

def write(self, destination):
"""destination is either a file name, a file-like instance, or a Tilt instance."""
-tmpf = StringIO()
+tmpf = BytesIO()
self._write(binfile(tmpf))
data = tmpf.getvalue()

@@ -375,7 +375,7 @@ def write(self, destination):
elif hasattr(destination, 'write'):
destination.write(data)
else:
-with file(destination, 'wb') as outf:
+with open(destination, 'wb') as outf:
outf.write(data)

def _parse(self, b):
@@ -488,7 +488,7 @@ def _parse(self, b):
@memoized_property
def controlpoints(self):
(cp_ext_reader, num_cp, raw_data) = self.__dict__.pop('_controlpoints')
-b = binfile(StringIO(raw_data))
+b = binfile(BytesIO(raw_data))
return [ControlPoint.from_file(b, cp_ext_reader) for i in range(num_cp)]

def has_stroke_extension(self, name):
@@ -514,7 +514,7 @@ def set_stroke_extension(self, name, value):
else:
# Convert from idx->value to name->value
name_to_value = dict((name, self.extension[idx])
-for (name, idx) in self.stroke_ext_lookup.items())
+for (name, idx) in list(self.stroke_ext_lookup.items()))
name_to_value[name] = value

bit, exttype = STROKE_EXTENSION_BY_NAME[name]
@@ -524,7 +524,7 @@

# Convert back to idx->value
self.extension = [None] * len(self.stroke_ext_lookup)
-for (name, idx) in self.stroke_ext_lookup.items():
+for (name, idx) in list(self.stroke_ext_lookup.items()):
self.extension[idx] = name_to_value[name]

def delete_stroke_extension(self, name):
@@ -534,7 +534,7 @@ def delete_stroke_extension(self, name):

# Convert from idx->value to name->value
name_to_value = dict((name, self.extension[idx])
-for (name, idx) in self.stroke_ext_lookup.items())
+for (name, idx) in list(self.stroke_ext_lookup.items()))
del name_to_value[name]

bit, exttype = STROKE_EXTENSION_BY_NAME[name]
@@ -544,7 +544,7 @@

# Convert back to idx->value
self.extension = [None] * len(self.stroke_ext_lookup)
-for (name, idx) in self.stroke_ext_lookup.items():
+for (name, idx) in list(self.stroke_ext_lookup.items()):
self.extension[idx] = name_to_value[name]

def has_cp_extension(self, name):
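For context on the StringIO to BytesIO swap in this file, a standalone sketch (not repo code): the stroke data in a .tilt file is packed and unpacked with `struct`, which produces and consumes `bytes` in Python 3, so the in-memory buffer has to be `io.BytesIO`. The `list(... .items())` wrappers are only strictly needed where the dict is mutated while it is being iterated; elsewhere they are harmless.

```python
import struct
from io import BytesIO

buf = BytesIO()
buf.write(struct.pack("<If", 42, 1.5))   # struct.pack returns bytes; a StringIO would raise TypeError here

reader = BytesIO(buf.getvalue())
count, value = struct.unpack("<If", reader.read(8))
print(count, value)                      # -> 42 1.5
```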
24 changes: 13 additions & 11 deletions openbrush/unpack.py
@@ -18,7 +18,8 @@
import os
import struct
import zipfile
-from io import StringIO
+from io import BytesIO
+

__all__ = ('ConversionError', 'convert_zip_to_dir', 'convert_dir_to_zip')

@@ -68,7 +69,7 @@ def _read_and_check_header(inf):
except struct.error as e:
raise ConversionError("Unexpected header error: %s" % (e,))

-if sentinel != 'tilT':
+if sentinel != b'tilT':
raise ConversionError("Sentinel looks weird: %r" % sentinel)

more = headerSize - len(base_bytes)
@@ -80,7 +81,7 @@
raise ConversionError("Bad header size (claim %s, actual %s)" % (more, len(more_bytes)))

zip_sentinel = inf.read(4)
-if zip_sentinel != '' and zip_sentinel != 'PK\x03\x04':
+if zip_sentinel != b'' and zip_sentinel != b'PK\x03\x04':
raise ConversionError("Don't see zip sentinel after header: %r" % (zip_sentinel,))

if headerVersion != 1:
@@ -90,7 +91,7 @@

def convert_zip_to_dir(in_name):
"""Returns True if compression was used"""
-with file(in_name, 'rb') as inf:
+with open(in_name, 'rb') as inf:
header_bytes = _read_and_check_header(inf)

compression = False
@@ -106,7 +107,7 @@
if member.compress_size != member.file_size:
compression = True
zf.extract(member, out_name)
-with file(os.path.join(out_name, 'header.bin'), 'wb') as outf:
+with open(os.path.join(out_name, 'header.bin'), 'wb') as outf:
outf.write(header_bytes)

tmp = in_name + '._prev'
@@ -136,7 +137,7 @@ def by_standard_order(filename):
# Make sure metadata.json looks like valid utf-8 (rather than latin-1
# or something else that will cause mojibake)
try:
-with file(os.path.join(in_name, 'metadata.json')) as inf:
+with open(os.path.join(in_name, 'metadata.json')) as inf:
import json
json.load(inf)
except IOError as e:
@@ -150,26 +151,27 @@ def by_standard_order(filename):
try:
header_bytes = None

-zipf = StringIO()
+zipf = BytesIO()
with zipfile.ZipFile(zipf, 'a', compression, False) as zf:
for (r, ds, fs) in os.walk(in_name):
fs.sort(key=by_standard_order)
for f in fs:
fullf = os.path.join(r, f)
if f == 'header.bin':
-header_bytes = file(fullf).read()
+with open(fullf, 'rb') as fullf_fp:
+    header_bytes = fullf_fp.read()
continue
arcname = fullf[len(in_name) + 1:]
zf.write(fullf, arcname, compression)

if header_bytes is None:
print("Missing header; using default")
-header_bytes = struct.pack(HEADER_V1_FMT, 'tilT', struct.calcsize(HEADER_V1_FMT), 1, 0, 0)
+header_bytes = struct.pack(HEADER_V1_FMT, b'tilT', struct.calcsize(HEADER_V1_FMT), 1, 0, 0)

-if not _read_and_check_header(StringIO(header_bytes)):
+if not _read_and_check_header(BytesIO(header_bytes)):
raise ConversionError("Invalid header.bin")

-with file(out_name, 'wb') as outf:
+with open(out_name, 'wb') as outf:
outf.write(header_bytes)
outf.write(zipf.getvalue())

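For context on the `b'tilT'` changes, a standalone sketch (the format string below is an assumed layout for illustration only, not the repo's actual `HEADER_V1_FMT`): in Python 3, `struct.pack` and `struct.unpack` work in `bytes` for the `s` format code, so both the packed sentinel and the value it is compared against must be bytes literals.

```python
import struct

# Assumed layout for illustration only: 4-byte sentinel, header size, version, two reserved fields.
HEADER_V1_FMT = "<4sHHII"

header = struct.pack(HEADER_V1_FMT, b"tilT", struct.calcsize(HEADER_V1_FMT), 1, 0, 0)
sentinel, size, version, _, _ = struct.unpack(HEADER_V1_FMT, header)

assert sentinel == b"tilT"   # bytes literal; sentinel == 'tilT' is always False in Python 3
```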