diff --git a/.editorconfig b/.editorconfig index d47791ee26..7c511996a1 100644 --- a/.editorconfig +++ b/.editorconfig @@ -7,7 +7,9 @@ root = true indent_style = space indent_size = 2 -# Based on Google's C# style guide +[*.py] +indent_size = 4 + [*.cs] indent_size = 4 diff --git a/.flake8 b/.flake8 index 8eef0b386d..654bee39f3 100644 --- a/.flake8 +++ b/.flake8 @@ -1,2 +1,2 @@ [flake8] -extend-ignore = E111, E114, E501, E722, E121 +extend-ignore = E111, E114, E501, E722, E121, E203, W503 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3351624af0..1da8b2c741 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,13 +4,19 @@ repos: rev: v1.17.0 hooks: - id: yamllint - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.0.0 + - repo: https://github.com/psf/black + rev: 20.8b1 + hooks: + - id: black + files: ^Support/ + language_version: python3 + - repo: https://github.com/pycqa/flake8 + rev: 3.9.0 hooks: - id: flake8 files: ^Support/ - repo: https://github.com/PyCQA/pylint.git - rev: pylint-2.6.0 + rev: pylint-2.7.4 hooks: - id: pylint name: pylint diff --git a/.pylintrc b/.pylintrc index a91a6e262c..89f4c4c3c2 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,2 +1,6 @@ [MESSAGES CONTROL] disable=bad-indentation,missing-class-docstring,missing-module-docstring,missing-function-docstring,invalid-name,fixme,line-too-long,duplicate-code + +extension-pkg-whitelist=math,zlib,struct +# Temporary, until https://github.com/PyCQA/pylint/issues/4297 is resolved +generated-members=struct.* diff --git a/Support/Python/tbdata/brush_lookup.py b/Support/Python/tbdata/brush_lookup.py index 60fe724889..abf688b934 100644 --- a/Support/Python/tbdata/brush_lookup.py +++ b/Support/Python/tbdata/brush_lookup.py @@ -18,61 +18,65 @@ from typing import cast, Dict, Iterator, List, Tuple, NewType from collections import defaultdict -Guid = NewType('Guid', str) +Guid = NewType("Guid", str) -class BrushLookup(): - """Helper for doing name <-> guid conversions for brushes.""" +class BrushLookup: + """Helper for doing name <-> guid conversions for brushes.""" - def iter_standard_brush_guids(self) -> Iterator[Guid]: - """Yields all the standard (non-experimental) brush guids""" - with open(os.path.join(self.dir, 'Assets/Manifest.asset')) as inf: - data = inf.read().replace('\r', '') - brush_chunk = re.search(r'Brushes:\n( -.*\n)*', data).group(0) - for match in re.finditer(r'guid: ([0-9a-f]{32})', brush_chunk): - yield cast(Guid, match.group(1)) + def iter_standard_brush_guids(self) -> Iterator[Guid]: + """Yields all the standard (non-experimental) brush guids""" + with open(os.path.join(self.dir, "Assets/Manifest.asset")) as inf: + data = inf.read().replace("\r", "") + brush_chunk = re.search(r"Brushes:\n( -.*\n)*", data).group(0) + for match in re.finditer(r"guid: ([0-9a-f]{32})", brush_chunk): + yield cast(Guid, match.group(1)) - @staticmethod - def iter_brush_guid_and_name(tilt_brush_dir: str) -> Iterator[Tuple[Guid, str]]: - """Yields (guid, name) tuples.""" - for brush_dir in ("Assets/Resources/Brushes", "Assets/Resources/X/Brushes"): - for r, _, fs in os.walk(os.path.join(tilt_brush_dir, brush_dir)): - for f in fs: - if f.lower().endswith('.asset'): - fullf = os.path.join(r, f) - with open(fullf) as inf: - data = inf.read() - guid = cast(Guid, re.search('m_storage: (.*)$', data, re.M).group(1)) - # name = re.search('m_Name: (.*)$', data, re.M).group(1) - name = f[:-6] - yield guid, name + @staticmethod + def iter_brush_guid_and_name(tilt_brush_dir: str) -> 
Iterator[Tuple[Guid, str]]: + """Yields (guid, name) tuples.""" + for brush_dir in ("Assets/Resources/Brushes", "Assets/Resources/X/Brushes"): + for r, _, fs in os.walk(os.path.join(tilt_brush_dir, brush_dir)): + for f in fs: + if f.lower().endswith(".asset"): + fullf = os.path.join(r, f) + with open(fullf) as inf: + data = inf.read() + guid = cast( + Guid, re.search("m_storage: (.*)$", data, re.M).group(1) + ) + # name = re.search('m_Name: (.*)$', data, re.M).group(1) + name = f[:-6] + yield guid, name - _instances: Dict[str, 'BrushLookup'] = {} + _instances: Dict[str, "BrushLookup"] = {} - @classmethod - def get(cls, tilt_brush_dir=None) -> 'BrushLookup': - if tilt_brush_dir is None: - tilt_brush_dir = os.path.normpath(os.path.join(os.path.abspath(__file__), "../../../..")) + @classmethod + def get(cls, tilt_brush_dir=None) -> "BrushLookup": + if tilt_brush_dir is None: + tilt_brush_dir = os.path.normpath( + os.path.join(os.path.abspath(__file__), "../../../..") + ) - try: - return cls._instances[tilt_brush_dir] - except KeyError: - pass - val = cls._instances[tilt_brush_dir] = BrushLookup(tilt_brush_dir) - return val + try: + return cls._instances[tilt_brush_dir] + except KeyError: + pass + val = cls._instances[tilt_brush_dir] = BrushLookup(tilt_brush_dir) + return val - def __init__(self, tilt_brush_dir: str): - self.dir = tilt_brush_dir - self.initialized = True - self.guid_to_name = dict(self.iter_brush_guid_and_name(self.dir)) - self.standard_brushes = set(self.iter_standard_brush_guids()) - name_to_guids: Dict[str, List[Guid]] = defaultdict(list) - for guid, name in self.guid_to_name.items(): - name_to_guids[name].append(guid) - self.name_to_guids = dict(name_to_guids) + def __init__(self, tilt_brush_dir: str): + self.dir = tilt_brush_dir + self.initialized = True + self.guid_to_name = dict(self.iter_brush_guid_and_name(self.dir)) + self.standard_brushes = set(self.iter_standard_brush_guids()) + name_to_guids: Dict[str, List[Guid]] = defaultdict(list) + for guid, name in self.guid_to_name.items(): + name_to_guids[name].append(guid) + self.name_to_guids = dict(name_to_guids) - def get_unique_guid(self, name: str) -> Guid: - lst = self.name_to_guids[name] - if len(lst) == 1: - return lst[0] - raise LookupError("%s refers to multiple brushes" % name) + def get_unique_guid(self, name: str) -> Guid: + lst = self.name_to_guids[name] + if len(lst) == 1: + return lst[0] + raise LookupError("%s refers to multiple brushes" % name) diff --git a/Support/Python/tbdata/bvh.py b/Support/Python/tbdata/bvh.py index 782f7bf2ea..308e5b6451 100644 --- a/Support/Python/tbdata/bvh.py +++ b/Support/Python/tbdata/bvh.py @@ -22,42 +22,42 @@ from io import StringIO try: - import rtree + import rtree except ImportError: - print("You need to install rtree (https://pypi.org/project/Rtree/).") - sys.exit(1) + print("You need to install rtree (https://pypi.org/project/Rtree/).") + sys.exit(1) # --------------------------------------------------------------------------- # BBox # --------------------------------------------------------------------------- + class BBox(tuple): - @staticmethod - def union(lhs, rhs): - lmin, lmax = lhs - rmin, rmax = rhs - nmin = (min(lmin[0], rmin[0]), - min(lmin[1], rmin[1]), - min(lmin[2], rmin[2])) - nmax = (max(lmax[0], rmax[0]), - max(lmax[1], rmax[1]), - max(lmax[2], rmax[2])) - for i in range(3): - assert nmax[i] > nmin[i] - return BBox((nmin, nmax)) - - def half_width(self): - bmin, bmax = self - return ((bmax[0] - bmin[0]) * .5, - (bmax[1] - bmin[1]) * .5, - (bmax[2] - 
bmin[2]) * .5) - - def surface_area(self): - dx, dy, dz = self.half_width() - # * 4 because these are half-widths; * 2 because there are 2 faces per axis - ret = (dx * dy + dy * dz + dz * dx) * 8 - return ret + @staticmethod + def union(lhs, rhs): + lmin, lmax = lhs + rmin, rmax = rhs + nmin = (min(lmin[0], rmin[0]), min(lmin[1], rmin[1]), min(lmin[2], rmin[2])) + nmax = (max(lmax[0], rmax[0]), max(lmax[1], rmax[1]), max(lmax[2], rmax[2])) + for i in range(3): + assert nmax[i] > nmin[i] + return BBox((nmin, nmax)) + + def half_width(self): + bmin, bmax = self + return ( + (bmax[0] - bmin[0]) * 0.5, + (bmax[1] - bmin[1]) * 0.5, + (bmax[2] - bmin[2]) * 0.5, + ) + + def surface_area(self): + dx, dy, dz = self.half_width() + # * 4 because these are half-widths; * 2 because there are 2 faces per axis + ret = (dx * dy + dy * dz + dz * dx) * 8 + return ret + # --------------------------------------------------------------------------- # RTree @@ -70,262 +70,276 @@ def surface_area(self): def str_vec3(vec3): - return "(%6.1f %6.1f %6.1f)" % vec3 + return "(%6.1f %6.1f %6.1f)" % vec3 def str_bounds(bounds): - # return "(%s, %s)" % (str_vec3(bounds[0]), str_vec3(bounds[1])) - bmin, bmax = bounds - halfsize = (bmax[0] - bmin[0], bmax[1] - bmin[1], bmax[2] - bmin[2]) - return "(%5.1f %5.1f %5.1f)" % halfsize + # return "(%s, %s)" % (str_vec3(bounds[0]), str_vec3(bounds[1])) + bmin, bmax = bounds + halfsize = (bmax[0] - bmin[0], bmax[1] - bmin[1], bmax[2] - bmin[2]) + return "(%5.1f %5.1f %5.1f)" % halfsize -class BinaryReader(): - # Wraps struct.unpack - def __init__(self, inf): - if isinstance(inf, bytes): - inf = StringIO(inf) - self.inf = inf +class BinaryReader: + # Wraps struct.unpack + def __init__(self, inf): + if isinstance(inf, bytes): + inf = StringIO(inf) + self.inf = inf - def read(self, fmt): - fmt_size = struct.calcsize(fmt) - return struct.unpack(fmt, self.inf.read(fmt_size)) + def read(self, fmt): + fmt_size = struct.calcsize(fmt) + return struct.unpack(fmt, self.inf.read(fmt_size)) - def read_bounds(self): - bmin = self.read("<3d") - bmax = self.read("<3d") - return (bmin, bmax) + def read_bounds(self): + bmin = self.read("<3d") + bmax = self.read("<3d") + return (bmin, bmax) class RTreeStorageDict(rtree.index.CustomStorage): - def __init__(self): - self.datas = {} - self.cached_nodes = {} - - # CustomStorage API - - def flush(self, returnError): - # print "flush" - pass + def __init__(self): + self.datas = {} + self.cached_nodes = {} + + # CustomStorage API + + def flush(self, returnError): + # print "flush" + pass + + def create(self, returnError): + # print "create" + pass + + def destroy(self, returnError): + # print "destroy" + pass + + def loadByteArray(self, page, returnError): + print("RTreeStorageDict: load", page) + assert page >= 0 + try: + data = self.datas[page] + assert data != "deleted" + except KeyError: + returnError.contents.value = self.InvalidPageError + + def storeByteArray(self, page, data): + if page == self.NewPage: + page = len(self.datas) + self.datas[page] = data + # print "store new %s %s" % (page, len(data)) + else: + assert page in self.datas + # old_data = self.datas[page] + self.datas[page] = data + # print "store %s %s -> %s" % (page, len(old_data), len(data)) + + return page + + def deleteByteArray(self, page): + # print "RTreeStorageDict: delete", page + assert page in self.datas + self.datas[page] = "deleted" + + +class RTree: + @classmethod + def from_bounds_iter(cls, bounds_iter, leaf_capacity_multiplier=1): + storage = RTreeStorageDict() + p = 
rtree.index.Property() + p.dimension = 3 + # p.variant = rtree.index.RT_Star + p.index_capacity = INDEX_CAPACITY + p.leaf_capacity = int(LEAF_CAPACITY * leaf_capacity_multiplier) + index = rtree.index.Index(storage, bounds_iter, interleaved=True, properties=p) + # Must close in order to flush changes to the storage + index.close() + return cls(storage, 1) + + def __init__(self, storage, header_id=1): + self.header = None # RTreeHeader + self.root = None # RTreeNode + self.nodes_by_id = {} # dict + + self.header = RTreeHeader(storage.datas[header_id]) + self.root = self._recursive_create_node(storage, self.header.rootId) + + def _recursive_create_node(self, storage, node_id): + node_data = storage.datas[node_id] + assert node_data != "deleted" + node = self.nodes_by_id[node_id] = RTreeNode(node_id, node_data) + if node.is_index(): + for c in node.children: + assert c.data is None + c.node = self._recursive_create_node(storage, c.id) + + return node + + def dfs_iter(self): + q = deque([self.root]) + while q: + n = q.popleft() + yield n + if n.is_index(): + q.extend(c.node for c in n.children) + + +class RTreeHeader: # pylint: disable=too-few-public-methods,too-many-instance-attributes + # RTree::storeHeader + # id_type root + # u32 RTreeVariant (0 linear, 1 quadratic, 2 rstar) + # double fill factor + # u32 index capacity + # u32 leaf capacity + # u32 nearMinimumOverlapFactor + # double m_splitDistributionFactor + # double m_reinsertFactor + # u32 m_dimension + # char m_bTightMBRs + # u32 m_stats.m_nodes + # u64 m_stats.m_data + # u32 m_stats.m_treeHeight + # height * u32 nodes in level + def __init__(self, data): + """storage: a RTreeStorageDict instance""" + reader = BinaryReader(data) + ( + self.rootId, + self.variant, + self.fill, + self.icap, + self.lcap, + self.nearMinimumOverlapFactor, + self.splitDistributionFactor, + self.reinsertFactor, + self.dimension, + self.bTightMBRs, + self.nNodes, + self.nData, + self.treeHeight, + ) = reader.read("= 0 - try: - data = self.datas[page] - assert data != 'deleted' - except KeyError: - returnError.contents.value = self.InvalidPageError - - def storeByteArray(self, page, data): - if page == self.NewPage: - page = len(self.datas) - self.datas[page] = data - # print "store new %s %s" % (page, len(data)) - else: - assert page in self.datas - # old_data = self.datas[page] - self.datas[page] = data - # print "store %s %s -> %s" % (page, len(old_data), len(data)) - - return page - - def deleteByteArray(self, page): - # print "RTreeStorageDict: delete", page - assert page in self.datas - self.datas[page] = 'deleted' - - -class RTree(): - @classmethod - def from_bounds_iter(cls, bounds_iter, leaf_capacity_multiplier=1): - storage = RTreeStorageDict() - p = rtree.index.Property() - p.dimension = 3 - # p.variant = rtree.index.RT_Star - p.index_capacity = INDEX_CAPACITY - p.leaf_capacity = int(LEAF_CAPACITY * leaf_capacity_multiplier) - index = rtree.index.Index(storage, bounds_iter, interleaved=True, properties=p) - # Must close in order to flush changes to the storage - index.close() - return cls(storage, 1) - - def __init__(self, storage, header_id=1): - self.header = None # RTreeHeader - self.root = None # RTreeNode - self.nodes_by_id = {} # dict - - self.header = RTreeHeader(storage.datas[header_id]) - self.root = self._recursive_create_node(storage, self.header.rootId) - - def _recursive_create_node(self, storage, node_id): - node_data = storage.datas[node_id] - assert node_data != 'deleted' - node = self.nodes_by_id[node_id] = RTreeNode(node_id, 
node_data) - if node.is_index(): - for c in node.children: - assert c.data is None - c.node = self._recursive_create_node(storage, c.id) - - return node - - def dfs_iter(self): - q = deque([self.root]) - while q: - n = q.popleft() - yield n - if n.is_index(): - q.extend(c.node for c in n.children) - - -class RTreeHeader(): # pylint: disable=too-few-public-methods,too-many-instance-attributes - # RTree::storeHeader - # id_type root - # u32 RTreeVariant (0 linear, 1 quadratic, 2 rstar) - # double fill factor - # u32 index capacity - # u32 leaf capacity - # u32 nearMinimumOverlapFactor - # double m_splitDistributionFactor - # double m_reinsertFactor - # u32 m_dimension - # char m_bTightMBRs - # u32 m_stats.m_nodes - # u64 m_stats.m_data - # u32 m_stats.m_treeHeight - # height * u32 nodes in level - def __init__(self, data): - """storage: a RTreeStorageDict instance""" - reader = BinaryReader(data) - (self.rootId, - self.variant, self.fill, self.icap, self.lcap, - self.nearMinimumOverlapFactor, - self.splitDistributionFactor, - self.reinsertFactor, - self.dimension, - self.bTightMBRs, - self.nNodes, - self.nData, - self.treeHeight) = reader.read(" 0: + self.data = reader.read("%ds" % data_len) else: - for leaf in iter_leaf_children(c.node): # pylint: disable=unused-variable - # TODO: Should this be "yield leaf"? (if so, remove the pylint comment above) - yield c - - lst = list(iter_leaf_children(new_node)) - for elt in lst: - assert isinstance(elt, RTreeChild) - assert elt.node.is_leaf() - return lst - - def as_str(self): - return "id=%3d nc=%3d %s" % ( - self.node_id, len(self.children), - str_bounds(self.bounds)) - - -class RTreeChild(): # pylint: disable=too-few-public-methods - # .id - # If parent is PERSISTENT_INDEX, this is a node id. - # If parent is PESISTENT_LEAF, this is some leaf-specific id (like a stroke number) - def __init__(self, reader): - self.bounds = reader.read_bounds() - self.id, data_len = reader.read(" 0: - self.data = reader.read("%ds" % data_len) - else: - self.data = None - self.node = None - - def as_str(self): - description = '' - if self.data is not None: - description = " + %d bytes" % len(self.data) - return "leaf %4d: %s%s" % (self.id, str_bounds(self.bounds), description) + self.data = None + self.node = None + + def as_str(self): + description = "" + if self.data is not None: + description = " + %d bytes" % len(self.data) + return "leaf %4d: %s%s" % (self.id, str_bounds(self.bounds), description) diff --git a/Support/Python/tbdata/glb.py b/Support/Python/tbdata/glb.py index fed669beab..a9ffb003fa 100644 --- a/Support/Python/tbdata/glb.py +++ b/Support/Python/tbdata/glb.py @@ -20,219 +20,230 @@ SIZES = { - # accessor.type - 'SCALAR': 1, 'VEC2': 2, 'VEC3': 3, 'VEC4': 4, - # accessor.componentType - 5120: 1, 5121: 1, # BYTE, UBYTE - 5122: 2, 5123: 2, # SHORT, USHORT - 5124: 4, 5125: 4, # INT, UINT - 5126: 4 # FLOAT + # accessor.type + "SCALAR": 1, + "VEC2": 2, + "VEC3": 3, + "VEC4": 4, + # accessor.componentType + 5120: 1, + 5121: 1, # BYTE, UBYTE + 5122: 2, + 5123: 2, # SHORT, USHORT + 5124: 4, + 5125: 4, # INT, UINT + 5126: 4, # FLOAT } # struct format characters, for accessor.componentType STRUCT_FORMAT = { - 5120: 'b', 5121: 'B', # BYTE, UBYTE - 5122: 'h', 5123: 'H', # SHORT, USHORT - 5124: 'i', 5125: 'I', # INT, UINT - 5126: 'f' # FLOAT + 5120: "b", + 5121: "B", # BYTE, UBYTE + 5122: "h", + 5123: "H", # SHORT, USHORT + 5124: "i", + 5125: "I", # INT, UINT + 5126: "f", # FLOAT } # From itertools docs def grouper(n, iterable, fillvalue=None): - "grouper(3, 
'ABCDEFG', 'x') --> ABC DEF Gxx" - args = [iter(iterable)] * n - return itertools.zip_longest(fillvalue=fillvalue, *args) - - -class binfile(): - # Helper for parsing - def __init__(self, inf): - self.inf = inf - - def read(self, n): - data = self.inf.read(n) - if len(data) < n: - raise Exception("Short read %s < %s" % (len(data), n)) - return data - - def write(self, data): - return self.inf.write(data) - - def read_length_prefixed(self): - n, = self.unpack(" ABC DEF Gxx" + args = [iter(iterable)] * n + return itertools.zip_longest(fillvalue=fillvalue, *args) + + +class binfile: + # Helper for parsing + def __init__(self, inf): + self.inf = inf + + def read(self, n): + data = self.inf.read(n) + if len(data) < n: + raise Exception("Short read %s < %s" % (len(data), n)) + return data + + def write(self, data): + return self.inf.write(data) + + def read_length_prefixed(self): + (n,) = self.unpack(" Replace the brush for these strokes # names can also be guids, which is useful when the name is ambiguous BRUSH_REPLACEMENTS = [ - # Good brushes - ('SquarePaper', True), - ('ThickGeometry', True), - ('Wire', True), - # Brushes that should be replaced - ('TaperedMarker', 'ThickGeometry'), - ('OilPaint', 'ThickGeometry'), - ('Ink', 'ThickGeometry'), - ('Marker', 'ThickGeometry'), - ('Paper', 'ThickGeometry'), - ('FlatDeprecated', 'ThickGeometry'), - # Questionable - ('Highlighter', 'ThickGeometry'), - ('Light', 'Wire'), - - # Remove particles - ('Smoke', None), - ('Snow', None), - ('Embers', None), - ('Stars', None), - # Remove animated - ('Fire', None), - # Remove shader-based - ('Plasma', None), - ('Rainbow', None), - ('Streamers', None), + # Good brushes + ("SquarePaper", True), + ("ThickGeometry", True), + ("Wire", True), + # Brushes that should be replaced + ("TaperedMarker", "ThickGeometry"), + ("OilPaint", "ThickGeometry"), + ("Ink", "ThickGeometry"), + ("Marker", "ThickGeometry"), + ("Paper", "ThickGeometry"), + ("FlatDeprecated", "ThickGeometry"), + # Questionable + ("Highlighter", "ThickGeometry"), + ("Light", "Wire"), + # Remove particles + ("Smoke", None), + ("Snow", None), + ("Embers", None), + ("Stars", None), + # Remove animated + ("Fire", None), + # Remove shader-based + ("Plasma", None), + ("Rainbow", None), + ("Streamers", None), ] @@ -79,587 +80,636 @@ # Little utilities # ---------------------------------------------------------------------- + def msg(text): - sys.stdout.write("%-79s\r" % text[:79]) - sys.stdout.flush() + sys.stdout.write("%-79s\r" % text[:79]) + sys.stdout.flush() def msgln(text): - sys.stdout.write("%-79s\n" % text[:79]) - sys.stdout.flush() + sys.stdout.write("%-79s\n" % text[:79]) + sys.stdout.flush() def rgb8_to_hsl(rgb): - """Takes a rgb8 tuple, returns a hsl tuple.""" - r = rgb[0] / 255.0 - g = rgb[1] / 255.0 - b = rgb[2] / 255.0 - - cmin = min(r, g, b) - cmax = max(r, g, b) - delta = cmax - cmin - h = 0 - s = 0 - l = (cmax + cmin) # noqa: E741 - - if delta != 0: - if l < 0.5: - s = delta / l - else: - s = delta / (2 - l) + """Takes a rgb8 tuple, returns a hsl tuple.""" + r = rgb[0] / 255.0 + g = rgb[1] / 255.0 + b = rgb[2] / 255.0 + + cmin = min(r, g, b) + cmax = max(r, g, b) + delta = cmax - cmin + h = 0 + s = 0 + l = cmax + cmin # noqa: E741 + + if delta != 0: + if l < 0.5: + s = delta / l + else: + s = delta / (2 - l) - if r == cmax: - h = (g - b) / delta - elif g == cmax: - h = 2 + (b - r) / delta - elif b == cmax: - h = 4 + (r - g) / delta + if r == cmax: + h = (g - b) / delta + elif g == cmax: + h = 2 + (b - r) / delta + elif b == cmax: + h = 4 + 
(r - g) / delta - return h, s, l + return h, s, l # ---------------------------------------------------------------------- # Brush conversion # ---------------------------------------------------------------------- -def get_replacements_by_guid(replacements_by_name): - """Returns a lookup table that is by-guid rather than by-name.""" - brush_lookup = BrushLookup.get() - - def guid_or_name_to_guid(guid_or_name): - if guid_or_name in brush_lookup.guid_to_name: - return guid_or_name - if guid_or_name in brush_lookup.name_to_guids: - return brush_lookup.get_unique_guid(guid_or_name) - raise LookupError("Not a known brush or brush guid: %r" % guid_or_name) - - dct = {} - for before, after in replacements_by_name: - before_guid = guid_or_name_to_guid(before) - if after is True: - after_guid = before_guid - elif after is None: - after_guid = None - else: - after_guid = guid_or_name_to_guid(after) - dct[before_guid] = after_guid - return dct - - -def convert_brushes(tilt, replacements_by_name, show_removed=False): # pylint: disable=too-many-statements,too-many-branches,too-many-locals - """Convert brushes to 3d-printable versions, or remove their strokes from the tilt.""" - replacements = get_replacements_by_guid(replacements_by_name) - brush_lookup = BrushLookup.get() - with tilt.mutable_metadata() as dct: - index_to_guid = dct['BrushIndex'] - - # First, show us what brushes the tilt file uses - used_guids = Counter() - for stroke in tilt.sketch.strokes: - guid = index_to_guid[stroke.brush_idx] - used_guids[guid] += 1 - print("Brushes used:") - for guid, n in sorted(list(used_guids.items()), key=lambda p: -p[1]): - print(" %5d %s" % (n, brush_lookup.guid_to_name.get(guid))) - sys.stdout.flush() - del used_guids - - index_to_new_index = {} - - for i, guid in enumerate(index_to_guid): - name = brush_lookup.guid_to_name.get(guid, guid) - try: - new_guid = replacements[guid] - except KeyError: - print("%d: Don't know what to do with brush %s" % (i, name)) - index_to_new_index[i] = i - else: - new_name = brush_lookup.guid_to_name.get(new_guid, new_guid) - if new_guid is None: - print("%d: Remove %s" % (i, name)) - index_to_new_index[i] = None +def get_replacements_by_guid(replacements_by_name): + """Returns a lookup table that is by-guid rather than by-name.""" + brush_lookup = BrushLookup.get() + + def guid_or_name_to_guid(guid_or_name): + if guid_or_name in brush_lookup.guid_to_name: + return guid_or_name + if guid_or_name in brush_lookup.name_to_guids: + return brush_lookup.get_unique_guid(guid_or_name) + raise LookupError("Not a known brush or brush guid: %r" % guid_or_name) + + dct = {} + for before, after in replacements_by_name: + before_guid = guid_or_name_to_guid(before) + if after is True: + after_guid = before_guid + elif after is None: + after_guid = None else: - if guid == new_guid: - print("%d: Keep %s" % (i, name)) - elif name == new_name: - print("%d: Replace %s/%s -> %s/%s" % (i, name, guid, new_name, new_guid)) - else: - print("%d: Replace %s -> %s" % (i, name, new_name)) - try: - new_idx = index_to_guid.index(new_guid) - except ValueError: - new_idx = len(index_to_guid) - index_to_guid.append(new_guid) - index_to_new_index[i] = new_idx - - brush_indices_to_remove = set(i for (i, new_i) in list(index_to_new_index.items()) if new_i is None) - - if brush_indices_to_remove: - old_len = len(tilt.sketch.strokes) - if show_removed: - # Render in magenta instead of removing - for stroke in tilt.sketch.strokes: - if stroke.brush_idx in brush_indices_to_remove: - stroke.brush_color = (1, 0, 
1, 1) + after_guid = guid_or_name_to_guid(after) + dct[before_guid] = after_guid + return dct + + +def convert_brushes( + tilt, replacements_by_name, show_removed=False +): # pylint: disable=too-many-statements,too-many-branches,too-many-locals + """Convert brushes to 3d-printable versions, or remove their strokes from the tilt.""" + replacements = get_replacements_by_guid(replacements_by_name) + brush_lookup = BrushLookup.get() + + with tilt.mutable_metadata() as dct: + index_to_guid = dct["BrushIndex"] + + # First, show us what brushes the tilt file uses + used_guids = Counter() + for stroke in tilt.sketch.strokes: + guid = index_to_guid[stroke.brush_idx] + used_guids[guid] += 1 + print("Brushes used:") + for guid, n in sorted(list(used_guids.items()), key=lambda p: -p[1]): + print(" %5d %s" % (n, brush_lookup.guid_to_name.get(guid))) + sys.stdout.flush() + del used_guids + + index_to_new_index = {} + + for i, guid in enumerate(index_to_guid): + name = brush_lookup.guid_to_name.get(guid, guid) + try: + new_guid = replacements[guid] + except KeyError: + print("%d: Don't know what to do with brush %s" % (i, name)) + index_to_new_index[i] = i + else: + new_name = brush_lookup.guid_to_name.get(new_guid, new_guid) + if new_guid is None: + print("%d: Remove %s" % (i, name)) + index_to_new_index[i] = None + else: + if guid == new_guid: + print("%d: Keep %s" % (i, name)) + elif name == new_name: + print( + "%d: Replace %s/%s -> %s/%s" + % (i, name, guid, new_name, new_guid) + ) + else: + print("%d: Replace %s -> %s" % (i, name, new_name)) + try: + new_idx = index_to_guid.index(new_guid) + except ValueError: + new_idx = len(index_to_guid) + index_to_guid.append(new_guid) + index_to_new_index[i] = new_idx + + brush_indices_to_remove = set( + i for (i, new_i) in list(index_to_new_index.items()) if new_i is None + ) + + if brush_indices_to_remove: + old_len = len(tilt.sketch.strokes) + if show_removed: + # Render in magenta instead of removing + for stroke in tilt.sketch.strokes: + if stroke.brush_idx in brush_indices_to_remove: + stroke.brush_color = (1, 0, 1, 1) + else: + stroke.brush_color = stroke.brush_color else: - stroke.brush_color = stroke.brush_color - else: - tilt.sketch.strokes[:] = [s for s in tilt.sketch.strokes if s.brush_idx not in brush_indices_to_remove] - new_len = len(tilt.sketch.strokes) - print("Strokes %d -> %d" % (old_len, new_len)) + tilt.sketch.strokes[:] = [ + s + for s in tilt.sketch.strokes + if s.brush_idx not in brush_indices_to_remove + ] + new_len = len(tilt.sketch.strokes) + print("Strokes %d -> %d" % (old_len, new_len)) - for stroke in tilt.sketch.strokes: - new_idx = index_to_new_index[stroke.brush_idx] - # Might be none if it's a removed brush - if new_idx is not None: - stroke.brush_idx = new_idx + for stroke in tilt.sketch.strokes: + new_idx = index_to_new_index[stroke.brush_idx] + # Might be none if it's a removed brush + if new_idx is not None: + stroke.brush_idx = new_idx # ---------------------------------------------------------------------- # Stroke simplification # ---------------------------------------------------------------------- + def calculate_pos_error(cp0, cp1, middle_cps): - # This function needs access to a lot of internal variables - # pylint: disable=protected-access - if len(middle_cps) == 0: - return 0 - strip_length = cp1._dist - cp0._dist - if strip_length <= 0: - return 0 + # This function needs access to a lot of internal variables + # pylint: disable=protected-access + if len(middle_cps) == 0: + return 0 + strip_length = cp1._dist - 
cp0._dist + if strip_length <= 0: + return 0 - max_pos_error = 0 - for _, cp in enumerate(middle_cps): - t = (cp._dist - cp0._dist) / strip_length - pos_interpolated = t * cp0._pos + (1 - t) * cp1._pos - pos_error = np.linalg.norm((pos_interpolated - cp._pos)) - if pos_error > max_pos_error: - max_pos_error = pos_error + max_pos_error = 0 + for _, cp in enumerate(middle_cps): + t = (cp._dist - cp0._dist) / strip_length + pos_interpolated = t * cp0._pos + (1 - t) * cp1._pos + pos_error = np.linalg.norm((pos_interpolated - cp._pos)) + if pos_error > max_pos_error: + max_pos_error = pos_error - return max_pos_error + return max_pos_error def simplify_stroke(stroke, max_error): - # This function needs access to a lot of internal variables - # pylint: disable=protected-access - - # Do greedy optimization of stroke. - REQUIRED_END_CPS = 1 # or 2 - keep_cps = [] - toss_cps = [] # The current set of candidates to toss - - n = len(stroke.controlpoints) - for i, cp in enumerate(stroke.controlpoints): - cp._pos = np.array(cp.position) - if i == 0: - cp._dist = 0 - else: - prev_cp = stroke.controlpoints[i - 1] - cp._dist = prev_cp._dist + np.linalg.norm(prev_cp._pos - cp._pos) + # This function needs access to a lot of internal variables + # pylint: disable=protected-access + + # Do greedy optimization of stroke. + REQUIRED_END_CPS = 1 # or 2 + keep_cps = [] + toss_cps = [] # The current set of candidates to toss + + n = len(stroke.controlpoints) + for i, cp in enumerate(stroke.controlpoints): + cp._pos = np.array(cp.position) + if i == 0: + cp._dist = 0 + else: + prev_cp = stroke.controlpoints[i - 1] + cp._dist = prev_cp._dist + np.linalg.norm(prev_cp._pos - cp._pos) - if REQUIRED_END_CPS <= i < n - REQUIRED_END_CPS: - pos_error = calculate_pos_error(keep_cps[-1], cp, toss_cps) - keep = (pos_error > max_error * stroke.brush_size) - # print " %3d: %s %f %f" % (i, keep, pos_error, stroke.brush_size * .2) - else: - keep = True - # print " %3d: True (End)" % i + if REQUIRED_END_CPS <= i < n - REQUIRED_END_CPS: + pos_error = calculate_pos_error(keep_cps[-1], cp, toss_cps) + keep = pos_error > max_error * stroke.brush_size + # print " %3d: %s %f %f" % (i, keep, pos_error, stroke.brush_size * .2) + else: + keep = True + # print " %3d: True (End)" % i - if keep: - keep_cps.append(cp) - toss_cps = [] - else: - toss_cps.append(cp) + if keep: + keep_cps.append(cp) + toss_cps = [] + else: + toss_cps.append(cp) - stroke.controlpoints[:] = keep_cps + stroke.controlpoints[:] = keep_cps def reduce_control_points(tilt, max_error): - # If debug_simplify, the resulting .tilt file shows both the old and the new - before_cp = 0 - after_cp = 0 - - msg("Simplify strokes") - pct = 0 - n = len(tilt.sketch.strokes) - for i, stroke in enumerate(tilt.sketch.strokes): - new_pct = (i + 1) * 100 / n - if new_pct != pct: - pct = new_pct - removed_pct = (before_cp - after_cp) * 100 / (before_cp + 1) - msg("Simplify strokes: %3d%% %5d/%5d Removed %3d%%" % (pct, i, n, removed_pct)) - - before_cp += len(stroke.controlpoints) - simplify_stroke(stroke, max_error) - after_cp += len(stroke.controlpoints) - msg("Simplify strokes: done") - - msgln("Control points: %5d -> %5d (%2d%%)" % ( - before_cp, after_cp, after_cp * 100 / before_cp)) + # If debug_simplify, the resulting .tilt file shows both the old and the new + before_cp = 0 + after_cp = 0 + + msg("Simplify strokes") + pct = 0 + n = len(tilt.sketch.strokes) + for i, stroke in enumerate(tilt.sketch.strokes): + new_pct = (i + 1) * 100 / n + if new_pct != pct: + pct = new_pct + 
removed_pct = (before_cp - after_cp) * 100 / (before_cp + 1) + msg( + "Simplify strokes: %3d%% %5d/%5d Removed %3d%%" + % (pct, i, n, removed_pct) + ) + + before_cp += len(stroke.controlpoints) + simplify_stroke(stroke, max_error) + after_cp += len(stroke.controlpoints) + msg("Simplify strokes: done") + + msgln( + "Control points: %5d -> %5d (%2d%%)" + % (before_cp, after_cp, after_cp * 100 / before_cp) + ) # ---------------------------------------------------------------------- # Stray strokes # ---------------------------------------------------------------------- -def remove_stray_strokes(tilt, max_dist=0, replacement_brush_guid=None): # pylint: disable=too-many-locals - # This function needs access to a lot of internal variables - # pylint: disable=protected-access - - """Show histograms of control point positions, to help with resizing.""" - - def iter_pos(tilt): - first_cp = 0 - for stroke in tilt.sketch.strokes: - stroke._first_cp = first_cp - first_cp += len(stroke.controlpoints) - for cp in stroke.controlpoints: - yield cp.position - - positions = np.array(list(iter_pos(tilt))) - - if False: # pylint: disable=using-constant-test - # Print out x/y/z histograms - histograms = [np.histogram(positions[..., i], bins=30) for i in range(3)] - for irow in range(len(histograms[0][0])+1): - for axis, histogram in enumerate(histograms): - try: - print("%s %3d %6d " % ('xyz'[axis], histogram[1][irow], histogram[0][irow]), end=' ') - except IndexError: - print("%s %3d %6s " % ('xyz'[axis], histogram[1][irow], ''), end=' ') - print() - - if max_dist > 0: - # Convert replacement guid -> replacement index - if replacement_brush_guid is None: - replacement_brush_index = None - else: - with tilt.mutable_metadata() as dct: - try: - replacement_brush_index = dct['BrushIndex'].index(replacement_brush_guid) - except ValueError: - dct['BrushIndex'].append(replacement_brush_guid) - replacement_brush_index = dct['BrushIndex'].index(replacement_brush_guid) - - # Compute Mahalanobis distance and remove strokes that fall outside - # https://en.wikipedia.org/wiki/Mahalanobis_distance - mean = np.mean(positions, axis=0) - cov = np.cov(positions, rowvar=False) - invcov = np.linalg.inv(cov) - - def mahalanobis_distance(v): - """Return distance of row vector""" - cv = (v - mean)[np.newaxis] - return sqrt(cv.dot(invcov).dot(cv.T)[0, 0]) - - def out_of_bounds(stroke): - i0 = stroke._first_cp - i1 = i0 + len(stroke.controlpoints) - dists = np.array([mahalanobis_distance(pos) for pos in positions[i0: i1]]) - return np.any(dists > max_dist) - - msg("Finding OOB strokes") - # TODO: figure out how to use np.einsum() and remove all the python-level loops - oob_strokes = [ - pair for pair in enumerate(tilt.sketch.strokes) - if out_of_bounds(pair[1]) - ] - msg("") - if oob_strokes: - if replacement_brush_index is not None: - for i, stroke in oob_strokes: - print("Replacing out-of-bounds stroke", i) - stroke.brush_idx = replacement_brush_index - stroke.brush_color = (1, 0, 1, 1) - else: - print("Removing %d strokes" % len(oob_strokes)) - remove_indices = set(pair[0] for pair in oob_strokes) - tilt.sketch.strokes[:] = [ - stroke for i, stroke in enumerate(tilt.sketch.stroke) - if i not in remove_indices +def remove_stray_strokes( + tilt, max_dist=0, replacement_brush_guid=None +): # pylint: disable=too-many-locals + # This function needs access to a lot of internal variables + # pylint: disable=protected-access + + """Show histograms of control point positions, to help with resizing.""" + + def iter_pos(tilt): + first_cp = 0 
+ for stroke in tilt.sketch.strokes: + stroke._first_cp = first_cp + first_cp += len(stroke.controlpoints) + for cp in stroke.controlpoints: + yield cp.position + + positions = np.array(list(iter_pos(tilt))) + + if False: # pylint: disable=using-constant-test + # Print out x/y/z histograms + histograms = [np.histogram(positions[..., i], bins=30) for i in range(3)] + for irow in range(len(histograms[0][0]) + 1): + for axis, histogram in enumerate(histograms): + try: + print( + "%s %3d %6d " + % ("xyz"[axis], histogram[1][irow], histogram[0][irow]), + end=" ", + ) + except IndexError: + print( + "%s %3d %6s " % ("xyz"[axis], histogram[1][irow], ""), end=" " + ) + print() + + if max_dist > 0: + # Convert replacement guid -> replacement index + if replacement_brush_guid is None: + replacement_brush_index = None + else: + with tilt.mutable_metadata() as dct: + try: + replacement_brush_index = dct["BrushIndex"].index( + replacement_brush_guid + ) + except ValueError: + dct["BrushIndex"].append(replacement_brush_guid) + replacement_brush_index = dct["BrushIndex"].index( + replacement_brush_guid + ) + + # Compute Mahalanobis distance and remove strokes that fall outside + # https://en.wikipedia.org/wiki/Mahalanobis_distance + mean = np.mean(positions, axis=0) + cov = np.cov(positions, rowvar=False) + invcov = np.linalg.inv(cov) + + def mahalanobis_distance(v): + """Return distance of row vector""" + cv = (v - mean)[np.newaxis] + return sqrt(cv.dot(invcov).dot(cv.T)[0, 0]) + + def out_of_bounds(stroke): + i0 = stroke._first_cp + i1 = i0 + len(stroke.controlpoints) + dists = np.array([mahalanobis_distance(pos) for pos in positions[i0:i1]]) + return np.any(dists > max_dist) + + msg("Finding OOB strokes") + # TODO: figure out how to use np.einsum() and remove all the python-level loops + oob_strokes = [ + pair for pair in enumerate(tilt.sketch.strokes) if out_of_bounds(pair[1]) ] + msg("") + + if oob_strokes: + if replacement_brush_index is not None: + for i, stroke in oob_strokes: + print("Replacing out-of-bounds stroke", i) + stroke.brush_idx = replacement_brush_index + stroke.brush_color = (1, 0, 1, 1) + else: + print("Removing %d strokes" % len(oob_strokes)) + remove_indices = set(pair[0] for pair in oob_strokes) + tilt.sketch.strokes[:] = [ + stroke + for i, stroke in enumerate(tilt.sketch.stroke) + if i not in remove_indices + ] # ---------------------------------------------------------------------- # Color reduction # ---------------------------------------------------------------------- + def get_most_similar_factors(n): - """Factorize n into two numbers. - Returns the best pair, in the sense that the numbers are the closest to each other.""" - i = int(n**0.5 + 0.5) - while n % i != 0: - i -= 1 - return i, n / i + """Factorize n into two numbers. + Returns the best pair, in the sense that the numbers are the closest to each other.""" + i = int(n ** 0.5 + 0.5) + while n % i != 0: + i -= 1 + return i, n / i def get_good_factors(n, max_aspect_ratio=None): - """Factorize n into two integers that are closest to each other. - If max_aspect_ratio is passed, search numbers >= n until - a pair is found whose aspect ratio is <= max_aspect_ratio.""" - if max_aspect_ratio is None: - return get_most_similar_factors(n) - for i in itertools.count(): - a, b = get_most_similar_factors(n + i) - if float(b) / a <= max_aspect_ratio: - return a, b - # The original code didn't return anything if we reached the end. 
Added an assert - raise AssertionError("No factors found!") + """Factorize n into two integers that are closest to each other. + If max_aspect_ratio is passed, search numbers >= n until + a pair is found whose aspect ratio is <= max_aspect_ratio.""" + if max_aspect_ratio is None: + return get_most_similar_factors(n) + for i in itertools.count(): + a, b = get_most_similar_factors(n + i) + if float(b) / a <= max_aspect_ratio: + return a, b + # The original code didn't return anything if we reached the end. Added an assert + raise AssertionError("No factors found!") def rgbaf_to_rgb8(rgbaf): - """Convert [r, g, b, a] floats to (r, g, b) bytes.""" - return tuple(int(channel * 255) for channel in rgbaf[0:3]) + """Convert [r, g, b, a] floats to (r, g, b) bytes.""" + return tuple(int(channel * 255) for channel in rgbaf[0:3]) def rgb8_to_rgbaf(rgb8): - """Convert (r, g, b) bytes to [r, g, b, a] floats.""" - lst = [channel / 255.0 for channel in rgb8] - lst.append(1.0) - return lst + """Convert (r, g, b) bytes to [r, g, b, a] floats.""" + lst = [channel / 255.0 for channel in rgb8] + lst.append(1.0) + return lst -def tilt_colors_to_image(tilt, max_aspect_ratio=None, preserve_colors=()): # pylint: disable=too-many-locals - """Returns a PIL.Image containing the colors used in the tilt. - The image will have colors in roughly the same proportion as the - control points in the tilt. +def tilt_colors_to_image( + tilt, max_aspect_ratio=None, preserve_colors=() +): # pylint: disable=too-many-locals + """Returns a PIL.Image containing the colors used in the tilt. + The image will have colors in roughly the same proportion as the + control points in the tilt. - preserve_colors is a list of rgb8 colors.""" - assert max_aspect_ratio is None or max_aspect_ratio > 0 + preserve_colors is a list of rgb8 colors.""" + assert max_aspect_ratio is None or max_aspect_ratio > 0 - preserve_colors = set(preserve_colors) + preserve_colors = set(preserve_colors) - def iter_rgb8_colors(tilt): - for stroke in tilt.sketch.strokes: - yield (rgbaf_to_rgb8(stroke.brush_color), len(stroke.controlpoints)) + def iter_rgb8_colors(tilt): + for stroke in tilt.sketch.strokes: + yield (rgbaf_to_rgb8(stroke.brush_color), len(stroke.controlpoints)) - # def by_decreasing_usage(counter_pair): + # def by_decreasing_usage(counter_pair): # # Sort function for colors # return -counter_pair[1] - def by_color_similarity(counter_pair): - # Sort function for colors - rgb8, _ = counter_pair - _, _, l = rgb8_to_hsl(rgb8) # noqa: E741 - return (rgb8 in preserve_colors), l + def by_color_similarity(counter_pair): + # Sort function for colors + rgb8, _ = counter_pair + _, _, l = rgb8_to_hsl(rgb8) # noqa: E741 + return (rgb8 in preserve_colors), l - counter = Counter() - for color, n in iter_rgb8_colors(tilt): - counter[color] += n - most_used_color, amt = max(iter(counter.items()), key=lambda pair: pair[1]) + counter = Counter() + for color, n in iter_rgb8_colors(tilt): + counter[color] += n + most_used_color, amt = max(iter(counter.items()), key=lambda pair: pair[1]) - for rgb8 in preserve_colors: - if rgb8 not in counter: - print("Ignoring: #%02x%02x%02x is not in the image" % rgb8) - else: - counter[rgb8] += amt / 2 - - # Find a "nice" width and height, possibly adjusting the number of texels - num_texels = sum(counter.values()) - width, height = get_good_factors(num_texels, max_aspect_ratio) - if width * height != num_texels: - counter[most_used_color] += width * height - num_texels - assert counter[most_used_color] > 0 - num_texels = 
sum(counter.values()) - assert width * height == num_texels + for rgb8 in preserve_colors: + if rgb8 not in counter: + print("Ignoring: #%02x%02x%02x is not in the image" % rgb8) + else: + counter[rgb8] += amt / 2 - # Expand the colors into a 1d array, then turn into an Image - colors_array = np.zeros(shape=(num_texels, 3), dtype='uint8') - i = 0 - # The sort used here only matters to humans when they look at the images - colors_and_counts = sorted(iter(counter.items()), key=by_color_similarity) - # colors_and_counts = sorted(counter.iteritems(), key=by_decreasing_usage) - for (color, count) in colors_and_counts: - colors_array[i:i + count] = color - i += count - colors_array.shape = (height, width, 3) - return Image.fromarray(colors_array, mode='RGB') + # Find a "nice" width and height, possibly adjusting the number of texels + num_texels = sum(counter.values()) + width, height = get_good_factors(num_texels, max_aspect_ratio) + if width * height != num_texels: + counter[most_used_color] += width * height - num_texels + assert counter[most_used_color] > 0 + num_texels = sum(counter.values()) + assert width * height == num_texels + + # Expand the colors into a 1d array, then turn into an Image + colors_array = np.zeros(shape=(num_texels, 3), dtype="uint8") + i = 0 + # The sort used here only matters to humans when they look at the images + colors_and_counts = sorted(iter(counter.items()), key=by_color_similarity) + # colors_and_counts = sorted(counter.iteritems(), key=by_decreasing_usage) + for (color, count) in colors_and_counts: + colors_array[i : i + count] = color + i += count + colors_array.shape = (height, width, 3) + return Image.fromarray(colors_array, mode="RGB") def get_quantized_image_pillow(im, num_colors): - MAXIMUM_COVERAGE = 1 - print("Falling back to old color quantization") - return im.quantize(colors=num_colors, method=MAXIMUM_COVERAGE), 'pillow' + MAXIMUM_COVERAGE = 1 + print("Falling back to old color quantization") + return im.quantize(colors=num_colors, method=MAXIMUM_COVERAGE), "pillow" def get_quantized_image_pngquant(im, num_colors): - # pngquant errors out if its best solution is below this "quality" - QUALITY_MIN = 0 # never error out - # pngquant stops using colors when "quality" goes above this. - # I have no real feeling for what this number means in practice - QUALITY_MAX = 40 - im.save('tmp_pngquant.png') - try: - subprocess.check_call([ - 'pngquant', - '--nofs', # no dithering - '--force', - '--quality', '%d-%d' % (QUALITY_MIN, QUALITY_MAX), - '-o', 'tmp_pngquant_out.png', - str(num_colors), '--', - 'tmp_pngquant.png' - ]) - imq = Image.open('tmp_pngquant_out.png') - imq.load() - finally: - if os.path.exists('tmp_pngquant.png'): - os.unlink('tmp_pngquant.png') - if os.path.exists('tmp_pngquant_out.png'): - os.unlink('tmp_pngquant_out.png') - return imq, 'pngquant' + # pngquant errors out if its best solution is below this "quality" + QUALITY_MIN = 0 # never error out + # pngquant stops using colors when "quality" goes above this. 
+ # I have no real feeling for what this number means in practice + QUALITY_MAX = 40 + im.save("tmp_pngquant.png") + try: + subprocess.check_call( + [ + "pngquant", + "--nofs", # no dithering + "--force", + "--quality", + "%d-%d" % (QUALITY_MIN, QUALITY_MAX), + "-o", + "tmp_pngquant_out.png", + str(num_colors), + "--", + "tmp_pngquant.png", + ] + ) + imq = Image.open("tmp_pngquant_out.png") + imq.load() + finally: + if os.path.exists("tmp_pngquant.png"): + os.unlink("tmp_pngquant.png") + if os.path.exists("tmp_pngquant_out.png"): + os.unlink("tmp_pngquant_out.png") + return imq, "pngquant" def get_quantized_image(im, num_colors): - try: - return get_quantized_image_pngquant(im, num_colors) - except subprocess.CalledProcessError as e: - print("Error running pngquant: %s" % e) - except OSError as e: - print("Missing pngquant: %s" % e) - print("Download pngquant.exe it and put it in your PATH.") - return get_quantized_image_pillow(im, num_colors) - - -def simplify_colors(tilt, num_colors, preserve_colors): # pylint: disable=too-many-locals - im = tilt_colors_to_image(tilt, max_aspect_ratio=4, preserve_colors=preserve_colors) - if num_colors < 0: - # Little hack to force use of pillow - imq, method = get_quantized_image_pillow(im, -num_colors) - else: - imq, method = get_quantized_image(im, num_colors) - - def iter_rgb8(im): - return zip(im.getdata(0), im.getdata(1), im.getdata(2)) - - def get_imq_color(ipixel, data=imq.getdata(), palette=imq.getpalette()): - # Look up color in imq, which is awkward because it's palettized - palette_entry = data[ipixel] - r, g, b = palette[palette_entry * 3: (palette_entry + 1) * 3] - return (r, g, b) - - # Create table mapping unquantized rgb8 to quantized rgbaf - old_to_new = {} - idx = 0 - for (old_color, group) in itertools.groupby(iter_rgb8(im)): - assert old_color not in old_to_new - old_to_new[old_color] = rgb8_to_rgbaf(get_imq_color(idx)) - idx += len(list(group)) - - for stroke in tilt.sketch.strokes: - stroke.brush_color = old_to_new[rgbaf_to_rgb8(stroke.brush_color)] - - for old8, newf in old_to_new.items(): - oldv = np.array(rgb8_to_rgbaf(old8)[0:3]) - newv = np.array(newf[0:3]) - err = oldv - newv - err = math.sqrt(np.dot(err, err)) - if err > .2: - print("High color error: #%02x%02x%02x" % old8) - - num_colors = len({tuple(v) for v in old_to_new.values()}) - base, _ = os.path.splitext(tilt.filename) - im.save('%s_%s.png' % (base, 'orig')) - imq.save('%s_%s_%d.png' % (base, method, num_colors)) + try: + return get_quantized_image_pngquant(im, num_colors) + except subprocess.CalledProcessError as e: + print("Error running pngquant: %s" % e) + except OSError as e: + print("Missing pngquant: %s" % e) + print("Download pngquant.exe it and put it in your PATH.") + return get_quantized_image_pillow(im, num_colors) + + +def simplify_colors( + tilt, num_colors, preserve_colors +): # pylint: disable=too-many-locals + im = tilt_colors_to_image(tilt, max_aspect_ratio=4, preserve_colors=preserve_colors) + if num_colors < 0: + # Little hack to force use of pillow + imq, method = get_quantized_image_pillow(im, -num_colors) + else: + imq, method = get_quantized_image(im, num_colors) + + def iter_rgb8(im): + return zip(im.getdata(0), im.getdata(1), im.getdata(2)) + + def get_imq_color(ipixel, data=imq.getdata(), palette=imq.getpalette()): + # Look up color in imq, which is awkward because it's palettized + palette_entry = data[ipixel] + r, g, b = palette[palette_entry * 3 : (palette_entry + 1) * 3] + return (r, g, b) + + # Create table mapping unquantized rgb8 to 
quantized rgbaf + old_to_new = {} + idx = 0 + for (old_color, group) in itertools.groupby(iter_rgb8(im)): + assert old_color not in old_to_new + old_to_new[old_color] = rgb8_to_rgbaf(get_imq_color(idx)) + idx += len(list(group)) + + for stroke in tilt.sketch.strokes: + stroke.brush_color = old_to_new[rgbaf_to_rgb8(stroke.brush_color)] + + for old8, newf in old_to_new.items(): + oldv = np.array(rgb8_to_rgbaf(old8)[0:3]) + newv = np.array(newf[0:3]) + err = oldv - newv + err = math.sqrt(np.dot(err, err)) + if err > 0.2: + print("High color error: #%02x%02x%02x" % old8) + + num_colors = len({tuple(v) for v in old_to_new.values()}) + base, _ = os.path.splitext(tilt.filename) + im.save("%s_%s.png" % (base, "orig")) + imq.save("%s_%s_%d.png" % (base, method, num_colors)) # ---------------------------------------------------------------------- # Split export into multiple .obj files # ---------------------------------------------------------------------- + def iter_aggregated_by_color(json_filename): - """Yields TiltBrushMesh instances, each of a uniform color.""" - def by_color(m): - return m.c[0] + """Yields TiltBrushMesh instances, each of a uniform color.""" - meshes = iter_meshes(json_filename) - for (_, group) in itertools.groupby(sorted(meshes, key=by_color), key=by_color): - yield TiltBrushMesh.from_meshes(group) + def by_color(m): + return m.c[0] + + meshes = iter_meshes(json_filename) + for (_, group) in itertools.groupby(sorted(meshes, key=by_color), key=by_color): + yield TiltBrushMesh.from_meshes(group) def write_simple_obj(mesh, outf_name): - tmpf = StringIO() + tmpf = StringIO() - for v in mesh.v: - tmpf.write("v %f %f %f\n" % v) + for v in mesh.v: + tmpf.write("v %f %f %f\n" % v) - for (t1, t2, t3) in mesh.tri: - t1 += 1 - t2 += 1 - t3 += 1 - tmpf.write("f %d %d %d\n" % (t1, t2, t3)) + for (t1, t2, t3) in mesh.tri: + t1 += 1 + t2 += 1 + t3 += 1 + tmpf.write("f %d %d %d\n" % (t1, t2, t3)) - with open(outf_name, 'wb') as outf: - outf.write(tmpf.getvalue()) + with open(outf_name, "wb") as outf: + outf.write(tmpf.getvalue()) def split_json_into_obj(json_filename): - output_base = os.path.splitext(json_filename)[0].replace('_out', '') + output_base = os.path.splitext(json_filename)[0].replace("_out", "") - meshes = list(iter_aggregated_by_color(json_filename)) - meshes.sort(key=lambda m: len(m.v), reverse=True) - for i, mesh in enumerate(meshes): - # It's the "ignore normals" that does the most collapsing here. - mesh.collapse_verts(ignore=('uv0', 'uv1', 'c', 't', 'n')) - mesh.remove_degenerate() + meshes = list(iter_aggregated_by_color(json_filename)) + meshes.sort(key=lambda m: len(m.v), reverse=True) + for i, mesh in enumerate(meshes): + # It's the "ignore normals" that does the most collapsing here. 
+ mesh.collapse_verts(ignore=("uv0", "uv1", "c", "t", "n")) + mesh.remove_degenerate() - (r, g, b, a) = struct.unpack('4B', struct.pack('I', mesh.c[0])) - assert a == 255, (r, g, b, a) - hex_color = '%02x%02x%02x' % (r, g, b) - outf_name = '%s %02d %s.obj' % (output_base, i, hex_color) - write_simple_obj(mesh, outf_name) - msgln("Wrote %s" % outf_name) + (r, g, b, a) = struct.unpack("4B", struct.pack("I", mesh.c[0])) + assert a == 255, (r, g, b, a) + hex_color = "%02x%02x%02x" % (r, g, b) + outf_name = "%s %02d %s.obj" % (output_base, i, hex_color) + write_simple_obj(mesh, outf_name) + msgln("Wrote %s" % outf_name) # ---------------------------------------------------------------------- # Main # ---------------------------------------------------------------------- + def process_tilt(filename, args): - msg("Load tilt") - tilt = Tilt(filename) - msg("Load strokes") - # TODO: this seems to do nothing; is there a function that's supposed to be called here? - tilt.sketch.strokes # pylint: disable=pointless-statement - msg("") - - if args.debug: - msg("Clone strokes") - before_strokes = [s.clone() for s in tilt.sketch.strokes] - - # Do this before color quantization, because it removes strokes (and their colors) - if args.convert_brushes: - convert_brushes(tilt, BRUSH_REPLACEMENTS) - - if args.remove_stray_strokes is not None: - remove_stray_strokes(tilt, args.remove_stray_strokes, - BrushLookup.get().get_unique_guid('Wire')) - - if args.pos_error_tolerance > 0: - reduce_control_points(tilt, args.pos_error_tolerance) - - if args.simplify_colors is not None: - simplify_colors(tilt, num_colors=args.simplify_colors, preserve_colors=args.preserve_colors) - - if args.debug: - final_strokes = [] - # interleave them so it renders semi-nicely... - for before, after in itertools.zip_longest(before_strokes, tilt.sketch.strokes): - if before is not None: - for cp in before.controlpoints: - cp.position[1] += 10 - final_strokes.append(before) - if after is not None: - final_strokes.append(after) - tilt.sketch.strokes[:] = final_strokes - - tilt.write_sketch() - msgln("Wrote %s" % os.path.basename(tilt.filename)) + msg("Load tilt") + tilt = Tilt(filename) + msg("Load strokes") + # TODO: this seems to do nothing; is there a function that's supposed to be called here? + tilt.sketch.strokes # pylint: disable=pointless-statement + msg("") + + if args.debug: + msg("Clone strokes") + before_strokes = [s.clone() for s in tilt.sketch.strokes] + + # Do this before color quantization, because it removes strokes (and their colors) + if args.convert_brushes: + convert_brushes(tilt, BRUSH_REPLACEMENTS) + + if args.remove_stray_strokes is not None: + remove_stray_strokes( + tilt, args.remove_stray_strokes, BrushLookup.get().get_unique_guid("Wire") + ) + + if args.pos_error_tolerance > 0: + reduce_control_points(tilt, args.pos_error_tolerance) + + if args.simplify_colors is not None: + simplify_colors( + tilt, num_colors=args.simplify_colors, preserve_colors=args.preserve_colors + ) + + if args.debug: + final_strokes = [] + # interleave them so it renders semi-nicely... 
+ for before, after in itertools.zip_longest(before_strokes, tilt.sketch.strokes): + if before is not None: + for cp in before.controlpoints: + cp.position[1] += 10 + final_strokes.append(before) + if after is not None: + final_strokes.append(after) + tilt.sketch.strokes[:] = final_strokes + + tilt.write_sketch() + msgln("Wrote %s" % os.path.basename(tilt.filename)) def main(): - parser = argparse.ArgumentParser(usage='''%(prog)s [ files ] + parser = argparse.ArgumentParser( + usage="""%(prog)s [ files ] Process .tilt files to get them ready for 3D printing. @@ -672,55 +722,74 @@ def main(): 3. Use --convert-brushes and --pos-error-tolerance. 4. Load .tilt files in Tilt Brush, and export to .json 5. Convert from .json -> multiple .obj files -''') - - def hex_color(arg): - arg = arg.lower() - m = re.match(r'^#?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})$', arg) - if m is not None: - return tuple(int(m.group(i), 16) for i in (1, 2, 3)) - raise argparse.ArgumentTypeError("Must be exactly hex 6 digits: %r" % arg) - - parser.add_argument( - '--debug', action='store_true', - help='For debugging: put both the original and modified strokes in the resulting .tilt file') - - parser.add_argument( - '--remove-stray-strokes', metavar='float', type=float, default=None, - help="Replace strokes that are far away from the sketch with magenta wire. Argument is the number of standard deviations; 5.0 is a reasonable starting point.") - - parser.add_argument( - '--simplify-colors', type=int, metavar='N', - help='Simplify down to N colors. Use a negative number to try the alternate algorithm.') - parser.add_argument( - '--preserve-color', dest='preserve_colors', type=hex_color, action='append', - default=[], - help='Color to preserve, as a hex string like #ff00ff') - - parser.add_argument( - '--convert-brushes', action='store_true', - help='Convert brushes to 3d-printable ones') - parser.add_argument( - '--pos-error-tolerance', type=float, default=0, - help='Allowable positional error when simplifying strokes, as a fraction of stroke width. If 0, do not simplify. .1 to .3 are good values. (default %(default)s)') - - parser.add_argument('-o', dest='output_file', help='Name of output file (optional)') - parser.add_argument('files', type=str, nargs='+', help='File(s) to hack') - - args = parser.parse_args() - - for i, orig_filename in enumerate(args.files): - if orig_filename.endswith('.tilt'): - base, ext = os.path.splitext(orig_filename) - if i == 0 and args.output_file is not None: - working_filename = args.output_file - else: - working_filename = base + '_out' + ext - shutil.copyfile(orig_filename, working_filename) - process_tilt(working_filename, args) - elif orig_filename.endswith('.json'): - split_json_into_obj(orig_filename) - - -if __name__ == '__main__': - main() +""" + ) + + def hex_color(arg): + arg = arg.lower() + m = re.match(r"^#?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})$", arg) + if m is not None: + return tuple(int(m.group(i), 16) for i in (1, 2, 3)) + raise argparse.ArgumentTypeError("Must be exactly hex 6 digits: %r" % arg) + + parser.add_argument( + "--debug", + action="store_true", + help="For debugging: put both the original and modified strokes in the resulting .tilt file", + ) + + parser.add_argument( + "--remove-stray-strokes", + metavar="float", + type=float, + default=None, + help="Replace strokes that are far away from the sketch with magenta wire. 
Argument is the number of standard deviations; 5.0 is a reasonable starting point.", + ) + + parser.add_argument( + "--simplify-colors", + type=int, + metavar="N", + help="Simplify down to N colors. Use a negative number to try the alternate algorithm.", + ) + parser.add_argument( + "--preserve-color", + dest="preserve_colors", + type=hex_color, + action="append", + default=[], + help="Color to preserve, as a hex string like #ff00ff", + ) + + parser.add_argument( + "--convert-brushes", + action="store_true", + help="Convert brushes to 3d-printable ones", + ) + parser.add_argument( + "--pos-error-tolerance", + type=float, + default=0, + help="Allowable positional error when simplifying strokes, as a fraction of stroke width. If 0, do not simplify. .1 to .3 are good values. (default %(default)s)", + ) + + parser.add_argument("-o", dest="output_file", help="Name of output file (optional)") + parser.add_argument("files", type=str, nargs="+", help="File(s) to hack") + + args = parser.parse_args() + + for i, orig_filename in enumerate(args.files): + if orig_filename.endswith(".tilt"): + base, ext = os.path.splitext(orig_filename) + if i == 0 and args.output_file is not None: + working_filename = args.output_file + else: + working_filename = base + "_out" + ext + shutil.copyfile(orig_filename, working_filename) + process_tilt(working_filename, args) + elif orig_filename.endswith(".json"): + split_json_into_obj(orig_filename) + + +if __name__ == "__main__": + main() diff --git a/Support/Python/unitybuild/constants.py b/Support/Python/unitybuild/constants.py index 593c3a9081..dc39e5ff5d 100644 --- a/Support/Python/unitybuild/constants.py +++ b/Support/Python/unitybuild/constants.py @@ -14,24 +14,25 @@ class Error(Exception): - pass + pass class UserError(Error): - pass + pass class BuildFailed(Error): - pass + pass class BadVersionCode(BuildFailed): - """The Oculus store had a build with a code >= the one we uploaded. - self.desired_version_code is the lowest new version code that the store will accept.""" - def __init__(self, message, desired_version_code): - super().__init__(message) - self.desired_version_code = desired_version_code + """The Oculus store had a build with a code >= the one we uploaded. 
+ self.desired_version_code is the lowest new version code that the store will accept.""" + + def __init__(self, message, desired_version_code): + super().__init__(message) + self.desired_version_code = desired_version_code class InternalError(Error): - pass + pass diff --git a/Support/Python/unitybuild/credentials.py b/Support/Python/unitybuild/credentials.py index 5eefd4256b..af03009ec3 100644 --- a/Support/Python/unitybuild/credentials.py +++ b/Support/Python/unitybuild/credentials.py @@ -21,105 +21,131 @@ import sys import getpass import webbrowser + try: - import keyring + import keyring except ImportError: - keyring = None # We'll catch this below when we try to use it + keyring = None # We'll catch this below when we try to use it -__all__ = ('get_credential',) +__all__ = ("get_credential",) -TB_OCULUS_RIFT_APP_ID = '1111640318951750' -TB_OCULUS_QUEST_APP_ID = '2322529091093901' +TB_OCULUS_RIFT_APP_ID = "1111640318951750" +TB_OCULUS_QUEST_APP_ID = "2322529091093901" # External API def get_credential(name): - """Returns a Credential object which you can query for its contents.""" - return CREDENTIALS_BY_NAME[name] + """Returns a Credential object which you can query for its contents.""" + return CREDENTIALS_BY_NAME[name] # Implementation -def import_keyring(): - """Returns null if unsupported.""" - if not keyring: - print("You don't have keyring support. Try running:\npip install keyring pywin32", file=sys.stderr) - assert keyring - - -class Credential(): - KEYRING_USERNAME = 'Open Brush Build' - def __init__(self, name, location, **extra): - self.name = name % extra - self.location = None if location is None else location % extra - self.extra = extra - - def get_secret(self): - """Fetches a secret from the user's keystore or keyboard. - Caches the result to the keystore, if possible.""" - import_keyring() - if keyring is not None: - secret = keyring.get_keyring().get_password(self.name, self.KEYRING_USERNAME) - if secret is not None: +def import_keyring(): + """Returns null if unsupported.""" + if not keyring: + print( + "You don't have keyring support. Try running:\npip install keyring pywin32", + file=sys.stderr, + ) + assert keyring + + +class Credential: + KEYRING_USERNAME = "Open Brush Build" + + def __init__(self, name, location, **extra): + self.name = name % extra + self.location = None if location is None else location % extra + self.extra = extra + + def get_secret(self): + """Fetches a secret from the user's keystore or keyboard. 
+ Caches the result to the keystore, if possible.""" + import_keyring() + if keyring is not None: + secret = keyring.get_keyring().get_password( + self.name, self.KEYRING_USERNAME + ) + if secret is not None: + return secret + + # Pop open Chrome for them + if self.location is not None: + try: + webbrowser.open(self.location) + except: # pylint: disable=bare-except + # TODO this can't be a good idea + pass + + # Fetch and cache + secret = ( + getpass.getpass( + prompt="Enter secret for '%s' from\n%s\nPassword: " + % (self.name, self.location) + ) + or None + ) + if secret is not None and keyring is not None: + keyring.get_keyring().set_password(self.name, self.KEYRING_USERNAME, secret) return secret - # Pop open Chrome for them - if self.location is not None: - try: - webbrowser.open(self.location) - except: # pylint: disable=bare-except - # TODO this can't be a good idea - pass - - # Fetch and cache - secret = getpass.getpass(prompt="Enter secret for '%s' from\n%s\nPassword: " % (self.name, self.location)) or None - if secret is not None and keyring is not None: - keyring.get_keyring().set_password(self.name, self.KEYRING_USERNAME, secret) - return secret + def set_secret(self, value): + keyring.get_keyring().set_password(self.name, self.KEYRING_USERNAME, value) - def set_secret(self, value): - keyring.get_keyring().set_password(self.name, self.KEYRING_USERNAME, value) - - def delete_secret(self): - """Returns True if the password existed and was deleted.""" - keyring.get_keyring().delete_password(self.name, self.KEYRING_USERNAME) + def delete_secret(self): + """Returns True if the password existed and was deleted.""" + keyring.get_keyring().delete_password(self.name, self.KEYRING_USERNAME) CREDENTIALS_BY_NAME = dict( - (c.name, c) for c in [ - Credential('%(app_id)s', 'https://dashboard.oculus.com/application/%(app_id)s/api', app_id=TB_OCULUS_RIFT_APP_ID), - Credential('%(app_id)s', 'https://dashboard.oculus.com/application/%(app_id)s/api', app_id=TB_OCULUS_QUEST_APP_ID), - Credential('Open Brush keystore password', None), # Redacted - Credential('Open Brush Oculus Quest signing key password', None), # Redacted + (c.name, c) + for c in [ + Credential( + "%(app_id)s", + "https://dashboard.oculus.com/application/%(app_id)s/api", + app_id=TB_OCULUS_RIFT_APP_ID, + ), + Credential( + "%(app_id)s", + "https://dashboard.oculus.com/application/%(app_id)s/api", + app_id=TB_OCULUS_QUEST_APP_ID, + ), + Credential("Open Brush keystore password", None), # Redacted + Credential("Open Brush Oculus Quest signing key password", None), # Redacted ] ) def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--delete', action='store_true', help='Delete all existing secrets') - parser.add_argument('--set', action='store_true', help='Prompt for any unknown secrets') - args = parser.parse_args() - - import_keyring() - if keyring is None: - print("Aborting.") - sys.exit(1) - - if args.delete: - for _, c in sorted(CREDENTIALS_BY_NAME.items()): - # Delete will fail if it doesn't exist - try: - c.delete_secret() - except keyring.errors.PasswordDeleteError: - pass - - if args.set: - for _, c in sorted(CREDENTIALS_BY_NAME.items()): - c.get_secret() - - -if __name__ == '__main__': - main() + parser = argparse.ArgumentParser() + parser.add_argument( + "--delete", action="store_true", help="Delete all existing secrets" + ) + parser.add_argument( + "--set", action="store_true", help="Prompt for any unknown secrets" + ) + args = parser.parse_args() + + import_keyring() + if keyring is None: + 
print("Aborting.") + sys.exit(1) + + if args.delete: + for _, c in sorted(CREDENTIALS_BY_NAME.items()): + # Delete will fail if it doesn't exist + try: + c.delete_secret() + except keyring.errors.PasswordDeleteError: + pass + + if args.set: + for _, c in sorted(CREDENTIALS_BY_NAME.items()): + c.get_secret() + + +if __name__ == "__main__": + main() diff --git a/Support/Python/unitybuild/main.py b/Support/Python/unitybuild/main.py index dd6a52284c..6e10b1a3db 100644 --- a/Support/Python/unitybuild/main.py +++ b/Support/Python/unitybuild/main.py @@ -62,13 +62,19 @@ import unitybuild.utils import unitybuild.push -from unitybuild.constants import Error, UserError, BuildFailed, BadVersionCode, InternalError +from unitybuild.constants import ( + Error, + UserError, + BuildFailed, + BadVersionCode, + InternalError, +) from unitybuild.credentials import get_credential, TB_OCULUS_QUEST_APP_ID from unitybuild.vcs import create as vcs_create VENDOR_NAME = "Icosa" -EXE_BASE_NAME = 'OpenBrush' +EXE_BASE_NAME = "OpenBrush" # ---------------------------------------------------------------------- # Build logic @@ -76,559 +82,639 @@ class LogTailer(threading.Thread): - """Copy interesting lines from Unity's logfile to stdout. - Necessary because Unity's batchmode is completely silent on Windows. - - When used in a "with" block, *logfile* is guaranteed to be closed - after the block exits.""" - POLL_TIME = 0.5 - - def __init__(self, logfile, disabled=False): - super().__init__() - self.daemon = True - self.logfile = logfile - # It's not very easy to have optional context managers in Python, - # so allow caller to pass an arg that makes this essentially a no-op - self.should_exit = disabled - - def __enter__(self): - self.start() - - def __exit__(self, *args): - # Joining the thread is the easiest and safest way to close the logfile - self.should_exit = True - try: - self.join(self.POLL_TIME + 0.5) - except RuntimeError: - # This exception is expected if the thread hasn't been started yet. - pass - sys.stdout.write("%-79s\r" % '') # clear line - return False - - def run(self): - # Wait for file to be created - while not os.access(self.logfile, os.R_OK): - if self.should_exit: - return - time.sleep(self.POLL_TIME) - - # All of BuildTiltBrush.CommandLine()'s output is prefixed with _btb_ - munge_pat = re.compile('Updating (Assets/.*) - GUID') - progress_pat = re.compile('(_btb_ |DisplayProgressbar: )(.*)') - with open(self.logfile) as inf: - while True: - where = inf.tell() - line = inf.readline() + """Copy interesting lines from Unity's logfile to stdout. + Necessary because Unity's batchmode is completely silent on Windows. + + When used in a "with" block, *logfile* is guaranteed to be closed + after the block exits.""" + + POLL_TIME = 0.5 + + def __init__(self, logfile, disabled=False): + super().__init__() + self.daemon = True + self.logfile = logfile + # It's not very easy to have optional context managers in Python, + # so allow caller to pass an arg that makes this essentially a no-op + self.should_exit = disabled + + def __enter__(self): + self.start() + + def __exit__(self, *args): + # Joining the thread is the easiest and safest way to close the logfile + self.should_exit = True try: - if not line: + self.join(self.POLL_TIME + 0.5) + except RuntimeError: + # This exception is expected if the thread hasn't been started yet. 
+ pass + sys.stdout.write("%-79s\r" % "") # clear line + return False + + def run(self): + # Wait for file to be created + while not os.access(self.logfile, os.R_OK): if self.should_exit: - return + return time.sleep(self.POLL_TIME) - inf.seek(where) - elif progress_pat.match(line): - print('Unity> %-70s\r' % progress_pat.match(line).group(2)[-70:], end=' ') - elif munge_pat.match(line): - print('Munge> %-70s\r' % munge_pat.match(line).group(1)[-70:], end=' ') - except IOError: - # The "print" can raise IOError - pass + + # All of BuildTiltBrush.CommandLine()'s output is prefixed with _btb_ + munge_pat = re.compile("Updating (Assets/.*) - GUID") + progress_pat = re.compile("(_btb_ |DisplayProgressbar: )(.*)") + with open(self.logfile) as inf: + while True: + where = inf.tell() + line = inf.readline() + try: + if not line: + if self.should_exit: + return + time.sleep(self.POLL_TIME) + inf.seek(where) + elif progress_pat.match(line): + print( + "Unity> %-70s\r" % progress_pat.match(line).group(2)[-70:], + end=" ", + ) + elif munge_pat.match(line): + print( + "Munge> %-70s\r" % munge_pat.match(line).group(1)[-70:], + end=" ", + ) + except IOError: + # The "print" can raise IOError + pass def get_unity_exe(version, lenient=True): - """Returns a Unity executable of the same major version. - version - a (major, minor, point) tuple. Strings. - lenient - if True, allow the micro version to be higher. - """ - exes = sorted(iter_editors_and_versions(), reverse=True) - if len(exes) == 0: - raise BuildFailed("Cannot find any Unity versions (want %s)" % (version,)) - for (found_exe, found_version) in exes: - if found_version == version: - return found_exe - - if lenient: - # Compatible is defined as same major and minor version - compatible = [(exe, ver) for (exe, ver) in exes if ver[0:2] == version[0:2]] - if len(compatible) > 0: - def int_version(version): - (major, minor, micro) = version - return (int(major), int(minor), int(micro)) - - def by_int_version(xxx_todo_changeme): - (exe, ver) = xxx_todo_changeme - return (int_version(ver), exe) - found_exe, found_version = max(compatible, key=by_int_version) - if int_version(found_version) >= int_version(version): - return found_exe - - raise BuildFailed("Cannot find desired Unity version (want %s)" % (version,)) + """Returns a Unity executable of the same major version. + version - a (major, minor, point) tuple. Strings. + lenient - if True, allow the micro version to be higher. 
+ """ + exes = sorted(iter_editors_and_versions(), reverse=True) + if len(exes) == 0: + raise BuildFailed("Cannot find any Unity versions (want %s)" % (version,)) + for (found_exe, found_version) in exes: + if found_version == version: + return found_exe + + if lenient: + # Compatible is defined as same major and minor version + compatible = [(exe, ver) for (exe, ver) in exes if ver[0:2] == version[0:2]] + if len(compatible) > 0: + + def int_version(version): + (major, minor, micro) = version + return (int(major), int(minor), int(micro)) + + def by_int_version(xxx_todo_changeme): + (exe, ver) = xxx_todo_changeme + return (int_version(ver), exe) + + found_exe, found_version = max(compatible, key=by_int_version) + if int_version(found_version) >= int_version(version): + return found_exe + + raise BuildFailed("Cannot find desired Unity version (want %s)" % (version,)) def iter_possible_windows_editor_locations(): - """Yields possible locations for Unity.exe""" - # New-style Unity Hub install locations - for editor_dir in glob.glob(r'c:\Program Files*\Unity*\Hub\Editor\*\Editor'): - yield editor_dir - # Old-school install locations - for editor_dir in glob.glob(r'c:\Program Files*\Unity*\Editor'): - yield editor_dir - # Check to see if UnityHub has a secondary install path defined. - install_config_file_path = os.path.join( - os.getenv('APPDATA'), - r'UnityHub\secondaryInstallPath.json') - if os.path.exists(install_config_file_path): - with open(install_config_file_path, 'r') as install_config_file: - install_dir = json.load(install_config_file) - for editor_dir in glob.glob(install_dir + r'\*\Editor'): + """Yields possible locations for Unity.exe""" + # New-style Unity Hub install locations + for editor_dir in glob.glob(r"c:\Program Files*\Unity*\Hub\Editor\*\Editor"): + yield editor_dir + # Old-school install locations + for editor_dir in glob.glob(r"c:\Program Files*\Unity*\Editor"): yield editor_dir + # Check to see if UnityHub has a secondary install path defined. + install_config_file_path = os.path.join( + os.getenv("APPDATA"), r"UnityHub\secondaryInstallPath.json" + ) + if os.path.exists(install_config_file_path): + with open(install_config_file_path, "r") as install_config_file: + install_dir = json.load(install_config_file) + for editor_dir in glob.glob(install_dir + r"\*\Editor"): + yield editor_dir def iter_editors_and_versions(): # pylint: disable=too-many-branches - """Yields (exe_path, (major, minor, micro)) tuples. - All elements are strings.""" - hub_exe = None - if sys.platform == 'win32': - hub_exe = r'c:\Program Files\Unity Hub\Unity Hub.exe' - elif sys.platform == 'darwin': - hub_exe = r'/Applications/Unity Hub.app/Contents/MacOS/Unity Hub' - else: + """Yields (exe_path, (major, minor, micro)) tuples. + All elements are strings.""" hub_exe = None - # Headless hub isn't totally headless; it forces the hub to pop up, which is irritating. - # Disabling for now. 
- if False and hub_exe and os.path.exists(hub_exe): - proc = subprocess.Popen([hub_exe, '--', '--headless', 'editors', '--installed'], - stdout=subprocess.PIPE) - for line in proc.stdout: - m = re.search(r'(\d+)\.(\d+)\.(\d+).* at (.*)', line) - if m: - yield (m.group(4), (m.group(1), m.group(2), m.group(3).strip())) - return - - if sys.platform == 'win32': - for editor_dir in iter_possible_windows_editor_locations(): - editor_data_dir = os.path.join(editor_dir, 'Data') - if os.path.exists(editor_data_dir): - try: - exe = os.path.join(editor_dir, 'Unity.exe') - if os.path.exists(exe): - yield (exe, get_editor_unity_version(exe, editor_data_dir)) - else: - print('WARN: Missing executable %s' % exe) - except LookupError as e: - print(e) - except Exception as e: # pylint: disable=broad-except - print('WARN: Cannot find version of %s: %s' % (editor_dir, e)) - elif sys.platform == 'darwin': - # Kind of a hacky way of detecting the Daydream build machine - is_build_machine = os.path.exists('/Users/jenkins/JenkinsCommon/Unity') - if is_build_machine: - app_list = glob.glob('/Users/jenkins/JenkinsCommon/Unity/Unity_*/Unity.app') + if sys.platform == "win32": + hub_exe = r"c:\Program Files\Unity Hub\Unity Hub.exe" + elif sys.platform == "darwin": + hub_exe = r"/Applications/Unity Hub.app/Contents/MacOS/Unity Hub" else: - # TODO: make it work with Unity hub? - app_list = ['/Applications/Unity/Unity.app'] - # Since we don't have Unity hub support (commented out above because headless isn't - # headless), look for where it installs directly - app_list.extend(glob.glob('/Applications/Unity/*/Unity.app')) - for editor_dir in app_list: - exe = os.path.join(editor_dir, 'Contents/MacOS/Unity') - editor_data_dir = os.path.join(editor_dir, 'Contents') - if os.path.exists(editor_dir): - yield (exe, get_editor_unity_version(editor_dir, editor_data_dir)) + hub_exe = None + # Headless hub isn't totally headless; it forces the hub to pop up, which is irritating. + # Disabling for now. + if False and hub_exe and os.path.exists(hub_exe): + proc = subprocess.Popen( + [hub_exe, "--", "--headless", "editors", "--installed"], + stdout=subprocess.PIPE, + ) + for line in proc.stdout: + m = re.search(r"(\d+)\.(\d+)\.(\d+).* at (.*)", line) + if m: + yield (m.group(4), (m.group(1), m.group(2), m.group(3).strip())) + return + + if sys.platform == "win32": + for editor_dir in iter_possible_windows_editor_locations(): + editor_data_dir = os.path.join(editor_dir, "Data") + if os.path.exists(editor_data_dir): + try: + exe = os.path.join(editor_dir, "Unity.exe") + if os.path.exists(exe): + yield (exe, get_editor_unity_version(exe, editor_data_dir)) + else: + print("WARN: Missing executable %s" % exe) + except LookupError as e: + print(e) + except Exception as e: # pylint: disable=broad-except + print("WARN: Cannot find version of %s: %s" % (editor_dir, e)) + elif sys.platform == "darwin": + # Kind of a hacky way of detecting the Daydream build machine + is_build_machine = os.path.exists("/Users/jenkins/JenkinsCommon/Unity") + if is_build_machine: + app_list = glob.glob("/Users/jenkins/JenkinsCommon/Unity/Unity_*/Unity.app") + else: + # TODO: make it work with Unity hub? 
+ app_list = ["/Applications/Unity/Unity.app"] + # Since we don't have Unity hub support (commented out above because headless isn't + # headless), look for where it installs directly + app_list.extend(glob.glob("/Applications/Unity/*/Unity.app")) + for editor_dir in app_list: + exe = os.path.join(editor_dir, "Contents/MacOS/Unity") + editor_data_dir = os.path.join(editor_dir, "Contents") + if os.path.exists(editor_dir): + yield (exe, get_editor_unity_version(editor_dir, editor_data_dir)) def parse_version(txt): - txt = txt.strip() - major, minor, point = re.match(r'(\d+)\.(\d+)\.?(\d+)?', txt).groups() - if point is None: - point = 0 - return (major, minor, point) + txt = txt.strip() + major, minor, point = re.match(r"(\d+)\.(\d+)\.?(\d+)?", txt).groups() + if point is None: + point = 0 + return (major, minor, point) def get_editor_unity_version(editor_app, editor_data_dir): - """Pass the app and its Editor/Data directory. - The app should end with '.app' (OSX) or '.exe' (Windows) - Returns a version 3-tuple like ("5", "6", "1") or ("2017", "1", "1"). - Does not return any suffixes like "p4" or "f3". - Raises LookupError on failure.""" - - # This works for 5.x as well as 2017.x and 2018.x, but not 2019.x - packagemanager_dir = os.path.join(editor_data_dir, 'PackageManager/Unity/PackageManager') - if os.path.exists(packagemanager_dir): - # The package manager has names like "5.6.3". - _, dirs, _ = next(os.walk(packagemanager_dir)) - if len(dirs) > 0: - return parse_version(dirs[0]) - - # This works for 5.x releases, but not 2017.x - analytics_version = os.path.join(editor_data_dir, 'UnityExtensions/Unity/UnityAnalytics/version') - if os.path.exists(analytics_version): - with open(analytics_version) as inf: - return parse_version(inf.read()) - - # TODO(pld): For 2019, maybe search the modules.json file for strings - # like "UnitySetup-Android-Support-for-Editor-"? But that - # file doesn't live in the editor data directory and it might not - # exist at all for MacOS. - - try: - (major, minor, micro) = unitybuild.utils.get_file_version(editor_app) - except LookupError: - # Keep trying; we have one last fallback - pass - else: - return (str(major), str(minor), str(micro)) - - # I can't find a way to get the version out of 2019.x. - # This is pretty janky so only use for Jenkins and 2019. - for m in re.finditer(r'Unity/(Unity_)?(2019)\.(\d+)\.(\d+)', - editor_data_dir): - _, major, minor, point = m.groups() - ret = (major, minor, point) - print("WARNING: %s using fallback to determine Unity version %s" % (editor_data_dir, ret)) - return ret - - raise LookupError('%s: Cannot determine Unity version' % editor_data_dir) + """Pass the app and its Editor/Data directory. + The app should end with '.app' (OSX) or '.exe' (Windows) + Returns a version 3-tuple like ("5", "6", "1") or ("2017", "1", "1"). + Does not return any suffixes like "p4" or "f3". + Raises LookupError on failure.""" + + # This works for 5.x as well as 2017.x and 2018.x, but not 2019.x + packagemanager_dir = os.path.join( + editor_data_dir, "PackageManager/Unity/PackageManager" + ) + if os.path.exists(packagemanager_dir): + # The package manager has names like "5.6.3". 
+ _, dirs, _ = next(os.walk(packagemanager_dir)) + if len(dirs) > 0: + return parse_version(dirs[0]) + + # This works for 5.x releases, but not 2017.x + analytics_version = os.path.join( + editor_data_dir, "UnityExtensions/Unity/UnityAnalytics/version" + ) + if os.path.exists(analytics_version): + with open(analytics_version) as inf: + return parse_version(inf.read()) + + # TODO(pld): For 2019, maybe search the modules.json file for strings + # like "UnitySetup-Android-Support-for-Editor-"? But that + # file doesn't live in the editor data directory and it might not + # exist at all for MacOS. + + try: + (major, minor, micro) = unitybuild.utils.get_file_version(editor_app) + except LookupError: + # Keep trying; we have one last fallback + pass + else: + return (str(major), str(minor), str(micro)) + + # I can't find a way to get the version out of 2019.x. + # This is pretty janky so only use for Jenkins and 2019. + for m in re.finditer(r"Unity/(Unity_)?(2019)\.(\d+)\.(\d+)", editor_data_dir): + _, major, minor, point = m.groups() + ret = (major, minor, point) + print( + "WARNING: %s using fallback to determine Unity version %s" + % (editor_data_dir, ret) + ) + return ret + + raise LookupError("%s: Cannot determine Unity version" % editor_data_dir) def get_project_unity_version(project_dir): - """Returns a (major, minor, point) tuple.""" - fn = os.path.join(project_dir, 'ProjectSettings/ProjectVersion.txt') - with open(fn) as inf: - m = re.search(r'^m_EditorVersion: (.*)', inf.read(), flags=re.M) - return parse_version(m.group(1)) + """Returns a (major, minor, point) tuple.""" + fn = os.path.join(project_dir, "ProjectSettings/ProjectVersion.txt") + with open(fn) as inf: + m = re.search(r"^m_EditorVersion: (.*)", inf.read(), flags=re.M) + return parse_version(m.group(1)) def indent(prefix, text): - return '\n'.join(prefix + line for line in text.split('\n')) + return "\n".join(prefix + line for line in text.split("\n")) def iter_compiler_output(log): - """Yields dicts containing the keys: - exitcode, compilationhadfailure, outfile, stdout, stderr""" - # Compile output looks like this: - # -----CompilerOutput:-stdout--exitcode: 1--compilationhadfailure: True--outfile: Temp/Assembly-CSharp-Editor.dll - # Compilation failed: 1 error(s), 0 warnings - # -----CompilerOutput:-stderr---------- - # Assets/Editor/BuildTiltBrush.cs(33,7): error CS1519: - # -----EndCompilerOutput--------------- - pat = re.compile(r''' + """Yields dicts containing the keys: + exitcode, compilationhadfailure, outfile, stdout, stderr""" + # Compile output looks like this: + # -----CompilerOutput:-stdout--exitcode: 1--compilationhadfailure: True--outfile: Temp/Assembly-CSharp-Editor.dll + # Compilation failed: 1 error(s), 0 warnings + # -----CompilerOutput:-stderr---------- + # Assets/Editor/BuildTiltBrush.cs(33,7): error CS1519: + # -----EndCompilerOutput--------------- + pat = re.compile( + r""" ^-----CompilerOutput:-stdout(?P.*?) \n (?P.*?) 
\n - -----EndCompilerOutput''', - re.DOTALL | re.MULTILINE | re.VERBOSE) - for m in pat.finditer(log): - dct = {} - for chunk in m.group('metadata').split('--'): - if chunk: - key, value = chunk.split(': ', 1) - dct[key] = value - dct['exitcode'] = int(dct['exitcode']) - dct['compilationhadfailure'] = (dct['compilationhadfailure'] != 'False') - body = m.group('body').strip().split('-----CompilerOutput:-stderr----------\n') - dct['stdout'] = body[0].strip() - dct['stderr'] = body[1].strip() if len(body) > 1 else '' - yield dct + -----EndCompilerOutput""", + re.DOTALL | re.MULTILINE | re.VERBOSE, + ) + for m in pat.finditer(log): + dct = {} + for chunk in m.group("metadata").split("--"): + if chunk: + key, value = chunk.split(": ", 1) + dct[key] = value + dct["exitcode"] = int(dct["exitcode"]) + dct["compilationhadfailure"] = dct["compilationhadfailure"] != "False" + body = m.group("body").strip().split("-----CompilerOutput:-stderr----------\n") + dct["stdout"] = body[0].strip() + dct["stderr"] = body[1].strip() if len(body) > 1 else "" + yield dct def check_compile_output(log): - """Raises BuildFailed if compile errors are found. - Spews to stderr if compile warnings are found.""" - dcts = list(iter_compiler_output(log)) - compiler_output = '\n'.join(stuff.strip() - for dct in dcts - for stuff in [dct['stderr'], dct['stdout']]) - if any(dct['compilationhadfailure'] for dct in dcts): - # Mono puts it in stderr; Roslyn puts it in stdout. - # But! Unity 2018 also gives us a good build report, so we might be able to - # get the compiler failures from the build report instead of this ugly parsing - # through Unity's log file. - raise BuildFailed('Compile\n%s' % indent('| ', compiler_output)) - if compiler_output != '': - print('Compile warnings:\n%s' % indent('| ', compiler_output), file=sys.stderr) + """Raises BuildFailed if compile errors are found. + Spews to stderr if compile warnings are found.""" + dcts = list(iter_compiler_output(log)) + compiler_output = "\n".join( + stuff.strip() for dct in dcts for stuff in [dct["stderr"], dct["stdout"]] + ) + if any(dct["compilationhadfailure"] for dct in dcts): + # Mono puts it in stderr; Roslyn puts it in stdout. + # But! Unity 2018 also gives us a good build report, so we might be able to + # get the compiler failures from the build report instead of this ugly parsing + # through Unity's log file. + raise BuildFailed("Compile\n%s" % indent("| ", compiler_output)) + if compiler_output != "": + print("Compile warnings:\n%s" % indent("| ", compiler_output), file=sys.stderr) def search_backwards(text, start_point, limit, pattern): - """Search the range [limit, start_point] for instances of |pattern|. - Returns the one closest to |start_point|. - Returns |limit| if none are found.""" - assert limit < start_point - matches = list(pattern.finditer(text[limit: start_point])) - if len(matches) == 0: - return limit - return limit + matches[-1].start(0) + """Search the range [limit, start_point] for instances of |pattern|. + Returns the one closest to |start_point|. 
+ Returns |limit| if none are found.""" + assert limit < start_point + matches = list(pattern.finditer(text[limit:start_point])) + if len(matches) == 0: + return limit + return limit + matches[-1].start(0) def analyze_unity_failure(exitcode, log): - """Raise BuildFailed with as much information about the failure as possible.""" - # Build exceptions look like this: - # BuildFailedException: <> - # at BuildTiltBrush.DoBuild (BuildOptions options, BuildTarget target, System.String location, SdkMode vrSdk, Boolean isExperimental, System.String stamp) [0x0026a] in C:\src\tb\Assets\Editor\BuildTiltBrush.cs:430 - # at BuildTiltBrush.CommandLine () [0x001de] in C:\src\tb\Assets\Editor\BuildTiltBrush.cs:259 - - build_failed_pat = re.compile( - r'''BuildFailedException:\ <<(?P.*?)>> - (?P (\n\ \ at\ [^\n]+)* )''', - re.DOTALL | re.MULTILINE | re.VERBOSE) - m = build_failed_pat.search(log) - if m is not None: - raise BuildFailed("C# raised BuildFailedException\n%s\n| ---\n%s" % ( - indent('| ', m.group('traceback').strip()), - indent('| ', m.group('description').strip()))) - - internal_error_pat = re.compile( - r'^executeMethod method (?P.*) threw exception\.', - re.MULTILINE) - m = internal_error_pat.search(log) - if m is not None: - exception_pat = re.compile(r'^[A-Z][A-Za-z0-9]+(Exception|Error):', re.MULTILINE) - start = search_backwards(log, m.start(0), m.start(0) - 1024, exception_pat) - end = m.end(0) - suspicious_portion = log[start:end] - raise BuildFailed("""Build script '%s' had an internal error. + """Raise BuildFailed with as much information about the failure as possible.""" + # Build exceptions look like this: + # BuildFailedException: <> + # at BuildTiltBrush.DoBuild (BuildOptions options, BuildTarget target, System.String location, SdkMode vrSdk, Boolean isExperimental, System.String stamp) [0x0026a] in C:\src\tb\Assets\Editor\BuildTiltBrush.cs:430 + # at BuildTiltBrush.CommandLine () [0x001de] in C:\src\tb\Assets\Editor\BuildTiltBrush.cs:259 + + build_failed_pat = re.compile( + r"""BuildFailedException:\ <<(?P.*?)>> + (?P (\n\ \ at\ [^\n]+)* )""", + re.DOTALL | re.MULTILINE | re.VERBOSE, + ) + m = build_failed_pat.search(log) + if m is not None: + raise BuildFailed( + "C# raised BuildFailedException\n%s\n| ---\n%s" + % ( + indent("| ", m.group("traceback").strip()), + indent("| ", m.group("description").strip()), + ) + ) + + internal_error_pat = re.compile( + r"^executeMethod method (?P.*) threw exception\.", re.MULTILINE + ) + m = internal_error_pat.search(log) + if m is not None: + exception_pat = re.compile( + r"^[A-Z][A-Za-z0-9]+(Exception|Error):", re.MULTILINE + ) + start = search_backwards(log, m.start(0), m.start(0) - 1024, exception_pat) + end = m.end(0) + suspicious_portion = log[start:end] + raise BuildFailed( + """Build script '%s' had an internal error. 
Suspect log portion: -%s""" % (m.group('methodname'), indent('| ', suspicious_portion))) - - # Check for BuildTiltBrush.Die() - btb_die_pat = re.compile( - r'_btb_ Abort <<(?P.*?)>>', re.DOTALL | re.MULTILINE) - m = btb_die_pat.search(log) - if m is not None: - raise BuildFailed("C# called Die %s '%s'" % (exitcode, m.group('description'))) - - if exitcode is None: - raise BuildFailed("Unity build seems to have been terminated prematurely") - raise BuildFailed("""Unity build failed with exit code %s but no errors seen -This probably means the project is already open in Unity""" % exitcode) +%s""" + % (m.group("methodname"), indent("| ", suspicious_portion)) + ) + + # Check for BuildTiltBrush.Die() + btb_die_pat = re.compile( + r"_btb_ Abort <<(?P.*?)>>", re.DOTALL | re.MULTILINE + ) + m = btb_die_pat.search(log) + if m is not None: + raise BuildFailed("C# called Die %s '%s'" % (exitcode, m.group("description"))) + + if exitcode is None: + raise BuildFailed("Unity build seems to have been terminated prematurely") + raise BuildFailed( + """Unity build failed with exit code %s but no errors seen +This probably means the project is already open in Unity""" + % exitcode + ) def get_end_user_version(project_dir): - fn = os.path.join(project_dir, 'Assets', 'Scenes', 'Main.unity') - with open(fn) as inf: - m = re.search('^ m_VersionNumber: (.*)', inf.read(), flags=re.M) - if m: - return m.group(1).strip() - return '' + fn = os.path.join(project_dir, "Assets", "Scenes", "Main.unity") + with open(fn) as inf: + m = re.search("^ m_VersionNumber: (.*)", inf.read(), flags=re.M) + if m: + return m.group(1).strip() + return "" def make_unused_directory_name(directory_name): - dirname, filename = os.path.split(directory_name) - for i in itertools.count(1): - prospective_name = os.path.join(dirname, '%d_%s' % (i, filename)) - if not os.path.exists(prospective_name): - return prospective_name - # TODO: find a better way to come up wit h a unique directory name (probably mktemp), but for now, assert, since the caller can't handle this - raise AssertionError("No usable directory found") + dirname, filename = os.path.split(directory_name) + for i in itertools.count(1): + prospective_name = os.path.join(dirname, "%d_%s" % (i, filename)) + if not os.path.exists(prospective_name): + return prospective_name + # TODO: find a better way to come up wit h a unique directory name (probably mktemp), but for now, assert, since the caller can't handle this + raise AssertionError("No usable directory found") PLATFORM_TO_UNITYTARGET = { - 'Windows': 'StandaloneWindows64', - 'OSX': 'StandaloneOSX', - 'Linux': 'StandaloneLinux64', - 'Android': 'Android', - 'iOS': 'iOS', + "Windows": "StandaloneWindows64", + "OSX": "StandaloneOSX", + "Linux": "StandaloneLinux64", + "Android": "Android", + "iOS": "iOS", } -def build(stamp, output_dir, project_dir, exe_base_name, # pylint: disable=too-many-statements,too-many-branches,too-many-locals,too-many-arguments - experimental, platform, il2cpp, vrsdk, config, for_distribution, - is_jenkins): - """Create a build of Tilt Brush. - Pass: - stamp - string describing the version+build; will be embedded into the build somehow. 
- output_dir - desired output directory name - project_dir - directory name - project_name - name of the executable to create (sans extension) - experimental - boolean - platform - one of (Windows, OSX, Linux, Android, iOS) - il2cpp - boolean - vrsdk - Config.SdkMode; valid values are (Oculus, SteamVR, Monoscopic) - config - one of (Debug, Release) - for_distribution - boolean. Enables android signing, version code bump, removal of pdb files. - is_jenkins - boolean; used to customize stdout logging - Returns: - the actual output directory used - """ - def get_exe_name(platform, exe_base_name): - # This is a manually maintained duplicate of App.cs - if 'Windows' in platform: - return '%s.exe' % exe_base_name - if 'OSX' in platform: - return '%s.app' % exe_base_name - if 'Linux' in platform: - return '%s' %exe_base_name - if 'Android' in platform: - return 'com.%s.%s.apk' % (VENDOR_NAME, exe_base_name) - if 'iOS' in platform: - return '%s' % exe_base_name - raise InternalError("Don't know executable name for %s" % platform) - - try: - unitybuild.utils.destroy(output_dir) - except Exception as e: # pylint: disable=broad-except - print('WARN: could not use %s: %s' % (output_dir, e)) - output_dir = make_unused_directory_name(output_dir) - print('WARN: using %s intead' % output_dir) - unitybuild.utils.destroy(output_dir) - os.makedirs(output_dir) - logfile = os.path.join(output_dir, 'build_log.txt') - - exe_name = os.path.join(output_dir, get_exe_name(platform, exe_base_name)) - cmd_env = os.environ.copy() - cmdline = [get_unity_exe(get_project_unity_version(project_dir), - lenient=is_jenkins), - '-logFile', logfile, - '-batchmode', - # '-nographics', Might be needed on OSX if running w/o window server? - '-projectPath', project_dir, - '-executeMethod', 'BuildTiltBrush.CommandLine', - '-btb-target', PLATFORM_TO_UNITYTARGET[platform], - '-btb-out', exe_name, - '-btb-display', vrsdk] - if experimental: - cmdline.append('-btb-experimental') - - if il2cpp: - cmdline.append('-btb-il2cpp') - - # list of tuples: - # - the name of the credential in the environment (for Jenkins) - # - the name of the credential in the keystore (for interactive use) - required_credentials = [] - - if for_distribution and platform == 'Android': - if vrsdk != 'Oculus': - raise BuildFailed('Signing is currently only implemented for Oculus Quest') - keystore = os.path.abspath(os.path.join(project_dir, 'Support/Keystores/TiltBrush.keystore')) - keystore = keystore.replace('/', '\\') - if not os.path.exists(keystore): - raise BuildFailed("To sign you need %s.\n" % keystore) - - cmdline.extend([ - '-btb-keystore-name', keystore, - '-btb-keyalias-name', 'oculusquest', - ]) - required_credentials.extend([ - ('BTB_KEYSTORE_PASS', 'Tilt Brush keystore password'), - ('BTB_KEYALIAS_PASS', 'Tilt Brush Oculus Quest signing key password')]) - cmdline.extend(['-btb-stamp', stamp]) - - if config == 'Debug': - cmdline.extend([ - '-btb-bopt', 'Development', - '-btb-bopt', 'AllowDebugging', - ]) - - cmdline.append('-quit') - - full_version = "%s-%s" % (get_end_user_version(project_dir), stamp) - - # Populate environment with secrets just before calling subprocess - for (env_var, credential_name) in required_credentials: - if env_var not in cmd_env: - if is_jenkins: - # TODO(pld): Look into Jenkins plugins to get at these credentials - raise BuildFailed( - 'Credential "%s" is missing from Jenkins build environment' % env_var) - cmd_env[env_var] = get_credential(credential_name).get_secret().encode('ascii') - proc = subprocess.Popen(cmdline, 
stdout=sys.stdout, stderr=sys.stderr, env=cmd_env) - del cmd_env - - with unitybuild.utils.ensure_terminate(proc): - with LogTailer(logfile, disabled=is_jenkins): - with open(os.path.join(output_dir, 'build_stamp.txt'), 'w') as outf: - outf.write(full_version) - - # Use wait() instead of communicate() because Windows can't - # interrupt the thread joins that communicate() uses. - proc.wait() - - with open(logfile) as inf: - log = inf.read().replace('\r', '') - - check_compile_output(log) - - if proc.returncode != 0: - analyze_unity_failure(proc.returncode, log) - - # sanity-checking since we've been seeing bad Oculus builds - if platform == 'Windows': - required_files = [] - for f in required_files: - if not os.path.exists(os.path.join(output_dir, f)): - raise BuildFailed("""Build is missing the file '%s' +def build( + stamp, + output_dir, + project_dir, + exe_base_name, # pylint: disable=too-many-statements,too-many-branches,too-many-locals,too-many-arguments + experimental, + platform, + il2cpp, + vrsdk, + config, + for_distribution, + is_jenkins, +): + """Create a build of Tilt Brush. + Pass: + stamp - string describing the version+build; will be embedded into the build somehow. + output_dir - desired output directory name + project_dir - directory name + project_name - name of the executable to create (sans extension) + experimental - boolean + platform - one of (Windows, OSX, Linux, Android, iOS) + il2cpp - boolean + vrsdk - Config.SdkMode; valid values are (Oculus, SteamVR, Monoscopic) + config - one of (Debug, Release) + for_distribution - boolean. Enables android signing, version code bump, removal of pdb files. + is_jenkins - boolean; used to customize stdout logging + Returns: + the actual output directory used + """ + + def get_exe_name(platform, exe_base_name): + # This is a manually maintained duplicate of App.cs + if "Windows" in platform: + return "%s.exe" % exe_base_name + if "OSX" in platform: + return "%s.app" % exe_base_name + if "Linux" in platform: + return "%s" % exe_base_name + if "Android" in platform: + return "com.%s.%s.apk" % (VENDOR_NAME, exe_base_name) + if "iOS" in platform: + return "%s" % exe_base_name + raise InternalError("Don't know executable name for %s" % platform) + + try: + unitybuild.utils.destroy(output_dir) + except Exception as e: # pylint: disable=broad-except + print("WARN: could not use %s: %s" % (output_dir, e)) + output_dir = make_unused_directory_name(output_dir) + print("WARN: using %s intead" % output_dir) + unitybuild.utils.destroy(output_dir) + os.makedirs(output_dir) + logfile = os.path.join(output_dir, "build_log.txt") + + exe_name = os.path.join(output_dir, get_exe_name(platform, exe_base_name)) + cmd_env = os.environ.copy() + cmdline = [ + get_unity_exe(get_project_unity_version(project_dir), lenient=is_jenkins), + "-logFile", + logfile, + "-batchmode", + # '-nographics', Might be needed on OSX if running w/o window server? 
+ "-projectPath", + project_dir, + "-executeMethod", + "BuildTiltBrush.CommandLine", + "-btb-target", + PLATFORM_TO_UNITYTARGET[platform], + "-btb-out", + exe_name, + "-btb-display", + vrsdk, + ] + if experimental: + cmdline.append("-btb-experimental") + + if il2cpp: + cmdline.append("-btb-il2cpp") + + # list of tuples: + # - the name of the credential in the environment (for Jenkins) + # - the name of the credential in the keystore (for interactive use) + required_credentials = [] + + if for_distribution and platform == "Android": + if vrsdk != "Oculus": + raise BuildFailed("Signing is currently only implemented for Oculus Quest") + keystore = os.path.abspath( + os.path.join(project_dir, "Support/Keystores/TiltBrush.keystore") + ) + keystore = keystore.replace("/", "\\") + if not os.path.exists(keystore): + raise BuildFailed("To sign you need %s.\n" % keystore) + + cmdline.extend( + [ + "-btb-keystore-name", + keystore, + "-btb-keyalias-name", + "oculusquest", + ] + ) + required_credentials.extend( + [ + ("BTB_KEYSTORE_PASS", "Tilt Brush keystore password"), + ("BTB_KEYALIAS_PASS", "Tilt Brush Oculus Quest signing key password"), + ] + ) + cmdline.extend(["-btb-stamp", stamp]) + + if config == "Debug": + cmdline.extend( + [ + "-btb-bopt", + "Development", + "-btb-bopt", + "AllowDebugging", + ] + ) + + cmdline.append("-quit") + + full_version = "%s-%s" % (get_end_user_version(project_dir), stamp) + + # Populate environment with secrets just before calling subprocess + for (env_var, credential_name) in required_credentials: + if env_var not in cmd_env: + if is_jenkins: + # TODO(pld): Look into Jenkins plugins to get at these credentials + raise BuildFailed( + 'Credential "%s" is missing from Jenkins build environment' + % env_var + ) + cmd_env[env_var] = ( + get_credential(credential_name).get_secret().encode("ascii") + ) + proc = subprocess.Popen(cmdline, stdout=sys.stdout, stderr=sys.stderr, env=cmd_env) + del cmd_env + + with unitybuild.utils.ensure_terminate(proc): + with LogTailer(logfile, disabled=is_jenkins): + with open(os.path.join(output_dir, "build_stamp.txt"), "w") as outf: + outf.write(full_version) + + # Use wait() instead of communicate() because Windows can't + # interrupt the thread joins that communicate() uses. + proc.wait() + + with open(logfile) as inf: + log = inf.read().replace("\r", "") + + check_compile_output(log) + + if proc.returncode != 0: + analyze_unity_failure(proc.returncode, log) + + # sanity-checking since we've been seeing bad Oculus builds + if platform == "Windows": + required_files = [] + for f in required_files: + if not os.path.exists(os.path.join(output_dir, f)): + raise BuildFailed( + """Build is missing the file '%s' This is a known Unity bug and the only thing to do is try the build -over and over again until it works""" % f) - return output_dir +over and over again until it works""" + % f + ) + return output_dir def finalize_build(src_dir, dst_dir): - """Attempts to move *src_dir* to *dst_dir*. - Return *dst_dir* on success, or some other directory name if there was some problem. 
- This should be as close to atomic as possible.""" - try: - unitybuild.utils.destroy(dst_dir) - except OSError: - print('WARN: Cannot remove %s; putting output in %s' % (dst_dir, src_dir)) - return src_dir - - try: - os.makedirs(os.path.dirname(dst_dir)) - except OSError: - pass - - try: - os.rename(src_dir, dst_dir) - return dst_dir - except OSError: - # TODO(pld): Try to do something better - # On Jon's computer, Android builds always leave behind a Java.exe process that - # holds onto the directory and prevents its rename. - # raise InternalError("Can't rename %s to %s: %s" % (src_dir, dst_dir, e)) - print('WARN: Cannot rename %s; leaving it as-is' % (src_dir,)) - return src_dir + """Attempts to move *src_dir* to *dst_dir*. + Return *dst_dir* on success, or some other directory name if there was some problem. + This should be as close to atomic as possible.""" + try: + unitybuild.utils.destroy(dst_dir) + except OSError: + print("WARN: Cannot remove %s; putting output in %s" % (dst_dir, src_dir)) + return src_dir + + try: + os.makedirs(os.path.dirname(dst_dir)) + except OSError: + pass + + try: + os.rename(src_dir, dst_dir) + return dst_dir + except OSError: + # TODO(pld): Try to do something better + # On Jon's computer, Android builds always leave behind a Java.exe process that + # holds onto the directory and prevents its rename. + # raise InternalError("Can't rename %s to %s: %s" % (src_dir, dst_dir, e)) + print("WARN: Cannot rename %s; leaving it as-is" % (src_dir,)) + return src_dir def create_notice_file(project_dir): - def iter_notice_files(): - """Yields (library_name, notice_file_name) tuples.""" - root = os.path.join(project_dir, 'Assets/ThirdParty') - if not os.path.exists(root): - raise BuildFailed("Cannot generate NOTICE: missing %s" % root) - for r, _, fs in os.walk(root): - for f in fs: - if f.lower() in ('notice', 'notice.txt', 'notice.tiltbrush', 'notice.md'): - yield (os.path.basename(r), os.path.join(r, f)) - root = os.path.join(project_dir, 'Assets/ThirdParty/NuGet/Packages') - if not os.path.exists(root): - raise BuildFailed("Cannot generate NOTICE: missing %s" % root) - for r, _, fs in os.walk(root): - for f in fs: - if f.lower() in ('notice', 'notice.md', 'notice.txt'): - m = re.match(r'\D+', os.path.basename(r)) - if m: - name = m.group(0).rstrip('.') - if (name[-2:] == '.v' or name[-2:] == '.V'): - name = name[:-2] - yield (name, os.path.join(r, f)) - - tmpf = io.StringIO() - tmpf.write('''This file is automatically generated. + def iter_notice_files(): + """Yields (library_name, notice_file_name) tuples.""" + root = os.path.join(project_dir, "Assets/ThirdParty") + if not os.path.exists(root): + raise BuildFailed("Cannot generate NOTICE: missing %s" % root) + for r, _, fs in os.walk(root): + for f in fs: + if f.lower() in ( + "notice", + "notice.txt", + "notice.tiltbrush", + "notice.md", + ): + yield (os.path.basename(r), os.path.join(r, f)) + root = os.path.join(project_dir, "Assets/ThirdParty/NuGet/Packages") + if not os.path.exists(root): + raise BuildFailed("Cannot generate NOTICE: missing %s" % root) + for r, _, fs in os.walk(root): + for f in fs: + if f.lower() in ("notice", "notice.md", "notice.txt"): + m = re.match(r"\D+", os.path.basename(r)) + if m: + name = m.group(0).rstrip(".") + if name[-2:] == ".v" or name[-2:] == ".V": + name = name[:-2] + yield (name, os.path.join(r, f)) + + tmpf = io.StringIO() + tmpf.write( + """This file is automatically generated. This software makes use of third-party software with the following notices. 
-''') - for (library_name, notice_file) in iter_notice_files(): - tmpf.write('\n \n=== %s ===\n' % library_name) - with open(notice_file) as inf: - contents = inf.read() - if contents.startswith(str(codecs.BOM_UTF8)): - contents = contents[len(codecs.BOM_UTF8):] - tmpf.write(contents) - tmpf.write('\n') - - output_filename = os.path.join(project_dir, - 'Support/ThirdParty/GeneratedThirdPartyNotices.txt') - with open(output_filename, 'w') as outf: - outf.write(tmpf.getvalue()) +""" + ) + for (library_name, notice_file) in iter_notice_files(): + tmpf.write("\n \n=== %s ===\n" % library_name) + with open(notice_file) as inf: + contents = inf.read() + if contents.startswith(str(codecs.BOM_UTF8)): + contents = contents[len(codecs.BOM_UTF8) :] + tmpf.write(contents) + tmpf.write("\n") + + output_filename = os.path.join( + project_dir, "Support/ThirdParty/GeneratedThirdPartyNotices.txt" + ) + with open(output_filename, "w") as outf: + outf.write(tmpf.getvalue()) + # ---------------------------------------------------------------------- # Front-end @@ -636,274 +722,355 @@ def iter_notice_files(): def parse_args(args): - parser = argparse.ArgumentParser(description="Make Open Brush builds") - parser.add_argument('--vrsdk', - action='append', dest='vrsdks', - choices=['Monoscopic', 'Oculus', 'SteamVR'], - help='Can pass multiple times; defaults to SteamVR (or Oculus on Android))') - parser.add_argument('--platform', - action='append', dest='platforms', - choices=['OSX', 'Windows', 'Android', 'iOS'], - help='Can pass multiple times; defaults to Windows') - parser.add_argument('--config', - action='append', dest='configs', - choices=['Debug', 'Release'], - help='Can pass multiple times; defaults to Release. Controls the ability to profile, the ability to debug scripts, and generation of debug symbols.') - parser.add_argument('--experimental', - action='store_true', default=False, - help='Include experimental features in the build') - parser.add_argument('--il2cpp', - action='store_true', default=False, - help='Build using il2cpp as the runtime instead of Mono') - parser.add_argument('--for-distribution', - dest='for_distribution', action='store_true', default=False, - help='Implicitly set when the build is being pushed; use explicitly if you want a signed build but do not want to push it yet') - - # TODO(pld): update docs to talk about Oculus Home? 
- grp = parser.add_argument_group('Pushing to Steam/Oculus') - grp.add_argument('--push', action='store_true', help='Push to Steam/Oculus') - grp.add_argument('--user', type=str, help='(optional) Steam user to authenticate as.') - grp.add_argument('--branch', type=str, help='(optional) Steam branch or Oculus release channel.') - - grp = parser.add_argument_group('Continuous Integration') - grp.add_argument('--jenkins', action='store_true', help='Build with continuous integration settings.') - - args = parser.parse_args(args) - if not args.configs: - args.configs = ['Release'] - - if not args.platforms and not args.vrsdks: - args.platforms = [os.getenv('TILT_BRUSH_BUILD_PLATFORM', 'Windows')] - args.vrsdks = [os.getenv('TILT_BRUSH_BUILD_VRSDK', 'SteamVR')] - elif not args.platforms: - args.platforms = ['Windows'] - elif not args.vrsdks: - if 'Android' in args.platforms: - args.vrsdks = ['Oculus'] - else: - args.vrsdks = ['SteamVR'] - - if args.branch is not None: - args.push = True - - if args.push: - args.for_distribution = True - - return args + parser = argparse.ArgumentParser(description="Make Open Brush builds") + parser.add_argument( + "--vrsdk", + action="append", + dest="vrsdks", + choices=["Monoscopic", "Oculus", "SteamVR"], + help="Can pass multiple times; defaults to SteamVR (or Oculus on Android))", + ) + parser.add_argument( + "--platform", + action="append", + dest="platforms", + choices=["OSX", "Windows", "Android", "iOS"], + help="Can pass multiple times; defaults to Windows", + ) + parser.add_argument( + "--config", + action="append", + dest="configs", + choices=["Debug", "Release"], + help="Can pass multiple times; defaults to Release. Controls the ability to profile, the ability to debug scripts, and generation of debug symbols.", + ) + parser.add_argument( + "--experimental", + action="store_true", + default=False, + help="Include experimental features in the build", + ) + parser.add_argument( + "--il2cpp", + action="store_true", + default=False, + help="Build using il2cpp as the runtime instead of Mono", + ) + parser.add_argument( + "--for-distribution", + dest="for_distribution", + action="store_true", + default=False, + help="Implicitly set when the build is being pushed; use explicitly if you want a signed build but do not want to push it yet", + ) + + # TODO(pld): update docs to talk about Oculus Home? + grp = parser.add_argument_group("Pushing to Steam/Oculus") + grp.add_argument("--push", action="store_true", help="Push to Steam/Oculus") + grp.add_argument( + "--user", type=str, help="(optional) Steam user to authenticate as." + ) + grp.add_argument( + "--branch", type=str, help="(optional) Steam branch or Oculus release channel." 
+    )
+
+    grp = parser.add_argument_group("Continuous Integration")
+    grp.add_argument(
+        "--jenkins",
+        action="store_true",
+        help="Build with continuous integration settings.",
+    )
+
+    args = parser.parse_args(args)
+    if not args.configs:
+        args.configs = ["Release"]
+
+    if not args.platforms and not args.vrsdks:
+        args.platforms = [os.getenv("TILT_BRUSH_BUILD_PLATFORM", "Windows")]
+        args.vrsdks = [os.getenv("TILT_BRUSH_BUILD_VRSDK", "SteamVR")]
+    elif not args.platforms:
+        args.platforms = ["Windows"]
+    elif not args.vrsdks:
+        if "Android" in args.platforms:
+            args.vrsdks = ["Oculus"]
+        else:
+            args.vrsdks = ["SteamVR"]
+
+    if args.branch is not None:
+        args.push = True
+
+    if args.push:
+        args.for_distribution = True
+
+    return args


 def find_project_dir():
-  def search_upwards_from(d):
-    # Search upwards for root of unity project
-    d = os.path.abspath(d)
-    while True:
-      if os.path.exists(os.path.join(d, 'Assets')) and os.path.exists(os.path.join(d, 'ProjectSettings')):
-        return d
-      parent = os.path.dirname(d)
-      if parent == d:
-        return None
-      d = parent
-  return search_upwards_from('.') or search_upwards_from(__file__)
+    def search_upwards_from(d):
+        # Search upwards for root of unity project
+        d = os.path.abspath(d)
+        while True:
+            if os.path.exists(os.path.join(d, "Assets")) and os.path.exists(
+                os.path.join(d, "ProjectSettings")
+            ):
+                return d
+            parent = os.path.dirname(d)
+            if parent == d:
+                return None
+            d = parent
+
+    return search_upwards_from(".") or search_upwards_from(__file__)


 def iter_builds(args):
-  """Yields (platform, vrsdk, config) tuples."""
-  for platform in args.platforms:
-    for vrsdk in args.vrsdks:
-      for config in args.configs:
-        yield (platform, vrsdk, config)
+    """Yields (platform, vrsdk, config) tuples."""
+    for platform in args.platforms:
+        for vrsdk in args.vrsdks:
+            for config in args.configs:
+                yield (platform, vrsdk, config)


 def get_android_version_code(project_dir):
-  """Returns the integer AndroidBundleVersionCode, or raises LookupError."""
-  filename = os.path.join(project_dir, 'ProjectSettings/ProjectSettings.asset')
-  contents = open(filename, 'rb').read()
-  m = re.search(r'(?<=AndroidBundleVersionCode: )(?P<code>\d+)', contents)
-  if m is not None:
-    try:
-      return int(m.group('code'))
-    except:  # pylint: disable=bare-except
-      pass
-  raise LookupError('code')
+    """Returns the integer AndroidBundleVersionCode, or raises LookupError."""
+    filename = os.path.join(project_dir, "ProjectSettings/ProjectSettings.asset")
+    contents = open(filename, "rb").read()
+    m = re.search(r"(?<=AndroidBundleVersionCode: )(?P<code>\d+)", contents)
+    if m is not None:
+        try:
+            return int(m.group("code"))
+        except:  # pylint: disable=bare-except
+            pass
+    raise LookupError("code")


 def set_android_version_code(project_dir, code):
-  """If *code* is 'increment', increments the existing code."""
-  filename = os.path.join(project_dir, 'ProjectSettings/ProjectSettings.asset')
-  contents = open(filename, 'rb').read()
-
-  def replacement(match):
-    if code == 'increment':
-      return str(int(match.group('code')) + 1)
-    return str(code)
-  new_contents, n = re.subn(r'(?<=AndroidBundleVersionCode: )(?P<code>\d+)',
-                            replacement, contents)
-  if n < 1:
-    print("WARNING: Failed to set AndroidBundleVersionCode")
-  else:
-    open(filename, 'wb').write(new_contents)
+    """If *code* is 'increment', increments the existing code."""
+    filename = os.path.join(project_dir, "ProjectSettings/ProjectSettings.asset")
+    contents = open(filename, "rb").read()
+
+    def replacement(match):
+        if code == "increment":
+            return str(int(match.group("code")) + 1)
+        return str(code)
+
+    new_contents, n = re.subn(
+        r"(?<=AndroidBundleVersionCode: )(?P<code>\d+)", replacement, contents
+    )
+    if n < 1:
+        print("WARNING: Failed to set AndroidBundleVersionCode")
+    else:
+        open(filename, "wb").write(new_contents)


 def maybe_prompt_and_set_version_code(project_dir):
-  existing_code = get_android_version_code(project_dir)
-  uri = 'https://dashboard.oculus.com/application/%s/build/' % TB_OCULUS_QUEST_APP_ID
-  webbrowser.open(uri)
-  print('Currently building version code %s' % existing_code)
-  print('Please enter the highest version code you see on this web page,')
-  print('or hit enter to skip.')
-  highest_seen = input('Code > ')
-  if highest_seen.strip() == '':
-    return
-  highest_seen = int(highest_seen)
-  if existing_code <= highest_seen:
-    set_android_version_code(project_dir, highest_seen + 1)
-    print('Now building version code %s' % get_android_version_code(project_dir))
+    existing_code = get_android_version_code(project_dir)
+    uri = "https://dashboard.oculus.com/application/%s/build/" % TB_OCULUS_QUEST_APP_ID
+    webbrowser.open(uri)
+    print("Currently building version code %s" % existing_code)
+    print("Please enter the highest version code you see on this web page,")
+    print("or hit enter to skip.")
+    highest_seen = input("Code > ")
+    if highest_seen.strip() == "":
+        return
+    highest_seen = int(highest_seen)
+    if existing_code <= highest_seen:
+        set_android_version_code(project_dir, highest_seen + 1)
+        print("Now building version code %s" % get_android_version_code(project_dir))


 def sanity_check_build(build_dir):
-  # We've had issues with Unity dying(?) or exiting(?)
before emitting an exe + exes = [] + for pat in ("*.app", "*.exe", "*.apk"): + exes.extend(glob.glob(os.path.join(build_dir, pat))) + if len(exes) == 0: + raise BuildFailed("Cannot find any executables in %s" % build_dir) + + +def main( + args=None, +): # pylint: disable=too-many-statements,too-many-branches,too-many-locals + unitybuild.utils.msys_control_c_workaround() + + if sys.platform == "cygwin": + raise UserError("Running under cygwin python is not supported.") + args = parse_args(args) + + if args.push: + num = len(args.platforms) * len(args.vrsdks) * len(args.configs) + if num != 1: + raise UserError("Must specify exactly one build to push") + + vcs = vcs_create() + project_dir = find_project_dir() + print("Project dir:", os.path.normpath(project_dir)) + + if args.jenkins: + # Jenkins does not allow building outside of the source tree. + build_dir = os.path.normpath(os.path.join(project_dir, "Builds")) + else: + # Local build setup. + build_dir = os.path.normpath(os.path.join(project_dir, "..", "Builds")) + + # TODO(pld): maybe faster to call CommandLine() multiple times in the same + # Unity rather than to start up Unity multiple times. OTOH it requires faith + # in Unity's stability. try: - revision = vcs.get_build_stamp(project_dir) - except LookupError as e: - print('WARN: no build stamp (%s). Continue?' % (e,)) - if not input('(y/n) > ').strip().lower().startswith('y'): - raise UserError('Aborting: no stamp') from e - revision = 'nostamp' - - create_notice_file(project_dir) - - for (platform, vrsdk, config) in iter_builds(args): - stamp = revision + ('-exp' if args.experimental else '') - print("Building %s %s %s exp:%d signed:%d il2cpp:%d" % ( - platform, vrsdk, config, args.experimental, args.for_distribution, args.il2cpp)) - - sdk = vrsdk - if sdk == "Oculus" and platform == "Android": - sdk = "OculusMobile" - dirname = "%s_%s_%s%s%s%s%s_FromCli" % ( - sdk, - "Release", - EXE_BASE_NAME, - "_Experimental" if args.experimental else "", - "_Il2cpp" if args.il2cpp else "", - "", # GuiAutoProfile - "_signed" if args.for_distribution and platform != "Windows" else "" - ) - - tmp_dir = os.path.join(build_dir, 'tmp_' + dirname) - output_dir = os.path.join(build_dir, dirname) - - if args.for_distribution and platform == 'Android' and sys.stdin.isatty(): + tmp_dir = None try: - maybe_prompt_and_set_version_code(project_dir) - except Exception as e: # pylint: disable=broad-except - print('Error prompting for version code: %s' % e) - - tmp_dir = build(stamp, tmp_dir, project_dir, EXE_BASE_NAME, - experimental=args.experimental, - platform=platform, - il2cpp=args.il2cpp, vrsdk=vrsdk, config=config, - for_distribution=args.for_distribution, - is_jenkins=args.jenkins) - output_dir = finalize_build(tmp_dir, output_dir) - sanity_check_build(output_dir) - - if args.for_distribution and platform == 'Android': - set_android_version_code(project_dir, 'increment') - - if args.for_distribution and vrsdk == 'Oculus': - # .pdb files violate VRC.PC.Security.3 and ovr-platform-utils rejects the submission - to_remove = [] - for (r, _, fs) in os.walk(output_dir): - for f in fs: - if f.endswith('.pdb'): - to_remove.append(os.path.join(r, f)) - if to_remove: - print('Removing from submission:\n%s' % ('\n'.join( - os.path.relpath(f, output_dir) for f in to_remove))) - list(map(os.unlink, to_remove)) - - if platform == 'iOS': - # TODO: for iOS, invoke xcode to create ipa. 
E.g.: - # $ cd tmp_dir/TiltBrush - # $ xcodebuild -scheme Unity-iPhone archive -archivePath ARCHIVE_DIR - # $ xcodebuild -exportArchive -exportFormat ipa -archivePath ARCHIVE_DIR -exportPath IPA - print('iOS build must be completed from Xcode (%s)' % ( - os.path.join(output_dir, EXE_BASE_NAME, 'Unity-iPhone.xcodeproj'))) - continue - - if args.push: - with open(os.path.join(output_dir, 'build_stamp.txt')) as inf: - embedded_stamp = inf.read().strip() - description = '%s %s | %s@%s' % ( - config, embedded_stamp, getpass.getuser(), platform_node()) - if args.branch is not None: - description += ' to %s' % args.branch - - if vrsdk == 'SteamVR': - if platform not in ('Windows',): - raise BuildFailed("Unsupported platform for push to Steam: %s" % platform) - unitybuild.push.push_open_brush_to_steam( - output_dir, description, args.user or 'tiltbrush_build', args.branch) - elif vrsdk == 'Oculus': - if platform not in ('Windows', 'Android'): - raise BuildFailed("Unsupported platform for push to Oculus: %s" % platform) - release_channel = args.branch - if release_channel is None: - release_channel = 'ALPHA' - print(("No release channel specified for Oculus: using %s" % release_channel)) - unitybuild.push.push_open_brush_to_oculus(output_dir, release_channel, description) - except BadVersionCode as e: - if isinstance(e, BadVersionCode): - set_android_version_code(project_dir, e.desired_version_code) - print(("\n\nVersion code has been auto-updated to %s.\nPlease retry your build." % - e.desired_version_code)) - if tmp_dir: - print("\nSee %s" % os.path.join(tmp_dir, 'build_log.txt')) - sys.exit(1) - except Error as e: - print("\n%s: %s" % ('ERROR', e)) - if tmp_dir: - print("\nSee %s" % os.path.join(tmp_dir, 'build_log.txt')) - sys.exit(1) - except KeyboardInterrupt: - print("Aborted.") - sys.exit(2) + revision = vcs.get_build_stamp(project_dir) + except LookupError as e: + print("WARN: no build stamp (%s). Continue?" 
% (e,)) + if not input("(y/n) > ").strip().lower().startswith("y"): + raise UserError("Aborting: no stamp") from e + revision = "nostamp" + + create_notice_file(project_dir) + + for (platform, vrsdk, config) in iter_builds(args): + stamp = revision + ("-exp" if args.experimental else "") + print( + "Building %s %s %s exp:%d signed:%d il2cpp:%d" + % ( + platform, + vrsdk, + config, + args.experimental, + args.for_distribution, + args.il2cpp, + ) + ) + + sdk = vrsdk + if sdk == "Oculus" and platform == "Android": + sdk = "OculusMobile" + dirname = "%s_%s_%s%s%s%s%s_FromCli" % ( + sdk, + "Release", + EXE_BASE_NAME, + "_Experimental" if args.experimental else "", + "_Il2cpp" if args.il2cpp else "", + "", # GuiAutoProfile + "_signed" if args.for_distribution and platform != "Windows" else "", + ) + + tmp_dir = os.path.join(build_dir, "tmp_" + dirname) + output_dir = os.path.join(build_dir, dirname) + + if args.for_distribution and platform == "Android" and sys.stdin.isatty(): + try: + maybe_prompt_and_set_version_code(project_dir) + except Exception as e: # pylint: disable=broad-except + print("Error prompting for version code: %s" % e) + + tmp_dir = build( + stamp, + tmp_dir, + project_dir, + EXE_BASE_NAME, + experimental=args.experimental, + platform=platform, + il2cpp=args.il2cpp, + vrsdk=vrsdk, + config=config, + for_distribution=args.for_distribution, + is_jenkins=args.jenkins, + ) + output_dir = finalize_build(tmp_dir, output_dir) + sanity_check_build(output_dir) + + if args.for_distribution and platform == "Android": + set_android_version_code(project_dir, "increment") + + if args.for_distribution and vrsdk == "Oculus": + # .pdb files violate VRC.PC.Security.3 and ovr-platform-utils rejects the submission + to_remove = [] + for (r, _, fs) in os.walk(output_dir): + for f in fs: + if f.endswith(".pdb"): + to_remove.append(os.path.join(r, f)) + if to_remove: + print( + "Removing from submission:\n%s" + % ("\n".join(os.path.relpath(f, output_dir) for f in to_remove)) + ) + list(map(os.unlink, to_remove)) + + if platform == "iOS": + # TODO: for iOS, invoke xcode to create ipa. 
E.g.: + # $ cd tmp_dir/TiltBrush + # $ xcodebuild -scheme Unity-iPhone archive -archivePath ARCHIVE_DIR + # $ xcodebuild -exportArchive -exportFormat ipa -archivePath ARCHIVE_DIR -exportPath IPA + print( + "iOS build must be completed from Xcode (%s)" + % ( + os.path.join( + output_dir, EXE_BASE_NAME, "Unity-iPhone.xcodeproj" + ) + ) + ) + continue + + if args.push: + with open(os.path.join(output_dir, "build_stamp.txt")) as inf: + embedded_stamp = inf.read().strip() + description = "%s %s | %s@%s" % ( + config, + embedded_stamp, + getpass.getuser(), + platform_node(), + ) + if args.branch is not None: + description += " to %s" % args.branch + + if vrsdk == "SteamVR": + if platform not in ("Windows",): + raise BuildFailed( + "Unsupported platform for push to Steam: %s" % platform + ) + unitybuild.push.push_open_brush_to_steam( + output_dir, + description, + args.user or "tiltbrush_build", + args.branch, + ) + elif vrsdk == "Oculus": + if platform not in ("Windows", "Android"): + raise BuildFailed( + "Unsupported platform for push to Oculus: %s" % platform + ) + release_channel = args.branch + if release_channel is None: + release_channel = "ALPHA" + print( + ( + "No release channel specified for Oculus: using %s" + % release_channel + ) + ) + unitybuild.push.push_open_brush_to_oculus( + output_dir, release_channel, description + ) + except BadVersionCode as e: + if isinstance(e, BadVersionCode): + set_android_version_code(project_dir, e.desired_version_code) + print( + ( + "\n\nVersion code has been auto-updated to %s.\nPlease retry your build." + % e.desired_version_code + ) + ) + if tmp_dir: + print("\nSee %s" % os.path.join(tmp_dir, "build_log.txt")) + sys.exit(1) + except Error as e: + print("\n%s: %s" % ("ERROR", e)) + if tmp_dir: + print("\nSee %s" % os.path.join(tmp_dir, "build_log.txt")) + sys.exit(1) + except KeyboardInterrupt: + print("Aborted.") + sys.exit(2) # Tests @@ -912,18 +1079,27 @@ def main(args=None): # pylint: disable=too-many-statements,too-many-branches,to # pylint: disable=all # flake8: noqa def test_get_unity_exe(): - global iter_editors_and_versions - def iter_editors_and_versions(): - return [("Unity_%s.exe" % s, tuple(s.split('.'))) for s in [ - "2017.1.2", "2017.1.3", "2017.4.3", "2017.4.10", "2017.4.9"]] - assert get_unity_exe(('2017', '4', '8'), True) == 'Unity_2017.4.10.exe' - try: get_unity_exe(('2017', '4', '8'), False) - except BuildFailed as e: pass - else: assert False # must raise + global iter_editors_and_versions + + def iter_editors_and_versions(): + return [ + ("Unity_%s.exe" % s, tuple(s.split("."))) + for s in ["2017.1.2", "2017.1.3", "2017.4.3", "2017.4.10", "2017.4.9"] + ] + + assert get_unity_exe(("2017", "4", "8"), True) == "Unity_2017.4.10.exe" + try: + get_unity_exe(("2017", "4", "8"), False) + except BuildFailed as e: + pass + else: + assert False # must raise + def test_iter_editors(): - for tup in iter_editors_and_versions(): - print(tup) + for tup in iter_editors_and_versions(): + print(tup) + -if __name__ == '__main__': - maybe_prompt_and_set_version_code(os.getcwd()) +if __name__ == "__main__": + maybe_prompt_and_set_version_code(os.getcwd()) diff --git a/Support/Python/unitybuild/push.py b/Support/Python/unitybuild/push.py index 2d7949ee67..a5eceeb8b2 100755 --- a/Support/Python/unitybuild/push.py +++ b/Support/Python/unitybuild/push.py @@ -27,152 +27,160 @@ from subprocess import Popen, PIPE, STDOUT from unitybuild.constants import UserError, BuildFailed, BadVersionCode -from unitybuild.credentials import get_credential, 
TB_OCULUS_RIFT_APP_ID, TB_OCULUS_QUEST_APP_ID +from unitybuild.credentials import ( + get_credential, + TB_OCULUS_RIFT_APP_ID, + TB_OCULUS_QUEST_APP_ID, +) class ExpansionError(LookupError): - pass + pass def steamcmd(*args): - args = list(args) - if len(args) == 1: - args = args[0].split() + args = list(args) + if len(args) == 1: + args = args[0].split() - # SteamCmd has this behavior where it treats relative file paths - # as relative to the executable itself, rather than to os.getcwd(). - for arg in args: - if os.path.exists(arg) and not os.path.isabs(arg): - assert False, "steamcmd needs absolute paths" + # SteamCmd has this behavior where it treats relative file paths + # as relative to the executable itself, rather than to os.getcwd(). + for arg in args: + if os.path.exists(arg) and not os.path.isabs(arg): + assert False, "steamcmd needs absolute paths" - args.insert(0, 'steamcmd') - proc = Popen(args, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr) - if proc.wait() != 0: - raise BuildFailed("SteamCmd failed with code %s" % proc.wait()) + args.insert(0, "steamcmd") + proc = Popen(args, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr) + if proc.wait() != 0: + raise BuildFailed("SteamCmd failed with code %s" % proc.wait()) def get_builds_dir(): - # Look upwards for the Builds folder. Can fail. - cur = os.path.abspath(os.path.dirname(__file__)) - while True: - if os.path.exists(os.path.join(cur, 'Builds')): - return os.path.join(cur, 'Builds') - parent = os.path.dirname(cur) - if parent == cur: - raise BuildFailed("Cannot find Builds folder; specify one explicitly with --source DIR") - cur = parent + # Look upwards for the Builds folder. Can fail. + cur = os.path.abspath(os.path.dirname(__file__)) + while True: + if os.path.exists(os.path.join(cur, "Builds")): + return os.path.join(cur, "Builds") + parent = os.path.dirname(cur) + if parent == cur: + raise BuildFailed( + "Cannot find Builds folder; specify one explicitly with --source DIR" + ) + cur = parent def get_support_dir(): - # Look upwards for the Support folder. Can fail. - cur_dir = os.path.abspath(__file__) - start_dir = cur_dir - while True: - parent, name = os.path.split(cur_dir) - if parent == cur_dir: - raise LookupError("Can't find Support/ directory from %s" % start_dir) - if name.lower() == 'support': - return cur_dir - cur_dir = parent + # Look upwards for the Support folder. Can fail. + cur_dir = os.path.abspath(__file__) + start_dir = cur_dir + while True: + parent, name = os.path.split(cur_dir) + if parent == cur_dir: + raise LookupError("Can't find Support/ directory from %s" % start_dir) + if name.lower() == "support": + return cur_dir + cur_dir = parent def get_tmp_steam_dir(): - # Return an existing directory that we can write temp files into. - # The directory does not have to be persistent, but ideally it will be; - # it speeds up the content upload process. - ret = os.path.join(get_support_dir(), 'tmp_steam') - if not os.path.isdir(ret): - os.makedirs(ret) - return ret + # Return an existing directory that we can write temp files into. + # The directory does not have to be persistent, but ideally it will be; + # it speeds up the content upload process. 
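For context on how the steamcmd() wrapper above is driven: it prepends the steamcmd executable, splits a single string argument on whitespace, rejects relative paths to existing files, and raises BuildFailed on a non-zero exit code. A minimal sketch of the two call shapes used later in push_open_brush_to_steam; steam_user and app_vdf stand in for the values built there and are placeholders only:

    from unitybuild.push import steamcmd

    steam_user = "openbrush_build"          # same name as the --user default below
    app_vdf = "/abs/path/to/app_build.vdf"  # must be absolute; the wrapper asserts on relative paths

    steamcmd("+exit")  # cheap probe that steamcmd is installed at all
    steamcmd("+login", steam_user, "+run_app_build", app_vdf, "+quit")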
+ ret = os.path.join(get_support_dir(), "tmp_steam") + if not os.path.isdir(ret): + os.makedirs(ret) + return ret def get_build_stamp(directory): - filename = os.path.join(directory, 'build_stamp.txt') - try: - with open(filename, 'rb') as inf: - return inf.read().strip() - except IOError as e: - print("WARN: Build stamp not found with this build.") - print("Supply one manually, or leave empty to abort this push.") - stamp = input("Stamp: ").strip() - if not stamp: - raise BuildFailed("Aborted: no build stamp") from e - - -VDF_ESCAPES = { - '\r': '_', - '\n': '\\n', - '\t': '\\t', - '\\': '\\\\', - '"': '\\"' -} + filename = os.path.join(directory, "build_stamp.txt") + try: + with open(filename, "rb") as inf: + return inf.read().strip() + except IOError as e: + print("WARN: Build stamp not found with this build.") + print("Supply one manually, or leave empty to abort this push.") + stamp = input("Stamp: ").strip() + if not stamp: + raise BuildFailed("Aborted: no build stamp") from e + return stamp + + +VDF_ESCAPES = {"\r": "_", "\n": "\\n", "\t": "\\t", "\\": "\\\\", '"': '\\"'} def vdf_quote(txt): - # See https://developer.valvesoftware.com/wiki/KeyValues - def quote_char(match): - return VDF_ESCAPES[match.group(0)] - txt = re.sub(r'[\r\n\t\\\"]', quote_char, txt) - return '"%s"' % txt + # See https://developer.valvesoftware.com/wiki/KeyValues + def quote_char(match): + return VDF_ESCAPES[match.group(0)] + + txt = re.sub(r"[\r\n\t\\\"]", quote_char, txt) + return '"%s"' % txt def expand_vdf_template(input_text, variables): - """Expand variable references in input_text, ensuring that the expansion - is a single vdf token.""" - def expand_and_quote(match): - try: - expansion = variables[match.group(1)] - except KeyError as e: - raise ExpansionError("unknown variable %s" % (match.group(0),)) from e - return vdf_quote(expansion) + """Expand variable references in input_text, ensuring that the expansion + is a single vdf token.""" + + def expand_and_quote(match): + try: + expansion = variables[match.group(1)] + except KeyError as e: + raise ExpansionError("unknown variable %s" % (match.group(0),)) from e + return vdf_quote(expansion) - return re.sub(r'\$\{(.*?)\}', expand_and_quote, input_text) + return re.sub(r"\$\{(.*?)\}", expand_and_quote, input_text) def create_from_template(input_file, variables, tmp_dir): - # Returns the name of an output file - with open(input_file, 'rb') as inf: - data = inf.read() + # Returns the name of an output file + with open(input_file, "rb") as inf: + data = inf.read() - try: - expanded = expand_vdf_template(data, variables) - except ExpansionError as e: - raise BuildFailed("%s: %s" % (input_file, e)) from e + try: + expanded = expand_vdf_template(data, variables) + except ExpansionError as e: + raise BuildFailed("%s: %s" % (input_file, e)) from e - output_file = os.path.join(tmp_dir, os.path.basename(input_file).replace('_template', '')) - with open(output_file, 'wb') as outf: - outf.write(expanded) - return output_file + output_file = os.path.join( + tmp_dir, os.path.basename(input_file).replace("_template", "") + ) + with open(output_file, "wb") as outf: + outf.write(expanded) + return output_file def push_open_brush_to_steam(source_dir, description, steam_user, steam_branch=None): - try: - steamcmd('+exit') - except subprocess.CalledProcessError as e: - raise BuildFailed("You don't seem to have steamcmd installed") from e - - support_dir = get_support_dir() - tmp_steam_dir = get_tmp_steam_dir() - - variables = { - 'DESC': description, - 'TMP_STEAM': 
tmp_steam_dir, - 'CONTENT_ROOT': os.path.abspath(source_dir).replace('\\', '/'), - 'STEAM_BRANCH': '' if steam_branch is None else steam_branch, - } - # This file has no variables that need expanding, but steamcmd.exe - # mutates it to add a digital signature so we should copy it off to a temp file. - variables['INSTALLSCRIPT_WIN'] = create_from_template( - os.path.join(support_dir, 'steam/installscript_win.vdf'), {}, tmp_steam_dir) - variables['MAIN_DEPOT_VDF'] = create_from_template( - os.path.join(support_dir, 'steam/main_depot_template.vdf'), variables, tmp_steam_dir) - app_vdf = create_from_template( - os.path.join(support_dir, 'steam/app_template.vdf'), variables, tmp_steam_dir) - - print("Pushing %s to Steam" % (variables['CONTENT_ROOT'], )) - steamcmd('+login', steam_user, - '+run_app_build', app_vdf, - '+quit') + try: + steamcmd("+exit") + except subprocess.CalledProcessError as e: + raise BuildFailed("You don't seem to have steamcmd installed") from e + + support_dir = get_support_dir() + tmp_steam_dir = get_tmp_steam_dir() + + variables = { + "DESC": description, + "TMP_STEAM": tmp_steam_dir, + "CONTENT_ROOT": os.path.abspath(source_dir).replace("\\", "/"), + "STEAM_BRANCH": "" if steam_branch is None else steam_branch, + } + # This file has no variables that need expanding, but steamcmd.exe + # mutates it to add a digital signature so we should copy it off to a temp file. + variables["INSTALLSCRIPT_WIN"] = create_from_template( + os.path.join(support_dir, "steam/installscript_win.vdf"), {}, tmp_steam_dir + ) + variables["MAIN_DEPOT_VDF"] = create_from_template( + os.path.join(support_dir, "steam/main_depot_template.vdf"), + variables, + tmp_steam_dir, + ) + app_vdf = create_from_template( + os.path.join(support_dir, "steam/app_template.vdf"), variables, tmp_steam_dir + ) + + print("Pushing %s to Steam" % (variables["CONTENT_ROOT"],)) + steamcmd("+login", steam_user, "+run_app_build", app_vdf, "+quit") # ---------------------------------------------------------------------- @@ -180,200 +188,240 @@ def push_open_brush_to_steam(source_dir, description, steam_user, steam_branch=N # ---------------------------------------------------------------------- OCULUS_RIFT_REDISTS = [ - '1675031999409058', # Visual C++ 2013 - '1183534128364060', # Visual C++ 2015 + "1675031999409058", # Visual C++ 2013 + "1183534128364060", # Visual C++ 2015 ] def quote_oculus_release_notes(txt): - return txt.replace('\r', '').replace('\n', '\\n') + return txt.replace("\r", "").replace("\n", "\\n") def get_oculus_openbrush_exe(directory): - files = os.listdir(directory) - # Might be named OpenBrush_oculus.exe? - files = [f for f in files if f.endswith('.exe') and f.lower().startswith('openbrush')] - if len(files) == 0: - raise BuildFailed("Can't find launch executable") - if len(files) == 1: - return files[0] - raise BuildFailed("Ambiguous launch executable: %s" % (files,)) + files = os.listdir(directory) + # Might be named OpenBrush_oculus.exe? 
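The VDF templating helpers above are easiest to see with a concrete value. A small sketch of what expand_vdf_template() produces (the DESC value here is made up); newlines, tabs, backslashes and double quotes are escaped, and carriage returns are flattened to underscores, so the expansion stays a single VDF token:

    from unitybuild.push import expand_vdf_template

    variables = {"DESC": 'Release build | alice@host\n"nightly"'}
    out = expand_vdf_template('"desc" ${DESC}', variables)
    # out == '"desc" "Release build | alice@host\\n\\"nightly\\""'
    # An unknown ${NAME} raises ExpansionError, which create_from_template
    # turns into BuildFailed with the offending template filename.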
+ files = [ + f for f in files if f.endswith(".exe") and f.lower().startswith("openbrush") + ] + if len(files) == 0: + raise BuildFailed("Can't find launch executable") + if len(files) == 1: + return files[0] + raise BuildFailed("Ambiguous launch executable: %s" % (files,)) def unbuffered_reads(inf): - """Yields unbuffered reads from file, until eof.""" - while True: - data = inf.read(1) - if data == '': - break - yield data + """Yields unbuffered reads from file, until eof.""" + while True: + data = inf.read(1) + if data == "": + break + yield data def group_into_lines(iterable): - """Yields complete lines (lines terminated with \\r and/or \\n).""" - current = [] - terminator = re.compile(r'^([^\r\n]*[\r\n]+)(.*)$', re.MULTILINE) - for data in iterable: - # Deal with any line terminators in the data - # print "IN %r" % data - while True: - m = terminator.match(data) - if m is None: - break + """Yields complete lines (lines terminated with \\r and/or \\n).""" + current = [] + terminator = re.compile(r"^([^\r\n]*[\r\n]+)(.*)$", re.MULTILINE) + for data in iterable: + # Deal with any line terminators in the data + # print "IN %r" % data + while True: + m = terminator.match(data) + if m is None: + break - eol, data = m.groups() - # print " match %r %r" % (eol, data) - current.append(eol) - yield ''.join(current) - current = [] + eol, data = m.groups() + # print " match %r %r" % (eol, data) + current.append(eol) + yield "".join(current) + current = [] - current.append(data) + current.append(data) - yield ''.join(current) + yield "".join(current) def get_oculus_build_type(build_path): - files = os.listdir(build_path) - if any(f.endswith('.apk') for f in files): - return 'quest' - if any(f.endswith('.exe') for f in files): - return 'rift' - raise BuildFailed("Don't know what kind of build is in %s" % build_path) + files = os.listdir(build_path) + if any(f.endswith(".apk") for f in files): + return "quest" + if any(f.endswith(".exe") for f in files): + return "rift" + raise BuildFailed("Don't know what kind of build is in %s" % build_path) def get_secret(env_var_name, credential_name): - """Look in environment and in credentials to fetch a secret.""" - # TODO(pld): env-var path currently unused, since we don't push from Jenkins - if env_var_name in os.environ: - return os.environ[env_var_name] - return get_credential(credential_name).get_secret() - - -def push_open_brush_to_oculus(build_path, release_channel, release_notes): # pylint: disable=too-many-statements,too-many-branches,too-many-locals - assert os.path.isabs(build_path) - assert os.path.exists(build_path) - assert release_channel is not None, "You must pass a release channel to push to Oculus Home" - - # TEMP: yucky code to figure out if rift or quest - build_type = get_oculus_build_type(build_path) - if build_type == 'rift': - app_id = TB_OCULUS_RIFT_APP_ID - args = [ - 'ovr-platform-util', - 'upload-rift-build', - '--app_id', app_id, - '--build_dir', build_path, - '--app_secret', get_secret('APP_SECRET_FOR_' + app_id, app_id), - '--channel', release_channel, - '--version', get_build_stamp(build_path), - '--notes', quote_oculus_release_notes(release_notes), - '--launch_file', get_oculus_openbrush_exe(build_path), - '--redistributables', ','.join(OCULUS_RIFT_REDISTS), - '--firewall_exceptions', 'true' - ] - elif build_type == 'quest': - apks = glob.glob(build_path + '/*.apk') - if len(apks) != 1: - raise BuildFailed("No or too many APKs in %s: %s" % (build_path, apks)) - apk = apks[0] - # This requires a recent build of 
ovr-platform-util
-    app_id = TB_OCULUS_QUEST_APP_ID
-    args = [
-      'ovr-platform-util',
-      'upload-quest-build',
-      '--app_id', app_id,
-      '--app_secret', get_secret('APP_SECRET_FOR_' + app_id, app_id),
-      '--apk', apk,
-      # --assets-dir
-      # --assets-file-iap-configs-file
-      # --obb
-      '--channel', release_channel,
-      '--notes', quote_oculus_release_notes(release_notes),
-    ]
-  else:
-    raise BuildFailed("Internal error: %s" % build_type)
-
-  try:
-    proc = Popen(args, stdin=PIPE, stdout=PIPE, stderr=STDOUT)
-    proc.stdin.close()
-  except OSError as e:
-    # Probably "cannot find the file specified"
-    if 'cannot find the file' in str(e):
-      raise BuildFailed("You don't seem to have ovr-platform-util installed.\nDownload it at https://dashboard.oculus.com/tools/cli") from e
-    raise
-  except subprocess.CalledProcessError as e:
-    raise BuildFailed("ovr-platform-util failed: %s" % e) from e
-
-  saw_output = False
-  desired_version_code = None
-  for line in group_into_lines(unbuffered_reads(proc.stdout)):
-    if line.strip():
-      saw_output = True
-      sys.stdout.write(line)
-      # The request will be retried indefinitely, so stall it out
-      if 'error occurred. The request will be retried.' in line:
-        print()
-        get_credential(app_id).delete_secret()
-        # Maybe the secret changed; ask user to re-enter it
-        raise BuildFailed("Your App Secret might be incorrect. Try again.")
-      m = re.search(r'higher version code has previously been uploaded .code: (?P<code>\d+)', line)
-      if m is not None:
-        desired_version_code = int(m.group('code')) + 1
-
-      # Example error text:
-      # * An APK has already been uploaded with version code 59. Please update the application manifest's version code to 64 or higher and try again.
-      m = re.search(r'version code to (?P<code>\d+) or higher and try again', line)
-      if m is not None:
-        desired_version_code = int(m.group('code'))
-
-  if proc.wait() != 0:
-    message = 'ovr-platform-util failed with code %s' % proc.wait()
-    if desired_version_code is not None:
-      raise BadVersionCode(message, desired_version_code)
-    raise BuildFailed(message)
-  if not saw_output:
-    raise BuildFailed('ovr-platform-util seemed to do nothing.\nYou probably need a newer version.\nDownload it at https://dashboard.oculus.com/tools/cli')
+    """Look in environment and in credentials to fetch a secret."""
+    # TODO(pld): env-var path currently unused, since we don't push from Jenkins
+    if env_var_name in os.environ:
+        return os.environ[env_var_name]
+    return get_credential(credential_name).get_secret()
+
+
+def push_open_brush_to_oculus(
+    build_path, release_channel, release_notes
+):  # pylint: disable=too-many-statements,too-many-branches,too-many-locals
+    assert os.path.isabs(build_path)
+    assert os.path.exists(build_path)
+    assert (
+        release_channel is not None
+    ), "You must pass a release channel to push to Oculus Home"
+
+    # TEMP: yucky code to figure out if rift or quest
+    build_type = get_oculus_build_type(build_path)
+    if build_type == "rift":
+        app_id = TB_OCULUS_RIFT_APP_ID
+        args = [
+            "ovr-platform-util",
+            "upload-rift-build",
+            "--app_id",
+            app_id,
+            "--build_dir",
+            build_path,
+            "--app_secret",
+            get_secret("APP_SECRET_FOR_" + app_id, app_id),
+            "--channel",
+            release_channel,
+            "--version",
+            get_build_stamp(build_path),
+            "--notes",
+            quote_oculus_release_notes(release_notes),
+            "--launch_file",
+            get_oculus_openbrush_exe(build_path),
+            "--redistributables",
+            ",".join(OCULUS_RIFT_REDISTS),
+            "--firewall_exceptions",
+            "true",
+        ]
+    elif build_type == "quest":
+        apks = glob.glob(build_path + "/*.apk")
+        if len(apks) != 1:
+            raise BuildFailed("No or too many APKs in %s: %s" % (build_path, apks))
+        apk = apks[0]
+        # This requires a recent build of ovr-platform-util
+        app_id = TB_OCULUS_QUEST_APP_ID
+        args = [
+            "ovr-platform-util",
+            "upload-quest-build",
+            "--app_id",
+            app_id,
+            "--app_secret",
+            get_secret("APP_SECRET_FOR_" + app_id, app_id),
+            "--apk",
+            apk,
+            # --assets-dir
+            # --assets-file-iap-configs-file
+            # --obb
+            "--channel",
+            release_channel,
+            "--notes",
+            quote_oculus_release_notes(release_notes),
+        ]
+    else:
+        raise BuildFailed("Internal error: %s" % build_type)
+
+    try:
+        proc = Popen(args, stdin=PIPE, stdout=PIPE, stderr=STDOUT)
+        proc.stdin.close()
+    except OSError as e:
+        # Probably "cannot find the file specified"
+        if "cannot find the file" in str(e):
+            raise BuildFailed(
+                "You don't seem to have ovr-platform-util installed.\nDownload it at https://dashboard.oculus.com/tools/cli"
+            ) from e
+        raise
+    except subprocess.CalledProcessError as e:
+        raise BuildFailed("ovr-platform-util failed: %s" % e) from e
+
+    saw_output = False
+    desired_version_code = None
+    for line in group_into_lines(unbuffered_reads(proc.stdout)):
+        if line.strip():
+            saw_output = True
+            sys.stdout.write(line)
+            # The request will be retried indefinitely, so stall it out
+            if "error occurred. The request will be retried." in line:
+                print()
+                get_credential(app_id).delete_secret()
+                # Maybe the secret changed; ask user to re-enter it
+                raise BuildFailed("Your App Secret might be incorrect. Try again.")
+            m = re.search(
+                r"higher version code has previously been uploaded .code: (?P<code>\d+)",
+                line,
+            )
+            if m is not None:
+                desired_version_code = int(m.group("code")) + 1
+
+            # Example error text:
+            # * An APK has already been uploaded with version code 59. Please update the application manifest's version code to 64 or higher and try again.
+            m = re.search(r"version code to (?P<code>\d+) or higher and try again", line)
+            if m is not None:
+                desired_version_code = int(m.group("code"))
+
+    if proc.wait() != 0:
+        message = "ovr-platform-util failed with code %s" % proc.wait()
+        if desired_version_code is not None:
+            raise BadVersionCode(message, desired_version_code)
+        raise BuildFailed(message)
+    if not saw_output:
+        raise BuildFailed(
+            "ovr-platform-util seemed to do nothing.\nYou probably need a newer version.\nDownload it at https://dashboard.oculus.com/tools/cli"
+        )


 # ----------------------------------------------------------------------
 # Command-line use. Deprecated and mostly for testing
 # ----------------------------------------------------------------------

+
 def main(args=None):
-  parser = argparse.ArgumentParser(description="(deprecated) Upload a build directory to Steam or Oculus.")
-
-  parser.add_argument('--user', type=str, default='openbrush_build',
-                      help='(Steam only) User to authenticate as.
(default: %(default)s)') - parser.add_argument('--desc', - help="Optional description of this build.") - parser.add_argument('--branch', - help='Steam Branch or Oculus Release Channel to set live.') - parser.add_argument('--what', required=True, metavar='DIR', - help='Path to a Open Brush build folder') - parser.add_argument('--where', metavar='SERVICE', required=True, choices=['Oculus', 'SteamVR'], - help='Oculus or SteamVR') - - args = parser.parse_args(args) - if args.branch == 'none': - args.branch = '' - - if not os.path.exists(args.what): - raise UserError("%s does not exist" % args.what) - args.what = os.path.abspath(args.what) - - if args.where == 'Oculus': - if args.branch == '' or args.branch is None: - raise UserError("For Oculus, you must specify a --branch") - push_open_brush_to_oculus(args.what, args.branch, "No release notes") - elif args.where == 'SteamVR': - description = 'Manual: %s | %s' % (get_build_stamp(args.what), args.desc) - push_open_brush_to_steam(args.what, description, args.user, steam_branch=None) - else: - raise BuildFailed("Don't know how to push %s" % args.display) - - -if __name__ == '__main__': - try: - main() - except BuildFailed as e: - print("ERROR: %s" % e) + parser = argparse.ArgumentParser( + description="(deprecated) Upload a build directory to Steam or Oculus." + ) + + parser.add_argument( + "--user", + type=str, + default="openbrush_build", + help="(Steam only) User to authenticate as. (default: %(default)s)", + ) + parser.add_argument("--desc", help="Optional description of this build.") + parser.add_argument( + "--branch", help="Steam Branch or Oculus Release Channel to set live." + ) + parser.add_argument( + "--what", required=True, metavar="DIR", help="Path to a Open Brush build folder" + ) + parser.add_argument( + "--where", + metavar="SERVICE", + required=True, + choices=["Oculus", "SteamVR"], + help="Oculus or SteamVR", + ) + + args = parser.parse_args(args) + if args.branch == "none": + args.branch = "" + + if not os.path.exists(args.what): + raise UserError("%s does not exist" % args.what) + args.what = os.path.abspath(args.what) + + if args.where == "Oculus": + if args.branch == "" or args.branch is None: + raise UserError("For Oculus, you must specify a --branch") + push_open_brush_to_oculus(args.what, args.branch, "No release notes") + elif args.where == "SteamVR": + description = "Manual: %s | %s" % (get_build_stamp(args.what), args.desc) + push_open_brush_to_steam(args.what, description, args.user, steam_branch=None) + else: + raise BuildFailed("Don't know how to push %s" % args.display) + + +if __name__ == "__main__": + try: + main() + except BuildFailed as e: + print("ERROR: %s" % e) diff --git a/Support/Python/unitybuild/refgraph.py b/Support/Python/unitybuild/refgraph.py index 28181b0bae..80a5d78a52 100644 --- a/Support/Python/unitybuild/refgraph.py +++ b/Support/Python/unitybuild/refgraph.py @@ -23,9 +23,9 @@ import unitybuild.tb_refgraph as tb -ROOT_GUID = '00001111222233334444555566667777' +ROOT_GUID = "00001111222233334444555566667777" -META_GUID_PAT = re.compile(r'^guid: ([a-f0-9]{32})\s*$', re.M) +META_GUID_PAT = re.compile(r"^guid: ([a-f0-9]{32})\s*$", re.M) CONTAINS_NO_GUIDS = """ fbx obj dae wav txt pdf tga png psd tif jpg jpeg @@ -34,162 +34,164 @@ def _get_guid_from_meta(meta_filename): - with open(meta_filename) as inf: - contents = inf.read() - m = META_GUID_PAT.search(contents) - if m is None: - raise LookupError("No guid in %s" % (meta_filename,)) - return m.group(1) + with open(meta_filename) as inf: + contents = 
inf.read() + m = META_GUID_PAT.search(contents) + if m is None: + raise LookupError("No guid in %s" % (meta_filename,)) + return m.group(1) # 0000000000000000e000000000000000 and 0000000000000000f000000000000000 # are some sort of hardcoded guid? def _iter_guid_names(project): - """Find all file guids and their corresponding filename (without the ".meta") -Yields (guid, filename)""" - chop = len(project) + 1 - for r, _, fs in os.walk(os.path.join(project, 'Assets')): - for f in fs: - if f.endswith('.meta'): - fullf = os.path.join(r, f) - guid = _get_guid_from_meta(fullf) - name = fullf[chop:-5].replace('\\', '/') - yield guid, name - yield ('0000000000000000e000000000000000', '?Unity hardcoded 0e?') - yield ('0000000000000000f000000000000000', '?Unity hardcoded 0f?') + """Find all file guids and their corresponding filename (without the ".meta") + Yields (guid, filename)""" + chop = len(project) + 1 + for r, _, fs in os.walk(os.path.join(project, "Assets")): + for f in fs: + if f.endswith(".meta"): + fullf = os.path.join(r, f) + guid = _get_guid_from_meta(fullf) + name = fullf[chop:-5].replace("\\", "/") + yield guid, name + yield ("0000000000000000e000000000000000", "?Unity hardcoded 0e?") + yield ("0000000000000000f000000000000000", "?Unity hardcoded 0f?") def _iter_refs(project): - """Yields (src_guid, dst_guid)""" - ignore_pat = '|'.join(CONTAINS_NO_GUIDS) - ignore_pat = re.compile(r'(%s)$' % ignore_pat, re.I) - guid_pat = re.compile(r'(? guid - self.guid_to_name = dict(_iter_guid_names(self.project_dir)) - self.g.add_nodes_from(iter(self.guid_to_name.keys())) - self.g.add_edges_from(_iter_refs(self.project_dir)) - - self._recreate_tb_stuff() - - def _recreate_tb_stuff(self): - """Tilt Brush specific refgraph stuff - Adds references from .unity and .cs files to GlobalCommands enum entries. - Also creates a dummy .cs file that can be used to find references to GlobalCommands - enums from within Visual Studio and Rider""" - name_to_guid = dict((n, g) for (g, n) in list(self.guid_to_name.items())) - - for command in tb.iter_command_nodes(self.project_dir): - self.g.add_node(command) - self.guid_to_name[command] = command - - command_edges = list(tb.iter_command_edges(self.project_dir)) - for (file_name, command) in command_edges: - try: - file_guid = name_to_guid[file_name] - except KeyError: - print("Couldn't find %s" % file_name) - else: - self.g.add_edge(file_guid, command) - - tb.create_dummy_cs(self.project_dir, command_edges) - - def _save(self): - tmpf = io.StringIO() - nx.write_gpickle(self.g, tmpf, -1) - pickle.dump(self.guid_to_name, tmpf, -1) - outf_name = os.path.join(self.project_dir, self.PICKLE) - with open(outf_name, 'wb') as outf: - outf.write(tmpf.getvalue()) - - def _finish(self): - """Perform post-load initialization""" - # Add synthetic guid to use as a root node - self.guid_to_name[ROOT_GUID] = 'ROOT' - - self.name_to_guid = {} - # For convenience, also add lowercased-versions - # (but this is incorrect on case-sensitive filesystems) - for (g, n) in self.guid_to_name.items(): - self.name_to_guid[n.lower()] = g - # True capitalization takes precedence - for (g, n) in self.guid_to_name.items(): - self.name_to_guid[n] = g - - # TILT BRUSH SPECIFIC: - # The one dynamic choice is "what .unity do you load at startup?" - # Also link environments to the root, because not all builds include all the envs, - # but we want to mark all of them as roots. 
- n2g = self.name_to_guid - for node_name in [ - 'Assets/Scenes/Main.unity', # Tilt Brush - 'Assets/TiltBrush/Resources/TiltBrushToolkitSettings.asset', # Tilt Brush Toolkit - ]: - if node_name in n2g: - self.g.add_edge(n2g['ROOT'], n2g[node_name]) - - prefab_pat = re.compile(r'^Assets/Resources/EnvironmentPrefabs/.*prefab|^Assets/Scenes', re.I) - for n in n2g: - if prefab_pat.search(n): - self.g.add_edge(n2g['ROOT'], n2g[n]) - - -if __name__ == '__main__': - rg = ReferenceGraph('c:/src/tb') + """Yields (src_guid, dst_guid)""" + ignore_pat = "|".join(CONTAINS_NO_GUIDS) + ignore_pat = re.compile(r"(%s)$" % ignore_pat, re.I) + guid_pat = re.compile(r"(? guid + self.guid_to_name = dict(_iter_guid_names(self.project_dir)) + self.g.add_nodes_from(iter(self.guid_to_name.keys())) + self.g.add_edges_from(_iter_refs(self.project_dir)) + + self._recreate_tb_stuff() + + def _recreate_tb_stuff(self): + """Tilt Brush specific refgraph stuff + Adds references from .unity and .cs files to GlobalCommands enum entries. + Also creates a dummy .cs file that can be used to find references to GlobalCommands + enums from within Visual Studio and Rider""" + name_to_guid = dict((n, g) for (g, n) in list(self.guid_to_name.items())) + + for command in tb.iter_command_nodes(self.project_dir): + self.g.add_node(command) + self.guid_to_name[command] = command + + command_edges = list(tb.iter_command_edges(self.project_dir)) + for (file_name, command) in command_edges: + try: + file_guid = name_to_guid[file_name] + except KeyError: + print("Couldn't find %s" % file_name) + else: + self.g.add_edge(file_guid, command) + + tb.create_dummy_cs(self.project_dir, command_edges) + + def _save(self): + tmpf = io.StringIO() + nx.write_gpickle(self.g, tmpf, -1) + pickle.dump(self.guid_to_name, tmpf, -1) + outf_name = os.path.join(self.project_dir, self.PICKLE) + with open(outf_name, "wb") as outf: + outf.write(tmpf.getvalue()) + + def _finish(self): + """Perform post-load initialization""" + # Add synthetic guid to use as a root node + self.guid_to_name[ROOT_GUID] = "ROOT" + + self.name_to_guid = {} + # For convenience, also add lowercased-versions + # (but this is incorrect on case-sensitive filesystems) + for (g, n) in self.guid_to_name.items(): + self.name_to_guid[n.lower()] = g + # True capitalization takes precedence + for (g, n) in self.guid_to_name.items(): + self.name_to_guid[n] = g + + # TILT BRUSH SPECIFIC: + # The one dynamic choice is "what .unity do you load at startup?" + # Also link environments to the root, because not all builds include all the envs, + # but we want to mark all of them as roots. 
+ n2g = self.name_to_guid + for node_name in [ + "Assets/Scenes/Main.unity", # Tilt Brush + "Assets/TiltBrush/Resources/TiltBrushToolkitSettings.asset", # Tilt Brush Toolkit + ]: + if node_name in n2g: + self.g.add_edge(n2g["ROOT"], n2g[node_name]) + + prefab_pat = re.compile( + r"^Assets/Resources/EnvironmentPrefabs/.*prefab|^Assets/Scenes", re.I + ) + for n in n2g: + if prefab_pat.search(n): + self.g.add_edge(n2g["ROOT"], n2g[n]) + + +if __name__ == "__main__": + rg = ReferenceGraph("c:/src/tb") diff --git a/Support/Python/unitybuild/tb_refgraph.py b/Support/Python/unitybuild/tb_refgraph.py index c4c7b62727..ab7ba6f7b2 100644 --- a/Support/Python/unitybuild/tb_refgraph.py +++ b/Support/Python/unitybuild/tb_refgraph.py @@ -21,91 +21,99 @@ def _get_command_lookup(project_dir): - # Returns a dict that maps stuff to a GlobalCommand name - txt = open(os.path.join(project_dir, 'Assets/Scripts/SketchControlsScript.cs')).read() - pat = re.compile(r'public enum GlobalCommands[^}]+}') - vals = pat.search(txt).group(0).split('\n')[1:-1] - vals = [v.split(',')[0].strip() for v in vals] - vals = [v for v in vals if not v.startswith('//')] - for v in vals: - assert re.match(r'^[a-zA-Z_]+$', v), "Doesn't look like an enum name: %r" % (v,) - to_index = {} - to_name = {} - for (i, v) in enumerate(vals): - for key in (v, v.lower(), i, str(i)): - to_index[key] = i - to_name[key] = v - return to_name # , to_index + # Returns a dict that maps stuff to a GlobalCommand name + txt = open( + os.path.join(project_dir, "Assets/Scripts/SketchControlsScript.cs") + ).read() + pat = re.compile(r"public enum GlobalCommands[^}]+}") + vals = pat.search(txt).group(0).split("\n")[1:-1] + vals = [v.split(",")[0].strip() for v in vals] + vals = [v for v in vals if not v.startswith("//")] + for v in vals: + assert re.match(r"^[a-zA-Z_]+$", v), "Doesn't look like an enum name: %r" % (v,) + to_index = {} + to_name = {} + for (i, v) in enumerate(vals): + for key in (v, v.lower(), i, str(i)): + to_index[key] = i + to_name[key] = v + return to_name # , to_index def _iter_prefab_and_scene(project_dir): - pat = re.compile(r'.*\.(unity|prefab)$') - for (r, _, fs) in os.walk(os.path.join(project_dir, 'Assets')): - for f in fs: - if pat.match(f): - yield os.path.join(r, f) + pat = re.compile(r".*\.(unity|prefab)$") + for (r, _, fs) in os.walk(os.path.join(project_dir, "Assets")): + for f in fs: + if pat.match(f): + yield os.path.join(r, f) def _iter_cs(project_dir): - for (r, _, fs) in os.walk(os.path.join(project_dir, 'Assets')): - for f in fs: - if f.endswith('.cs'): - yield os.path.join(r, f) + for (r, _, fs) in os.walk(os.path.join(project_dir, "Assets")): + for f in fs: + if f.endswith(".cs"): + yield os.path.join(r, f) def iter_command_nodes(project_dir): - """Yields strings like 'GlobalCommands.ToggleWatermark'""" - to_name = _get_command_lookup(project_dir) - for name in set(to_name.values()): - yield 'GlobalCommands.' + name + """Yields strings like 'GlobalCommands.ToggleWatermark'""" + to_name = _get_command_lookup(project_dir) + for name in set(to_name.values()): + yield "GlobalCommands." + name def iter_command_edges(project_dir): - """Yields tuples like ('Assets/Prefabs/MyPrefab.prefab', 'GlobalCommands.ShowTos')""" - to_name = _get_command_lookup(project_dir) + """Yields tuples like ('Assets/Prefabs/MyPrefab.prefab', 'GlobalCommands.ShowTos')""" + to_name = _get_command_lookup(project_dir) - def command_to_node_name(command): - # Returns a canonical node name as it appears in the graph - return 'GlobalCommands.' 
+ to_name[command]
+    def command_to_node_name(command):
+        # Returns a canonical node name as it appears in the graph
+        return "GlobalCommands." + to_name[command]

-  def file_to_node_name(filename):
-    # Returns a canonical node name as it appears in the graph
-    # Relative to the project root, forward slashes, etc
-    return os.path.relpath(filename, project_dir).replace('\\', '/')
+    def file_to_node_name(filename):
+        # Returns a canonical node name as it appears in the graph
+        # Relative to the project root, forward slashes, etc
+        return os.path.relpath(filename, project_dir).replace("\\", "/")

-  # Find references in .prefab, .unity files
-  yaml_pat = re.compile(r'm_(Delayed)?Command: (?P<cmd>\d+)')
-  for path in _iter_prefab_and_scene(project_dir):
-    for match in yaml_pat.finditer(open(path).read()):
-      yield file_to_node_name(path), command_to_node_name(match.group('cmd'))
+    # Find references in .prefab, .unity files
+    yaml_pat = re.compile(r"m_(Delayed)?Command: (?P<cmd>\d+)")
+    for path in _iter_prefab_and_scene(project_dir):
+        for match in yaml_pat.finditer(open(path).read()):
+            yield file_to_node_name(path), command_to_node_name(match.group("cmd"))

-  # Find references in .cs files
-  cs_pat = re.compile(r'GlobalCommands\.[A-Za-z0-9_]+')
-  for path in _iter_cs(project_dir):
-    if path.endswith('DummyCommandRefs.cs'):
-      continue
-    for match in cs_pat.finditer(open(path).read()):
-      yield file_to_node_name(path), match.group(0)
+    # Find references in .cs files
+    cs_pat = re.compile(r"GlobalCommands\.[A-Za-z0-9_]+")
+    for path in _iter_cs(project_dir):
+        if path.endswith("DummyCommandRefs.cs"):
+            continue
+        for match in cs_pat.finditer(open(path).read()):
+            yield file_to_node_name(path), match.group(0)


 def create_dummy_cs(project_dir, command_edges):
-  file_to_commands = defaultdict(set)
-  for src, dst in command_edges:
-    if src.endswith('.cs'):
-      break
-    file_to_commands[src].add(dst)
-  file_to_commands = [(k, sorted(file_to_commands[k]))
-                      for k in sorted(file_to_commands.keys())]
-
-  def as_func(file_and_commands):
-    file_name, commands = file_and_commands
-    func_name = os.path.splitext(file_name)[0][7:]
-    func_name = func_name.replace('/', '_').replace('.', '_')
-    return '  public static void %s() {\n' % func_name + '\n'.join('    Use(%s);' % c for c in commands) + '\n  }\n'
-
-  cs_name = os.path.join(project_dir, 'Assets/Editor/DummyCommandRefs.cs')
-  with open(cs_name, 'w') as outf:
-    outf.write('''// Copyright 2020 The Tilt Brush Authors
+    file_to_commands = defaultdict(set)
+    for src, dst in command_edges:
+        if src.endswith(".cs"):
+            break
+        file_to_commands[src].add(dst)
+    file_to_commands = [
+        (k, sorted(file_to_commands[k])) for k in sorted(file_to_commands.keys())
+    ]
+
+    def as_func(file_and_commands):
+        file_name, commands = file_and_commands
+        func_name = os.path.splitext(file_name)[0][7:]
+        func_name = func_name.replace("/", "_").replace(".", "_")
+        return (
+            "  public static void %s() {\n" % func_name
+            + "\n".join("    Use(%s);" % c for c in commands)
+            + "\n  }\n"
+        )
+
+    cs_name = os.path.join(project_dir, "Assets/Editor/DummyCommandRefs.cs")
+    with open(cs_name, "w") as outf:
+        outf.write(
+            """// Copyright 2020 The Tilt Brush Authors
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -128,10 +136,12 @@ def as_func(file_and_commands): static void Use(GlobalCommands c) { UnityEngine.Debug.Log(c.ToString()); } %s} -}''' % '\n'.join([as_func(fc) for fc in file_to_commands])) +}""" + % "\n".join([as_func(fc) for fc in file_to_commands]) + ) -if __name__ == '__main__': - # This is also done as part of analyze_refgraph.py --recreate - test_project_dir = 'c:/src/tb' - create_dummy_cs(test_project_dir, iter_command_edges(test_project_dir)) +if __name__ == "__main__": + # This is also done as part of analyze_refgraph.py --recreate + test_project_dir = "c:/src/tb" + create_dummy_cs(test_project_dir, iter_command_edges(test_project_dir)) diff --git a/Support/Python/unitybuild/utils.py b/Support/Python/unitybuild/utils.py index 8b95a31e03..f2ec79f7d5 100644 --- a/Support/Python/unitybuild/utils.py +++ b/Support/Python/unitybuild/utils.py @@ -24,107 +24,120 @@ import subprocess from unitybuild.constants import InternalError -if platform.system() == 'Windows': - import win32api # pylint: disable=import-error +if platform.system() == "Windows": + import win32api # pylint: disable=import-error -if os.getenv('MSYSTEM'): - import msvcrt # pylint: disable=import-error - import ctypes - from ctypes.wintypes import HANDLE, DWORD - from _subprocess import WaitForSingleObject, WAIT_OBJECT_0 # pylint: disable=import-error +if os.getenv("MSYSTEM"): + import msvcrt # pylint: disable=import-error + import ctypes + from ctypes.wintypes import HANDLE, DWORD + from _subprocess import ( # pylint: disable=import-error + WaitForSingleObject, + WAIT_OBJECT_0, + ) @contextlib.contextmanager def ensure_terminate(proc): - """Ensure that *proc* is dead upon exiting the block.""" - try: - yield - finally: + """Ensure that *proc* is dead upon exiting the block.""" try: - # Windows raises WindowsError if the process is already dead. - if proc.poll() is None: - proc.terminate() - except Exception as e: # pylint: disable=broad-except - print("WARN: Could not kill process: %s" % (e,)) + yield + finally: + try: + # Windows raises WindowsError if the process is already dead. 
+ if proc.poll() is None: + proc.terminate() + except Exception as e: # pylint: disable=broad-except + print("WARN: Could not kill process: %s" % (e,)) def destroy(file_or_dir): - """Ensure that *file_or_dir* does not exist in the filesystem, - deleting it if necessary.""" - if os.path.isfile(file_or_dir): - os.chmod(file_or_dir, stat.S_IWRITE) - os.unlink(file_or_dir) - elif os.path.isdir(file_or_dir): - for r, ds, fs in os.walk(file_or_dir, topdown=False): - for f in fs: - os.chmod(os.path.join(r, f), stat.S_IWRITE) - os.unlink(os.path.join(r, f)) - for d in ds: - os.rmdir(os.path.join(r, d)) - os.rmdir(file_or_dir) - if os.path.exists(file_or_dir): - raise InternalError("Temp build location '%s' is not empty" % file_or_dir) + """Ensure that *file_or_dir* does not exist in the filesystem, + deleting it if necessary.""" + if os.path.isfile(file_or_dir): + os.chmod(file_or_dir, stat.S_IWRITE) + os.unlink(file_or_dir) + elif os.path.isdir(file_or_dir): + for r, ds, fs in os.walk(file_or_dir, topdown=False): + for f in fs: + os.chmod(os.path.join(r, f), stat.S_IWRITE) + os.unlink(os.path.join(r, f)) + for d in ds: + os.rmdir(os.path.join(r, d)) + os.rmdir(file_or_dir) + if os.path.exists(file_or_dir): + raise InternalError("Temp build location '%s' is not empty" % file_or_dir) def msys_control_c_workaround(): - """Turn off console Ctrl-c support and implement it ourselves.""" - # Used to work around a bug in msys where control-c kills the process - # abruptly ~100ms after the process receives SIGINT. This prevents us - # from running cleanup handlers, like the one that kills the Unity.exe - # subprocess. - if not os.getenv('MSYSTEM'): - return - - kernel32 = ctypes.windll.kernel32 - kernel32.GetStdHandle.restype = HANDLE - kernel32.GetStdHandle.argtypes = (DWORD,) - # kernel32.GetConsoleMode.restype = BOOL - kernel32.GetConsoleMode.argtypes = (HANDLE, ctypes.POINTER(DWORD)) - # kernel32.SetConsoleMode.restype = BOOL - kernel32.SetConsoleMode.argtypes = (HANDLE, DWORD) - STD_INPUT_HANDLE = DWORD(-10) - ENABLE_PROCESSED_INPUT = DWORD(1) - - stdin = kernel32.GetStdHandle(STD_INPUT_HANDLE) - mode = DWORD() - kernel32.GetConsoleMode(stdin, ctypes.byref(mode)) - mode.value = mode.value & ~(ENABLE_PROCESSED_INPUT.value) - kernel32.SetConsoleMode(stdin, mode) - - # interrupt_main won't interrupt WaitForSingleObject, so monkey-patch - def polling_wait(self): - while WaitForSingleObject(self._handle, 3000) != WAIT_OBJECT_0: # pylint: disable=protected-access - continue - return self.poll() - subprocess.Popen.wait = polling_wait - - def look_for_control_c(): - while msvcrt.getch() != '\x03': - continue - _thread.interrupt_main() - t = threading.Thread(target=look_for_control_c) - t.daemon = True - t.start() + """Turn off console Ctrl-c support and implement it ourselves.""" + # Used to work around a bug in msys where control-c kills the process + # abruptly ~100ms after the process receives SIGINT. This prevents us + # from running cleanup handlers, like the one that kills the Unity.exe + # subprocess. 
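As a usage note for the ensure_terminate() context manager above, a minimal sketch of the pattern it supports; the Unity command line is illustrative only, not something this diff defines:

    import subprocess
    from unitybuild.utils import ensure_terminate

    proc = subprocess.Popen(["Unity", "-batchmode", "-quit"])  # illustrative command
    with ensure_terminate(proc):
        # Even if this block raises (e.g. KeyboardInterrupt), the context
        # manager still calls proc.terminate() on the way out, so no child
        # process is left running.
        proc.wait()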
+ if not os.getenv("MSYSTEM"): + return + + kernel32 = ctypes.windll.kernel32 + kernel32.GetStdHandle.restype = HANDLE + kernel32.GetStdHandle.argtypes = (DWORD,) + # kernel32.GetConsoleMode.restype = BOOL + kernel32.GetConsoleMode.argtypes = (HANDLE, ctypes.POINTER(DWORD)) + # kernel32.SetConsoleMode.restype = BOOL + kernel32.SetConsoleMode.argtypes = (HANDLE, DWORD) + STD_INPUT_HANDLE = DWORD(-10) + ENABLE_PROCESSED_INPUT = DWORD(1) + + stdin = kernel32.GetStdHandle(STD_INPUT_HANDLE) + mode = DWORD() + kernel32.GetConsoleMode(stdin, ctypes.byref(mode)) + mode.value = mode.value & ~(ENABLE_PROCESSED_INPUT.value) + kernel32.SetConsoleMode(stdin, mode) + + # interrupt_main won't interrupt WaitForSingleObject, so monkey-patch + def polling_wait(self): + while ( + WaitForSingleObject(self._handle, 3000) # pylint: disable=protected-access + != WAIT_OBJECT_0 + ): + continue + return self.poll() + + subprocess.Popen.wait = polling_wait + + def look_for_control_c(): + while msvcrt.getch() != "\x03": + continue + _thread.interrupt_main() + + t = threading.Thread(target=look_for_control_c) + t.daemon = True + t.start() def get_file_version(filename): - """Raises LookupError if file has no version. - Returns (major, minor, micro)""" - if platform.system() == 'Windows': - ffi = win32api.GetFileVersionInfo(filename, "\\") - # I don't know the difference between ProductVersion and FileVersion - - def extract_16s(i32): - return ((i32 >> 16) & 0xffff), i32 & 0xffff - file_version = extract_16s(ffi['FileVersionMS']) + extract_16s(ffi['FileVersionLS']) - return file_version[0:3] - raise LookupError("Not supported yet on macOS") - - # Untested -- get it from the property list - # pylint: disable=unreachable - plist_file = os.path.join(filename, 'Contents', 'Info.plist') - plist_json = subprocess.check_output(['plutil', '-convert', 'json', '-o', '-', '-s', '--', plist_file]) - plist = json.loads(plist_json) - # XXX: need to parse this out but I don't know the format - return plist['CFBundleShortVersionString'] + """Raises LookupError if file has no version. + Returns (major, minor, micro)""" + if platform.system() == "Windows": + ffi = win32api.GetFileVersionInfo(filename, "\\") + # I don't know the difference between ProductVersion and FileVersion + + def extract_16s(i32): + return ((i32 >> 16) & 0xFFFF), i32 & 0xFFFF + + file_version = extract_16s(ffi["FileVersionMS"]) + extract_16s( + ffi["FileVersionLS"] + ) + return file_version[0:3] + raise LookupError("Not supported yet on macOS") + + # Untested -- get it from the property list + # pylint: disable=unreachable + plist_file = os.path.join(filename, "Contents", "Info.plist") + plist_json = subprocess.check_output( + ["plutil", "-convert", "json", "-o", "-", "-s", "--", plist_file] + ) + plist = json.loads(plist_json) + # XXX: need to parse this out but I don't know the format + return plist["CFBundleShortVersionString"] diff --git a/Support/Python/unitybuild/vcs.py b/Support/Python/unitybuild/vcs.py index 9c288f3469..0c60d71d44 100644 --- a/Support/Python/unitybuild/vcs.py +++ b/Support/Python/unitybuild/vcs.py @@ -20,155 +20,182 @@ def _plural(noun, num): - if num == 1: - return '%d %s' % (num, noun) - return '%d %ss' % (num, noun) + if num == 1: + return "%d %s" % (num, noun) + return "%d %ss" % (num, noun) def git(cmd, cwd=None): - """Runs git, returns stdout. 
-Raises CalledProcessError if process cannot be started, or exits with an error.""" - if cwd is None: - cwd = os.getcwd() - if isinstance(cmd, str): - cmd = ['git'] + cmd.split() - else: - cmd = ['git'] + list(cmd) - - try: - proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8') - except OSError as e: - raise subprocess.CalledProcessError(1, cmd, str(e)) - - (stdout, stderr) = proc.communicate() - if proc.wait() != 0: - raise subprocess.CalledProcessError(proc.wait(), cmd, "In %s:\nstderr: %s\nstdout: %s" % (cwd, stderr, stdout)) - return str(stdout) + """Runs git, returns stdout. + Raises CalledProcessError if process cannot be started, or exits with an error.""" + if cwd is None: + cwd = os.getcwd() + if isinstance(cmd, str): + cmd = ["git"] + cmd.split() + else: + cmd = ["git"] + list(cmd) + + try: + proc = subprocess.Popen( + cmd, + cwd=cwd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding="utf-8", + ) + except OSError as e: + raise subprocess.CalledProcessError(1, cmd, str(e)) + + (stdout, stderr) = proc.communicate() + if proc.wait() != 0: + raise subprocess.CalledProcessError( + proc.wait(), cmd, "In %s:\nstderr: %s\nstdout: %s" % (cwd, stderr, stdout) + ) + return str(stdout) def create(): - """Returns a VCS instance.""" - try: - git('status') - except subprocess.CalledProcessError: - return NullVcs() - else: - return GitVcs() - - -class VcsBase(): # pylint: disable=too-few-public-methods - # Pretty much just here to define API - def __init__(self): - """Raises UserError if the appropriate VCS is not detected.""" - def get_build_stamp(self, input_directory): - """Returns a build stamp representing the build inputs. - Raises LookupError if this is not possible. - Build stamp is currently a p4 changelist number, eg '@1234'""" - raise NotImplementedError() + """Returns a VCS instance.""" + try: + git("status") + except subprocess.CalledProcessError: + return NullVcs() + else: + return GitVcs() + + +class VcsBase: # pylint: disable=too-few-public-methods + # Pretty much just here to define API + def __init__(self): + """Raises UserError if the appropriate VCS is not detected.""" + + def get_build_stamp(self, input_directory): + """Returns a build stamp representing the build inputs. + Raises LookupError if this is not possible. + Build stamp is currently a p4 changelist number, eg '@1234'""" + raise NotImplementedError() class NullVcs(VcsBase): # pylint: disable=too-few-public-methods - """VCS implementation that does nothing""" - def get_build_stamp(self, input_directory): - raise LookupError("Not using version control") + """VCS implementation that does nothing""" + + def get_build_stamp(self, input_directory): + raise LookupError("Not using version control") class GitVcs(VcsBase): # pylint: disable=too-few-public-methods - """VCS implementation that uses git (without p4)""" - def __init__(self): - super().__init__() - try: - git('status') - except subprocess.CalledProcessError as e: - raise UserError("Not in a git client") from e - - @staticmethod - def _get_local_branch(): - ref = git('rev-parse --symbolic-full-name HEAD') - m = re.match(r'refs/heads/(.*)', ref) - if m is None: - raise LookupError("Not on a branch") - return m.group(1) - - def _get_gob_branch(self): - """Returns the name of the branch on GoB that the current branch is tracking, - as well as the local name of the tracking branch. 
- eg, ("master", "refs/remotes/origin/master") - Raises LookupError on failure, eg if not on a branch, or remote is not GoB.""" - branch = self._get_local_branch() - try: - remote = git('config branch.%s.remote' % branch).strip() - except subprocess.CalledProcessError: - remote = '' - if remote == '': - raise LookupError("Can't determine GoB branch: no remote") - git('config remote.%s.url' % remote).strip() - remote_branch = git('config branch.%s.merge' % branch).strip() - m = re.match('refs/heads/(.*)', remote_branch) - if m is None: - raise LookupError("Can't determine GoB name: %s looks funny" % remote_branch) - - tracking = git('rev-parse --symbolic-full-name @{u}').strip() - assert tracking != '' - return m.group(1), tracking - - def get_build_stamp(self, input_directory): # pylint: disable=too-many-branches - """Stamp is of the form: - - + - is a sha of the lastest GoB commit included in the current build. - is a tiny description of any changes in the build that aren't on GoB.""" - try: - status = git('status --porcelain', cwd=input_directory) - except subprocess.CalledProcessError as e: - print('UNEXPECTED: %s\n%s' % (e, e.output)) - print('In:', os.getcwd()) - assert False - for match in re.finditer(r'^(.[MADR]|[MADR].) (.*)', status, re.MULTILINE): - # Ignore changes in build script files - filename = match.group(2) - if re.match(r'Support/(.*\.py|obfuscation_map\.txt)$', filename): - continue - # For practicality, ignore changes to ProjectSettings too; allows Jon to re-build - # with a corrected AndroidBundleVersionCode without requiring him to commit+push+fetch - # TODO(pld): We may want to remove this once our process settles down, or tighten this so - # we only ignore changes to AndroidBundleVersionCode. - if filename == 'ProjectSettings/ProjectSettings.asset': - continue - raise LookupError('repo has modified files (%s)' % filename) - - _, tracked_ref = self._get_gob_branch() - base = git('merge-base %s HEAD' % tracked_ref).strip() - if base == '': - raise LookupError('No common ancestor with %s' % tracked_ref) - base = git('rev-parse --short %s' % base).strip() - # It's verbose and redundant (with our human-made version number) to put the - # gob branch name in the stamp. The sha is all we really need. 
- # gob_name = '%s-%s' % (tracked_name.replace('-', ''), base) - gob_name = base - - ahead_commits = git(['log', '--pretty=tformat:%h %s', '%s..HEAD' % base]).split('\n')[:-1] - behind_commits = git(['log', '--pretty=tformat:%h %s', 'HEAD..%s' % tracked_ref]).split('\n')[:-1] - if len(ahead_commits) == 0: - if len(behind_commits) > 0: - # Still allow the build without a custom stamp, but warn that it's not head - print("HEAD is %s behind of %s:" % (_plural('commit', len(behind_commits)), tracked_ref)) - for c in behind_commits[::-1][:10]: - print(' ', c) - return gob_name - if len(ahead_commits) > 0: - print("HEAD is %s ahead of %s:" % (_plural('commit', len(ahead_commits)), tracked_ref)) - for c in ahead_commits[:10]: - print(' ', c) - if len(behind_commits) > 0: - print("HEAD is %s behind of %s:" % (_plural('commit', len(behind_commits)), tracked_ref)) - for c in behind_commits[::-1][:10]: - print(' ', c) - print("\nEnter a suffix to uniquify the build stamp, or empty string to abort") - suffix = input("> ") - if not suffix.strip(): - raise LookupError("Not at the official GoB commit") - return gob_name + '+' + suffix + """VCS implementation that uses git (without p4)""" + + def __init__(self): + super().__init__() + try: + git("status") + except subprocess.CalledProcessError as e: + raise UserError("Not in a git client") from e + + @staticmethod + def _get_local_branch(): + ref = git("rev-parse --symbolic-full-name HEAD") + m = re.match(r"refs/heads/(.*)", ref) + if m is None: + raise LookupError("Not on a branch") + return m.group(1) + + def _get_gob_branch(self): + """Returns the name of the branch on GoB that the current branch is tracking, + as well as the local name of the tracking branch. + eg, ("master", "refs/remotes/origin/master") + Raises LookupError on failure, eg if not on a branch, or remote is not GoB.""" + branch = self._get_local_branch() + try: + remote = git("config branch.%s.remote" % branch).strip() + except subprocess.CalledProcessError: + remote = "" + if remote == "": + raise LookupError("Can't determine GoB branch: no remote") + git("config remote.%s.url" % remote).strip() + remote_branch = git("config branch.%s.merge" % branch).strip() + m = re.match("refs/heads/(.*)", remote_branch) + if m is None: + raise LookupError( + "Can't determine GoB name: %s looks funny" % remote_branch + ) + + tracking = git("rev-parse --symbolic-full-name @{u}").strip() + assert tracking != "" + return m.group(1), tracking + + def get_build_stamp(self, input_directory): # pylint: disable=too-many-branches + """Stamp is of the form: + + + + is a sha of the lastest GoB commit included in the current build. + is a tiny description of any changes in the build that aren't on GoB.""" + try: + status = git("status --porcelain", cwd=input_directory) + except subprocess.CalledProcessError as e: + print("UNEXPECTED: %s\n%s" % (e, e.output)) + print("In:", os.getcwd()) + assert False + for match in re.finditer(r"^(.[MADR]|[MADR].) (.*)", status, re.MULTILINE): + # Ignore changes in build script files + filename = match.group(2) + if re.match(r"Support/(.*\.py|obfuscation_map\.txt)$", filename): + continue + # For practicality, ignore changes to ProjectSettings too; allows Jon to re-build + # with a corrected AndroidBundleVersionCode without requiring him to commit+push+fetch + # TODO(pld): We may want to remove this once our process settles down, or tighten this so + # we only ignore changes to AndroidBundleVersionCode. 
+ if filename == "ProjectSettings/ProjectSettings.asset": + continue + raise LookupError("repo has modified files (%s)" % filename) + + _, tracked_ref = self._get_gob_branch() + base = git("merge-base %s HEAD" % tracked_ref).strip() + if base == "": + raise LookupError("No common ancestor with %s" % tracked_ref) + base = git("rev-parse --short %s" % base).strip() + # It's verbose and redundant (with our human-made version number) to put the + # gob branch name in the stamp. The sha is all we really need. + # gob_name = '%s-%s' % (tracked_name.replace('-', ''), base) + gob_name = base + + ahead_commits = git(["log", "--pretty=tformat:%h %s", "%s..HEAD" % base]).split( + "\n" + )[:-1] + behind_commits = git( + ["log", "--pretty=tformat:%h %s", "HEAD..%s" % tracked_ref] + ).split("\n")[:-1] + if len(ahead_commits) == 0: + if len(behind_commits) > 0: + # Still allow the build without a custom stamp, but warn that it's not head + print( + "HEAD is %s behind of %s:" + % (_plural("commit", len(behind_commits)), tracked_ref) + ) + for c in behind_commits[::-1][:10]: + print(" ", c) + return gob_name + if len(ahead_commits) > 0: + print( + "HEAD is %s ahead of %s:" + % (_plural("commit", len(ahead_commits)), tracked_ref) + ) + for c in ahead_commits[:10]: + print(" ", c) + if len(behind_commits) > 0: + print( + "HEAD is %s behind of %s:" + % (_plural("commit", len(behind_commits)), tracked_ref) + ) + for c in behind_commits[::-1][:10]: + print(" ", c) + print("\nEnter a suffix to uniquify the build stamp, or empty string to abort") + suffix = input("> ") + if not suffix.strip(): + raise LookupError("Not at the official GoB commit") + return gob_name + "+" + suffix + # # Testing @@ -176,11 +203,11 @@ def get_build_stamp(self, input_directory): # pylint: disable=too-many-branches def test(): - try: - print(create().get_build_stamp(os.getcwd())) - except LookupError as e: - print('No stamp (%s)' % e) + try: + print(create().get_build_stamp(os.getcwd())) + except LookupError as e: + print("No stamp (%s)" % e) -if __name__ == '__main__': - test() +if __name__ == "__main__": + test() diff --git a/Support/bin/analyze_refgraph.py b/Support/bin/analyze_refgraph.py index 756973cc30..c18a8a6bfb 100755 --- a/Support/bin/analyze_refgraph.py +++ b/Support/bin/analyze_refgraph.py @@ -23,109 +23,122 @@ # Add ../Python to sys.path sys.path.append( - os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'Python')) + os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "Python") +) import unitybuild.refgraph # noqa: E402 pylint: disable=import-error,wrong-import-position def find_project_dir(start=None): - cur = os.getcwd() if start is None else start - while True: - if os.path.isdir(os.path.join(cur, 'Assets')): - return cur - next_dir = os.path.dirname(cur) - if cur == next_dir: - raise LookupError("Cannot find project dir") - cur = next_dir + cur = os.getcwd() if start is None else start + while True: + if os.path.isdir(os.path.join(cur, "Assets")): + return cur + next_dir = os.path.dirname(cur) + if cur == next_dir: + raise LookupError("Cannot find project dir") + cur = next_dir def filter_case_folded_duplicates(lst): - def iterate(): - seen = set() - for elt in lst: - lowered = elt.lower() - if lowered not in seen: - seen.add(lowered) - yield elt - return list(iterate()) + def iterate(): + seen = set() + for elt in lst: + lowered = elt.lower() + if lowered not in seen: + seen.add(lowered) + yield elt + + return list(iterate()) def main(args): - parser = 
argparse.ArgumentParser() - parser.add_argument('--recreate', action='store_true', default=False, - help="Recreate the cached graph and DummyCommandRefs.cs") - grp = parser.add_argument_group("Graph queries") - grp.add_argument('--shortest-path', action='store_true', - help="Show the shortest path from Main.unity to ASSET") - grp.add_argument('--predecessors', action='store_true', - help="Show incoming references to ASSET") - grp.add_argument('--successors', action='store_true', - help="Show outgoing references from ASSET") - grp.add_argument('--all', action='store_true', - help="If asset argument is ambiguous, show all matches") - grp.add_argument('asset', nargs='*', - help="Asset(s) to examine") - args = parser.parse_args(args) - if not (args.shortest_path or args.predecessors or args.successors): - args.shortest_path = True - - rg = unitybuild.refgraph.ReferenceGraph(find_project_dir(), args.recreate) - root = rg.name_to_guid['ROOT'] - - def lookup_guids(asset): - """Returns a list of guids""" - asset = asset.lower().replace('\\', '/') - # Asset name? - if asset in rg.name_to_guid: - return [rg.name_to_guid[asset]] - # Looks like guid? - if re.match(r'^[a-f0-9]{32}$', asset): - return [asset] - # Exhaustive search - possibilities = [name for name in rg.name_to_guid if asset in name] - # name_to_guid contains duplicate lowercased names; don't consider that ambiguous - possibilities = filter_case_folded_duplicates(possibilities) - if len(possibilities) == 0: - raise LookupError("Cannot find any asset matching %s" % asset) - if len(possibilities) > 1 and not args.all: - print("Ambiguous:\n %s" % '\n '.join(possibilities)) - possibilities = [possibilities[0]] - return [rg.name_to_guid.get(p) for p in possibilities] - - def iter_desired_guids(): - if len(args.asset) == 0 and not args.recreate: - parser.error("Too few arguments") - for asset in args.asset: - try: - guids = lookup_guids(asset) - except LookupError as e: - print(e) - continue - for guid in guids: - yield guid - - for guid in iter_desired_guids(): - name = rg.guid_to_name.get(guid, guid) - - if args.shortest_path: - print("\n=== %s (shortest path)" % name) - try: - path = nx.shortest_path(rg.g, source=root, target=guid) - path.reverse() - for elt in path[:-1]: - print(' ', elt, rg.guid_to_name.get(elt, elt)) - except nx.exception.NetworkXNoPath: - print(' (no path)') - - if args.predecessors or args.successors: - if args.predecessors: - for guid2 in rg.g.predecessors(guid): - print('< ', rg.guid_to_name.get(guid2, guid2)) - print('* ', name) - if args.successors: - for guid2 in rg.g.successors(guid): - print('> ', rg.guid_to_name.get(guid2, guid2)) - - -if __name__ == '__main__': - main(sys.argv[1:]) + parser = argparse.ArgumentParser() + parser.add_argument( + "--recreate", + action="store_true", + default=False, + help="Recreate the cached graph and DummyCommandRefs.cs", + ) + grp = parser.add_argument_group("Graph queries") + grp.add_argument( + "--shortest-path", + action="store_true", + help="Show the shortest path from Main.unity to ASSET", + ) + grp.add_argument( + "--predecessors", action="store_true", help="Show incoming references to ASSET" + ) + grp.add_argument( + "--successors", action="store_true", help="Show outgoing references from ASSET" + ) + grp.add_argument( + "--all", + action="store_true", + help="If asset argument is ambiguous, show all matches", + ) + grp.add_argument("asset", nargs="*", help="Asset(s) to examine") + args = parser.parse_args(args) + if not (args.shortest_path or args.predecessors or 
args.successors): + args.shortest_path = True + + rg = unitybuild.refgraph.ReferenceGraph(find_project_dir(), args.recreate) + root = rg.name_to_guid["ROOT"] + + def lookup_guids(asset): + """Returns a list of guids""" + asset = asset.lower().replace("\\", "/") + # Asset name? + if asset in rg.name_to_guid: + return [rg.name_to_guid[asset]] + # Looks like guid? + if re.match(r"^[a-f0-9]{32}$", asset): + return [asset] + # Exhaustive search + possibilities = [name for name in rg.name_to_guid if asset in name] + # name_to_guid contains duplicate lowercased names; don't consider that ambiguous + possibilities = filter_case_folded_duplicates(possibilities) + if len(possibilities) == 0: + raise LookupError("Cannot find any asset matching %s" % asset) + if len(possibilities) > 1 and not args.all: + print("Ambiguous:\n %s" % "\n ".join(possibilities)) + possibilities = [possibilities[0]] + return [rg.name_to_guid.get(p) for p in possibilities] + + def iter_desired_guids(): + if len(args.asset) == 0 and not args.recreate: + parser.error("Too few arguments") + for asset in args.asset: + try: + guids = lookup_guids(asset) + except LookupError as e: + print(e) + continue + for guid in guids: + yield guid + + for guid in iter_desired_guids(): + name = rg.guid_to_name.get(guid, guid) + + if args.shortest_path: + print("\n=== %s (shortest path)" % name) + try: + path = nx.shortest_path(rg.g, source=root, target=guid) + path.reverse() + for elt in path[:-1]: + print(" ", elt, rg.guid_to_name.get(elt, elt)) + except nx.exception.NetworkXNoPath: + print(" (no path)") + + if args.predecessors or args.successors: + if args.predecessors: + for guid2 in rg.g.predecessors(guid): + print("< ", rg.guid_to_name.get(guid2, guid2)) + print("* ", name) + if args.successors: + for guid2 in rg.g.successors(guid): + print("> ", rg.guid_to_name.get(guid2, guid2)) + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/Support/bin/animateTriangles.py b/Support/bin/animateTriangles.py index f6791010be..de28e9e3b4 100644 --- a/Support/bin/animateTriangles.py +++ b/Support/bin/animateTriangles.py @@ -21,391 +21,422 @@ def clamp(x, lowerlimit, upperlimit): - if x < lowerlimit: - x = lowerlimit - if x > upperlimit: - x = upperlimit - return x + if x < lowerlimit: + x = lowerlimit + if x > upperlimit: + x = upperlimit + return x def smoothstep(edge0, edge1, x): - x = clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0) - return x * x * (3 - 2 * x) + x = clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0) + return x * x * (3 - 2 * x) def smootherstep(edge0, edge1, x): - x = clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0) - return x * x * x * (x * (x * 6 - 15) + 10) + x = clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0) + return x * x * x * (x * (x * 6 - 15) + 10) # # Filters dictate when a stroke becomes active, i.e. predicates for activation. # def isHigherVert(vertPos, height, radius): - length = Gf.Vec2f(vertPos[0], vertPos[2]).GetLength() - return vertPos[1] < height and length < radius + length = Gf.Vec2f(vertPos[0], vertPos[2]).GetLength() + return vertPos[1] < height and length < radius def isHigher(substroke, height, radius): - return substroke.avgHeight < height and substroke.minLen < radius + return substroke.avgHeight < height and substroke.minLen < radius def isInRadius(vert, radius): - return (vert - worldCenter).GetLength() < radius + return (vert - worldCenter).GetLength() < radius # # Animation Algorithm Starts Here, animate(). 
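+# In brief: an activation sphere grows out from the world centroid; substrokes whose
+# nearest point falls inside it start revealing their triangles in accelerating bursts,
+# and a USD keyframe is written for every dirty stroke on each frame.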
# -def animate(stage): # pylint: disable=too-many-statements,too-many-branches,too-many-locals - """The output from Tilt Brush is strokes with a growth velocity. - This could be authored per vertex later (e.g. for easing). - """ - # The current animation time, this will increase monotonicly to generate frames of animation. - time = 1 - - # The maximum number of animated strokes, for performance constraints. - maxActive = 30 - - # Filters dictate when a stroke becomes active, i.e. predicates for activation. - activeFilter = isInRadius - # TODO is this needed? - activeFilterVert = isHigherVert # noqa: F841 pylint: disable=unused-variable - - # The target length of the animation. - lengthInSeconds = 30 - # The playback framerate. - framesPerSecond = 30 - # The number of frames we will generate based on target time and rate. - numFrames = lengthInSeconds * framesPerSecond - - # Boundaries for activation. - minHeight = 0 - maxHeight = 20 - radius = 17.0 - maxRadius = 100. - height = minHeight - - # Compute the actual radius of the world bounds. - worldBounds = UsdGeom.Xform(stage.GetPrimAtPath("/")).ComputeWorldBound(0, "default") - maxRadius = (worldBounds.GetRange().max - worldBounds.GetRange().min).GetLength() - - # Compute the centroid of the world. - global worldCenter # pylint: disable=global-statement - worldCenter = Gf.Vec3f(worldBounds.ComputeCentroid()) - # Just for newIntroSketch.tilt - if "NewIntroSketch" in stage.GetRootLayer().identifier: - worldCenter = Gf.Vec3f(0.73135, 19.92212, 33.2210311) - worldCenter[1] = worldBounds.GetRange().min[1] - - print("World Center:", worldCenter) - print("Max Radius:", maxRadius) - - # Visualize the radius. - debugSphere = UsdGeom.Sphere(stage.DefinePrim("/debug", "Sphere")) - debugSphere.CreateRadiusAttr(radius) - debugSphere.GetPrim().GetAttribute("purpose").Set("guide") - attr = debugSphere.GetPrim().GetAttribute("primvars:displayOpacity") - attr.Set([0.125]) - attr.SetMetadata("interpolation", "constant") - UsdGeom.Xform(attr.GetPrim()).AddTranslateOp().Set(worldCenter) - - # Initialize data structures. - # - strokes are Unity meshes (or Tilt Brush batches). - # - substrokes are the individual brush strokes within a single mesh. - # - activeSubstrokes are sub-strokes currently in-flight. - # - completeSubstrokes are sub-strokes that are done animating. - strokes = MakeStrokes(stage) - substrokes = MakeSubstrokes(strokes) - activeStrokes = set() - activeSubstrokes = set() - completeSubstrokes = set() - - # Compute step sizes based on target animation length. - dRadius = (maxRadius - radius) / float(numFrames) / 1.5 - dHeight = (maxHeight - minHeight) / float(numFrames) - - # Set USD animation start/end times. - stage.SetStartTimeCode(time) - stage.SetEndTimeCode(numFrames) - - # Zero out stroke opacities - for s in strokes: - s.Save(time) - - # Main animation loop. - for time in range(0, numFrames): - print() - print("Time:", time, height, radius, smoothstep(1.0, float(numFrames), time)) - - if len(activeStrokes) < maxActive: - # On the final frame, increase activation volumes to "infinity" (and beyond ;) - if time == numFrames - 1: - height = 10000000 - radius = 10000000. - - # Search for strokes to be activated. - didAddStroke = 0 - for ss in substrokes: - # Already animating, skip. - if ss in activeSubstrokes: - continue - # Done animating, skip. - if ss in completeSubstrokes: - continue - # Overloaded. - if len(activeStrokes) >= maxActive: - break - # If this sub-stroke passes the filter, add it to the animating list. 
- if activeFilter(ss.minPoint, radius): - didAddStroke = 1 - activeSubstrokes.add(ss) - activeStrokes.add(ss.stroke) - # Mark the stroke as dirty to save its initial state. - ss.stroke.dirty = True - ss.SetRadius(radius, time) - print("+", end=' ') - if not didAddStroke: - # We didn't add any strokes, which means the radius needs to increase. - # Grow the activation volumes (increase sphere size, raise floor plane height). - height += dHeight - radius += dRadius * smoothstep(1.0, float(numFrames), time) - - # Update debug vis. - debugSphere.GetRadiusAttr().Set(radius, time) - - # Call save on everything, but only dirty strokes will actually write data. - # Save a key at the previous frame here so that when a stroke starts animating, when linearly - # interpolated, it will not start animating from frame zero to the first key frame. - # for s in strokes: - # s.Save(time - 1) - - # Update stroke animation. - remove = [] - for ss in activeSubstrokes: - print(".", end=' ') - if not ss.Update(dRadius, smoothstep(1.0, float(numFrames), time)): - if ss.indicesWritten != ss.indexCount: - raise "Fail" - remove.append(ss) - - # Remove all the completed strokes. - for ss in remove: - activeSubstrokes.remove(ss) - completeSubstrokes.add(ss) - - # Save keyframes for the current time. - for s in strokes: - s.Save(time) - - # Rebuild the activeStrokes set. - activeStrokes = set() - for ss in activeSubstrokes: - activeStrokes.add(ss.stroke) - - # Drainstop: we have leftover strokes that didn't finish animating within the target time, rather - # than popping them, we let them finish animating and run over the target time. - while len(activeSubstrokes) > 0: - remove = [] - time += 1 - # Since we blew past the initial frame estimate, we also need to update the USD end time. - stage.SetEndTimeCode(time) - # Loop: update, remove, save, rinse, repeat. - for ss in activeSubstrokes: - if not ss.Update(dRadius, 2.0): - if ss.indicesWritten != ss.indexCount: - raise "Fail" - remove.append(ss) - for ss in remove: - activeSubstrokes.remove(ss) - completeSubstrokes.add(ss) - for s in strokes: - s.Save(time) - - -class Substroke(): # pylint: disable=too-many-instance-attributes - def __init__(self, stroke, startVert, vertCount, startIndex, indexCount): # pylint: disable=too-many-arguments - self.stroke = stroke - self.startVert = startVert - self.vertCount = vertCount - self.startIndex = startIndex - self.indexCount = indexCount - self.i = startVert - self.step = 10 - self.radius = 0 - self.indicesWritten = 0 - self.growthVel = 1 - self.minHeight = self.GetVertex(0)[2] - self.maxHeight = self.GetVertex(0)[2] - self.avgHeight = self.GetVertex(0)[2] - self.minLen = 10000000 - self.minPoint = self.GetVertex(0) - - minVal = (self.minPoint - worldCenter).GetLength() - for i in range(vertCount): - v = self.GetVertex(i) - length = (v - worldCenter).GetLength() - if length < minVal: - minVal = length - self.minPoint = v - if Gf.IsClose(v, Gf.Vec3f(), 1e-7): - continue - length = Gf.Vec2f(v[0], v[2]).GetLength() - self.minHeight = min(self.minHeight, v[1]) - self.maxHeight = max(self.minHeight, v[1]) - self.avgHeight = (self.maxHeight - self.minHeight) / 2.0 - self.minLen = min(self.minLen, length) - - # Debug visualization. 
- self.minPtDebug = UsdGeom.Sphere.Define(stroke.prim.GetStage(), str(stroke.prim.GetPath()) + "/minPt" + str(startIndex)) - self.minPtDebug.GetPrim().GetAttribute("purpose").Set("guide") - attr = self.minPtDebug.GetPrim().GetAttribute("primvars:displayOpacity") - attr.Set([0.25]) - attr.SetMetadata("interpolation", "constant") - attr = self.minPtDebug.GetPrim().GetAttribute("primvars:displayColor") - attr.Set([Gf.Vec3f(1, 1, 1)], 0) +def animate( + stage, +): # pylint: disable=too-many-statements,too-many-branches,too-many-locals + """The output from Tilt Brush is strokes with a growth velocity. + This could be authored per vertex later (e.g. for easing). + """ + # The current animation time, this will increase monotonicly to generate frames of animation. + time = 1 + + # The maximum number of animated strokes, for performance constraints. + maxActive = 30 + + # Filters dictate when a stroke becomes active, i.e. predicates for activation. + activeFilter = isInRadius + # TODO is this needed? + activeFilterVert = isHigherVert # noqa: F841 pylint: disable=unused-variable + + # The target length of the animation. + lengthInSeconds = 30 + # The playback framerate. + framesPerSecond = 30 + # The number of frames we will generate based on target time and rate. + numFrames = lengthInSeconds * framesPerSecond + + # Boundaries for activation. + minHeight = 0 + maxHeight = 20 + radius = 17.0 + maxRadius = 100.0 + height = minHeight + + # Compute the actual radius of the world bounds. + worldBounds = UsdGeom.Xform(stage.GetPrimAtPath("/")).ComputeWorldBound( + 0, "default" + ) + maxRadius = (worldBounds.GetRange().max - worldBounds.GetRange().min).GetLength() + + # Compute the centroid of the world. + global worldCenter # pylint: disable=global-statement + worldCenter = Gf.Vec3f(worldBounds.ComputeCentroid()) + # Just for newIntroSketch.tilt + if "NewIntroSketch" in stage.GetRootLayer().identifier: + worldCenter = Gf.Vec3f(0.73135, 19.92212, 33.2210311) + worldCenter[1] = worldBounds.GetRange().min[1] + + print("World Center:", worldCenter) + print("Max Radius:", maxRadius) + + # Visualize the radius. 
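+    # (debugSphere is a guide-purpose prim placed at the world centroid; its radius is
+    # keyframed below so the growing activation volume can be inspected in a viewer.)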
+ debugSphere = UsdGeom.Sphere(stage.DefinePrim("/debug", "Sphere")) + debugSphere.CreateRadiusAttr(radius) + debugSphere.GetPrim().GetAttribute("purpose").Set("guide") + attr = debugSphere.GetPrim().GetAttribute("primvars:displayOpacity") + attr.Set([0.125]) attr.SetMetadata("interpolation", "constant") - self.minPtDebug.CreateRadiusAttr(1.0) - UsdGeom.Xform(self.minPtDebug.GetPrim()).AddTranslateOp().Set(self.minPoint) - - def __len__(self): - return self.vertCount - - def SetRadius(self, radius, time): - self.radius = radius - attr = self.minPtDebug.GetPrim().GetAttribute("primvars:displayColor") - attr.Set([Gf.Vec3f(1, 1, 1)], time - 1) - attr.Set([Gf.Vec3f(0, .5, .5)], time) - attr.SetMetadata("interpolation", "constant") - - def SetStep(self, targetFrameCount, strokeCount, maxActiveStrokes): - # b = strokeCount / maxActiveStrokes - # strokeLength = targetFrameCount / b - # self.step = self.vertCount / strokeLength - pass - - def GetVertex(self, i): - return self.stroke.points[i + self.startVert] - - def GetIndex(self, i): - return self.stroke.originalIndices[i + self.startIndex] - - def SetIndex(self, i, value): - self.stroke.indices[i + self.startIndex] = value - self.stroke.maskIndices[i + self.startIndex] = 1 - - def Update(self, dRadius, t): - self.radius += dRadius - return self._GrowByTopology(t) - - def _GrowByTopology(self, t): - for _ in range(self.growthVel + int((t + .6) * 10)): - for __ in range(0, min(6, self.indexCount - self.indicesWritten), 1): - self.SetIndex(self.indicesWritten, self.GetIndex(self.indicesWritten)) - self.indicesWritten += 1 - self.stroke.dirty = True - self.growthVel += 4 - return self.indicesWritten < self.indexCount - - def _GrowByRadius(self): - for vi in range(0, self.indexCount, 3): - # Skip strokes that have already been processed. - if self.stroke.maskIndices[vi + self.startIndex] != 0: - continue - # No need to go through GetVertex here, since GetIndex returns the points index - for ii in range(3): - i0 = self.GetIndex(vi + ii) - p0 = self.stroke.points[i0] - if isInRadius(p0, self.radius): - for jj in range(3): - self.SetIndex(vi + jj, self.GetIndex(vi + jj)) - self.stroke.dirty = True - self.indicesWritten += 3 - break - return self.indicesWritten < self.indexCount - - -class Stroke(): # pylint: disable=too-many-instance-attributes - def __init__(self, prim, points, indices, vertOffsets, vertCounts, indexOffsets, indexCounts, displayOpacity): # pylint: disable=too-many-arguments - self.dirty = True - self.adj = 2 * random.random() - self.prim = prim - self.points = points - self.indices = Vt.IntArray(len(indices), 0) - self.originalIndices = Vt.IntArray(indices) - self.maskIndices = Vt.IntArray(len(indices), 0) - self.vertOffsets = vertOffsets - self.vertCounts = vertCounts - self.indexOffsets = indexOffsets - self.indexCounts = indexCounts - self.displayOpacity = Vt.FloatArray(len(displayOpacity)) - self.previousOpacity = Vt.FloatArray(displayOpacity) - self.originalOpacity = Vt.FloatArray(displayOpacity) - self.substrokes = self._GetSubstrokes() - self.adjs = [] - for i in enumerate(displayOpacity): - displayOpacity[i] = 0 - for i in self.vertOffsets: - self.adjs.append(random.random() - 1) - self.adjs.append(random.random() - 1) - - def _GetSubstrokes(self): - ret = [] - for i, offset in enumerate(self.vertOffsets): - ret.append(Substroke(self, offset, self.vertCounts[i], self.indexOffsets[i], self.indexCounts[i])) - return ret + UsdGeom.Xform(attr.GetPrim()).AddTranslateOp().Set(worldCenter) + + # Initialize data structures. 
+ # - strokes are Unity meshes (or Tilt Brush batches). + # - substrokes are the individual brush strokes within a single mesh. + # - activeSubstrokes are sub-strokes currently in-flight. + # - completeSubstrokes are sub-strokes that are done animating. + strokes = MakeStrokes(stage) + substrokes = MakeSubstrokes(strokes) + activeStrokes = set() + activeSubstrokes = set() + completeSubstrokes = set() - def GetSubstroke(self, vertexIndex): - for i, offset in enumerate(self.vertOffsets): - if vertexIndex >= offset: - return (offset, self.vertCounts[i]) - raise "Vertex not found" + # Compute step sizes based on target animation length. + dRadius = (maxRadius - radius) / float(numFrames) / 1.5 + dHeight = (maxHeight - minHeight) / float(numFrames) - def GetAdj(self, vertIndex): - for i, offset in enumerate(self.vertOffsets): - if vertIndex >= offset: - return 3.0 * self.adjs[i] - raise "Vertex not found" + # Set USD animation start/end times. + stage.SetStartTimeCode(time) + stage.SetEndTimeCode(numFrames) - def Save(self, time): - if not self.dirty: - return - self.prim.GetAttribute("faceVertexIndices").Set(self.indices, time) - self.dirty = False + # Zero out stroke opacities + for s in strokes: + s.Save(time) + + # Main animation loop. + for time in range(0, numFrames): + print() + print("Time:", time, height, radius, smoothstep(1.0, float(numFrames), time)) + + if len(activeStrokes) < maxActive: + # On the final frame, increase activation volumes to "infinity" (and beyond ;) + if time == numFrames - 1: + height = 10000000 + radius = 10000000.0 + + # Search for strokes to be activated. + didAddStroke = 0 + for ss in substrokes: + # Already animating, skip. + if ss in activeSubstrokes: + continue + # Done animating, skip. + if ss in completeSubstrokes: + continue + # Overloaded. + if len(activeStrokes) >= maxActive: + break + # If this sub-stroke passes the filter, add it to the animating list. + if activeFilter(ss.minPoint, radius): + didAddStroke = 1 + activeSubstrokes.add(ss) + activeStrokes.add(ss.stroke) + # Mark the stroke as dirty to save its initial state. + ss.stroke.dirty = True + ss.SetRadius(radius, time) + print("+", end=" ") + if not didAddStroke: + # We didn't add any strokes, which means the radius needs to increase. + # Grow the activation volumes (increase sphere size, raise floor plane height). + height += dHeight + radius += dRadius * smoothstep(1.0, float(numFrames), time) + + # Update debug vis. + debugSphere.GetRadiusAttr().Set(radius, time) + + # Call save on everything, but only dirty strokes will actually write data. + # Save a key at the previous frame here so that when a stroke starts animating, when linearly + # interpolated, it will not start animating from frame zero to the first key frame. + # for s in strokes: + # s.Save(time - 1) + + # Update stroke animation. + remove = [] + for ss in activeSubstrokes: + print(".", end=" ") + if not ss.Update(dRadius, smoothstep(1.0, float(numFrames), time)): + if ss.indicesWritten != ss.indexCount: + raise "Fail" + remove.append(ss) + + # Remove all the completed strokes. + for ss in remove: + activeSubstrokes.remove(ss) + completeSubstrokes.add(ss) + + # Save keyframes for the current time. + for s in strokes: + s.Save(time) + + # Rebuild the activeStrokes set. 
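+        # (Rebuilt from scratch so strokes whose substrokes have all finished no longer
+        # count against the maxActive budget.)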
+ activeStrokes = set() + for ss in activeSubstrokes: + activeStrokes.add(ss.stroke) + + # Drainstop: we have leftover strokes that didn't finish animating within the target time, rather + # than popping them, we let them finish animating and run over the target time. + while len(activeSubstrokes) > 0: + remove = [] + time += 1 + # Since we blew past the initial frame estimate, we also need to update the USD end time. + stage.SetEndTimeCode(time) + # Loop: update, remove, save, rinse, repeat. + for ss in activeSubstrokes: + if not ss.Update(dRadius, 2.0): + if ss.indicesWritten != ss.indexCount: + raise "Fail" + remove.append(ss) + for ss in remove: + activeSubstrokes.remove(ss) + completeSubstrokes.add(ss) + for s in strokes: + s.Save(time) + + +class Substroke: # pylint: disable=too-many-instance-attributes + def __init__( + self, stroke, startVert, vertCount, startIndex, indexCount + ): # pylint: disable=too-many-arguments + self.stroke = stroke + self.startVert = startVert + self.vertCount = vertCount + self.startIndex = startIndex + self.indexCount = indexCount + self.i = startVert + self.step = 10 + self.radius = 0 + self.indicesWritten = 0 + self.growthVel = 1 + self.minHeight = self.GetVertex(0)[2] + self.maxHeight = self.GetVertex(0)[2] + self.avgHeight = self.GetVertex(0)[2] + self.minLen = 10000000 + self.minPoint = self.GetVertex(0) + + minVal = (self.minPoint - worldCenter).GetLength() + for i in range(vertCount): + v = self.GetVertex(i) + length = (v - worldCenter).GetLength() + if length < minVal: + minVal = length + self.minPoint = v + if Gf.IsClose(v, Gf.Vec3f(), 1e-7): + continue + length = Gf.Vec2f(v[0], v[2]).GetLength() + self.minHeight = min(self.minHeight, v[1]) + self.maxHeight = max(self.minHeight, v[1]) + self.avgHeight = (self.maxHeight - self.minHeight) / 2.0 + self.minLen = min(self.minLen, length) + + # Debug visualization. 
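+        # (A guide-purpose sphere is defined under the stroke prim to mark this
+        # substroke's point nearest the world center.)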
+ self.minPtDebug = UsdGeom.Sphere.Define( + stroke.prim.GetStage(), + str(stroke.prim.GetPath()) + "/minPt" + str(startIndex), + ) + self.minPtDebug.GetPrim().GetAttribute("purpose").Set("guide") + attr = self.minPtDebug.GetPrim().GetAttribute("primvars:displayOpacity") + attr.Set([0.25]) + attr.SetMetadata("interpolation", "constant") + attr = self.minPtDebug.GetPrim().GetAttribute("primvars:displayColor") + attr.Set([Gf.Vec3f(1, 1, 1)], 0) + attr.SetMetadata("interpolation", "constant") + self.minPtDebug.CreateRadiusAttr(1.0) + UsdGeom.Xform(self.minPtDebug.GetPrim()).AddTranslateOp().Set(self.minPoint) + + def __len__(self): + return self.vertCount + + def SetRadius(self, radius, time): + self.radius = radius + attr = self.minPtDebug.GetPrim().GetAttribute("primvars:displayColor") + attr.Set([Gf.Vec3f(1, 1, 1)], time - 1) + attr.Set([Gf.Vec3f(0, 0.5, 0.5)], time) + attr.SetMetadata("interpolation", "constant") + + def SetStep(self, targetFrameCount, strokeCount, maxActiveStrokes): + # b = strokeCount / maxActiveStrokes + # strokeLength = targetFrameCount / b + # self.step = self.vertCount / strokeLength + pass + + def GetVertex(self, i): + return self.stroke.points[i + self.startVert] + + def GetIndex(self, i): + return self.stroke.originalIndices[i + self.startIndex] + + def SetIndex(self, i, value): + self.stroke.indices[i + self.startIndex] = value + self.stroke.maskIndices[i + self.startIndex] = 1 + + def Update(self, dRadius, t): + self.radius += dRadius + return self._GrowByTopology(t) + + def _GrowByTopology(self, t): + for _ in range(self.growthVel + int((t + 0.6) * 10)): + for __ in range(0, min(6, self.indexCount - self.indicesWritten), 1): + self.SetIndex(self.indicesWritten, self.GetIndex(self.indicesWritten)) + self.indicesWritten += 1 + self.stroke.dirty = True + self.growthVel += 4 + return self.indicesWritten < self.indexCount + + def _GrowByRadius(self): + for vi in range(0, self.indexCount, 3): + # Skip strokes that have already been processed. 
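+            # (maskIndices entries are set to 1 by SetIndex, so a non-zero value means
+            # this triangle's indices were already copied into the animated buffer.)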
+ if self.stroke.maskIndices[vi + self.startIndex] != 0: + continue + # No need to go through GetVertex here, since GetIndex returns the points index + for ii in range(3): + i0 = self.GetIndex(vi + ii) + p0 = self.stroke.points[i0] + if isInRadius(p0, self.radius): + for jj in range(3): + self.SetIndex(vi + jj, self.GetIndex(vi + jj)) + self.stroke.dirty = True + self.indicesWritten += 3 + break + return self.indicesWritten < self.indexCount + + +class Stroke: # pylint: disable=too-many-instance-attributes + def __init__( + self, + prim, + points, + indices, + vertOffsets, + vertCounts, + indexOffsets, + indexCounts, + displayOpacity, + ): # pylint: disable=too-many-arguments + self.dirty = True + self.adj = 2 * random.random() + self.prim = prim + self.points = points + self.indices = Vt.IntArray(len(indices), 0) + self.originalIndices = Vt.IntArray(indices) + self.maskIndices = Vt.IntArray(len(indices), 0) + self.vertOffsets = vertOffsets + self.vertCounts = vertCounts + self.indexOffsets = indexOffsets + self.indexCounts = indexCounts + self.displayOpacity = Vt.FloatArray(len(displayOpacity)) + self.previousOpacity = Vt.FloatArray(displayOpacity) + self.originalOpacity = Vt.FloatArray(displayOpacity) + self.substrokes = self._GetSubstrokes() + self.adjs = [] + for i in enumerate(displayOpacity): + displayOpacity[i] = 0 + for i in self.vertOffsets: + self.adjs.append(random.random() - 1) + self.adjs.append(random.random() - 1) + + def _GetSubstrokes(self): + ret = [] + for i, offset in enumerate(self.vertOffsets): + ret.append( + Substroke( + self, + offset, + self.vertCounts[i], + self.indexOffsets[i], + self.indexCounts[i], + ) + ) + return ret + + def GetSubstroke(self, vertexIndex): + for i, offset in enumerate(self.vertOffsets): + if vertexIndex >= offset: + return (offset, self.vertCounts[i]) + raise "Vertex not found" + + def GetAdj(self, vertIndex): + for i, offset in enumerate(self.vertOffsets): + if vertIndex >= offset: + return 3.0 * self.adjs[i] + raise "Vertex not found" + + def Save(self, time): + if not self.dirty: + return + self.prim.GetAttribute("faceVertexIndices").Set(self.indices, time) + self.dirty = False def MakeSubstrokes(strokes): - ret = [] - for stroke in strokes: - ret.extend(stroke.substrokes) - return ret + ret = [] + for stroke in strokes: + ret.extend(stroke.substrokes) + return ret def MakeStrokes(stage): - ret = [] - print("Reading strokes...") - for p in stage.Traverse(): - if not p.IsA(UsdGeom.Mesh): - continue - - print(".", end=' ') - ret.append(Stroke(p, - p.GetAttribute("points").Get(0), - p.GetAttribute("faceVertexIndices").Get(0), - p.GetAttribute("stroke:vertOffsets").Get(0), - p.GetAttribute("stroke:vertCounts").Get(0), - p.GetAttribute("stroke:triOffsets").Get(0), - p.GetAttribute("stroke:triCounts").Get(0), - p.GetAttribute("primvars:displayOpacity").Get(0))) - return ret + ret = [] + print("Reading strokes...") + for p in stage.Traverse(): + if not p.IsA(UsdGeom.Mesh): + continue + + print(".", end=" ") + ret.append( + Stroke( + p, + p.GetAttribute("points").Get(0), + p.GetAttribute("faceVertexIndices").Get(0), + p.GetAttribute("stroke:vertOffsets").Get(0), + p.GetAttribute("stroke:vertCounts").Get(0), + p.GetAttribute("stroke:triOffsets").Get(0), + p.GetAttribute("stroke:triCounts").Get(0), + p.GetAttribute("primvars:displayOpacity").Get(0), + ) + ) + return ret if __name__ == "__main__": - usdFile = sys.argv[1] - outputFile = usdFile.replace(".usd", "--animated.usd") - stg = Usd.Stage.Open(usdFile) - try: - animate(stg) - finally: - 
print("Saving...") - stg.Export(outputFile) + usdFile = sys.argv[1] + outputFile = usdFile.replace(".usd", "--animated.usd") + stg = Usd.Stage.Open(usdFile) + try: + animate(stg) + finally: + print("Saving...") + stg.Export(outputFile) diff --git a/Support/bin/build.py b/Support/bin/build.py index 94e2f92666..f44b8a3ec9 100755 --- a/Support/bin/build.py +++ b/Support/bin/build.py @@ -19,7 +19,9 @@ # Add ../Python to sys.path sys.path.append( - os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'Python')) + os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "Python") +) import unitybuild.main # noqa: E402 pylint: disable=import-error,wrong-import-position + unitybuild.main.main() diff --git a/Support/bin/check_brush_cullmodes.py b/Support/bin/check_brush_cullmodes.py index 42b21558ce..f43fe9b9a7 100644 --- a/Support/bin/check_brush_cullmodes.py +++ b/Support/bin/check_brush_cullmodes.py @@ -27,94 +27,95 @@ # Add ../Python to sys.path sys.path.append( - os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'Python')) + os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "Python") +) import unitybuild.refgraph # noqa: E402 pylint: disable=import-error,wrong-import-position -BASE = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..') +BASE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..") def dfs_iter(graph, guid): - """graph: networkx.DiGraph - guid: node name""" - seen = set() - q = collections.deque() - q.append(guid) - while q: - elt = q.pop() - seen.add(elt) - yield elt - q.extend(succ for succ in graph.successors_iter(elt) if succ not in seen) + """graph: networkx.DiGraph + guid: node name""" + seen = set() + q = collections.deque() + q.append(guid) + while q: + elt = q.pop() + seen.add(elt) + yield elt + q.extend(succ for succ in graph.successors_iter(elt) if succ not in seen) def shaders_for_brush(rg, g_brush): - """rg: unitybuild.refgraph.ReferenceGraph - g_brush: node (brush guid) - yields nodes for shaders.""" - for g in dfs_iter(rg.g, g_brush): - try: - n = rg.guid_to_name[g] - except KeyError: - continue - if n.lower().endswith('.shader'): - yield g + """rg: unitybuild.refgraph.ReferenceGraph + g_brush: node (brush guid) + yields nodes for shaders.""" + for g in dfs_iter(rg.g, g_brush): + try: + n = rg.guid_to_name[g] + except KeyError: + continue + if n.lower().endswith(".shader"): + yield g def cullmodes_for_brush(rg, g_brush): - """rg: unitybuild.refgraph.ReferenceGraph - g_brush: node (brush guid) - Returns list of culling modes used by shaders for that brush.""" - modes = set() - for g_shader in shaders_for_brush(rg, g_brush): - for mode in cullmodes_for_shader(rg.guid_to_name[g_shader]): - modes.add(mode) - return sorted(modes, key=lambda m: m.lower) + """rg: unitybuild.refgraph.ReferenceGraph + g_brush: node (brush guid) + Returns list of culling modes used by shaders for that brush.""" + modes = set() + for g_shader in shaders_for_brush(rg, g_brush): + for mode in cullmodes_for_shader(rg.guid_to_name[g_shader]): + modes.add(mode) + return sorted(modes, key=lambda m: m.lower) def cullmodes_for_shader(shader, memo=None): - """shader: name of shader asset - Returns list of culling modes used by the shader.""" - # This is to replace a risky default value, but it looks like this needed a parameter from the caller, or, failing that, a global. 
FIXME - if memo is None: - memo = {} - try: - return memo[shader] - except KeyError: - pass - with open(os.path.join(BASE, shader)) as f: - txt = f.read() - culls = [m.group(1) for m in - re.finditer(r'cull\s+(\w+)', txt, re.I | re.M)] - memo[shader] = culls - return culls + """shader: name of shader asset + Returns list of culling modes used by the shader.""" + # This is to replace a risky default value, but it looks like this needed a parameter from the caller, or, failing that, a global. FIXME + if memo is None: + memo = {} + try: + return memo[shader] + except KeyError: + pass + with open(os.path.join(BASE, shader)) as f: + txt = f.read() + culls = [m.group(1) for m in re.finditer(r"cull\s+(\w+)", txt, re.I | re.M)] + memo[shader] = culls + return culls def is_brush_doublesided(rg, g_brush): - """rg: unitybuild.refgraph.ReferenceGraph - g_brush: node (brush guid) - Returns True if brush generates doublesided geometry.""" - filename = rg.guid_to_name[g_brush] - with open(os.path.join(BASE, filename)) as f: - txt = f.read() - return int(re.search(r'm_RenderBackfaces: (.)', txt).group(1)) + """rg: unitybuild.refgraph.ReferenceGraph + g_brush: node (brush guid) + Returns True if brush generates doublesided geometry.""" + filename = rg.guid_to_name[g_brush] + with open(os.path.join(BASE, filename)) as f: + txt = f.read() + return int(re.search(r"m_RenderBackfaces: (.)", txt).group(1)) def main(): - rg = unitybuild.refgraph.ReferenceGraph(BASE) - g2n = rg.guid_to_name + rg = unitybuild.refgraph.ReferenceGraph(BASE) + g2n = rg.guid_to_name - def is_brush(guid): - try: - name = g2n[guid] - except KeyError: - return False - return re.search(r'Brush.*asset$', name) is not None - brushes = [node for node in rg.g.nodes_iter() if is_brush(node)] - for g_brush in sorted(brushes, key=g2n.get): - culls = cullmodes_for_brush(rg, g_brush) - if len(culls) > 0 and is_brush_doublesided(rg, g_brush): - print("Brush %s\n is double-sided but has cull %s" % (g2n[g_brush], culls)) + def is_brush(guid): + try: + name = g2n[guid] + except KeyError: + return False + return re.search(r"Brush.*asset$", name) is not None + + brushes = [node for node in rg.g.nodes_iter() if is_brush(node)] + for g_brush in sorted(brushes, key=g2n.get): + culls = cullmodes_for_brush(rg, g_brush) + if len(culls) > 0 and is_brush_doublesided(rg, g_brush): + print("Brush %s\n is double-sided but has cull %s" % (g2n[g_brush], culls)) -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/Support/bin/codegen.py b/Support/bin/codegen.py index 391d03f12e..6391619ba1 100644 --- a/Support/bin/codegen.py +++ b/Support/bin/codegen.py @@ -20,71 +20,83 @@ # Stolen from unitybuild/main.py def find_project_dir(): - def search_upwards_from(dirname): - # Search upwards for root of unity project - dirname = os.path.abspath(dirname) - while True: - if os.path.exists(os.path.join(dirname, 'Assets')) and os.path.exists(os.path.join(dirname, 'ProjectSettings')): - return dirname - parent = os.path.dirname(dirname) - if parent == dirname: - return None - dirname = parent - return search_upwards_from('.') or search_upwards_from(__file__) + def search_upwards_from(dirname): + # Search upwards for root of unity project + dirname = os.path.abspath(dirname) + while True: + if os.path.exists(os.path.join(dirname, "Assets")) and os.path.exists( + os.path.join(dirname, "ProjectSettings") + ): + return dirname + parent = os.path.dirname(dirname) + if parent == dirname: + return None + dirname = parent + + return 
search_upwards_from(".") or search_upwards_from(__file__) def do_codegen(filename): - contents = open(filename).read() - pat = re.compile(r'''^ \#if\ USING_CODEGEN_PY \n + contents = open(filename).read() + pat = re.compile( + r"""^ \#if\ USING_CODEGEN_PY \n (?P \s* // \s+ EXPAND\( (?P .*?) \) \n (?P .*? ) ) \n \#else \n .*? # not captured because this part is ignored - \#endif \n''', re.MULTILINE | re.VERBOSE | re.DOTALL) + \#endif \n""", + re.MULTILINE | re.VERBOSE | re.DOTALL, + ) - def expand_one(varname, substitution, body): - body = body.replace(varname, substitution) - return ' // %s = %s\n%s' % (varname, substitution, body) + def expand_one(varname, substitution, body): + body = body.replace(varname, substitution) + return " // %s = %s\n%s" % (varname, substitution, body) - def expand_all(match): - # First line of the body should look like - # // EXPAND(VAR, value1, value2, value3) - var_values = [x.strip() for x in match.group('varlist').split(',')] - var_name = var_values.pop(0) - body = match.group('body') - # Strip comments, because they'll just duplicate the comments you can read - # in the #if'd out code. - body = re.sub(r'^\s*//.*\n', '', body, flags=re.M) - expansion = '\n\n'.join(expand_one(var_name, var_value, body) - for var_value in var_values) - return '''#if USING_CODEGEN_PY + def expand_all(match): + # First line of the body should look like + # // EXPAND(VAR, value1, value2, value3) + var_values = [x.strip() for x in match.group("varlist").split(",")] + var_name = var_values.pop(0) + body = match.group("body") + # Strip comments, because they'll just duplicate the comments you can read + # in the #if'd out code. + body = re.sub(r"^\s*//.*\n", "", body, flags=re.M) + expansion = "\n\n".join( + expand_one(var_name, var_value, body) for var_value in var_values + ) + return """#if USING_CODEGEN_PY %s #else # region codegen %s # endregion #endif -''' % (match.group('fullbody'), expansion) +""" % ( + match.group("fullbody"), + expansion, + ) - new_contents, n = pat.subn(expand_all, contents) - assert n == 1 - if new_contents != contents: - with open(filename, 'w') as outf: - outf.write(new_contents) - print('Updated', filename) - else: - print('Not updated', filename) + new_contents, n = pat.subn(expand_all, contents) + assert n == 1 + if new_contents != contents: + with open(filename, "w") as outf: + outf.write(new_contents) + print("Updated", filename) + else: + print("Not updated", filename) def main(): - project_dir = find_project_dir() - files = ['Assets/Scripts/SketchBinaryWriter.cs', - 'Assets/Scripts/SketchBinaryReader.cs'] - for filename in files: - do_codegen(os.path.join(project_dir, filename)) + project_dir = find_project_dir() + files = [ + "Assets/Scripts/SketchBinaryWriter.cs", + "Assets/Scripts/SketchBinaryReader.cs", + ] + for filename in files: + do_codegen(os.path.join(project_dir, filename)) -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/Support/bin/compare_glb.py b/Support/bin/compare_glb.py index 6f38bac8f8..4a7dbb0fc2 100644 --- a/Support/bin/compare_glb.py +++ b/Support/bin/compare_glb.py @@ -21,238 +21,260 @@ import os import re import sys + # TODO -- this doesn't exist in tbdata.glb! 
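+# (This script only relies on BaseGlb.create(), .get_json() and .bin_chunk below.)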
from tbdata.glb import BaseGlb # pylint: disable=no-name-in-module,import-error try: - import jsondiff + import jsondiff except ImportError: - print("Try 'pip install jsondiff'", file=sys.stderr) - raise + print("Try 'pip install jsondiff'", file=sys.stderr) + raise -DEFAULT_BASELINE_DIR = 'Baseline 22.0' -ROOT = os.path.expanduser('~/Documents/Tilt Brush/Exports') +DEFAULT_BASELINE_DIR = "Baseline 22.0" +ROOT = os.path.expanduser("~/Documents/Tilt Brush/Exports") def first_glob(glob_pat): - """Returns the first match for the passed glob_pat, or raises error.""" - maybe = glob.glob(glob_pat) - if len(maybe) == 0: - raise LookupError("No %s" % glob_pat) - if len(maybe) == 1: - return maybe[0].replace('\\', '/') - raise LookupError("Too many %s: %s" % (glob_pat, maybe)) + """Returns the first match for the passed glob_pat, or raises error.""" + maybe = glob.glob(glob_pat) + if len(maybe) == 0: + raise LookupError("No %s" % glob_pat) + if len(maybe) == 1: + return maybe[0].replace("\\", "/") + raise LookupError("Too many %s: %s" % (glob_pat, maybe)) def get_latest_glb(tilt_name, poly): - """Gets the .glb file that was most-recently exported from .tilt - Pass: - poly - True for Poly-style glb1, False for glb2 """ - assert isinstance(poly, bool) - - def get_index(dirname): - """Returns the small NN if dirname matches ' NN'""" - length = len(tilt_name) - prefix, suffix = dirname[:length], dirname[length:] - # Careful; if tilt_name = 'ET_All', directories like 'ET_All_Huge 3' will pass this check - if prefix != tilt_name: - return None - if suffix == '': - return -1 - m = re.match(r' (\d+)$', suffix) - if m is not None: - return int(m.group(1)) - return None - - matches = [d for d in os.listdir(ROOT) if get_index(d) is not None] - matches.sort(key=get_index) - if len(matches) == 0: - raise LookupError("No export %s" % tilt_name) - directory = 'glb1' if poly else 'glb' - return first_glob(os.path.join(os.path.join(ROOT, matches[-1]), directory, '*.glb*')) + """Gets the .glb file that was most-recently exported from .tilt + Pass: + poly - True for Poly-style glb1, False for glb2""" + assert isinstance(poly, bool) + + def get_index(dirname): + """Returns the small NN if dirname matches ' NN'""" + length = len(tilt_name) + prefix, suffix = dirname[:length], dirname[length:] + # Careful; if tilt_name = 'ET_All', directories like 'ET_All_Huge 3' will pass this check + if prefix != tilt_name: + return None + if suffix == "": + return -1 + m = re.match(r" (\d+)$", suffix) + if m is not None: + return int(m.group(1)) + return None + + matches = [d for d in os.listdir(ROOT) if get_index(d) is not None] + matches.sort(key=get_index) + if len(matches) == 0: + raise LookupError("No export %s" % tilt_name) + directory = "glb1" if poly else "glb" + return first_glob( + os.path.join(os.path.join(ROOT, matches[-1]), directory, "*.glb*") + ) def get_baseline_glb(name, baseline_dir_name, poly=True): - """Gets a known-good .glb file that was exported from .tilt. - It's your responsibility to create them and save these off in the "Baseline" folder. - Pass: - poly - same as for get_latest_glb()""" - assert isinstance(poly, bool) - name_no_digit = re.sub(r' \d+$', '', os.path.basename(name)) - directory = 'glb1' if poly else 'glb' - parent = os.path.join(ROOT, baseline_dir_name, name_no_digit, directory) - return first_glob(parent + '/*.glb*') + """Gets a known-good .glb file that was exported from .tilt. + It's your responsibility to create them and save these off in the "Baseline" folder. 
+ Pass: + poly - same as for get_latest_glb()""" + assert isinstance(poly, bool) + name_no_digit = re.sub(r" \d+$", "", os.path.basename(name)) + directory = "glb1" if poly else "glb" + parent = os.path.join(ROOT, baseline_dir_name, name_no_digit, directory) + return first_glob(parent + "/*.glb*") def redact(dct, keys): - """Helper for the tweak_ functions""" - if isinstance(keys, str): - keys = [keys] - for key in keys: - if key in dct: - dct[key] = 'redacted' + """Helper for the tweak_ functions""" + if isinstance(keys, str): + keys = [keys] + for key in keys: + if key in dct: + dct[key] = "redacted" def tweak_fix_sampler(dcts): - for label, dct in enumerate(dcts): - # Older files have an incorrect name for this sampler - # label==1 is the old file - if label == 1: - def rename(txt): - return txt.replace('sampler_LINEAR_LINEAR_REPEAT', - 'sampler_LINEAR_LINEAR_MIPMAP_LINEAR_REPEAT') - dct['samplers'] = dict((rename(k), v) for (k, v) in list(dct['samplers'].items())) - for texture in list(dct.get('textures', {}).values()): - texture['sampler'] = rename(texture['sampler']) + for label, dct in enumerate(dcts): + # Older files have an incorrect name for this sampler + # label==1 is the old file + if label == 1: + + def rename(txt): + return txt.replace( + "sampler_LINEAR_LINEAR_REPEAT", + "sampler_LINEAR_LINEAR_MIPMAP_LINEAR_REPEAT", + ) + + dct["samplers"] = dict( + (rename(k), v) for (k, v) in list(dct["samplers"].items()) + ) + for texture in list(dct.get("textures", {}).values()): + texture["sampler"] = rename(texture["sampler"]) def tweak_ignore_nondeterministic_geometry(dcts): - for _, dct in enumerate(dcts): - # Geometry is nondeterministic, so ignore min/max values - for accessor in list(dct.get('accessors', {}).values()): - redact(accessor, ['min', 'max']) + for _, dct in enumerate(dcts): + # Geometry is nondeterministic, so ignore min/max values + for accessor in list(dct.get("accessors", {}).values()): + redact(accessor, ["min", "max"]) def tweak_ignore_envlight(dcts): - for _, dct in enumerate(dcts): - # The exported light color is slightly nondeterminstic - # and also I changed the environment in one of the .tilt files and don't - # want to bother re-exporting it - for name, node in items(dct.get('nodes', {})): - if isinstance(name, int): - name = node['name'] - if 'SceneLight' in name: - redact(node, 'matrix') - for mat in values(dct.get('materials', {})): - redact(mat.get('values', {}), - ['SceneLight_0_color', 'SceneLight_1_color', 'ambient_light_color']) + for _, dct in enumerate(dcts): + # The exported light color is slightly nondeterminstic + # and also I changed the environment in one of the .tilt files and don't + # want to bother re-exporting it + for name, node in items(dct.get("nodes", {})): + if isinstance(name, int): + name = node["name"] + if "SceneLight" in name: + redact(node, "matrix") + for mat in values(dct.get("materials", {})): + redact( + mat.get("values", {}), + ["SceneLight_0_color", "SceneLight_1_color", "ambient_light_color"], + ) def tweak_remove_vertexid(dcts): - removed = [] # nodes that were deleted; may contain Nones - for _, dct in enumerate(dcts): - accs = dct['accessors'] - for k in list(accs.keys()): - if 'vertexId' in k: - removed.append(accs.pop(k)) - for m in list(dct['meshes'].values()): - for prim in m['primitives']: - removed.append(prim['attributes'].pop('VERTEXID', None)) - - # Only do this if we detected any vertexid; otherwise I want to verify the offsets, lengths, etc - if any([_f for _f in removed if _f]): + removed = [] # nodes 
that were deleted; may contain Nones for _, dct in enumerate(dcts): - dct['bufferViews'].pop('floatBufferView', None) - for bv in list(dct['bufferViews'].values()): - redact(bv, 'byteOffset') - redact(dct['buffers']['binary_glTF'], 'byteLength') + accs = dct["accessors"] + for k in list(accs.keys()): + if "vertexId" in k: + removed.append(accs.pop(k)) + for m in list(dct["meshes"].values()): + for prim in m["primitives"]: + removed.append(prim["attributes"].pop("VERTEXID", None)) + + # Only do this if we detected any vertexid; otherwise I want to verify the offsets, lengths, etc + if any(_f for _f in removed if _f): + for _, dct in enumerate(dcts): + dct["bufferViews"].pop("floatBufferView", None) + for bv in list(dct["bufferViews"].values()): + redact(bv, "byteOffset") + redact(dct["buffers"]["binary_glTF"], "byteLength") def tweak_remove_color_minmax(dcts): - # It's ok if the newer glb doesn't have min/max on color. I intentionally removed it. - for dct in dcts: - for name, acc in list(dct['accessors'].items()): - if 'color' in name: - acc.pop('min', None) - acc.pop('max', None) + # It's ok if the newer glb doesn't have min/max on color. I intentionally removed it. + for dct in dcts: + for name, acc in list(dct["accessors"].items()): + if "color" in name: + acc.pop("min", None) + acc.pop("max", None) def items(dct_or_lst): - """Returns list items, or dictionary items""" - if isinstance(dct_or_lst, dict): - return list(dct_or_lst.items()) - return list(enumerate(dct_or_lst)) + """Returns list items, or dictionary items""" + if isinstance(dct_or_lst, dict): + return list(dct_or_lst.items()) + return list(enumerate(dct_or_lst)) def values(dct_or_lst): - """Returns list values, or dictionary values""" - if isinstance(dct_or_lst, dict): - return list(dct_or_lst.values()) - return list(dct_or_lst) + """Returns list values, or dictionary values""" + if isinstance(dct_or_lst, dict): + return list(dct_or_lst.values()) + return list(dct_or_lst) def tweak_rename_refimage(dcts): - # I renamed reference image uris from "refimageN_" -> "media_"; change the baseline to suit - for _, image in items(dcts[1]['images']): - if 'uri' in image: - image['uri'] = re.sub(r'^refimage[0-9]*', 'media', image['uri']) + # I renamed reference image uris from "refimageN_" -> "media_"; change the baseline to suit + for _, image in items(dcts[1]["images"]): + if "uri" in image: + image["uri"] = re.sub(r"^refimage[0-9]*", "media", image["uri"]) def tweak_ignore_generator(dcts): - for dct in dcts: - redact(dct['asset'], 'generator') + for dct in dcts: + redact(dct["asset"], "generator") def binary_diff(bina, binb): - # Returns (success, details) - # No need to get fancy yet since the binary is identical :-D - bin_same = (bina == binb) - return bin_same, '' if bin_same else '\nBINARY DIFFERENCE' - - -def compare_glb(a, b, binary, - tweaks=( - # tweak_fix_sampler, - # tweak_remove_vertexid, - # tweak_ignore_nondeterministic_geometry, - # tweak_remove_color_minmax, - tweak_ignore_generator, - tweak_rename_refimage, - tweak_ignore_envlight, - )): - if open(a).read() == open(b).read(): - return (True, 'IDENTICAL') - - glbs = [BaseGlb.create(x) for x in [a, b]] - objs = [json.loads(g.get_json()) for g in glbs] - for tweak in tweaks: - tweak(objs) - details = jsondiff.diff(objs[0], objs[1], syntax='symmetric', - dump=True, - dumper=jsondiff.JsonDumper(indent=2)) - if binary: - bin_same, bin_details = binary_diff(glbs[0].bin_chunk, glbs[1].bin_chunk) - else: - bin_same, bin_details = True, 'n/a' - return details == '{}' and 
bin_same, details + bin_details - - -def compare_to_baseline(name, binary=True, poly=True, baseline_dir_name=DEFAULT_BASELINE_DIR): - """Compare the Poly .glb file to its baseline and report differences""" - try: - latest = get_latest_glb(name, poly=poly) - except LookupError: - print("%s: Not found" % name) - return - baseline = get_baseline_glb(name, baseline_dir_name, poly=poly) - result, details = compare_glb(latest, baseline, binary) - short = os.path.basename(os.path.dirname(os.path.dirname(latest))) - summary = ('ok' if result else ('FAIL: %s' % (details, ))) - print("%s ver %d: %s" % (short, 1 if poly else 2, summary)) + # Returns (success, details) + # No need to get fancy yet since the binary is identical :-D + bin_same = bina == binb + return bin_same, "" if bin_same else "\nBINARY DIFFERENCE" + + +def compare_glb( + a, + b, + binary, + tweaks=( + # tweak_fix_sampler, + # tweak_remove_vertexid, + # tweak_ignore_nondeterministic_geometry, + # tweak_remove_color_minmax, + tweak_ignore_generator, + tweak_rename_refimage, + tweak_ignore_envlight, + ), +): + if open(a).read() == open(b).read(): + return (True, "IDENTICAL") + + glbs = [BaseGlb.create(x) for x in [a, b]] + objs = [json.loads(g.get_json()) for g in glbs] + for tweak in tweaks: + tweak(objs) + details = jsondiff.diff( + objs[0], + objs[1], + syntax="symmetric", + dump=True, + dumper=jsondiff.JsonDumper(indent=2), + ) + if binary: + bin_same, bin_details = binary_diff(glbs[0].bin_chunk, glbs[1].bin_chunk) + else: + bin_same, bin_details = True, "n/a" + return details == "{}" and bin_same, details + bin_details + + +def compare_to_baseline( + name, binary=True, poly=True, baseline_dir_name=DEFAULT_BASELINE_DIR +): + """Compare the Poly .glb file to its baseline and report differences""" + try: + latest = get_latest_glb(name, poly=poly) + except LookupError: + print("%s: Not found" % name) + return + baseline = get_baseline_glb(name, baseline_dir_name, poly=poly) + result, details = compare_glb(latest, baseline, binary) + short = os.path.basename(os.path.dirname(os.path.dirname(latest))) + summary = "ok" if result else ("FAIL: %s" % (details,)) + print("%s ver %d: %s" % (short, 1 if poly else 2, summary)) def compare_two(name1, name2, binary=True): - def get_glb_named(name): - return first_glob(os.path.join(ROOT, name, 'glb1', '*.glb*')) - result, details = compare_glb(get_glb_named(name1), get_glb_named(name2), binary) - summary = ('ok' if result else ('FAIL: %s' % (details, ))) - print(summary) + def get_glb_named(name): + return first_glob(os.path.join(ROOT, name, "glb1", "*.glb*")) + + result, details = compare_glb(get_glb_named(name1), get_glb_named(name2), binary) + summary = "ok" if result else ("FAIL: %s" % (details,)) + print(summary) # ----- def test(): - for dirname in glob.glob(os.path.join(ROOT, DEFAULT_BASELINE_DIR, 'ET_All*')): - compare_to_baseline(os.path.basename(dirname), poly=True) - compare_to_baseline(os.path.basename(dirname), poly=False) + for dirname in glob.glob(os.path.join(ROOT, DEFAULT_BASELINE_DIR, "ET_All*")): + compare_to_baseline(os.path.basename(dirname), poly=True) + compare_to_baseline(os.path.basename(dirname), poly=False) def main(): - parser = argparse.ArgumentParser() - parser.add_argument('exports', nargs='*', help='Names of tilt exports to check') - args = parser.parse_args() - for arg in args.exports: - compare_to_baseline(arg) + parser = argparse.ArgumentParser() + parser.add_argument("exports", nargs="*", help="Names of tilt exports to check") + args = parser.parse_args() + for 
arg in args.exports: + compare_to_baseline(arg) -if __name__ == '__main__': - test() +if __name__ == "__main__": + test() diff --git a/Support/bin/convert_gltf1.py b/Support/bin/convert_gltf1.py index 88f6db3203..27a8e0d280 100644 --- a/Support/bin/convert_gltf1.py +++ b/Support/bin/convert_gltf1.py @@ -31,274 +31,307 @@ # (Brush guid, gltf alphaMode) PBR_BRUSH_DESCRIPTORS = [ - ('f86a096c-2f4f-4f9d-ae19-81b99f2944e0', 'OPAQUE'), - ("19826f62-42ac-4a9e-8b77-4231fbd0cfbf", 'BLEND') + ("f86a096c-2f4f-4f9d-ae19-81b99f2944e0", "OPAQUE"), + ("19826f62-42ac-4a9e-8b77-4231fbd0cfbf", "BLEND"), ] def convert_to_array_helper(dct, name_to_index, key): - if key not in dct: - return - by_name = dct[key] - by_index = [] - for name, value in list(by_name.items()): - assert name not in name_to_index, "Name %s already added as %s" % (name, name_to_index[name]) - - if 'name' in value: - assert value['name'] == name, "Mismatching names %s %s %s" % (key, name, value['name']) - value['name'] = name - - index = len(by_index) - by_index.append(value) - name_to_index[name] = (index, key) - dct[key] = by_index + if key not in dct: + return + by_name = dct[key] + by_index = [] + for name, value in list(by_name.items()): + assert name not in name_to_index, "Name %s already added as %s" % ( + name, + name_to_index[name], + ) + + if "name" in value: + assert value["name"] == name, "Mismatching names %s %s %s" % ( + key, + name, + value["name"], + ) + value["name"] = name + + index = len(by_index) + by_index.append(value) + name_to_index[name] = (index, key) + dct[key] = by_index def convert_to_index(container, key, name_to_index, required_object_type): - name = container[key] - try: - index, object_type = name_to_index[name] - except KeyError as e: - raise LookupError("No %s named %s" % (required_object_type, name)) from e - assert object_type == required_object_type - container[key] = index + name = container[key] + try: + index, object_type = name_to_index[name] + except KeyError as e: + raise LookupError("No %s named %s" % (required_object_type, name)) from e + assert object_type == required_object_type + container[key] = index def convert_to_indices(dct, key, name_to_index, required_object_type): - lst = dct[key] - assert isinstance(lst, list) - for i in range(len(lst)): - convert_to_index(lst, i, name_to_index, required_object_type) + lst = dct[key] + assert isinstance(lst, list) + for i in range(len(lst)): + convert_to_index(lst, i, name_to_index, required_object_type) COMPONENT_SIZES = { - 5120: 1, # byte - 5121: 1, # unsigned byte - 5122: 2, # short - 5123: 2, # unsigned short - 5126: 4, # float -} -NUM_COMPONENTS = { - 'SCALAR': 1, - 'VEC2': 2, - 'VEC3': 3, - 'VEC4': 4 + 5120: 1, # byte + 5121: 1, # unsigned byte + 5122: 2, # short + 5123: 2, # unsigned short + 5126: 4, # float } +NUM_COMPONENTS = {"SCALAR": 1, "VEC2": 2, "VEC3": 3, "VEC4": 4} def pop_explicit_byte_stride(accessor): - """Removes and returns a byteStride to move from the accessor to the bufferVies. - Returns None to mean "bufferView should not define it". 
- - This is useful because gltf1 defines 0 to mean "tightly packed", - but gltf2 says 0 is invalid.""" - stride = accessor.pop('byteStride', None) - calculated_stride = COMPONENT_SIZES[accessor['componentType']] * NUM_COMPONENTS[accessor['type']] - - if (calculated_stride % 4) != 0: - # The weirdo rules are: - # - if bufferView is used by more than one accessor, it must set stride - # - if not set, it means "tightly packed" - # - Values must be multiple of 4, and > 0 - # Thus sometimes we have to use the implicit version - return None - if stride is None: - return None - if stride == 0: - return calculated_stride - - # It would be surprising if the calculated stride differed from the tightly-packed stride, - # at least for Tilt Brush files - if calculated_stride != stride: - print('WARN: strange stride %s vs %s for accessor %s' % ( - calculated_stride, stride, accessor['name'])) - return stride + """Removes and returns a byteStride to move from the accessor to the bufferVies. + Returns None to mean "bufferView should not define it". + + This is useful because gltf1 defines 0 to mean "tightly packed", + but gltf2 says 0 is invalid.""" + stride = accessor.pop("byteStride", None) + calculated_stride = ( + COMPONENT_SIZES[accessor["componentType"]] * NUM_COMPONENTS[accessor["type"]] + ) + + if (calculated_stride % 4) != 0: + # The weirdo rules are: + # - if bufferView is used by more than one accessor, it must set stride + # - if not set, it means "tightly packed" + # - Values must be multiple of 4, and > 0 + # Thus sometimes we have to use the implicit version + return None + if stride is None: + return None + if stride == 0: + return calculated_stride + + # It would be surprising if the calculated stride differed from the tightly-packed stride, + # at least for Tilt Brush files + if calculated_stride != stride: + print( + "WARN: strange stride %s vs %s for accessor %s" + % (calculated_stride, stride, accessor["name"]) + ) + return stride def pop_non_gltf2_property(thing, property_name, gltf1_default): - """Removes thing[property_name] to make *thing* gltf2-compliant. - Asserts if the property value is anything other than the gltf1 default.""" - value = thing.pop(property_name, gltf1_default) - assert value == gltf1_default, "Non-default value %s for property %s.%s" % ( - value, thing['name'], property_name) - - -def convert(filename): # pylint: disable=too-many-statements,too-many-branches,too-many-locals - txt = open(filename).read() - txt = re.sub('// [^\"\n]*\n', '\n', txt) - - gltf = json.loads(txt, object_pairs_hook=collections.OrderedDict) - name_to_index = {} - - # Store the vertex shader URI for convenient access; it'll be removed again later down - for mat in list(gltf['materials'].values()): - if 'technique' in mat: - technique = gltf['techniques'][mat['technique']] - program = gltf['programs'][technique['program']] - shader = gltf['shaders'][program['vertexShader']] - mat['_vs_uri'] = shader['uri'] - - # Convert by-name lookups to by-index lookups - for key in ('accessors', 'bufferViews', 'buffers', 'cameras', 'images', 'materials', 'meshes', - 'nodes', 'samplers', 'scenes', 'textures', 'shaders', 'programs', 'techniques'): - convert_to_array_helper(gltf, name_to_index, key) - - # If there was a buffers['binary_glTF'], make sure it is now at buffers[0] - # This is required by the binary gltf spec. 
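The by-name to by-index pass above is central to the conversion: every named gltf1 collection becomes a list, and name_to_index remembers where each name landed so later references can be rewritten as integers. A minimal check, using a made-up single-buffer document (the helper is the convert_to_array_helper defined earlier in this file):

gltf = {"buffers": {"binary_glTF": {"byteLength": 1024}}}
name_to_index = {}
convert_to_array_helper(gltf, name_to_index, "buffers")
# The entry keeps its data, gains a "name", and its position is recorded.
assert gltf["buffers"] == [{"byteLength": 1024, "name": "binary_glTF"}]
assert name_to_index["binary_glTF"] == (0, "buffers")

Because the JSON is loaded with object_pairs_hook=collections.OrderedDict, insertion order is preserved, which is what lets the assertion below confirm that the binary buffer ended up at index 0 as the binary glTF spec requires.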
- try: - assert name_to_index['binary_glTF'] == (0, 'buffers') - except KeyError: - pass - - # Don't need these things in gltf 2 - for key in ('shaders', 'programs', 'techniques'): - if key in gltf: - del gltf[key] - - gltf['asset']['version'] = '2.0' - - if 'scene' in gltf: - convert_to_index(gltf, 'scene', name_to_index, 'scenes') - - if 'extensionsUsed' in gltf: - lst = gltf['extensionsUsed'] - # This extension is obsolete - lst[:] = [elt for elt in lst if elt != "KHR_binary_glTF"] - if len(lst) == 0: - del gltf['extensionsUsed'] - - for accessor in gltf['accessors']: - convert_to_index(accessor, 'bufferView', name_to_index, 'bufferViews') - # Move byteStride from accessor to bufferView. - buffer_view = gltf['bufferViews'][accessor['bufferView']] - byte_stride = pop_explicit_byte_stride(accessor) - assert buffer_view.get('byteStride', byte_stride) == byte_stride, \ - "byteStride conflict: %s vs %s" % (buffer_view.get('byteStride'), byte_stride) - if byte_stride is not None: - buffer_view['byteStride'] = byte_stride - - for thing in gltf['buffers']: - pop_non_gltf2_property(thing, 'type', 'arraybuffer') - - for thing in gltf['bufferViews']: - convert_to_index(thing, 'buffer', name_to_index, 'buffers') - - for material in gltf['materials']: - material.pop('technique', None) - vertex_shader_uri = material.pop('_vs_uri', '') - - # Convert to pbr material - values = material.pop('values', {}) - if 'BaseColorFactor' in values: - for (guid, alpha_mode) in PBR_BRUSH_DESCRIPTORS: - if guid in vertex_shader_uri: - material['alphaMode'] = alpha_mode - - material['pbrMetallicRoughness'] = { - 'baseColorFactor': values['BaseColorFactor'], - 'baseColorTexture': { - 'index': values['BaseColorTex'], - 'texCoord': 0 # ??? - }, - 'metallicFactor': values['MetallicFactor'], - 'roughnessFactor': values['RoughnessFactor'] - } - convert_to_index(material['pbrMetallicRoughness']['baseColorTexture'], 'index', - name_to_index, 'textures') - - for mesh in gltf['meshes']: - for primitive in mesh.get('primitives', []): - attributes = primitive.get('attributes', {}) - for semantic, accessor in list(attributes.items()): - convert_to_index(attributes, semantic, name_to_index, 'accessors') - convert_to_index(primitive, 'indices', name_to_index, 'accessors') - convert_to_index(primitive, 'material', name_to_index, 'materials') - # COLOR is not a valid semantic; COLOR_0 is - if 'COLOR' in attributes: - assert 'COLOR_0' not in attributes - attributes['COLOR_0'] = attributes.pop('COLOR') - - for node in gltf['nodes']: - # neither gltf 1 nor gltf 2 define a 'light' property on nodes. 
- node.pop('light', None) - - # gltf1 allows multiple meshes per node; gltf2 does not - meshes = node.pop('meshes', []) - if len(meshes) == 0: - pass - elif len(meshes) == 1: - node['mesh'] = meshes[0] - else: - assert False, "Unsupported: convert node with multiple meshes" - - if 'mesh' in node: - convert_to_index(node, 'mesh', name_to_index, 'meshes') - if 'children' in node: - convert_to_indices(node, 'children', name_to_index, 'nodes') - - for scene in gltf['scenes']: - convert_to_indices(scene, 'nodes', name_to_index, 'nodes') - - for texture in gltf['textures']: - pop_non_gltf2_property(texture, 'format', 6408) - pop_non_gltf2_property(texture, 'internalFormat', 6408) - pop_non_gltf2_property(texture, 'target', 3553) - pop_non_gltf2_property(texture, 'type', 5121) - convert_to_index(texture, 'sampler', name_to_index, 'samplers') - convert_to_index(texture, 'source', name_to_index, 'images') - - check_for_forbidden_values(gltf, set(name_to_index.keys())) - - return json.dumps(gltf, indent=2) + """Removes thing[property_name] to make *thing* gltf2-compliant. + Asserts if the property value is anything other than the gltf1 default.""" + value = thing.pop(property_name, gltf1_default) + assert value == gltf1_default, "Non-default value %s for property %s.%s" % ( + value, + thing["name"], + property_name, + ) + + +def convert( + filename, +): # pylint: disable=too-many-statements,too-many-branches,too-many-locals + txt = open(filename).read() + txt = re.sub('// [^"\n]*\n', "\n", txt) + + gltf = json.loads(txt, object_pairs_hook=collections.OrderedDict) + name_to_index = {} + + # Store the vertex shader URI for convenient access; it'll be removed again later down + for mat in list(gltf["materials"].values()): + if "technique" in mat: + technique = gltf["techniques"][mat["technique"]] + program = gltf["programs"][technique["program"]] + shader = gltf["shaders"][program["vertexShader"]] + mat["_vs_uri"] = shader["uri"] + + # Convert by-name lookups to by-index lookups + for key in ( + "accessors", + "bufferViews", + "buffers", + "cameras", + "images", + "materials", + "meshes", + "nodes", + "samplers", + "scenes", + "textures", + "shaders", + "programs", + "techniques", + ): + convert_to_array_helper(gltf, name_to_index, key) + + # If there was a buffers['binary_glTF'], make sure it is now at buffers[0] + # This is required by the binary gltf spec. + try: + assert name_to_index["binary_glTF"] == (0, "buffers") + except KeyError: + pass + + # Don't need these things in gltf 2 + for key in ("shaders", "programs", "techniques"): + if key in gltf: + del gltf[key] + + gltf["asset"]["version"] = "2.0" + + if "scene" in gltf: + convert_to_index(gltf, "scene", name_to_index, "scenes") + + if "extensionsUsed" in gltf: + lst = gltf["extensionsUsed"] + # This extension is obsolete + lst[:] = [elt for elt in lst if elt != "KHR_binary_glTF"] + if len(lst) == 0: + del gltf["extensionsUsed"] + + for accessor in gltf["accessors"]: + convert_to_index(accessor, "bufferView", name_to_index, "bufferViews") + # Move byteStride from accessor to bufferView. 
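pop_explicit_byte_stride (defined above) is what makes this move safe: gltf1 uses byteStride == 0 to mean "tightly packed", which gltf2 forbids, so a zero is replaced by the stride computed from COMPONENT_SIZES and NUM_COMPONENTS, while a missing stride stays missing. Two quick checks on made-up accessors (5126 is the float component type from the table above):

acc = {"name": "position", "componentType": 5126, "type": "VEC3", "byteStride": 0}
assert pop_explicit_byte_stride(acc) == 12  # 3 float components * 4 bytes, tightly packed
assert "byteStride" not in acc              # the stride now belongs on the bufferView

# No explicit stride on the accessor means the bufferView is left alone.
assert pop_explicit_byte_stride({"componentType": 5126, "type": "SCALAR"}) is None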
+ buffer_view = gltf["bufferViews"][accessor["bufferView"]] + byte_stride = pop_explicit_byte_stride(accessor) + assert ( + buffer_view.get("byteStride", byte_stride) == byte_stride + ), "byteStride conflict: %s vs %s" % ( + buffer_view.get("byteStride"), + byte_stride, + ) + if byte_stride is not None: + buffer_view["byteStride"] = byte_stride + + for thing in gltf["buffers"]: + pop_non_gltf2_property(thing, "type", "arraybuffer") + + for thing in gltf["bufferViews"]: + convert_to_index(thing, "buffer", name_to_index, "buffers") + + for material in gltf["materials"]: + material.pop("technique", None) + vertex_shader_uri = material.pop("_vs_uri", "") + + # Convert to pbr material + values = material.pop("values", {}) + if "BaseColorFactor" in values: + for (guid, alpha_mode) in PBR_BRUSH_DESCRIPTORS: + if guid in vertex_shader_uri: + material["alphaMode"] = alpha_mode + + material["pbrMetallicRoughness"] = { + "baseColorFactor": values["BaseColorFactor"], + "baseColorTexture": { + "index": values["BaseColorTex"], + "texCoord": 0, # ??? + }, + "metallicFactor": values["MetallicFactor"], + "roughnessFactor": values["RoughnessFactor"], + } + convert_to_index( + material["pbrMetallicRoughness"]["baseColorTexture"], + "index", + name_to_index, + "textures", + ) + + for mesh in gltf["meshes"]: + for primitive in mesh.get("primitives", []): + attributes = primitive.get("attributes", {}) + for semantic, accessor in list(attributes.items()): + convert_to_index(attributes, semantic, name_to_index, "accessors") + convert_to_index(primitive, "indices", name_to_index, "accessors") + convert_to_index(primitive, "material", name_to_index, "materials") + # COLOR is not a valid semantic; COLOR_0 is + if "COLOR" in attributes: + assert "COLOR_0" not in attributes + attributes["COLOR_0"] = attributes.pop("COLOR") + + for node in gltf["nodes"]: + # neither gltf 1 nor gltf 2 define a 'light' property on nodes. 
+ node.pop("light", None) + + # gltf1 allows multiple meshes per node; gltf2 does not + meshes = node.pop("meshes", []) + if len(meshes) == 0: + pass + elif len(meshes) == 1: + node["mesh"] = meshes[0] + else: + assert False, "Unsupported: convert node with multiple meshes" + + if "mesh" in node: + convert_to_index(node, "mesh", name_to_index, "meshes") + if "children" in node: + convert_to_indices(node, "children", name_to_index, "nodes") + + for scene in gltf["scenes"]: + convert_to_indices(scene, "nodes", name_to_index, "nodes") + + for texture in gltf["textures"]: + pop_non_gltf2_property(texture, "format", 6408) + pop_non_gltf2_property(texture, "internalFormat", 6408) + pop_non_gltf2_property(texture, "target", 3553) + pop_non_gltf2_property(texture, "type", 5121) + convert_to_index(texture, "sampler", name_to_index, "samplers") + convert_to_index(texture, "source", name_to_index, "images") + + check_for_forbidden_values(gltf, set(name_to_index.keys())) + + return json.dumps(gltf, indent=2) def check_for_forbidden_values(value, forbidden, primitives=None): - """Recursively check that value does not contain any values in forbidden.""" - if primitives is None: - primitives = set([int, int, float, str, str]) - if type(value) in (dict, collections.OrderedDict): - for (k, v) in value.items(): - # It's okay for the name to be in the forbidden list - if k != 'name': - check_for_forbidden_values(v, forbidden) - elif isinstance(value, list): - if len(value) > 0 and type(value[0]) in (int, float, int): - # Don't bother - return - for elt in value: - check_for_forbidden_values(elt, forbidden) - elif type(value) in primitives: - if value in forbidden: - print('Found forbidden %s' % (value,), file=sys.stderr) - else: - assert False, "Cannot handle type %s" % (type(value), ) + """Recursively check that value does not contain any values in forbidden.""" + if primitives is None: + primitives = set([int, int, float, str, str]) + if type(value) in (dict, collections.OrderedDict): + for (k, v) in value.items(): + # It's okay for the name to be in the forbidden list + if k != "name": + check_for_forbidden_values(v, forbidden) + elif isinstance(value, list): + if len(value) > 0 and type(value[0]) in (int, float, int): + # Don't bother + return + for elt in value: + check_for_forbidden_values(elt, forbidden) + elif type(value) in primitives: + if value in forbidden: + print("Found forbidden %s" % (value,), file=sys.stderr) + else: + assert False, "Cannot handle type %s" % (type(value),) def write_if_different(filename, contents): - try: - old_contents = open(filename).read() - except IOError: - old_contents = None - if old_contents != contents: - open(filename, 'w').write(contents) - print("Updated", filename) + try: + old_contents = open(filename).read() + except IOError: + old_contents = None + if old_contents != contents: + open(filename, "w").write(contents) + print("Updated", filename) def main(args): - parser = argparse.ArgumentParser() - parser.add_argument('files', nargs=1, help="File to convert") - parser.add_argument('--stdout', action='store_true') - args = parser.parse_args(args) - - for src in args.files: - gltf2 = convert(src) - if args.stdout: - print(gltf2) - else: - dst = os.path.splitext(src)[0] + '.2.gltf' - assert dst != src - write_if_different(dst, gltf2) - - -if __name__ == '__main__': - main(sys.argv[1:]) + parser = argparse.ArgumentParser() + parser.add_argument("files", nargs=1, help="File to convert") + parser.add_argument("--stdout", action="store_true") + args = 
parser.parse_args(args) + + for src in args.files: + gltf2 = convert(src) + if args.stdout: + print(gltf2) + else: + dst = os.path.splitext(src)[0] + ".2.gltf" + assert dst != src + write_if_different(dst, gltf2) + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/Support/bin/deflate_request_body.py b/Support/bin/deflate_request_body.py index f102e32aab..21a840f136 100644 --- a/Support/bin/deflate_request_body.py +++ b/Support/bin/deflate_request_body.py @@ -21,75 +21,76 @@ class Error(Exception): - pass + pass -class DebugHeaders(): # pylint: disable=too-few-public-methods - """Parses the NNN_headers.txt file""" - def __init__(self, filename): - self.headers = {} - with open(filename, 'rb') as f: - txt = f.read() - m = re.search(r'-- Headers follow --', txt) - if m is None: - raise ValueError("No headers in %r" % txt) - for line in txt[m.end():].lstrip().split('\r\n'): - if ':' in line: - key, val = line.split(':', 1) - self.headers[key] = val[1:] +class DebugHeaders: # pylint: disable=too-few-public-methods + """Parses the NNN_headers.txt file""" - def __str__(self): - return str(self.headers) + def __init__(self, filename): + self.headers = {} + with open(filename, "rb") as f: + txt = f.read() + m = re.search(r"-- Headers follow --", txt) + if m is None: + raise ValueError("No headers in %r" % txt) + for line in txt[m.end() :].lstrip().split("\r\n"): + if ":" in line: + key, val = line.split(":", 1) + self.headers[key] = val[1:] + + def __str__(self): + return str(self.headers) def decode_body_part_gzip(body_encoded): - # https://stackoverflow.com/a/2695575/194921 - return zlib.decompress(body_encoded, 16 + zlib.MAX_WBITS) + # https://stackoverflow.com/a/2695575/194921 + return zlib.decompress(body_encoded, 16 + zlib.MAX_WBITS) def decode_body_part_deflate(body_encoded): - # https://stackoverflow.com/a/2695466/194921 - # this works for incorrect implementations of deflated content - try: - return zlib.decompress(body_encoded, -15) - except: # pylint: disable=bare-except - # this works for correct implementations of deflated content - return zlib.decompress(body_encoded, +15) + # https://stackoverflow.com/a/2695466/194921 + # this works for incorrect implementations of deflated content + try: + return zlib.decompress(body_encoded, -15) + except: # pylint: disable=bare-except + # this works for correct implementations of deflated content + return zlib.decompress(body_encoded, +15) def print_request(headers_file, body_file): - """Pass: + """Pass: headers_file - the log file containing headers, usually 'NNN_headers.txt' body_file - the log file containing the body, usually 'NNN_form.file' - """ - headers = DebugHeaders(headers_file).headers - with open(body_file, 'rb') as f: - body_encoded = f.read() - encoding = headers.get('Content-Encoding') - try: - if encoding is None: - body_decoded = body_encoded - elif encoding == 'gzip': - body_decoded = decode_body_part_gzip(body_encoded) - elif encoding == 'deflate': - body_decoded = decode_body_part_deflate(body_encoded) - else: - raise ValueError("Unknown encoding") - except Exception as e: - raise Error("Decode %s %s" % (body_file, encoding), e) from e - - print(headers) - print(body_decoded) - - -def quick_print_request(n, prefix='c:/src/tb/Requests'): - prefix = "%s/%s" % (prefix, n) - return print_request(prefix + '_headers.txt', prefix + '_form.file') - - -if __name__ == '__main__': - try: - quick_print_request(440) - except Error as e: - for x in e.args: - print(x) + """ + headers = DebugHeaders(headers_file).headers + with 
open(body_file, "rb") as f: + body_encoded = f.read() + encoding = headers.get("Content-Encoding") + try: + if encoding is None: + body_decoded = body_encoded + elif encoding == "gzip": + body_decoded = decode_body_part_gzip(body_encoded) + elif encoding == "deflate": + body_decoded = decode_body_part_deflate(body_encoded) + else: + raise ValueError("Unknown encoding") + except Exception as e: + raise Error("Decode %s %s" % (body_file, encoding), e) from e + + print(headers) + print(body_decoded) + + +def quick_print_request(n, prefix="c:/src/tb/Requests"): + prefix = "%s/%s" % (prefix, n) + return print_request(prefix + "_headers.txt", prefix + "_form.file") + + +if __name__ == "__main__": + try: + quick_print_request(440) + except Error as e: + for x in e.args: + print(x) diff --git a/Support/bin/deobfuscate.py b/Support/bin/deobfuscate.py index faff681ce2..2091279f36 100755 --- a/Support/bin/deobfuscate.py +++ b/Support/bin/deobfuscate.py @@ -24,298 +24,368 @@ from subprocess import Popen, PIPE # Common 11-letter words that shouldn't be interpreted as obfuscated symbols -COMMON_WORDS_11 = set(['initializer']) - - -class ObfuscationSection(): - def __init__(self, name): - self.name = name - self.sym_to_ob = {} - self.ob_to_syms = {} # obfuscation -> set(symbols) - - def add_entry(self, sym, ob): - """Pass a symbol, and its obfuscation""" - # f(symbol) -> obfuscation is a one-to-one function - # f(obfuscation) -> symbol might be a one-to-many function - - if self.name == 'Parameters': - # Symbol is something like - # Anchor/Side Anchor::opposite(Anchor/Side) side - # ArtworkMetadata StellaMetadataFetcher/c__Iterator3C::<>m__81(Vr2dExhibitItem) o - # Strip off the type to avoid confusing shorten() - assert ' ' in sym - sym = sym.rsplit(' ')[-1] - - # Returns True if new entry was added - if sym in self.sym_to_ob: - if ob == self.sym_to_ob[sym]: +COMMON_WORDS_11 = set(["initializer"]) + + +class ObfuscationSection: + def __init__(self, name): + self.name = name + self.sym_to_ob = {} + self.ob_to_syms = {} # obfuscation -> set(symbols) + + def add_entry(self, sym, ob): + """Pass a symbol, and its obfuscation""" + # f(symbol) -> obfuscation is a one-to-one function + # f(obfuscation) -> symbol might be a one-to-many function + + if self.name == "Parameters": + # Symbol is something like + # Anchor/Side Anchor::opposite(Anchor/Side) side + # ArtworkMetadata StellaMetadataFetcher/c__Iterator3C::<>m__81(Vr2dExhibitItem) o + # Strip off the type to avoid confusing shorten() + assert " " in sym + sym = sym.rsplit(" ")[-1] + + # Returns True if new entry was added + if sym in self.sym_to_ob: + if ob == self.sym_to_ob[sym]: + return False + # The same symbol mapped to two different obfuscations? Should be impossible, + # since the mapping is a hash. + raise Exception( + "Unexpected: %s -> %s and %s" % (sym, ob, self.sym_to_ob[sym]) + ) + + self.sym_to_ob[sym] = ob + syms = self.ob_to_syms.setdefault(ob, set()) + if sym in syms: + return False + syms.add(sym) + return True + + @staticmethod + def shorten(symbol): + """Extract the piece of |symbol| that the obfuscator uses for hashing.""" + # Symbol can be: + # Namespace.Namespace.SHORTSYM + # Some.Class.Name::SHORTSYM(blah, blah) + # TiltBrush.Future`1/SHORTSYM (enum defined in a class?) 
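For the symbol shapes listed above, shorten() peels off each layer of decoration in turn (argument list, nested-type and namespace prefixes) so that only the identifier the obfuscator actually hashed remains:

assert ObfuscationSection.shorten("Namespace.Namespace.SHORTSYM") == "SHORTSYM"
assert ObfuscationSection.shorten("Some.Class.Name::SHORTSYM(blah, blah)") == "SHORTSYM"
assert ObfuscationSection.shorten("TiltBrush.Future`1/SHORTSYM") == "SHORTSYM"

deobfuscate() later compares these shortened forms, so when several full symbols collapse to the same short name it can still print a single unambiguous replacement.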
+ # If the symbol is a parameter, the extraneous type information will already + # have been stripped off by add_entry() + short = symbol.rsplit("::", 1)[-1] + short = short.split("(", 1)[0] + short = short.rsplit("/", 1)[-1] + short = short.rsplit(".", 1)[ + -1 + ] # Not QUITE sure about this, but let's see how it goes + return short + + +class ObfuscationMap: + def __init__(self): + self.sections_by_name = {} + # A dict that maps obfuscated symbol to a user-friendly, short symbol + self.ob_to_syms = None + + def is_empty(self): + return len(self.sections_by_name) == 0 + + def load_from_file(self, filename): + """Additively loads entries from the given file. + Returns True on success.""" + if os.path.exists(filename): + with open(filename) as f: + self._load_from_text(f.read()) + return True return False - # The same symbol mapped to two different obfuscations? Should be impossible, - # since the mapping is a hash. - raise Exception("Unexpected: %s -> %s and %s" % (sym, ob, self.sym_to_ob[sym])) - - self.sym_to_ob[sym] = ob - syms = self.ob_to_syms.setdefault(ob, set()) - if sym in syms: - return False - syms.add(sym) - return True - - @staticmethod - def shorten(symbol): - """Extract the piece of |symbol| that the obfuscator uses for hashing.""" - # Symbol can be: - # Namespace.Namespace.SHORTSYM - # Some.Class.Name::SHORTSYM(blah, blah) - # TiltBrush.Future`1/SHORTSYM (enum defined in a class?) - # If the symbol is a parameter, the extraneous type information will already - # have been stripped off by add_entry() - short = symbol.rsplit('::', 1)[-1] - short = short.split('(', 1)[0] - short = short.rsplit('/', 1)[-1] - short = short.rsplit('.', 1)[-1] # Not QUITE sure about this, but let's see how it goes - return short - - -class ObfuscationMap(): - def __init__(self): - self.sections_by_name = {} - # A dict that maps obfuscated symbol to a user-friendly, short symbol - self.ob_to_syms = None - - def is_empty(self): - return len(self.sections_by_name) == 0 - - def load_from_file(self, filename): - """Additively loads entries from the given file. 
- Returns True on success.""" - if os.path.exists(filename): - with open(filename) as f: - self._load_from_text(f.read()) - return True - return False - - def load_from_git_rev(self, git_object): - """Additively load entries from the given git object (eg HEAD:Assets/obfuscation_map.txt)""" - proc = Popen(['git', 'cat-file', '-p', git_object], stdout=PIPE, stderr=PIPE) - stdout, stderr = proc.communicate() - if proc.returncode != 0: - print("WARN: Couldn't load deobfuscation from '%s'\n%s" % (git_object, stderr), file=sys.stderr) - return - n = self._load_from_text(stdout) - if n > 0 and os.isatty(sys.stdout.fileno()): - print("Added %d symbols from '%s'" % (n, git_object)) - - def _load_from_text(self, text): - """Returns number of symbols added to the map.""" - SEP_CHAR = '\u21e8' - num_added = 0 - if not isinstance(text, str): - text = text.decode('utf-8') - for line in text.split('\n'): - if not line: - pass - elif line.startswith('#'): - section = self._get_section(line[1:]) - else: - sym, ob = line.split(SEP_CHAR) - if section.add_entry(sym, ob): - num_added += 1 - # Reset cache - self.ob_to_syms = None - return num_added - - def _get_section(self, name): - try: - return self.sections_by_name[name] - except KeyError: - ret = self.sections_by_name[name] = ObfuscationSection(name) - return ret - - def _create_ob_to_syms(self): - # Create a simple aggregation of the lookup table - ob_to_syms = defaultdict(set) - for (_, section) in sorted(self.sections_by_name.items()): - for (ob, syms) in section.ob_to_syms.items(): - ob_to_syms[ob] |= syms - self.ob_to_syms = dict(ob_to_syms) - - def deobfuscate(self, text): - if self.ob_to_syms is None: - self._create_ob_to_syms() - - def lookup(match): - ob = match.group(0) - try: - syms = self.ob_to_syms[ob] - except KeyError: - if ob in COMMON_WORDS_11: - return ob - return '' % ob - else: - short_syms = {ObfuscationSection.shorten(s) for s in syms} - if len(short_syms) == 1: - return short_syms.pop() - return '< ' + ' or '.join(sorted(syms)) + ' >' - pat = re.compile(r'\b[a-z]{11}\b') - return pat.sub(lookup, text) + + def load_from_git_rev(self, git_object): + """Additively load entries from the given git object (eg HEAD:Assets/obfuscation_map.txt)""" + proc = Popen(["git", "cat-file", "-p", git_object], stdout=PIPE, stderr=PIPE) + stdout, stderr = proc.communicate() + if proc.returncode != 0: + print( + "WARN: Couldn't load deobfuscation from '%s'\n%s" + % (git_object, stderr), + file=sys.stderr, + ) + return + n = self._load_from_text(stdout) + if n > 0 and os.isatty(sys.stdout.fileno()): + print("Added %d symbols from '%s'" % (n, git_object)) + + def _load_from_text(self, text): + """Returns number of symbols added to the map.""" + SEP_CHAR = "\u21e8" + num_added = 0 + if not isinstance(text, str): + text = text.decode("utf-8") + for line in text.split("\n"): + if not line: + pass + elif line.startswith("#"): + section = self._get_section(line[1:]) + else: + sym, ob = line.split(SEP_CHAR) + if section.add_entry(sym, ob): + num_added += 1 + # Reset cache + self.ob_to_syms = None + return num_added + + def _get_section(self, name): + try: + return self.sections_by_name[name] + except KeyError: + ret = self.sections_by_name[name] = ObfuscationSection(name) + return ret + + def _create_ob_to_syms(self): + # Create a simple aggregation of the lookup table + ob_to_syms = defaultdict(set) + for (_, section) in sorted(self.sections_by_name.items()): + for (ob, syms) in section.ob_to_syms.items(): + ob_to_syms[ob] |= syms + self.ob_to_syms = 
dict(ob_to_syms) + + def deobfuscate(self, text): + if self.ob_to_syms is None: + self._create_ob_to_syms() + + def lookup(match): + ob = match.group(0) + try: + syms = self.ob_to_syms[ob] + except KeyError: + if ob in COMMON_WORDS_11: + return ob + return "" % ob + else: + short_syms = {ObfuscationSection.shorten(s) for s in syms} + if len(short_syms) == 1: + return short_syms.pop() + return "< " + " or ".join(sorted(syms)) + " >" + + pat = re.compile(r"\b[a-z]{11}\b") + return pat.sub(lookup, text) def get_client_root(): - proc = Popen(['git', 'rev-parse', '--show-toplevel'], stdout=PIPE, stderr=PIPE) - stdout, _ = proc.communicate() - assert proc.returncode == 0, "Couldn't determine git client root" - return stdout.strip() + proc = Popen(["git", "rev-parse", "--show-toplevel"], stdout=PIPE, stderr=PIPE) + stdout, _ = proc.communicate() + assert proc.returncode == 0, "Couldn't determine git client root" + return stdout.strip() def format_nicely(txt, verbose): - lines = txt.split('\n') - del txt - - frame_pat = re.compile(r'( (?P[A-Za-z0-9_:.+`<>\[\]]+) ?(?P\([^)]*\)))$') - - def remove_instruction_pointer(line): - ignore_pat = re.compile(r' \(at <[a-f0-9]+>:\d+\)$') - ignore_pat2 = re.compile(r' \[0x[0-9a-f]+\] in <[a-f0-9]+>:\d+ *$') - # Gets rid of non-useful stuff like "(at :0)" and - # "[0x00016] in :0" - line = ignore_pat.sub('', line) - line = ignore_pat2.sub('', line) - return line - lines = [remove_instruction_pointer(line) for line in lines] - - # Exception lines have a stack frame stuck onto them; move those frames to the next line. - # Also clean up tabs and other junk that comes in when you copy/paste from the analytics table. - def move_trailing_stack_frame(line): - exception_pat = re.compile(r'^\t?(\d+\.\t)?(?P[A-Za-z0-9]+Exception:)') - if line.startswith(' Rethrow as'): - # Insert a newline before the stack frame - line = line.replace(' Rethrow as', '\nRethrow as') - return frame_pat.sub(r'\n\1', line) - if exception_pat.match(line): - m = exception_pat.match(line) - line = line[m.start('exc'):] - # Insert a newline before the stack frame - return frame_pat.sub(r'\n\1', line) - return line - lines = [move_trailing_stack_frame(line) for line in lines] - - if not verbose: - def remove_arglist(line): - m = frame_pat.match(line) - if m is None: + lines = txt.split("\n") + del txt + + frame_pat = re.compile( + r"( (?P[A-Za-z0-9_:.+`<>\[\]]+) ?(?P\([^)]*\)))$" + ) + + def remove_instruction_pointer(line): + ignore_pat = re.compile(r" \(at <[a-f0-9]+>:\d+\)$") + ignore_pat2 = re.compile(r" \[0x[0-9a-f]+\] in <[a-f0-9]+>:\d+ *$") + # Gets rid of non-useful stuff like "(at :0)" and + # "[0x00016] in :0" + line = ignore_pat.sub("", line) + line = ignore_pat2.sub("", line) + return line + + lines = [remove_instruction_pointer(line) for line in lines] + + # Exception lines have a stack frame stuck onto them; move those frames to the next line. + # Also clean up tabs and other junk that comes in when you copy/paste from the analytics table. 
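frame_pat is the workhorse for the next few helpers: it captures a trailing "name(args)" stack frame at the end of a line, and the surrounding code reads the captures back as m.group("name") and m.group("args"), so the two inner groups carry the names name and args. A standalone check against a hypothetical copied-from-analytics line:

import re

frame_pat = re.compile(
    r"( (?P<name>[A-Za-z0-9_:.+`<>\[\]]+) ?(?P<args>\([^)]*\)))$"
)
m = frame_pat.search("FooException: went wrong TiltBrush.SomeClass::DoThing(int count)")
assert m.group("name") == "TiltBrush.SomeClass::DoThing"
assert m.group("args") == "(int count)"

move_trailing_stack_frame() below substitutes that trailing match with a newline plus itself (frame_pat.sub(r"\n\1", line)), which is what pushes the frame onto its own line.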
+ def move_trailing_stack_frame(line): + exception_pat = re.compile(r"^\t?(\d+\.\t)?(?P[A-Za-z0-9]+Exception:)") + if line.startswith(" Rethrow as"): + # Insert a newline before the stack frame + line = line.replace(" Rethrow as", "\nRethrow as") + return frame_pat.sub(r"\n\1", line) + if exception_pat.match(line): + m = exception_pat.match(line) + line = line[m.start("exc") :] + # Insert a newline before the stack frame + return frame_pat.sub(r"\n\1", line) return line - name = m.group('name') - args = m.group('args') - if len(name) + len(args) < 70: - return ' %s%s' % (name, args) - return ' %s(...)' % (name,) - lines = [remove_arglist(line) for line in lines if line != " (wrapper remoting-invoke-with-check)"] - - def demangle_coroutine(line): - # TiltBrush.d__38:MoveNext() - # Google.Apis.Requests.ClientServiceRequest`1+d__30[TResponse].MoveNext () - def repl(m): - return '[co] %(class)s.%(coroutine)s' % m.groupdict() - ret, n = re.subn( - r'(?P[a-zA-Z0-9_.`]+)[+.]<(?P[^>]+)>d_+\d+(?:\[[^\]]+\])?[:.]MoveNext', repl, line) - if n == 0: - # TiltBrush.DriveAccess+<g__InitializeAsync|30_0>d.MoveNext () - ret, n = re.subn( - r'(?P[a-zA-Z0-9_.`]+)[+.]<(?P<[^>]+>[^>]+)>d_*\d*[:.]MoveNext', - repl, line) - return ret - lines = [demangle_coroutine(line) for line in lines] - - lines = elide_async_frames(lines) - - def demangle_lambda(line): - # TiltBrush.SketchControlsScript+<>c.b__307_0() - def repl(m): - return '%(prefix)s.%(owner)s.[lambda %(id)s]' % m.groupdict() - return re.sub(r'(?P[a-zA-Z0-9_.]+)\+<>c\.<(?P[a-zA-Z0-9_]+)>b__(?P[0-9_]+)', repl, line) - lines = [demangle_lambda(line) for line in lines] - - return '\n'.join(lines) + + lines = [move_trailing_stack_frame(line) for line in lines] + + if not verbose: + + def remove_arglist(line): + m = frame_pat.match(line) + if m is None: + return line + name = m.group("name") + args = m.group("args") + if len(name) + len(args) < 70: + return " %s%s" % (name, args) + return " %s(...)" % (name,) + + lines = [ + remove_arglist(line) + for line in lines + if line != " (wrapper remoting-invoke-with-check)" + ] + + def demangle_coroutine(line): + # TiltBrush.d__38:MoveNext() + # Google.Apis.Requests.ClientServiceRequest`1+d__30[TResponse].MoveNext () + def repl(m): + return "[co] %(class)s.%(coroutine)s" % m.groupdict() + + ret, n = re.subn( + r"(?P[a-zA-Z0-9_.`]+)[+.]<(?P[^>]+)>d_+\d+(?:\[[^\]]+\])?[:.]MoveNext", + repl, + line, + ) + if n == 0: + # TiltBrush.DriveAccess+<g__InitializeAsync|30_0>d.MoveNext () + ret, n = re.subn( + r"(?P[a-zA-Z0-9_.`]+)[+.]<(?P<[^>]+>[^>]+)>d_*\d*[:.]MoveNext", + repl, + line, + ) + return ret + + lines = [demangle_coroutine(line) for line in lines] + + lines = elide_async_frames(lines) + + def demangle_lambda(line): + # TiltBrush.SketchControlsScript+<>c.b__307_0() + def repl(m): + return "%(prefix)s.%(owner)s.[lambda %(id)s]" % m.groupdict() + + return re.sub( + r"(?P[a-zA-Z0-9_.]+)\+<>c\.<(?P[a-zA-Z0-9_]+)>b__(?P[0-9_]+)", + repl, + line, + ) + + lines = [demangle_lambda(line) for line in lines] + + return "\n".join(lines) def elide_async_frames(lines): - def list_to_pat(lst): - """Returns a pattern that matches any of the items in lst""" - return '(?:' + '|'.join([re.escape(i) for i in lst]) + ')' - - # Gets rid of uninteresting stack frames that have to do with the C# async machinery, - # to show more clearly the frames that are awaiting each other. - # Sometimes the stack dump has " at " in it, sometimes not. I think it has to do with - # whether you're running in editor or not? 
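A rough end-to-end sketch of the elision, on a made-up four-line trace (only the TaskAwaiter frame name is real; the TiltBrush frame is invented for illustration):

trace = [
    "InvalidOperationException: boom",
    " at System.Runtime.CompilerServices.TaskAwaiter.GetResult ()",
    "--- End of stack trace from previous location where exception was thrown ---",
    " at TiltBrush.DriveAccess.InitializeAsync ()",
]
cleaned = elide_async_frames(trace)
# The awaiter frame is blanked and the separator folds into " [await]", so the
# exception text and the awaiting TiltBrush frame end up joined on one line.

This is why the output of format_nicely() reads as a chain of frames awaiting each other rather than pages of Task plumbing.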
- task_execute_pat = re.compile(r'^(?: at )?' + list_to_pat([ - 'System.Threading.Tasks.Task`1[TResult].InnerInvoke', - 'System.Threading.Tasks.Task.Execute' - ])) - task_await_pat = re.compile(r'^(?: at )?' + list_to_pat([ - 'System.Runtime.CompilerServices.TaskAwaiter.GetResult', - 'System.Runtime.CompilerServices.TaskAwaiter`1[TResult].GetResult', - 'System.Threading.Tasks.Task.Wait', - ])) - task_throw_pat = re.compile(r'^(?: at )?' + list_to_pat([ - 'System.Threading.Tasks.Task.ThrowIfExceptional', - 'System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw', - 'System.Runtime.CompilerServices.TaskAwaiter.ThrowForNonSuccess', - 'System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification', - 'System.Runtime.CompilerServices.TaskAwaiter.ValidateEnd', - 'System.Runtime.CompilerServices.ConfiguredTaskAwaitable`1+ConfiguredTaskAwaiter[TResult].GetResult', - 'System.Runtime.CompilerServices.AsyncMethodBuilderCore+<>c.', - 'System.Runtime.CompilerServices.AsyncMethodBuilderCore.ThrowAsync', - # '--- End of stack trace from previous location where exception was thrown ---', - ])) - - def elide_frame(line): - if task_execute_pat.match(line): - return '' - if task_throw_pat.match(line): - return '' - if task_await_pat.match(line): - return '' - return line - - txt = '\n'.join([elide_frame(line) for line in lines]) - txt = re.sub(r'(\n)*--- End of stack trace from previous location where exception was thrown ---\n(\n)* ?', ' [await]', txt) - return txt.split('\n') + def list_to_pat(lst): + """Returns a pattern that matches any of the items in lst""" + return "(?:" + "|".join([re.escape(i) for i in lst]) + ")" + + # Gets rid of uninteresting stack frames that have to do with the C# async machinery, + # to show more clearly the frames that are awaiting each other. + # Sometimes the stack dump has " at " in it, sometimes not. I think it has to do with + # whether you're running in editor or not? + task_execute_pat = re.compile( + r"^(?: at )?" + + list_to_pat( + [ + "System.Threading.Tasks.Task`1[TResult].InnerInvoke", + "System.Threading.Tasks.Task.Execute", + ] + ) + ) + task_await_pat = re.compile( + r"^(?: at )?" + + list_to_pat( + [ + "System.Runtime.CompilerServices.TaskAwaiter.GetResult", + "System.Runtime.CompilerServices.TaskAwaiter`1[TResult].GetResult", + "System.Threading.Tasks.Task.Wait", + ] + ) + ) + task_throw_pat = re.compile( + r"^(?: at )?" 
+ + list_to_pat( + [ + "System.Threading.Tasks.Task.ThrowIfExceptional", + "System.Runtime.ExceptionServices.ExceptionDispatchInfo.Throw", + "System.Runtime.CompilerServices.TaskAwaiter.ThrowForNonSuccess", + "System.Runtime.CompilerServices.TaskAwaiter.HandleNonSuccessAndDebuggerNotification", + "System.Runtime.CompilerServices.TaskAwaiter.ValidateEnd", + "System.Runtime.CompilerServices.ConfiguredTaskAwaitable`1+ConfiguredTaskAwaiter[TResult].GetResult", + "System.Runtime.CompilerServices.AsyncMethodBuilderCore+<>c.", + "System.Runtime.CompilerServices.AsyncMethodBuilderCore.ThrowAsync", + # '--- End of stack trace from previous location where exception was thrown ---', + ] + ) + ) + + def elide_frame(line): + if task_execute_pat.match(line): + return "" + if task_throw_pat.match(line): + return "" + if task_await_pat.match(line): + return "" + return line + + txt = "\n".join([elide_frame(line) for line in lines]) + txt = re.sub( + r"(\n)*--- End of stack trace from previous location where exception was thrown ---\n(\n)* ?", + " [await]", + txt, + ) + return txt.split("\n") def main(): - parser = argparse.ArgumentParser() - parser.add_argument('-r', dest='releases', action='append', default=[], - help="Add symbols from specified release branch (eg 1.4, 5). (Shortcut for Tilt Brush release-N naming format.)") - parser.add_argument('-m', '--map_file', dest='map_file', action='store', - default='Support/obfuscation_map.txt', - help='Path of obfuscation map relative to client root') - parser.add_argument('-b', '--branch', dest='branches', action='append', - default=[], help='Add symbols from specified release branch') - parser.add_argument('-v', '--verbose', action='store_true', - help='Do not elide any information') - args = parser.parse_args() - - os.chdir(os.path.dirname(os.path.realpath(__file__))) - map_file = os.path.join(get_client_root(), args.map_file) - omap = ObfuscationMap() - omap.load_from_file(map_file) - # Assumes that the remote is called "origin", but that's typically the case - args.releases = ['origin/release/' + s for s in args.releases] - for branch in itertools.chain(args.releases, args.branches): - omap.load_from_git_rev('%s:%s' % (branch, args.map_file)) - sys.stdout.flush() - - if omap.is_empty(): - parser.error("No symbols loaded. Do you need to pass '--release' or '--branch'?") - - if os.isatty(sys.stdout.fileno()): - print('Paste text and hit Control-Z or Control-D') - txt = sys.stdin.read().decode('ascii', 'ignore') - txt = omap.deobfuscate(txt) - txt = format_nicely(txt, args.verbose) - print(txt) - - -if __name__ == '__main__': - main() + parser = argparse.ArgumentParser() + parser.add_argument( + "-r", + dest="releases", + action="append", + default=[], + help="Add symbols from specified release branch (eg 1.4, 5). 
(Shortcut for Tilt Brush release-N naming format.)", + ) + parser.add_argument( + "-m", + "--map_file", + dest="map_file", + action="store", + default="Support/obfuscation_map.txt", + help="Path of obfuscation map relative to client root", + ) + parser.add_argument( + "-b", + "--branch", + dest="branches", + action="append", + default=[], + help="Add symbols from specified release branch", + ) + parser.add_argument( + "-v", "--verbose", action="store_true", help="Do not elide any information" + ) + args = parser.parse_args() + + os.chdir(os.path.dirname(os.path.realpath(__file__))) + map_file = os.path.join(get_client_root(), args.map_file) + omap = ObfuscationMap() + omap.load_from_file(map_file) + # Assumes that the remote is called "origin", but that's typically the case + args.releases = ["origin/release/" + s for s in args.releases] + for branch in itertools.chain(args.releases, args.branches): + omap.load_from_git_rev("%s:%s" % (branch, args.map_file)) + sys.stdout.flush() + + if omap.is_empty(): + parser.error( + "No symbols loaded. Do you need to pass '--release' or '--branch'?" + ) + + if os.isatty(sys.stdout.fileno()): + print("Paste text and hit Control-Z or Control-D") + txt = sys.stdin.read().decode("ascii", "ignore") + txt = omap.deobfuscate(txt) + txt = format_nicely(txt, args.verbose) + print(txt) + + +if __name__ == "__main__": + main() diff --git a/Support/bin/find_unused.py b/Support/bin/find_unused.py index 815fbe50fc..34ecf2e20e 100755 --- a/Support/bin/find_unused.py +++ b/Support/bin/find_unused.py @@ -21,61 +21,65 @@ # Add ../Python to sys.path sys.path.append( - os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'Python')) + os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "Python") +) def gen_used_assets(build_dir): - log = os.path.join(build_dir, 'build_log.txt') - with open(log) as inf: - data = inf.read() - asset_pat = re.compile(r'% (.*)') - m = re.search(r'^Used Assets([\w ]+), sorted by[^\n]+\n(?P.*?)^DisplayProgressNotification', data, - re.MULTILINE | re.DOTALL) - for match in asset_pat.finditer(m.group('assets')): - yield match.group(1) + log = os.path.join(build_dir, "build_log.txt") + with open(log) as inf: + data = inf.read() + asset_pat = re.compile(r"% (.*)") + m = re.search( + r"^Used Assets([\w ]+), sorted by[^\n]+\n(?P.*?)^DisplayProgressNotification", + data, + re.MULTILINE | re.DOTALL, + ) + for match in asset_pat.finditer(m.group("assets")): + yield match.group(1) def gen_existing_assets(project_dir): - for r, ds, fs in os.walk(os.path.join(project_dir, 'Assets')): - rr = os.path.relpath(r, start=project_dir).replace('\\', '/') + '/' - ds[:] = [d for d in ds if d != 'Editor'] - for f in fs: - if f.endswith('.meta'): - continue - yield rr + f + for r, ds, fs in os.walk(os.path.join(project_dir, "Assets")): + rr = os.path.relpath(r, start=project_dir).replace("\\", "/") + "/" + ds[:] = [d for d in ds if d != "Editor"] + for f in fs: + if f.endswith(".meta"): + continue + yield rr + f def get_filesize(filename): - try: - return os.stat(filename).st_size - except IOError: - return -1 + try: + return os.stat(filename).st_size + except IOError: + return -1 def main(): - os.chdir(find_project_dir()) - used = set(gen_used_assets(r'../Builds/Windows_SteamVR_Release/')) - exist = set(gen_existing_assets('.')) - if len(used) == 0: - print('WARN: no used assets; did Unity change their build.log format again?') - return - - missing = used - exist - extra = exist - used - for m in sorted(missing): - 
print('miss', m) - print('---') - extra_with_size = [(get_filesize(x), x) for x in extra] - extra_with_size.sort(key=lambda x: -x[0]) - for size, filename in extra_with_size: - # TODO -- this isn't a valid variable! - if '/Resources/' in x: # noqa: F821 pylint: disable=undefined-variable - continue - print('xtra %8d %s' % (size, filename)) - - both = {str.lower(m) for m in missing} & {str.lower(e) for e in extra} - assert len(both) == 0 + os.chdir(find_project_dir()) + used = set(gen_used_assets(r"../Builds/Windows_SteamVR_Release/")) + exist = set(gen_existing_assets(".")) + if len(used) == 0: + print("WARN: no used assets; did Unity change their build.log format again?") + return + + missing = used - exist + extra = exist - used + for m in sorted(missing): + print("miss", m) + print("---") + extra_with_size = [(get_filesize(x), x) for x in extra] + extra_with_size.sort(key=lambda x: -x[0]) + for size, filename in extra_with_size: + # TODO -- this isn't a valid variable! + if "/Resources/" in x: # noqa: F821 pylint: disable=undefined-variable + continue + print("xtra %8d %s" % (size, filename)) + + both = {str.lower(m) for m in missing} & {str.lower(e) for e in extra} + assert len(both) == 0 main() diff --git a/Support/bin/gltfViewer/serve.py b/Support/bin/gltfViewer/serve.py index e0ae62729e..17af221f7e 100644 --- a/Support/bin/gltfViewer/serve.py +++ b/Support/bin/gltfViewer/serve.py @@ -25,19 +25,20 @@ port = 8000 if len(sys.argv) > 1: - port = int(sys.argv[1]) + port = int(sys.argv[1]) # If there's another server running at the chosen port, try to kill it. If that # fails (e.g. when running on Windows), we forge ahead. TODO: Implement the # equivalent behavior outside of Linux. try: - devnull = open(os.devnull, "w") - subprocess.check_call(["fuser", "-k", "%s/tcp" % port], - stdout=devnull, stderr=subprocess.STDOUT) - # Give the process a moment to die. - time.sleep(1) + devnull = open(os.devnull, "w") + subprocess.check_call( + ["fuser", "-k", "%s/tcp" % port], stdout=devnull, stderr=subprocess.STDOUT + ) + # Give the process a moment to die. + time.sleep(1) except (subprocess.CalledProcessError, OSError): - pass + pass # Prevents "Address already in use" error when socket lingers in TIME_WAIT, # even after the corresponding process has been killed. 
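The TIME_WAIT workaround that the comment above refers to is the standard allow_reuse_address toggle, which sets SO_REUSEADDR on the listening socket before bind(). A minimal standalone sketch of that pattern (port 8000 mirrors the script's default; this is illustrative, not a copy of the rest of serve.py):

import http.server
import socketserver

socketserver.TCPServer.allow_reuse_address = True  # SO_REUSEADDR: rebinding a lingering port succeeds
with socketserver.TCPServer(("", 8000), http.server.SimpleHTTPRequestHandler) as httpd:
    httpd.serve_forever()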
diff --git a/Support/bin/gltf_export_shaders.py b/Support/bin/gltf_export_shaders.py index 10ec2b18c5..1ab0ae591f 100644 --- a/Support/bin/gltf_export_shaders.py +++ b/Support/bin/gltf_export_shaders.py @@ -40,7 +40,7 @@ # Fill this out to help copy shaders from previous versions of brushes UPDATED_GUIDS_BY_NAME = { - # 'OilPaint': ('c515dad7-4393-4681-81ad-162ef052241b', 'f72ec0e7-a844-4e38-82e3-140c44772699'), + # 'OilPaint': ('c515dad7-4393-4681-81ad-162ef052241b', 'f72ec0e7-a844-4e38-82e3-140c44772699'), } # --------------------------------------------------------------------------- @@ -49,325 +49,357 @@ def destroy(file_or_dir): - """Ensure that *file_or_dir* does not exist in the filesystem, - deleting it if necessary.""" - if os.path.isfile(file_or_dir): - os.chmod(file_or_dir, stat.S_IWRITE) - os.unlink(file_or_dir) - elif os.path.isdir(file_or_dir): - for r, ds, fs in os.walk(file_or_dir, topdown=False): - for f in fs: - os.chmod(os.path.join(r, f), stat.S_IWRITE) - os.unlink(os.path.join(r, f)) - for d in ds: - os.rmdir(os.path.join(r, d)) - os.rmdir(file_or_dir) - if os.path.exists(file_or_dir): - raise Exception("Temp build location '%s' is not empty" % file_or_dir) + """Ensure that *file_or_dir* does not exist in the filesystem, + deleting it if necessary.""" + if os.path.isfile(file_or_dir): + os.chmod(file_or_dir, stat.S_IWRITE) + os.unlink(file_or_dir) + elif os.path.isdir(file_or_dir): + for r, ds, fs in os.walk(file_or_dir, topdown=False): + for f in fs: + os.chmod(os.path.join(r, f), stat.S_IWRITE) + os.unlink(os.path.join(r, f)) + for d in ds: + os.rmdir(os.path.join(r, d)) + os.rmdir(file_or_dir) + if os.path.exists(file_or_dir): + raise Exception("Temp build location '%s' is not empty" % file_or_dir) class PreprocessException(Exception): - """Exception raised by preprocess_lite() and preprocess()""" + """Exception raised by preprocess_lite() and preprocess()""" def preprocess_lite(input_file, defines, include_dirs): - """Returns contents of input_file with #includes expanded. -defines is a dict of #defines. -include_dirs is a list of directories. 
-Raises PreprocessException on error.""" - include_pat = re.compile(r'^[ \t]*#[ \t]*include[ \t]+([<"])(.*)[">].*$\n?', re.MULTILINE) - - def expand_include(include, current_file, is_quote): - """Given the body of an #include, returns replacement text.""" - # https://gcc.gnu.org/onlinedocs/cpp/Include-Syntax.html - if is_quote: - search_path = [os.path.dirname(current_file)] + include_dirs - else: - search_path = include_dirs - - for include_dir in search_path: - candidate = os.path.join(include_dir, include) - if os.path.exists(candidate): - with open(candidate, 'r') as inf: - candidate_text = inf.read() - if not candidate_text.endswith('\n'): - candidate_text += '\n' - # uncomment for debugging - # candidate_text = '// %s\n%s' % (candidate, candidate_text) - - def expand_include_match(match): - char, body = match.groups() - # This has a pylint warning which I don't know how to fix, but I suspect that candidate isn't updated - return expand_include(body, candidate, char == '"') # pylint: disable=cell-var-from-loop - return include_pat.sub(expand_include_match, candidate_text) - raise PreprocessException("%s : fatal error: Cannot open include file: '%s'" % ( - current_file, include)) - - contents = expand_include(input_file, input_file, True) - # inject defines - defines = ["#define %s %s\n" % (k, v) - for (k, v) in sorted(defines.items()) - if k in contents - ] - return ''.join(defines) + contents + """Returns contents of input_file with #includes expanded. + defines is a dict of #defines. + include_dirs is a list of directories. + Raises PreprocessException on error.""" + include_pat = re.compile( + r'^[ \t]*#[ \t]*include[ \t]+([<"])(.*)[">].*$\n?', re.MULTILINE + ) + + def expand_include(include, current_file, is_quote): + """Given the body of an #include, returns replacement text.""" + # https://gcc.gnu.org/onlinedocs/cpp/Include-Syntax.html + if is_quote: + search_path = [os.path.dirname(current_file)] + include_dirs + else: + search_path = include_dirs + + for include_dir in search_path: + candidate = os.path.join(include_dir, include) + if os.path.exists(candidate): + with open(candidate, "r") as inf: + candidate_text = inf.read() + if not candidate_text.endswith("\n"): + candidate_text += "\n" + # uncomment for debugging + # candidate_text = '// %s\n%s' % (candidate, candidate_text) + + def expand_include_match(match): + char, body = match.groups() + # This has a pylint warning which I don't know how to fix, but I suspect that candidate isn't updated + return expand_include( + body, + candidate, # pylint: disable=cell-var-from-loop + char == '"', + ) + + return include_pat.sub(expand_include_match, candidate_text) + raise PreprocessException( + "%s : fatal error: Cannot open include file: '%s'" + % (current_file, include) + ) + + contents = expand_include(input_file, input_file, True) + # inject defines + defines = [ + "#define %s %s\n" % (k, v) + for (k, v) in sorted(defines.items()) + if k in contents + ] + return "".join(defines) + contents # Currently unused def preprocess(input_file, defines, include_dirs): - """Returns C preprocessed contents of input_file. -defines is a dict of #defines. -include_dirs is a list of directories. -Raises PreprocessException on error.""" - assert not isinstance(include_dirs, str) - if platform.system() == 'Windows': - stdout = preprocess_msvc(input_file, defines, include_dirs) - else: - assert False, "Platform %s not (yet?) 
supported" % platform.system() - return stdout - - -def preprocess_msvc(input_file, defines, include_dirs): # pylint: disable=too-many-locals - def find_cpp_exe(): - for release in ('12.0', '13.0', '14.0'): - exe = r'C:\Program Files (x86)\Microsoft Visual Studio %s\VC\bin\cl.exe' % release - if os.path.exists(exe): - return exe - raise LookupError("Cannot find %s: Install MSVC?" % exe) - - with_line_directives = False - with_comments = True - - # See https://msdn.microsoft.com/en-us/library/19z1t1wy.aspx for - # docs on command-line args - cmd = [find_cpp_exe(), '/nologo'] - cmd.append('/X') # Ignore standard include paths - cmd.append('/we4668') # Enable C4668, "'X' is not defined" warning - for key, val in defines.items(): - assert re.match(r'^[A-Z0-9_]+$', key) - cmd.append('/D%s=%s' % (key, val)) - for directory_name in include_dirs: - directory_name = directory_name.replace('/', '\\') - assert os.path.exists(directory_name) - cmd.append('/I') - cmd.append(directory_name) - if with_comments: - cmd.append('/C') - cmd.append(('/E' if with_line_directives else '/EP')) - cmd.append(input_file) - - proc = Popen(cmd, stdout=PIPE, stderr=PIPE) - stdout, stderr = proc.communicate() - if proc.returncode != 0: - raise PreprocessException("%s returned result code %d: %s" % ( - cmd, proc.returncode, stderr)) - return stdout.replace('\r\n', '\n') + """Returns C preprocessed contents of input_file. + defines is a dict of #defines. + include_dirs is a list of directories. + Raises PreprocessException on error.""" + assert not isinstance(include_dirs, str) + if platform.system() == "Windows": + stdout = preprocess_msvc(input_file, defines, include_dirs) + else: + assert False, "Platform %s not (yet?) supported" % platform.system() + return stdout + + +def preprocess_msvc( + input_file, defines, include_dirs +): # pylint: disable=too-many-locals + def find_cpp_exe(): + for release in ("12.0", "13.0", "14.0"): + exe = ( + r"C:\Program Files (x86)\Microsoft Visual Studio %s\VC\bin\cl.exe" + % release + ) + if os.path.exists(exe): + return exe + raise LookupError("Cannot find %s: Install MSVC?" 
% exe) + + with_line_directives = False + with_comments = True + + # See https://msdn.microsoft.com/en-us/library/19z1t1wy.aspx for + # docs on command-line args + cmd = [find_cpp_exe(), "/nologo"] + cmd.append("/X") # Ignore standard include paths + cmd.append("/we4668") # Enable C4668, "'X' is not defined" warning + for key, val in defines.items(): + assert re.match(r"^[A-Z0-9_]+$", key) + cmd.append("/D%s=%s" % (key, val)) + for directory_name in include_dirs: + directory_name = directory_name.replace("/", "\\") + assert os.path.exists(directory_name) + cmd.append("/I") + cmd.append(directory_name) + if with_comments: + cmd.append("/C") + cmd.append(("/E" if with_line_directives else "/EP")) + cmd.append(input_file) + + proc = Popen(cmd, stdout=PIPE, stderr=PIPE) + stdout, stderr = proc.communicate() + if proc.returncode != 0: + raise PreprocessException( + "%s returned result code %d: %s" % (cmd, proc.returncode, stderr) + ) + return stdout.replace("\r\n", "\n") # --------------------------------------------------------------------------- # Generation # --------------------------------------------------------------------------- def get_defines(brush): - """Returns a dict of cpp #defines for the specified brush.""" - float_params = brush['floatParams'] - defines = {} - - try: - defines['TB_EMISSION_GAIN'] = str(float_params['EmissionGain']) - except KeyError: - pass - - try: - defines['TB_ALPHA_CUTOFF'] = str(float_params['Cutoff']) - defines['TB_HAS_ALPHA_CUTOFF'] = '1' if float_params['Cutoff'] < 1 else '0' - except KeyError: - defines['TB_HAS_ALPHA_CUTOFF'] = '0' - return defines - - -class Generator(): - """Instantiate this class to run generate().""" - def __init__(self, input_dir, include_dirs, brush_manifest_file): - self.input_dir = input_dir - self.include_dirs = include_dirs - self.template_dir = include_dirs[0] - assert os.path.exists(os.path.join(self.template_dir, "VertDefault.glsl")) - self.output_shaders = set() - self.brush_manifest_file = brush_manifest_file - with open(self.brush_manifest_file) as inf: - self.brush_manifest = json.load(inf) - - def get_handcrafted_shader(self, shader_name): - """shader_name is the name of the destination file. - Returns path to the handcrafted shader, which may not exist.""" - assert shader_name.endswith('.glsl') - full_name = os.path.join(self.input_dir, os.path.basename(shader_name)) - return full_name - - @staticmethod - def get_frag_template(brush): - """Given a brush, returns the path to a fragment shader template.""" - # Figure out the template -- should probably be replaced with explicit #includes - if int(brush["blendMode"]) == 2: - # Additive blending. - return "FragAdditive.glsl" - if "OutlineMax" in brush['floatParams']: - # For now, this is the best available mapping. - return "FragDiffuse.glsl" - if "Color" not in brush['colorParams']: - # The absence of a Color field here indicates this should be an unlit - # shader. Maybe there's a better test? - return "FragUnlit.glsl" - if "Shininess" not in brush['floatParams']: - return "FragDiffuse.glsl" - # Unity Standard Diffuse + Specular. 
- return "FragStandard.glsl" - - def generate(self, out_root): - """Generate output for all brushes in the manifest.""" - brushes = self.brush_manifest["brushes"] - for _, brush in brushes.items(): - self.generate_brush(brush, out_root) - - def copy_from_prev_brush(self, brush, out_dir): - """Copies vert and frag shaders from brush's predecessor, if possible.""" + """Returns a dict of cpp #defines for the specified brush.""" + float_params = brush["floatParams"] + defines = {} + try: - old_guid, new_guid = UPDATED_GUIDS_BY_NAME[brush["name"]] + defines["TB_EMISSION_GAIN"] = str(float_params["EmissionGain"]) except KeyError: - return - if brush['guid'] == old_guid: - return - old_brush = self.brush_manifest['brushes'][old_guid] - new_brush = self.brush_manifest['brushes'][new_guid] - - def maybe_copy(shader_type): - old_hc = self.get_handcrafted_shader(os.path.join(out_dir, old_brush[shader_type])) - new_hc = self.get_handcrafted_shader(os.path.join(out_dir, new_brush[shader_type])) - if os.path.exists(old_hc) and not os.path.exists(new_hc): - with open(old_hc) as f: - txt = f.read() - txt = "// Auto-copied from %s\n%s" % (os.path.basename(old_hc), txt) - with open(new_hc, 'w') as f: - f.write(txt) - print('copy %s -> %s' % (os.path.basename(old_hc), os.path.basename(new_hc))) - - maybe_copy('vertexShader') - maybe_copy('fragmentShader') - - def generate_brush(self, brush, out_root): - """Generate output for a single brush in the manifest. - Pass the manifest entry.""" - # name = brush["name"] - # version = brush["shaderVersion"] - # guid = brush["guid"] - out_dir = os.path.join(out_root, brush['folderName']) - - # float_params = brush["floatParams"] - # color_params = brush["colorParams"] - - defines = get_defines(brush) - - # Vertex shader - - vert_output = os.path.join(out_dir, brush['vertexShader']) - vert_input = self.get_handcrafted_shader(vert_output) - if not os.path.exists(vert_input): - self.copy_from_prev_brush(brush, out_dir) - if not os.path.exists(vert_input): - print("Auto-creating %s" % os.path.basename(vert_input)) - with open(vert_input, 'w') as f: - f.write('#include "VertDefault.glsl"\n') - self.preprocess(vert_input, vert_output, defines, self.include_dirs) - - # Fragment shader - - frag_output = os.path.join(out_dir, brush['fragmentShader']) - frag_input = self.get_handcrafted_shader(frag_output) - if not os.path.exists(frag_input): - print("Auto-creating %s" % os.path.basename(frag_input)) - with open(frag_input, 'w') as f: - f.write('#include "%s"\n' % self.get_frag_template(brush)) - self.preprocess(frag_input, frag_output, defines, self.include_dirs) - - @staticmethod - def preprocess(input_file, output_file, defines, include_dirs): - """Wrapper around global preprocess that does some massaging of - the input and output.""" - output_data = preprocess_lite(input_file, defines, include_dirs) + pass + try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as outf: - outf.write(output_data) + defines["TB_ALPHA_CUTOFF"] = str(float_params["Cutoff"]) + defines["TB_HAS_ALPHA_CUTOFF"] = "1" if float_params["Cutoff"] < 1 else "0" + except KeyError: + defines["TB_HAS_ALPHA_CUTOFF"] = "0" + return defines + + +class Generator: + """Instantiate this class to run generate().""" + + def __init__(self, input_dir, include_dirs, brush_manifest_file): + self.input_dir = input_dir + self.include_dirs = include_dirs + self.template_dir = include_dirs[0] + assert os.path.exists(os.path.join(self.template_dir, "VertDefault.glsl")) + 
self.output_shaders = set() + self.brush_manifest_file = brush_manifest_file + with open(self.brush_manifest_file) as inf: + self.brush_manifest = json.load(inf) + + def get_handcrafted_shader(self, shader_name): + """shader_name is the name of the destination file. + Returns path to the handcrafted shader, which may not exist.""" + assert shader_name.endswith(".glsl") + full_name = os.path.join(self.input_dir, os.path.basename(shader_name)) + return full_name + + @staticmethod + def get_frag_template(brush): + """Given a brush, returns the path to a fragment shader template.""" + # Figure out the template -- should probably be replaced with explicit #includes + if int(brush["blendMode"]) == 2: + # Additive blending. + return "FragAdditive.glsl" + if "OutlineMax" in brush["floatParams"]: + # For now, this is the best available mapping. + return "FragDiffuse.glsl" + if "Color" not in brush["colorParams"]: + # The absence of a Color field here indicates this should be an unlit + # shader. Maybe there's a better test? + return "FragUnlit.glsl" + if "Shininess" not in brush["floatParams"]: + return "FragDiffuse.glsl" + # Unity Standard Diffuse + Specular. + return "FragStandard.glsl" + + def generate(self, out_root): + """Generate output for all brushes in the manifest.""" + brushes = self.brush_manifest["brushes"] + for _, brush in brushes.items(): + self.generate_brush(brush, out_root) + + def copy_from_prev_brush(self, brush, out_dir): + """Copies vert and frag shaders from brush's predecessor, if possible.""" + try: + old_guid, new_guid = UPDATED_GUIDS_BY_NAME[brush["name"]] + except KeyError: + return + if brush["guid"] == old_guid: + return + old_brush = self.brush_manifest["brushes"][old_guid] + new_brush = self.brush_manifest["brushes"][new_guid] + + def maybe_copy(shader_type): + old_hc = self.get_handcrafted_shader( + os.path.join(out_dir, old_brush[shader_type]) + ) + new_hc = self.get_handcrafted_shader( + os.path.join(out_dir, new_brush[shader_type]) + ) + if os.path.exists(old_hc) and not os.path.exists(new_hc): + with open(old_hc) as f: + txt = f.read() + txt = "// Auto-copied from %s\n%s" % (os.path.basename(old_hc), txt) + with open(new_hc, "w") as f: + f.write(txt) + print( + "copy %s -> %s" + % (os.path.basename(old_hc), os.path.basename(new_hc)) + ) + + maybe_copy("vertexShader") + maybe_copy("fragmentShader") + + def generate_brush(self, brush, out_root): + """Generate output for a single brush in the manifest. 
+ Pass the manifest entry.""" + # name = brush["name"] + # version = brush["shaderVersion"] + # guid = brush["guid"] + out_dir = os.path.join(out_root, brush["folderName"]) + + # float_params = brush["floatParams"] + # color_params = brush["colorParams"] + + defines = get_defines(brush) + + # Vertex shader + + vert_output = os.path.join(out_dir, brush["vertexShader"]) + vert_input = self.get_handcrafted_shader(vert_output) + if not os.path.exists(vert_input): + self.copy_from_prev_brush(brush, out_dir) + if not os.path.exists(vert_input): + print("Auto-creating %s" % os.path.basename(vert_input)) + with open(vert_input, "w") as f: + f.write('#include "VertDefault.glsl"\n') + self.preprocess(vert_input, vert_output, defines, self.include_dirs) + + # Fragment shader + + frag_output = os.path.join(out_dir, brush["fragmentShader"]) + frag_input = self.get_handcrafted_shader(frag_output) + if not os.path.exists(frag_input): + print("Auto-creating %s" % os.path.basename(frag_input)) + with open(frag_input, "w") as f: + f.write('#include "%s"\n' % self.get_frag_template(brush)) + self.preprocess(frag_input, frag_output, defines, self.include_dirs) + + @staticmethod + def preprocess(input_file, output_file, defines, include_dirs): + """Wrapper around global preprocess that does some massaging of + the input and output.""" + output_data = preprocess_lite(input_file, defines, include_dirs) + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, "w") as outf: + outf.write(output_data) def finalize_dir(tmp_dir, out_dir): - """Move files from tmp_dir to out_dir. - Print output for changed, new, or removed files. - Avoids touching timestamp if file not changed.""" - # Could handle this case, but it's unexpepcted - assert not os.path.isfile(out_dir), "Unexpected: %s is a file" % out_dir - try: - os.makedirs(out_dir) - except OSError: - pass - - tmp_files = set(os.listdir(tmp_dir)) - out_files = set(os.listdir(out_dir)) - - for filename in tmp_files: - tmp_file = os.path.join(tmp_dir, filename) - out_file = os.path.join(out_dir, filename) - if os.path.isdir(tmp_file): - finalize_dir(tmp_file, out_file) - elif os.path.isdir(out_file): - assert False, "Unexpected: %s is a dir" % out_file - elif filename not in out_files: - shutil.copyfile(tmp_file, out_file) - print('+', out_file) - else: - with open(tmp_file, 'rb') as tmp: - with open(out_file, "rb") as out: - if tmp.read() != out.read(): + """Move files from tmp_dir to out_dir. + Print output for changed, new, or removed files. 
+ Avoids touching timestamp if file not changed.""" + # Could handle this case, but it's unexpepcted + assert not os.path.isfile(out_dir), "Unexpected: %s is a file" % out_dir + try: + os.makedirs(out_dir) + except OSError: + pass + + tmp_files = set(os.listdir(tmp_dir)) + out_files = set(os.listdir(out_dir)) + + for filename in tmp_files: + tmp_file = os.path.join(tmp_dir, filename) + out_file = os.path.join(out_dir, filename) + if os.path.isdir(tmp_file): + finalize_dir(tmp_file, out_file) + elif os.path.isdir(out_file): + assert False, "Unexpected: %s is a dir" % out_file + elif filename not in out_files: shutil.copyfile(tmp_file, out_file) - print('~', out_file) - - # Cannot remove unwanted files (yet); output directory contains input files also - if False: # pylint: disable=using-constant-test - for filename in out_files - tmp_files: - out_file = os.path.join(out_dir, filename) - if not os.path.isfile(out_file): - continue - print('-', out_file) - destroy(out_file) + print("+", out_file) + else: + with open(tmp_file, "rb") as tmp: + with open(out_file, "rb") as out: + if tmp.read() != out.read(): + shutil.copyfile(tmp_file, out_file) + print("~", out_file) + + # Cannot remove unwanted files (yet); output directory contains input files also + if False: # pylint: disable=using-constant-test + for filename in out_files - tmp_files: + out_file = os.path.join(out_dir, filename) + if not os.path.isfile(out_file): + continue + print("-", out_file) + destroy(out_file) def main(): - parser = argparse.ArgumentParser() - parser.add_argument('brush_manifest', nargs='?', default=None, - help='Path to exportManifest.json (optional)') - parser.add_argument('export_root', nargs='?', default=None, - help='Output root directory (optional)') - args = parser.parse_args() - - project_root = os.path.normpath( - os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')) - if args.brush_manifest is None: - args.brush_manifest = os.path.join(project_root, 'Support/exportManifest.json') - if args.export_root is None: - args.export_root = os.path.join(project_root, 'Support/TiltBrush.com/shaders/brushes') - - tmp_dir = os.path.join(project_root, 'Temp/tmp_gltf') - input_dir = os.path.join(project_root, 'Support/GlTFShaders/Generators') - include_dirs = [os.path.join(project_root, 'Support/GlTFShaders/include')] - - gen = Generator(input_dir, include_dirs, args.brush_manifest) - destroy(tmp_dir) - gen.generate(tmp_dir) - print("Writing to %s" % os.path.normpath(args.export_root)) - finalize_dir(tmp_dir, args.export_root) - destroy(tmp_dir) - - -if __name__ == '__main__': - main() + parser = argparse.ArgumentParser() + parser.add_argument( + "brush_manifest", + nargs="?", + default=None, + help="Path to exportManifest.json (optional)", + ) + parser.add_argument( + "export_root", nargs="?", default=None, help="Output root directory (optional)" + ) + args = parser.parse_args() + + project_root = os.path.normpath( + os.path.join(os.path.dirname(os.path.abspath(__file__)), "../..") + ) + if args.brush_manifest is None: + args.brush_manifest = os.path.join(project_root, "Support/exportManifest.json") + if args.export_root is None: + args.export_root = os.path.join( + project_root, "Support/TiltBrush.com/shaders/brushes" + ) + + tmp_dir = os.path.join(project_root, "Temp/tmp_gltf") + input_dir = os.path.join(project_root, "Support/GlTFShaders/Generators") + include_dirs = [os.path.join(project_root, "Support/GlTFShaders/include")] + + gen = Generator(input_dir, include_dirs, args.brush_manifest) + 
destroy(tmp_dir) + gen.generate(tmp_dir) + print("Writing to %s" % os.path.normpath(args.export_root)) + finalize_dir(tmp_dir, args.export_root) + destroy(tmp_dir) + + +if __name__ == "__main__": + main() diff --git a/Support/bin/gltf_export_textures.py b/Support/bin/gltf_export_textures.py index 8effc135f2..e96e3e3afd 100644 --- a/Support/bin/gltf_export_textures.py +++ b/Support/bin/gltf_export_textures.py @@ -28,14 +28,14 @@ import sys try: - import PIL - import PIL.Image - import PIL.ImageFilter + import PIL + import PIL.Image + import PIL.ImageFilter except ImportError as e: - print(e) - print("You need to 'pip install pillow' to run this script") - sys.exit(1) + print(e) + print("You need to 'pip install pillow' to run this script") + sys.exit(1) MEG = 1024.0 * 1024.0 @@ -43,61 +43,68 @@ def process_request(request): - """Process a single downsample-and-copy request""" - im = PIL.Image.open(request['source']) - if 'P' in im.mode: - assert False, "Unexpected: png with indexed color" - # Un-palettize - im = im.convert() + """Process a single downsample-and-copy request""" + im = PIL.Image.open(request["source"]) + if "P" in im.mode: + assert False, "Unexpected: png with indexed color" + # Un-palettize + im = im.convert() - # Don't upsample! Only downsample - desired_width = int(request['desiredWidth']) - assert desired_width == request['desiredWidth'] - desired_height = int(request['desiredHeight']) - assert desired_height == request['desiredHeight'] + # Don't upsample! Only downsample + desired_width = int(request["desiredWidth"]) + assert desired_width == request["desiredWidth"] + desired_height = int(request["desiredHeight"]) + assert desired_height == request["desiredHeight"] - assert im.width >= desired_width - assert im.height >= desired_height + assert im.width >= desired_width + assert im.height >= desired_height - bpp = {'RGBA': 4, 'RGB': 3}[im.mode] - request['input_bytes'] = im.width * im.height * bpp - request['output_bytes'] = desired_width * desired_height * bpp + bpp = {"RGBA": 4, "RGB": 3}[im.mode] + request["input_bytes"] = im.width * im.height * bpp + request["output_bytes"] = desired_width * desired_height * bpp - if request['isBump']: - im = im.filter(PIL.ImageFilter.GaussianBlur(radius=BLUR_RADIUS_TEXELS)) + if request["isBump"]: + im = im.filter(PIL.ImageFilter.GaussianBlur(radius=BLUR_RADIUS_TEXELS)) - desired_size = (desired_width, desired_height) - if im.size != desired_size: - im = im.resize(desired_size, resample=PIL.Image.BILINEAR) + desired_size = (desired_width, desired_height) + if im.size != desired_size: + im = im.resize(desired_size, resample=PIL.Image.BILINEAR) - outdir = os.path.dirname(request['destination']) - if not os.path.isdir(outdir): - os.makedirs(outdir) + outdir = os.path.dirname(request["destination"]) + if not os.path.isdir(outdir): + os.makedirs(outdir) - im.save(request['destination']) + im.save(request["destination"]) def main(): - project_root = os.path.normpath( - os.path.join(os.path.dirname(os.path.abspath(__file__)), '../..')) - - parser = argparse.ArgumentParser() - parser.add_argument('requests', nargs='?', default=None, - help='Path to a json containing export requests') - args = parser.parse_args() - if args.requests is None: - args.requests = os.path.join(project_root, 'Temp', 'ExportRequests.json') - - with open(args.requests) as inf: - requests = json.load(inf) - - for request in requests['exports']: - process_request(request) - - input_bytes = sum(r['input_bytes'] for r in requests['exports']) - output_bytes = 
sum(r['output_bytes'] for r in requests['exports']) - print("Input: %.2f MiB Output: %.2f MiB" % (input_bytes / MEG, output_bytes / MEG)) - - -if __name__ == '__main__': - main() + project_root = os.path.normpath( + os.path.join(os.path.dirname(os.path.abspath(__file__)), "../..") + ) + + parser = argparse.ArgumentParser() + parser.add_argument( + "requests", + nargs="?", + default=None, + help="Path to a json containing export requests", + ) + args = parser.parse_args() + if args.requests is None: + args.requests = os.path.join(project_root, "Temp", "ExportRequests.json") + + with open(args.requests) as inf: + requests = json.load(inf) + + for request in requests["exports"]: + process_request(request) + + input_bytes = sum(r["input_bytes"] for r in requests["exports"]) + output_bytes = sum(r["output_bytes"] for r in requests["exports"]) + print( + "Input: %.2f MiB Output: %.2f MiB" % (input_bytes / MEG, output_bytes / MEG) + ) + + +if __name__ == "__main__": + main() diff --git a/Support/bin/hack_tilt.py b/Support/bin/hack_tilt.py index 7705c4fb67..a4acd0ed8d 100644 --- a/Support/bin/hack_tilt.py +++ b/Support/bin/hack_tilt.py @@ -15,40 +15,49 @@ # limitations under the License. import argparse + try: - from tiltbrush.tilt import Tilt + from tiltbrush.tilt import Tilt except ImportError: - print("You need the Tilt Brush Toolkit (https://github.com/googlevr/tilt-brush-toolkit)") - print("and then put its Python directory in your PYTHONPATH.") - raise + print( + "You need the Tilt Brush Toolkit (https://github.com/googlevr/tilt-brush-toolkit)" + ) + print("and then put its Python directory in your PYTHONPATH.") + raise def main(): - parser = argparse.ArgumentParser() - parser.add_argument('--set-min-y', dest='desired_min_y', type=float, - default=None, - help='Move sketch up/down to match the passed y value') - parser.add_argument('files', nargs='+') - args = parser.parse_args() - - for filename in args.files: - tilt = Tilt(filename) - sketch = tilt.sketch - print('=== %s ===' % filename) - - if args.desired_min_y is not None: - min_y = min(cp.position[1] - for stroke in sketch.strokes - for cp in stroke.controlpoints) - delta = args.desired_min_y - min_y - for stroke in sketch.strokes: - for cp in stroke.controlpoints: - cp.position[1] += delta - - print(filename) - print('Moved by %.3f' % delta) - tilt.write_sketch() - - -if __name__ == '__main__': - main() + parser = argparse.ArgumentParser() + parser.add_argument( + "--set-min-y", + dest="desired_min_y", + type=float, + default=None, + help="Move sketch up/down to match the passed y value", + ) + parser.add_argument("files", nargs="+") + args = parser.parse_args() + + for filename in args.files: + tilt = Tilt(filename) + sketch = tilt.sketch + print("=== %s ===" % filename) + + if args.desired_min_y is not None: + min_y = min( + cp.position[1] + for stroke in sketch.strokes + for cp in stroke.controlpoints + ) + delta = args.desired_min_y - min_y + for stroke in sketch.strokes: + for cp in stroke.controlpoints: + cp.position[1] += delta + + print(filename) + print("Moved by %.3f" % delta) + tilt.write_sketch() + + +if __name__ == "__main__": + main() diff --git a/Support/bin/import_tiltasaurus.py b/Support/bin/import_tiltasaurus.py index 701550f0f0..785f29cfef 100644 --- a/Support/bin/import_tiltasaurus.py +++ b/Support/bin/import_tiltasaurus.py @@ -21,35 +21,47 @@ def iter_words_and_categories(filename): - with open(filename) as inf: - reader = csv.reader(inf) - it = iter(reader) - try: - next(it) # Skip first row - except 
StopIteration: - # This should never happen; this code is to meet PEP479 by returning instead of raising - return - for row in it: - if len(row) == 2 and row[0] != '' and row[1] != '': - yield row + with open(filename) as inf: + reader = csv.reader(inf) + it = iter(reader) + try: + next(it) # Skip first row + except StopIteration: + # This should never happen; this code is to meet PEP479 by returning instead of raising + return + for row in it: + if len(row) == 2 and row[0] != "" and row[1] != "": + yield row def main(): - parser = argparse.ArgumentParser("Converts google docs .csv to tiltasaurus.json") - parser.add_argument('-i', dest='input', required=True, help='Name of input .csv file') - args = parser.parse_args() - data = list(iter_words_and_categories(args.input)) - data.sort(key=lambda word_category1: (word_category1[1].lower(), word_category1[0].lower())) - - categories = [] - for _, group in itertools.groupby(data, key=lambda word_category: word_category[1].lower()): - group = list(group) - category = {"Name": group[0][1], "Words": sorted(set(pair[0] for pair in group))} - categories.append(category) - - with open('tiltasaurus.json', 'w') as outf: - outf.write(json.dumps({"Categories": categories}, indent=2)) - print("Wrote tiltasaurus.json") + parser = argparse.ArgumentParser("Converts google docs .csv to tiltasaurus.json") + parser.add_argument( + "-i", dest="input", required=True, help="Name of input .csv file" + ) + args = parser.parse_args() + data = list(iter_words_and_categories(args.input)) + data.sort( + key=lambda word_category1: ( + word_category1[1].lower(), + word_category1[0].lower(), + ) + ) + + categories = [] + for _, group in itertools.groupby( + data, key=lambda word_category: word_category[1].lower() + ): + group = list(group) + category = { + "Name": group[0][1], + "Words": sorted(set(pair[0] for pair in group)), + } + categories.append(category) + + with open("tiltasaurus.json", "w") as outf: + outf.write(json.dumps({"Categories": categories}, indent=2)) + print("Wrote tiltasaurus.json") main() diff --git a/Support/bin/jpg2png.py b/Support/bin/jpg2png.py index a484873fb9..1a9aa03f3d 100755 --- a/Support/bin/jpg2png.py +++ b/Support/bin/jpg2png.py @@ -20,46 +20,50 @@ class InvalidFile(Exception): - pass + pass def get_alt_file(infile): - f, ext = os.path.splitext(infile) - ext = ext.lower() - if ext in ('.jpg', '.jpeg'): - return f + '.png' - if ext == '.png': - return f + '.jpg' - raise InvalidFile("Can't do anything with %s" % infile) + f, ext = os.path.splitext(infile) + ext = ext.lower() + if ext in (".jpg", ".jpeg"): + return f + ".png" + if ext == ".png": + return f + ".jpg" + raise InvalidFile("Can't do anything with %s" % infile) def convert(infile): - outfile = get_alt_file(infile) - if not os.path.exists(outfile): - Image.open(infile).save(outfile) - print("Saved ", outfile) - else: - print("%s already exists" % outfile) + outfile = get_alt_file(infile) + if not os.path.exists(outfile): + Image.open(infile).save(outfile) + print("Saved ", outfile) + else: + print("%s already exists" % outfile) def main(): - parser = argparse.ArgumentParser(description="Convert files between jpg and png") - parser.add_argument('--all-jpg', help="Recursively convert all jpg files to png", - action='store_true') - parser.add_argument('files', type=str, nargs='*', - help="Files to convert to the other format") - args = parser.parse_args() + parser = argparse.ArgumentParser(description="Convert files between jpg and png") + parser.add_argument( + "--all-jpg", + 
help="Recursively convert all jpg files to png", + action="store_true", + ) + parser.add_argument( + "files", type=str, nargs="*", help="Files to convert to the other format" + ) + args = parser.parse_args() - for arg in args.files: - convert(arg) + for arg in args.files: + convert(arg) - if args.all_jpg: - for (r, _, fs) in os.walk('.'): - for f in fs: - if f.endswith('.jpg'): - fullf = os.path.join(r, f) - if not os.path.exists(get_alt_file(fullf)): - convert(fullf) + if args.all_jpg: + for (r, _, fs) in os.walk("."): + for f in fs: + if f.endswith(".jpg"): + fullf = os.path.join(r, f) + if not os.path.exists(get_alt_file(fullf)): + convert(fullf) main() diff --git a/Support/bin/printing_tool.py b/Support/bin/printing_tool.py index 801d2e7d88..31613156cc 100644 --- a/Support/bin/printing_tool.py +++ b/Support/bin/printing_tool.py @@ -19,7 +19,11 @@ # Add ../Python to sys.path sys.path.append( - os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'Python')) + os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "Python") +) + +from tbdata.printing import ( # noqa: E402 pylint: disable=import-error,wrong-import-position + main, +) -from tbdata.printing import main # noqa: E402 pylint: disable=import-error,wrong-import-position main() diff --git a/Support/bin/tag_build.py b/Support/bin/tag_build.py index a3bbbe45b5..20e919fb73 100644 --- a/Support/bin/tag_build.py +++ b/Support/bin/tag_build.py @@ -21,157 +21,180 @@ import subprocess import webbrowser -TEST_STEAM_1 = 'testing_point_release 1327514 Sep 9, 2016 @ 12:26pm Release 7.1-06cb99f' -TEST_STEAM_2 = '1327514 Sep 9, 2016 @ 12:26pm Release 7.1-06cb99f' +TEST_STEAM_1 = "testing_point_release 1327514 Sep 9, 2016 @ 12:26pm Release 7.1-06cb99f" +TEST_STEAM_2 = "1327514 Sep 9, 2016 @ 12:26pm Release 7.1-06cb99f" # This is their old format # TEST_OCULUS_1 = '''9.0-9197ad956Feb 17, 2017 (4:34pm) # Complete # No release notes''' -TEST_OCULUS_1 = '''Sep 06, 2017 (4:27pm) +TEST_OCULUS_1 = """Sep 06, 2017 (4:27pm) Version: 14.0-67933466c Code: 204 Complete -Release 14.0-67933466c | machk@skillman0-w to testing_release''' +Release 14.0-67933466c | machk@skillman0-w to testing_release""" # This is actually more like storefront + platform STOREFRONT_INFO = { - 'steam': { - # Redacted: 'https://partner.steamgames.com/apps/builds/XXXXXX" - 'url': '' - }, - 'oculus-desktop': { - # Redacted: 'https://dashboard.oculus.com/application/XXXXXXXXXXXXXXXX/channel/XXXXXXXXXXXXXXXX' - '' - }, - 'oculus-quest': { - # Redacted: 'https://dashboard.oculus.com/application/XXXXXXXXXXXXXXXX/channel/XXXXXXXXXXXXXXX' - 'url': '' - }, + "steam": { + # Redacted: 'https://partner.steamgames.com/apps/builds/XXXXXX" + "url": "" + }, + "oculus-desktop": { + # Redacted: 'https://dashboard.oculus.com/application/XXXXXXXXXXXXXXXX/channel/XXXXXXXXXXXXXXXX' + "" + }, + "oculus-quest": { + # Redacted: 'https://dashboard.oculus.com/application/XXXXXXXXXXXXXXXX/channel/XXXXXXXXXXXXXXX' + "url": "" + }, } STOREFRONTS = sorted(STOREFRONT_INFO.keys()) class Error(Exception): - pass + pass def make_tag_name(full_version, store): - """Converts a version number to a tag name, and does some sanity checking.""" - (major_version, _) = full_version.split('.', 1) - if full_version.endswith('b'): - raise Error('Do you really want to tag a beta version %s?' 
% full_version) - if major_version == '19' and store != 'oculus-quest': - print('WARNING: 19.x builds are only for Oculus Quest') - return 'v%s/%s-%s' % (major_version, full_version, store) + """Converts a version number to a tag name, and does some sanity checking.""" + (major_version, _) = full_version.split(".", 1) + if full_version.endswith("b"): + raise Error("Do you really want to tag a beta version %s?" % full_version) + if major_version == "19" and store != "oculus-quest": + print("WARNING: 19.x builds are only for Oculus Quest") + return "v%s/%s-%s" % (major_version, full_version, store) def make_steam_cmd(line, store): - """Returns a dictionary with the keys 'buildid', 'version', 'sha'""" - # 1327514 Sep 9, 2016 @ 12:26pm Release 7.1-06cb99f - pat = re.compile(r''' + """Returns a dictionary with the keys 'buildid', 'version', 'sha'""" + # 1327514 Sep 9, 2016 @ 12:26pm Release 7.1-06cb99f + pat = re.compile( + r""" (?P[a-z_]+ \t )? (?P\d+) \t (?P[^\t]+) \t - Release \s (?P\d+\.\d+)-(?P[0-9a-f]+)''', re.X) - m = pat.match(line) - if m is None: - raise Error('Could not parse. Input should look something like\n%s' % TEST_STEAM_2) - dct = m.groupdict() - return ['git', 'tag', - '-m', 'Steam build %s' % (dct['buildid']), - make_tag_name(dct['version'], store), dct['sha']] + Release \s (?P\d+\.\d+)-(?P[0-9a-f]+)""", + re.X, + ) + m = pat.match(line) + if m is None: + raise Error( + "Could not parse. Input should look something like\n%s" % TEST_STEAM_2 + ) + dct = m.groupdict() + return [ + "git", + "tag", + "-m", + "Steam build %s" % (dct["buildid"]), + make_tag_name(dct["version"], store), + dct["sha"], + ] def make_oculus_cmd(txt, store): - """store: either 'oculus-desktop' or 'oculus-quest'""" - # From https://dashboard.oculus.com/application/1111640318951750/channel/1019939354782811 - # Apr 27, 2017 (9:44am) - # Version: 10.0-b3edd1a Code: 107 - # Complete - # Release 10.0-b3edd1a | pld@PHACKETT2-W to testing_release - assert store in ('oculus-desktop', 'oculus-quest') - pat = re.compile(r''' + """store: either 'oculus-desktop' or 'oculus-quest'""" + # From https://dashboard.oculus.com/application/1111640318951750/channel/1019939354782811 + # Apr 27, 2017 (9:44am) + # Version: 10.0-b3edd1a Code: 107 + # Complete + # Release 10.0-b3edd1a | pld@PHACKETT2-W to testing_release + assert store in ("oculus-desktop", "oculus-quest") + pat = re.compile( + r""" (?P[A-Za-z,0-9\ ]+) \s \( (?P