From aeb98aaca4822768d0d7d5eb0b7f2d1cc6e5b963 Mon Sep 17 00:00:00 2001 From: Mike Boers Date: Wed, 21 Oct 2015 11:00:22 -0700 Subject: [PATCH] Docs, and remove unused raw_* methods --- TODO.md | 3 ++ docs/index.rst | 42 +++++++----------- docs/python_api.rst | 4 +- docs/setup.rst | 4 -- sgschema/entity.py | 1 - sgschema/field.py | 3 +- sgschema/schema.py | 105 +++++++++++++++++++++++++++----------------- sgschema/utils.py | 18 -------- 8 files changed, 87 insertions(+), 93 deletions(-) delete mode 100644 docs/setup.rst diff --git a/TODO.md b/TODO.md index 3abc42f..7a223b1 100644 --- a/TODO.md +++ b/TODO.md @@ -1,4 +1,7 @@ +- Metadata/config on each entity/field at some point, for users to stuff + whatever they want. + - Public API: Schema.is_variable_len(spec) -> can it return non-one results? diff --git a/docs/index.rst b/docs/index.rst index 27b7704..526227a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -13,7 +13,7 @@ different across the history of Shotgun. You may provide aliases and tags for entity types and fields, as well as automatically detect and use the common ``"sg_"`` prefix on fields. Example uses -(from a theoretical pipeline pipeline): +(from a theoretical pipeline): - ``$Publish`` resolves to the ``PublishEvent`` entity type; - ``$sgpublish:type`` is aliased to the ``PublishEvent.sg_type`` field; @@ -25,39 +25,29 @@ automatically detect and use the common ``"sg_"`` prefix on fields. Example uses This project is tightly integrated into SGSession, and used in all operations. -Caching -------- +Dynamic Loading and Caching +--------------------------- -In general, schemas should be preprocessed and cached, then reloaded for each -use. To read the schema, reduce it, and cache it:: +Packages can define their own schemas at runtime via ``pkg_resources`` +entry points. The :meth:`.Schema.load_entry_points` calls registered +functions (to ``sgcache_loaders`` by default) in order to construct a schema. 
- schema = Schema() - schema.read(shotgun_object) - schema.dump('/path/to/cache.json') - -The cached schema can then be loaded manually:: +A good pattern for creating a schema object is:: schema = Schema() - schema.load('/path/to/cache.json') - -The :meth:`Schema.from_cache` method uses setuptools' entrypoints to find -cached schemas from the runtime environment:: + schema.read(shotgun_api3_instance) + schema.load_entry_points(base_url) - schema = sgschema.Schema.from_cache(shotgun.base_url) +This is extremely time consuming to run at startup, so it is recommended to +pre-process and cache the schema. First load the schema as above, then dump +it to a file:: -That class method calls any functions registered as a ``sgschema_cache`` -setuptools entrypoint. Those functions are called with the passed URL. -Whatever non-None value is returned first is loaded into the schema. The process -is effectively:: - - schema = Schema() - for func in funcs_from_entrypoints: - raw_schema = func(base_url) - if raw_schema: - schema.load(raw_schema) - break + schema.dump(os.path.join(cache_dir, '%s.json' % base_url)) +Then, register an entry point to the ``sgschema_cache`` group, which loads it:: + def load_cache(schema, base_url): + schema.load(os.path.join(cache_dir, '%s.json' % base_url)) diff --git a/docs/python_api.rst b/docs/python_api.rst index bb4bee2..cd8d433 100644 --- a/docs/python_api.rst +++ b/docs/python_api.rst @@ -8,11 +8,11 @@ Python API .. automodule:: sgschema.entity -..autoclass:: sgschema.entity.Entity +.. autoclass:: sgschema.entity.Entity :members: .. automodule:: sgschema.field -..autoclass:: sgschema.field.Field +.. 
autoclass:: sgschema.field.Field :members: diff --git a/docs/setup.rst b/docs/setup.rst deleted file mode 100644 index ced6240..0000000 --- a/docs/setup.rst +++ /dev/null @@ -1,4 +0,0 @@ -Setup -===== - -xxx diff --git a/sgschema/entity.py b/sgschema/entity.py index 2203bbb..e8f9373 100644 --- a/sgschema/entity.py +++ b/sgschema/entity.py @@ -1,5 +1,4 @@ from .field import Field -from .utils import cached_property class Entity(object): diff --git a/sgschema/field.py b/sgschema/field.py index d11652e..33f5d3c 100644 --- a/sgschema/field.py +++ b/sgschema/field.py @@ -1,4 +1,3 @@ -from .utils import cached_property class Field(dict): @@ -14,7 +13,7 @@ def _reduce_raw(self, schema, raw_field): self.data_type = raw_field['data_type']['value'] - raw_private = schema._raw_private['entity_fields'][self.entity.name].get(self.name, {}) + raw_private = schema.raw_private['entity_fields'][self.entity.name].get(self.name, {}) if raw_private.get('identifier_column'): # It would be nice to add a "name" alias, but that might be diff --git a/sgschema/schema.py b/sgschema/schema.py index ed95a8a..218e944 100644 --- a/sgschema/schema.py +++ b/sgschema/schema.py @@ -6,7 +6,7 @@ from .entity import Entity from .field import Field -from .utils import cached_property, merge_update +from .utils import merge_update class Schema(object): @@ -15,13 +15,17 @@ class Schema(object): @classmethod def from_cache(cls, base_url): - """Use setuptools' entrypoints to load a cached schema. + """Use setuptools' entry points to load a cached schema. - Calls functions registered to "sgschema_cache" until one of them - returns something non-None. That is loaded into the schema. + Calls functions registered to "sgschema_cache", passing them a + ``Schema`` instance and the base URL, giving them the opportunity + to load from their caches. - The resulting object is memoized by the given URL, so multiple calls - to this method result in the same ``Schema`` instance. 
+ If a function wants to assert it is the last entry point, it can + raise ``StopIteration``. + + The resulting ``Schema`` is memoized for the base URL, so it is only + constructed once per Python session. :param str base_url: The ``shotgun.base_url`` to lookup the schema for. :returns: A ``Schema`` instance. @@ -51,9 +55,14 @@ def from_cache(cls, base_url): def __init__(self): - self._raw_fields = None - self._raw_entities = None - self._raw_private = None + #: Result from ``shotgun.schema_read()``. + self.raw_fields = None + + #: Result from ``shotgun.schema_entity_read()``. + self.raw_entities = None + + #: Result from scraping ``{base_url}/page/schema``. + self.raw_private = None self.entities = {} self.entity_aliases = {} @@ -86,8 +95,8 @@ def read(self, sg): # SG.schema_field_read() is the same data per-entity as SG.schema_read(). # SG.schema_entity_read() contains global name and visibility of each # entity type, but the visibility is likely to just be True for everything. - self._raw_fields = sg.schema_read() - self._raw_entities = sg.schema_entity_read() + self.raw_fields = sg.schema_read() + self.raw_entities = sg.schema_entity_read() # We also want the private schema which drives the website. # See . 
@@ -101,30 +110,22 @@ def read(self, sg): if not m: raise ValueError('schema does not appear to be at %s/page/schema' % sg.base_url) - self._raw_private = json.loads(m.group(1)) + self.raw_private = json.loads(m.group(1)) self._reduce_raw() def _reduce_raw(self): - for type_name, raw_entity in self._raw_entities.iteritems(): + for type_name, raw_entity in self.raw_entities.iteritems(): entity = self._get_or_make_entity(type_name) entity._reduce_raw(self, raw_entity) - for type_name, raw_fields in self._raw_fields.iteritems(): + for type_name, raw_fields in self.raw_fields.iteritems(): entity = self._get_or_make_entity(type_name) for field_name, raw_field in raw_fields.iteritems(): field = entity._get_or_make_field(field_name) field._reduce_raw(self, raw_field) - def dump_raw(self, path): - with open(path, 'w') as fh: - fh.write(json.dumps({ - 'raw_fields': self._raw_fields, - 'raw_entities': self._raw_entities, - 'raw_private': self._raw_private, - }, indent=4, sort_keys=True)) - def __getstate__(self): return dict((k, v) for k, v in ( ('entities', self.entities), @@ -141,24 +142,6 @@ def dump(self, path): with open(path, 'w') as fh: fh.write(json.dumps(self, indent=4, sort_keys=True, default=lambda x: x.__getstate__())) - def load_raw(self, path): - """Load a JSON file containing a raw schema.""" - raw = json.loads(open(path).read()) - keys = 'raw_entities', 'raw_fields', 'raw_private' - - # Make sure we have the right keys, and only the right keys. 
- missing = [k for k in keys if k not in raw] - if missing: - raise ValueError('missing keys in raw schema: %s' % ', '.join(missing)) - if len(keys) != 3: - extra = [k for k in raw if k not in keys] - raise ValueError('extra keys in raw schema: %s' % ', '.join(extra)) - - for k in keys: - setattr(self, '_' + k, raw[k]) - - self._reduce_raw() - def load_directory(self, dir_path): """Load all ``.json`` and ``.yaml`` files in the given directory.""" for file_name in os.listdir(dir_path): @@ -271,7 +254,14 @@ def __setstate__(self, raw_schema): raise ValueError('unknown schema keys: %s' % ', '.join(sorted(raw_schema))) def resolve_entity(self, entity_spec, implicit_aliases=True, strict=False): + """Resolve an entity-type specification into a list of entity types. + + :param str entity_spec: An entity-type specification. + :param bool implicit_aliases: Lookup aliases without explicit ``$`` prefix? + :param bool strict: Raise ``ValueError`` if we can't identify the entity type? + :returns: ``list`` of entity types (``str``). + """ op = entity_spec[0] if op == '!': return [entity_spec[1:]] @@ -298,6 +288,13 @@ def resolve_entity(self, entity_spec, implicit_aliases=True, strict=False): return [entity_spec] def resolve_one_entity(self, entity_spec, **kwargs): + """Resolve an entity-type specification into a single entity type. + + Parameters are the same as for :meth:`resolve_entity`. + + :raises ValueError: when zero or multiple entity types are resolved. + + """ res = self.resolve_entity(entity_spec, **kwargs) if len(res) == 1: return res[0] @@ -351,6 +348,16 @@ def _resolve_field(self, entity_spec, field_spec, auto_prefix=True, implicit_ali return [field_spec] def resolve_field(self, entity_type, field_spec, auto_prefix=True, implicit_aliases=True, strict=False): + """Resolve a field specification into a list of field names. + + :param str entity_type: An entity type (``str``). + :param str field_spec: A field specification. 
+ :param bool auto_prefix: Lookup field with ``sg_`` prefix? + :param bool implicit_aliases: Lookup aliases without explicit ``$`` prefix? + :param bool strict: Raise ``ValueError`` if we can't identify the entity type? + :returns: ``list`` of field names. + + """ # Return a merge of lists of field specs. if isinstance(field_spec, (tuple, list)): @@ -388,6 +395,13 @@ def resolve_field(self, entity_type, field_spec, auto_prefix=True, implicit_alia return resolved_fields def resolve_one_field(self, entity_type, field_spec, **kwargs): + """Resolve a field specification into a single field name. + + Parameters are the same as for :meth:`resolve_field`. + + :raises ValueError: when zero or multiple fields are resolved. + + """ res = self.resolve_field(entity_type, field_spec, **kwargs) if len(res) == 1: return res[0] @@ -395,6 +409,17 @@ def resolve_one_field(self, entity_type, field_spec, **kwargs): raise ValueError('%r returned %s %s fields' % (field_spec, len(res), entity_type)) def resolve_structure(self, x, entity_type=None, **kwargs): + """Traverse a nested structure resolving names in entities. + + Recurses into ``list``, ``tuple`` and ``dict``, looking for ``dicts`` + with both a ``type`` and ``id`` (e.g. they could be Shotgun entities), + and resolves all other keys within them. + + All ``**kwargs`` are passed to :meth:`resolve_field`. + + Returns a copy of the nested structure. 
+ + """ if isinstance(x, (list, tuple)): return type(x)(self.resolve_structure(x, **kwargs) for x in x) diff --git a/sgschema/utils.py b/sgschema/utils.py index 4d7ef90..8162ab9 100644 --- a/sgschema/utils.py +++ b/sgschema/utils.py @@ -1,23 +1,5 @@ -class cached_property(object): - - def __init__(self, func, name=None, doc=None): - self.__name__ = name or func.__name__ - self.__module__ = func.__module__ - self.__doc__ = doc or func.__doc__ - self.func = func - - def __get__(self, obj, type=None): - if obj is None: - return self - try: - return obj.__dict__[self.__name__] - except KeyError: - obj.__dict__[self.__name__] = value = self.func(obj) - return value - - def merge_update(dst, src): for k, v in src.iteritems():