From ff02a855f6c5b1cf6cb9a06746d1c1a85bf1d4a5 Mon Sep 17 00:00:00 2001
From: Toke Jepsen
Date: Mon, 1 Jul 2019 09:05:53 +0100
Subject: [PATCH 01/69] Create validate_attributes.py

---
 .../maya/publish/validate_attributes.py       | 91 +++++++++++++++++++
 1 file changed, 91 insertions(+)
 create mode 100644 pype/plugins/maya/publish/validate_attributes.py

diff --git a/pype/plugins/maya/publish/validate_attributes.py b/pype/plugins/maya/publish/validate_attributes.py
new file mode 100644
index 0000000000..ae9bb29893
--- /dev/null
+++ b/pype/plugins/maya/publish/validate_attributes.py
@@ -0,0 +1,91 @@
+import pymel.core as pm
+
+import pyblish.api
+import pype.api
+
+
+class ValidateAttributes(pyblish.api.ContextPlugin):
+    """Ensure attributes are consistent.
+
+    Attributes to validate and their values come from the
+    "maya/attributes.json" preset, which needs this structure:
+        {
+            "family": {
+                "node_name.attribute_name": attribute_value
+            }
+        }
+    """
+
+    order = pype.api.ValidateContentsOrder
+    label = "Attributes"
+    hosts = ["maya"]
+    actions = [pype.api.RepairContextAction]
+
+    def process(self, context):
+        invalid = self.get_invalid(context, compute=True)
+        if invalid:
+            raise RuntimeError(
+                "Found attributes with invalid values: {}".format(invalid)
+            )
+
+    @classmethod
+    def get_invalid(cls, context, compute=False):
+        invalid = context.data.get("invalid_attributes", [])
+        if compute:
+            invalid = cls.get_invalid_attributes(context)
+
+        return invalid
+
+    @classmethod
+    def get_invalid_attributes(cls, context):
+        presets = context.data["presets"]["maya"]["attributes"]
+        invalid_attributes = []
+        for instance in context:
+            # Filter publishable instances.
+            if not instance.data["publish"]:
+                continue
+
+            # Filter families.
+            families = [instance.data["family"]]
+            families += instance.data.get("families", [])
+            families = list(set(families) & set(presets.keys()))
+            if not families:
+                continue
+
+            # Get all attributes to validate. Merge per node so multiple
+            # presets for one node do not overwrite each other.
+            attributes = {}
+            for family in families:
+                for preset in presets[family]:
+                    [node_name, attribute_name] = preset.split(".")
+                    node_attrs = attributes.setdefault(node_name, {})
+                    node_attrs[attribute_name] = presets[family][preset]
+
+            # Get invalid attributes.
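+            # Only nodes whose namespace-stripped name matches a preset
+            # key are checked; other nodes in the instance are ignored.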
+            nodes = [pm.PyNode(x) for x in instance]
+            for node in nodes:
+                name = node.name(stripNamespace=True)
+                if name not in attributes:
+                    continue
+
+                presets_to_validate = attributes[name]
+                for attribute in node.listAttr():
+                    if attribute.attrName() in presets_to_validate:
+                        expected = presets_to_validate[attribute.attrName()]
+                        if attribute.get() != expected:
+                            invalid_attributes.append(
+                                {
+                                    "attribute": attribute,
+                                    "expected": expected,
+                                    "current": attribute.get()
+                                }
+                            )
+
+        context.data["invalid_attributes"] = invalid_attributes
+        return invalid_attributes
+
+    @classmethod
+    def repair(cls, context):
+        # RepairContextAction passes the publish context here.
+        invalid = cls.get_invalid(context)
+        for data in invalid:
+            data["attribute"].set(data["expected"])

From b42e7bf8a3004c440151eb461ce3586d1257e919 Mon Sep 17 00:00:00 2001
From: antirotor
Date: Sat, 6 Jul 2019 17:08:45 +0200
Subject: [PATCH 02/69] added global burnin extractor

---
 pype/plugins/global/publish/extract_burnin.py | 57 +++++++++++++++++++
 1 file changed, 57 insertions(+)
 create mode 100644 pype/plugins/global/publish/extract_burnin.py

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
new file mode 100644
index 0000000000..e8eac1ef60
--- /dev/null
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -0,0 +1,57 @@
+import os
+import subprocess
+import pype.api
+import json
+
+
+class ExtractBurnin(pype.api.Extractor):
+
+    label = "Quicktime with burnins"
+    families = ["burnin"]
+    optional = True
+
+    def process(self, instance):
+        version = instance.context.data['version']
+        stagingdir = self.staging_dir(instance)
+        filename = "{0}".format(instance.name)
+
+        movieFile = filename + ".mov"
+        movieFileBurnin = filename + "Burn" + ".mov"
+
+        full_movie_path = os.path.join(stagingdir, movieFile)
+        full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
+
+        burnin_data = {
+            "input": full_movie_path.replace("\\", "/"),
+            "output": full_burnin_path.replace("\\", "/"),
+            "burnin_data": {
+                "username": instance.context.data['user'],
+                "asset": os.environ['AVALON_ASSET'],
+                "task": os.environ['AVALON_TASK'],
+                "start_frame": int(instance.data['startFrame']),
+                "version": "v" + str(version)
+            }
+        }
+
+        json_data = json.dumps(burnin_data)
+        scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'],
+                                  "pype",
+                                  "scripts",
+                                  "otio_burnin.py")
+
+        p = subprocess.Popen(
+            ['python', scriptpath, json_data]
+        )
+        p.wait()
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'mov',
+            'ext': 'mov',
+            'files': movieFileBurnin,
+            "stagingDir": stagingdir,
+            'preview': True
+        }
+        instance.data["representations"].append(representation)

From d2b6c7c1a93212c756f41e4fe5a5d2a182f84bef Mon Sep 17 00:00:00 2001
From: antirotor
Date: Mon, 8 Jul 2019 21:19:24 +0200
Subject: [PATCH 03/69] fix(burnin): will now work on representations,
 expecting burnin = True or 'burnin' in tags

---
 pype/plugins/global/publish/extract_burnin.py | 88 ++++++++++---------
 1 file changed, 47 insertions(+), 41 deletions(-)

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index e8eac1ef60..34ee33f602 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -5,53 +5,59 @@ import json
 
 
 class ExtractBurnin(pype.api.Extractor):
+    """
+    Extractor to create video with pre-defined burnins from
+    existing extracted video representation.
+
+    It will work only on representations having `burnin = True` or
+    `tags` including `burnin`.
+    """
 
     label = "Quicktime with burnins"
     families = ["burnin"]
     optional = True
 
     def process(self, instance):
-        version = instance.context.data['version']
-        stagingdir = self.staging_dir(instance)
-        filename = "{0}".format(instance.name)
-
-        movieFile = filename + ".mov"
-        movieFileBurnin = filename + "Burn" + ".mov"
-
-        full_movie_path = os.path.join(stagingdir, movieFile)
-        full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
-
-        burnin_data = {
-            "input": full_movie_path.replace("\\", "/"),
-            "output": full_burnin_path.replace("\\", "/"),
-            "burnin_data": {
-                "username": instance.context.data['user'],
-                "asset": os.environ['AVALON_ASSET'],
-                "task": os.environ['AVALON_TASK'],
-                "start_frame": int(instance.data['startFrame']),
-                "version": "v" + str(version)
-            }
-        }
-
-        json_data = json.dumps(burnin_data)
-        scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'],
-                                  "pype",
-                                  "scripts",
-                                  "otio_burnin.py")
-
-        p = subprocess.Popen(
-            ['python', scriptpath, json_data]
-        )
-        p.wait()
-
         if "representations" not in instance.data:
-            instance.data["representations"] = []
+            raise RuntimeError("Burnin needs already created mov to work on.")
+
+        # TODO: expand burnin data list to include all useful keys
+        burnin_data = {
+            "username": instance.context.data['user'],
+            "asset": os.environ['AVALON_ASSET'],
+            "task": os.environ['AVALON_TASK'],
+            "start_frame": int(instance.data['startFrame']),
+            "version": "v" + str(instance.context.data['version'])
+        }
+
+        for repre in instance.data["representations"]:
+            # Skip representations that are neither flagged with
+            # `burnin = True` nor tagged with "burnin".
+            if (not repre.get("burnin", False) and
+                    "burnin" not in repre.get("tags", [])):
+                continue
+
+            stagingdir = self.staging_dir(instance)
+            # Strip the extension before adding the "Burn" suffix.
+            filename, ext = os.path.splitext(repre["files"])
+
+            movieFileBurnin = filename + "Burn" + ext
+
+            full_movie_path = os.path.join(stagingdir, repre["files"])
+            full_burnin_path = os.path.join(stagingdir, movieFileBurnin)
+
+            # Build the script payload separately so the shared
+            # burnin_data dict is not overwritten inside the loop.
+            payload = {
+                "input": full_movie_path.replace("\\", "/"),
+                "output": full_burnin_path.replace("\\", "/"),
+                "burnin_data": burnin_data
+            }
+
+            json_data = json.dumps(payload)
+            scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'],
+                                      "pype",
+                                      "scripts",
+                                      "otio_burnin.py")
+
+            p = subprocess.Popen(
+                ['python', scriptpath, json_data]
+            )
+            p.wait()
+
+            repre['files'] = movieFileBurnin

From f09b05f7f07d02ebcbb09fbc22b9497c6eeb4ead Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Wed, 10 Jul 2019 14:49:10 +0200
Subject: [PATCH 04/69] fix(pype, nk, nks, premiere, aport): removing Anatomy,
 Dataflow, Colorspace loading as singleton

---
 pype/__init__.py                              |  6 +-
 pype/api.py                                   | 17 -----
 pype/aport/__init__.py                        |  3 -
 pype/aport/original/templates.py              | 10 ++-
 pype/nuke/__init__.py                         |  6 --
 pype/nuke/lib.py                              | 20 +++---
 pype/nuke/templates.py                        | 30 +++++---
 pype/nukestudio/__init__.py                   |  6 --
 .../global/publish/collect_templates.py       |  1 -
 .../launcher/actions/unused/PremierePro.py    |  5 +-
 pype/premiere/__init__.py                     |  6 --
 pype/premiere/templates.py                    | 10 ++-
 pype/templates.py                             | 71 ++-----------------
 13 files changed, 53 insertions(+), 138 deletions(-)

diff --git a/pype/__init__.py b/pype/__init__.py
index db77a58cd2..35511eb6c1 100644
--- a/pype/__init__.py
+++ b/pype/__init__.py
@@ -7,11 +7,6 @@ from .lib import filter_pyblish_plugins
 import logging
 log = logging.getLogger(__name__)
-# # do not delete these are mandatory -Anatomy = None -Dataflow = None -Colorspace = None - PACKAGE_DIR = os.path.dirname(__file__) PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins") @@ -26,6 +21,7 @@ def install(): pyblish.register_discovery_filter(filter_pyblish_plugins) avalon.register_plugin_path(avalon.Loader, LOAD_PATH) + def uninstall(): log.info("Deregistering global plug-ins..") pyblish.deregister_plugin_path(PUBLISH_PATH) diff --git a/pype/api.py b/pype/api.py index 2227236fd3..a5d7f0594d 100644 --- a/pype/api.py +++ b/pype/api.py @@ -18,15 +18,8 @@ from .action import ( from pypeapp import Logger -from . import ( - Anatomy, - Colorspace, - Dataflow -) from .templates import ( - load_data_from_templates, - reset_data_from_templates, get_project_name, get_project_code, get_hierarchy, @@ -65,11 +58,6 @@ __all__ = [ "ValidationException", - # contectual templates - # get data to preloaded templates - "load_data_from_templates", - "reset_data_from_templates", - # get contextual data "get_handle_irregular", "get_project_data", @@ -89,9 +77,4 @@ __all__ = [ "get_data_hierarchical_attr", "get_avalon_project_template", - # preloaded templates - "Anatomy", - "Colorspace", - "Dataflow", - ] diff --git a/pype/aport/__init__.py b/pype/aport/__init__.py index 9e1bde0a15..00e14924f0 100644 --- a/pype/aport/__init__.py +++ b/pype/aport/__init__.py @@ -50,9 +50,6 @@ def install(): avalon.data["familiesStateDefault"] = False avalon.data["familiesStateToggled"] = family_states - # load data from templates - api.load_data_from_templates() - # launch pico server pico_server_launch() diff --git a/pype/aport/original/templates.py b/pype/aport/original/templates.py index 2db1d58004..f66b5b35b1 100644 --- a/pype/aport/original/templates.py +++ b/pype/aport/original/templates.py @@ -1,10 +1,12 @@ from pype import api as pype +from pypeapp import Anatomy, config + log = pype.Logger().get_logger(__name__, "aport") def get_anatomy(**kwarg): - return pype.Anatomy + return Anatomy() def get_dataflow(**kwarg): @@ -15,7 +17,8 @@ def get_dataflow(**kwarg): assert any([host, cls]), log.error("aport.templates.get_dataflow():" "Missing mandatory kwargs `host`, `cls`") - aport_dataflow = getattr(pype.Dataflow, str(host), None) + presets = config.get_init_presets() + aport_dataflow = getattr(presets["dataflow"], str(host), None) aport_dataflow_node = getattr(aport_dataflow.nodes, str(cls), None) if preset: aport_dataflow_node = getattr(aport_dataflow_node, str(preset), None) @@ -32,7 +35,8 @@ def get_colorspace(**kwarg): assert any([host, cls]), log.error("aport.templates.get_colorspace():" "Missing mandatory kwargs `host`, `cls`") - aport_colorspace = getattr(pype.Colorspace, str(host), None) + presets = config.get_init_presets() + aport_colorspace = getattr(presets["colorspace"], str(host), None) aport_colorspace_node = getattr(aport_colorspace, str(cls), None) if preset: aport_colorspace_node = getattr(aport_colorspace_node, str(preset), None) diff --git a/pype/nuke/__init__.py b/pype/nuke/__init__.py index 8a6df23e01..b64f728771 100644 --- a/pype/nuke/__init__.py +++ b/pype/nuke/__init__.py @@ -132,9 +132,6 @@ def install(): menu.install() - # load data from templates - api.load_data_from_templates() - # Workfiles. 
launch_workfiles = os.environ.get("WORKFILES_STARTUP") @@ -156,9 +153,6 @@ def uninstall(): pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled) - # reset data from templates - api.reset_data_from_templates() - def on_pyblish_instance_toggled(instance, old_value, new_value): """Toggle node passthrough states on instance toggles.""" diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 4d96e6b772..a4592b2ced 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -7,8 +7,9 @@ import avalon.nuke import pype.api as pype import nuke from .templates import ( - get_dataflow, - get_colorspace + get_colorspace_preset, + get_node_dataflow_preset, + get_node_colorspace_preset ) from pypeapp import Logger @@ -129,8 +130,8 @@ def get_render_path(node): "preset": data['avalon']['families'] } - nuke_dataflow_writes = get_dataflow(**data_preset) - nuke_colorspace_writes = get_colorspace(**data_preset) + nuke_dataflow_writes = get_node_dataflow_preset(**data_preset) + nuke_colorspace_writes = get_node_colorspace_preset(**data_preset) application = lib.get_application(os.environ["AVALON_APP_NAME"]) data.update({ @@ -180,8 +181,8 @@ def script_name(): def create_write_node(name, data): - nuke_dataflow_writes = get_dataflow(**data) - nuke_colorspace_writes = get_colorspace(**data) + nuke_dataflow_writes = get_node_dataflow_preset(**data) + nuke_colorspace_writes = get_node_colorspace_preset(**data) application = lib.get_application(os.environ["AVALON_APP_NAME"]) try: @@ -319,9 +320,8 @@ def set_writes_colorspace(write_dict): def set_colorspace(): - from pype import api as pype - nuke_colorspace = pype.Colorspace.get("nuke", None) + nuke_colorspace = get_colorspace_preset().get("nuke", None) try: set_root_colorspace(nuke_colorspace["root"]) @@ -611,8 +611,8 @@ def get_write_node_template_attr(node): } # get template data - nuke_dataflow_writes = get_dataflow(**data_preset) - nuke_colorspace_writes = get_colorspace(**data_preset) + nuke_dataflow_writes = get_node_dataflow_preset(**data_preset) + nuke_colorspace_writes = get_node_colorspace_preset(**data_preset) # collecting correct data correct_data = OrderedDict({ diff --git a/pype/nuke/templates.py b/pype/nuke/templates.py index b3de6970d0..797335d982 100644 --- a/pype/nuke/templates.py +++ b/pype/nuke/templates.py @@ -1,21 +1,33 @@ from pype import api as pype +from pypeapp import Anatomy, config + log = pype.Logger().get_logger(__name__, "nuke") def get_anatomy(**kwarg): - return pype.Anatomy + return Anatomy() -def get_dataflow(**kwarg): +def get_dataflow_preset(): + presets = config.get_init_presets() + return presets["dataflow"] + + +def get_colorspace_preset(): + presets = config.get_init_presets() + return presets["colorspace"] + + +def get_node_dataflow_preset(**kwarg): log.info(kwarg) host = kwarg.get("host", "nuke") cls = kwarg.get("class", None) preset = kwarg.get("preset", None) - assert any([host, cls]), log.error("nuke.templates.get_dataflow():" - "Missing mandatory kwargs `host`, `cls`") + assert any([host, cls]), log.error("nuke.templates.get_node_dataflow_preset(): \ + Missing mandatory kwargs `host`, `cls`") - nuke_dataflow = pype.Dataflow.get(str(host), None) + nuke_dataflow = get_dataflow_preset().get(str(host), None) nuke_dataflow_nodes = nuke_dataflow.get('nodes', None) nuke_dataflow_node = nuke_dataflow_nodes.get(str(cls), None) @@ -26,15 +38,15 @@ def get_dataflow(**kwarg): return nuke_dataflow_node -def get_colorspace(**kwarg): +def get_node_colorspace_preset(**kwarg): log.info(kwarg) host = kwarg.get("host", 
"nuke") cls = kwarg.get("class", None) preset = kwarg.get("preset", None) - assert any([host, cls]), log.error("nuke.templates.get_colorspace():" - "Missing mandatory kwargs `host`, `cls`") + assert any([host, cls]), log.error("nuke.templates.get_node_colorspace_preset(): \ + Missing mandatory kwargs `host`, `cls`") - nuke_colorspace = pype.Colorspace.get(str(host), None) + nuke_colorspace = get_colorspace_preset().get(str(host), None) nuke_colorspace_node = nuke_colorspace.get(str(cls), None) if preset: nuke_colorspace_node = nuke_colorspace_node.get(str(preset), None) diff --git a/pype/nukestudio/__init__.py b/pype/nukestudio/__init__.py index ef9f639d49..834455168a 100644 --- a/pype/nukestudio/__init__.py +++ b/pype/nukestudio/__init__.py @@ -55,9 +55,6 @@ def install(config): menu_install() - # load data from templates - api.load_data_from_templates() - # Workfiles. launch_workfiles = os.environ.get("WORKFILES_STARTUP") @@ -95,9 +92,6 @@ def uninstall(): avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) - # reset data from templates - api.reset_data_from_templates() - def _register_events(): avalon.on("taskChanged", _update_menu_task_label) diff --git a/pype/plugins/global/publish/collect_templates.py b/pype/plugins/global/publish/collect_templates.py index b59b20892b..fe48e97c03 100644 --- a/pype/plugins/global/publish/collect_templates.py +++ b/pype/plugins/global/publish/collect_templates.py @@ -12,6 +12,5 @@ class CollectTemplates(pyblish.api.ContextPlugin): label = "Collect Templates" def process(self, context): - # pype.load_data_from_templates() context.data['anatomy'] = Anatomy() self.log.info("Anatomy templates collected...") diff --git a/pype/plugins/launcher/actions/unused/PremierePro.py b/pype/plugins/launcher/actions/unused/PremierePro.py index c20c971d5e..7d94db4044 100644 --- a/pype/plugins/launcher/actions/unused/PremierePro.py +++ b/pype/plugins/launcher/actions/unused/PremierePro.py @@ -44,10 +44,7 @@ class PremierePro(api.Action): env = acre.merge(env, current_env=dict(os.environ)) if not env.get('AVALON_WORKDIR', None): - pype.load_data_from_templates() - os.environ["AVALON_WORKDIR"] = pype.get_workdir_template( - pype.Anatomy) - pype.reset_data_from_templates() + os.environ["AVALON_WORKDIR"] = pype.get_workdir_template() env.update(dict(os.environ)) diff --git a/pype/premiere/__init__.py b/pype/premiere/__init__.py index cc5abe115e..a331ef6514 100644 --- a/pype/premiere/__init__.py +++ b/pype/premiere/__init__.py @@ -96,9 +96,6 @@ def install(): avalon.data["familiesStateDefault"] = False avalon.data["familiesStateToggled"] = family_states - # load data from templates - api.load_data_from_templates() - # synchronize extensions extensions_sync() message(title="pyblish_paths", message=str(reg_paths), level="info") @@ -109,6 +106,3 @@ def uninstall(): pyblish.deregister_plugin_path(PUBLISH_PATH) avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH) avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH) - - # reset data from templates - api.reset_data_from_templates() diff --git a/pype/premiere/templates.py b/pype/premiere/templates.py index e53d529cc1..4f117a431c 100644 --- a/pype/premiere/templates.py +++ b/pype/premiere/templates.py @@ -1,10 +1,12 @@ from pype import api as pype +from pypeapp import Anatomy, config + log = pype.Logger().get_logger(__name__, "premiere") def get_anatomy(**kwarg): - return pype.Anatomy + return Anatomy() def get_dataflow(**kwarg): @@ -15,7 +17,8 @@ def 
get_dataflow(**kwarg): assert any([host, cls]), log.error("premiera.templates.get_dataflow():" "Missing mandatory kwargs `host`, `cls`") - pr_dataflow = getattr(pype.Dataflow, str(host), None) + presets = config.get_init_presets() + pr_dataflow = getattr(presets["dataflow"], str(host), None) pr_dataflow_node = getattr(pr_dataflow.nodes, str(cls), None) if preset: pr_dataflow_node = getattr(pr_dataflow_node, str(preset), None) @@ -32,7 +35,8 @@ def get_colorspace(**kwarg): assert any([host, cls]), log.error("premiera.templates.get_colorspace():" "Missing mandatory kwargs `host`, `cls`") - pr_colorspace = getattr(pype.Colorspace, str(host), None) + presets = config.get_init_presets() + pr_colorspace = getattr(presets["colorspace"], str(host), None) pr_colorspace_node = getattr(pr_colorspace, str(cls), None) if preset: pr_colorspace_node = getattr(pr_colorspace_node, str(preset), None) diff --git a/pype/templates.py b/pype/templates.py index 7d12801a00..5a31e2af45 100644 --- a/pype/templates.py +++ b/pype/templates.py @@ -4,7 +4,7 @@ import sys from avalon import io, api as avalon, lib as avalonlib from . import lib # from pypeapp.api import (Templates, Logger, format) -from pypeapp import Logger, config, Anatomy +from pypeapp import Logger, Anatomy log = Logger().get_logger(__name__, os.getenv("AVALON_APP", "pype-config")) @@ -17,63 +17,6 @@ def set_session(): self.SESSION = avalon.session -def load_data_from_templates(): - """ - Load Presets and Anatomy `contextual` data as singleton object - [info](https://en.wikipedia.org/wiki/Singleton_pattern) - - Returns: - singleton: adding data to sharable object variable - - """ - - from . import api - if not any([ - api.Dataflow, - api.Anatomy, - api.Colorspace - ] - ): - presets = config.get_presets() - anatomy = Anatomy() - - try: - # try if it is not in projects custom directory - # `{PYPE_PROJECT_CONFIGS}/[PROJECT_NAME]/init.json` - # init.json define preset names to be used - p_init = presets["init"] - colorspace = presets["colorspace"][p_init["colorspace"]] - dataflow = presets["dataflow"][p_init["dataflow"]] - except KeyError: - log.warning("No projects custom preset available...") - colorspace = presets["colorspace"]["default"] - dataflow = presets["dataflow"]["default"] - log.info("Presets `colorspace` and `dataflow` loaded from `default`...") - - api.Anatomy = anatomy - api.Dataflow = dataflow - api.Colorspace = colorspace - - log.info("Data from templates were Loaded...") - - -def reset_data_from_templates(): - """ - Clear Templates `contextual` data from singleton - object variable - - Returns: - singleton: clearing data to None - - """ - - from . 
import api - api.Dataflow = None - api.Anatomy = None - api.Colorspace = None - log.info("Data from templates were Unloaded...") - - def get_version_from_path(file): """ Finds version number in file path string @@ -265,7 +208,9 @@ def set_avalon_workdir(project=None, if self.SESSION is None: set_session() - awd = self.SESSION.get("AVALON_WORKDIR", None) or os.getenv("AVALON_WORKDIR", None) + awd = self.SESSION.get("AVALON_WORKDIR", None) or \ + os.getenv("AVALON_WORKDIR", None) + data = get_context_data(project, hierarchy, asset, task) if (not awd) or ("{" not in awd): @@ -280,7 +225,7 @@ def set_avalon_workdir(project=None, def get_workdir_template(data=None): """ - Obtain workdir templated path from api.Anatomy singleton + Obtain workdir templated path from Anatomy() Args: data (dict, optional): basic contextual data @@ -288,12 +233,8 @@ def get_workdir_template(data=None): Returns: string: template path """ - from . import api - """ Installs singleton data """ - load_data_from_templates() - - anatomy = api.Anatomy + anatomy = Anatomy() anatomy_filled = anatomy.format(data or get_context_data()) try: From d86b81a77c9af2e6e6e6b0ae4ba96b02f47d0dd9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Jul 2019 15:11:10 +0200 Subject: [PATCH 05/69] added condition that timer must be running in ftrack to skip starting it --- pype/ftrack/tray/ftrack_module.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py index dde7d1b3c4..9c7438c294 100644 --- a/pype/ftrack/tray/ftrack_module.py +++ b/pype/ftrack/tray/ftrack_module.py @@ -309,7 +309,12 @@ class FtrackEventsThread(QtCore.QThread): def ftrack_start_timer(self, input_data): if self.user is None: return + + actual_timer = self.timer_session.query( + 'Timer where user_id = "{0}"'.format(self.user['id']) + ).first() if ( + actual_timer is not None and input_data['task_name'] == self.last_task['name'] and input_data['hierarchy'][-1] == self.last_task['parent']['name'] ): From b829c861ae80a4a6033c3db2474f456bfab5ac64 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Jul 2019 15:11:33 +0200 Subject: [PATCH 06/69] if timer is not running then stopping is skipped --- pype/ftrack/tray/ftrack_module.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py index 9c7438c294..ca93cc8527 100644 --- a/pype/ftrack/tray/ftrack_module.py +++ b/pype/ftrack/tray/ftrack_module.py @@ -299,12 +299,14 @@ class FtrackEventsThread(QtCore.QThread): self.signal_timer_stopped.emit() def ftrack_stop_timer(self): - try: + actual_timer = self.timer_session.query( + 'Timer where user_id = "{0}"'.format(self.user['id']) + ).first() + + if actual_timer is not None: self.user.stop_timer() self.timer_session.commit() self.signal_timer_stopped.emit() - except Exception as e: - log.debug("Timer stop had issues: {}".format(e)) def ftrack_start_timer(self, input_data): if self.user is None: From 35550bec81b4f1969ef22c356463b970f7f0d9b4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Jul 2019 15:11:54 +0200 Subject: [PATCH 07/69] added key to data in ftrack so query is successful --- pype/ftrack/tray/ftrack_module.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/ftrack/tray/ftrack_module.py b/pype/ftrack/tray/ftrack_module.py index ca93cc8527..adcce9c2b1 100644 --- a/pype/ftrack/tray/ftrack_module.py +++ b/pype/ftrack/tray/ftrack_module.py @@ -321,6 +321,9 @@ class 
FtrackEventsThread(QtCore.QThread): input_data['hierarchy'][-1] == self.last_task['parent']['name'] ): return + + input_data['entity_name'] = input_data['hierarchy'][-1] + task_query = ( 'Task where name is "{task_name}"' ' and parent.name is "{entity_name}"' From 4924ddeab88b906ab4e38a6f9ab56ba46317e749 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 10 Jul 2019 15:12:17 +0200 Subject: [PATCH 08/69] times in presets support float numbers now --- pype/services/timers_manager/timers_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pype/services/timers_manager/timers_manager.py b/pype/services/timers_manager/timers_manager.py index 6f10a0ec68..e1980d3d90 100644 --- a/pype/services/timers_manager/timers_manager.py +++ b/pype/services/timers_manager/timers_manager.py @@ -37,8 +37,8 @@ class TimersManager(metaclass=Singleton): def set_signal_times(self): try: timer_info = get_presets()['services']['timers_manager']['timer'] - full_time = int(timer_info['full_time'])*60 - message_time = int(timer_info['message_time'])*60 + full_time = int(float(timer_info['full_time'])*60) + message_time = int(float(timer_info['message_time'])*60) self.time_show_message = full_time - message_time self.time_stop_timer = full_time return True From 04bc8a37083ade029707b1763c636565dfb5e30f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 10 Jul 2019 16:10:46 +0200 Subject: [PATCH 09/69] feat(nks): adding exception for environ.variable TAG_ASSETBUILD_STARTUP --- pype/nukestudio/tags.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/pype/nukestudio/tags.py b/pype/nukestudio/tags.py index 4743178933..d9574bdf2b 100644 --- a/pype/nukestudio/tags.py +++ b/pype/nukestudio/tags.py @@ -1,4 +1,5 @@ import re +import os from pypeapp import ( config, @@ -77,19 +78,20 @@ def add_tags_from_presets(): # Get project assets. Currently Ftrack specific to differentiate between # asset builds and shots. - nks_pres_tags["[AssetBuilds]"] = {} - for asset in io.find({"type": "asset"}): - if asset["data"]["entityType"] == "AssetBuild": - nks_pres_tags["[AssetBuilds]"][asset["name"]] = { - "editable": "1", - "note": "", - "icon": { - "path": "icons:TagActor.png" - }, - "metadata": { - "family": "assetbuild" + if int(os.getenv("TAG_ASSETBUILD_STARTUP", 0)) is 1: + nks_pres_tags["[AssetBuilds]"] = {} + for asset in io.find({"type": "asset"}): + if asset["data"]["entityType"] == "AssetBuild": + nks_pres_tags["[AssetBuilds]"][asset["name"]] = { + "editable": "1", + "note": "", + "icon": { + "path": "icons:TagActor.png" + }, + "metadata": { + "family": "assetbuild" + } } - } # get project and root bin object project = hiero.core.projects()[-1] From 6dc33af0750d06465fb734edb554abc5a58f9e86 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 10 Jul 2019 19:28:46 +0200 Subject: [PATCH 10/69] add shelf creating class and usersetup that tries to build it from presets --- pype/maya/lib.py | 86 +++++++++++++++++++++++++++++++++++++++++ setup/maya/userSetup.py | 15 +++++++ 2 files changed, 101 insertions(+) create mode 100644 setup/maya/userSetup.py diff --git a/pype/maya/lib.py b/pype/maya/lib.py index fce1772b8e..ee2ef57e34 100644 --- a/pype/maya/lib.py +++ b/pype/maya/lib.py @@ -2308,3 +2308,89 @@ def get_attr_in_layer(attr, layer): return value return cmds.getAttr(attr) + + +def _null(*args): + pass + + +class shelf(): + '''A simple class to build shelves in maya. 
The build method populates the shelf
+    from the given preset; derived classes can override it to add custom
+    shelf elements. By default it creates an empty shelf called
+    "customShelf".'''
+
+    ###########################################################################
+    '''This is an example shelf.'''
+    # class customShelf(shelf):
+    #     def build(self):
+    #         self.addButton(label="button1")
+    #         self.addButton("button2")
+    #         self.addButton("popup")
+    #         p = cmds.popupMenu(b=1)
+    #         self.addMenuItem(p, "popupMenuItem1")
+    #         self.addMenuItem(p, "popupMenuItem2")
+    #         sub = self.addSubMenu(p, "subMenuLevel1")
+    #         self.addMenuItem(sub, "subMenuLevel1Item1")
+    #         sub2 = self.addSubMenu(sub, "subMenuLevel2")
+    #         self.addMenuItem(sub2, "subMenuLevel2Item1")
+    #         self.addMenuItem(sub2, "subMenuLevel2Item2")
+    #         self.addMenuItem(sub, "subMenuLevel1Item2")
+    #         self.addMenuItem(p, "popupMenuItem3")
+    #         self.addButton("button3")
+    # customShelf()
+    ###########################################################################
+
+    def __init__(self, name="customShelf", iconPath="", preset=None):
+        self.name = name
+
+        self.iconPath = iconPath
+
+        self.labelBackground = (0, 0, 0, 0)
+        self.labelColour = (.9, .9, .9)
+
+        # Avoid the shared mutable default argument pitfall.
+        self.preset = preset or {}
+
+        self._cleanOldShelf()
+        cmds.setParent(self.name)
+        self.build()
+
+    def build(self):
+        '''This method can be overwritten in derived classes to build custom
+        shelf elements. By default it builds the shelf from the preset.'''
+        for item in self.preset['items']:
+            if not item.get('command'):
+                # _null is a module level function, not a method.
+                item['command'] = _null
+            if item['type'] == 'button':
+                self.addButton(item['name'], command=item['command'])
+            if item['type'] == 'menuItem':
+                self.addMenuItem(
+                    item['parent'], item['name'], command=item['command']
+                )
+            if item['type'] == 'subMenu':
+                # Sub menus must go through addSubMenu so a popup parent
+                # is returned for nested items.
+                self.addSubMenu(item['parent'], item['name'])
+
+    def addButton(self, label, icon="commandButton.png", command=_null,
+                  doubleCommand=_null):
+        '''Adds a shelf button with the specified label, command,
+        double click command and image.'''
+        cmds.setParent(self.name)
+        if icon:
+            icon = self.iconPath + icon
+        cmds.shelfButton(width=37, height=37, image=icon, l=label,
+                         command=command, dcc=doubleCommand,
+                         imageOverlayLabel=label, olb=self.labelBackground,
+                         olc=self.labelColour)
+
+    def addMenuItem(self, parent, label, command=_null, icon=""):
+        '''Adds a menu item with the specified label, command and image to
+        the given parent popup menu.'''
+        if icon:
+            icon = self.iconPath + icon
+        return cmds.menuItem(p=parent, l=label, c=command, i=icon)
+
+    def addSubMenu(self, parent, label, icon=None):
+        '''Adds a sub menu item with the specified label and icon to the
+        specified parent popup menu.'''
+        if icon:
+            icon = self.iconPath + icon
+        return cmds.menuItem(p=parent, l=label, i=icon, subMenu=1)
+
+    def _cleanOldShelf(self):
+        '''Checks if the shelf exists and empties it if it does or creates
+        it if it does not.'''
+        if cmds.shelfLayout(self.name, ex=1):
+            if cmds.shelfLayout(self.name, q=1, ca=1):
+                for each in cmds.shelfLayout(self.name, q=1, ca=1):
+                    cmds.deleteUI(each)
+        else:
+            cmds.shelfLayout(self.name, p="ShelfLayout")
diff --git a/setup/maya/userSetup.py b/setup/maya/userSetup.py
new file mode 100644
index 0000000000..bb346397c5
--- /dev/null
+++ b/setup/maya/userSetup.py
@@ -0,0 +1,15 @@
+import os
+import sys
+from pypeapp import config
+from pype.maya import lib
+reload(lib)
+
+presets = config.get_presets()
+shelf_preset = presets['maya']['project_shelf']
+project = os.environ["AVALON_PROJECT"]
+
+modules = {}
+for k, v in
shelf_preset['imports'].items(): + sys.modules[k] = __import__(v, fromlist=[project]) + +projectShelf = lib.shelf(name=shelf_preset['name'], preset=shelf_preset) From 7fd9500ec84ead5c63fb0a3907a1e89941418b06 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 10 Jul 2019 19:30:02 +0200 Subject: [PATCH 11/69] only build project shelf if shelf config exists in presets --- setup/maya/userSetup.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/setup/maya/userSetup.py b/setup/maya/userSetup.py index bb346397c5..4686e1c7a9 100644 --- a/setup/maya/userSetup.py +++ b/setup/maya/userSetup.py @@ -2,14 +2,14 @@ import os import sys from pypeapp import config from pype.maya import lib -reload(lib) presets = config.get_presets() -shelf_preset = presets['maya']['project_shelf'] -project = os.environ["AVALON_PROJECT"] +shelf_preset = presets['maya'].get('project_shelf') +if shelf_preset: + project = os.environ["AVALON_PROJECT"] -modules = {} -for k, v in shelf_preset['imports'].items(): - sys.modules[k] = __import__(v, fromlist=[project]) + modules = {} + for k, v in shelf_preset['imports'].items(): + sys.modules[k] = __import__(v, fromlist=[project]) -projectShelf = lib.shelf(name=shelf_preset['name'], preset=shelf_preset) + projectShelf = lib.shelf(name=shelf_preset['name'], preset=shelf_preset) From daf295517412081679ebc6e64d486d4b87f132b9 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 10 Jul 2019 20:11:00 +0200 Subject: [PATCH 12/69] run shelf creation as deferred to make sure shelfLayout exists --- setup/maya/userSetup.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/setup/maya/userSetup.py b/setup/maya/userSetup.py index 4686e1c7a9..7b06fe7f33 100644 --- a/setup/maya/userSetup.py +++ b/setup/maya/userSetup.py @@ -2,14 +2,17 @@ import os import sys from pypeapp import config from pype.maya import lib +from maya import cmds -presets = config.get_presets() -shelf_preset = presets['maya'].get('project_shelf') -if shelf_preset: - project = os.environ["AVALON_PROJECT"] +def build_shelf(): + presets = config.get_presets() + shelf_preset = presets['maya'].get('project_shelf') + if shelf_preset: + project = os.environ["AVALON_PROJECT"] - modules = {} - for k, v in shelf_preset['imports'].items(): - sys.modules[k] = __import__(v, fromlist=[project]) + for k, v in shelf_preset['imports'].items(): + sys.modules[k] = __import__(v, fromlist=[project]) - projectShelf = lib.shelf(name=shelf_preset['name'], preset=shelf_preset) + lib.shelf(name=shelf_preset['name'], preset=shelf_preset) + +cmds.evalDeferred("build_shelf()") From 2387429eeef92befa8e1b5b6c131ce4d4d4b57b1 Mon Sep 17 00:00:00 2001 From: Toke Jepsen Date: Wed, 10 Jul 2019 20:21:23 +0100 Subject: [PATCH 13/69] Check for preset existence. --- pype/plugins/maya/publish/validate_attributes.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pype/plugins/maya/publish/validate_attributes.py b/pype/plugins/maya/publish/validate_attributes.py index ae9bb29893..8b0f14b8b2 100644 --- a/pype/plugins/maya/publish/validate_attributes.py +++ b/pype/plugins/maya/publish/validate_attributes.py @@ -22,6 +22,10 @@ class ValidateAttributes(pyblish.api.ContextPlugin): actions = [pype.api.RepairContextAction] def process(self, context): + # Check for preset existence. 
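+        # A missing preset means there is nothing to validate, so the
+        # plugin exits quietly instead of failing the publish.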
+ if not context.data["presets"]["maya"].get("attributes"): + return + invalid = self.get_invalid(context, compute=True) if invalid: raise RuntimeError( From 8619d8838bb246288ab56ddbae13277278a7bb6b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 11 Jul 2019 13:57:09 +0200 Subject: [PATCH 14/69] fix(nuke): was not adding virtual_env to pythonpath --- pype/nuke/lib.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index 19781d6bf4..d9ab35b1a1 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -17,14 +17,6 @@ log = Logger().get_logger(__name__, "nuke") self = sys.modules[__name__] self._project = None - -for path in sys.path: - log.info(os.path.normpath(path)) - if "C:\\Users\\Public" in os.path.normpath(path): - log.info("_ removing from sys.path: `{}`".format(path)) - sys.path.remove(path) - - def onScriptLoad(): if nuke.env['LINUX']: nuke.tcl('load ffmpegReader') From e49aaab9f480ed6351cd7c917e442b66aa131bc3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 11 Jul 2019 13:58:47 +0200 Subject: [PATCH 15/69] fix(nuke): nukescript to workfile fix, handles_start/end workflow improvment --- pype/nuke/lib.py | 18 +++- .../global/_publish_unused/extract_review.py | 92 +++++++++++++++++++ pype/plugins/global/publish/integrate_new.py | 2 - ...{collect_script.py => collect_workfile.py} | 24 +++-- pype/plugins/nuke/publish/extract_script.py | 4 +- pype/plugins/nuke/publish/validate_script.py | 24 +++-- 6 files changed, 136 insertions(+), 28 deletions(-) create mode 100644 pype/plugins/global/_publish_unused/extract_review.py rename pype/plugins/nuke/publish/{collect_script.py => collect_workfile.py} (79%) diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py index d9ab35b1a1..4122902212 100644 --- a/pype/nuke/lib.py +++ b/pype/nuke/lib.py @@ -342,8 +342,9 @@ def set_colorspace(): def reset_frame_range_handles(): """Set frame range to current asset""" + root = nuke.root() fps = float(api.Session.get("AVALON_FPS", 25)) - nuke.root()["fps"].setValue(fps) + root["fps"].setValue(fps) name = api.Session["AVALON_ASSET"] asset = io.find_one({"name": name, "type": "asset"}) @@ -379,8 +380,8 @@ def reset_frame_range_handles(): edit_in = int(asset["data"]["fstart"]) - handle_start edit_out = int(asset["data"]["fend"]) + handle_end - nuke.root()["first_frame"].setValue(edit_in) - nuke.root()["last_frame"].setValue(edit_out) + root["first_frame"].setValue(edit_in) + root["last_frame"].setValue(edit_out) # setting active viewers nuke.frame(int(asset["data"]["fstart"])) @@ -400,6 +401,13 @@ def reset_frame_range_handles(): vv['frame_range'].setValue(range) vv['frame_range_lock'].setValue(True) + # adding handle_start/end to root avalon knob + if not avalon.nuke.set_avalon_knob_data(root, { + "handle_start": handle_start, + "handle_end": handle_end + }): + log.warning("Cannot set Avalon knob to Root node!") + def get_avalon_knob_data(node): import toml @@ -552,8 +560,8 @@ def get_hierarchical_attr(entity, attr, default=None): parent_id = entity['parent'] if ( - entity['type'].lower() == 'asset' and - entity.get('data', {}).get('visualParent') + entity['type'].lower() == 'asset' + and entity.get('data', {}).get('visualParent') ): parent_id = entity['data']['visualParent'] diff --git a/pype/plugins/global/_publish_unused/extract_review.py b/pype/plugins/global/_publish_unused/extract_review.py new file mode 100644 index 0000000000..885db1cfc9 --- /dev/null +++ b/pype/plugins/global/_publish_unused/extract_review.py @@ -0,0 +1,92 @@ +# import os +# import 
pyblish.api
+# import subprocess
+# from pype.vendor import clique
+# from pypeapp import config
+#
+#
+# class ExtractReview(pyblish.api.InstancePlugin):
+#     """Resolve any dependency issues
+#
+#     This plug-in resolves any paths which, if not updated might break
+#     the published file.
+#
+#     The order of families is important, when working with lookdev you want to
+#     first publish the texture, update the texture paths in the nodes and then
+#     publish the shading network. Same goes for file dependent assets.
+#     """
+#
+#     label = "Extract Review"
+#     order = pyblish.api.ExtractorOrder
+#     # families = ["imagesequence", "render", "write", "source"]
+#     # hosts = ["shell"]
+#
+#     def process(self, instance):
+#         # adding plugin attributes from presets
+#         publish_presets = config.get_presets()["plugins"]["global"]["publish"]
+#         plugin_attrs = publish_presets[self.__class__.__name__]
+#
+#
+#         fps = instance.data.get("fps")
+#         start = instance.data.get("startFrame")
+#         stagingdir = os.path.normpath(instance.data.get("stagingDir"))
+#
+#         collected_frames = os.listdir(stagingdir)
+#         collections, remainder = clique.assemble(collected_frames)
+#
+#         full_input_path = os.path.join(
+#             stagingdir, collections[0].format('{head}{padding}{tail}')
+#         )
+#         self.log.info("input {}".format(full_input_path))
+#
+#         filename = collections[0].format('{head}')
+#         if not filename.endswith('.'):
+#             filename += "."
+#         movFile = filename + "mov"
+#         full_output_path = os.path.join(stagingdir, movFile)
+#
+#         self.log.info("output {}".format(full_output_path))
+#
+#         config_data = instance.context.data['output_repre_config']
+#
+#         proj_name = os.environ.get('AVALON_PROJECT', '__default__')
+#         profile = config_data.get(proj_name, config_data['__default__'])
+#
+#         input_args = []
+#         # overrides output file
+#         input_args.append("-y")
+#         # preset's input data
+#         input_args.extend(profile.get('input', []))
+#         # necessary input data
+#         input_args.append("-start_number {}".format(start))
+#         input_args.append("-i {}".format(full_input_path))
+#         input_args.append("-framerate {}".format(fps))
+#
+#         output_args = []
+#         # preset's output data
+#         output_args.extend(profile.get('output', []))
+#         # output filename
+#         output_args.append(full_output_path)
+#         mov_args = [
+#             "ffmpeg",
+#             " ".join(input_args),
+#             " ".join(output_args)
+#         ]
+#         subprocess_mov = " ".join(mov_args)
+#         sub_proc = subprocess.Popen(subprocess_mov)
+#         sub_proc.wait()
+#
+#         if not os.path.isfile(full_output_path):
+#             raise RuntimeError("Quicktime wasn't created successfully")
+#
+#         if "representations" not in instance.data:
+#             instance.data["representations"] = []
+#
+#         representation = {
+#             'name': 'mov',
+#             'ext': 'mov',
+#             'files': movFile,
+#             "stagingDir": stagingdir,
+#             "preview": True
+#         }
+#         instance.data["representations"].append(representation)
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 2bb5b3bd60..ec0cca10d6 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -4,7 +4,6 @@ import logging
 import speedcopy
 import clique
 import traceback
-import sys
 import errno
 import pyblish.api
 from avalon import api, io
@@ -59,7 +58,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                 "render",
                 "imagesequence",
                 "review",
-                "nukescript",
                 "render",
                 "rendersetup",
                 "rig",
diff --git a/pype/plugins/nuke/publish/collect_script.py b/pype/plugins/nuke/publish/collect_workfile.py
similarity index 79%
rename from pype/plugins/nuke/publish/collect_script.py
rename to
pype/plugins/nuke/publish/collect_workfile.py index d2585e4421..dc8472e4c2 100644 --- a/pype/plugins/nuke/publish/collect_script.py +++ b/pype/plugins/nuke/publish/collect_workfile.py @@ -1,26 +1,28 @@ -from avalon import api, io import nuke import pyblish.api import os -from avalon.nuke.lib import ( - add_publish_knob, - add_avalon_tab_knob + +from avalon.nuke import ( + get_avalon_knob_data, + add_publish_knob ) -class CollectScript(pyblish.api.ContextPlugin): +class CollectWorkfile(pyblish.api.ContextPlugin): """Publish current script version.""" order = pyblish.api.CollectorOrder + 0.1 - label = "Collect Script to publish" + label = "Collect Workfile" hosts = ['nuke'] def process(self, context): root = nuke.root() - add_avalon_tab_knob(root) + + knob_data = get_avalon_knob_data(root) + add_publish_knob(root) - family = "nukescript" + family = "workfile" # creating instances per write node file_path = root['name'].value() base_name = os.path.basename(file_path) @@ -30,6 +32,9 @@ class CollectScript(pyblish.api.ContextPlugin): first_frame = int(root["first_frame"].getValue()) last_frame = int(root["last_frame"].getValue()) + handle_start = int(knob_data["handle_start"]) + handle_end = int(knob_data["handle_end"]) + # Get format format = root['format'].value() resolution_width = format.width() @@ -53,7 +58,8 @@ class CollectScript(pyblish.api.ContextPlugin): "publish": root.knob('publish').value(), "family": family, "representation": "nk", - "handles": context.data['handles'], + "handle_start": handle_start, + "handle_end": handle_end, "step": 1, "fps": int(root['fps'].value()), }) diff --git a/pype/plugins/nuke/publish/extract_script.py b/pype/plugins/nuke/publish/extract_script.py index b54fa548a5..d0be98b93e 100644 --- a/pype/plugins/nuke/publish/extract_script.py +++ b/pype/plugins/nuke/publish/extract_script.py @@ -12,7 +12,7 @@ class ExtractScript(pype.api.Extractor): order = pyblish.api.ExtractorOrder - 0.05 optional = True hosts = ['nuke'] - families = ["nukescript"] + families = ["workfile"] def process(self, instance): self.log.debug("instance extracting: {}".format(instance.data)) @@ -28,7 +28,7 @@ class ExtractScript(pype.api.Extractor): if "representations" not in instance.data: instance.data["representations"] = [] - + representation = { 'name': 'nk', 'ext': '.nk', diff --git a/pype/plugins/nuke/publish/validate_script.py b/pype/plugins/nuke/publish/validate_script.py index 6cb42439b7..f79d4ab862 100644 --- a/pype/plugins/nuke/publish/validate_script.py +++ b/pype/plugins/nuke/publish/validate_script.py @@ -7,7 +7,7 @@ class ValidateScript(pyblish.api.InstancePlugin): """ Validates file output. 
""" order = pyblish.api.ValidatorOrder + 0.1 - families = ["nukescript"] + families = ["workfile"] label = "Check script settings" hosts = ["nuke"] @@ -24,11 +24,11 @@ class ValidateScript(pyblish.api.InstancePlugin): # These attributes will be checked attributes = [ "fps", "fstart", "fend", - "resolution_width", "resolution_height", "pixel_aspect", "handles" + "resolution_width", "resolution_height", "pixel_aspect", "handle_start", "handle_end" ] # Value of these attributes can be found on parents - hierarchical_attributes = ["fps", "resolution_width", "resolution_height", "pixel_aspect", "handles"] + hierarchical_attributes = ["fps", "resolution_width", "resolution_height", "pixel_aspect", "handle_start", "handle_end"] missing_attributes = [] asset_attributes = {} @@ -58,17 +58,21 @@ class ValidateScript(pyblish.api.InstancePlugin): raise ValueError(message) # Get handles from database, Default is 0 (if not found) - handles = 0 - if "handles" in asset_attributes: - handles = asset_attributes["handles"] + handle_start = 0 + handle_end = 0 + if "handle_start" in asset_attributes: + handle_start = asset_attributes["handle_start"] + if "handle_end" in asset_attributes: + handle_end = asset_attributes["handle_end"] # Set frame range with handles - asset_attributes["fstart"] -= handles - asset_attributes["fend"] += handles + asset_attributes["fstart"] -= handle_start + asset_attributes["fend"] += handle_end # Get values from nukescript script_attributes = { - "handles": handles, + "handle_start": instance_data["handle_start"], + "handle_end": instance_data["handle_end"], "fps": instance_data["fps"], "fstart": instance_data["startFrame"], "fend": instance_data["endFrame"], @@ -87,7 +91,7 @@ class ValidateScript(pyblish.api.InstancePlugin): # Raise error if not matching if len(not_matching) > 0: - msg = "Attributes '{}' aro not set correctly" + msg = "Attributes '{}' are not set correctly" # Alert user that handles are set if Frame start/end not match if ( (("fstart" in not_matching) or ("fend" in not_matching)) and From 933b371bccf51f655394122db72409bf0a21c147 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Jul 2019 14:12:40 +0200 Subject: [PATCH 16/69] (hotfix) removed studio specific actions --- .../actions/action_sync_asset_versions.py | 705 ------------------ pype/ftrack/actions/action_sync_notes.py | 230 ------ 2 files changed, 935 deletions(-) delete mode 100644 pype/ftrack/actions/action_sync_asset_versions.py delete mode 100644 pype/ftrack/actions/action_sync_notes.py diff --git a/pype/ftrack/actions/action_sync_asset_versions.py b/pype/ftrack/actions/action_sync_asset_versions.py deleted file mode 100644 index 48c7a7d632..0000000000 --- a/pype/ftrack/actions/action_sync_asset_versions.py +++ /dev/null @@ -1,705 +0,0 @@ -import os -import sys -import argparse -import json -import logging -import collections -import tempfile -import requests - -from pype.vendor import ftrack_api -from pype.ftrack import BaseAction -from pypeapp import config - - -class SyncAssetVersions(BaseAction): - - #: Action identifier. - identifier = 'sync.asset.versions' - #: Action label. - label = 'Sync Asset Versions' - #: Action description. 
- description = 'Synchronize Asset versions to another Ftrack' - #: roles that are allowed to register this action - role_list = ['Administrator', 'Project Manager', 'Pypeclub'] - - # ENTER VALUES HERE (change values based on keys) - # Custom attribute storing ftrack id of destination server - id_key_src = 'fridge_ftrackID' - # Custom attribute storing ftrack id of source server - id_key_dst = 'kredenc_ftrackID' - - components_name = ( - 'ftrackreview-mp4_src', - 'ftrackreview-image_src', - 'thumbnail_src' - ) - - # comp name mapping - comp_name_mapping = { - 'ftrackreview-mp4_src': 'ftrackreview-mp4', - 'ftrackreview-image_src': 'ftrackreview-image', - 'thumbnail_src': 'thumbnail' - } - - comp_location_mapping = { - 'ftrack.server': [ - 'ftrackreview-mp4', - 'ftrackreview-mp4_src', - 'ftrackreview-image', - 'ftrackreview-image_src', - 'thumbnail', - 'thumbnail_src' - ], - 'ftrack.unmanaged': [] - } - - def discover(self, session, entities, event): - ''' Validation ''' - for entity in entities: - if entity.entity_type.lower() != 'assetversion': - return False - - return True - - def launch(self, session, entities, event): - self.dst_ftrack_locations = {} - self.interface_messages = {} - # stop if custom attribute for storing second ftrack id is missing - if self.id_key_src not in entities[0]['custom_attributes']: - msg = ( - 'Custom attribute "{}" does not exist on AssetVersion' - ).format(self.id_key_src) - self.log.error(msg) - - return { - 'success': False, - 'message': msg - } - - source_credentials = config.get_presets()['ftrack'].get( - 'partnership_ftrack_cred', {} - ) - self.dst_session = ftrack_api.Session( - server_url=source_credentials.get('server_url'), - api_key=source_credentials.get('api_key'), - api_user=source_credentials.get('api_user'), - auto_connect_event_hub=True - ) - - # NOTE Shared session has issues with location definition - self.session_for_components = ftrack_api.Session( - server_url=session.server_url, - api_key=session.api_key, - api_user=session.api_user, - auto_connect_event_hub=True - ) - - for entity in entities: - asset = entity['asset'] - parent = asset['parent'] - - # Check if asset version already has entity on destinaition Ftrack - # TODO ? skip if yes - # ? show to user - with interface/message/note - # + or ask if user want to override found version ???? - dst_ftrack_id = entity['custom_attributes'].get(self.id_key_src) - if dst_ftrack_id: - dst_ftrack_ent = self.dst_session.query( - 'AssetVersion where id = "{}"'.format(dst_ftrack_id) - ).first() - - if dst_ftrack_ent: - self.log.warning( - '"{}" - Already exists. 
Skipping'.format(asset['name']) - ) - continue - - # Find parent where Version will be uploaded - dst_parent_id = parent['custom_attributes'].get(self.id_key_src) - if not dst_parent_id: - self.log.warning(( - 'Entity: "{}" don\'t have stored Custom attribute "{}"' - ).format(parent['name'], self.id_key_src)) - continue - - dst_parent_entity = self.dst_session.query( - 'TypedContext where id = "{}"'.format(dst_parent_id) - ).first() - - if not dst_parent_entity: - msg = ( - 'Didn\'t found mirrored entity in destination Ftrack' - ' for "{}"' - ).format(parent['name']) - self.log.warning(msg) - continue - - component_list = self.prepare_data(entity['id']) - id_stored = False - for comp_data in component_list: - dst_asset_ver_id = self.asset_version_creation( - dst_parent_entity, comp_data, entity - ) - - if id_stored: - continue - entity['custom_attributes'][self.id_key_src] = dst_asset_ver_id - session.commit() - id_stored = True - - self.dst_session.close() - self.session_for_components.close() - - self.dst_session = None - self.session_for_components = None - - return True - - def prepare_data(self, asset_version_id): - components_list = [] - asset_version = self.session_for_components.query( - 'AssetVersion where id is "{}"'.format(asset_version_id) - ).one() - # Asset data - asset_type = asset_version['asset']['type'].get('short', 'upload') - assettype_data = {'short': asset_type} - - asset_data = {'name': asset_version['asset']['name']} - - # Asset version data - assetversion_data = {'version': asset_version['version']} - - # Component data - components_of_interest = {} - for name in self.components_name: - components_of_interest[name] = False - - for key in components_of_interest: - # Find component by name - for comp in asset_version['components']: - if comp['name'] == key: - components_of_interest[key] = True - break - # NOTE if component was found then continue - if components_of_interest[key]: - continue - - # Look for alternative component name set in mapping - new_key = None - if key in self.comp_name_mapping: - new_key = self.comp_name_mapping[key] - - if not new_key: - self.log.warning( - 'Asset version do not have components "{}" or "{}"'.format( - key, new_key - ) - ) - continue - - components_of_interest[new_key] = components_of_interest.pop(key) - - # Try to look for alternative name - for comp in asset_version['components']: - if comp['name'] == new_key: - components_of_interest[new_key] = True - break - - # Check if at least one component is transferable - have_comp_to_transfer = False - for value in components_of_interest.values(): - if value: - have_comp_to_transfer = True - break - - if not have_comp_to_transfer: - return components_list - - thumbnail_id = asset_version.get('thumbnail_id') - temp_folder = tempfile.mkdtemp('components') - - # Data for transfer components - for comp in asset_version['components']: - comp_name = comp['name'] - - if comp_name not in components_of_interest: - continue - - if not components_of_interest[comp_name]: - continue - - if comp_name in self.comp_name_mapping: - comp_name = self.comp_name_mapping[comp_name] - - is_thumbnail = False - for _comp in asset_version['components']: - if _comp['name'] == comp_name: - if _comp['id'] == thumbnail_id: - is_thumbnail = True - break - - locatiom_name = comp['component_locations'][0]['location']['name'] - location = self.session_for_components.query( - 'Location where name is "{}"'.format(locatiom_name) - ).one() - file_path = None - if locatiom_name == 'ftrack.unmanaged': - file_path = '' - try: 
- file_path = location.get_filesystem_path(comp) - except Exception: - pass - - file_path = os.path.normpath(file_path) - if not os.path.exists(file_path): - file_path = comp['component_locations'][0][ - 'resource_identifier' - ] - - file_path = os.path.normpath(file_path) - if not os.path.exists(file_path): - self.log.warning( - 'In component: "{}" can\'t access filepath: "{}"'.format( - comp['name'], file_path - ) - ) - continue - - elif locatiom_name == 'ftrack.server': - download_url = location.get_url(comp) - - file_name = '{}{}{}'.format( - asset_version['asset']['name'], - comp_name, - comp['file_type'] - ) - file_path = os.path.sep.join([temp_folder, file_name]) - - self.download_file(download_url, file_path) - - if not file_path: - self.log.warning( - 'In component: "{}" is invalid file path'.format( - comp['name'] - ) - ) - continue - - # Default location name value is ftrack.unmanaged - location_name = 'ftrack.unmanaged' - - # Try to find location where component will be created - for name, keys in self.comp_location_mapping.items(): - if comp_name in keys: - location_name = name - break - dst_location = self.get_dst_location(location_name) - - # Metadata - metadata = {} - metadata.update(comp.get('metadata', {})) - - component_data = { - "name": comp_name, - "metadata": metadata - } - - data = { - 'assettype_data': assettype_data, - 'asset_data': asset_data, - 'assetversion_data': assetversion_data, - 'component_data': component_data, - 'component_overwrite': False, - 'thumbnail': is_thumbnail, - 'component_location': dst_location, - 'component_path': file_path - } - - components_list.append(data) - - return components_list - - def asset_version_creation(self, dst_parent_entity, data, src_entity): - assettype_data = data['assettype_data'] - self.log.debug("data: {}".format(data)) - - assettype_entity = self.dst_session.query( - self.query("AssetType", assettype_data) - ).first() - - # Create a new entity if none exits. - if not assettype_entity: - assettype_entity = self.dst_session.create( - "AssetType", assettype_data - ) - self.dst_session.commit() - self.log.debug( - "Created new AssetType with data: ".format(assettype_data) - ) - - # Asset - # Get existing entity. - asset_data = { - "name": src_entity['asset']['name'], - "type": assettype_entity, - "parent": dst_parent_entity - } - asset_data.update(data.get("asset_data", {})) - - asset_entity = self.dst_session.query( - self.query("Asset", asset_data) - ).first() - - self.log.info("asset entity: {}".format(asset_entity)) - - # Extracting metadata, and adding after entity creation. This is - # due to a ftrack_api bug where you can't add metadata on creation. - asset_metadata = asset_data.pop("metadata", {}) - - # Create a new entity if none exits. - info_msg = ( - 'Created new {entity_type} with data: {data}' - ", metadata: {metadata}." 
- ) - - if not asset_entity: - asset_entity = self.dst_session.create("Asset", asset_data) - self.dst_session.commit() - - self.log.debug( - info_msg.format( - entity_type="Asset", - data=asset_data, - metadata=asset_metadata - ) - ) - - # Adding metadata - existing_asset_metadata = asset_entity["metadata"] - existing_asset_metadata.update(asset_metadata) - asset_entity["metadata"] = existing_asset_metadata - - # AssetVersion - assetversion_data = { - 'version': 0, - 'asset': asset_entity - } - - # NOTE task is skipped (can't be identified in other ftrack) - # if task: - # assetversion_data['task'] = task - - # NOTE assetversion_data contains version number which is not correct - assetversion_data.update(data.get("assetversion_data", {})) - - assetversion_entity = self.dst_session.query( - self.query("AssetVersion", assetversion_data) - ).first() - - # Extracting metadata, and adding after entity creation. This is - # due to a ftrack_api bug where you can't add metadata on creation. - assetversion_metadata = assetversion_data.pop("metadata", {}) - - # Create a new entity if none exits. - if not assetversion_entity: - assetversion_entity = self.dst_session.create( - "AssetVersion", assetversion_data - ) - self.dst_session.commit() - - self.log.debug( - info_msg.format( - entity_type="AssetVersion", - data=assetversion_data, - metadata=assetversion_metadata - ) - ) - - # Check if custom attribute can of main Ftrack can be set - if self.id_key_dst not in assetversion_entity['custom_attributes']: - self.log.warning(( - 'Destination Asset Version do not have key "{}" in' - ' Custom attributes' - ).format(self.id_key_dst)) - return - - assetversion_entity['custom_attributes'][self.id_key_dst] = src_entity['id'] - - # Adding metadata - existing_assetversion_metadata = assetversion_entity["metadata"] - existing_assetversion_metadata.update(assetversion_metadata) - assetversion_entity["metadata"] = existing_assetversion_metadata - - # Have to commit the version and asset, because location can't - # determine the final location without. - self.dst_session.commit() - - # Component - # Get existing entity. - component_data = { - "name": "main", - "version": assetversion_entity - } - component_data.update(data.get("component_data", {})) - - component_entity = self.dst_session.query( - self.query("Component", component_data) - ).first() - - component_overwrite = data.get("component_overwrite", False) - - location = None - location_name = data.get("component_location", {}).get('name') - if location_name: - location = self.dst_session.query( - 'Location where name is "{}"'.format(location_name) - ).first() - - if not location: - location = self.dst_session.pick_location() - - # Overwrite existing component data if requested. 
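-        # (Existing members are removed from the location and deleted
-        # before the new source is added, so a file sequence is replaced
-        # cleanly instead of being merged with the old members.)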
- if component_entity and component_overwrite: - - origin_location = self.dst_session.query( - 'Location where name is "ftrack.origin"' - ).one() - - # Removing existing members from location - components = list(component_entity.get("members", [])) - components += [component_entity,] - for component in components: - for loc in component["component_locations"]: - if location["id"] == loc["location_id"]: - location.remove_component( - component, recursive=False - ) - - # Deleting existing members on component entity - for member in component_entity.get("members", []): - self.dst_session.delete(member) - del(member) - - self.dst_session.commit() - - # Reset members in memory - if "members" in component_entity.keys(): - component_entity["members"] = [] - - # Add components to origin location - try: - collection = clique.parse(data["component_path"]) - except ValueError: - # Assume its a single file - # Changing file type - name, ext = os.path.splitext(data["component_path"]) - component_entity["file_type"] = ext - - origin_location.add_component( - component_entity, data["component_path"] - ) - else: - # Changing file type - component_entity["file_type"] = collection.format("{tail}") - - # Create member components for sequence. - for member_path in collection: - - size = 0 - try: - size = os.path.getsize(member_path) - except OSError: - pass - - name = collection.match(member_path).group("index") - - member_data = { - "name": name, - "container": component_entity, - "size": size, - "file_type": os.path.splitext(member_path)[-1] - } - - component = self.dst_session.create( - "FileComponent", member_data - ) - origin_location.add_component( - component, member_path, recursive=False - ) - component_entity["members"].append(component) - - # Add components to location. - location.add_component( - component_entity, origin_location, recursive=True - ) - - data["component"] = component_entity - msg = "Overwriting Component with path: {0}, data: {1}, " - msg += "location: {2}" - self.log.info( - msg.format( - data["component_path"], - component_data, - location - ) - ) - - # Extracting metadata, and adding after entity creation. This is - # due to a ftrack_api bug where you can't add metadata on creation. - component_metadata = component_data.pop("metadata", {}) - - # Create new component if none exists. - new_component = False - if not component_entity: - component_entity = assetversion_entity.create_component( - data["component_path"], - data=component_data, - location=location - ) - data["component"] = component_entity - msg = ( - "Created new Component with path: {}, data: {}" - ", metadata: {}, location: {}" - ) - self.log.info(msg.format( - data["component_path"], - component_data, - component_metadata, - location['name'] - )) - new_component = True - - # Adding metadata - existing_component_metadata = component_entity["metadata"] - existing_component_metadata.update(component_metadata) - component_entity["metadata"] = existing_component_metadata - - # if component_data['name'] = 'ftrackreview-mp4-mp4': - # assetversion_entity["thumbnail_id"] - - # Setting assetversion thumbnail - if data.get("thumbnail", False): - assetversion_entity["thumbnail_id"] = component_entity["id"] - - # Inform user about no changes to the database. - if ( - component_entity and - not component_overwrite and - not new_component - ): - data["component"] = component_entity - self.log.info( - "Found existing component, and no request to overwrite. " - "Nothing has been changed." - ) - return - - # Commit changes. 
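-        # (flushes the component and metadata updates batched in the
-        # session since the last commit)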
- self.dst_session.commit() - - return assetversion_entity['id'] - - def query(self, entitytype, data): - """ Generate a query expression from data supplied. - - If a value is not a string, we'll add the id of the entity to the - query. - - Args: - entitytype (str): The type of entity to query. - data (dict): The data to identify the entity. - exclusions (list): All keys to exclude from the query. - - Returns: - str: String query to use with "session.query" - """ - queries = [] - if sys.version_info[0] < 3: - for key, value in data.iteritems(): - if not isinstance(value, (basestring, int)): - self.log.info("value: {}".format(value)) - if "id" in value.keys(): - queries.append( - "{0}.id is \"{1}\"".format(key, value["id"]) - ) - else: - queries.append("{0} is \"{1}\"".format(key, value)) - else: - for key, value in data.items(): - if not isinstance(value, (str, int)): - self.log.info("value: {}".format(value)) - if "id" in value.keys(): - queries.append( - "{0}.id is \"{1}\"".format(key, value["id"]) - ) - else: - queries.append("{0} is \"{1}\"".format(key, value)) - - query = ( - entitytype + " where " + " and ".join(queries) - ) - return query - - def download_file(self, url, path): - r = requests.get(url, stream=True).content - with open(path, 'wb') as f: - f.write(r) - - def get_dst_location(self, name): - if name in self.dst_ftrack_locations: - return self.dst_ftrack_locations[name] - - location = self.dst_session.query( - 'Location where name is "{}"'.format(name) - ).one() - self.dst_ftrack_locations[name] = location - return location - - -def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - - if not isinstance(session, ftrack_api.session.Session): - return - - SyncAssetVersions(session).register() - - -def main(arguments=None): - '''Set up logging and register action.''' - if arguments is None: - arguments = [] - - parser = argparse.ArgumentParser() - # Allow setting of logging level from arguments. - loggingLevels = {} - for level in ( - logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, - logging.ERROR, logging.CRITICAL - ): - loggingLevels[logging.getLevelName(level).lower()] = level - - parser.add_argument( - '-v', '--verbosity', - help='Set the logging output verbosity.', - choices=loggingLevels.keys(), - default='info' - ) - namespace = parser.parse_args(arguments) - - # Set up basic logging - logging.basicConfig(level=loggingLevels[namespace.verbosity]) - - session = ftrack_api.Session() - register(session) - - # Wait for events - logging.info( - 'Registered actions and listening for events. Use Ctrl-C to abort.' - ) - session.event_hub.wait() - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) diff --git a/pype/ftrack/actions/action_sync_notes.py b/pype/ftrack/actions/action_sync_notes.py deleted file mode 100644 index 4c0788f858..0000000000 --- a/pype/ftrack/actions/action_sync_notes.py +++ /dev/null @@ -1,230 +0,0 @@ -import os -import sys -import time -import datetime -import requests -import tempfile - -from pypeapp import config -from pype.vendor import ftrack_api -from pype.ftrack import BaseAction -from pype.ftrack.lib.custom_db_connector import DbConnector, ClientSession - - -class SynchronizeNotes(BaseAction): - #: Action identifier. - identifier = 'sync.notes' - #: Action label. - label = 'Synchronize Notes' - #: Action description. 
- description = 'Synchronize notes from one Ftrack to another' - #: roles that are allowed to register this action - role_list = ['Administrator', 'Project Manager', 'Pypeclub'] - - db_con = DbConnector( - mongo_url=os.environ["AVALON_MONGO"], - database_name='notes_database', - table_name='notes_table' - ) - - id_key_src = 'fridge_ftrackID' - id_key_dst = 'kredenc_ftrackID' - - def discover(self, session, entities, event): - ''' Validation ''' - if len(entities) == 0: - return False - - for entity in entities: - if entity.entity_type.lower() != 'assetversion': - return False - - return True - - def launch(self, session, entities, event): - source_credentials = config.get_presets()['ftrack'].get( - 'partnership_ftrack_cred', {} - ) - - self.session_source = ftrack_api.Session( - server_url=source_credentials.get('server_url'), - api_key=source_credentials.get('api_key'), - api_user=source_credentials.get('api_user'), - auto_connect_event_hub=True - ) - - self.session_for_components = ftrack_api.Session( - server_url=session.server_url, - api_key=session.api_key, - api_user=session.api_user, - auto_connect_event_hub=True - ) - - self.user = self.session_for_components.query( - 'User where username is "{}"'.format(self.session.api_user) - ).one() - - self.db_con.install() - - missing_id_entities = [] - to_sync_data = [] - for dst_entity in entities: - # Ignore entities withoud stored id from second ftrack - from_id = dst_entity['custom_attributes'].get(self.id_key_src) - if not from_id: - missing_id_entities.append(dst_entity.get('name', dst_entity)) - continue - - to_sync_data.append((dst_entity.entity_type, dst_entity['id'])) - - for dst_entity_data in to_sync_data: - av_query = 'AssetVersion where id is "{}"'.format(from_id) - src_entity = self.session_source.query(av_query).one() - src_notes = src_entity['notes'] - self.sync_notes(src_notes, dst_entity_data) - - self.db_con.uninstall() - - if missing_id_entities: - self.log.info('Entities without Avalon ID:') - self.log.info(missing_id_entities) - - return True - - def sync_notes(self, src_notes, dst_entity_data): - # Sort notes by date time - src_notes = sorted(src_notes, key=lambda note: note['date']) - - for src_note in src_notes: - # Find if exists in DB - db_note_entity = self.db_con.find_one({ - self.id_key_src: src_note['id'] - }) - - # WARNING: expr `if not db_note_entity:` does not work! 
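-            # (find_one returns None when no record matches, while a
-            # matched document may itself be falsy, so only an explicit
-            # `is None` test reliably means "not synced yet")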
- if db_note_entity is None: - # Create note if not found in DB - dst_note_id = self.create_note( - src_note, dst_entity_data - ) - # Add references to DB for next sync - item = { - self.id_key_dst: dst_note_id, - self.id_key_src: src_note['id'], - 'content': src_note['content'], - 'entity_type': 'Note', - 'sync_date': str(datetime.date.today()) - } - self.db_con.insert_one(item) - else: - dst_note_id = db_note_entity[self.id_key_dst] - - replies = src_note.get('replies') - if not replies: - continue - - self.sync_notes(replies, ('Note', dst_note_id)) - - def create_note(self, src_note, dst_entity_data): - # dst_entity_data - tuple(entity type, entity id) - dst_entity = self.session.query( - '{} where id is "{}"'.format(*dst_entity_data) - ).one() - - is_reply = False - if dst_entity.entity_type.lower() != 'note': - # Category - category = None - cat = src_note['category'] - if cat: - cat_name = cat['name'] - category = self.session.query( - 'NoteCategory where name is "{}"'.format(cat_name) - ).first() - - new_note = dst_entity.create_note( - src_note['content'], self.user, category=category - ) - else: - new_note = dst_entity.create_reply( - src_note['content'], self.user - ) - is_reply = True - - # QUESTION Should we change date to match source Ftrack? - new_note['date'] = src_note['date'] - - self.session.commit() - new_note_id = new_note['id'] - - # Components - if src_note['note_components']: - self.reupload_components(src_note, new_note_id) - - # Bug in ftrack_api, when reply is added session must be reset - if is_reply: - self.session.reset() - time.sleep(0.2) - - return new_note_id - - def reupload_components(self, src_note, dst_note_id): - # Download and collect source components - src_server_location = self.session_source.query( - 'Location where name is "ftrack.server"' - ).one() - - temp_folder = tempfile.mkdtemp('note_components') - - #download and store path to upload - paths_to_upload = [] - count = 0 - for note_component in src_note['note_components']: - count +=1 - download_url = src_server_location.get_url( - note_component['component'] - ) - - file_name = '{}{}{}'.format( - str(src_note['date'].format('YYYYMMDDHHmmss')), - "{:0>3}".format(count), - note_component['component']['file_type'] - ) - path = os.path.sep.join([temp_folder, file_name]) - - self.download_file(download_url, path) - paths_to_upload.append(path) - - # Create downloaded components and add to note - dst_server_location = self.session_for_components.query( - 'Location where name is "ftrack.server"' - ).one() - - for path in paths_to_upload: - component = self.session_for_components.create_component( - path, - data={'name': 'My file'}, - location=dst_server_location - ) - - # Attach the component to the note. - self.session_for_components.create( - 'NoteComponent', - {'component_id': component['id'], 'note_id': dst_note_id} - ) - - self.session_for_components.commit() - - def download_file(self, url, path): - r = requests.get(url, stream=True).content - with open(path, 'wb') as f: - f.write(r) - - -def register(session, **kw): - '''Register plugin. 
Called when used as an plugin.'''
-
-    if not isinstance(session, ftrack_api.session.Session):
-        return
-
-    SynchronizeNotes(session).register()

From 91bbf712184843af8a7f098ce47dea6d8d1fe1ea Mon Sep 17 00:00:00 2001
From: Jakub Trllo
Date: Thu, 11 Jul 2019 14:17:25 +0200
Subject: [PATCH 17/69] (hotfix) fixed key accessing in hierarchical attr
 action discover method

---
 pype/ftrack/actions/action_sync_hier_attrs.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/actions/action_sync_hier_attrs.py b/pype/ftrack/actions/action_sync_hier_attrs.py
index 432cd6b493..3a884a017f 100644
--- a/pype/ftrack/actions/action_sync_hier_attrs.py
+++ b/pype/ftrack/actions/action_sync_hier_attrs.py
@@ -33,7 +33,7 @@ class SyncHierarchicalAttrs(BaseAction):
         ''' Validation '''
         for entity in entities:
             if (
-                entity['context_type'].lower() in ('show', 'task') and
+                entity.get('context_type', '').lower() in ('show', 'task') and
                 entity.entity_type.lower() != 'task'
             ):
                 return True

From 322ec3e3c8801aff3edf0ec05e153fbc7b457bb1 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 11 Jul 2019 16:53:42 +0200
Subject: [PATCH 18/69] fix(nuke): publish workfile handle_start/end was not
 correct

---
 pype/plugins/global/publish/integrate_new.py  |  3 +-
 pype/plugins/nuke/load/load_script_precomp.py |  6 +-
 .../nuke/publish/collect_current_file.py      |  3 +-
 pype/plugins/nuke/publish/collect_workfile.py | 56 ++++++++++++++-----
 pype/plugins/nuke/publish/validate_script.py  | 30 +++++-----
 5 files changed, 66 insertions(+), 32 deletions(-)

diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index ec0cca10d6..e70657eef9 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -539,7 +539,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
         # Include optional data if present in
         optionals = [
-            "startFrame", "endFrame", "step", "handles", "sourceHashes"
+            "startFrame", "endFrame", "step", "handles",
+            "handle_end", "handle_start", "sourceHashes"
         ]
         for key in optionals:
             if key in instance.data:
diff --git a/pype/plugins/nuke/load/load_script_precomp.py b/pype/plugins/nuke/load/load_script_precomp.py
index 6fd76edd03..bc63150206 100644
--- a/pype/plugins/nuke/load/load_script_precomp.py
+++ b/pype/plugins/nuke/load/load_script_precomp.py
@@ -39,12 +39,14 @@ class LinkAsGroup(api.Loader):
 
         precomp_name = context["representation"]["context"]["subset"]
 
+        self.log.info("versionData: {}\n".format(context["version"]["data"]))
+
         # Set global in point to start frame (if in version.data)
         start = context["version"]["data"].get("startFrame", None)
+        self.log.info("start: {}\n".format(start))
 
         # add additional metadata from the version to imprint to Avalon knob
-        add_keys = ["startFrame", "endFrame", "handles",
-                    "source", "author", "fps"]
+        add_keys = ["startFrame", "endFrame", "handle_start", "handle_end", "source", "author", "fps"]
 
         data_imprint = {
             "start_frame": start,
diff --git a/pype/plugins/nuke/publish/collect_current_file.py b/pype/plugins/nuke/publish/collect_current_file.py
index 35a0ef4c2a..253b9a7a24 100644
--- a/pype/plugins/nuke/publish/collect_current_file.py
+++ b/pype/plugins/nuke/publish/collect_current_file.py
@@ -1,10 +1,11 @@
 import pyblish.api
 
 
-class SelectCurrentFile(pyblish.api.ContextPlugin):
+class CollectCurrentFile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    order = pyblish.api.CollectorOrder - 0.5
+    label = "Collect Current File"
    hosts = ["nuke"]
def process(self, context): diff --git a/pype/plugins/nuke/publish/collect_workfile.py b/pype/plugins/nuke/publish/collect_workfile.py index dc8472e4c2..2ec6464ace 100644 --- a/pype/plugins/nuke/publish/collect_workfile.py +++ b/pype/plugins/nuke/publish/collect_workfile.py @@ -2,6 +2,8 @@ import nuke import pyblish.api import os +import pype.api as pype + from avalon.nuke import ( get_avalon_knob_data, add_publish_knob @@ -24,16 +26,20 @@ class CollectWorkfile(pyblish.api.ContextPlugin): family = "workfile" # creating instances per write node - file_path = root['name'].value() + file_path = context.data["currentFile"] + staging_dir = os.path.dirname(file_path) base_name = os.path.basename(file_path) subset = "{0}_{1}".format(os.getenv("AVALON_TASK", None), family) + # get version string + version = pype.get_version_from_path(base_name) + # Get frame range first_frame = int(root["first_frame"].getValue()) last_frame = int(root["last_frame"].getValue()) - handle_start = int(knob_data["handle_start"]) - handle_end = int(knob_data["handle_end"]) + handle_start = int(knob_data.get("handle_start", 0)) + handle_end = int(knob_data.get("handle_end", 0)) # Get format format = root['format'].value() @@ -45,23 +51,47 @@ class CollectWorkfile(pyblish.api.ContextPlugin): instance = context.create_instance(subset) instance.add(root) - instance.data.update({ - "subset": subset, + script_data = { "asset": os.getenv("AVALON_ASSET", None), - "label": base_name, - "name": base_name, - "startFrame": first_frame, - "endFrame": last_frame, + "version": version, + "startFrame": first_frame + handle_start, + "endFrame": last_frame - handle_end, "resolution_width": resolution_width, "resolution_height": resolution_height, "pixel_aspect": pixel_aspect, - "publish": root.knob('publish').value(), - "family": family, - "representation": "nk", + + # backward compatibility + "handles": handle_start, + "handle_start": handle_start, "handle_end": handle_end, "step": 1, - "fps": int(root['fps'].value()), + "fps": root['fps'].value(), + } + context.data.update(script_data) + + # creating instance data + instance.data.update({ + "subset": subset, + "label": base_name, + "name": base_name, + "publish": root.knob('publish').value(), + "family": family, + "representations": list() }) + + # adding basic script data + instance.data.update(script_data) + + # creating representation + representation = { + 'name': 'nk', + 'ext': 'nk', + 'files': base_name, + "stagingDir": staging_dir, + } + + instance.data["representations"].append(representation) + self.log.info('Publishing script version') context.data["instances"].append(instance) diff --git a/pype/plugins/nuke/publish/validate_script.py b/pype/plugins/nuke/publish/validate_script.py index f79d4ab862..4ad76b898b 100644 --- a/pype/plugins/nuke/publish/validate_script.py +++ b/pype/plugins/nuke/publish/validate_script.py @@ -12,8 +12,8 @@ class ValidateScript(pyblish.api.InstancePlugin): hosts = ["nuke"] def process(self, instance): - instance_data = instance.data - asset_name = instance_data["asset"] + ctx_data = instance.context.data + asset_name = ctx_data["asset"] asset = io.find_one({ "type": "asset", @@ -66,19 +66,19 @@ class ValidateScript(pyblish.api.InstancePlugin): handle_end = asset_attributes["handle_end"] # Set frame range with handles - asset_attributes["fstart"] -= handle_start - asset_attributes["fend"] += handle_end + # asset_attributes["fstart"] -= handle_start + # asset_attributes["fend"] += handle_end # Get values from nukescript script_attributes = { - 
"handle_start": instance_data["handle_start"], - "handle_end": instance_data["handle_end"], - "fps": instance_data["fps"], - "fstart": instance_data["startFrame"], - "fend": instance_data["endFrame"], - "resolution_width": instance_data["resolution_width"], - "resolution_height": instance_data["resolution_height"], - "pixel_aspect": instance_data["pixel_aspect"] + "handle_start": ctx_data["handle_start"], + "handle_end": ctx_data["handle_end"], + "fps": ctx_data["fps"], + "fstart": ctx_data["startFrame"], + "fend": ctx_data["endFrame"], + "resolution_width": ctx_data["resolution_width"], + "resolution_height": ctx_data["resolution_height"], + "pixel_aspect": ctx_data["pixel_aspect"] } # Compare asset's values Nukescript X Database @@ -95,10 +95,10 @@ class ValidateScript(pyblish.api.InstancePlugin): # Alert user that handles are set if Frame start/end not match if ( (("fstart" in not_matching) or ("fend" in not_matching)) and - (handles > 0) + ((handle_start > 0) or (handle_end > 0)) ): - handles = str(handles).replace(".0", "") - msg += " (handles are set to {})".format(handles) + msg += " (`handle_start` are set to {})".format(handle_start) + msg += " (`handle_end` are set to {})".format(handle_end) message = msg.format(", ".join(not_matching)) raise ValueError(message) From 353cccb54cd0d736d062689093226d89b60a679f Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 11 Jul 2019 17:03:34 +0200 Subject: [PATCH 19/69] (hotfix) mayaascii wasn't working with burnins and ftrack --- .../publish/integrate_ftrack_instances.py | 2 +- .../plugins/maya/publish/collect_mayaascii.py | 25 +++++++++++++++++++ .../plugins/maya/publish/extract_quicktime.py | 2 +- 3 files changed, 27 insertions(+), 2 deletions(-) create mode 100644 pype/plugins/maya/publish/collect_mayaascii.py diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index a79e1f8ce5..9be0210c4c 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -16,7 +16,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): family_mapping = {'camera': 'cam', 'look': 'look', - 'mayaAscii': 'scene', + 'mayaascii': 'scene', 'model': 'geo', 'rig': 'rig', 'setdress': 'setdress', diff --git a/pype/plugins/maya/publish/collect_mayaascii.py b/pype/plugins/maya/publish/collect_mayaascii.py new file mode 100644 index 0000000000..fbed8e0ead --- /dev/null +++ b/pype/plugins/maya/publish/collect_mayaascii.py @@ -0,0 +1,25 @@ +from maya import cmds + +import pyblish.api + + +class CollectMayaAscii(pyblish.api.InstancePlugin): + """Collect May Ascii Data + + """ + + order = pyblish.api.CollectorOrder + 0.2 + label = 'Collect Model Data' + families = ["mayaAscii"] + + def process(self, instance): + # Extract only current frame (override) + frame = cmds.currentTime(query=True) + instance.data['startFrame'] = frame + instance.data['endFrame'] = frame + + # make ftrack publishable + if instance.data.get('families'): + instance.data['families'].append('ftrack') + else: + instance.data['families'] = ['ftrack'] diff --git a/pype/plugins/maya/publish/extract_quicktime.py b/pype/plugins/maya/publish/extract_quicktime.py index 5d5454ca16..ff08799c0a 100644 --- a/pype/plugins/maya/publish/extract_quicktime.py +++ b/pype/plugins/maya/publish/extract_quicktime.py @@ -137,7 +137,7 @@ class ExtractQuicktime(pype.api.Extractor): "username": instance.context.data['user'], "asset": os.environ['AVALON_ASSET'], "task": 
os.environ['AVALON_TASK'], - "start_frame": int(instance.data['startFrame']), + "start_frame": int(start), "version": "v" + str(version) } } From 06b428b1eefa32bd61c8c9c868b774278f32098e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Jul 2019 17:10:15 +0200 Subject: [PATCH 20/69] (hotfix) default value of custom attributes can now be unset with `null` in json --- pype/ftrack/actions/action_create_cust_attrs.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pype/ftrack/actions/action_create_cust_attrs.py b/pype/ftrack/actions/action_create_cust_attrs.py index 7dd8335ecc..d665e92d4a 100644 --- a/pype/ftrack/actions/action_create_cust_attrs.py +++ b/pype/ftrack/actions/action_create_cust_attrs.py @@ -258,8 +258,8 @@ class CustomAttributes(BaseAction): ): continue - if 'is_hierarchical' in data: - if data['is_hierarchical'] == attr['is_hierarchical']: + if data.get('is_hierarchical', False) is True: + if attr['is_hierarchical'] is True: matching.append(attr) elif 'object_type_id' in data: if ( @@ -453,6 +453,8 @@ class CustomAttributes(BaseAction): def get_default(self, attr): type = attr['type'] default = attr['default'] + if default is None: + return default err_msg = 'Default value is not' if type == 'number': if not isinstance(default, (float, int)): From 6f9aefcc6f96fab2597f85197d396f22438cf664 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 11 Jul 2019 19:42:51 +0200 Subject: [PATCH 21/69] first version not tested --- .../ftrack/actions/action_cust_attr_doctor.py | 325 ++++++++++++++++++ 1 file changed, 325 insertions(+) create mode 100644 pype/ftrack/actions/action_cust_attr_doctor.py diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py new file mode 100644 index 0000000000..199572f18a --- /dev/null +++ b/pype/ftrack/actions/action_cust_attr_doctor.py @@ -0,0 +1,325 @@ +import os +import sys +import argparse +import logging + +from pype.vendor import ftrack_api +from pype.ftrack import BaseAction + + +class CustomAttributeDoctor(BaseAction): + #: Action identifier. + identifier = 'custom.attributes.doctor' + #: Action label. + label = 'Custom Attributes Doctor' + #: Action description. 
+ description = ( + 'Fix hierarchical custom attributes mainly handles, fstart' + ' and fend' + ) + + icon = '{}/ftrack/action_icons/TestAction.svg'.format( + os.environ.get('PYPE_STATICS_SERVER', '') + ) + hierarchical_ca = ['handle_start', 'handle_end', 'fstart', 'fend'] + hierarchical_alternatives = { + 'handle_start': 'handles', + 'handle_end': 'handles' + } + + # Roles for new custom attributes + read_roles = ['ALL',] + write_roles = ['ALL',] + + data_ca = { + 'handle_start': { + 'label': 'Frame handles start', + 'type': 'number' + }, + 'handle_end': { + 'label': 'Frame handles end', + 'type': 'number' + }, + 'fstart': { + 'label': 'Frame start', + 'type': 'number' + }, + 'fend': { + 'label': 'Frame end', + 'type': 'number' + } + } + + def discover(self, session, entities, event): + ''' Validation ''' + + return True + + def interface(self, session, entities, event): + if event['data'].get('values', {}): + return + + title = 'Select Project to fix Custom attributes' + + items = [] + item_splitter = {'type': 'label', 'value': '---'} + + all_projects = session.query('Project').all() + for project in all_projects: + item_label = { + 'type': 'label', + 'value': '{} ({})'.format( + project['full_name'], project['name'] + ) + } + item = { + 'name': project['id'], + 'type': 'boolean', + 'value': False + } + if len(items) > 0: + items.append(item_splitter) + items.append(item_label) + items.append(item) + + if len(items) == 0: + return { + 'success': False, + 'message': 'Didn\'t found any projects' + } + else: + return { + 'items': items, + 'title': title + } + + def launch(self, session, entities, event): + if 'values' not in event['data']: + return + + values = event['data']['values'] + projects_to_update = [] + for project_id, update_bool in values.items(): + if not update_bool: + continue + + project = session.query( + 'Project where id is "{}"'.format(project_id) + ).one() + projects_to_update.append(project) + + if not projects_to_update: + self.log.debug('Nothing to update') + return { + 'success': True, + 'message': 'Nothing to update' + } + + self.security_roles = {} + self.to_process = {} + # self.curent_default_values = {} + existing_attrs = session.query('CustomAttributeConfiguration').all() + self.prepare_custom_attributes(existing_attrs) + + self.projects_data = {} + for project in projects_to_update: + self.process_project_data(project) + + return True + + def process_data(self, entity): + cust_attrs = project.get('custom_attributes') + if not cust_attrs: + return + for dst_key, src_key in self.to_process.items(): + if src_key in cust_attrs: + value = cust_attrs[src_key] + project['custom_attributes'][dst_key] = value + self.session.commit() + + for child in entity.get('children', []): + self.process_data(child) + + def prepare_custom_attributes(self, existing_attrs): + to_process = {} + to_create = [] + all_keys = {attr['key']: attr for attr in existing_attrs} + for key in self.hierarchical_ca: + if key not in all_keys: + self.log.debug( + 'Custom attribute "{}" does not exist at all'.format(key) + ) + to_create.append(key) + if key in self.hierarchical_alternatives: + alt_key = self.hierarchical_alternatives[key] + if alt_key in all_keys: + self.log.debug(( + 'Custom attribute "{}" will use values from "{}"' + ).format(alt_key)) + + to_process[key] = alt_key + + obj = all_keys[alt_key] + # if alt_key not in self.curent_default_values: + # self.curent_default_values[alt_key] = obj['default'] + obj['default'] = None + self.sesion.commit() + + else: + obj = all_keys[key] + 
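+                # Keep the old flat attribute around under a "_old" key
+                # so its per-entity values can still be read and copied
+                # onto the new hierarchical attribute by process_data().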
new_key = key + '_old' + + if obj['is_hierarchical']: + if new_key not in all_keys: + self.log.info(( + 'Custom attribute "{}" is already hierarchical' + ' and can\'t find old one' + ).format(key) + ) + continue + + to_process[key] = new_key + continue + + default_value = obj['default'] + if new_key not in self.curent_default_values: + self.curent_default_values[new_key] = default_value + + obj['key'] = new_key + obj['label'] = obj['label'] + '(old)' + obj['default'] = None + + self.session.commit() + + to_create.append(key) + to_process[key] = new_key + + self.to_process = to_process + for key in to_create: + data = { + 'key': key, + 'is_hierarchical': True, + 'default': None + } + for _key, _value in data_ca.get(key, {}).items(): + data[_key] = _value + + avalon_group = self.session.query( + 'CustomAttributeGroup where name is "avalon"' + ).first() + if avalon_group: + data['group'] = avalon_group + + data['read_security_role'] = self.get_security_role( + self.read_roles + ) + data['write_security_role'] = self.get_security_role( + self.write_roles + ) + + self.session.create('CustomAttributeConfiguration', data) + self.session.commit() + + # def return_back_defaults(self): + # existing_attrs = self.session.query( + # 'CustomAttributeConfiguration' + # ).all() + # + # for attr_key, default in self.curent_default_values.items(): + # for attr in existing_attrs: + # if attr['key'] != attr_key: + # continue + # attr['default'] = default + # self.session.commit() + # break + + def get_security_role(self, security_roles): + roles = [] + if len(security_roles) == 0 or security_roles[0] == 'ALL': + roles = self.get_role_ALL() + elif security_roles[0] == 'except': + excepts = security_roles[1:] + all = self.get_role_ALL() + for role in all: + if role['name'] not in excepts: + roles.append(role) + if role['name'] not in self.security_roles: + self.security_roles[role['name']] = role + else: + for role_name in security_roles: + if role_name in self.security_roles: + roles.append(self.security_roles[role_name]) + continue + + try: + query = 'SecurityRole where name is "{}"'.format(role_name) + role = self.session.query(query).one() + self.security_roles[role_name] = role + roles.append(role) + except Exception: + self.log.warning( + 'Securit role "{}" does not exist'.format(role_name) + ) + continue + + return roles + + def get_role_ALL(self): + role_name = 'ALL' + if role_name in self.security_roles: + all_roles = self.security_roles[role_name] + else: + all_roles = self.session.query('SecurityRole').all() + self.security_roles[role_name] = all_roles + for role in all_roles: + if role['name'] not in self.security_roles: + self.security_roles[role['name']] = role + return all_roles + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + + if not isinstance(session, ftrack_api.session.Session): + return + + CustomAttributeDoctor(session).register() + + +def main(arguments=None): + '''Set up logging and register action.''' + if arguments is None: + arguments = [] + + parser = argparse.ArgumentParser() + # Allow setting of logging level from arguments. 
+ loggingLevels = {} + for level in ( + logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, + logging.ERROR, logging.CRITICAL + ): + loggingLevels[logging.getLevelName(level).lower()] = level + + parser.add_argument( + '-v', '--verbosity', + help='Set the logging output verbosity.', + choices=loggingLevels.keys(), + default='info' + ) + namespace = parser.parse_args(arguments) + + # Set up basic logging + logging.basicConfig(level=loggingLevels[namespace.verbosity]) + + session = ftrack_api.Session() + register(session) + + # Wait for events + logging.info( + 'Registered actions and listening for events. Use Ctrl-C to abort.' + ) + session.event_hub.wait() + + +if __name__ == '__main__': + raise SystemExit(main(sys.argv[1:])) From 1491952cbdd6e6e82262125fde21fb11073b288f Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 12 Jul 2019 10:46:01 +0200 Subject: [PATCH 22/69] fix missing variables --- pype/ftrack/actions/action_cust_attr_doctor.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py index 199572f18a..fdfa6f4984 100644 --- a/pype/ftrack/actions/action_cust_attr_doctor.py +++ b/pype/ftrack/actions/action_cust_attr_doctor.py @@ -123,18 +123,18 @@ class CustomAttributeDoctor(BaseAction): self.projects_data = {} for project in projects_to_update: - self.process_project_data(project) + self.process_data(project) return True def process_data(self, entity): - cust_attrs = project.get('custom_attributes') + cust_attrs = entity.get('custom_attributes') if not cust_attrs: return for dst_key, src_key in self.to_process.items(): if src_key in cust_attrs: value = cust_attrs[src_key] - project['custom_attributes'][dst_key] = value + entity['custom_attributes'][dst_key] = value self.session.commit() for child in entity.get('children', []): @@ -201,7 +201,7 @@ class CustomAttributeDoctor(BaseAction): 'is_hierarchical': True, 'default': None } - for _key, _value in data_ca.get(key, {}).items(): + for _key, _value in self.data_ca.get(key, {}).items(): data[_key] = _value avalon_group = self.session.query( From 20878ec3cb469e51d263c6162be5c4d905f2e42d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Jul 2019 10:48:49 +0200 Subject: [PATCH 23/69] feat(pype, nuke): publishing with new review to ftrack with ffmpeg, wip --- .../ftrack/publish/collect_ftrack_api.py | 3 + .../global/_publish_unused/extract_review.py | 92 ------------ pype/plugins/global/publish/extract_review.py | 141 ++++++++++++++++++ .../_publish_unused/collect_render_target.py | 46 ++++++ .../extract_script.py | 4 +- .../plugins/nuke/publish/collect_instances.py | 2 +- pype/plugins/nuke/publish/collect_writes.py | 3 +- .../nuke/publish/extract_render_local.py | 1 - ...tract_review.py => extract_review_data.py} | 85 +++-------- 9 files changed, 213 insertions(+), 164 deletions(-) delete mode 100644 pype/plugins/global/_publish_unused/extract_review.py create mode 100644 pype/plugins/global/publish/extract_review.py create mode 100644 pype/plugins/nuke/_publish_unused/collect_render_target.py rename pype/plugins/nuke/{publish => _publish_unused}/extract_script.py (94%) rename pype/plugins/nuke/publish/{extract_review.py => extract_review_data.py} (69%) diff --git a/pype/plugins/ftrack/publish/collect_ftrack_api.py b/pype/plugins/ftrack/publish/collect_ftrack_api.py index e4923cac98..d09baec676 100644 --- a/pype/plugins/ftrack/publish/collect_ftrack_api.py +++ 
b/pype/plugins/ftrack/publish/collect_ftrack_api.py @@ -18,6 +18,9 @@ class CollectFtrackApi(pyblish.api.ContextPlugin): ftrack_log = logging.getLogger('ftrack_api') ftrack_log.setLevel(logging.WARNING) + ftrack_log = logging.getLogger('ftrack_api_old') + ftrack_log.setLevel(logging.WARNING) + # Collect session session = ftrack_api.Session() context.data["ftrackSession"] = session diff --git a/pype/plugins/global/_publish_unused/extract_review.py b/pype/plugins/global/_publish_unused/extract_review.py deleted file mode 100644 index 885db1cfc9..0000000000 --- a/pype/plugins/global/_publish_unused/extract_review.py +++ /dev/null @@ -1,92 +0,0 @@ -# import os -# import pyblish.api -# import subprocess -# from pype.vendor import clique -# from pypeapp import config -# -# -# class ExtractReview(pyblish.api.InstancePlugin): -# """Resolve any dependency issies -# -# This plug-in resolves any paths which, if not updated might break -# the published file. -# -# The order of families is important, when working with lookdev you want to -# first publish the texture, update the texture paths in the nodes and then -# publish the shading network. Same goes for file dependent assets. -# """ -# -# label = "Extract Review" -# order = pyblish.api.ExtractorOrder -# # families = ["imagesequence", "render", "write", "source"] -# # hosts = ["shell"] -# -# def process(self, instance): -# # adding plugin attributes from presets -# publish_presets = config.get_presets()["plugins"]["global"]["publish"] -# plugin_attrs = publish_presets[self.__class__.__name__] -# -# -# fps = instance.data.get("fps") -# start = instance.data.get("startFrame") -# stagingdir = os.path.normpath(instance.data.get("stagingDir")) -# -# collected_frames = os.listdir(stagingdir) -# collections, remainder = clique.assemble(collected_frames) -# -# full_input_path = os.path.join( -# stagingdir, collections[0].format('{head}{padding}{tail}') -# ) -# self.log.info("input {}".format(full_input_path)) -# -# filename = collections[0].format('{head}') -# if not filename.endswith('.'): -# filename += "." 
-#     movFile = filename + "mov"
-#     full_output_path = os.path.join(stagingdir, movFile)
-#
-#     self.log.info("output {}".format(full_output_path))
-#
-#     config_data = instance.context.data['output_repre_config']
-#
-#     proj_name = os.environ.get('AVALON_PROJECT', '__default__')
-#     profile = config_data.get(proj_name, config_data['__default__'])
-#
-#     input_args = []
-#     # overrides output file
-#     input_args.append("-y")
-#     # preset's input data
-#     input_args.extend(profile.get('input', []))
-#     # necessary input data
-#     input_args.append("-start_number {}".format(start))
-#     input_args.append("-i {}".format(full_input_path))
-#     input_args.append("-framerate {}".format(fps))
-#
-#     output_args = []
-#     # preset's output data
-#     output_args.extend(profile.get('output', []))
-#     # output filename
-#     output_args.append(full_output_path)
-#     mov_args = [
-#         "ffmpeg",
-#         " ".join(input_args),
-#         " ".join(output_args)
-#     ]
-#     subprocess_mov = " ".join(mov_args)
-#     sub_proc = subprocess.Popen(subprocess_mov)
-#     sub_proc.wait()
-#
-#     if not os.path.isfile(full_output_path):
-#         raise("Quicktime wasn't created succesfully")
-#
-#     if "representations" not in instance.data:
-#         instance.data["representations"] = []
-#
-#     representation = {
-#         'name': 'mov',
-#         'ext': 'mov',
-#         'files': movFile,
-#         "stagingDir": stagingdir,
-#         "preview": True
-#     }
-#     instance.data["representations"].append(representation)
diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
new file mode 100644
index 0000000000..8c570d0c73
--- /dev/null
+++ b/pype/plugins/global/publish/extract_review.py
@@ -0,0 +1,141 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+from pypeapp import config
+
+
+class ExtractReview(pyblish.api.InstancePlugin):
+    """Extract review movies with ffmpeg.
+
+    Transcodes representations tagged with "review" into quicktime
+    movies for previewing.
+
+    Output profiles (ffmpeg input and output arguments) come from the
+    "plugins/global/publish" presets, so one rendered sequence can be
+    turned into several differently encoded review outputs.
+    """
+
+    label = "Extract Review"
+    order = pyblish.api.ExtractorOrder + 0.02
+    families = ["review"]
+
+    def process(self, instance):
+        # adding plugin attributes from presets
+        publish_presets = config.get_presets()["plugins"]["global"]["publish"]
+        plugin_attrs = publish_presets[self.__class__.__name__]
+        output_profiles = plugin_attrs.get("outputs", {})
+
+        inst_data = instance.data
+        fps = inst_data.get("fps")
+        start_frame = inst_data.get("startFrame")
+
+        # get representation and loop them
+        representations = instance.data["representations"]
+
+        # filter out mov and img sequences
+        representations_new = list()
+        for repre in representations:
+            if repre['ext'] in plugin_attrs["ext_filter"]:
+                tags = repre.get("tags", [])
+
+                self.log.info("Try repre: {}".format(repre))
+
+                if "review" in tags:
+
+                    repre_new = repre.copy()
+                    del(repre)
+
+                    staging_dir = repre_new["stagingDir"]
+
+                    if "mov" not in repre_new['ext']:
+                        # get output presets and loop them
+                        collected_frames = os.listdir(staging_dir)
+                        collections, remainder = clique.assemble(
+                            collected_frames)
+
+                        full_input_path = os.path.join(
+                            staging_dir, collections[0].format(
+                                '{head}{padding}{tail}')
+                        )
+
+                        filename = collections[0].format('{head}')
+                        if not filename.endswith('.'):
+                            filename += "."
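+                        # the sequence head now ends with a dot, so
+                        # appending "mov" gives e.g. "name.mov"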
+ mov_file = filename + "mov" + + else: + full_input_path = os.path.join( + staging_dir, repre_new["files"]) + + filename = repre_new["files"].split(".")[0] + mov_file = filename + ".mov" + # test if the file is not the input file + if not os.path.isfile(os.path.join( + staging_dir, mov_file)): + mov_file = filename + "_.mov" + + full_output_path = os.path.join(staging_dir, mov_file) + + self.log.info("input {}".format(full_input_path)) + self.log.info("output {}".format(full_output_path)) + + for name, profile in output_profiles.items(): + self.log.debug("Profile name: {}".format(name)) + new_tags = tags + profile.get('tags', []) + input_args = [] + + # overrides output file + input_args.append("-y") + + # preset's input data + input_args.extend(profile.get('input', [])) + + # necessary input data + # adds start arg only if image sequence + if "mov" not in repre_new['ext']: + input_args.append("-start_number {}".format( + start_frame)) + + input_args.append("-i {}".format(full_input_path)) + input_args.append("-framerate {}".format(fps)) + + output_args = [] + # preset's output data + output_args.extend(profile.get('output', [])) + + # output filename + output_args.append(full_output_path) + mov_args = [ + "ffmpeg", + " ".join(input_args), + " ".join(output_args) + ] + subprocess_mov = " ".join(mov_args) + + # run subprocess + sub_proc = subprocess.Popen(subprocess_mov) + sub_proc.wait() + + if not os.path.isfile(full_output_path): + self.log.error( + "Quicktime wasn't created succesfully") + + # create representation data + repre_new.update({ + 'name': name, + 'ext': 'mov', + 'files': mov_file, + "thumbnail": False, + "preview": True, + "tags": new_tags + }) + + # adding representation + representations_new.append(repre_new) + else: + representations_new.append(repre) + + self.log.debug( + "new representations: {}".format(representations_new)) + instance.data["representations"] = representations_new diff --git a/pype/plugins/nuke/_publish_unused/collect_render_target.py b/pype/plugins/nuke/_publish_unused/collect_render_target.py new file mode 100644 index 0000000000..6c04414f69 --- /dev/null +++ b/pype/plugins/nuke/_publish_unused/collect_render_target.py @@ -0,0 +1,46 @@ +import pyblish.api + + +@pyblish.api.log +class CollectRenderTarget(pyblish.api.InstancePlugin): + """Collect families for all instances""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Render Target" + hosts = ["nuke", "nukeassist"] + families = ['write'] + + def process(self, instance): + + node = instance[0] + + self.log.info('processing {}'.format(node)) + + families = [] + if instance.data.get('families'): + families += instance.data['families'] + + # set for ftrack to accept + # instance.data["families"] = ["ftrack"] + + if node["render"].value(): + # dealing with local/farm rendering + if node["render_farm"].value(): + families.append("render.farm") + else: + families.append("render.local") + else: + families.append("render.frames") + # to ignore staging dir op in integrate + instance.data['transfer'] = False + + families.append('ftrack') + + instance.data["families"] = families + + # Sort/grouped by family (preserving local index) + instance.context[:] = sorted(instance.context, key=self.sort_by_family) + + def sort_by_family(self, instance): + """Sort by family""" + return instance.data.get("families", instance.data.get("family")) diff --git a/pype/plugins/nuke/publish/extract_script.py b/pype/plugins/nuke/_publish_unused/extract_script.py similarity index 94% rename from 
pype/plugins/nuke/publish/extract_script.py rename to pype/plugins/nuke/_publish_unused/extract_script.py index d0be98b93e..7d55ea0da4 100644 --- a/pype/plugins/nuke/publish/extract_script.py +++ b/pype/plugins/nuke/_publish_unused/extract_script.py @@ -27,8 +27,8 @@ class ExtractScript(pype.api.Extractor): shutil.copy(current_script, path) if "representations" not in instance.data: - instance.data["representations"] = [] - + instance.data["representations"] = list() + representation = { 'name': 'nk', 'ext': '.nk', diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 7f119f9a1e..35673c5ff3 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -68,7 +68,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "avalonKnob": avalon_knob_data, "publish": node.knob('publish').value(), "step": 1, - "fps": int(nuke.root()['fps'].value()) + "fps": nuke.root()['fps'].value() }) diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 2dae39a1fc..216160616b 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -11,7 +11,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.1 label = "Collect Writes" hosts = ["nuke", "nukeassist"] - families = ["render.local", "render", "render.farm"] + families = ["render", "render.local", "render.farm"] def process(self, instance): @@ -96,5 +96,4 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): "colorspace": node["colorspace"].value(), }) - self.log.debug("instance.data: {}".format(instance.data)) diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py index f424bf1200..1d6550024f 100644 --- a/pype/plugins/nuke/publish/extract_render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -21,7 +21,6 @@ class NukeRenderLocal(pype.api.Extractor): def process(self, instance): node = instance[0] - context = instance.context self.log.debug("instance collected: {}".format(instance.data)) diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review_data.py similarity index 69% rename from pype/plugins/nuke/publish/extract_review.py rename to pype/plugins/nuke/publish/extract_review_data.py index bdbd3d17a6..552aa0cdb0 100644 --- a/pype/plugins/nuke/publish/extract_review.py +++ b/pype/plugins/nuke/publish/extract_review_data.py @@ -2,10 +2,9 @@ import os import nuke import pyblish.api import pype -from pype.vendor import ffmpeg -class ExtractDataForReview(pype.api.Extractor): +class ExtractReviewData(pype.api.Extractor): """Extracts movie and thumbnail with baked in luts must be run after extract_render_local.py @@ -13,8 +12,7 @@ class ExtractDataForReview(pype.api.Extractor): """ order = pyblish.api.ExtractorOrder + 0.01 - label = "Extract Review" - optional = True + label = "Extract Review Data" families = ["review"] hosts = ["nuke"] @@ -35,63 +33,15 @@ class ExtractDataForReview(pype.api.Extractor): if "still" not in instance.data["families"]: self.render_review_representation(instance, representation="mov") - self.log.debug("review mov:") - self.transcode_mov(instance) - self.log.debug("instance.data: {}".format(instance.data)) self.render_review_representation(instance, representation="jpeg") else: - self.log.debug("instance: {}".format(instance)) 
self.render_review_representation(instance, representation="jpeg") # Restore selection [i["selected"].setValue(False) for i in nuke.allNodes()] [i["selected"].setValue(True) for i in selection] - def transcode_mov(self, instance): - collection = instance.data["collection"] - stagingDir = instance.data["stagingDir"].replace("\\", "/") - file_name = collection.format("{head}mov") - - review_mov = os.path.join(stagingDir, file_name).replace("\\", "/") - - self.log.info("transcoding review mov: {0}".format(review_mov)) - if instance.data.get("baked_colorspace_movie"): - input_movie = instance.data["baked_colorspace_movie"] - out, err = ( - ffmpeg - .input(input_movie) - .output( - review_mov, - pix_fmt='yuv420p', - crf=18, - timecode="00:00:00:01" - ) - .overwrite_output() - .run() - ) - - self.log.debug("Removing `{0}`...".format( - instance.data["baked_colorspace_movie"])) - os.remove(instance.data["baked_colorspace_movie"]) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'review', - 'ext': 'mov', - 'files': file_name, - "stagingDir": stagingDir, - "anatomy_template": "render", - "thumbnail": False, - "preview": True, - 'startFrameReview': instance.data['startFrame'], - 'endFrameReview': instance.data['endFrame'], - 'frameRate': instance.context.data["framerate"] - } - instance.data["representations"].append(representation) - def render_review_representation(self, instance, representation="mov"): @@ -172,6 +122,7 @@ class ExtractDataForReview(pype.api.Extractor): temporary_nodes.append(write_node) thumbnail = False preview = True + tags = ["review"] elif representation in "jpeg": file = fhead + "jpeg" @@ -184,29 +135,31 @@ class ExtractDataForReview(pype.api.Extractor): temporary_nodes.append(write_node) thumbnail = True preview = False + tags = ["thumbnail"] # retime for first_frame = int(last_frame) / 2 last_frame = int(last_frame) / 2 - # add into files for integration as representation - if "representations" not in instance.data: - instance.data["representations"] = [] - - repre = { - 'name': representation, - 'ext': representation, - 'files': file, - "stagingDir": stagingDir, - "anatomy_template": "render", - "thumbnail": thumbnail, - "preview": preview - } - instance.data["representations"].append(repre) + repre = { + 'name': representation, + 'ext': representation, + 'files': file, + "stagingDir": stagingDir, + "startFrame": first_frame, + "endFrame": last_frame, + "anatomy_template": "render", + "thumbnail": thumbnail, + "preview": preview, + "tags": tags + } + instance.data["representations"].append(repre) # Render frames nuke.execute(write_node.name(), int(first_frame), int(last_frame)) + self.log.debug("representations: {}".format(instance.data["representations"])) + # Clean up for node in temporary_nodes: nuke.delete(node) From 6bf34a3f138a361e4d9004709ba47190522cfb72 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Jul 2019 11:12:03 +0200 Subject: [PATCH 24/69] fix(ftrack): typos --- pype/ftrack/actions/action_cust_attr_doctor.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py index fdfa6f4984..36c8f7f6ee 100644 --- a/pype/ftrack/actions/action_cust_attr_doctor.py +++ b/pype/ftrack/actions/action_cust_attr_doctor.py @@ -155,7 +155,7 @@ class CustomAttributeDoctor(BaseAction): if alt_key in all_keys: self.log.debug(( 'Custom attribute "{}" will use values from "{}"' - 
).format(alt_key)) + ).format(key, alt_key)) to_process[key] = alt_key @@ -163,7 +163,7 @@ class CustomAttributeDoctor(BaseAction): # if alt_key not in self.curent_default_values: # self.curent_default_values[alt_key] = obj['default'] obj['default'] = None - self.sesion.commit() + self.session.commit() else: obj = all_keys[key] @@ -181,9 +181,9 @@ class CustomAttributeDoctor(BaseAction): to_process[key] = new_key continue - default_value = obj['default'] - if new_key not in self.curent_default_values: - self.curent_default_values[new_key] = default_value + # default_value = obj['default'] + # if new_key not in self.curent_default_values: + # self.curent_default_values[new_key] = default_value obj['key'] = new_key obj['label'] = obj['label'] + '(old)' @@ -216,7 +216,8 @@ class CustomAttributeDoctor(BaseAction): data['write_security_role'] = self.get_security_role( self.write_roles ) - + from pprint import pprint + pprint(data) self.session.create('CustomAttributeConfiguration', data) self.session.commit() From b1a72ed43be6785988c3891505fdc0d2ee01cae4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Jul 2019 12:12:09 +0200 Subject: [PATCH 25/69] fixed roles --- pype/ftrack/actions/action_cust_attr_doctor.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py index 36c8f7f6ee..914253f4d6 100644 --- a/pype/ftrack/actions/action_cust_attr_doctor.py +++ b/pype/ftrack/actions/action_cust_attr_doctor.py @@ -210,14 +210,11 @@ class CustomAttributeDoctor(BaseAction): if avalon_group: data['group'] = avalon_group - data['read_security_role'] = self.get_security_role( - self.read_roles - ) - data['write_security_role'] = self.get_security_role( - self.write_roles - ) - from pprint import pprint - pprint(data) + read_roles = self.get_security_role(self.read_roles) + write_roles = self.get_security_role(self.write_roles) + data['read_security_roles'] = read_roles + data['write_security_roles'] = write_roles + self.session.create('CustomAttributeConfiguration', data) self.session.commit() From 0be2be64405183986addf492bc63df1b2d6f892a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Jul 2019 12:12:29 +0200 Subject: [PATCH 26/69] added entity_type (there must be any) --- pype/ftrack/actions/action_cust_attr_doctor.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py index 914253f4d6..21b1fb12e1 100644 --- a/pype/ftrack/actions/action_cust_attr_doctor.py +++ b/pype/ftrack/actions/action_cust_attr_doctor.py @@ -198,6 +198,7 @@ class CustomAttributeDoctor(BaseAction): for key in to_create: data = { 'key': key, + 'entity_type': 'show', 'is_hierarchical': True, 'default': None } From 9a118aca6a4faa264ac5e0593a238f4d357a917f Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Jul 2019 12:12:45 +0200 Subject: [PATCH 27/69] ca type is not string but object fixed --- pype/ftrack/actions/action_cust_attr_doctor.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py index 21b1fb12e1..10e87742ea 100644 --- a/pype/ftrack/actions/action_cust_attr_doctor.py +++ b/pype/ftrack/actions/action_cust_attr_doctor.py @@ -203,6 +203,11 @@ class CustomAttributeDoctor(BaseAction): 'default': None } for _key, _value in self.data_ca.get(key, {}).items(): + if _key == 'type': + _value = 
self.session.query(( + 'CustomAttributeType where name is "{}"' + ).format(_value)).first() + data[_key] = _value avalon_group = self.session.query( From 15e594829ba1a8a96f5481d7b6415fce5df04a46 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 12 Jul 2019 12:12:56 +0200 Subject: [PATCH 28/69] added config to custom attributes --- pype/ftrack/actions/action_cust_attr_doctor.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/pype/ftrack/actions/action_cust_attr_doctor.py b/pype/ftrack/actions/action_cust_attr_doctor.py index 10e87742ea..9d0ce2071f 100644 --- a/pype/ftrack/actions/action_cust_attr_doctor.py +++ b/pype/ftrack/actions/action_cust_attr_doctor.py @@ -1,5 +1,6 @@ import os import sys +import json import argparse import logging @@ -34,19 +35,23 @@ class CustomAttributeDoctor(BaseAction): data_ca = { 'handle_start': { 'label': 'Frame handles start', - 'type': 'number' + 'type': 'number', + 'config': json.dumps({'isdecimal': False}) }, 'handle_end': { 'label': 'Frame handles end', - 'type': 'number' + 'type': 'number', + 'config': json.dumps({'isdecimal': False}) }, 'fstart': { 'label': 'Frame start', - 'type': 'number' + 'type': 'number', + 'config': json.dumps({'isdecimal': False}) }, 'fend': { 'label': 'Frame end', - 'type': 'number' + 'type': 'number', + 'config': json.dumps({'isdecimal': False}) } } From f45986a1ce5395c000c0a047194ae01b3d7d58a9 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Jul 2019 16:30:13 +0200 Subject: [PATCH 29/69] feat(pype): adding burnin plugin to function with reviews --- .../publish/integrate_ftrack_instances.py | 9 +- pype/plugins/global/publish/extract_burnin.py | 41 +++++-- pype/plugins/global/publish/extract_review.py | 103 ++++++++++-------- 3 files changed, 91 insertions(+), 62 deletions(-) diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index a79e1f8ce5..9d8bd653d7 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -49,14 +49,14 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): for comp in instance.data['representations']: self.log.debug('component {}'.format(comp)) - if comp.get('thumbnail'): + if comp.get('thumbnail') or ("thumbnail" in comp.get('tags', [])): location = self.get_ftrack_location( 'ftrack.server', ft_session ) component_data = { "name": "thumbnail" # Default component name is "main". 
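[editor's note] Patch 29 repeats the same flag-or-tag test for "thumbnail" and "preview" in three places in this file. A small helper would express the test once; this is a hypothetical sketch, not part of the patch:

    def has_flag_or_tag(comp, name):
        # True when the representation sets the boolean flag
        # or carries the matching tag, e.g. "preview" or "thumbnail"
        return bool(comp.get(name)) or name in comp.get('tags', [])

With it, the conditions above and below reduce to has_flag_or_tag(comp, 'thumbnail') and has_flag_or_tag(comp, 'preview').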
} - elif comp.get('preview'): + elif comp.get('preview') or ("preview" in comp.get('tags', [])): ''' Ftrack bug requirement: - Start frame must be 0 @@ -120,7 +120,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): componentList.append(component_item) # Create copy with ftrack.unmanaged location if thumb or prev - if comp.get('thumbnail') or comp.get('preview'): + if comp.get('thumbnail') or comp.get('preview') \ + or ("preview" in comp.get('tags', [])) \ + or ("thumbnail" in comp.get('tags', [])): unmanaged_loc = self.get_ftrack_location( 'ftrack.unmanaged', ft_session ) @@ -148,7 +150,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): componentList.append(component_item_src) - self.log.debug('componentsList: {}'.format(str(componentList))) instance.data["ftrackComponentsList"] = componentList diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index 34ee33f602..b2956e330e 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -2,6 +2,7 @@ import os import subprocess import pype.api import json +import pyblish class ExtractBurnin(pype.api.Extractor): @@ -14,7 +15,8 @@ class ExtractBurnin(pype.api.Extractor): """ label = "Quicktime with burnins" - families = ["burnin"] + order = pyblish.api.ExtractorOrder + 0.03 + families = ["review", "burnin"] optional = True def process(self, instance): @@ -29,25 +31,30 @@ class ExtractBurnin(pype.api.Extractor): "start_frame": int(instance.data['startFrame']), "version": "v" + str(instance.context.data['version']) } + self.log.debug("__ burnin_data1: {}".format(burnin_data)) + for i, repre in enumerate(instance.data["representations"]): + self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre)) - for repre in instance.data["representations"]: - if (not repre.get("burnin", False) or - "burnin" not in repre.get("tags", [])): + if "burnin" not in repre.get("tags", []): continue - stagingdir = self.staging_dir(instance) + stagingdir = repre["stagingDir"] filename = "{0}".format(repre["files"]) - movieFileBurnin = filename + "Burn" + ".mov" + name = "_burnin" + movieFileBurnin = filename.replace(".mov", "") + name + ".mov" full_movie_path = os.path.join(stagingdir, repre["files"]) full_burnin_path = os.path.join(stagingdir, movieFileBurnin) + self.log.debug("__ full_burnin_path: {}".format(full_burnin_path)) burnin_data = { "input": full_movie_path.replace("\\", "/"), "output": full_burnin_path.replace("\\", "/"), "burnin_data": burnin_data - } + } + + self.log.debug("__ burnin_data2: {}".format(burnin_data)) json_data = json.dumps(burnin_data) scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'], @@ -55,9 +62,19 @@ class ExtractBurnin(pype.api.Extractor): "scripts", "otio_burnin.py") - p = subprocess.Popen( - ['python', scriptpath, json_data] - ) - p.wait() + self.log.debug("__ scriptpath: {}".format(scriptpath)) - repre['files']: movieFileBurnin + try: + p = subprocess.Popen( + [os.getenv("PYPE_PYTHON_EXE"), scriptpath, json_data] + ) + p.wait() + except Exception as e: + raise RuntimeError("Burnin script didn't work: `{}`".format(e)) + + if os.path.exists(full_burnin_path): + repre_update = { + "files": movieFileBurnin, + "name": repre["name"] + name + } + instance.data["representations"][i].update(repre_update) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index 8c570d0c73..af6d59d798 100644 --- a/pype/plugins/global/publish/extract_review.py +++ 
b/pype/plugins/global/publish/extract_review.py @@ -6,14 +6,14 @@ from pypeapp import config class ExtractReview(pyblish.api.InstancePlugin): - """Resolve any dependency issies + """Extracting Review mov file for Ftrack - This plug-in resolves any paths which, if not updated might break - the published file. + Compulsory attribute of representation is tags list with "review", + otherwise the representation is ignored. - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. + All new represetnations are created and encoded by ffmpeg following + presets found in `pype-config/presets/plugins/global/publish.json:ExtractReview:outputs`. To change the file extension + filter values use preset's attributes `ext_filter` """ label = "Extract Review" @@ -30,6 +30,8 @@ class ExtractReview(pyblish.api.InstancePlugin): fps = inst_data.get("fps") start_frame = inst_data.get("startFrame") + self.log.debug("Families In: `{}`".format(instance.data["families"])) + # get representation and loop them representations = instance.data["representations"] @@ -43,46 +45,50 @@ class ExtractReview(pyblish.api.InstancePlugin): if "review" in tags: - repre_new = repre.copy() - del(repre) - - staging_dir = repre_new["stagingDir"] - - if "mov" not in repre_new['ext']: - # get output presets and loop them - collected_frames = os.listdir(staging_dir) - collections, remainder = clique.assemble( - collected_frames) - - full_input_path = os.path.join( - staging_dir, collections[0].format( - '{head}{padding}{tail}') - ) - - filename = collections[0].format('{head}') - if not filename.endswith('.'): - filename += "." - mov_file = filename + "mov" - - else: - full_input_path = os.path.join( - staging_dir, repre_new["files"]) - - filename = repre_new["files"].split(".")[0] - mov_file = filename + ".mov" - # test if the file is not the input file - if not os.path.isfile(os.path.join( - staging_dir, mov_file)): - mov_file = filename + "_.mov" - - full_output_path = os.path.join(staging_dir, mov_file) - - self.log.info("input {}".format(full_input_path)) - self.log.info("output {}".format(full_output_path)) + staging_dir = repre["stagingDir"] for name, profile in output_profiles.items(): + if "mov" not in repre['ext']: + # get output presets and loop them + collections, remainder = clique.assemble( + repre["files"]) + + full_input_path = os.path.join( + staging_dir, collections[0].format( + '{head}{padding}{tail}') + ) + + filename = collections[0].format('{head}') + if filename.endswith('.'): + filename = filename[:-1] + else: + full_input_path = os.path.join( + staging_dir, repre["files"]) + filename = repre["files"].split(".")[0] + + mov_file = filename + "_{0}.{1}".format(name, "mov") + + full_output_path = os.path.join(staging_dir, mov_file) + + self.log.info("input {}".format(full_input_path)) + self.log.info("output {}".format(full_output_path)) + + repre_new = repre.copy() + self.log.debug("Profile name: {}".format(name)) - new_tags = tags + profile.get('tags', []) + + new_tags = tags[:] + p_tags = profile.get('tags', []) + self.log.info("p_tags: `{}`".format(p_tags)) + # add families + [instance.data["families"].append(t) for t in p_tags + if t not in instance.data["families"]] + # add to + [new_tags.append(t) for t in p_tags + if t not in new_tags] + + self.log.info("new_tags: `{}`".format(new_tags)) + input_args = [] # overrides output file @@ -126,16 +132,21 
@@ class ExtractReview(pyblish.api.InstancePlugin): 'name': name, 'ext': 'mov', 'files': mov_file, - "thumbnail": False, - "preview": True, - "tags": new_tags + "tags": new_tags, + "outputName": name }) + repre_new.pop("preview") + repre_new.pop("thumbnail") # adding representation representations_new.append(repre_new) else: representations_new.append(repre) + else: + representations_new.append(repre) self.log.debug( "new representations: {}".format(representations_new)) instance.data["representations"] = representations_new + + self.log.debug("Families Out: `{}`".format(instance.data["families"])) From 37fd304026de383b399fab8a55afa9785e748cd1 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 12 Jul 2019 16:30:50 +0200 Subject: [PATCH 30/69] feat(pype): addig additional path element for representation preset names --- pype/plugins/global/publish/integrate_new.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index e70657eef9..c03e66c670 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -342,6 +342,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): template_data["representation"] = repre['ext'] + if repre.get("outputName"): + template_data["output"] = repre['outputName'] + src = os.path.join(stagingdir, fname) anatomy_filled = anatomy.format(template_data) dst = os.path.normpath( From 61ebcee43cf0f611d9210cf9c8a91f73635d195b Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 12 Jul 2019 18:00:25 +0200 Subject: [PATCH 31/69] (hotfix) collect ftrack family --- .../maya/publish/collect_ftrack_family.py | 32 +++++++++++++++++++ pype/plugins/maya/publish/collect_model.py | 6 ---- 2 files changed, 32 insertions(+), 6 deletions(-) create mode 100644 pype/plugins/maya/publish/collect_ftrack_family.py diff --git a/pype/plugins/maya/publish/collect_ftrack_family.py b/pype/plugins/maya/publish/collect_ftrack_family.py new file mode 100644 index 0000000000..b339f2ef69 --- /dev/null +++ b/pype/plugins/maya/publish/collect_ftrack_family.py @@ -0,0 +1,32 @@ +import pyblish.api + + +class CollectFtrackFamilies(pyblish.api.InstancePlugin): + """Collect model data + + Ensures always only a single frame is extracted (current frame). + + Note: + This is a workaround so that the `pype.model` family can use the + same pointcache extractor implementation as animation and pointcaches. + This always enforces the "current" frame to be published. 
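[editor's note] The collector below appends "ftrack" with an if/else on the existing key. An equivalent one-liner, shown only for comparison (sketch, assuming a pyblish instance):

    def process(self, instance):
        # create the list on first use, then append in either case
        instance.data.setdefault('families', []).append('ftrack')

Neither form guards against appending "ftrack" twice; a plugin that may run repeatedly would want a membership check first.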
+
+
+ """
+
+ order = pyblish.api.CollectorOrder + 0.3
+ label = 'Add ftrack family'
+ families = ["model",
+ "setdress",
+ "animation",
+ "workfile",
+ "look"
+ ]
+
+ def process(self, instance):
+
+ # make ftrack publishable
+ if instance.data.get('families'):
+ instance.data['families'].append('ftrack')
+ else:
+ instance.data['families'] = ['ftrack']
diff --git a/pype/plugins/maya/publish/collect_model.py b/pype/plugins/maya/publish/collect_model.py
index 0fe5f29054..b412edf1e9 100644
--- a/pype/plugins/maya/publish/collect_model.py
+++ b/pype/plugins/maya/publish/collect_model.py
@@ -24,9 +24,3 @@ class CollectModelData(pyblish.api.InstancePlugin):
 frame = cmds.currentTime(query=True)
 instance.data['startFrame'] = frame
 instance.data['endFrame'] = frame
-
- # make ftrack publishable
- if instance.data.get('families'):
- instance.data['families'].append('ftrack')
- else:
- instance.data['families'] = ['ftrack']
From a6748223cd3af0aab76b2da7c9a665c311f193e1 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 15 Jul 2019 11:50:00 +0200
Subject: [PATCH 32/69] feat(nk): adding collector and validator for active viewer process
---
 .../nuke/publish/collect_active_viewer.py | 14 ++++++++++++++
 pype/plugins/nuke/publish/extract_review.py | 2 +-
 .../nuke/publish/validate_active_viewer.py | 18 ++++++++++++++++++
 3 files changed, 33 insertions(+), 1 deletion(-)
 create mode 100644 pype/plugins/nuke/publish/collect_active_viewer.py
 create mode 100644 pype/plugins/nuke/publish/validate_active_viewer.py
diff --git a/pype/plugins/nuke/publish/collect_active_viewer.py b/pype/plugins/nuke/publish/collect_active_viewer.py
new file mode 100644
index 0000000000..3bcc1367f3
--- /dev/null
+++ b/pype/plugins/nuke/publish/collect_active_viewer.py
@@ -0,0 +1,14 @@
+import pyblish.api
+import nuke
+
+
+class CollectActiveViewer(pyblish.api.ContextPlugin):
+ """Collect the active viewer process node
+ """
+
+ order = pyblish.api.CollectorOrder + 0.3
+ label = "Collect Active Viewer"
+ hosts = ["nuke"]
+
+ def process(self, context):
+ context.data["ViewerProcess"] = nuke.ViewerProcess.node()
diff --git a/pype/plugins/nuke/publish/extract_review.py b/pype/plugins/nuke/publish/extract_review.py
index bdbd3d17a6..fee1a5f4cd 100644
--- a/pype/plugins/nuke/publish/extract_review.py
+++ b/pype/plugins/nuke/publish/extract_review.py
@@ -140,7 +140,7 @@ class ExtractDataForReview(pype.api.Extractor):
 previous_node = reformat_node
 temporary_nodes.append(reformat_node)
- viewer_process_node = nuke.ViewerProcess.node()
+ viewer_process_node = instance.context.data.get("ViewerProcess")
 dag_node = None
 if viewer_process_node:
 dag_node = nuke.createNode(viewer_process_node.Class())
diff --git a/pype/plugins/nuke/publish/validate_active_viewer.py b/pype/plugins/nuke/publish/validate_active_viewer.py
new file mode 100644
index 0000000000..bcf7cab6b3
--- /dev/null
+++ b/pype/plugins/nuke/publish/validate_active_viewer.py
@@ -0,0 +1,18 @@
+import pyblish.api
+import nuke
+
+
+class ValidateActiveViewer(pyblish.api.ContextPlugin):
+ """Validate presence of an active viewer process node
+ """
+
+ order = pyblish.api.ValidatorOrder
+ label = "Validate Active Viewer"
+ hosts = ["nuke"]
+
+ def process(self, context):
+ viewer_process_node = context.data.get("ViewerProcess")
+
+ assert viewer_process_node, (
+ "Missing active viewer process!
Please click on output write node and push key number 1-9" + ) From 76beb8bba5c4a8bdda60110fe086d527ce9a7bf1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:02:30 +0200 Subject: [PATCH 33/69] renamed users action sync hierarchical attributes to sync hierarchical attributes local --- ...ion_sync_hier_attrs.py => action_sync_hier_attrs_local.py} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename pype/ftrack/actions/{action_sync_hier_attrs.py => action_sync_hier_attrs_local.py} (98%) diff --git a/pype/ftrack/actions/action_sync_hier_attrs.py b/pype/ftrack/actions/action_sync_hier_attrs_local.py similarity index 98% rename from pype/ftrack/actions/action_sync_hier_attrs.py rename to pype/ftrack/actions/action_sync_hier_attrs_local.py index 3a884a017f..29c9f33c44 100644 --- a/pype/ftrack/actions/action_sync_hier_attrs.py +++ b/pype/ftrack/actions/action_sync_hier_attrs_local.py @@ -16,9 +16,9 @@ class SyncHierarchicalAttrs(BaseAction): ca_mongoid = lib.get_ca_mongoid() #: Action identifier. - identifier = 'sync.hierarchical.attrs' + identifier = 'sync.hierarchical.attrs.local' #: Action label. - label = 'Sync hierarchical attributes' + label = 'Sync hierarchical attributes - Local' #: Action description. description = 'Synchronize hierarchical attributes' #: Icon From 4fa113fba0d467347b0e3b8f2c13d1fbf2cdbfee Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:03:29 +0200 Subject: [PATCH 34/69] added job to sync hierarchical attrs --- .../actions/action_sync_hier_attrs_local.py | 131 +++++++++++------- 1 file changed, 78 insertions(+), 53 deletions(-) diff --git a/pype/ftrack/actions/action_sync_hier_attrs_local.py b/pype/ftrack/actions/action_sync_hier_attrs_local.py index 29c9f33c44..52c64841c2 100644 --- a/pype/ftrack/actions/action_sync_hier_attrs_local.py +++ b/pype/ftrack/actions/action_sync_hier_attrs_local.py @@ -40,67 +40,92 @@ class SyncHierarchicalAttrs(BaseAction): return False def launch(self, session, entities, event): - # Collect hierarchical attrs - custom_attributes = {} - all_avalon_attr = session.query( - 'CustomAttributeGroup where name is "avalon"' + user = session.query( + 'User where id is "{}"'.format(event['source']['user']['id']) ).one() - for cust_attr in all_avalon_attr['custom_attribute_configurations']: - if 'avalon_' in cust_attr['key']: - continue - if not cust_attr['is_hierarchical']: - continue + job = session.create('Job', { + 'user': user, + 'status': 'running', + 'data': json.dumps({ + 'description': 'Sync Hierachical attributes' + }) + }) + session.commit() - if cust_attr['default']: - self.log.warning(( - 'Custom attribute "{}" has set default value.' - ' This attribute can\'t be synchronized' - ).format(cust_attr['label'])) - continue - - custom_attributes[cust_attr['key']] = cust_attr - - if not custom_attributes: - msg = 'No hierarchical attributes to sync.' 
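[editor's note] Patches 34 and 39 wrap the sync work in a ftrack Job so the user gets progress and failure feedback. The skeleton of that pattern, condensed from this hunk (sketch; run_the_sync stands in for the original launch() body):

    job = session.create('Job', {
        'user': user,
        'status': 'running',
        'data': json.dumps({'description': 'Sync hierarchical attributes'})
    })
    session.commit()
    try:
        run_the_sync()
        job['status'] = 'done'   # editor's addition -- see note below
    except Exception:
        self.log.error('Action failed', exc_info=True)
    finally:
        if job['status'] in ('queued', 'running'):
            job['status'] = 'failed'
        session.commit()

Note that the hunk as shown never sets the job to 'done', so even a successful run ends with the finally block marking it 'failed'; the 'done' assignment above is a suggested fix, not patch content.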
- self.log.debug(msg) - return { - 'success': True, - 'message': msg - } - - entity = entities[0] - if entity.entity_type.lower() == 'project': - project_name = entity['full_name'] - else: - project_name = entity['project']['full_name'] - - self.db_con.install() - self.db_con.Session['AVALON_PROJECT'] = project_name - - for entity in entities: - for key in custom_attributes: - # check if entity has that attribute - if key not in entity['custom_attributes']: - self.log.debug( - 'Hierachical attribute "{}" not found on "{}"'.format( - key, entity.get('name', entity) - ) - ) + try: + # Collect hierarchical attrs + custom_attributes = {} + all_avalon_attr = session.query( + 'CustomAttributeGroup where name is "avalon"' + ).one() + for cust_attr in all_avalon_attr['custom_attribute_configurations']: + if 'avalon_' in cust_attr['key']: continue - value = self.get_hierarchical_value(key, entity) - if value is None: - self.log.warning( - 'Hierarchical attribute "{}" not set on "{}"'.format( - key, entity.get('name', entity) - ) - ) + if not cust_attr['is_hierarchical']: continue - self.update_hierarchical_attribute(entity, key, value) + if cust_attr['default']: + self.log.warning(( + 'Custom attribute "{}" has set default value.' + ' This attribute can\'t be synchronized' + ).format(cust_attr['label'])) + continue - self.db_con.uninstall() + custom_attributes[cust_attr['key']] = cust_attr + + if not custom_attributes: + msg = 'No hierarchical attributes to sync.' + self.log.debug(msg) + return { + 'success': True, + 'message': msg + } + + entity = entities[0] + if entity.entity_type.lower() == 'project': + project_name = entity['full_name'] + else: + project_name = entity['project']['full_name'] + + self.db_con.install() + self.db_con.Session['AVALON_PROJECT'] = project_name + + for entity in entities: + for key in custom_attributes: + # check if entity has that attribute + if key not in entity['custom_attributes']: + self.log.debug( + 'Hierachical attribute "{}" not found on "{}"'.format( + key, entity.get('name', entity) + ) + ) + continue + + value = self.get_hierarchical_value(key, entity) + if value is None: + self.log.warning( + 'Hierarchical attribute "{}" not set on "{}"'.format( + key, entity.get('name', entity) + ) + ) + continue + + self.update_hierarchical_attribute(entity, key, value) + + except Exception: + self.log.error( + 'Action "{}" failed'.format(self.label), + exc_info=True + ) + + finally: + self.db_con.uninstall() + + if job['status'] in ('queued', 'running'): + job['status'] = 'failed' + session.commit() return True From b06f3a60a2d9dce414f8fb1be171462f13336d41 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:03:59 +0200 Subject: [PATCH 35/69] sync hier attrs action is launched after sync to avalon --- pype/ftrack/actions/action_sync_to_avalon_local.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pype/ftrack/actions/action_sync_to_avalon_local.py b/pype/ftrack/actions/action_sync_to_avalon_local.py index bed28e1bef..34070c7e1f 100644 --- a/pype/ftrack/actions/action_sync_to_avalon_local.py +++ b/pype/ftrack/actions/action_sync_to_avalon_local.py @@ -6,6 +6,7 @@ import json from pype.vendor import ftrack_api from pype.ftrack import BaseAction, lib as ftracklib +from pype.vendor.ftrack_api import session as fa_session class SyncToAvalon(BaseAction): @@ -176,6 +177,18 @@ class SyncToAvalon(BaseAction): job['status'] = 'failed' session.commit() + event = fa_session.ftrack_api.event.base.Event( + topic='ftrack.action.launch', + data=dict( 
+ actionIdentifier='sync.hierarchical.attrs.local', + selection=event['data']['selection'] + ), + source=dict( + user=event['source']['user'] + ) + ) + session.event_hub.publish(event, on_error='ignore') + if len(message) > 0: message = "Unable to sync: {}".format(message) return { From 4728d6065c2a2c05b0dd83b874b6adc7945793cf Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:42:20 +0200 Subject: [PATCH 36/69] added json import --- pype/ftrack/actions/action_sync_hier_attrs_local.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pype/ftrack/actions/action_sync_hier_attrs_local.py b/pype/ftrack/actions/action_sync_hier_attrs_local.py index 52c64841c2..bb8e7c5282 100644 --- a/pype/ftrack/actions/action_sync_hier_attrs_local.py +++ b/pype/ftrack/actions/action_sync_hier_attrs_local.py @@ -1,5 +1,6 @@ import os import sys +import json import argparse import logging import collections From b7eaad01feda384e938efab95e24046d48dccb93 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:42:37 +0200 Subject: [PATCH 37/69] shortened action label --- pype/ftrack/actions/action_sync_hier_attrs_local.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/actions/action_sync_hier_attrs_local.py b/pype/ftrack/actions/action_sync_hier_attrs_local.py index bb8e7c5282..21eea19436 100644 --- a/pype/ftrack/actions/action_sync_hier_attrs_local.py +++ b/pype/ftrack/actions/action_sync_hier_attrs_local.py @@ -19,7 +19,7 @@ class SyncHierarchicalAttrs(BaseAction): #: Action identifier. identifier = 'sync.hierarchical.attrs.local' #: Action label. - label = 'Sync hierarchical attributes - Local' + label = 'Sync HierAttrs - Local' #: Action description. description = 'Synchronize hierarchical attributes' #: Icon From 7356599adb5115f671d76cb704445194f607b332 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:42:56 +0200 Subject: [PATCH 38/69] added local action icon --- pype/ftrack/actions/action_sync_hier_attrs_local.py | 2 +- res/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 res/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg diff --git a/pype/ftrack/actions/action_sync_hier_attrs_local.py b/pype/ftrack/actions/action_sync_hier_attrs_local.py index 21eea19436..c6b12028bc 100644 --- a/pype/ftrack/actions/action_sync_hier_attrs_local.py +++ b/pype/ftrack/actions/action_sync_hier_attrs_local.py @@ -23,7 +23,7 @@ class SyncHierarchicalAttrs(BaseAction): #: Action description. 
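[editor's note] Patches 33-38 keep adjusting the same few class attributes. For orientation, here is the metadata a pype BaseAction declares, gathered in one place (sketch; the class name is hypothetical, the values are the ones used by the local action in this series):

    class SyncHierAttrsLocal(BaseAction):
        identifier = 'sync.hierarchical.attrs.local'  # unique id used to launch it
        label = 'Sync HierAttrs - Local'              # shown in the ftrack UI
        description = 'Synchronize hierarchical attributes'
        icon = '{}/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg'.format(
            os.environ.get('PYPE_STATICS_SERVER', '')
        )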
description = 'Synchronize hierarchical attributes' #: Icon - icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format( + icon = '{}/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg'.format( os.environ.get('PYPE_STATICS_SERVER', '') ) diff --git a/res/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg b/res/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg new file mode 100644 index 0000000000..f58448ac06 --- /dev/null +++ b/res/ftrack/action_icons/SyncHierarchicalAttrsLocal.svg @@ -0,0 +1 @@ + From 2d0fdfbbc99ccb63c3327ad28adedc1f2c675a9c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:43:44 +0200 Subject: [PATCH 39/69] added sync hierarchical attributes action for event server --- pype/ftrack/events/action_sync_hier_attrs.py | 274 ++++++++++++++++++ .../action_icons/SyncHierarchicalAttrs.svg | 10 +- 2 files changed, 283 insertions(+), 1 deletion(-) create mode 100644 pype/ftrack/events/action_sync_hier_attrs.py diff --git a/pype/ftrack/events/action_sync_hier_attrs.py b/pype/ftrack/events/action_sync_hier_attrs.py new file mode 100644 index 0000000000..7fa024edf4 --- /dev/null +++ b/pype/ftrack/events/action_sync_hier_attrs.py @@ -0,0 +1,274 @@ +import os +import sys +import json +import argparse +import logging +import collections + +from pypeapp import config +from pype.vendor import ftrack_api +from pype.ftrack import BaseAction, lib +from avalon.tools.libraryloader.io_nonsingleton import DbConnector +from bson.objectid import ObjectId + + +class SyncHierarchicalAttrs(BaseAction): + + db_con = DbConnector() + ca_mongoid = lib.get_ca_mongoid() + + #: Action identifier. + identifier = 'sync.hierarchical.attrs' + #: Action label. + label = 'Sync HierAttrs' + #: Action description. + description = 'Synchronize hierarchical attributes' + #: Icon + icon = '{}/ftrack/action_icons/SyncHierarchicalAttrs.svg'.format( + os.environ.get( + 'PYPE_STATICS_SERVER', + 'http://localhost:{}'.format( + config.get_presets().get('services', {}).get( + 'statics_server', {} + ).get('default_port', 8021) + ) + ) + ) + + def register(self): + self.session.event_hub.subscribe( + 'topic=ftrack.action.discover', + self._discover + ) + + self.session.event_hub.subscribe( + 'topic=ftrack.action.launch and data.actionIdentifier={}'.format( + self.identifier + ), + self._launch + ) + + def discover(self, session, entities, event): + ''' Validation ''' + role_check = False + discover = False + role_list = ['Pypeclub', 'Administrator', 'Project Manager'] + user = session.query( + 'User where id is "{}"'.format(event['source']['user']['id']) + ).one() + + for role in user['user_security_roles']: + if role['security_role']['name'] in role_list: + role_check = True + break + print(self.icon) + if role_check is True: + for entity in entities: + context_type = entity.get('context_type', '').lower() + if ( + context_type in ('show', 'task') and + entity.entity_type.lower() != 'task' + ): + discover = True + break + + return discover + + def launch(self, session, entities, event): + user = session.query( + 'User where id is "{}"'.format(event['source']['user']['id']) + ).one() + + job = session.create('Job', { + 'user': user, + 'status': 'running', + 'data': json.dumps({ + 'description': 'Sync Hierachical attributes' + }) + }) + session.commit() + + try: + # Collect hierarchical attrs + custom_attributes = {} + all_avalon_attr = session.query( + 'CustomAttributeGroup where name is "avalon"' + ).one() + for cust_attr in all_avalon_attr['custom_attribute_configurations']: + if 'avalon_' in 
cust_attr['key']: + continue + + if not cust_attr['is_hierarchical']: + continue + + if cust_attr['default']: + self.log.warning(( + 'Custom attribute "{}" has set default value.' + ' This attribute can\'t be synchronized' + ).format(cust_attr['label'])) + continue + + custom_attributes[cust_attr['key']] = cust_attr + + if not custom_attributes: + msg = 'No hierarchical attributes to sync.' + self.log.debug(msg) + return { + 'success': True, + 'message': msg + } + + entity = entities[0] + if entity.entity_type.lower() == 'project': + project_name = entity['full_name'] + else: + project_name = entity['project']['full_name'] + + self.db_con.install() + self.db_con.Session['AVALON_PROJECT'] = project_name + + for entity in entities: + for key in custom_attributes: + # check if entity has that attribute + if key not in entity['custom_attributes']: + self.log.debug( + 'Hierachical attribute "{}" not found on "{}"'.format( + key, entity.get('name', entity) + ) + ) + continue + + value = self.get_hierarchical_value(key, entity) + if value is None: + self.log.warning( + 'Hierarchical attribute "{}" not set on "{}"'.format( + key, entity.get('name', entity) + ) + ) + continue + + self.update_hierarchical_attribute(entity, key, value) + + except Exception: + self.log.error( + 'Action "{}" failed'.format(self.label), + exc_info=True + ) + + finally: + self.db_con.uninstall() + + if job['status'] in ('queued', 'running'): + job['status'] = 'failed' + session.commit() + + return True + + def get_hierarchical_value(self, key, entity): + value = entity['custom_attributes'][key] + if ( + value is not None or + entity.entity_type.lower() == 'project' + ): + return value + + return self.get_hierarchical_value(key, entity['parent']) + + def update_hierarchical_attribute(self, entity, key, value): + if ( + entity['context_type'].lower() not in ('show', 'task') or + entity.entity_type.lower() == 'task' + ): + return + # collect entity's custom attributes + custom_attributes = entity.get('custom_attributes') + if not custom_attributes: + return + + mongoid = custom_attributes.get(self.ca_mongoid) + if not mongoid: + self.log.debug('Entity "{}" is not synchronized to avalon.'.format( + entity.get('name', entity) + )) + return + + try: + mongoid = ObjectId(mongoid) + except Exception: + self.log.warning('Entity "{}" has stored invalid MongoID.'.format( + entity.get('name', entity) + )) + return + # Find entity in Mongo DB + mongo_entity = self.db_con.find_one({'_id': mongoid}) + if not mongo_entity: + self.log.warning( + 'Entity "{}" is not synchronized to avalon.'.format( + entity.get('name', entity) + ) + ) + return + + # Change value if entity has set it's own + entity_value = custom_attributes[key] + if entity_value is not None: + value = entity_value + + data = mongo_entity.get('data') or {} + + data[key] = value + self.db_con.update_many( + {'_id': mongoid}, + {'$set': {'data': data}} + ) + + for child in entity.get('children', []): + self.update_hierarchical_attribute(child, key, value) + + +def register(session, **kw): + '''Register plugin. Called when used as an plugin.''' + + if not isinstance(session, ftrack_api.session.Session): + return + + SyncHierarchicalAttrs(session).register() + + +def main(arguments=None): + '''Set up logging and register action.''' + if arguments is None: + arguments = [] + + parser = argparse.ArgumentParser() + # Allow setting of logging level from arguments. 
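[editor's note] The loop below builds the name-to-level map by hand; the same map as a single comprehension (equivalent sketch):

    logging_levels = {
        logging.getLevelName(level).lower(): level
        for level in (logging.NOTSET, logging.DEBUG, logging.INFO,
                      logging.WARNING, logging.ERROR, logging.CRITICAL)
    }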
+ loggingLevels = {} + for level in ( + logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, + logging.ERROR, logging.CRITICAL + ): + loggingLevels[logging.getLevelName(level).lower()] = level + + parser.add_argument( + '-v', '--verbosity', + help='Set the logging output verbosity.', + choices=loggingLevels.keys(), + default='info' + ) + namespace = parser.parse_args(arguments) + + # Set up basic logging + logging.basicConfig(level=loggingLevels[namespace.verbosity]) + + session = ftrack_api.Session() + register(session) + + # Wait for events + logging.info( + 'Registered actions and listening for events. Use Ctrl-C to abort.' + ) + session.event_hub.wait() + + +if __name__ == '__main__': + raise SystemExit(main(sys.argv[1:])) diff --git a/res/ftrack/action_icons/SyncHierarchicalAttrs.svg b/res/ftrack/action_icons/SyncHierarchicalAttrs.svg index 0c59189168..8b7953299f 100644 --- a/res/ftrack/action_icons/SyncHierarchicalAttrs.svg +++ b/res/ftrack/action_icons/SyncHierarchicalAttrs.svg @@ -1 +1,9 @@ - \ No newline at end of file + + + + + + + + + From 3e894ec7b014ee6de4d1cfa24ddef72b3ef50359 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:45:40 +0200 Subject: [PATCH 40/69] sync to avalon action launches sync shierarcdhical attrs when finishes --- pype/ftrack/events/action_sync_to_avalon.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py index f1a5b37f36..46a25caea5 100644 --- a/pype/ftrack/events/action_sync_to_avalon.py +++ b/pype/ftrack/events/action_sync_to_avalon.py @@ -5,6 +5,7 @@ import logging import json from pype.vendor import ftrack_api from pype.ftrack import BaseAction, lib +from pype.vendor.ftrack_api import session as fa_session class Sync_To_Avalon(BaseAction): @@ -70,7 +71,7 @@ class Sync_To_Avalon(BaseAction): ''' Validation ''' roleCheck = False discover = False - roleList = ['Administrator', 'Project Manager'] + roleList = ['Pypeclub', 'Administrator', 'Project Manager'] userId = event['source']['user']['id'] user = session.query('User where id is ' + userId).one() @@ -191,6 +192,24 @@ class Sync_To_Avalon(BaseAction): ' - Please check Log for more information' ) + finally: + if job['status'] in ['queued', 'running']: + job['status'] = 'failed' + + session.commit() + + event = fa_session.ftrack_api.event.base.Event( + topic='ftrack.action.launch', + data=dict( + actionIdentifier='sync.hierarchical.attrs', + selection=event['data']['selection'] + ), + source=dict( + user=event['source']['user'] + ) + ) + session.event_hub.publish(event, on_error='ignore') + if len(message) > 0: message = "Unable to sync: {}".format(message) return { From 1b3f72936ec97df3c6f990ec00eb2aca1d0a66a5 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 15 Jul 2019 12:45:58 +0200 Subject: [PATCH 41/69] icon "fix" --- pype/ftrack/events/action_sync_to_avalon.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/pype/ftrack/events/action_sync_to_avalon.py b/pype/ftrack/events/action_sync_to_avalon.py index 46a25caea5..e78b209fac 100644 --- a/pype/ftrack/events/action_sync_to_avalon.py +++ b/pype/ftrack/events/action_sync_to_avalon.py @@ -3,6 +3,8 @@ import sys import argparse import logging import json + +from pypeapp import config from pype.vendor import ftrack_api from pype.ftrack import BaseAction, lib from pype.vendor.ftrack_api import session as fa_session @@ -51,7 +53,14 @@ class Sync_To_Avalon(BaseAction): 
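[editor's note] Patch 40 above makes the event-server sync action finish by launching the hierarchical-attributes action through the event hub, mirroring what patch 35 did for the local variant. The chaining pattern in isolation (sketch; session and event are the ones available inside launch()):

    chained = ftrack_api.event.base.Event(
        topic='ftrack.action.launch',
        data=dict(
            actionIdentifier='sync.hierarchical.attrs',  # or the '.local' id
            selection=event['data']['selection']
        ),
        source=dict(user=event['source']['user'])
    )
    session.event_hub.publish(chained, on_error='ignore')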
description = 'Send data from Ftrack to Avalon' #: Action icon. icon = '{}/ftrack/action_icons/SyncToAvalon.svg'.format( - os.environ.get('PYPE_STATICS_SERVER', '') + os.environ.get( + 'PYPE_STATICS_SERVER', + 'http://localhost:{}'.format( + config.get_presets().get('services', {}).get( + 'statics_server', {} + ).get('default_port', 8021) + ) + ) ) def register(self): From 91021668d24bc824f6b00b4c4872aea5fe4778b8 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 15 Jul 2019 17:04:14 +0200 Subject: [PATCH 42/69] feat(nks): adding workfile version plugins - collect, validate, export --- pype/api.py | 1 + .../publish/collect_current_file.py | 4 - .../publish/collect_workfile_version.py | 15 ++++ .../nukestudio/publish/validate_version.py | 74 +++++++++++++++++++ .../nukestudio/publish/version_up_workfile.py | 23 ++++++ 5 files changed, 113 insertions(+), 4 deletions(-) create mode 100644 pype/plugins/nukestudio/publish/collect_workfile_version.py create mode 100644 pype/plugins/nukestudio/publish/validate_version.py create mode 100644 pype/plugins/nukestudio/publish/version_up_workfile.py diff --git a/pype/api.py b/pype/api.py index 2227236fd3..bcbf5eb8de 100644 --- a/pype/api.py +++ b/pype/api.py @@ -40,6 +40,7 @@ from .templates import ( ) from .lib import ( + version_up, get_handle_irregular, get_project_data, get_asset_data, diff --git a/pype/plugins/nukestudio/publish/collect_current_file.py b/pype/plugins/nukestudio/publish/collect_current_file.py index 0c194e8d3d..c0c217dd23 100644 --- a/pype/plugins/nukestudio/publish/collect_current_file.py +++ b/pype/plugins/nukestudio/publish/collect_current_file.py @@ -1,5 +1,4 @@ import pyblish.api -import pype.api as pype class CollectCurrentFile(pyblish.api.ContextPlugin): """Inject the current working file into context""" @@ -8,10 +7,7 @@ class CollectCurrentFile(pyblish.api.ContextPlugin): def process(self, context): - """Todo, inject the current working file""" project = context.data('activeProject') context.data["currentFile"] = path = project.path() - context.data["version"] = pype.get_version_from_path(path) self.log.info("currentFile: {}".format(context.data["currentFile"])) - self.log.info("version: {}".format(context.data["version"])) diff --git a/pype/plugins/nukestudio/publish/collect_workfile_version.py b/pype/plugins/nukestudio/publish/collect_workfile_version.py new file mode 100644 index 0000000000..3904c22f52 --- /dev/null +++ b/pype/plugins/nukestudio/publish/collect_workfile_version.py @@ -0,0 +1,15 @@ +import pyblish.api +import pype.api as pype + +class CollectWorkfileVersion(pyblish.api.ContextPlugin): + """Inject the current working file version into context""" + + order = pyblish.api.CollectorOrder - 0.1 + label = "Collect workfile version" + + def process(self, context): + + project = context.data('activeProject') + path = project.path() + context.data["version"] = pype.get_version_from_path(path) + self.log.info("version: {}".format(context.data["version"])) diff --git a/pype/plugins/nukestudio/publish/validate_version.py b/pype/plugins/nukestudio/publish/validate_version.py new file mode 100644 index 0000000000..194b270d51 --- /dev/null +++ b/pype/plugins/nukestudio/publish/validate_version.py @@ -0,0 +1,74 @@ +import pyblish +from avalon import io +from pype.action import get_errored_instances_from_context +import pype.api as pype + +@pyblish.api.log +class RepairNukestudioVersionUp(pyblish.api.Action): + label = "Version Up Workfile" + on = "failed" + icon = "wrench" + + def process(self, context, plugin): + + 
errored_instances = get_errored_instances_from_context(context) + + # Apply pyblish logic to get the instances for the plug-in + instances = pyblish.api.instances_by_plugin(errored_instances, plugin) + + if instances: + project = context.data["activeProject"] + path = context.data.get("currentFile") + + new_path = pype.version_up(path) + + if project: + project.saveAs(new_path) + + self.log.info("Project workfile version was fixed") + + +class ValidateVersion(pyblish.api.InstancePlugin): + """Validate clip's versions. + + """ + + order = pyblish.api.ValidatorOrder + families = ["plate"] + label = "Validate Version" + actions = [RepairNukestudioVersionUp] + hosts = ["nukestudio"] + + def process(self, instance): + version = int(instance.data.get("version", 0)) + asset_name = instance.data.get("asset", None) + subset_name = instance.data.get("subset", None) + + assert version, "The file is missing version string! example: filename_v001.hrox `{}`" + + self.log.debug("Collected version: `{0}`".format(version)) + + found_v = 0 + try: + io.install() + project = io.find_one({"type": "project"}) + + asset = io.find_one({"type": "asset", + "name": asset_name, + "parent": project["_id"]}) + + subset = io.find_one({"type": "subset", + "parent": asset["_id"], + "name": subset_name}) + + version_db = io.find_one({ + 'type': 'version', + 'parent': subset["_id"], + 'name': version + }) or {} + found_v = version_db.get("name", 0) + self.log.debug("Found version: `{0}`".format(found_v)) + except Exception as e: + self.log.debug("Problem to get data from database for asset `{0}` subset `{1}`. Error: `{2}`".format(asset_name, subset_name, e)) + + assert (found_v != version), "Version must not be the same as in database `{0}`, Versions file: `{1}`, db: `{2}`".format(asset_name, version, found_v) diff --git a/pype/plugins/nukestudio/publish/version_up_workfile.py b/pype/plugins/nukestudio/publish/version_up_workfile.py new file mode 100644 index 0000000000..195099dd09 --- /dev/null +++ b/pype/plugins/nukestudio/publish/version_up_workfile.py @@ -0,0 +1,23 @@ +from pyblish import api +import pype.api as pype + + +class VersionUpWorkfile(api.ContextPlugin): + """Save as new workfile version""" + + order = api.IntegratorOrder + 10.1 + label = "Version-up Workfile" + hosts = ["nukestudio"] + + optional = True + active = True + + def process(self, context): + project = context.data["activeProject"] + path = context.data.get("currentFile") + new_path = pype.version_up(path) + + if project: + project.saveAs(new_path) + + self.log.info("Project workfile was versioned up") From 1c92e96b31dffb4fb645ca3fae59ec3f108b2b27 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 16 Jul 2019 15:07:31 +0200 Subject: [PATCH 43/69] (hotfix) alembic loading was breaking transforms in hierarchy due to makeIdentity code. 
this instead parents top root to newly created group untill we find a better solution --- pype/plugins/maya/load/load_alembic.py | 17 +++++++++++++---- pype/plugins/maya/load/load_model.py | 22 ++++++++++++++-------- 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/pype/plugins/maya/load/load_alembic.py b/pype/plugins/maya/load/load_alembic.py index d3d85249c5..bdec9e6b06 100644 --- a/pype/plugins/maya/load/load_alembic.py +++ b/pype/plugins/maya/load/load_alembic.py @@ -28,13 +28,22 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader): nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, - groupReference=True, - groupName="{}:{}".format(namespace, name), reference=True, returnNewNodes=True) - cmds.makeIdentity(groupName, apply=False, rotate=True, - translate=True, scale=True) + group = cmds.createNode("transform", name=groupName) + + roots = set() + for node in nodes: + try: + roots.add(cmds.ls(node, long=True)[0].split('|')[1]) + except: + pass + + cmds.parent(roots, group) + + # cmds.makeIdentity(groupName, apply=False, rotate=True, + # translate=True, scale=True) presets = config.get_presets(project=os.environ['AVALON_PROJECT']) colors = presets['plugins']['maya']['load']['colors'] diff --git a/pype/plugins/maya/load/load_model.py b/pype/plugins/maya/load/load_model.py index 913d4d9e79..e7152cf633 100644 --- a/pype/plugins/maya/load/load_model.py +++ b/pype/plugins/maya/load/load_model.py @@ -28,9 +28,7 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader): nodes = cmds.file(self.fname, namespace=namespace, reference=True, - returnNewNodes=True, - groupReference=True, - groupName=groupName) + returnNewNodes=True) cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True) @@ -171,16 +169,24 @@ class AbcModelLoader(pype.maya.plugin.ReferenceLoader): nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, - groupReference=True, - groupName=groupName, reference=True, returnNewNodes=True) namespace = cmds.referenceQuery(nodes[0], namespace=True) - groupName = "{}:{}".format(namespace, name) - cmds.makeIdentity(groupName, apply=False, rotate=True, - translate=True, scale=True) + group = cmds.createNode("transform", name=groupName) + + roots = set() + for node in nodes: + try: + roots.add(cmds.ls(node, long=True)[0].split('|')[1]) + except: + pass + + cmds.parent(roots, group) + + # cmds.makeIdentity(groupName, apply=False, rotate=True, + # translate=True, scale=True) presets = config.get_presets(project=os.environ['AVALON_PROJECT']) colors = presets['plugins']['maya']['load']['colors'] From 502d2d445ece998367330a98650d7beba1ddd707 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 16 Jul 2019 17:03:06 +0200 Subject: [PATCH 44/69] (hotfix) probles with containrise after last makeIdentity fix --- pype/plugins/maya/load/load_alembic.py | 16 +++++++------ pype/plugins/maya/load/load_model.py | 31 +++++++++++++++++++------- 2 files changed, 32 insertions(+), 15 deletions(-) diff --git a/pype/plugins/maya/load/load_alembic.py b/pype/plugins/maya/load/load_alembic.py index bdec9e6b06..51caaf6adc 100644 --- a/pype/plugins/maya/load/load_alembic.py +++ b/pype/plugins/maya/load/load_alembic.py @@ -28,22 +28,24 @@ class AbcLoader(pype.maya.plugin.ReferenceLoader): nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, + groupReference=True, + groupName=groupName, reference=True, returnNewNodes=True) - group = cmds.createNode("transform", name=groupName) - + nodes.pop(0) roots = set() for 
node in nodes: try: - roots.add(cmds.ls(node, long=True)[0].split('|')[1]) + roots.add(cmds.ls(node, long=True)[0].split('|')[2]) except: pass + cmds.parent(roots, world=True) + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + cmds.parent(roots, groupName) - cmds.parent(roots, group) - - # cmds.makeIdentity(groupName, apply=False, rotate=True, - # translate=True, scale=True) + nodes.append(groupName) presets = config.get_presets(project=os.environ['AVALON_PROJECT']) colors = presets['plugins']['maya']['load']['colors'] diff --git a/pype/plugins/maya/load/load_model.py b/pype/plugins/maya/load/load_model.py index e7152cf633..478f2e59aa 100644 --- a/pype/plugins/maya/load/load_model.py +++ b/pype/plugins/maya/load/load_model.py @@ -27,11 +27,24 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader): groupName = "{}:{}".format(namespace, name) nodes = cmds.file(self.fname, namespace=namespace, + groupReference=True, + groupName=groupName, reference=True, returnNewNodes=True) + nodes.pop(0) + roots = set() + for node in nodes: + try: + roots.add(cmds.ls(node, long=True)[0].split('|')[2]) + except: + pass + cmds.parent(roots, world=True) cmds.makeIdentity(groupName, apply=False, rotate=True, translate=True, scale=True) + cmds.parent(roots, groupName) + + nodes.append(groupName) presets = config.get_presets(project=os.environ['AVALON_PROJECT']) colors = presets['plugins']['maya']['load']['colors'] @@ -169,24 +182,26 @@ class AbcModelLoader(pype.maya.plugin.ReferenceLoader): nodes = cmds.file(self.fname, namespace=namespace, sharedReferenceFile=False, + groupReference=True, + groupName="{}:{}".format(namespace, name), reference=True, returnNewNodes=True) namespace = cmds.referenceQuery(nodes[0], namespace=True) - group = cmds.createNode("transform", name=groupName) - + nodes.pop(0) roots = set() for node in nodes: try: - roots.add(cmds.ls(node, long=True)[0].split('|')[1]) + roots.add(cmds.ls(node, long=True)[0].split('|')[2]) except: pass + cmds.parent(roots, world=True) + cmds.makeIdentity(groupName, apply=False, rotate=True, + translate=True, scale=True) + cmds.parent(roots, groupName) - cmds.parent(roots, group) - - # cmds.makeIdentity(groupName, apply=False, rotate=True, - # translate=True, scale=True) + nodes.append(groupName) presets = config.get_presets(project=os.environ['AVALON_PROJECT']) colors = presets['plugins']['maya']['load']['colors'] @@ -198,7 +213,7 @@ class AbcModelLoader(pype.maya.plugin.ReferenceLoader): self[:] = nodes - return nodes + return roots def switch(self, container, representation): self.update(container, representation) From bc277d5ad27a6e470e4aa0b8565a2ecd2497d666 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Jul 2019 18:49:45 +0200 Subject: [PATCH 45/69] fix(nks): distributing handles didnt respect individual clip with tag, renaming collect_tag_types to *_main --- pype/plugins/nukestudio/publish/collect_handles.py | 2 +- .../publish/{collect_tag_types.py => collect_tag_main.py} | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) rename pype/plugins/nukestudio/publish/{collect_tag_types.py => collect_tag_main.py} (86%) diff --git a/pype/plugins/nukestudio/publish/collect_handles.py b/pype/plugins/nukestudio/publish/collect_handles.py index 104a60d02c..03652989b8 100644 --- a/pype/plugins/nukestudio/publish/collect_handles.py +++ b/pype/plugins/nukestudio/publish/collect_handles.py @@ -41,7 +41,7 @@ class CollectClipHandles(api.ContextPlugin): }) for instance in filtered_instances: - if not 
instance.data.get("main") or not instance.data.get("handleTag"): + if not instance.data.get("main") and not instance.data.get("handleTag"): self.log.debug("Synchronize handles on: `{}`".format( instance.data["name"])) name = instance.data["asset"] diff --git a/pype/plugins/nukestudio/publish/collect_tag_types.py b/pype/plugins/nukestudio/publish/collect_tag_main.py similarity index 86% rename from pype/plugins/nukestudio/publish/collect_tag_types.py rename to pype/plugins/nukestudio/publish/collect_tag_main.py index fad9e54735..36d9b95554 100644 --- a/pype/plugins/nukestudio/publish/collect_tag_types.py +++ b/pype/plugins/nukestudio/publish/collect_tag_main.py @@ -5,7 +5,7 @@ class CollectClipTagTypes(api.InstancePlugin): """Collect Types from Tags of selected track items.""" order = api.CollectorOrder + 0.012 - label = "Collect Plate Type from Tag" + label = "Collect main flag" hosts = ["nukestudio"] families = ['clip'] @@ -25,7 +25,8 @@ class CollectClipTagTypes(api.InstancePlugin): t_subset.capitalize()) if "plateMain" in subset_name: - instance.data["main"] = True + if not instance.data.get("main"): + instance.data["main"] = True self.log.info("`plateMain` found in instance.name: `{}`".format( instance.data["name"])) return From b07bf3d20f4e5340f90132f1c12f96b70cefcd1c Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 16 Jul 2019 18:50:35 +0200 Subject: [PATCH 46/69] fix(global): debug print after loop --- pype/plugins/global/publish/integrate_new.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index e70657eef9..1b6c203343 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -286,8 +286,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): os.path.normpath( anatomy_filled[template_name]["path"]) ) - self.log.debug( - "test_dest_files: {}".format(str(test_dest_files))) + + self.log.debug( + "test_dest_files: {}".format(str(test_dest_files))) dst_collections, remainder = clique.assemble(test_dest_files) dst_collection = dst_collections[0] From 1d5a2b874e6bddf1b891019598a7a742798ac852 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 17 Jul 2019 09:37:45 +0200 Subject: [PATCH 47/69] (hotfix) more model import fixes --- pype/plugins/maya/load/load_model.py | 126 ++++++++++++++------------- 1 file changed, 65 insertions(+), 61 deletions(-) diff --git a/pype/plugins/maya/load/load_model.py b/pype/plugins/maya/load/load_model.py index 478f2e59aa..a886c6f644 100644 --- a/pype/plugins/maya/load/load_model.py +++ b/pype/plugins/maya/load/load_model.py @@ -9,7 +9,7 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader): """Load the model""" families = ["model"] - representations = ["ma"] + representations = ["ma", "abc"] tool_names = ["loader"] label = "Reference Model" @@ -25,14 +25,18 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader): with maya.maintained_selection(): groupName = "{}:{}".format(namespace, name) + cmds.loadPlugin("AbcImport.mll", quiet=True) nodes = cmds.file(self.fname, namespace=namespace, + sharedReferenceFile=False, groupReference=True, - groupName=groupName, + groupName="{}:{}".format(namespace, name), reference=True, returnNewNodes=True) - nodes.pop(0) + namespace = cmds.referenceQuery(nodes[0], namespace=True) + + nodes.pop(1) roots = set() for node in nodes: try: @@ -54,9 +58,9 @@ class ModelLoader(pype.maya.plugin.ReferenceLoader): cmds.setAttr(groupName + ".outlinerColor", c[0], 
c[1], c[2]) - self[:] = nodes + self[:] = nodes - return nodes + return nodes def switch(self, container, representation): self.update(container, representation) @@ -161,59 +165,59 @@ class GpuCacheLoader(api.Loader): pass -class AbcModelLoader(pype.maya.plugin.ReferenceLoader): - """Specific loader of Alembic for the studio.animation family""" - - families = ["model"] - representations = ["abc"] - tool_names = ["loader"] - - label = "Reference Model" - order = -10 - icon = "code-fork" - color = "orange" - - def process_reference(self, context, name, namespace, data): - - import maya.cmds as cmds - - groupName = "{}:{}".format(namespace, name) - cmds.loadPlugin("AbcImport.mll", quiet=True) - nodes = cmds.file(self.fname, - namespace=namespace, - sharedReferenceFile=False, - groupReference=True, - groupName="{}:{}".format(namespace, name), - reference=True, - returnNewNodes=True) - - namespace = cmds.referenceQuery(nodes[0], namespace=True) - - nodes.pop(0) - roots = set() - for node in nodes: - try: - roots.add(cmds.ls(node, long=True)[0].split('|')[2]) - except: - pass - cmds.parent(roots, world=True) - cmds.makeIdentity(groupName, apply=False, rotate=True, - translate=True, scale=True) - cmds.parent(roots, groupName) - - nodes.append(groupName) - - presets = config.get_presets(project=os.environ['AVALON_PROJECT']) - colors = presets['plugins']['maya']['load']['colors'] - c = colors.get('model') - if c is not None: - cmds.setAttr(groupName + ".useOutlinerColor", 1) - cmds.setAttr(groupName + ".outlinerColor", - c[0], c[1], c[2]) - - self[:] = nodes - - return roots - - def switch(self, container, representation): - self.update(container, representation) +# class AbcModelLoader(pype.maya.plugin.ReferenceLoader): +# """Specific loader of Alembic for the studio.animation family""" +# +# families = ["model"] +# representations = ["abc"] +# tool_names = ["loader"] +# +# label = "Reference Model" +# order = -10 +# icon = "code-fork" +# color = "orange" +# +# def process_reference(self, context, name, namespace, data): +# +# import maya.cmds as cmds +# +# groupName = "{}:{}".format(namespace, name) +# cmds.loadPlugin("AbcImport.mll", quiet=True) +# nodes = cmds.file(self.fname, +# namespace=namespace, +# sharedReferenceFile=False, +# groupReference=True, +# groupName="{}:{}".format(namespace, name), +# reference=True, +# returnNewNodes=True) +# +# namespace = cmds.referenceQuery(nodes[0], namespace=True) +# +# nodes.pop(0) +# roots = set() +# for node in nodes: +# try: +# roots.add(cmds.ls(node, long=True)[0].split('|')[2]) +# except: +# pass +# cmds.parent(roots, world=True) +# cmds.makeIdentity(groupName, apply=False, rotate=True, +# translate=True, scale=True) +# cmds.parent(roots, groupName) +# +# nodes.append(groupName) +# +# presets = config.get_presets(project=os.environ['AVALON_PROJECT']) +# colors = presets['plugins']['maya']['load']['colors'] +# c = colors.get('model') +# if c is not None: +# cmds.setAttr(groupName + ".useOutlinerColor", 1) +# cmds.setAttr(groupName + ".outlinerColor", +# c[0], c[1], c[2]) +# +# self[:] = nodes +# +# return nodes +# +# def switch(self, container, representation): +# self.update(container, representation) From 37a4a4af3d6657e21072ba781d412f109eace60b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 17 Jul 2019 10:21:39 +0200 Subject: [PATCH 48/69] fix(pype): supporting search for version with `.v001` --- pype/templates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/templates.py b/pype/templates.py index 
7d12801a00..0bc5fc9d99 100644
--- a/pype/templates.py
+++ b/pype/templates.py
@@ -85,7 +85,7 @@ def get_version_from_path(file):
 v: version number in string ('001')
 """
- pattern = re.compile(r"_v([0-9]*)")
+ pattern = re.compile(r"[\.\_]v([0-9]*)")
 try:
 v = pattern.findall(file)[0]
 return v
From 460abbd74e6fa75bb29a6c67386b8e829cd03832 Mon Sep 17 00:00:00 2001
From: jezschaj
Date: Wed, 17 Jul 2019 11:34:08 +0200
Subject: [PATCH 49/69] fix(nuke): get version with dot
---
 pype/templates.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pype/templates.py b/pype/templates.py
index 7d12801a00..596bf8d661 100644
--- a/pype/templates.py
+++ b/pype/templates.py
@@ -85,7 +85,7 @@ def get_version_from_path(file):
 v: version number in string ('001')
 """
- pattern = re.compile(r"_v([0-9]*)")
+ pattern = re.compile(r"[\._]v([0-9]*)")
 try:
 v = pattern.findall(file)[0]
 return v
From 1fca72ce8ce05a982b72be977460b48a10619e28 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Wed, 17 Jul 2019 13:44:39 +0200
Subject: [PATCH 50/69] feat(ftrack): user (de)assignment will run configurable shell scripts
---
 pype/ftrack/events/event_user_assigment.py | 239 +++++++++++++++++++++
 1 file changed, 239 insertions(+)
 create mode 100644 pype/ftrack/events/event_user_assigment.py
diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py
new file mode 100644
index 0000000000..ea2d3c93b0
--- /dev/null
+++ b/pype/ftrack/events/event_user_assigment.py
@@ -0,0 +1,239 @@
+from pype.vendor import ftrack_api
+from pype.ftrack import BaseEvent, lib
+from avalon.tools.libraryloader.io_nonsingleton import DbConnector
+from bson.objectid import ObjectId
+from pypeapp import config
+from pypeapp import Anatomy
+import subprocess
+import os
+import re
+
+
+class UserAssigmentEvent(BaseEvent):
+ """
+ This handler intercepts user assignment / de-assignment events and
+ runs shell scripts, providing as much context as possible.
+
+ It expects the configuration file ``presets/ftrack/user_assigment_event.json``.
+ In it, you define paths to the scripts to run on user assignment and
+ on user de-assignment::
+ {
+ "add": [
+ "/path/to/script1",
+ "/path/to/script2"
+ ],
+ "remove": [
+ "/path/to/script3",
+ "/path/to/script4"
+ ]
+ }
+
+ Those scripts are executed in shell (see the editor's note just below).
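[editor's note] The _run_script helper defined further down executes each script as subprocess.call([script, args], shell=True), passing the whole argument list as a single nested element while shell=True is set. If the three separate arguments described in this docstring are the intent, a safer equivalent would be (sketch):

    import subprocess

    def run_script(script, args):
        # args is e.g. [username, work_dir, publish_dir]; passing the
        # items individually avoids shell quoting problems entirely
        return subprocess.call([script] + list(args))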
+    """
+
+    db_con = DbConnector()
+    ca_mongoid = lib.get_ca_mongoid()
+
+    def error(self, *err):
+        for e in err:
+            self.log.error(e, exc_info=True)
+
+    def _run_script(self, script, args):
+        """
+        Run a shell script with arguments as a subprocess
+
+        :param script: script path
+        :type script: str
+        :param args: list of arguments passed to the script
+        :type args: list
+        :returns: return code
+        :rtype: int
+        """
+        # flatten the command so the script receives each argument
+        # individually instead of a single nested list
+        p = subprocess.call([script] + args, shell=True)
+        return p
+
+    def _get_task_and_user(self, session, action, changes):
+        """
+        Get Task and User entities from the Ftrack session
+
+        :param session: ftrack session
+        :type session: ftrack_api.session
+        :param action: event action
+        :type action: str
+        :param changes: what was changed by the event
+        :type changes: dict
+        :returns: User and Task entities
+        :rtype: tuple
+        """
+        if not changes:
+            return None, None
+
+        if action == 'add':
+            task_id = changes.get('context_id', {}).get('new')
+            user_id = changes.get('resource_id', {}).get('new')
+
+        elif action == 'remove':
+            task_id = changes.get('context_id', {}).get('old')
+            user_id = changes.get('resource_id', {}).get('old')
+
+        if not task_id:
+            return None, None
+
+        if not user_id:
+            return None, None
+
+        task = session.query('Task where id is "{}"'.format(task_id)).one()
+        user = session.query('User where id is "{}"'.format(user_id)).one()
+
+        return task, user
+
+    def _get_asset(self, task):
+        """
+        Get the asset from a task entity
+
+        :param task: Task entity
+        :type task: dict
+        :returns: Asset entity
+        :rtype: dict
+        """
+        parent = task['parent']
+        self.db_con.install()
+        self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']
+
+        avalon_entity = None
+        parent_id = parent['custom_attributes'].get(self.ca_mongoid)
+        if parent_id:
+            parent_id = ObjectId(parent_id)
+            avalon_entity = self.db_con.find_one({
+                '_id': parent_id,
+                'type': 'asset'
+            })
+
+        if not avalon_entity:
+            avalon_entity = self.db_con.find_one({
+                'type': 'asset',
+                'name': parent['name']
+            })
+
+        if not avalon_entity:
+            self.db_con.uninstall()
+            msg = 'Entity "{}" not found in avalon database'.format(
+                parent['name']
+            )
+            self.error(msg)
+            return {
+                'success': False,
+                'message': msg
+            }
+        self.db_con.uninstall()
+        return avalon_entity
+
+    def _get_hierarchy(self, asset):
+        """
+        Get the hierarchy from an Asset entity
+
+        :param asset: Asset entity
+        :type asset: dict
+        :returns: hierarchy string
+        :rtype: str
+        """
+        return asset['data']['hierarchy']
+
+    def _get_template_data(self, task):
+        """
+        Get data to fill the anatomy template from a task
+
+        .. seealso:: :mod:`pypeapp.Anatomy`
+
+        :param task: Task entity
+        :type task: dict
+        :returns: data for the anatomy template
+        :rtype: dict
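+
+        Example of the returned structure (all values illustrative)::
+
+            {
+                "root": "/mnt/projects",
+                "project": {"name": "MyProject", "code": "myproj"},
+                "asset": "sh010",
+                "task": "compositing",
+                "hierarchy": "episodes/ep01"
+            }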
+        """
+        project_name = task['project']['full_name']
+        project_code = task['project']['name']
+        try:
+            root = os.environ['PYPE_STUDIO_PROJECTS_PATH']
+        except KeyError:
+            msg = 'Project ({}) root not set'.format(project_name)
+            self.log.error(msg)
+            return {
+                'success': False,
+                'message': msg
+            }
+
+        # fill in template data
+        asset = self._get_asset(task)
+        t_data = {
+            'root': root,
+            'project': {
+                'name': project_name,
+                'code': project_code
+            },
+            'asset': asset['name'],
+            'task': task['name'],
+            'hierarchy': self._get_hierarchy(asset)
+        }
+
+        return t_data
+
+    def launch(self, session, event):
+        # load shell script presets
+        presets = config.get_presets()['ftrack']["user_assigment_event"]
+        if not presets:
+            return
+        for entity in event.get('data', {}).get('entities', []):
+            if entity.get('entity_type') != 'Appointment':
+                continue
+
+            task, user = self._get_task_and_user(session,
+                                                 entity.get('action'),
+                                                 entity.get('changes'))
+
+            if not task or not user:
+                self.log.error('Task or User was not found.')
+                continue
+
+            data = self._get_template_data(task)
+            # format directories to pass to the shell script
+            anatomy = Anatomy()
+            # formatting the work dir is the easy part, we can use the whole path
+            work_dir = anatomy.format(data)['avalon']['work']
+            # we also need the publish dir, but only a part of it
+            publish = anatomy.format_all(data)['partial']['avalon']['publish']
+            # now find the path up to {asset}
+            m = re.search("(^.+?{})".format(data['asset']),
+                          publish)
+
+            if not m:
+                msg = 'Cannot get part of publish path {}'.format(publish)
+                self.log.error(msg)
+                return {
+                    'success': False,
+                    'message': msg
+                }
+            publish_dir = m.group(1)
+
+            for script in presets.get(entity.get('action')):
+                self.log.info(
+                    '[{}] : running script for user {}'.format(
+                        entity.get('action'), user["username"]))
+                self._run_script(script, [user["username"],
+                                          work_dir, publish_dir])
+
+        return True
+
+
+def register(session, **kw):
+    """
+    Register plugin. Called when used as a plugin.
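+
+    A minimal usage sketch (assumes a valid ftrack session)::
+
+        import ftrack_api
+
+        session = ftrack_api.Session(auto_connect_event_hub=True)
+        register(session)
+        session.event_hub.wait()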
+ """ + if not isinstance(session, ftrack_api.session.Session): + return + + UserAssigmentEvent(session).register() From e75cabd8010085ac550e6767900ea93206068d64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Wed, 17 Jul 2019 14:24:04 +0200 Subject: [PATCH 51/69] fix: added project resolution for Anatomy --- pype/ftrack/events/event_user_assigment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py index ea2d3c93b0..80a985c2a0 100644 --- a/pype/ftrack/events/event_user_assigment.py +++ b/pype/ftrack/events/event_user_assigment.py @@ -201,7 +201,7 @@ class UserAssigmentEvent(BaseEvent): data = self._get_template_data(task) # format directories to pass to shell script - anatomy = Anatomy() + anatomy = Anatomy(data["project"]["name"]) # formatting work dir is easiest part as we can use whole path work_dir = anatomy.format(data)['avalon']['work'] # we also need publish but not whole From 615afe938267ccc9ac263b2650cc9902e9451a59 Mon Sep 17 00:00:00 2001 From: jezschaj Date: Wed, 17 Jul 2019 14:44:23 +0200 Subject: [PATCH 52/69] fix(glob): burnin and review didn't work properly --- pype/plugins/global/publish/extract_burnin.py | 5 ++++- pype/plugins/global/publish/extract_review.py | 12 ++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index b2956e330e..425db087d3 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -62,13 +62,16 @@ class ExtractBurnin(pype.api.Extractor): "scripts", "otio_burnin.py") - self.log.debug("__ scriptpath: {}".format(scriptpath)) + self.log.debug("Burnin scriptpath: {}".format(scriptpath)) try: p = subprocess.Popen( [os.getenv("PYPE_PYTHON_EXE"), scriptpath, json_data] ) p.wait() + if not os.path.isfile(full_burnin_path): + self.log.error( + "Burnin file wasn't created succesfully") except Exception as e: raise RuntimeError("Burnin script didn't work: `{}`".format(e)) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index af6d59d798..98013c518a 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -48,6 +48,10 @@ class ExtractReview(pyblish.api.InstancePlugin): staging_dir = repre["stagingDir"] for name, profile in output_profiles.items(): + ext = profile.get("ext", None) + if not ext: + ext = "mov" + self.log.warning("`ext` attribute not in output profile. 
                     if "mov" not in repre['ext']:
                         # get output presets and loop them
                         collections, remainder = clique.assemble(
                             repre["files"])
@@ -66,9 +70,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
                             staging_dir, repre["files"])
                         filename = repre["files"].split(".")[0]
 
-                        mov_file = filename + "_{0}.{1}".format(name, "mov")
+                        repr_file = filename + "_{0}.{1}".format(name, ext)
 
-                        full_output_path = os.path.join(staging_dir, mov_file)
+                        full_output_path = os.path.join(staging_dir, repr_file)
 
                         self.log.info("input {}".format(full_input_path))
                         self.log.info("output {}".format(full_output_path))
@@ -130,8 +134,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
                         # create representation data
                         repre_new.update({
                             'name': name,
-                            'ext': 'mov',
-                            'files': mov_file,
+                            'ext': ext,
+                            'files': repr_file,
                             "tags": new_tags,
                             "outputName": name
                         })

From cab44138cb3caade3bb350520d38743c5204c89d Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Wed, 17 Jul 2019 15:26:16 +0200
Subject: [PATCH 53/69] simplify maya model reference loader, add proxy
 classes for backwards compatibility and remove dots from extensions to
 prevent double dots in names

---
 pype/plugins/maya/load/load_alembic.py        |  63 -----
 pype/plugins/maya/load/load_gpucache.py       | 105 +++++++++
 pype/plugins/maya/load/load_model.py          | 223 ------------------
 pype/plugins/maya/load/load_reference.py      |  85 +++++++
 .../plugins/maya/publish/extract_animation.py |   2 +-
 pype/plugins/maya/publish/extract_ass.py      |   2 +-
 pype/plugins/maya/publish/extract_assproxy.py |   2 +-
 .../maya/publish/extract_camera_alembic.py    |   2 +-
 .../maya/publish/extract_camera_mayaAscii.py  |   2 +-
 pype/plugins/maya/publish/extract_fbx.py      |   2 +-
 .../maya/publish/extract_maya_ascii_raw.py    |   2 +-
 pype/plugins/maya/publish/extract_model.py    |   2 +-
 .../maya/publish/extract_pointcache.py        |   2 +-
 .../maya/publish/extract_rendersetup.py       |   2 +-
 pype/plugins/maya/publish/extract_rig.py      |   2 +-
 .../plugins/maya/publish/extract_thumbnail.py |   2 +-
 .../plugins/maya/publish/extract_vrayproxy.py |   2 +-
 17 files changed, 203 insertions(+), 299 deletions(-)
 delete mode 100644 pype/plugins/maya/load/load_alembic.py
 create mode 100644 pype/plugins/maya/load/load_gpucache.py
 delete mode 100644 pype/plugins/maya/load/load_model.py
 create mode 100644 pype/plugins/maya/load/load_reference.py

diff --git a/pype/plugins/maya/load/load_alembic.py b/pype/plugins/maya/load/load_alembic.py
deleted file mode 100644
index 51caaf6adc..0000000000
--- a/pype/plugins/maya/load/load_alembic.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import pype.maya.plugin
-import os
-from pypeapp import config
-
-
-class AbcLoader(pype.maya.plugin.ReferenceLoader):
-    """Specific loader of Alembic for the pype.animation family"""
-
-    families = ["animation",
-                "pointcache"]
-    label = "Reference animation"
-    representations = ["abc"]
-    order = -10
-    icon = "code-fork"
-    color = "orange"
-
-    def process_reference(self, context, name, namespace, data):
-
-        import maya.cmds as cmds
-
-        try:
-            family = context["representation"]["context"]["family"]
-        except ValueError:
-            family = "animation"
-
-        groupName = "{}:{}".format(namespace, name)
-        cmds.loadPlugin("AbcImport.mll", quiet=True)
-        nodes = cmds.file(self.fname,
-                          namespace=namespace,
-                          sharedReferenceFile=False,
-                          groupReference=True,
-                          groupName=groupName,
-                          reference=True,
-                          returnNewNodes=True)
-
-        nodes.pop(0)
-        roots = set()
-        for node in nodes:
-            try:
-                roots.add(cmds.ls(node, long=True)[0].split('|')[2])
-            except:
-                pass
-        cmds.parent(roots, world=True)
-        cmds.makeIdentity(groupName, apply=False,
rotate=True, - translate=True, scale=True) - cmds.parent(roots, groupName) - - nodes.append(groupName) - - presets = config.get_presets(project=os.environ['AVALON_PROJECT']) - colors = presets['plugins']['maya']['load']['colors'] - c = colors.get(family) - if c is not None: - cmds.setAttr(groupName + ".useOutlinerColor", 1) - cmds.setAttr(groupName + ".outlinerColor", - c[0], c[1], c[2]) - - self[:] = nodes - - return nodes - - def switch(self, container, representation): - self.update(container, representation) diff --git a/pype/plugins/maya/load/load_gpucache.py b/pype/plugins/maya/load/load_gpucache.py new file mode 100644 index 0000000000..b98ca8b7f4 --- /dev/null +++ b/pype/plugins/maya/load/load_gpucache.py @@ -0,0 +1,105 @@ +from avalon import api +import pype.maya.plugin +import os +from pypeapp import config +import pymel.core as pm +reload(config) + + +class GpuCacheLoader(api.Loader): + """Load model Alembic as gpuCache""" + + families = ["model"] + representations = ["abc"] + + label = "Import Gpu Cache" + order = -5 + icon = "code-fork" + color = "orange" + + def load(self, context, name, namespace, data): + + import maya.cmds as cmds + import avalon.maya.lib as lib + from avalon.maya.pipeline import containerise + + asset = context['asset']['name'] + namespace = namespace or lib.unique_namespace( + asset + "_", + prefix="_" if asset[0].isdigit() else "", + suffix="_", + ) + + cmds.loadPlugin("gpuCache", quiet=True) + + # Root group + label = "{}:{}".format(namespace, name) + root = cmds.group(name=label, empty=True) + + presets = config.get_presets(project=os.environ['AVALON_PROJECT']) + colors = presets['plugins']['maya']['load']['colors'] + c = colors.get('model') + if c is not None: + cmds.setAttr(root + ".useOutlinerColor", 1) + cmds.setAttr(root + ".outlinerColor", + c[0], c[1], c[2]) + + # Create transform with shape + transform_name = label + "_GPU" + transform = cmds.createNode("transform", name=transform_name, + parent=root) + cache = cmds.createNode("gpuCache", + parent=transform, + name="{0}Shape".format(transform_name)) + + # Set the cache filepath + cmds.setAttr(cache + '.cacheFileName', self.fname, type="string") + cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root + + # Lock parenting of the transform and cache + cmds.lockNode([transform, cache], lock=True) + + nodes = [root, transform, cache] + self[:] = nodes + + return containerise( + name=name, + namespace=namespace, + nodes=nodes, + context=context, + loader=self.__class__.__name__) + + def update(self, container, representation): + + import maya.cmds as cmds + + path = api.get_representation_path(representation) + + # Update the cache + members = cmds.sets(container['objectName'], query=True) + caches = cmds.ls(members, type="gpuCache", long=True) + + assert len(caches) == 1, "This is a bug" + + for cache in caches: + cmds.setAttr(cache + ".cacheFileName", path, type="string") + + cmds.setAttr(container["objectName"] + ".representation", + str(representation["_id"]), + type="string") + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + import maya.cmds as cmds + members = cmds.sets(container['objectName'], query=True) + cmds.lockNode(members, lock=False) + cmds.delete([container['objectName']] + members) + + # Clean up the namespace + try: + cmds.namespace(removeNamespace=container['namespace'], + deleteNamespaceContent=True) + except RuntimeError: + pass diff --git a/pype/plugins/maya/load/load_model.py 
b/pype/plugins/maya/load/load_model.py deleted file mode 100644 index a886c6f644..0000000000 --- a/pype/plugins/maya/load/load_model.py +++ /dev/null @@ -1,223 +0,0 @@ -from avalon import api -import pype.maya.plugin -import os -from pypeapp import config -reload(config) - - -class ModelLoader(pype.maya.plugin.ReferenceLoader): - """Load the model""" - - families = ["model"] - representations = ["ma", "abc"] - tool_names = ["loader"] - - label = "Reference Model" - order = -10 - icon = "code-fork" - color = "orange" - - def process_reference(self, context, name, namespace, data): - - import maya.cmds as cmds - from avalon import maya - - with maya.maintained_selection(): - - groupName = "{}:{}".format(namespace, name) - cmds.loadPlugin("AbcImport.mll", quiet=True) - nodes = cmds.file(self.fname, - namespace=namespace, - sharedReferenceFile=False, - groupReference=True, - groupName="{}:{}".format(namespace, name), - reference=True, - returnNewNodes=True) - - namespace = cmds.referenceQuery(nodes[0], namespace=True) - - nodes.pop(1) - roots = set() - for node in nodes: - try: - roots.add(cmds.ls(node, long=True)[0].split('|')[2]) - except: - pass - cmds.parent(roots, world=True) - cmds.makeIdentity(groupName, apply=False, rotate=True, - translate=True, scale=True) - cmds.parent(roots, groupName) - - nodes.append(groupName) - - presets = config.get_presets(project=os.environ['AVALON_PROJECT']) - colors = presets['plugins']['maya']['load']['colors'] - c = colors.get('model') - if c is not None: - cmds.setAttr(groupName + ".useOutlinerColor", 1) - cmds.setAttr(groupName + ".outlinerColor", - c[0], c[1], c[2]) - - self[:] = nodes - - return nodes - - def switch(self, container, representation): - self.update(container, representation) - - -class GpuCacheLoader(api.Loader): - """Load model Alembic as gpuCache""" - - families = ["model"] - representations = ["abc"] - - label = "Import Gpu Cache" - order = -5 - icon = "code-fork" - color = "orange" - - def load(self, context, name, namespace, data): - - import maya.cmds as cmds - import avalon.maya.lib as lib - from avalon.maya.pipeline import containerise - - asset = context['asset']['name'] - namespace = namespace or lib.unique_namespace( - asset + "_", - prefix="_" if asset[0].isdigit() else "", - suffix="_", - ) - - cmds.loadPlugin("gpuCache", quiet=True) - - # Root group - label = "{}:{}".format(namespace, name) - root = cmds.group(name=label, empty=True) - - presets = config.get_presets(project=os.environ['AVALON_PROJECT']) - colors = presets['plugins']['maya']['load']['colors'] - c = colors.get('model') - if c is not None: - cmds.setAttr(root + ".useOutlinerColor", 1) - cmds.setAttr(root + ".outlinerColor", - c[0], c[1], c[2]) - - # Create transform with shape - transform_name = label + "_GPU" - transform = cmds.createNode("transform", name=transform_name, - parent=root) - cache = cmds.createNode("gpuCache", - parent=transform, - name="{0}Shape".format(transform_name)) - - # Set the cache filepath - cmds.setAttr(cache + '.cacheFileName', self.fname, type="string") - cmds.setAttr(cache + '.cacheGeomPath', "|", type="string") # root - - # Lock parenting of the transform and cache - cmds.lockNode([transform, cache], lock=True) - - nodes = [root, transform, cache] - self[:] = nodes - - return containerise( - name=name, - namespace=namespace, - nodes=nodes, - context=context, - loader=self.__class__.__name__) - - def update(self, container, representation): - - import maya.cmds as cmds - - path = api.get_representation_path(representation) - - # 
Update the cache - members = cmds.sets(container['objectName'], query=True) - caches = cmds.ls(members, type="gpuCache", long=True) - - assert len(caches) == 1, "This is a bug" - - for cache in caches: - cmds.setAttr(cache + ".cacheFileName", path, type="string") - - cmds.setAttr(container["objectName"] + ".representation", - str(representation["_id"]), - type="string") - - def switch(self, container, representation): - self.update(container, representation) - - def remove(self, container): - import maya.cmds as cmds - members = cmds.sets(container['objectName'], query=True) - cmds.lockNode(members, lock=False) - cmds.delete([container['objectName']] + members) - - # Clean up the namespace - try: - cmds.namespace(removeNamespace=container['namespace'], - deleteNamespaceContent=True) - except RuntimeError: - pass - - -# class AbcModelLoader(pype.maya.plugin.ReferenceLoader): -# """Specific loader of Alembic for the studio.animation family""" -# -# families = ["model"] -# representations = ["abc"] -# tool_names = ["loader"] -# -# label = "Reference Model" -# order = -10 -# icon = "code-fork" -# color = "orange" -# -# def process_reference(self, context, name, namespace, data): -# -# import maya.cmds as cmds -# -# groupName = "{}:{}".format(namespace, name) -# cmds.loadPlugin("AbcImport.mll", quiet=True) -# nodes = cmds.file(self.fname, -# namespace=namespace, -# sharedReferenceFile=False, -# groupReference=True, -# groupName="{}:{}".format(namespace, name), -# reference=True, -# returnNewNodes=True) -# -# namespace = cmds.referenceQuery(nodes[0], namespace=True) -# -# nodes.pop(0) -# roots = set() -# for node in nodes: -# try: -# roots.add(cmds.ls(node, long=True)[0].split('|')[2]) -# except: -# pass -# cmds.parent(roots, world=True) -# cmds.makeIdentity(groupName, apply=False, rotate=True, -# translate=True, scale=True) -# cmds.parent(roots, groupName) -# -# nodes.append(groupName) -# -# presets = config.get_presets(project=os.environ['AVALON_PROJECT']) -# colors = presets['plugins']['maya']['load']['colors'] -# c = colors.get('model') -# if c is not None: -# cmds.setAttr(groupName + ".useOutlinerColor", 1) -# cmds.setAttr(groupName + ".outlinerColor", -# c[0], c[1], c[2]) -# -# self[:] = nodes -# -# return nodes -# -# def switch(self, container, representation): -# self.update(container, representation) diff --git a/pype/plugins/maya/load/load_reference.py b/pype/plugins/maya/load/load_reference.py new file mode 100644 index 0000000000..199d79c941 --- /dev/null +++ b/pype/plugins/maya/load/load_reference.py @@ -0,0 +1,85 @@ +from avalon import api +import pype.maya.plugin +import os +from pypeapp import config +import pymel.core as pm +reload(config) + + +class ReferenceLoader(pype.maya.plugin.ReferenceLoader): + """Load the model""" + + families = ["model", "pointcache", "animation"] + representations = ["ma", "abc"] + tool_names = ["loader"] + + label = "Reference" + order = -10 + icon = "code-fork" + color = "orange" + + def process_reference(self, context, name, namespace, data): + + import maya.cmds as cmds + from avalon import maya + + try: + family = context["representation"]["context"]["family"] + except ValueError: + family = "model" + + with maya.maintained_selection(): + + groupName = "{}:{}".format(namespace, name) + cmds.loadPlugin("AbcImport.mll", quiet=True) + nodes = cmds.file(self.fname, + namespace=namespace, + sharedReferenceFile=False, + groupReference=True, + groupName="{}:{}".format(namespace, name), + reference=True, + returnNewNodes=True) + + namespace = 
cmds.referenceQuery(nodes[0], namespace=True) + + groupNode = pm.PyNode(groupName) + roots = set() + print(nodes) + + for node in nodes: + try: + roots.add(pm.PyNode(node).getAllParents()[-2]) + except: + pass + for root in roots: + root.setParent(world=True) + + groupNode.root().zeroTransformPivots() + for root in roots: + root.setParent(groupNode) + + presets = config.get_presets(project=os.environ['AVALON_PROJECT']) + colors = presets['plugins']['maya']['load']['colors'] + c = colors.get(family) + if c is not None: + groupNode.useOutlinerColor.set(1) + groupNode.outlinerColor.set(c[0], c[1], c[2]) + + self[:] = nodes + + return nodes + + def switch(self, container, representation): + self.update(container, representation) + +# for backwards compatibility +class AbcLoader(ReferenceLoader): + families = ["pointcache", "animation"] + representations = ["abc"] + tool_names = [] + +# for backwards compatibility +class ModelLoader(ReferenceLoader): + families = ["model", "pointcache"] + representations = ["abc"] + tool_names = [] diff --git a/pype/plugins/maya/publish/extract_animation.py b/pype/plugins/maya/publish/extract_animation.py index 30d5dae92b..794a80e7a6 100644 --- a/pype/plugins/maya/publish/extract_animation.py +++ b/pype/plugins/maya/publish/extract_animation.py @@ -82,7 +82,7 @@ class ExtractAnimation(pype.api.Extractor): representation = { 'name': 'abc', - 'ext': '.abc', + 'ext': 'abc', 'files': filename, "stagingDir": dirname, } diff --git a/pype/plugins/maya/publish/extract_ass.py b/pype/plugins/maya/publish/extract_ass.py index 0c7ef02b4b..1fed6c8dd7 100644 --- a/pype/plugins/maya/publish/extract_ass.py +++ b/pype/plugins/maya/publish/extract_ass.py @@ -42,7 +42,7 @@ class ExtractAssStandin(pype.api.Extractor): representation = { 'name': 'ass', - 'ext': '.ass', + 'ext': 'ass', 'files': filename, "stagingDir": staging_dir } diff --git a/pype/plugins/maya/publish/extract_assproxy.py b/pype/plugins/maya/publish/extract_assproxy.py index bc807be9b0..34c3113e11 100644 --- a/pype/plugins/maya/publish/extract_assproxy.py +++ b/pype/plugins/maya/publish/extract_assproxy.py @@ -68,7 +68,7 @@ class ExtractAssProxy(pype.api.Extractor): representation = { 'name': 'ma', - 'ext': '.ma', + 'ext': 'ma', 'files': filename, "stagingDir": stagingdir } diff --git a/pype/plugins/maya/publish/extract_camera_alembic.py b/pype/plugins/maya/publish/extract_camera_alembic.py index 01239fd1e8..77e055daa6 100644 --- a/pype/plugins/maya/publish/extract_camera_alembic.py +++ b/pype/plugins/maya/publish/extract_camera_alembic.py @@ -75,7 +75,7 @@ class ExtractCameraAlembic(pype.api.Extractor): representation = { 'name': 'abc', - 'ext': '.abc', + 'ext': 'abc', 'files': filename, "stagingDir": dir_path, } diff --git a/pype/plugins/maya/publish/extract_camera_mayaAscii.py b/pype/plugins/maya/publish/extract_camera_mayaAscii.py index 152acb98fe..cafee6593d 100644 --- a/pype/plugins/maya/publish/extract_camera_mayaAscii.py +++ b/pype/plugins/maya/publish/extract_camera_mayaAscii.py @@ -173,7 +173,7 @@ class ExtractCameraMayaAscii(pype.api.Extractor): representation = { 'name': 'ma', - 'ext': '.ma', + 'ext': 'ma', 'files': filename, "stagingDir": dir_path, } diff --git a/pype/plugins/maya/publish/extract_fbx.py b/pype/plugins/maya/publish/extract_fbx.py index 93a99eea72..73d56f9a2c 100644 --- a/pype/plugins/maya/publish/extract_fbx.py +++ b/pype/plugins/maya/publish/extract_fbx.py @@ -213,7 +213,7 @@ class ExtractFBX(pype.api.Extractor): representation = { 'name': 'mov', - 'ext': '.mov', + 'ext': 'mov', 
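+            # extension is stored without a leading dot from here on: the
+            # integrator composes published file names itself, so values
+            # like '.mov' produced double dots in the names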
'files': filename, "stagingDir": stagingDir, } diff --git a/pype/plugins/maya/publish/extract_maya_ascii_raw.py b/pype/plugins/maya/publish/extract_maya_ascii_raw.py index c8f10d5b9b..895b6acbfe 100644 --- a/pype/plugins/maya/publish/extract_maya_ascii_raw.py +++ b/pype/plugins/maya/publish/extract_maya_ascii_raw.py @@ -56,7 +56,7 @@ class ExtractMayaAsciiRaw(pype.api.Extractor): representation = { 'name': 'ma', - 'ext': '.ma', + 'ext': 'ma', 'files': filename, "stagingDir": dir_path } diff --git a/pype/plugins/maya/publish/extract_model.py b/pype/plugins/maya/publish/extract_model.py index f6d9681222..d6e5e66c23 100644 --- a/pype/plugins/maya/publish/extract_model.py +++ b/pype/plugins/maya/publish/extract_model.py @@ -74,7 +74,7 @@ class ExtractModel(pype.api.Extractor): representation = { 'name': 'ma', - 'ext': '.ma', + 'ext': 'ma', 'files': filename, "stagingDir": stagingdir, } diff --git a/pype/plugins/maya/publish/extract_pointcache.py b/pype/plugins/maya/publish/extract_pointcache.py index 907dfe0e18..0879a4bfe3 100644 --- a/pype/plugins/maya/publish/extract_pointcache.py +++ b/pype/plugins/maya/publish/extract_pointcache.py @@ -84,7 +84,7 @@ class ExtractAlembic(pype.api.Extractor): representation = { 'name': 'abc', - 'ext': '.abc', + 'ext': 'abc', 'files': filename, "stagingDir": dirname } diff --git a/pype/plugins/maya/publish/extract_rendersetup.py b/pype/plugins/maya/publish/extract_rendersetup.py index b8dbfc178e..c8d8db0bbb 100644 --- a/pype/plugins/maya/publish/extract_rendersetup.py +++ b/pype/plugins/maya/publish/extract_rendersetup.py @@ -30,7 +30,7 @@ class ExtractRenderSetup(pype.api.Extractor): representation = { 'name': 'json', - 'ext': '.json', + 'ext': 'json', 'files': json_filename, "stagingDir": parent_dir, } diff --git a/pype/plugins/maya/publish/extract_rig.py b/pype/plugins/maya/publish/extract_rig.py index 713d5e2b59..c98e562313 100644 --- a/pype/plugins/maya/publish/extract_rig.py +++ b/pype/plugins/maya/publish/extract_rig.py @@ -39,7 +39,7 @@ class ExtractRig(pype.api.Extractor): representation = { 'name': 'ma', - 'ext': '.ma', + 'ext': 'ma', 'files': filename, "stagingDir": dir_path } diff --git a/pype/plugins/maya/publish/extract_thumbnail.py b/pype/plugins/maya/publish/extract_thumbnail.py index 4bc1d91174..e47915c4cf 100644 --- a/pype/plugins/maya/publish/extract_thumbnail.py +++ b/pype/plugins/maya/publish/extract_thumbnail.py @@ -137,7 +137,7 @@ class ExtractThumbnail(pype.api.Extractor): representation = { 'name': 'thumbnail', - 'ext': '.jpg', + 'ext': 'jpg', 'files': thumbnail, "stagingDir": stagingDir, "thumbnail": True diff --git a/pype/plugins/maya/publish/extract_vrayproxy.py b/pype/plugins/maya/publish/extract_vrayproxy.py index b2c84db22b..dcaa910730 100644 --- a/pype/plugins/maya/publish/extract_vrayproxy.py +++ b/pype/plugins/maya/publish/extract_vrayproxy.py @@ -59,7 +59,7 @@ class ExtractVRayProxy(pype.api.Extractor): representation = { 'name': 'vrmesh', - 'ext': '.vrmesh', + 'ext': 'vrmesh', 'files': file_name, "stagingDir": staging_dir, } From 149b295156f0ba9e6a395e0c504ac62d9b1c33c3 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Wed, 17 Jul 2019 15:38:22 +0200 Subject: [PATCH 54/69] removing another dot --- pype/plugins/maya/publish/collect_scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/maya/publish/collect_scene.py b/pype/plugins/maya/publish/collect_scene.py index 88c9ed7a47..c1e3c75021 100644 --- a/pype/plugins/maya/publish/collect_scene.py +++ b/pype/plugins/maya/publish/collect_scene.py @@ 
-42,7 +42,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
 
         data['representations'] = [{
             'name': 'ma',
-            'ext': '.ma',
+            'ext': 'ma',
             'files': file,
             "stagingDir": folder,
         }]

From 73fe101afecaba8a8ed61073ec14349021bb9d18 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Samohel?=
Date: Wed, 17 Jul 2019 15:41:25 +0200
Subject: [PATCH 55/69] fix: removed handling of tracebacks

---
 pype/ftrack/events/event_user_assigment.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py
index 80a985c2a0..0bb7f21590 100644
--- a/pype/ftrack/events/event_user_assigment.py
+++ b/pype/ftrack/events/event_user_assigment.py
@@ -40,7 +40,7 @@ class UserAssigmentEvent(BaseEvent):
 
     def error(self, *err):
         for e in err:
-            self.log.error(e, exc_info=True)
+            self.log.error(e)
 
     def _run_script(self, script, args):
         """

From c4c3252b33adf477c5b1bf02fa8728105bfdb332 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Thu, 18 Jul 2019 09:59:57 +0200
Subject: [PATCH 56/69] adding maya support

---
 .../_publish_unused/extract_quicktime.py      | 86 +++++++++++++++++++
 .../global/publish/extract_quicktime.py       | 86 -------------------
 .../plugins/maya/publish/extract_quicktime.py | 49 +----------
 3 files changed, 89 insertions(+), 132 deletions(-)
 create mode 100644 pype/plugins/global/_publish_unused/extract_quicktime.py
 delete mode 100644 pype/plugins/global/publish/extract_quicktime.py

diff --git a/pype/plugins/global/_publish_unused/extract_quicktime.py b/pype/plugins/global/_publish_unused/extract_quicktime.py
new file mode 100644
index 0000000000..6a33d825d0
--- /dev/null
+++ b/pype/plugins/global/_publish_unused/extract_quicktime.py
@@ -0,0 +1,86 @@
+import os
+import pyblish.api
+import subprocess
+from pype.vendor import clique
+
+
+class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
+    """Resolve any dependency issues
+
+    This plug-in resolves any paths which, if not updated, might break
+    the published file.
+
+    The order of families is important: when working with lookdev you want to
+    first publish the texture, update the texture paths in the nodes and then
+    publish the shading network. Same goes for file dependent assets.
+    """
+
+    label = "Extract Quicktime"
+    order = pyblish.api.ExtractorOrder
+    families = ["imagesequence", "render", "write", "source"]
+    hosts = ["shell"]
+
+    def process(self, instance):
+        # fps = instance.data.get("fps")
+        # start = instance.data.get("startFrame")
+        # stagingdir = os.path.normpath(instance.data.get("stagingDir"))
+        #
+        # collected_frames = os.listdir(stagingdir)
+        # collections, remainder = clique.assemble(collected_frames)
+        #
+        # full_input_path = os.path.join(
+        #     stagingdir, collections[0].format('{head}{padding}{tail}')
+        # )
+        # self.log.info("input {}".format(full_input_path))
+        #
+        # filename = collections[0].format('{head}')
+        # if not filename.endswith('.'):
+        #     filename += "."
+ # movFile = filename + "mov" + # full_output_path = os.path.join(stagingdir, movFile) + # + # self.log.info("output {}".format(full_output_path)) + # + # config_data = instance.context.data['output_repre_config'] + # + # proj_name = os.environ.get('AVALON_PROJECT', '__default__') + # profile = config_data.get(proj_name, config_data['__default__']) + # + # input_args = [] + # # overrides output file + # input_args.append("-y") + # # preset's input data + # input_args.extend(profile.get('input', [])) + # # necessary input data + # input_args.append("-start_number {}".format(start)) + # input_args.append("-i {}".format(full_input_path)) + # input_args.append("-framerate {}".format(fps)) + # + # output_args = [] + # # preset's output data + # output_args.extend(profile.get('output', [])) + # # output filename + # output_args.append(full_output_path) + # mov_args = [ + # "ffmpeg", + # " ".join(input_args), + # " ".join(output_args) + # ] + # subprocess_mov = " ".join(mov_args) + # sub_proc = subprocess.Popen(subprocess_mov) + # sub_proc.wait() + # + # if not os.path.isfile(full_output_path): + # raise("Quicktime wasn't created succesfully") + # + # if "representations" not in instance.data: + # instance.data["representations"] = [] + # + # representation = { + # 'name': 'mov', + # 'ext': 'mov', + # 'files': movFile, + # "stagingDir": stagingdir, + # "preview": True + # } + # instance.data["representations"].append(representation) diff --git a/pype/plugins/global/publish/extract_quicktime.py b/pype/plugins/global/publish/extract_quicktime.py deleted file mode 100644 index b6ccf38385..0000000000 --- a/pype/plugins/global/publish/extract_quicktime.py +++ /dev/null @@ -1,86 +0,0 @@ -import os -import pyblish.api -import subprocess -from pype.vendor import clique - - -class ExtractQuicktimeEXR(pyblish.api.InstancePlugin): - """Resolve any dependency issies - - This plug-in resolves any paths which, if not updated might break - the published file. - - The order of families is important, when working with lookdev you want to - first publish the texture, update the texture paths in the nodes and then - publish the shading network. Same goes for file dependent assets. - """ - - label = "Extract Quicktime" - order = pyblish.api.ExtractorOrder - families = ["imagesequence", "render", "write", "source"] - hosts = ["shell"] - - def process(self, instance): - fps = instance.data.get("fps") - start = instance.data.get("startFrame") - stagingdir = os.path.normpath(instance.data.get("stagingDir")) - - collected_frames = os.listdir(stagingdir) - collections, remainder = clique.assemble(collected_frames) - - full_input_path = os.path.join( - stagingdir, collections[0].format('{head}{padding}{tail}') - ) - self.log.info("input {}".format(full_input_path)) - - filename = collections[0].format('{head}') - if not filename.endswith('.'): - filename += "." 
- movFile = filename + "mov" - full_output_path = os.path.join(stagingdir, movFile) - - self.log.info("output {}".format(full_output_path)) - - config_data = instance.context.data['output_repre_config'] - - proj_name = os.environ.get('AVALON_PROJECT', '__default__') - profile = config_data.get(proj_name, config_data['__default__']) - - input_args = [] - # overrides output file - input_args.append("-y") - # preset's input data - input_args.extend(profile.get('input', [])) - # necessary input data - input_args.append("-start_number {}".format(start)) - input_args.append("-i {}".format(full_input_path)) - input_args.append("-framerate {}".format(fps)) - - output_args = [] - # preset's output data - output_args.extend(profile.get('output', [])) - # output filename - output_args.append(full_output_path) - mov_args = [ - "ffmpeg", - " ".join(input_args), - " ".join(output_args) - ] - subprocess_mov = " ".join(mov_args) - sub_proc = subprocess.Popen(subprocess_mov) - sub_proc.wait() - - if not os.path.isfile(full_output_path): - raise("Quicktime wasn't created succesfully") - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'mov', - 'ext': 'mov', - 'files': movFile, - "stagingDir": stagingdir, - "preview": True - } - instance.data["representations"].append(representation) diff --git a/pype/plugins/maya/publish/extract_quicktime.py b/pype/plugins/maya/publish/extract_quicktime.py index ff08799c0a..87608af641 100644 --- a/pype/plugins/maya/publish/extract_quicktime.py +++ b/pype/plugins/maya/publish/extract_quicktime.py @@ -99,7 +99,6 @@ class ExtractQuicktime(pype.api.Extractor): playblast = capture_gui.lib.capture_scene(preset) self.log.info("file list {}".format(playblast)) - # self.log.info("Calculating HUD data overlay") collected_frames = os.listdir(stagingdir) collections, remainder = clique.assemble(collected_frames) @@ -107,61 +106,19 @@ class ExtractQuicktime(pype.api.Extractor): stagingdir, collections[0].format('{head}{padding}{tail}')) self.log.info("input {}".format(input_path)) - movieFile = filename + ".mov" - movieFileBurnin = filename + "Burn" + ".mov" - - full_movie_path = os.path.join(stagingdir, movieFile) - full_burnin_path = os.path.join(stagingdir, movieFileBurnin) - self.log.info("output {}".format(full_movie_path)) - with avalon.maya.suspended_refresh(): - try: - ( - ffmpeg - .input(input_path, framerate=fps, start_number=int(start)) - .output(full_movie_path) - .run(overwrite_output=True, - capture_stdout=True, - capture_stderr=True) - ) - except ffmpeg.Error as e: - ffmpeg_error = 'ffmpeg error: {}'.format(e.stderr) - self.log.error(ffmpeg_error) - raise RuntimeError(ffmpeg_error) - - version = instance.context.data['version'] - - burnin_data = { - "input": full_movie_path.replace("\\", "/"), - "output": full_burnin_path.replace("\\", "/"), - "burnin_data": { - "username": instance.context.data['user'], - "asset": os.environ['AVALON_ASSET'], - "task": os.environ['AVALON_TASK'], - "start_frame": int(start), - "version": "v" + str(version) - } - } - - json_data = json.dumps(burnin_data) - scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'], "pype", "scripts", "otio_burnin.py") - - p = subprocess.Popen( - ['python', scriptpath, json_data] - ) - p.wait() - if "representations" not in instance.data: instance.data["representations"] = [] representation = { 'name': 'mov', 'ext': 'mov', - 'files': movieFileBurnin, + 'files': collected_frames, "stagingDir": stagingdir, 'startFrame': start, 'endFrame': end, 
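+            # the raw playblast frames are handed off with a 'review' tag so
+            # the global review and burnin extractors can encode them later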
             'frameRate': fps,
-            'preview': True
+            'preview': True,
+            'tags': ['review']
         }
         instance.data["representations"].append(representation)

From f77ba219d92de4bbaa8e9f062ddb49a4037b09c3 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Thu, 18 Jul 2019 10:00:20 +0200
Subject: [PATCH 57/69] adding deadline support for the new review plugins

---
 .../global/publish/collect_filesequences.py       | 15 +++++++++++----
 pype/plugins/global/publish/submit_publish_job.py |  2 +-
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index ad128c099b..5c3914aa41 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -6,6 +6,7 @@ from pprint import pformat
 
 import pyblish.api
 from avalon import api
+import pype.api as pype
 
 
 def collect(root,
@@ -64,7 +65,7 @@ def collect(root,
     return collections
 
 
-class CollectFileSequences(pyblish.api.ContextPlugin):
+class CollectRenderedFrames(pyblish.api.ContextPlugin):
     """Gather file sequences from working directory
 
     When "FILESEQUENCE" environment variable is set these paths (folders or
@@ -87,7 +88,7 @@ class CollectFileSequences(pyblish.api.ContextPlugin):
 
     order = pyblish.api.CollectorOrder
     targets = ["filesequence"]
-    label = "File Sequences"
+    label = "RenderedFrames"
 
     def process(self, context):
         if os.environ.get("PYPE_PUBLISH_PATHS"):
@@ -128,6 +129,9 @@
                     self.log.info("setting session using metadata")
                     api.Session.update(session)
                     os.environ.update(session)
+
+                version = data.get("version")
+                context.data['version'] = version
             else:
                 # Search in directory
                 data = dict()
@@ -161,6 +165,7 @@
             assert isinstance(families, (list, tuple)), "Must be iterable"
             assert families, "Must have at least a single family"
             families.append("ftrack")
+            families.append("review")
             for collection in collections:
                 instance = context.create_instance(str(collection))
                 self.log.info("Collection: %s" % list(collection))
@@ -191,7 +196,8 @@
                 "startFrame": start,
                 "endFrame": end,
                 "fps": fps,
-                "source": data.get('source', '')
+                "source": data.get('source', ''),
+                "version": version
             })
             instance.append(collection)
             instance.context.data['fps'] = fps
@@ -205,7 +211,8 @@
                 'files': list(collection),
                 "stagingDir": root,
                 "anatomy_template": "render",
-                "frameRate": fps
+                "frameRate": fps,
+                "tags": ['review']
             }
             instance.data["representations"].append(representation)

diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index 992553cc7e..057fd2362c 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -276,7 +276,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "families": ["render"],
             "source": source,
             "user": context.data["user"],
-
+            "version": context.data["version"],
             # Optional metadata (for debugging)
             "metadata": {
                 "instance": data,

From 8b4dc6332e6e79fecaecb43e542a03ac3afdd395 Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Thu, 18 Jul 2019 10:00:47 +0200
Subject: [PATCH 58/69] making sure we're sending clean paths to otio

---
 pype/plugins/global/publish/extract_burnin.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git 
a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py index b2956e330e..2e9e61ad82 100644 --- a/pype/plugins/global/publish/extract_burnin.py +++ b/pype/plugins/global/publish/extract_burnin.py @@ -44,8 +44,8 @@ class ExtractBurnin(pype.api.Extractor): name = "_burnin" movieFileBurnin = filename.replace(".mov", "") + name + ".mov" - full_movie_path = os.path.join(stagingdir, repre["files"]) - full_burnin_path = os.path.join(stagingdir, movieFileBurnin) + full_movie_path = os.path.join(os.path.normpath(stagingdir), repre["files"]) + full_burnin_path = os.path.join(os.path.normpath(stagingdir), movieFileBurnin) self.log.debug("__ full_burnin_path: {}".format(full_burnin_path)) burnin_data = { @@ -57,12 +57,13 @@ class ExtractBurnin(pype.api.Extractor): self.log.debug("__ burnin_data2: {}".format(burnin_data)) json_data = json.dumps(burnin_data) - scriptpath = os.path.join(os.environ['PYPE_MODULE_ROOT'], + scriptpath = os.path.normpath(os.path.join(os.environ['PYPE_MODULE_ROOT'], "pype", "scripts", - "otio_burnin.py") + "otio_burnin.py")) self.log.debug("__ scriptpath: {}".format(scriptpath)) + self.log.debug("__ EXE: {}".format(os.getenv("PYPE_PYTHON_EXE"))) try: p = subprocess.Popen( From 35ebde3a419863cec6c29de505880db4975f4a7d Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 18 Jul 2019 10:01:51 +0200 Subject: [PATCH 59/69] add option to filter by family in the review presets --- pype/plugins/global/publish/extract_review.py | 159 +++++++++--------- 1 file changed, 82 insertions(+), 77 deletions(-) diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py index af6d59d798..62a2eb0bd4 100644 --- a/pype/plugins/global/publish/extract_review.py +++ b/pype/plugins/global/publish/extract_review.py @@ -36,7 +36,7 @@ class ExtractReview(pyblish.api.InstancePlugin): representations = instance.data["representations"] # filter out mov and img sequences - representations_new = list() + representations_new = representations.copy() for repre in representations: if repre['ext'] in plugin_attrs["ext_filter"]: tags = repre.get("tags", []) @@ -44,106 +44,111 @@ class ExtractReview(pyblish.api.InstancePlugin): self.log.info("Try repre: {}".format(repre)) if "review" in tags: - staging_dir = repre["stagingDir"] - for name, profile in output_profiles.items(): - if "mov" not in repre['ext']: - # get output presets and loop them - collections, remainder = clique.assemble( - repre["files"]) + if any(item in instance.data['families'] for item in profile['families']): + if isinstance(repre["files"], list): + # if "mov" not in repre['ext']: + # get output presets and loop them + collections, remainder = clique.assemble( + repre["files"]) - full_input_path = os.path.join( - staging_dir, collections[0].format( - '{head}{padding}{tail}') - ) + full_input_path = os.path.join( + staging_dir, collections[0].format( + '{head}{padding}{tail}') + ) - filename = collections[0].format('{head}') - if filename.endswith('.'): - filename = filename[:-1] - else: - full_input_path = os.path.join( - staging_dir, repre["files"]) - filename = repre["files"].split(".")[0] + filename = collections[0].format('{head}') + if filename.endswith('.'): + filename = filename[:-1] + else: + self.log.info("1: {}".format(full_input_path)) + full_input_path = os.path.join( + staging_dir, repre["files"]) + filename = repre["files"].split(".")[0] - mov_file = filename + "_{0}.{1}".format(name, "mov") + mov_file = filename + "_{0}.{1}".format(name, "mov") 
- full_output_path = os.path.join(staging_dir, mov_file) + full_output_path = os.path.join(staging_dir, mov_file) - self.log.info("input {}".format(full_input_path)) - self.log.info("output {}".format(full_output_path)) + self.log.info("input {}".format(full_input_path)) + self.log.info("output {}".format(full_output_path)) - repre_new = repre.copy() + repre_new = repre.copy() - self.log.debug("Profile name: {}".format(name)) + self.log.debug("Profile name: {}".format(name)) - new_tags = tags[:] - p_tags = profile.get('tags', []) - self.log.info("p_tags: `{}`".format(p_tags)) - # add families - [instance.data["families"].append(t) for t in p_tags - if t not in instance.data["families"]] - # add to - [new_tags.append(t) for t in p_tags - if t not in new_tags] + new_tags = tags[:] + p_tags = profile.get('tags', []) + self.log.info("p_tags: `{}`".format(p_tags)) + # add families + [instance.data["families"].append(t) for t in p_tags + if t not in instance.data["families"]] + # add to + [new_tags.append(t) for t in p_tags + if t not in new_tags] - self.log.info("new_tags: `{}`".format(new_tags)) + self.log.info("new_tags: `{}`".format(new_tags)) - input_args = [] + input_args = [] - # overrides output file - input_args.append("-y") + # overrides output file + input_args.append("-y") - # preset's input data - input_args.extend(profile.get('input', [])) + # preset's input data + input_args.extend(profile.get('input', [])) - # necessary input data - # adds start arg only if image sequence - if "mov" not in repre_new['ext']: - input_args.append("-start_number {}".format( - start_frame)) + # necessary input data + # adds start arg only if image sequence + if "mov" not in repre_new['ext']: + input_args.append("-start_number {}".format( + start_frame)) - input_args.append("-i {}".format(full_input_path)) - input_args.append("-framerate {}".format(fps)) + input_args.append("-i {}".format(full_input_path)) + input_args.append("-framerate {}".format(fps)) - output_args = [] - # preset's output data - output_args.extend(profile.get('output', [])) + output_args = [] + # preset's output data + output_args.extend(profile.get('output', [])) - # output filename - output_args.append(full_output_path) - mov_args = [ - "ffmpeg", - " ".join(input_args), - " ".join(output_args) - ] - subprocess_mov = " ".join(mov_args) + # output filename + output_args.append(full_output_path) + mov_args = [ + "ffmpeg", + " ".join(input_args), + " ".join(output_args) + ] + subprocess_mov = " ".join(mov_args) - # run subprocess - sub_proc = subprocess.Popen(subprocess_mov) - sub_proc.wait() + # run subprocess + sub_proc = subprocess.Popen(subprocess_mov) + sub_proc.wait() - if not os.path.isfile(full_output_path): - self.log.error( - "Quicktime wasn't created succesfully") + if not os.path.isfile(full_output_path): + self.log.error( + "Quicktime wasn't created succesfully") - # create representation data - repre_new.update({ - 'name': name, - 'ext': 'mov', - 'files': mov_file, - "tags": new_tags, - "outputName": name - }) - repre_new.pop("preview") - repre_new.pop("thumbnail") + # create representation data + repre_new.update({ + 'name': name, + 'ext': 'mov', + 'files': mov_file, + "tags": new_tags, + "outputName": name + }) - # adding representation - representations_new.append(repre_new) + if repre_new.get('preview'): + repre_new.pop("preview") + if repre_new.get('thumbnail'): + repre_new.pop("thumbnail") + + # adding representation + representations_new.append(repre_new) else: - representations_new.append(repre) + continue else: 
-                representations_new.append(repre)
+                continue
+
         self.log.debug(
             "new representations: {}".format(representations_new))
         instance.data["representations"] = representations_new

From 5c819475c6edc6c18d32d032b806b1f760b9e03f Mon Sep 17 00:00:00 2001
From: Milan Kolar
Date: Thu, 18 Jul 2019 14:29:27 +0200
Subject: [PATCH 60/69] fix: allow burnin without explicit version

---
 pype/plugins/global/publish/collect_filesequences.py | 5 +----
 pype/plugins/global/publish/extract_burnin.py        | 7 ++++++-
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/pype/plugins/global/publish/collect_filesequences.py b/pype/plugins/global/publish/collect_filesequences.py
index 5c3914aa41..ed48404a98 100644
--- a/pype/plugins/global/publish/collect_filesequences.py
+++ b/pype/plugins/global/publish/collect_filesequences.py
@@ -130,8 +130,6 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                     api.Session.update(session)
                     os.environ.update(session)
 
-                version = data.get("version")
-                context.data['version'] = version
             else:
                 # Search in directory
                 data = dict()
@@ -196,8 +194,7 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
                 "startFrame": start,
                 "endFrame": end,
                 "fps": fps,
-                "source": data.get('source', ''),
-                "version": version
+                "source": data.get('source', '')
             })
             instance.append(collection)
             instance.context.data['fps'] = fps

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 2e9e61ad82..721b7e75f8 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -24,13 +24,18 @@ class ExtractBurnin(pype.api.Extractor):
             raise RuntimeError("Burnin needs already created mov to work on.")
 
         # TODO: expand burnin data list to include all usefull keys
+        version = ''
+        if instance.context.data.get('version'):
+            version = "v" + str(instance.context.data['version'])
+
         burnin_data = {
             "username": instance.context.data['user'],
             "asset": os.environ['AVALON_ASSET'],
             "task": os.environ['AVALON_TASK'],
             "start_frame": int(instance.data['startFrame']),
-            "version": "v" + str(instance.context.data['version'])
+            "version": version
         }
+
         self.log.debug("__ burnin_data1: {}".format(burnin_data))
 
         for i, repre in enumerate(instance.data["representations"]):
             self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))

From 39d03919341eb5b129c751c51ec5192bd4a9ea30 Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Thu, 18 Jul 2019 16:19:21 +0200
Subject: [PATCH 61/69] fix(nuke): review workflow and other fixes

---
 pype/nuke/lib.py                                   | 23 +++++---
 .../publish/integrate_ftrack_instances.py          |  1 +
 pype/plugins/global/publish/extract_burnin.py      | 16 +++---
 pype/plugins/global/publish/extract_review.py      | 55 ++++++++++-------
 pype/plugins/global/publish/integrate_new.py       | 48 ++++++++--------
 .../plugins/nuke/publish/collect_instances.py      |  2 +-
 pype/plugins/nuke/publish/collect_writes.py        |  9 +--
 .../nuke/publish/extract_render_local.py           | 15 ++---
 .../nuke/publish/extract_review_data.py            | 25 +++++----
 .../nuke/publish/validate_rendered_frames.py       |  9 ++-
 pype/plugins/nuke/publish/validate_script.py       |  2 +-
 11 files changed, 115 insertions(+), 90 deletions(-)

diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index a9aac47228..6a57704fff 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -343,8 +343,6 @@ def 
reset_frame_range_handles(): data = asset["data"] missing_cols = [] - check_cols = ["fstart", "fend", "handle_start", "handle_end"] + check_cols = ["fps", "fstart", "fend", "handle_start", "handle_end"] for col in check_cols: if col not in data: @@ -373,20 +371,29 @@ def reset_frame_range_handles(): handles = avalon.nuke.get_handles(asset) handle_start, handle_end = pype.get_handle_irregular(asset) - log.info("__ handles: `{}`".format(handles)) - log.info("__ handle_start: `{}`".format(handle_start)) - log.info("__ handle_end: `{}`".format(handle_end)) - + fps = asset["data"]["fps"] edit_in = int(asset["data"]["fstart"]) - handle_start edit_out = int(asset["data"]["fend"]) + handle_end + root["fps"].setValue(fps) root["first_frame"].setValue(edit_in) root["last_frame"].setValue(edit_out) + log.info("__ handles: `{}`".format(handles)) + log.info("__ handle_start: `{}`".format(handle_start)) + log.info("__ handle_end: `{}`".format(handle_end)) + log.info("__ edit_in: `{}`".format(edit_in)) + log.info("__ edit_out: `{}`".format(edit_out)) + log.info("__ fps: `{}`".format(fps)) + # setting active viewers nuke.frame(int(asset["data"]["fstart"])) - vv = nuke.activeViewer().node() + try: + vv = nuke.activeViewer().node() + except AttributeError: + log.error("No active viewer. Select any node and hit num `1`") + return range = '{0}-{1}'.format( int(asset["data"]["fstart"]), diff --git a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py index ef2ea6f6ca..02455454bb 100644 --- a/pype/plugins/ftrack/publish/integrate_ftrack_instances.py +++ b/pype/plugins/ftrack/publish/integrate_ftrack_instances.py @@ -56,6 +56,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin): component_data = { "name": "thumbnail" # Default component name is "main". 
            }
+            comp['thumbnail'] = True
         elif comp.get('preview') or ("preview" in comp.get('tags', [])):
             '''
             Ftrack bug requirement:

diff --git a/pype/plugins/global/publish/extract_burnin.py b/pype/plugins/global/publish/extract_burnin.py
index 0559325ff2..5f16cc91f2 100644
--- a/pype/plugins/global/publish/extract_burnin.py
+++ b/pype/plugins/global/publish/extract_burnin.py
@@ -28,15 +28,14 @@ class ExtractBurnin(pype.api.Extractor):
         if instance.context.data.get('version'):
             version = "v" + str(instance.context.data['version'])
 
-        burnin_data = {
+        prep_data = {
             "username": instance.context.data['user'],
             "asset": os.environ['AVALON_ASSET'],
             "task": os.environ['AVALON_TASK'],
             "start_frame": int(instance.data['startFrame']),
             "version": version
         }
-
-        self.log.debug("__ burnin_data1: {}".format(burnin_data))
+        self.log.debug("__ prep_data: {}".format(prep_data))
 
         for i, repre in enumerate(instance.data["representations"]):
             self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
@@ -56,7 +55,7 @@ class ExtractBurnin(pype.api.Extractor):
             burnin_data = {
                 "input": full_movie_path.replace("\\", "/"),
                 "output": full_burnin_path.replace("\\", "/"),
-                "burnin_data": burnin_data
+                "burnin_data": prep_data
             }
 
             self.log.debug("__ burnin_data2: {}".format(burnin_data))
@@ -76,14 +75,17 @@ class ExtractBurnin(pype.api.Extractor):
                 )
                 p.wait()
                 if not os.path.isfile(full_burnin_path):
-                    self.log.error(
-                        "Burnin file wasn't created successfully")
+                    raise RuntimeError("File does not exist: {}".format(full_burnin_path))
             except Exception as e:
                 raise RuntimeError("Burnin script didn't work: `{}`".format(e))
 
             if os.path.exists(full_burnin_path):
                 repre_update = {
                     "files": movieFileBurnin,
-                    "name": repre["name"] + name
+                    "name": repre["name"]
                 }
                 instance.data["representations"][i].update(repre_update)
+
+                # removing the source mov file
+                os.remove(full_movie_path)
+                self.log.debug("Removed: `{}`".format(full_movie_path))

diff --git a/pype/plugins/global/publish/extract_review.py b/pype/plugins/global/publish/extract_review.py
index 62a2eb0bd4..3a764b19c3 100644
--- a/pype/plugins/global/publish/extract_review.py
+++ b/pype/plugins/global/publish/extract_review.py
@@ -36,7 +36,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
         representations = instance.data["representations"]
 
         # filter out mov and img sequences
-        representations_new = representations.copy()
+        representations_new = representations[:]
         for repre in representations:
             if repre['ext'] in plugin_attrs["ext_filter"]:
                 tags = repre.get("tags", [])
@@ -46,10 +46,19 @@ class ExtractReview(pyblish.api.InstancePlugin):
                 if "review" in tags:
                     staging_dir = repre["stagingDir"]
                     for name, profile in output_profiles.items():
+                        self.log.debug("Profile name: {}".format(name))
+
+                        ext = profile.get("ext", None)
+                        if not ext:
+                            ext = "mov"
+                            self.log.warning(
+                                "`ext` attribute not in output profile. 
Setting to default ext: `mov`") + + self.log.debug("instance.families: {}".format(instance.data['families'])) + self.log.debug("profile.families: {}".format(profile['families'])) + if any(item in instance.data['families'] for item in profile['families']): if isinstance(repre["files"], list): - # if "mov" not in repre['ext']: - # get output presets and loop them collections, remainder = clique.assemble( repre["files"]) @@ -62,27 +71,26 @@ class ExtractReview(pyblish.api.InstancePlugin): if filename.endswith('.'): filename = filename[:-1] else: - self.log.info("1: {}".format(full_input_path)) full_input_path = os.path.join( staging_dir, repre["files"]) filename = repre["files"].split(".")[0] - mov_file = filename + "_{0}.{1}".format(name, "mov") + repr_file = filename + "_{0}.{1}".format(name, ext) - full_output_path = os.path.join(staging_dir, mov_file) + full_output_path = os.path.join( + staging_dir, repr_file) self.log.info("input {}".format(full_input_path)) self.log.info("output {}".format(full_output_path)) repre_new = repre.copy() - self.log.debug("Profile name: {}".format(name)) - new_tags = tags[:] p_tags = profile.get('tags', []) self.log.info("p_tags: `{}`".format(p_tags)) # add families - [instance.data["families"].append(t) for t in p_tags + [instance.data["families"].append(t) + for t in p_tags if t not in instance.data["families"]] # add to [new_tags.append(t) for t in p_tags @@ -101,16 +109,22 @@ class ExtractReview(pyblish.api.InstancePlugin): # necessary input data # adds start arg only if image sequence if "mov" not in repre_new['ext']: - input_args.append("-start_number {}".format( - start_frame)) + input_args.append("-start_number {0} -framerate {1}".format( + start_frame, fps)) input_args.append("-i {}".format(full_input_path)) - input_args.append("-framerate {}".format(fps)) output_args = [] # preset's output data output_args.extend(profile.get('output', [])) + # letter_box + # TODO: add to documentation + lb = profile.get('letter_box', None) + if lb: + output_args.append( + "-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb)) + # output filename output_args.append(full_output_path) mov_args = [ @@ -118,25 +132,25 @@ class ExtractReview(pyblish.api.InstancePlugin): " ".join(input_args), " ".join(output_args) ] - subprocess_mov = " ".join(mov_args) + subprcs_cmd = " ".join(mov_args) # run subprocess - sub_proc = subprocess.Popen(subprocess_mov) + self.log.debug("{}".format(subprcs_cmd)) + sub_proc = subprocess.Popen(subprcs_cmd) sub_proc.wait() if not os.path.isfile(full_output_path): - self.log.error( + raise FileExistsError( "Quicktime wasn't created succesfully") # create representation data repre_new.update({ 'name': name, - 'ext': 'mov', - 'files': mov_file, + 'ext': ext, + 'files': repr_file, "tags": new_tags, "outputName": name }) - if repre_new.get('preview'): repre_new.pop("preview") if repre_new.get('thumbnail'): @@ -144,12 +158,15 @@ class ExtractReview(pyblish.api.InstancePlugin): # adding representation representations_new.append(repre_new) + # if "delete" in tags: + # if "mov" in full_input_path: + # os.remove(full_input_path) + # self.log.debug("Removed: `{}`".format(full_input_path)) else: continue else: continue - self.log.debug( "new representations: {}".format(representations_new)) instance.data["representations"] = representations_new diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py 
index d9e4f3f533..e758789c37 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -99,18 +99,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # \ / # o __/ # - for result in context.data["results"]: - if not result["success"]: - self.log.debug(result) - exc_type, exc_value, exc_traceback = result["error_info"] - extracted_traceback = traceback.extract_tb(exc_traceback)[-1] - self.log.debug( - "Error at line {}: \"{}\"".format( - extracted_traceback[1], result["error"] - ) - ) - assert all(result["success"] for result in context.data["results"]), ( - "Atomicity not held, aborting.") + # for result in context.data["results"]: + # if not result["success"]: + # self.log.debug(result) + # exc_type, exc_value, exc_traceback = result["error_info"] + # extracted_traceback = traceback.extract_tb(exc_traceback)[-1] + # self.log.debug( + # "Error at line {}: \"{}\"".format( + # extracted_traceback[1], result["error"] + # ) + # ) + # assert all(result["success"] for result in context.data["results"]), ( + # "Atomicity not held, aborting.") # Assemble # @@ -225,17 +225,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # hierarchy = os.path.sep.join(hierarchy) hierarchy = os.path.join(*parents) - template_data = {"root": root, - "project": {"name": PROJECT, - "code": project['data']['code']}, - "silo": asset['silo'], - "task": TASK, - "asset": ASSET, - "family": instance.data['family'], - "subset": subset["name"], - "version": int(version["name"]), - "hierarchy": hierarchy} - anatomy = instance.context.data['anatomy'] # Find the representations to transfer amongst the files @@ -257,6 +246,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): # | || # |_______| # + # create template data for Anatomy + template_data = {"root": root, + "project": {"name": PROJECT, + "code": project['data']['code']}, + "silo": asset['silo'], + "task": TASK, + "asset": ASSET, + "family": instance.data['family'], + "subset": subset["name"], + "version": int(version["name"]), + "hierarchy": hierarchy} files = repre['files'] if repre.get('stagingDir'): @@ -286,7 +286,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): os.path.normpath( anatomy_filled[template_name]["path"]) ) - + self.log.debug( "test_dest_files: {}".format(str(test_dest_files))) diff --git a/pype/plugins/nuke/publish/collect_instances.py b/pype/plugins/nuke/publish/collect_instances.py index 35673c5ff3..cca5a861ff 100644 --- a/pype/plugins/nuke/publish/collect_instances.py +++ b/pype/plugins/nuke/publish/collect_instances.py @@ -64,7 +64,7 @@ class CollectNukeInstances(pyblish.api.ContextPlugin): "name": node.name(), "subset": subset, "family": avalon_knob_data["family"], - "families": [family], + "families": [avalon_knob_data["family"], family], "avalonKnob": avalon_knob_data, "publish": node.knob('publish').value(), "step": 1, diff --git a/pype/plugins/nuke/publish/collect_writes.py b/pype/plugins/nuke/publish/collect_writes.py index 216160616b..f98a3a0f7d 100644 --- a/pype/plugins/nuke/publish/collect_writes.py +++ b/pype/plugins/nuke/publish/collect_writes.py @@ -66,19 +66,20 @@ class CollectNukeWrites(pyblish.api.InstancePlugin): instance.data['families'].append('ftrack') if "representations" not in instance.data: instance.data["representations"] = list() - try: - collected_frames = os.listdir(output_dir) representation = { 'name': ext, 'ext': ext, - 'files': collected_frames, "stagingDir": output_dir, "anatomy_template": "render" } - 
instance.data["representations"].append(representation) + try: + collected_frames = os.listdir(output_dir) + representation['files'] = collected_frames + instance.data["representations"].append(representation) except Exception: + instance.data["representations"].append(representation) self.log.debug("couldn't collect frames: {}".format(label)) if 'render.local' in instance.data['families']: diff --git a/pype/plugins/nuke/publish/extract_render_local.py b/pype/plugins/nuke/publish/extract_render_local.py index 1d6550024f..2b185720a6 100644 --- a/pype/plugins/nuke/publish/extract_render_local.py +++ b/pype/plugins/nuke/publish/extract_render_local.py @@ -28,12 +28,6 @@ class NukeRenderLocal(pype.api.Extractor): last_frame = instance.data.get("endFrame", None) node_subset_name = instance.data.get("name", None) - # swap path to stageDir - temp_dir = self.staging_dir(instance).replace("\\", "/") - output_dir = instance.data.get("outputDir") - path = node['file'].value() - node['file'].setValue(path.replace(output_dir, temp_dir)) - self.log.info("Starting render") self.log.info("Start frame: {}".format(first_frame)) self.log.info("End frame: {}".format(last_frame)) @@ -45,27 +39,26 @@ class NukeRenderLocal(pype.api.Extractor): int(last_frame) ) - # swap path back to publish path path = node['file'].value() - node['file'].setValue(path.replace(temp_dir, output_dir)) + out_dir = os.path.dirname(path) ext = node["file_type"].value() if "representations" not in instance.data: instance.data["representations"] = [] - collected_frames = os.listdir(temp_dir) + collected_frames = os.listdir(out_dir) repre = { 'name': ext, 'ext': ext, 'files': collected_frames, - "stagingDir": temp_dir, + "stagingDir": out_dir, "anatomy_template": "render" } instance.data["representations"].append(repre) self.log.info("Extracted instance '{0}' to: {1}".format( instance.name, - temp_dir + out_dir )) instance.data['family'] = 'render' diff --git a/pype/plugins/nuke/publish/extract_review_data.py b/pype/plugins/nuke/publish/extract_review_data.py index 552aa0cdb0..69df0ab31f 100644 --- a/pype/plugins/nuke/publish/extract_review_data.py +++ b/pype/plugins/nuke/publish/extract_review_data.py @@ -82,10 +82,15 @@ class ExtractReviewData(pype.api.Extractor): temporary_nodes.append(node) reformat_node = nuke.createNode("Reformat") - reformat_node["format"].setValue("HD_1080") - reformat_node["resize"].setValue("fit") - reformat_node["filter"].setValue("Lanczos6") - reformat_node["black_outside"].setValue(True) + + ref_node = self.nodes.get("Reformat", None) + if ref_node: + for k, v in ref_node: + self.log.debug("k,v: {0}:{1}".format(k,v)) + if isinstance(v, unicode): + v = str(v) + reformat_node[k].setValue(v) + reformat_node.setInput(0, previous_node) previous_node = reformat_node temporary_nodes.append(reformat_node) @@ -112,6 +117,7 @@ class ExtractReviewData(pype.api.Extractor): if representation in "mov": file = fhead + "baked.mov" + name = "baked" path = os.path.join(stagingDir, file).replace("\\", "/") self.log.debug("Path: {}".format(path)) instance.data["baked_colorspace_movie"] = path @@ -120,12 +126,11 @@ class ExtractReviewData(pype.api.Extractor): write_node["raw"].setValue(1) write_node.setInput(0, previous_node) temporary_nodes.append(write_node) - thumbnail = False - preview = True - tags = ["review"] + tags = ["review", "delete"] elif representation in "jpeg": file = fhead + "jpeg" + name = "thumbnail" path = os.path.join(stagingDir, file).replace("\\", "/") instance.data["thumbnail"] = path 
write_node["file"].setValue(path) @@ -133,8 +138,6 @@ class ExtractReviewData(pype.api.Extractor): write_node["raw"].setValue(1) write_node.setInput(0, previous_node) temporary_nodes.append(write_node) - thumbnail = True - preview = False tags = ["thumbnail"] # retime for @@ -142,15 +145,13 @@ class ExtractReviewData(pype.api.Extractor): last_frame = int(last_frame) / 2 repre = { - 'name': representation, + 'name': name, 'ext': representation, 'files': file, "stagingDir": stagingDir, "startFrame": first_frame, "endFrame": last_frame, "anatomy_template": "render", - "thumbnail": thumbnail, - "preview": preview, "tags": tags } instance.data["representations"].append(repre) diff --git a/pype/plugins/nuke/publish/validate_rendered_frames.py b/pype/plugins/nuke/publish/validate_rendered_frames.py index 841001ef43..93eb84f304 100644 --- a/pype/plugins/nuke/publish/validate_rendered_frames.py +++ b/pype/plugins/nuke/publish/validate_rendered_frames.py @@ -11,9 +11,12 @@ class RepairCollectionAction(pyblish.api.Action): icon = "wrench" def process(self, context, plugin): - + self.log.info(context[0]) files_remove = [os.path.join(context[0].data["outputDir"], f) - for f in context[0].data["files"]] + for r in context[0].data.get("representations", []) + for f in r.get("files", []) + ] + self.log.info(files_remove) for f in files_remove: os.remove(f) self.log.debug("removing file: {}".format(f)) @@ -38,7 +41,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin): if not repre.get('files'): msg = ("no frames were collected, " "you need to render them") - self.log.error(msg) + self.log.warning(msg) raise ValidationException(msg) collections, remainder = clique.assemble(repre["files"]) diff --git a/pype/plugins/nuke/publish/validate_script.py b/pype/plugins/nuke/publish/validate_script.py index 4ad76b898b..efb0537246 100644 --- a/pype/plugins/nuke/publish/validate_script.py +++ b/pype/plugins/nuke/publish/validate_script.py @@ -24,7 +24,7 @@ class ValidateScript(pyblish.api.InstancePlugin): # These attributes will be checked attributes = [ "fps", "fstart", "fend", - "resolution_width", "resolution_height", "pixel_aspect", "handle_start", "handle_end" + "resolution_width", "resolution_height", "handle_start", "handle_end" ] # Value of these attributes can be found on parents From 0e0dea31237bc611e6319dee8855aabe4b036e0f Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Thu, 18 Jul 2019 18:02:52 +0200 Subject: [PATCH 62/69] (hotfix) project modules were not loading into shelfs --- setup/maya/userSetup.py | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/setup/maya/userSetup.py b/setup/maya/userSetup.py index 7b06fe7f33..b419e9d27e 100644 --- a/setup/maya/userSetup.py +++ b/setup/maya/userSetup.py @@ -1,18 +1,25 @@ import os -import sys from pypeapp import config -from pype.maya import lib +import pype.maya.lib as mlib from maya import cmds -def build_shelf(): - presets = config.get_presets() - shelf_preset = presets['maya'].get('project_shelf') - if shelf_preset: - project = os.environ["AVALON_PROJECT"] - for k, v in shelf_preset['imports'].items(): - sys.modules[k] = __import__(v, fromlist=[project]) +print("starting PYPE usersetup") - lib.shelf(name=shelf_preset['name'], preset=shelf_preset) +# build a shelf +presets = config.get_presets() +shelf_preset = presets['maya'].get('project_shelf') -cmds.evalDeferred("build_shelf()") + +if shelf_preset: + project = os.environ["AVALON_PROJECT"] + + for i in shelf_preset['imports']: + import_string = "from 
{} import {}".format(project, i) + print(import_string) + exec(import_string) + +cmds.evalDeferred("mlib.shelf(name=shelf_preset['name'], preset=shelf_preset)") + + +print("finished PYPE usersetup") From c183f044545331fc0f343e539c3d5b0174bb5509 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 19 Jul 2019 16:31:06 +0200 Subject: [PATCH 63/69] PYPE-253_fix_aov_publishing_from_maya --- pype/plugins/maya/publish/validate_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pype/plugins/maya/publish/validate_rendersettings.py b/pype/plugins/maya/publish/validate_rendersettings.py index 0450cb83b5..a41fe7b5f2 100644 --- a/pype/plugins/maya/publish/validate_rendersettings.py +++ b/pype/plugins/maya/publish/validate_rendersettings.py @@ -35,7 +35,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin): DEFAULT_PADDING = 4 RENDERER_PREFIX = {"vray": "//"} - DEFAULT_PREFIX = "//" + DEFAULT_PREFIX = "//_" def process(self, instance): From 6f5913e027d09b329f62f88293b6b1765aea3b17 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Fri, 19 Jul 2019 17:41:27 +0200 Subject: [PATCH 64/69] fix(global): integrate new didnt create correct padding for image sequences --- pype/plugins/global/publish/integrate_new.py | 23 ++++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py index e758789c37..2d04c3ec1a 100644 --- a/pype/plugins/global/publish/integrate_new.py +++ b/pype/plugins/global/publish/integrate_new.py @@ -271,15 +271,20 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug( "src_tail_collections: {}".format(str(src_collections))) src_collection = src_collections[0] + # Assert that each member has identical suffix src_head = src_collection.format("{head}") src_tail = src_collection.format("{tail}") + + # fix dst_padding + padd_len = len(files[0].replace(src_head, "").replace(src_tail, "")) + src_padding_exp = "%0{}d".format(padd_len) + test_dest_files = list() for i in [1, 2]: template_data["representation"] = repre['ext'] - template_data["frame"] = src_collection.format( - "{padding}") % i + template_data["frame"] = src_padding_exp % i anatomy_filled = anatomy.format(template_data) test_dest_files.append( @@ -295,24 +300,23 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): dst_head = dst_collection.format("{head}") dst_tail = dst_collection.format("{tail}") - repre['published_path'] = dst_collection.format() - index_frame_start = None if repre.get('startFrame'): frame_start_padding = len(str( repre.get('endFrame'))) index_frame_start = repre.get('startFrame') + dst_padding_exp = src_padding_exp for i in src_collection.indexes: - src_padding = src_collection.format("{padding}") % i + src_padding = src_padding_exp % i src_file_name = "{0}{1}{2}".format( src_head, src_padding, src_tail) - dst_padding = dst_collection.format("{padding}") % i + dst_padding = src_padding_exp % i if index_frame_start: - dst_padding = "%0{}d".format( - frame_start_padding) % index_frame_start + dst_padding_exp = "%0{}d".format(frame_start_padding) + dst_padding = dst_padding_exp % index_frame_start index_frame_start += 1 dst = "{0}{1}{2}".format(dst_head, dst_padding, dst_tail) @@ -321,6 +325,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): self.log.debug("source: {}".format(src)) instance.data["transfers"].append([src, dst]) + repre['published_path'] = "{0}{1}{2}".format(dst_head, dst_padding_exp, dst_tail) # for imagesequence version 
From 0799cf1d04010ddb96cae54b373ed8de14c133da Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Mon, 22 Jul 2019 11:52:05 +0200
Subject: [PATCH 65/69] fix(nks): hierarchical custom attributes; distribute
 frame start to other instances; add resolution_width/height and pixel_aspect
 to asset custom attributes

---
 .../publish/collect_hierarchy_context.py | 74 ++++++++++++-------
 .../publish/collect_tag_framestart.py    |  3 +-
 2 files changed, 48 insertions(+), 29 deletions(-)

diff --git a/pype/plugins/nukestudio/publish/collect_hierarchy_context.py b/pype/plugins/nukestudio/publish/collect_hierarchy_context.py
index bbae365fa6..fd646451b6 100644
--- a/pype/plugins/nukestudio/publish/collect_hierarchy_context.py
+++ b/pype/plugins/nukestudio/publish/collect_hierarchy_context.py
@@ -34,6 +34,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
 
     def process(self, context):
         for instance in context[:]:
+            assets_shared = context.data.get("assetsShared")
             tags = instance.data.get("tags", None)
             clip = instance.data["item"]
             asset = instance.data.get("asset")
@@ -139,19 +140,28 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
                 "Clip: `{}`".format(asset)
             )
 
-            assetsShared = {
-                asset: {
-                    "asset": instance.data["asset"],
-                    "hierarchy": hierarchy,
-                    "parents": parents,
-                    "tasks": instance.data['tasks']
-                }}
-            self.log.debug("__ assetsShared: {}".format(assetsShared))
-
             # add formated hierarchy path into instance data
             instance.data["hierarchy"] = hierarchy
             instance.data["parents"] = parents
-            context.data["assetsShared"].update(
-                assetsShared)
+
+            # adding to asset shared dict
+            if assets_shared.get(asset):
+                self.log.debug("Adding to shared assets: `{}`".format(
+                    asset))
+                assets_shared[asset].update({
+                    "asset": instance.data["asset"],
+                    "hierarchy": hierarchy,
+                    "parents": parents,
+                    "tasks": instance.data["tasks"]
+                })
+
+            # adding frame start if any on instance
+            start_frame = instance.data.get("frameStart")
+            if start_frame:
+                assets_shared[asset].update({
+                    "frameStart": start_frame
+                })
@@ -176,6 +186,7 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
 
     def process(self, context):
         instances = context[:]
+        sequence = context.data['activeSequence']
 
         # create hierarchyContext attr if context has none
         temp_context = {}
@@ -201,6 +212,12 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
                 instance.data["hierarchy"] = s_asset_data["hierarchy"]
                 instance.data["tasks"] = s_asset_data["tasks"]
 
+                # adding frame start if any on instance
+                start_frame = s_asset_data.get("frameStart")
+                if start_frame:
+                    instance.data["frameStart"] = start_frame
+
+
                 self.log.debug(
                     "__ instance.data[parents]: {}".format(
                         instance.data["parents"]
@@ -226,8 +243,6 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
 
             # get custom attributes of the shot
             if instance.data.get("main"):
-                start_frame = instance.data.get("frameStart", 0)
-
                 in_info['custom_attributes'] = {
                     'handles': int(instance.data.get('handles')),
                     'handle_start': handle_start,
@@ -238,27 +253,30 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
                     "edit_in": int(instance.data["startFrame"]),
                     "edit_out": int(instance.data["endFrame"])
                 }
-                if start_frame is not 0:
-                    in_info['custom_attributes'].update({
-                        'fstart': start_frame,
-                        'fend': start_frame + (
-                            instance.data["endFrame"] - instance.data["startFrame"])
-                    })
+
                 # adding SourceResolution if Tag was present
-                s_res = instance.data.get("sourceResolution")
-                if s_res and instance.data.get("main"):
-                    item = instance.data["item"]
-                    self.log.debug("TrackItem: `{0}`".format(
-                        item))
-                    width = int(item.source().mediaSource().width())
-                    height = int(item.source().mediaSource().height())
-                    self.log.info("Source Width and Height are: `{0} x {1}`".format(
-                        width, height))
+                if instance.data.get("main"):
+                    width = int(sequence.format().width())
+                    height = int(sequence.format().height())
+                    pixel_aspect = sequence.format().pixelAspect()
+                    self.log.info("Sequence Width,Height,PixelAspect are: `{0},{1},{2}`".format(
+                        width, height, pixel_aspect))
                     in_info['custom_attributes'].update({
                         "resolution_width": width,
-                        "resolution_height": height
+                        "resolution_height": height,
+                        "pixel_aspect": pixel_aspect
                     })
 
+                start_frame = instance.data.get("frameStart")
+                if start_frame:
+                    in_info['custom_attributes'].update({
+                        'fstart': start_frame,
+                        'fend': start_frame + (
+                            instance.data["endFrame"] -
+                            instance.data["startFrame"])
+                    })
+
                 in_info['tasks'] = instance.data['tasks']
 
                 parents = instance.data.get('parents', [])
diff --git a/pype/plugins/nukestudio/publish/collect_tag_framestart.py b/pype/plugins/nukestudio/publish/collect_tag_framestart.py
index 244a86e9f4..256350b2a4 100644
--- a/pype/plugins/nukestudio/publish/collect_tag_framestart.py
+++ b/pype/plugins/nukestudio/publish/collect_tag_framestart.py
@@ -20,4 +20,5 @@ class CollectClipTagFrameStart(api.InstancePlugin):
             # gets only task family tags and collect labels
             if "frameStart" in t_family:
                 t_number = t_metadata.get("tag.number", "")
-                instance.data["frameStart"] = int(t_number)
+                start_frame = int(t_number)
+                instance.data["frameStart"] = start_frame
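The collectors above stash per-asset data in `context.data["assetsShared"]` so sibling instances of the same shot can pick up `frameStart` later. A minimal sketch of that share-then-distribute pattern; a plain `defaultdict` stands in for the pyblish context here, which is an assumption rather than the plugin's actual wiring:

```python
# Sketch: share data keyed by asset name, then let every instance
# of that asset inherit it, mirroring the two collectors above.
from collections import defaultdict

assets_shared = defaultdict(dict)

# first pass: the "main" instance publishes its frame start
assets_shared["sh010"].update({"frameStart": 101})

# second pass: any other instance of the same asset inherits it
instance_data = {"asset": "sh010"}
shared = assets_shared.get(instance_data["asset"], {})
if shared.get("frameStart"):
    instance_data["frameStart"] = shared["frameStart"]

print(instance_data)  # {'asset': 'sh010', 'frameStart': 101}
```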
From ee11908a33a337f446972e29213a89dd9864dc0d Mon Sep 17 00:00:00 2001
From: Jakub Jezek
Date: Tue, 23 Jul 2019 10:19:52 +0200
Subject: [PATCH 66/69] fix(nks): asset share handles

---
 .../nukestudio/publish/collect_handles.py | 19 +++--
 .../publish/collect_hierarchy_context.py  | 31 ++++----
 .../nukestudio/publish/collect_shots.py   | 77 ++++++++++---------
 3 files changed, 71 insertions(+), 56 deletions(-)

diff --git a/pype/plugins/nukestudio/publish/collect_handles.py b/pype/plugins/nukestudio/publish/collect_handles.py
index 03652989b8..ed13691daf 100644
--- a/pype/plugins/nukestudio/publish/collect_handles.py
+++ b/pype/plugins/nukestudio/publish/collect_handles.py
@@ -32,13 +32,18 @@ class CollectClipHandles(api.ContextPlugin):
             if instance.data.get("main"):
                 name = instance.data["asset"]
                 if assets_shared.get(name):
-                    self.log.debug("Adding to shared assets: `{}`".format(
-                        instance.data["name"]))
-                    assets_shared[name].update({
-                        "handles": handles,
-                        "handleStart": handle_start,
-                        "handleEnd": handle_end
-                    })
+                    asset_shared = assets_shared.get(name)
+                else:
+                    asset_shared = assets_shared[name]
+
+                self.log.debug("Adding to shared assets: `{}`".format(
+                    instance.data["name"]))
+                asset_shared.update({
+                    "handles": handles,
+                    "handleStart": handle_start,
+                    "handleEnd": handle_end
+                })
+
 
         for instance in filtered_instances:
             if not instance.data.get("main") and not instance.data.get("handleTag"):
diff --git a/pype/plugins/nukestudio/publish/collect_hierarchy_context.py b/pype/plugins/nukestudio/publish/collect_hierarchy_context.py
index fd646451b6..28b007b109 100644
--- a/pype/plugins/nukestudio/publish/collect_hierarchy_context.py
+++ b/pype/plugins/nukestudio/publish/collect_hierarchy_context.py
@@ -145,22 +145,27 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
             instance.data["parents"] = parents
 
             # adding to asset shared dict
+            self.log.debug("__ assets_shared: {}".format(assets_shared))
             if assets_shared.get(asset):
                 self.log.debug("Adding to shared assets: `{}`".format(
-                    asset))
-                assets_shared[asset].update({
-                    "asset": instance.data["asset"],
-                    "hierarchy": hierarchy,
-                    "parents": parents,
-                    "tasks": instance.data["tasks"]
-                })
+                    instance.data["name"]))
+                asset_shared = assets_shared.get(asset)
+            else:
+                asset_shared = assets_shared[asset]
 
-            # adding frame start if any on instance
-            start_frame = instance.data.get("frameStart")
-            if start_frame:
-                assets_shared[asset].update({
-                    "frameStart": start_frame
-                })
+            asset_shared.update({
+                "asset": instance.data["asset"],
+                "hierarchy": hierarchy,
+                "parents": parents,
+                "tasks": instance.data["tasks"]
+            })
+
+            # adding frame start if any on instance
+            start_frame = instance.data.get("frameStart")
+            if start_frame:
+                asset_shared.update({
+                    "frameStart": start_frame
+                })
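Both branches of the `if assets_shared.get(name)` block above end up binding the same entry; the split only pays off if the backing dict auto-creates missing keys on indexing. Assuming `context.data["assetsShared"]` is seeded as a `defaultdict(dict)` (an assumption — the collector that creates it is not part of this series), the lookup collapses to a single line, sketched below:

```python
# Sketch: with defaultdict(dict), indexing creates the per-asset
# entry on first access, so no get()/else split is needed.
from collections import defaultdict

assets_shared = defaultdict(dict)
name = "sh010"

asset_shared = assets_shared[name]  # created empty on first access
asset_shared.update({
    "handles": 0,
    "handleStart": 12,
    "handleEnd": 12,
})
print(assets_shared[name])  # {'handles': 0, 'handleStart': 12, 'handleEnd': 12}
```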
- data["timelineInHandles"] = data["timelineIn"] - data["timelineInHandles"] -= data["handleStart"] - data["timelineOutHandles"] = data["timelineOut"] - data["timelineOutHandles"] += data["handleEnd"] + # Frame-ranges with handles. + data["timelineInHandles"] = data["timelineIn"] + data["timelineInHandles"] -= data["handleStart"] + data["timelineOutHandles"] = data["timelineOut"] + data["timelineOutHandles"] += data["handleEnd"] - # Creating comp frame range. - data["endFrame"] = ( - data["frameStart"] + (data["sourceOut"] - data["sourceIn"]) - ) + # Creating comp frame range. + data["endFrame"] = ( + data["frameStart"] + (data["sourceOut"] - data["sourceIn"]) + ) - # Get fps. - sequence = instance.context.data["activeSequence"] - data["fps"] = sequence.framerate() + # Get fps. + sequence = instance.context.data["activeSequence"] + data["fps"] = sequence.framerate() - # Create instance. - self.log.debug("Creating instance with: {}".format(data["name"])) - instance.context.create_instance(**data) + # Create instance. + self.log.debug("Creating instance with: {}".format(data["name"])) + instance.context.create_instance(**data) + + self.log.debug("_ context: {}".format(context[:])) From 6f5a80bf1b2379e560c06648a106308febda951e Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 23 Jul 2019 18:10:57 +0200 Subject: [PATCH 67/69] apps actions wont show on project task --- pype/ftrack/lib/ftrack_app_handler.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pype/ftrack/lib/ftrack_app_handler.py b/pype/ftrack/lib/ftrack_app_handler.py index 2d1d88f7d4..29d478b10f 100644 --- a/pype/ftrack/lib/ftrack_app_handler.py +++ b/pype/ftrack/lib/ftrack_app_handler.py @@ -94,6 +94,9 @@ class AppAction(BaseHandler): ): return False + if entities[0]['parent'].entity_type.lower() == 'project': + return False + ft_project = entities[0]['project'] database = pypelib.get_avalon_database() From cef42b84b7ea76463446a37046fd340e6c08e3fa Mon Sep 17 00:00:00 2001 From: iLLiCiTiT Date: Tue, 23 Jul 2019 18:14:51 +0200 Subject: [PATCH 68/69] timer service wont try to launch timers on project tasks --- pype/services/timers_manager/timers_manager.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pype/services/timers_manager/timers_manager.py b/pype/services/timers_manager/timers_manager.py index e1980d3d90..72140de323 100644 --- a/pype/services/timers_manager/timers_manager.py +++ b/pype/services/timers_manager/timers_manager.py @@ -79,6 +79,14 @@ class TimersManager(metaclass=Singleton): } ''' self.last_task = data + + if len(input_data['hierarchy']) < 1: + self.log.error(( + 'Timer has been launched on task which is child of Project.' + ' That is not allowed in Pype!' 
From e1cc291605562985030bbddb75c00d34777072ac Mon Sep 17 00:00:00 2001
From: iLLiCiTiT
Date: Tue, 23 Jul 2019 18:18:20 +0200
Subject: [PATCH 69/69] fixed variable naming

---
 pype/services/timers_manager/timers_manager.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/pype/services/timers_manager/timers_manager.py b/pype/services/timers_manager/timers_manager.py
index 72140de323..2259dfc34d 100644
--- a/pype/services/timers_manager/timers_manager.py
+++ b/pype/services/timers_manager/timers_manager.py
@@ -78,15 +78,15 @@ class TimersManager(metaclass=Singleton):
             'task_name': 'Lookdev BG'
         }
         '''
-        self.last_task = data
-
-        if len(input_data['hierarchy']) < 1:
+        if len(data['hierarchy']) < 1:
             self.log.error((
-                'Timer has been launched on task which is child of Project.'
-                ' That is not allowed in Pype!'
+                'Not allowed action in Pype!!'
+                ' Timer has been launched on task which is child of Project.'
             ))
             return
 
+        self.last_task = data
+
         for module in self.modules:
             module.start_timer_manager(data)
         self.is_running = True
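Patch 69 also moves `self.last_task = data` below the guard, so an aborted start no longer records the rejected task. A brief sketch of why that ordering matters; the class below is reduced to the two attributes the hunk touches, with everything else assumed away:

```python
# Sketch: assigning last_task only after the guard passes keeps a
# rejected project-level task from becoming the "resume" target.
class TimersManagerSketch(object):
    def __init__(self):
        self.last_task = None
        self.is_running = False

    def start_timer(self, data):
        if len(data['hierarchy']) < 1:
            return  # rejected: last_task stays on the previous task
        self.last_task = data
        self.is_running = True


mgr = TimersManagerSketch()
mgr.start_timer({'hierarchy': [], 'task_name': 'Lookdev BG'})
print(mgr.last_task)  # None - the invalid task was never recorded
```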