Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Merge pull request #1108 from pypeclub/feature/3_0_bulk_render_publishing

Bulk mov render publishing

Commit ece3e843d7: 7 changed files with 382 additions and 85 deletions
@@ -3,40 +3,52 @@ import pyblish.api
 from pprint import pformat


-class CollectPsdInstances(pyblish.api.InstancePlugin):
-    """
-    Collect all available instances from psd batch.
-
-    """
+class CollectBatchInstances(pyblish.api.InstancePlugin):
+    """Collect all available instances for batch publish."""

-    label = "Collect Psd Instances"
+    label = "Collect Batch Instances"
     order = pyblish.api.CollectorOrder + 0.489
     hosts = ["standalonepublisher"]
-    families = ["background_batch"]
+    families = ["background_batch", "render_mov_batch"]

     # presets
+    default_subset_task = {
+        "background_batch": "background",
+        "render_mov_batch": "compositing"
+    }
     subsets = {
-        "backgroundLayout": {
-            "task": "background",
-            "family": "backgroundLayout"
+        "background_batch": {
+            "backgroundLayout": {
+                "task": "background",
+                "family": "backgroundLayout"
+            },
+            "backgroundComp": {
+                "task": "background",
+                "family": "backgroundComp"
+            },
+            "workfileBackground": {
+                "task": "background",
+                "family": "workfile"
+            }
         },
-        "backgroundComp": {
-            "task": "background",
-            "family": "backgroundComp"
-        },
-        "workfileBackground": {
-            "task": "background",
-            "family": "workfile"
+        "render_mov_batch": {
+            "renderCompositingDefault": {
+                "task": "compositing",
+                "family": "render"
+            }
         }
     }
     unchecked_by_default = []

     def process(self, instance):
         context = instance.context
-        asset_data = instance.data["assetEntity"]
         asset_name = instance.data["asset"]
-        for subset_name, subset_data in self.subsets.items():
+        family = instance.data["family"]
+
+        default_task_name = self.default_subset_task.get(family)
+
+        for subset_name, subset_data in self.subsets[family].items():
             instance_name = f"{asset_name}_{subset_name}"
-            task = subset_data.get("task", "background")
+            task_name = subset_data.get("task") or default_task_name

             # create new instance
             new_instance = context.create_instance(instance_name)

@@ -51,10 +63,9 @@ class CollectPsdInstances(pyblish.api.InstancePlugin):
             # add subset data from preset
             new_instance.data.update(subset_data)

-            new_instance.data["label"] = f"{instance_name}"
+            new_instance.data["label"] = instance_name
             new_instance.data["subset"] = subset_name
-            new_instance.data["task"] = task
-
+            new_instance.data["task"] = task_name

             if subset_name in self.unchecked_by_default:
                 new_instance.data["publish"] = False
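The presets above mean that one incoming batch instance fans out into one new instance per subset defined for its family, with the task name falling back to default_subset_task when the subset does not set one. Below is a minimal standalone sketch of that fan-out; the sample data and the make_instances helper are illustrative only, while the real plugin creates pyblish instances via context.create_instance().

# Sketch of the fan-out done in CollectBatchInstances.process().
default_subset_task = {
    "background_batch": "background",
    "render_mov_batch": "compositing",
}
subsets = {
    "render_mov_batch": {
        "renderCompositingDefault": {"task": "compositing", "family": "render"},
    },
}

def make_instances(asset_name, family):
    # One output instance per subset preset of the given family.
    default_task = default_subset_task.get(family)
    new_instances = []
    for subset_name, subset_data in subsets[family].items():
        new_instances.append({
            "label": f"{asset_name}_{subset_name}",
            "subset": subset_name,
            "task": subset_data.get("task") or default_task,
            "family": subset_data["family"],
        })
    return new_instances

print(make_instances("sh010", "render_mov_batch"))
# [{'label': 'sh010_renderCompositingDefault', 'subset': 'renderCompositingDefault',
#   'task': 'compositing', 'family': 'render'}]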
@@ -14,12 +14,12 @@ Provides:
 """

 import os
-import pyblish.api
-from avalon import io
 import json
 import copy
-import clique
 from pprint import pformat
+import clique
+import pyblish.api
+from avalon import io


 class CollectContextDataSAPublish(pyblish.api.ContextPlugin):

@@ -45,11 +45,16 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):

         with open(input_json_path, "r") as f:
             in_data = json.load(f)
         self.log.debug(f"_ in_data: {pformat(in_data)}")

+        self.add_files_to_ignore_cleanup(in_data, context)
+
         # exception for editorial
-        if in_data["family"] in ["editorial", "background_batch"]:
+        if in_data["family"] == "render_mov_batch":
+            in_data_list = self.prepare_mov_batch_instances(in_data)
+
+        elif in_data["family"] in ["editorial", "background_batch"]:
             in_data_list = self.multiple_instances(context, in_data)

         else:
             in_data_list = [in_data]
@@ -59,6 +64,21 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
             # create instance
             self.create_instance(context, in_data)

+    def add_files_to_ignore_cleanup(self, in_data, context):
+        all_filepaths = context.data.get("skipCleanupFilepaths") or []
+        for repre in in_data["representations"]:
+            files = repre["files"]
+            if not isinstance(files, list):
+                files = [files]
+
+            dirpath = repre["stagingDir"]
+            for filename in files:
+                filepath = os.path.normpath(os.path.join(dirpath, filename))
+                if filepath not in all_filepaths:
+                    all_filepaths.append(filepath)
+
+        context.data["skipCleanupFilepaths"] = all_filepaths
+
     def multiple_instances(self, context, in_data):
         # avoid subset name duplicity
         if not context.data.get("subsetNamesCheck"):
@@ -66,38 +86,38 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):

         in_data_list = list()
         representations = in_data.pop("representations")
-        for repre in representations:
+        for repr in representations:
             in_data_copy = copy.deepcopy(in_data)
-            ext = repre["ext"][1:]
+            ext = repr["ext"][1:]
             subset = in_data_copy["subset"]
             # filter out non editorial files
             if ext not in self.batch_extensions:
-                in_data_copy["representations"] = [repre]
+                in_data_copy["representations"] = [repr]
                 in_data_copy["subset"] = f"{ext}{subset}"
                 in_data_list.append(in_data_copy)

-            files = repre.get("files")
+            files = repr.get("files")

             # delete unneeded keys
             delete_repr_keys = ["frameStart", "frameEnd"]
             for k in delete_repr_keys:
-                if repre.get(k):
-                    repre.pop(k)
+                if repr.get(k):
+                    repr.pop(k)

             # convert files to list if it isnt
             if not isinstance(files, (tuple, list)):
                 files = [files]

             self.log.debug(f"_ files: {files}")
-            for index, _file in enumerate(files):
+            for index, f in enumerate(files):
                 index += 1
                 # copy dictionaries
                 in_data_copy = copy.deepcopy(in_data_copy)
-                new_repre = copy.deepcopy(repre)
+                repr_new = copy.deepcopy(repr)

-                new_repre["files"] = _file
-                new_repre["name"] = ext
-                in_data_copy["representations"] = [new_repre]
+                repr_new["files"] = f
+                repr_new["name"] = ext
+                in_data_copy["representations"] = [repr_new]

                 # create subset Name
                 new_subset = f"{ext}{index}{subset}"
@@ -112,8 +132,91 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):

         return in_data_list

+    def prepare_mov_batch_instances(self, in_data):
+        """Copy of `multiple_instances` method.
+
+        Method was copied because `batch_extensions` is used in
+        `multiple_instances` but without any family filtering. Since usage
+        of the filtering is unknown and modification of that part may break
+        editorial or PSD batch publishing it was decided to create a copy with
+        this family specific filtering. Also "frameStart" and "frameEnd" keys
+        are removed from instance which is needed for this processing.
+
+        Instance data will also care about families.
+
+        TODO:
+        - Merge possible logic with `multiple_instances` method.
+        """
+        self.log.info("Preparing data for mov batch processing.")
+        in_data_list = []
+
+        representations = in_data.pop("representations")
+        for repre in representations:
+            self.log.debug("Processing representation with files {}".format(
+                str(repre["files"])
+            ))
+            ext = repre["ext"][1:]
+
+            # Rename representation name
+            repre_name = repre["name"]
+            if repre_name.startswith(ext + "_"):
+                repre["name"] = ext
+            # Skip files that are not available for mov batch publishing
+            # TODO add dynamic expected extensions by family from `in_data`
+            # - with this modification it would be possible to use only
+            #   `multiple_instances` method
+            expected_exts = ["mov"]
+            if ext not in expected_exts:
+                self.log.warning((
+                    "Skipping representation."
+                    " Does not match expected extensions <{}>. {}"
+                ).format(", ".join(expected_exts), str(repre)))
+                continue
+
+            files = repre["files"]
+            # Convert files to list if it isnt
+            if not isinstance(files, (tuple, list)):
+                files = [files]
+
+            # Loop through files and create new instance per each file
+            for filename in files:
+                # Create copy of representation and change it's files and name
+                new_repre = copy.deepcopy(repre)
+                new_repre["files"] = filename
+                new_repre["name"] = ext
+                new_repre["thumbnail"] = True
+
+                if "tags" not in new_repre:
+                    new_repre["tags"] = []
+                new_repre["tags"].append("review")
+
+                # Prepare new subset name (temporary name)
+                # - subset name will be changed in batch specific plugins
+                new_subset_name = "{}{}".format(
+                    in_data["subset"],
+                    os.path.basename(filename)
+                )
+                # Create copy of instance data as new instance and pass in new
+                # representation
+                in_data_copy = copy.deepcopy(in_data)
+                in_data_copy["representations"] = [new_repre]
+                in_data_copy["subset"] = new_subset_name
+                if "families" not in in_data_copy:
+                    in_data_copy["families"] = []
+                in_data_copy["families"].append("review")
+
+                in_data_list.append(in_data_copy)
+
+        return in_data_list
+
     def create_instance(self, context, in_data):
         subset = in_data["subset"]
+        # If instance data already contain families then use it
+        instance_families = in_data.get("families") or []
+        # Make sure default families are in instance
+        for default_family in self.default_families or []:
+            if default_family not in instance_families:
+                instance_families.append(default_family)

         instance = context.create_instance(subset)
         instance.data.update(
@@ -130,7 +233,7 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
                 "frameEnd": in_data.get("representations", [None])[0].get(
                     "frameEnd", None
                 ),
-                "families": self.default_families or [],
+                "families": instance_families
             }
         )
         self.log.info("collected instance: {}".format(pformat(instance.data)))
@@ -157,7 +260,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):

             if component["preview"]:
                 instance.data["families"].append("review")
-                instance.data["repreProfiles"] = ["h264"]
                 component["tags"] = ["review"]
                 self.log.debug("Adding review family")
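To illustrate what prepare_mov_batch_instances produces, here is a trimmed, self-contained sketch of the per-file split: every mov file in a representation becomes its own instance with a single-file representation tagged for review and a temporary subset name. The input dictionary is made up for the example; the real data comes from the standalone publisher JSON.

import copy
import os

def split_mov_batch(in_data):
    # Simplified version of the per-file split in prepare_mov_batch_instances.
    instances = []
    for repre in in_data.pop("representations"):
        files = repre["files"]
        if not isinstance(files, (tuple, list)):
            files = [files]
        for filename in files:
            new_repre = copy.deepcopy(repre)
            new_repre.update({"files": filename, "name": "mov", "thumbnail": True})
            new_repre.setdefault("tags", []).append("review")

            instance = copy.deepcopy(in_data)
            instance["representations"] = [new_repre]
            # Temporary subset name, replaced later by batch specific plugins.
            instance["subset"] = in_data["subset"] + os.path.basename(filename)
            instance.setdefault("families", []).append("review")
            instances.append(instance)
    return instances

# Hypothetical input: one representation holding two rendered movies.
batch = {
    "family": "render_mov_batch",
    "subset": "renderBatch",
    "representations": [{"name": "mov", "ext": ".mov",
                         "files": ["sh010_v001.mov", "sh020_v001.mov"],
                         "stagingDir": "/tmp/batch"}],
}
for item in split_mov_batch(batch):
    print(item["subset"], item["representations"][0]["files"])
# renderBatchsh010_v001.mov sh010_v001.mov
# renderBatchsh020_v001.mov sh020_v001.mov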
@@ -1,4 +1,5 @@
 import os
+import re
 import collections
 import pyblish.api
 from avalon import io
@@ -14,36 +15,78 @@ class CollectMatchingAssetToInstance(pyblish.api.InstancePlugin):
     label = "Collect Matching Asset to Instance"
     order = pyblish.api.CollectorOrder - 0.05
     hosts = ["standalonepublisher"]
-    families = ["background_batch"]
+    families = ["background_batch", "render_mov_batch"]

+    # Version regex to parse asset name and version from filename
+    version_regex = re.compile(r"^(.+)_v([0-9]+)$")
+
     def process(self, instance):
-        source_file = os.path.basename(instance.data["source"]).lower()
+        source_filename = self.get_source_filename(instance)
         self.log.info("Looking for asset document for file \"{}\"".format(
-            instance.data["source"]
+            source_filename
         ))
+        asset_name = os.path.splitext(source_filename)[0].lower()
+
         asset_docs_by_name = self.selection_children_by_name(instance)

-        matching_asset_doc = asset_docs_by_name.get(source_file)
+        version_number = None
+        # Always first check if source filename is in assets
+        matching_asset_doc = asset_docs_by_name.get(asset_name)
+        if matching_asset_doc is None:
+            # Check if source file contain version in name
+            self.log.debug((
+                "Asset doc by \"{}\" was not found trying version regex."
+            ).format(asset_name))
+            regex_result = self.version_regex.findall(asset_name)
+            if regex_result:
+                _asset_name, _version_number = regex_result[0]
+                matching_asset_doc = asset_docs_by_name.get(_asset_name)
+                if matching_asset_doc:
+                    version_number = int(_version_number)
+
         if matching_asset_doc is None:
             for asset_name_low, asset_doc in asset_docs_by_name.items():
-                if asset_name_low in source_file:
+                if asset_name_low in asset_name:
                     matching_asset_doc = asset_doc
                     break

-        if matching_asset_doc:
-            instance.data["asset"] = matching_asset_doc["name"]
-            instance.data["assetEntity"] = matching_asset_doc
-            self.log.info(
-                f"Matching asset found: {pformat(matching_asset_doc)}"
-            )
-
-        else:
+        if not matching_asset_doc:
+            self.log.debug("Available asset names {}".format(
+                str(list(asset_docs_by_name.keys()))
+            ))
             # TODO better error message
             raise AssertionError((
                 "Filename \"{}\" does not match"
                 " any name of asset documents in database for your selection."
-            ).format(instance.data["source"]))
+            ).format(source_filename))

+        instance.data["asset"] = matching_asset_doc["name"]
+        instance.data["assetEntity"] = matching_asset_doc
+        if version_number is not None:
+            instance.data["version"] = version_number
+
+        self.log.info(
+            f"Matching asset found: {pformat(matching_asset_doc)}"
+        )
+
+    def get_source_filename(self, instance):
+        if instance.data["family"] == "background_batch":
+            return os.path.basename(instance.data["source"])
+
+        if len(instance.data["representations"]) != 1:
+            raise ValueError((
+                "Implementation bug: Instance data contain"
+                " more than one representation."
+            ))
+
+        repre = instance.data["representations"][0]
+        repre_files = repre["files"]
+        if not isinstance(repre_files, str):
+            raise ValueError((
+                "Implementation bug: Instance's representation contain"
+                " unexpected value (expected single file). {}"
+            ).format(str(repre_files)))
+        return repre_files
+
     def selection_children_by_name(self, instance):
         storing_key = "childrenDocsForSelection"
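The new version_regex is what lets a filename such as sh010_v003.mov resolve to an existing asset (and carry its version number) when the full file stem is not an asset name itself. Here is a small sketch of the matching order; the asset documents and filenames are invented for the example, and the substring fallback mirrors the plugin's last resort.

import os
import re

version_regex = re.compile(r"^(.+)_v([0-9]+)$")

def match_asset(source_filename, asset_docs_by_name):
    asset_name = os.path.splitext(source_filename)[0].lower()
    # 1. Exact match on the file stem.
    doc = asset_docs_by_name.get(asset_name)
    if doc:
        return doc, None
    # 2. Strip a trailing "_v###" and match again, keeping the version.
    result = version_regex.findall(asset_name)
    if result:
        name, version = result[0]
        doc = asset_docs_by_name.get(name)
        if doc:
            return doc, int(version)
    # 3. Fallback: substring match, as in the plugin.
    for name_low, doc in asset_docs_by_name.items():
        if name_low in asset_name:
            return doc, None
    return None, None

assets = {"sh010": {"name": "sh010"}}
print(match_asset("sh010_v003.mov", assets))
# ({'name': 'sh010'}, 3)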
@@ -1,16 +1,16 @@
 import os
 import logging
 import pyblish.api
+import avalon.api


 class CollectFtrackApi(pyblish.api.ContextPlugin):
     """ Collects an ftrack session and the current task id. """

-    order = pyblish.api.CollectorOrder
+    order = pyblish.api.CollectorOrder + 0.4999
     label = "Collect Ftrack Api"

     def process(self, context):

         ftrack_log = logging.getLogger('ftrack_api')
         ftrack_log.setLevel(logging.WARNING)
         ftrack_log = logging.getLogger('ftrack_api_old')
@@ -22,28 +22,27 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):

         session = ftrack_api.Session(auto_connect_event_hub=True)
         self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
-        context.data["ftrackSession"] = session

         # Collect task
-        project_name = os.environ.get('AVALON_PROJECT', '')
-        asset_name = os.environ.get('AVALON_ASSET', '')
-        task_name = os.environ.get('AVALON_TASK', None)
+        project_name = avalon.api.Session["AVALON_PROJECT"]
+        asset_name = avalon.api.Session["AVALON_ASSET"]
+        task_name = avalon.api.Session["AVALON_TASK"]

         # Find project entity
         project_query = 'Project where full_name is "{0}"'.format(project_name)
         self.log.debug("Project query: < {0} >".format(project_query))
-        project_entity = list(session.query(project_query).all())
-        if len(project_entity) == 0:
+        project_entities = list(session.query(project_query).all())
+        if len(project_entities) == 0:
             raise AssertionError(
                 "Project \"{0}\" not found in Ftrack.".format(project_name)
             )
         # QUESTION Is possible to happen?
-        elif len(project_entity) > 1:
+        elif len(project_entities) > 1:
             raise AssertionError((
                 "Found more than one project with name \"{0}\" in Ftrack."
             ).format(project_name))

-        project_entity = project_entity[0]
+        project_entity = project_entities[0]
         self.log.debug("Project found: {0}".format(project_entity))

         # Find asset entity
@@ -93,7 +92,119 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
             task_entity = None
             self.log.warning("Task name is not set.")

+        context.data["ftrackSession"] = session
         context.data["ftrackPythonModule"] = ftrack_api
         context.data["ftrackProject"] = project_entity
         context.data["ftrackEntity"] = asset_entity
         context.data["ftrackTask"] = task_entity
+
+        self.per_instance_process(context, asset_name, task_name)
+
+    def per_instance_process(
+        self, context, context_asset_name, context_task_name
+    ):
+        instance_by_asset_and_task = {}
+        for instance in context:
+            self.log.debug(
+                "Checking entities of instance \"{}\"".format(str(instance))
+            )
+            instance_asset_name = instance.data.get("asset")
+            instance_task_name = instance.data.get("task")
+
+            if not instance_asset_name and not instance_task_name:
+                self.log.debug("Instance does not have set context keys.")
+                continue
+
+            elif instance_asset_name and instance_task_name:
+                if (
+                    instance_asset_name == context_asset_name
+                    and instance_task_name == context_task_name
+                ):
+                    self.log.debug((
+                        "Instance's context is same as in publish context."
+                        " Asset: {} | Task: {}"
+                    ).format(context_asset_name, context_task_name))
+                    continue
+                asset_name = instance_asset_name
+                task_name = instance_task_name
+
+            elif instance_task_name:
+                if instance_task_name == context_task_name:
+                    self.log.debug((
+                        "Instance's context task is same as in publish"
+                        " context. Task: {}"
+                    ).format(context_task_name))
+                    continue
+
+                asset_name = context_asset_name
+                task_name = instance_task_name
+
+            elif instance_asset_name:
+                if instance_asset_name == context_asset_name:
+                    self.log.debug((
+                        "Instance's context asset is same as in publish"
+                        " context. Asset: {}"
+                    ).format(context_asset_name))
+                    continue
+
+                # Do not use context's task name
+                task_name = instance_task_name
+                asset_name = instance_asset_name
+
+            if asset_name not in instance_by_asset_and_task:
+                instance_by_asset_and_task[asset_name] = {}
+
+            if task_name not in instance_by_asset_and_task[asset_name]:
+                instance_by_asset_and_task[asset_name][task_name] = []
+            instance_by_asset_and_task[asset_name][task_name].append(instance)
+
+        if not instance_by_asset_and_task:
+            return
+
+        session = context.data["ftrackSession"]
+        project_entity = context.data["ftrackProject"]
+        asset_names = set()
+        for asset_name in instance_by_asset_and_task.keys():
+            asset_names.add(asset_name)
+
+        joined_asset_names = ",".join([
+            "\"{}\"".format(name)
+            for name in asset_names
+        ])
+        entities = session.query((
+            "TypedContext where project_id is \"{}\" and name in ({})"
+        ).format(project_entity["id"], joined_asset_names)).all()
+
+        entities_by_name = {
+            entity["name"]: entity
+            for entity in entities
+        }
+
+        for asset_name, by_task_data in instance_by_asset_and_task.items():
+            entity = entities_by_name.get(asset_name)
+            task_entity_by_name = {}
+            if not entity:
+                self.log.warning((
+                    "Didn't find entity with name \"{}\" in Project \"{}\""
+                ).format(asset_name, project_entity["full_name"]))
+            else:
+                task_entities = session.query((
+                    "select id, name from Task where parent_id is \"{}\""
+                ).format(entity["id"])).all()
+                for task_entity in task_entities:
+                    task_name_low = task_entity["name"].lower()
+                    task_entity_by_name[task_name_low] = task_entity
+
+            for task_name, instances in by_task_data.items():
+                task_entity = None
+                if task_name and entity:
+                    task_entity = task_entity_by_name.get(task_name.lower())
+
+                for instance in instances:
+                    instance.data["ftrackEntity"] = entity
+                    instance.data["ftrackTask"] = task_entity
+
+                    self.log.debug((
+                        "Instance {} has own ftrack entities"
+                        " as has different context. TypedContext: {} Task: {}"
+                    ).format(str(instance), str(entity), str(task_entity)))
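The per-instance pass above resolves all needed ftrack entities with a single query by joining the collected asset names into one name in (...) filter. A quick sketch of just that query-string construction follows; the project id is a placeholder and the query is only built here, not sent through an ftrack session.

# Build the same TypedContext query string that per_instance_process sends.
asset_names = {"sh010", "sh020"}
joined_asset_names = ",".join([
    "\"{}\"".format(name)
    for name in asset_names
])
query = (
    "TypedContext where project_id is \"{}\" and name in ({})"
).format("PROJECT_ID", joined_asset_names)
print(query)
# e.g. TypedContext where project_id is "PROJECT_ID" and name in ("sh010","sh020")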
@@ -102,25 +102,37 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):

     def process(self, instance):
         session = instance.context.data["ftrackSession"]
-        if instance.data.get("ftrackTask"):
-            task = instance.data["ftrackTask"]
-            name = task
-            parent = task["parent"]
-        elif instance.data.get("ftrackEntity"):
-            task = None
-            name = instance.data.get("ftrackEntity")['name']
-            parent = instance.data.get("ftrackEntity")
-        elif instance.context.data.get("ftrackTask"):
-            task = instance.context.data["ftrackTask"]
-            name = task
-            parent = task["parent"]
-        elif instance.context.data.get("ftrackEntity"):
-            task = None
-            name = instance.context.data.get("ftrackEntity")['name']
-            parent = instance.context.data.get("ftrackEntity")
+        context = instance.context

-        info_msg = "Created new {entity_type} with data: {data}"
-        info_msg += ", metadata: {metadata}."
+        name = None
+        # If instance has set "ftrackEntity" or "ftrackTask" then use them from
+        # instance. Even if they are set to None. If they are set to None it
+        # has a reason. (like has different context)
+        if "ftrackEntity" in instance.data or "ftrackTask" in instance.data:
+            task = instance.data.get("ftrackTask")
+            parent = instance.data.get("ftrackEntity")
+
+        elif "ftrackEntity" in context.data or "ftrackTask" in context.data:
+            task = context.data.get("ftrackTask")
+            parent = context.data.get("ftrackEntity")
+
+        if task:
+            parent = task["parent"]
+            name = task
+        elif parent:
+            name = parent["name"]
+
+        if not name:
+            self.log.info((
+                "Skipping ftrack integration. Instance \"{}\" does not"
+                " have specified ftrack entities."
+            ).format(str(instance)))
+            return
+
+        info_msg = (
+            "Created new {entity_type} with data: {data}"
+            ", metadata: {metadata}."
+        )

         used_asset_versions = []
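The rewritten resolution order in IntegrateFtrackApi boils down to: prefer per-instance ftrack entities (even if explicitly set to None), otherwise fall back to the publish context, and skip integration entirely when neither yields a name. A compact sketch of that decision, with plain dictionaries standing in for pyblish data and ftrack entities:

def resolve_ftrack_target(instance_data, context_data):
    """Return (task, parent, name) or None when integration should be skipped."""
    task = parent = name = None
    # Prefer entities set on the instance, even when explicitly set to None.
    if "ftrackEntity" in instance_data or "ftrackTask" in instance_data:
        task = instance_data.get("ftrackTask")
        parent = instance_data.get("ftrackEntity")
    # Otherwise fall back to the publish context.
    elif "ftrackEntity" in context_data or "ftrackTask" in context_data:
        task = context_data.get("ftrackTask")
        parent = context_data.get("ftrackEntity")

    if task:
        parent = task["parent"]
        name = task
    elif parent:
        name = parent["name"]

    return (task, parent, name) if name else None

# Instance carrying its own task entity (as set by per_instance_process);
# the dictionaries stand in for real ftrack entities.
task_entity = {"name": "compositing", "parent": {"name": "sh010"}}
print(resolve_ftrack_target({"ftrackTask": task_entity}, {}))
print(resolve_ftrack_target({}, {}))  # -> None, integration would be skipped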
@@ -37,9 +37,16 @@ class CleanUp(pyblish.api.InstancePlugin):
             )
         )

+        _skip_cleanup_filepaths = instance.context.data.get(
+            "skipCleanupFilepaths"
+        ) or []
+        skip_cleanup_filepaths = set()
+        for path in _skip_cleanup_filepaths:
+            skip_cleanup_filepaths.add(os.path.normpath(path))
+
         if self.remove_temp_renders:
             self.log.info("Cleaning renders new...")
-            self.clean_renders(instance)
+            self.clean_renders(instance, skip_cleanup_filepaths)

         if [ef for ef in self.exclude_families
                 if instance.data["family"] in ef]:
@@ -65,7 +72,7 @@ class CleanUp(pyblish.api.InstancePlugin):
         self.log.info("Removing staging directory {}".format(staging_dir))
         shutil.rmtree(staging_dir)

-    def clean_renders(self, instance):
+    def clean_renders(self, instance, skip_cleanup_filepaths):
         transfers = instance.data.get("transfers", list())

         current_families = instance.data.get("families", list())
@@ -84,6 +91,12 @@ class CleanUp(pyblish.api.InstancePlugin):
             # add dest dir into clearing dir paths (regex paterns)
             transfers_dirs.append(os.path.dirname(dest))

+            if src in skip_cleanup_filepaths:
+                self.log.debug((
+                    "Source file is marked to be skipped in cleanup. {}"
+                ).format(src))
+                continue
+
             if os.path.normpath(src) != os.path.normpath(dest):
                 if instance_family == 'render' or 'render' in current_families:
                     self.log.info("Removing src: `{}`...".format(src))
@@ -116,6 +129,9 @@ class CleanUp(pyblish.api.InstancePlugin):

         # remove all files which match regex patern
         for f in files:
+            if os.path.normpath(f) in skip_cleanup_filepaths:
+                continue
+
             for p in self.paterns:
                 patern = re.compile(p)
                 if not patern.findall(f):
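The cleanup change builds a set of os.path.normpath-normalized paths from the context's skipCleanupFilepaths and skips any transfer source or staged file found in it, so the batch source movies survive the cleanup step. A tiny sketch of that skip check with invented paths:

import os

# Paths recorded earlier by add_files_to_ignore_cleanup, normalized once.
skip_cleanup_filepaths = {
    os.path.normpath(p)
    for p in ["/tmp/batch//sh010_v001.mov"]
}

candidates = ["/tmp/batch/sh010_v001.mov", "/tmp/batch/sh010_v001.exr"]
for path in candidates:
    if os.path.normpath(path) in skip_cleanup_filepaths:
        print("skipping", path)   # kept on disk for later batch steps
    else:
        print("cleaning", path)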
@@ -812,7 +812,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

         matching_profiles = {}
         highest_value = -1
-        self.log.info(self.template_name_profiles)
+        self.log.debug(
+            "Template name profiles:\n{}".format(self.template_name_profiles)
+        )
         for name, filters in self.template_name_profiles.items():
             value = 0
             families = filters.get("families")