Collect and integrate workfile-centric dependency links

This commit is contained in:
davidlatwe 2021-11-01 02:45:58 +08:00
parent e81a054353
commit bb02a57acd
2 changed files with 151 additions and 0 deletions

View file

@@ -0,0 +1,61 @@
import pyblish.api
class CollectSceneLoadedVersions(pyblish.api.ContextPlugin):
    """Collect the asset versions currently loaded (containerized) in scene.

    Asks the registered host for its containers via ``host.ls()``, resolves
    each container's representation id to the parent version document, and
    stores the collected entries in ``context.data["loadedVersions"]``.

    Each entry is a dict with keys:
        objectName      -- container node name in the scene
        subsetName      -- subset name of the loaded asset
        representation  -- representation ``ObjectId``
        version         -- parent version document ``_id``
    """

    order = pyblish.api.CollectorOrder + 0.0001
    label = "Collect Versions Loaded in Scene"
    hosts = [
        "aftereffects",
        "blender",
        "celaction",
        "fusion",
        "harmony",
        "hiero",
        "houdini",
        "maya",
        "nuke",
        "photoshop",
        "resolve",
        "tvpaint"
    ]

    def process(self, context):
        from avalon import api, io

        current_file = context.data.get("currentFile")
        if not current_file:
            # `warning` is the supported spelling; `warn` is a deprecated
            # alias in the stdlib logging API.
            self.log.warning("No work file collected.")
            return

        host = api.registered_host()
        if host is None:
            self.log.warning("No registered host.")
            return

        if not hasattr(host, "ls"):
            host_name = host.__name__
            self.log.warning(
                "Host %r doesn't have ls() implemented." % host_name)
            return

        _containers = list(host.ls())
        _repr_ids = [io.ObjectId(c["representation"]) for c in _containers]
        # Map representation id (str) -> parent version id, one DB query.
        version_by_repr = {
            str(doc["_id"]): doc["parent"] for doc in
            io.find({"_id": {"$in": _repr_ids}}, projection={"parent": 1})
        }

        loaded_versions = []
        for con in _containers:
            # NOTE:
            # may have more than one representation that are same version
            repr_id = con["representation"]
            version_id = version_by_repr.get(repr_id)
            if version_id is None:
                # The representation document no longer exists in the
                # database; skip the container instead of raising KeyError.
                self.log.warning(
                    "Representation %s of container %s not found in "
                    "database." % (repr_id, con.get("objectName")))
                continue
            version = {
                "objectName": con["objectName"],  # container node name
                "subsetName": con["name"],
                "representation": io.ObjectId(repr_id),
                "version": version_id,  # version document _id
            }
            loaded_versions.append(version)

        context.data["loadedVersions"] = loaded_versions

View file

@@ -0,0 +1,90 @@
import pyblish.api
class IntegrateInputLinks(pyblish.api.ContextPlugin):
    """Connecting version level dependency links"""

    order = pyblish.api.IntegratorOrder + 0.2
    label = "Connect Dependency InputLinks"

    def process(self, context):
        """Collect links between workfile and published versions, then save.

        Splits instances into the workfile instance and the other publishing
        instances, links loaded scene versions into the workfile version,
        links the workfile into each published version, links any explicit
        ``inputVersions``, and finally writes all links to the database.
        """
        workfile = None
        publishing = []

        for instance in context:
            version_doc = instance.data.get("versionEntity")
            if not version_doc:
                self.log.debug("Instance %s doesn't have version." % instance)
                continue

            version_data = version_doc.get("data", {})
            families = version_data.get("families", [])
            self.log.debug(families)
            if "workfile" in families:
                workfile = instance
            else:
                publishing.append(instance)

        if workfile is None:
            # `warning` is the supported spelling; `warn` is a deprecated
            # alias in the stdlib logging API.
            self.log.warning("No workfile in this publish session.")
        else:
            workfile_version_doc = workfile.data["versionEntity"]
            # link all loaded versions in scene into workfile
            for version in context.data.get("loadedVersions", []):
                self.add_link(
                    link_type="reference",
                    input_id=version["version"],
                    version_doc=workfile_version_doc,
                )
            # link workfile to all publishing versions
            for instance in publishing:
                self.add_link(
                    link_type="generative",
                    input_id=workfile_version_doc["_id"],
                    version_doc=instance.data["versionEntity"],
                )

        # link explicitly declared input versions as dependencies of each
        # publishing instance (independent of whether a workfile exists)
        for instance in publishing:
            for input_version in instance.data.get("inputVersions") or []:
                self.add_link(
                    link_type="generative",
                    input_id=input_version,
                    version_doc=instance.data["versionEntity"],
                )

        self.write_links_to_database(context)

    def add_link(self, link_type, input_id, version_doc):
        """Append one input link onto *version_doc*'s in-memory data.

        Args:
            link_type (str): link kind, e.g. "reference" or "generative".
            input_id: id of the input version (coerced to ``ObjectId``).
            version_doc (dict): version document to receive the link; its
                ``data.inputLinks`` list is created on demand and mutated
                in place (persisted later by ``write_links_to_database``).
        """
        from collections import OrderedDict
        from avalon import io

        # NOTE:
        # using OrderedDict() here is just for ensuring field order between
        # python versions, if we ever need to use mongodb operation '$addToSet'
        # to update and avoid duplicating elements in 'inputLinks' array in the
        # future.
        link = OrderedDict()
        link["type"] = link_type
        link["input"] = io.ObjectId(input_id)
        link["linkedBy"] = "publish"

        if "inputLinks" not in version_doc["data"]:
            version_doc["data"]["inputLinks"] = []
        version_doc["data"]["inputLinks"].append(link)

    def write_links_to_database(self, context):
        """Persist each version's accumulated ``inputLinks`` to the database.

        Skips instances without a version document or without any collected
        links; otherwise issues a ``$set`` on ``data.inputLinks``.
        """
        from avalon import io

        for instance in context:
            version_doc = instance.data.get("versionEntity")
            if version_doc is None:
                continue

            input_links = version_doc["data"].get("inputLinks")
            if input_links is None:
                continue

            io.update_one({"_id": version_doc["_id"]},
                          {"$set": {"data.inputLinks": input_links}})