OP-2787 - WIP implementation

This commit is contained in:
Petr Kalis 2022-05-19 17:05:55 +02:00
parent 85c5b7aea8
commit 13b4b18d16
13 changed files with 214 additions and 1 deletion

View file

@@ -44,6 +44,7 @@ from . import resources
from .plugin import (
    Extractor,
    Integrator,
    ValidatePipelineOrder,
    ValidateContentsOrder,
@@ -86,6 +87,7 @@ __all__ = [
    # plugin classes
    "Extractor",
    "Integrator",
    # ordering
    "ValidatePipelineOrder",
    "ValidateContentsOrder",

View file

@@ -71,8 +71,15 @@ def install():
    if lib.IS_HEADLESS:
        log.info(("Running in headless mode, skipping Maya "
                  "save/open/new callback installation.."))
        # Register "farm" target for headless (farm) publishes
        print("Registering pyblish target: farm")
        pyblish.api.register_target("farm")
        return

    # Register default "local" target
    print("Registering pyblish target: local")
    pyblish.api.register_target("local")

    _set_project()
    _register_callbacks()
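
For context, a minimal sketch of how the registered target gates plugin execution in pyblish (the plugin name is illustrative; this is not part of the commit):

import pyblish.api

class ExampleLocalIntegrator(pyblish.api.ContextPlugin):
    # Only runs when "local" is among the registered targets, so it is
    # skipped in headless sessions that register "farm" instead.
    targets = ["local"]

    def process(self, context):
        print("local publish step")

pyblish.api.register_target("local")
print(pyblish.api.registered_targets())  # ['local']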

View file

@@ -38,3 +38,7 @@ class CreateAnimation(plugin.Creator):
        # Default to exporting world-space
        self.data["worldSpace"] = True

        # Default to not sending to the farm.
        self.data["farm"] = False
        self.data["priority"] = 50

View file

@@ -28,3 +28,7 @@ class CreatePointCache(plugin.Creator):
        # Add options for custom attributes
        self.data["attr"] = ""
        self.data["attrPrefix"] = ""

        # Default to not sending to the farm.
        self.data["farm"] = False
        self.data["priority"] = 50

View file

@@ -55,3 +55,6 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):
        # Store data in the instance for the validator
        instance.data["out_hierarchy"] = hierarchy

        if instance.data.get("farm"):
            instance.data["families"].append("deadline")

View file

@@ -0,0 +1,14 @@
import pyblish.api


class CollectPointcache(pyblish.api.InstancePlugin):
    """Collect pointcache data for instance."""

    order = pyblish.api.CollectorOrder + 0.4
    families = ["pointcache"]
    label = "Collect Pointcache"
    hosts = ["maya"]

    def process(self, instance):
        if instance.data.get("farm"):
            instance.data["families"].append("deadline")

View file

@@ -23,6 +23,14 @@ class ExtractAnimation(openpype.api.Extractor):
    families = ["animation"]

    def process(self, instance):
        if instance.data.get("farm"):
            path = os.path.join(
                os.path.dirname(instance.context.data["currentFile"]),
                "cache",
                instance.data["name"] + ".abc"
            )
            instance.data["expectedFiles"] = [os.path.normpath(path)]
            return

        # Collect the out set nodes
        out_sets = [node for node in instance if node.endswith("out_SET")]
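
A worked example of the path built above, with illustrative values:

import os

current_file = "/projects/show/work/shot010/maya/scene_v001.ma"
name = "animationMain"

path = os.path.join(os.path.dirname(current_file), "cache", name + ".abc")
print(os.path.normpath(path))
# /projects/show/work/shot010/maya/cache/animationMain.abc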

View file

@@ -25,6 +25,14 @@ class ExtractAlembic(openpype.api.Extractor):
                "vrayproxy"]

    def process(self, instance):
        if instance.data.get("farm"):
            path = os.path.join(
                os.path.dirname(instance.context.data["currentFile"]),
                "cache",
                instance.data["name"] + ".abc"
            )
            instance.data["expectedFiles"] = [os.path.normpath(path)]
            return

        nodes = instance[:]

View file

@ -228,7 +228,7 @@ def _get_close_plugin(close_plugin_name, log):
if plugin.__name__ == close_plugin_name:
return plugin
log.warning("Close plugin not found, app might not close.")
log.debug("Close plugin not found, app might not close.")
def get_task_data(batch_dir):

View file

@@ -0,0 +1,137 @@
import os

import requests
from maya import cmds

from openpype.pipeline import legacy_io
import pyblish.api


class MayaSubmitRemotePublishDeadline(pyblish.api.ContextPlugin):
    """Submit Maya scene to perform a local publish in Deadline.

    Publishing in Deadline can be helpful for scenes that publish very
    slowly. This way it can process in the background on another machine
    without the artist having to wait for the publish to finish on their
    local machine.

    Submission is done through the Deadline Web Service.

    Unlike `ProcessSubmittedJobOnFarm`, which creates a publish job based
    on a metadata JSON containing context and instance data of rendered
    files, this plugin submits the whole scene for publishing.
    """

    label = "Submit Scene to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["maya"]
    families = ["deadline"]

    # custom Deadline attributes
    deadline_department = ""
    deadline_pool = ""
    deadline_pool_secondary = ""
    deadline_group = ""
    deadline_chunk_size = 1
    deadline_priority = 50
    def process(self, context):
        # Ensure no errors so far
        assert all(result["success"] for result in context.data["results"]), (
            "Errors found, aborting integration..")

        # Note that `publish` data member might change in the future.
        # See: https://github.com/pyblish/pyblish-base/issues/307
        actives = [i for i in context if i.data["publish"]]
        instance_names = sorted(instance.name for instance in actives)

        if not instance_names:
            self.log.warning("No active instances found. "
                             "Skipping submission..")
            return

        scene = context.data["currentFile"]
        scenename = os.path.basename(scene)

        # Get project code
        project_name = legacy_io.Session["AVALON_PROJECT"]

        job_name = "{scene} [PUBLISH]".format(scene=scenename)
        batch_name = "{code} - {scene}".format(code=project_name,
                                               scene=scenename)
        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": "MayaBatch",
                "BatchName": batch_name,
                "Name": job_name,
                "UserName": context.data["user"],
                # "Comment": instance.context.data.get("comment", ""),
                # "InitialStatus": state
                "Department": self.deadline_department,
                "ChunkSize": self.deadline_chunk_size,
                "Priority": self.deadline_priority,
                "Group": self.deadline_group,
            },
            "PluginInfo": {
                "Build": None,  # Don't force build
                "StrictErrorChecking": True,
                "ScriptJob": True,

                # Inputs
                "SceneFile": scene,
                "ScriptFilename": "{OPENPYPE_ROOT}/scripts/remote_publish.py",

                # Mandatory for Deadline
                "Version": cmds.about(version=True),

                # Resolve relative references
                "ProjectPath": cmds.workspace(query=True,
                                              rootDirectory=True),
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }
        # Include critical environment variables with submission,
        # merged with legacy_io.Session
        keys = [
            "FTRACK_API_USER",
            "FTRACK_API_KEY",
            "FTRACK_SERVER"
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **legacy_io.Session)
        # TODO replace legacy_io with context.data ?
        environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"]
        environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"]
        environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"]
        environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
        environment["OPENPYPE_LOG_NO_COLORS"] = "1"
        environment["OPENPYPE_USERNAME"] = context.data["user"]
        environment["OPENPYPE_PUBLISH_JOB"] = "1"
        environment["OPENPYPE_RENDER_JOB"] = "0"
        environment["PYBLISH_ACTIVE_INSTANCES"] = ",".join(instance_names)

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })
self.log.info("Submitting Deadline job ...")
deadline_url = context.data["defaultDeadline"]
assert deadline_url, "Requires Deadline Webservice URL"
url = "{}/api/jobs".format(deadline_url)
response = requests.post(url, json=payload, timeout=10)
if not response.ok:
raise Exception(response.text)
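
A worked example of the EnvironmentKeyValue expansion above, with illustrative values (dict insertion order is preserved in Python 3.7+, so the indices are stable):

environment = {"AVALON_PROJECT": "show", "OPENPYPE_PUBLISH_JOB": "1"}

job_info = {
    "EnvironmentKeyValue%d" % index: "{key}={value}".format(
        key=key, value=environment[key])
    for index, key in enumerate(environment)
}
print(job_info)
# {'EnvironmentKeyValue0': 'AVALON_PROJECT=show',
#  'EnvironmentKeyValue1': 'OPENPYPE_PUBLISH_JOB=1'}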

View file

@@ -18,6 +18,16 @@ class InstancePlugin(pyblish.api.InstancePlugin):
        super(InstancePlugin, cls).process(cls, *args, **kwargs)


class Integrator(InstancePlugin):
    """Integrator base class.

    Wraps the pyblish instance plugin. Targets are set to "local", which
    means all integrators run on "local" publishes by default. The "farm"
    target can be used for integrators that should run on a farm instead.
    """

    targets = ["local"]


class Extractor(InstancePlugin):
    """Extractor base class.
@@ -28,6 +38,8 @@ class Extractor(InstancePlugin):
    """

    targets = ["local"]

    order = 2.0

    def staging_dir(self, instance):
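
An illustrative subclass (not part of this commit) showing how the default target would be overridden for a farm-only integrator:

class ExampleFarmIntegrator(Integrator):
    # Runs only in sessions that registered the "farm" target,
    # e.g. headless publishes submitted to Deadline.
    targets = ["farm"]

    def process(self, instance):
        pass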

View file

@@ -139,6 +139,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                ef, instance.data["family"], instance.data["families"]))
            return

        # Farm-bound instances are integrated by the publish job on the
        # farm, not locally.
        if "deadline" in instance.data["families"]:
            return

        self.integrated_file_sizes = {}
        try:
            self.register(instance)

View file

@@ -0,0 +1,11 @@
try:
    from openpype.api import Logger
    import openpype.lib.remote_publish
except ImportError as exc:
    # Ensure Deadline fails by raising an error whose message
    # contains "Fatal Error:"
    raise ImportError("Fatal Error: %s" % exc)


if __name__ == "__main__":
    # Perform remote publish with thorough error checking
    log = Logger.get_logger(__name__)
    openpype.lib.remote_publish.publish(log)
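
For illustration (a hedged sketch, not part of this commit), the farm-side process can recover which instances to publish from the PYBLISH_ACTIVE_INSTANCES variable set by the submitter:

import os

active = [name for name in
          os.environ.get("PYBLISH_ACTIVE_INSTANCES", "").split(",")
          if name]
print(active)  # e.g. ['animationMain', 'pointcacheMain']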