Merge pull request #94 from ynput/enhancement/OP-8248_Remove-maya-remote-publish

Maya: Remove remote publish logic
This commit is contained in:
Jakub Trllo 2024-02-20 11:52:24 +01:00 committed by GitHub
commit ea3d6ab1b9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 0 additions and 202 deletions

View file

@@ -1,39 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect instances that should be processed and published on DL.
"""
import os
import pyblish.api
from ayon_core.pipeline import PublishValidationError
class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin):
    """Collect instances that should be processed and published on DL.

    Some long-running publishes (not just renders) can be offloaded to
    Deadline. This plugin compares each instance's subset name against an
    environment variable and marks only the matching instance as
    publishable by the farm.

    Triggered only when running in headless mode, e.g. on a farm.
    """

    order = pyblish.api.CollectorOrder + 0.499
    label = "Collect Deadline Publishable Instance"
    targets = ["remote"]

    def process(self, instance):
        self.log.debug("CollectDeadlinePublishableInstances")
        target_subset = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '')
        if not target_subset:
            raise PublishValidationError(
                "OPENPYPE_PUBLISH_SUBSET env var "
                "required for remote publishing"
            )

        subset_name = instance.data["subset"]
        if subset_name != target_subset:
            # Any non-matching instance is disabled for this farm run.
            self.log.debug("Skipping {}".format(subset_name))
            instance.data["publish"] = False
            return

        self.log.debug("Publish {}".format(subset_name))
        instance.data["publish"] = True
        # Already running on the farm, so don't re-submit to it.
        instance.data["farm"] = False

View file

@@ -1,131 +0,0 @@
import os
import attr
from datetime import datetime
from ayon_core.pipeline import PublishXmlValidationError
from ayon_core.lib import is_in_tests
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
import pyblish.api
@attr.s
class MayaPluginInfo(object):
    """Payload for the Deadline "MayaBatch" plugin submission.

    Field order matters: it defines the attrs-generated ``__init__``
    signature, so do not reorder.
    """

    Build = attr.attrib(default=None)  # Don't force build
    StrictErrorChecking = attr.attrib(default=True)
    SceneFile = attr.attrib(default=None)  # Input scene
    Version = attr.attrib(default=None)  # Mandatory for Deadline
    ProjectPath = attr.attrib(default=None)
    ScriptJob = attr.attrib(default=True)
    ScriptFilename = attr.attrib(default=None)
class MayaSubmitRemotePublishDeadline(
        abstract_submit_deadline.AbstractSubmitDeadline):
    """Submit Maya scene to perform a local publish in Deadline.

    Publishing in Deadline can be helpful for scenes that publish very slow.
    This way it can process in the background on another machine without the
    Artist having to wait for the publish to finish on their local machine.

    Submission is done through the Deadline Web Service. DL then triggers
    `openpype/scripts/remote_publish.py`.

    Each publishable instance creates its own full publish job.

    Different from `ProcessSubmittedJobOnFarm` which creates publish job
    depending on metadata json containing context and instance data of
    rendered files.
    """

    label = "Submit Scene to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["maya"]
    families = ["publish.farm"]
    targets = ["local"]

    def process(self, instance):
        """Validate context state and submit the scene to Deadline.

        Args:
            instance (pyblish.api.Instance): Instance marked for farm
                publishing.

        Raises:
            PublishXmlValidationError: When any previously run plugin
                reported a failed result.
        """
        # Ensure no errors so far
        if not (all(result["success"]
                for result in instance.context.data["results"])):
            raise PublishXmlValidationError("Publish process has errors")

        if not instance.data["publish"]:
            self.log.warning("No active instances found. "
                             "Skipping submission..")
            return

        super(MayaSubmitRemotePublishDeadline, self).process(instance)

    def get_job_info(self):
        """Build the Deadline JobInfo payload for this submission.

        Returns:
            DeadlineJobInfo: Populated job info including the environment
                variables required by the remote publish script.
        """
        instance = self._instance
        context = instance.context

        project_name = context.data["projectName"]
        scene = context.data["currentFile"]
        scenename = os.path.basename(scene)

        job_name = "{scene} [PUBLISH]".format(scene=scenename)
        batch_name = "{code} - {scene}".format(code=project_name,
                                               scene=scenename)

        if is_in_tests():
            # Keep batch names unique between automated test runs
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")

        job_info = DeadlineJobInfo(Plugin="MayaBatch")
        job_info.BatchName = batch_name
        job_info.Name = job_name
        job_info.UserName = context.data.get("user")
        job_info.Comment = context.data.get("comment", "")

        # use setting for publish job on farm, no reason to have it separately
        project_settings = context.data["project_settings"]
        deadline_publish_job_sett = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"]  # noqa
        job_info.Department = deadline_publish_job_sett["deadline_department"]
        job_info.ChunkSize = deadline_publish_job_sett["deadline_chunk_size"]
        job_info.Priority = deadline_publish_job_sett["deadline_priority"]
        job_info.Group = deadline_publish_job_sett["deadline_group"]
        job_info.Pool = deadline_publish_job_sett["deadline_pool"]

        # Include critical environment variables with submission + Session
        keys = [
            "FTRACK_API_USER",
            "FTRACK_API_KEY",
            "FTRACK_SERVER"
        ]
        environment = {
            key: os.environ[key]
            for key in keys
            if key in os.environ
        }
        environment["AYON_PROJECT_NAME"] = project_name
        environment["AYON_FOLDER_PATH"] = context.data["asset"]
        environment["AYON_TASK_NAME"] = context.data["task"]
        environment["AYON_APP_NAME"] = os.environ.get("AYON_APP_NAME")
        environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
        environment["AYON_LOG_NO_COLORS"] = "1"
        environment["AYON_USERNAME"] = context.data["user"]
        environment["AYON_REMOTE_PUBLISH"] = "1"

        for key, value in environment.items():
            job_info.EnvironmentKeyValue[key] = value

        # BUG FIX: the assembled job info was never returned, so the
        # abstract submitter received None instead of the payload.
        return job_info

    def get_plugin_info(self):
        """Build the Deadline PluginInfo payload for the MayaBatch plugin.

        Returns:
            dict: Plain-dict serialization of :class:`MayaPluginInfo`.
        """
        # Not all hosts can import this module.
        from maya import cmds

        scene = self._instance.context.data["currentFile"]

        plugin_info = MayaPluginInfo()
        plugin_info.SceneFile = scene
        plugin_info.ScriptFilename = "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py"  # noqa
        plugin_info.Version = cmds.about(version=True)
        plugin_info.ProjectPath = cmds.workspace(query=True,
                                                 rootDirectory=True)

        return attr.asdict(plugin_info)

View file

@@ -485,26 +485,6 @@ def filter_pyblish_plugins(plugins):
plugins.remove(plugin)
def remote_publish(log):
    """Loops through all plugins, logs to console. Used for tests.

    Args:
        log (Logger)
    """
    # Error exit as soon as any error occurs.
    error_format = "Failed {plugin.__name__}: {error}\n{error.traceback}"

    for result in pyblish.util.publish_iter():
        if result["error"]:
            error_message = error_format.format(**result)
            log.error(error_message)
            # 'Fatal Error: ' is because of Deadline
            raise RuntimeError("Fatal Error: {}".format(error_message))
def get_errored_instances_from_context(context, plugin=None):
"""Collect failed instances from pyblish context.

View file

@@ -1,12 +0,0 @@
try:
    from ayon_core.lib import Logger
    from ayon_core.pipeline.publish.lib import remote_publish
except ImportError as exc:
    # Ensure Deadline fails by output an error that contains "Fatal Error:"
    # Chain the original exception explicitly so the real import failure
    # is preserved as the direct cause in the traceback.
    raise ImportError("Fatal Error: %s" % exc) from exc


if __name__ == "__main__":
    # Perform remote publish with thorough error checking
    log = Logger.get_logger(__name__)
    remote_publish(log)