diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py
index 800d6fb883..ee259fb70d 100644
--- a/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py
+++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_instances_usd_layered.py
@@ -2,7 +2,7 @@ import hou
 import pyblish.api
 from ayon_core.hosts.houdini.api import lib
 import ayon_core.hosts.houdini.api.usd as hou_usdlib
-import ayon_core.lib.usdlib as usdlib
+from ayon_core.pipeline import usdlib
 
 
 class CollectInstancesUsdLayered(pyblish.api.ContextPlugin):
diff --git a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
index 0fb269516c..c43ff4f442 100644
--- a/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
+++ b/client/ayon_core/hosts/houdini/plugins/publish/collect_usd_bootstrap.py
@@ -5,7 +5,7 @@ from ayon_core.client import (
     get_asset_by_name,
     get_asset_name_identifier,
 )
-import ayon_core.lib.usdlib as usdlib
+from ayon_core.pipeline import usdlib
 
 
 class CollectUsdBootstrap(pyblish.api.InstancePlugin):
diff --git a/client/ayon_core/hosts/maya/api/commands.py b/client/ayon_core/hosts/maya/api/commands.py
index b52d5e6c2d..f69dca97a8 100644
--- a/client/ayon_core/hosts/maya/api/commands.py
+++ b/client/ayon_core/hosts/maya/api/commands.py
@@ -38,25 +38,6 @@ class ToolWindows:
         cls._windows[tool] = window
 
 
-def edit_shader_definitions():
-    from qtpy import QtWidgets
-    from ayon_core.hosts.maya.api.shader_definition_editor import (
-        ShaderDefinitionsEditor
-    )
-    from ayon_core.tools.utils import qt_app_context
-
-    top_level_widgets = QtWidgets.QApplication.topLevelWidgets()
-    main_window = next(widget for widget in top_level_widgets
-                       if widget.objectName() == "MayaWindow")
-
-    with qt_app_context():
-        window = ToolWindows.get_window("shader_definition_editor")
-        if not window:
-            window = ShaderDefinitionsEditor(parent=main_window)
-            ToolWindows.set_window("shader_definition_editor", window)
-        window.show()
-
-
 def _resolution_from_document(doc):
     if not doc or "data" not in doc:
         print("Entered document is not valid. \"{}\"".format(str(doc)))
diff --git a/client/ayon_core/hosts/maya/api/shader_definition_editor.py b/client/ayon_core/hosts/maya/api/shader_definition_editor.py
deleted file mode 100644
index bfa531eb87..0000000000
--- a/client/ayon_core/hosts/maya/api/shader_definition_editor.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Editor for shader definitions.
-
-Shader names are stored as simple text file over GridFS in mongodb.
-
-"""
-import os
-from qtpy import QtWidgets, QtCore, QtGui
-from ayon_core.client.mongo import OpenPypeMongoConnection
-from ayon_core import resources
-import gridfs
-
-
-DEFINITION_FILENAME = "{}/maya/shader_definition.txt".format(
-    os.getenv("AYON_PROJECT_NAME"))
-
-
-class ShaderDefinitionsEditor(QtWidgets.QWidget):
-    """Widget serving as simple editor for shader name definitions."""
-
-    # name of the file used to store definitions
-
-    def __init__(self, parent=None):
-        super(ShaderDefinitionsEditor, self).__init__(parent)
-        self._mongo = OpenPypeMongoConnection.get_mongo_client()
-        self._gridfs = gridfs.GridFS(
-            self._mongo[os.getenv("OPENPYPE_DATABASE_NAME")])
-        self._editor = None
-
-        self._original_content = self._read_definition_file()
-
-        self.setObjectName("shaderDefinitionEditor")
-        self.setWindowTitle("OpenPype shader name definition editor")
-        icon = QtGui.QIcon(resources.get_ayon_icon_filepath())
-        self.setWindowIcon(icon)
-        self.setWindowFlags(QtCore.Qt.Window)
-        self.setParent(parent)
-        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
-        self.resize(750, 500)
-
-        self._setup_ui()
-        self._reload()
-
-    def _setup_ui(self):
-        """Setup UI of Widget."""
-        layout = QtWidgets.QVBoxLayout(self)
-        label = QtWidgets.QLabel()
-        label.setText("Put shader names here - one name per line:")
-        layout.addWidget(label)
-        self._editor = QtWidgets.QPlainTextEdit()
-        self._editor.setStyleSheet("border: none;")
-        layout.addWidget(self._editor)
-
-        btn_layout = QtWidgets.QHBoxLayout()
-        save_btn = QtWidgets.QPushButton("Save")
-        save_btn.clicked.connect(self._save)
-
-        reload_btn = QtWidgets.QPushButton("Reload")
-        reload_btn.clicked.connect(self._reload)
-
-        exit_btn = QtWidgets.QPushButton("Exit")
-        exit_btn.clicked.connect(self._close)
-
-        btn_layout.addWidget(reload_btn)
-        btn_layout.addWidget(save_btn)
-        btn_layout.addWidget(exit_btn)
-
-        layout.addLayout(btn_layout)
-
-    def _read_definition_file(self, file=None):
-        """Read definition file from database.
-
-        Args:
-            file (gridfs.grid_file.GridOut, Optional): File to read. If not
-                set, new query will be issued to find it.
-
-        Returns:
-            str: Content of the file or empty string if file doesn't exist.
-
-        """
-        content = ""
-        if not file:
-            file = self._gridfs.find_one(
-                {"filename": DEFINITION_FILENAME})
-            if not file:
-                print(">>> [SNDE]: nothing in database yet")
-                return content
-        content = file.read()
-        file.close()
-        return content
-
-    def _write_definition_file(self, content, force=False):
-        """Write content as definition to file in database.
-
-        Before file is written, check is made if its content has not
-        changed. If is changed, warning is issued to user if he wants
-        it to overwrite. Note: GridFs doesn't allow changing file content.
-        You need to delete existing file and create new one.
-
-        Args:
-            content (str): Content to write.
-
-        Raises:
-            ContentException: If file is changed in database while
-                editor is running.
-        """
-        file = self._gridfs.find_one(
-            {"filename": DEFINITION_FILENAME})
-        if file:
-            content_check = self._read_definition_file(file)
-            if content == content_check:
-                print(">>> [SNDE]: content not changed")
-                return
-            if self._original_content != content_check:
-                if not force:
-                    raise ContentException("Content changed")
-                print(">>> [SNDE]: overwriting data")
-            file.close()
-            self._gridfs.delete(file._id)
-
-        file = self._gridfs.new_file(
-            filename=DEFINITION_FILENAME,
-            content_type='text/plain',
-            encoding='utf-8')
-        file.write(content)
-        file.close()
-        QtCore.QTimer.singleShot(200, self._reset_style)
-        self._editor.setStyleSheet("border: 1px solid #33AF65;")
-        self._original_content = content
-
-    def _reset_style(self):
-        """Reset editor style back.
-
-        Used to visually indicate save.
-
-        """
-        self._editor.setStyleSheet("border: none;")
-
-    def _close(self):
-        self.hide()
-
-    def closeEvent(self, event):
-        event.ignore()
-        self.hide()
-
-    def _reload(self):
-        print(">>> [SNDE]: reloading")
-        self._set_content(self._read_definition_file())
-
-    def _save(self):
-        try:
-            self._write_definition_file(content=self._editor.toPlainText())
-        except ContentException:
-            # content has changed meanwhile
-            print(">>> [SNDE]: content has changed")
-            self._show_overwrite_warning()
-
-    def _set_content(self, content):
-        self._editor.setPlainText(content)
-
-    def _show_overwrite_warning(self):
-        reply = QtWidgets.QMessageBox.question(
-            self,
-            "Warning",
-            ("Content you are editing was changed meanwhile in database.\n"
-             "Please, reload and solve the conflict."),
-            QtWidgets.QMessageBox.OK)
-
-        if reply == QtWidgets.QMessageBox.OK:
-            # do nothing
-            pass
-
-
-class ContentException(Exception):
-    """This is risen during save if file is changed in database."""
-    pass
diff --git a/client/ayon_core/hosts/maya/plugins/publish/validate_model_name.py b/client/ayon_core/hosts/maya/plugins/publish/validate_model_name.py
deleted file mode 100644
index 673cfd0d29..0000000000
--- a/client/ayon_core/hosts/maya/plugins/publish/validate_model_name.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Validate model nodes names."""
-import os
-import platform
-import re
-
-import gridfs
-import pyblish.api
-from maya import cmds
-
-import ayon_core.hosts.maya.api.action
-from ayon_core.client.mongo import OpenPypeMongoConnection
-from ayon_core.hosts.maya.api.shader_definition_editor import (
-    DEFINITION_FILENAME)
-from ayon_core.pipeline.publish import (
-    OptionalPyblishPluginMixin, PublishValidationError, ValidateContentsOrder)
-
-
-class ValidateModelName(pyblish.api.InstancePlugin,
-                        OptionalPyblishPluginMixin):
-    """Validate name of model
-
-    starts with (somename)_###_(materialID)_GEO
-    materialID must be present in list
-    padding number doesn't have limit
-
-    """
-    optional = True
-    order = ValidateContentsOrder
-    hosts = ["maya"]
-    families = ["model"]
-    label = "Model Name"
-    actions = [ayon_core.hosts.maya.api.action.SelectInvalidAction]
-    material_file = None
-    database_file = DEFINITION_FILENAME
-
-    @classmethod
-    def get_invalid(cls, instance):
-        """Get invalid nodes."""
-        use_db = cls.database
-
-        def is_group(group_name):
-            """Find out if supplied transform is group or not."""
-            try:
-                children = cmds.listRelatives(group_name, children=True)
-                for child in children:
-                    if not cmds.ls(child, transforms=True):
-                        return False
-                return True
-            except Exception:
-                return False
-
-        invalid = []
-        content_instance = instance.data.get("setMembers", None)
-        if not content_instance:
-            cls.log.error("Instance has no nodes!")
-            return True
-            pass
-
-        # validate top level group name
-        assemblies = cmds.ls(content_instance, assemblies=True, long=True)
-        if len(assemblies) != 1:
-            cls.log.error("Must have exactly one top group")
-            return assemblies or True
-        top_group = assemblies[0]
-        regex = cls.top_level_regex
-        r = re.compile(regex)
-        m = r.match(top_group)
-        project_name = instance.context.data["projectName"]
-        current_folder_path = instance.context.data["folderPath"]
-        if m is None:
-            cls.log.error("invalid name on: {}".format(top_group))
-            cls.log.error("name doesn't match regex {}".format(regex))
-            invalid.append(top_group)
-        else:
-            if "asset" in r.groupindex:
-                if m.group("folderPath") != current_folder_path:
-                    cls.log.error("Invalid asset name in top level group.")
-                    return top_group
-            if "subset" in r.groupindex:
-                if m.group("subset") != instance.data.get("subset"):
-                    cls.log.error("Invalid subset name in top level group.")
-                    return top_group
-            if "project" in r.groupindex:
-                if m.group("project") != project_name:
-                    cls.log.error("Invalid project name in top level group.")
-                    return top_group
-
-        descendants = cmds.listRelatives(content_instance,
-                                         allDescendents=True,
-                                         fullPath=True) or []
-
-        descendants = cmds.ls(descendants, noIntermediate=True, long=True)
-        trns = cmds.ls(descendants, long=False, type='transform')
-
-        # filter out groups
-        filtered = [node for node in trns if not is_group(node)]
-
-        # load shader list file as utf-8
-        shaders = []
-        if not use_db:
-            material_file = cls.material_file[platform.system().lower()]
-            if material_file:
-                if os.path.isfile(material_file):
-                    shader_file = open(material_file, "r")
-                    shaders = shader_file.readlines()
-                    shader_file.close()
-                else:
-                    cls.log.error("Missing shader name definition file.")
-                    return True
-        else:
-            client = OpenPypeMongoConnection.get_mongo_client()
-            fs = gridfs.GridFS(client[os.getenv("OPENPYPE_DATABASE_NAME")])
-            shader_file = fs.find_one({"filename": cls.database_file})
-            if not shader_file:
-                cls.log.error("Missing shader name definition in database.")
-                return True
-            shaders = shader_file.read().splitlines()
-            shader_file.close()
-
-        # strip line endings from list
-        shaders = [s.rstrip() for s in shaders if s.rstrip()]
-
-        # compile regex for testing names
-        regex = cls.regex
-        r = re.compile(regex)
-
-        for obj in filtered:
-            cls.log.debug("testing: {}".format(obj))
-            m = r.match(obj)
-            if m is None:
-                cls.log.error("invalid name on: {}".format(obj))
-                invalid.append(obj)
-            else:
-                # if we have shader files and shader named group is in
-                # regex, test this group against names in shader file
-                if "shader" in r.groupindex and shaders:
-                    try:
-                        if not m.group('shader') in shaders:
-                            cls.log.error(
-                                "invalid materialID on: {0} ({1})".format(
-                                    obj, m.group('shader')))
-                            invalid.append(obj)
-                    except IndexError:
-                        # shader named group doesn't match
-                        cls.log.error(
-                            "shader group doesn't match: {}".format(obj))
-                        invalid.append(obj)
-
-        return invalid
-
-    def process(self, instance):
-        """Plugin entry point."""
-        if not self.is_active(instance.data):
-            return
-
-        invalid = self.get_invalid(instance)
-
-        if invalid:
-            raise PublishValidationError(
-                "Model naming is invalid. See the log.")
diff --git a/client/ayon_core/modules/deadline/plugins/publish/collect_publishable_instances.py b/client/ayon_core/modules/deadline/plugins/publish/collect_publishable_instances.py
deleted file mode 100644
index 347da86360..0000000000
--- a/client/ayon_core/modules/deadline/plugins/publish/collect_publishable_instances.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Collect instances that should be processed and published on DL.
-
-"""
-import os
-
-import pyblish.api
-from ayon_core.pipeline import PublishValidationError
-
-
-class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin):
-    """Collect instances that should be processed and published on DL.
-
-    Some long running publishes (not just renders) could be offloaded to DL,
-    this plugin compares theirs name against env variable, marks only
-    publishable by farm.
-
-    Triggered only when running only in headless mode, eg on a farm.
-    """
-
-    order = pyblish.api.CollectorOrder + 0.499
-    label = "Collect Deadline Publishable Instance"
-    targets = ["remote"]
-
-    def process(self, instance):
-        self.log.debug("CollectDeadlinePublishableInstances")
-        publish_inst = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '')
-        if not publish_inst:
-            raise PublishValidationError("OPENPYPE_PUBLISH_SUBSET env var "
-                                         "required for remote publishing")
-
-        subset_name = instance.data["subset"]
-        if subset_name == publish_inst:
-            self.log.debug("Publish {}".format(subset_name))
-            instance.data["publish"] = True
-            instance.data["farm"] = False
-        else:
-            self.log.debug("Skipping {}".format(subset_name))
-            instance.data["publish"] = False
diff --git a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/client/ayon_core/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py
deleted file mode 100644
index 772fa03628..0000000000
--- a/client/ayon_core/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import os
-import attr
-from datetime import datetime
-
-from ayon_core.pipeline import PublishXmlValidationError
-from ayon_core.lib import is_in_tests
-from openpype_modules.deadline import abstract_submit_deadline
-from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
-
-import pyblish.api
-
-
-@attr.s
-class MayaPluginInfo(object):
-    Build = attr.ib(default=None)  # Don't force build
-    StrictErrorChecking = attr.ib(default=True)
-
-    SceneFile = attr.ib(default=None)  # Input scene
-    Version = attr.ib(default=None)  # Mandatory for Deadline
-    ProjectPath = attr.ib(default=None)
-
-    ScriptJob = attr.ib(default=True)
-    ScriptFilename = attr.ib(default=None)
-
-
-class MayaSubmitRemotePublishDeadline(
-        abstract_submit_deadline.AbstractSubmitDeadline):
-    """Submit Maya scene to perform a local publish in Deadline.
-
-    Publishing in Deadline can be helpful for scenes that publish very slow.
-    This way it can process in the background on another machine without the
-    Artist having to wait for the publish to finish on their local machine.
-
-    Submission is done through the Deadline Web Service. DL then triggers
-    `openpype/scripts/remote_publish.py`.
-
-    Each publishable instance creates its own full publish job.
-
-    Different from `ProcessSubmittedJobOnFarm` which creates publish job
-    depending on metadata json containing context and instance data of
-    rendered files.
-    """
-
-    label = "Submit Scene to Deadline"
-    order = pyblish.api.IntegratorOrder
-    hosts = ["maya"]
-    families = ["publish.farm"]
-    targets = ["local"]
-
-    def process(self, instance):
-
-        # Ensure no errors so far
-        if not (all(result["success"]
-                for result in instance.context.data["results"])):
-            raise PublishXmlValidationError("Publish process has errors")
-
-        if not instance.data["publish"]:
-            self.log.warning("No active instances found. "
-                             "Skipping submission..")
-            return
-
-        super(MayaSubmitRemotePublishDeadline, self).process(instance)
-
-    def get_job_info(self):
-        instance = self._instance
-        context = instance.context
-
-        project_name = instance.context.data["projectName"]
-        scene = instance.context.data["currentFile"]
-        scenename = os.path.basename(scene)
-
-        job_name = "{scene} [PUBLISH]".format(scene=scenename)
-        batch_name = "{code} - {scene}".format(code=project_name,
-                                               scene=scenename)
-
-        if is_in_tests():
-            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
-
-        job_info = DeadlineJobInfo(Plugin="MayaBatch")
-        job_info.BatchName = batch_name
-        job_info.Name = job_name
-        job_info.UserName = context.data.get("user")
-        job_info.Comment = context.data.get("comment", "")
-
-        # use setting for publish job on farm, no reason to have it separately
-        project_settings = context.data["project_settings"]
-        deadline_publish_job_sett = project_settings["deadline"]["publish"]["ProcessSubmittedJobOnFarm"]  # noqa
-        job_info.Department = deadline_publish_job_sett["deadline_department"]
-        job_info.ChunkSize = deadline_publish_job_sett["deadline_chunk_size"]
-        job_info.Priority = deadline_publish_job_sett["deadline_priority"]
-        job_info.Group = deadline_publish_job_sett["deadline_group"]
-        job_info.Pool = deadline_publish_job_sett["deadline_pool"]
-
-        # Include critical environment variables with submission + Session
-        keys = [
-            "FTRACK_API_USER",
-            "FTRACK_API_KEY",
-            "FTRACK_SERVER"
-        ]
-
-        environment = {
-            key: os.environ[key]
-            for key in keys
-            if key in os.environ
-        }
-
-        environment["AYON_PROJECT_NAME"] = project_name
-        environment["AYON_FOLDER_PATH"] = instance.context.data["folderPath"]
-        environment["AYON_TASK_NAME"] = instance.context.data["task"]
-        environment["AYON_APP_NAME"] = os.environ.get("AYON_APP_NAME")
-        environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
-        environment["AYON_LOG_NO_COLORS"] = "1"
-        environment["AYON_USERNAME"] = instance.context.data["user"]
-        environment["AYON_REMOTE_PUBLISH"] = "1"
-
-        for key, value in environment.items():
-            job_info.EnvironmentKeyValue[key] = value
-
-    def get_plugin_info(self):
-        # Not all hosts can import this module.
-        from maya import cmds
-        scene = self._instance.context.data["currentFile"]
-
-        plugin_info = MayaPluginInfo()
-        plugin_info.SceneFile = scene
-        plugin_info.ScriptFilename = "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py"  # noqa
-        plugin_info.Version = cmds.about(version=True)
-        plugin_info.ProjectPath = cmds.workspace(query=True,
-                                                 rootDirectory=True)
-
-        return attr.asdict(plugin_info)
diff --git a/client/ayon_core/pipeline/publish/lib.py b/client/ayon_core/pipeline/publish/lib.py
index 90725e6d79..7d980b4bbe 100644
--- a/client/ayon_core/pipeline/publish/lib.py
+++ b/client/ayon_core/pipeline/publish/lib.py
@@ -485,26 +485,6 @@ def filter_pyblish_plugins(plugins):
             plugins.remove(plugin)
 
 
-def remote_publish(log):
-    """Loops through all plugins, logs to console. Used for tests.
-
-    Args:
-        log (Logger)
-    """
-
-    # Error exit as soon as any error occurs.
-    error_format = "Failed {plugin.__name__}: {error}\n{error.traceback}"
-
-    for result in pyblish.util.publish_iter():
-        if not result["error"]:
-            continue
-
-        error_message = error_format.format(**result)
-        log.error(error_message)
-        # 'Fatal Error: ' is because of Deadline
-        raise RuntimeError("Fatal Error: {}".format(error_message))
-
-
 def get_errored_instances_from_context(context, plugin=None):
     """Collect failed instances from pyblish context.
 
diff --git a/client/ayon_core/lib/usdlib.py b/client/ayon_core/pipeline/usdlib.py
similarity index 100%
rename from client/ayon_core/lib/usdlib.py
rename to client/ayon_core/pipeline/usdlib.py
diff --git a/client/ayon_core/scripts/remote_publish.py b/client/ayon_core/scripts/remote_publish.py
deleted file mode 100644
index 7e7bf2493b..0000000000
--- a/client/ayon_core/scripts/remote_publish.py
+++ /dev/null
@@ -1,12 +0,0 @@
-try:
-    from ayon_core.lib import Logger
-    from ayon_core.pipeline.publish.lib import remote_publish
-except ImportError as exc:
-    # Ensure Deadline fails by output an error that contains "Fatal Error:"
-    raise ImportError("Fatal Error: %s" % exc)
-
-
-if __name__ == "__main__":
-    # Perform remote publish with thorough error checking
-    log = Logger.get_logger(__name__)
-    remote_publish(log)