Merge branch 'develop' into bugfix/OP-6306_Nuke-create-first-work-file-broken

Jakub Ježek 2023-08-03 13:58:53 +02:00 committed by GitHub
commit 9267426404
76 changed files with 970 additions and 388 deletions


@ -35,6 +35,7 @@ body:
label: Version
description: What version are you running? Look to OpenPype Tray
options:
- 3.16.3-nightly.3
- 3.16.3-nightly.2
- 3.16.3-nightly.1
- 3.16.2
@ -134,7 +135,6 @@ body:
- 3.14.7-nightly.4
- 3.14.7-nightly.3
- 3.14.7-nightly.2
- 3.14.7-nightly.1
validations:
required: true
- type: dropdown


@ -212,16 +212,12 @@ def _process_referenced_pipeline_result(result, link_type):
continue
for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
output_links = output.get("data", {}).get("inputLinks")
if not output_links and output["type"] != "hero_version":
continue
# Leaf
if output["_id"] not in correctly_linked_ids:
continue
_filter_input_links(
output_links,
output.get("data", {}).get("inputLinks"),
link_type,
correctly_linked_ids
)


@ -1,6 +1,6 @@
import os
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class AddLastWorkfileToLaunchArgs(PreLaunchHook):
@ -13,7 +13,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
# Execute after workfile template copy
order = 10
app_groups = [
app_groups = {
"3dsmax",
"maya",
"nuke",
@ -26,8 +26,9 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
"photoshop",
"tvpaint",
"substancepainter",
"aftereffects"
]
"aftereffects",
}
launch_types = {LaunchTypes.local}
def execute(self):
if not self.data.get("start_last_workfile"):


@ -1,7 +1,7 @@
import os
import shutil
from openpype.lib import PreLaunchHook
from openpype.settings import get_project_settings
from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.pipeline.workfile import (
get_custom_workfile_template,
get_custom_workfile_template_by_string_context
@ -19,7 +19,8 @@ class CopyTemplateWorkfile(PreLaunchHook):
# Before `AddLastWorkfileToLaunchArgs`
order = 0
app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"]
app_groups = {"blender", "photoshop", "tvpaint", "aftereffects"}
launch_types = {LaunchTypes.local}
def execute(self):
"""Check if can copy template for context and do it if possible.


@ -1,5 +1,5 @@
import os
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.pipeline.workfile import create_workdir_extra_folders
@ -14,6 +14,7 @@ class CreateWorkdirExtraFolders(PreLaunchHook):
# Execute after workfile template copy
order = 15
launch_types = {LaunchTypes.local}
def execute(self):
if not self.application.is_host:


@ -1,5 +1,5 @@
import subprocess
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class LaunchFoundryAppsWindows(PreLaunchHook):
@ -13,8 +13,9 @@ class LaunchFoundryAppsWindows(PreLaunchHook):
# Should be as last hook because must change launch arguments to string
order = 1000
app_groups = ["nuke", "nukeassist", "nukex", "hiero", "nukestudio"]
platforms = ["windows"]
app_groups = {"nuke", "nukeassist", "nukex", "hiero", "nukestudio"}
platforms = {"windows"}
launch_types = {LaunchTypes.local}
def execute(self):
# Change `creationflags` to CREATE_NEW_CONSOLE


@ -1,5 +1,5 @@
from openpype.client import get_project, get_asset_by_name
from openpype.lib import (
from openpype.lib.applications import (
PreLaunchHook,
EnvironmentPrepData,
prepare_app_environments,
@ -10,6 +10,7 @@ from openpype.pipeline import Anatomy
class GlobalHostDataHook(PreLaunchHook):
order = -100
launch_types = set()
def execute(self):
"""Prepare global objects to `data` that will be used for sure."""


@ -1,5 +1,5 @@
import os
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class LaunchWithTerminal(PreLaunchHook):
@ -12,7 +12,8 @@ class LaunchWithTerminal(PreLaunchHook):
"""
order = 1000
platforms = ["darwin"]
platforms = {"darwin"}
launch_types = {LaunchTypes.local}
def execute(self):
executable = str(self.launch_context.executable)


@ -1,10 +1,11 @@
import os
from openpype.lib import (
from openpype.lib import get_openpype_execute_args
from openpype.lib.applications import (
get_non_python_host_kwargs,
PreLaunchHook,
get_openpype_execute_args
LaunchTypes,
)
from openpype.lib.applications import get_non_python_host_kwargs
from openpype import PACKAGE_DIR as OPENPYPE_DIR
@ -16,9 +17,10 @@ class NonPythonHostHook(PreLaunchHook):
python script which launch the host. For these cases it is necessary to
prepend python (or openpype) executable and script path before application's.
"""
app_groups = ["harmony", "photoshop", "aftereffects"]
app_groups = {"harmony", "photoshop", "aftereffects"}
order = 20
launch_types = {LaunchTypes.local}
def execute(self):
# Pop executable
@ -54,4 +56,3 @@ class NonPythonHostHook(PreLaunchHook):
self.launch_context.kwargs = \
get_non_python_host_kwargs(self.launch_context.kwargs)


@ -1,8 +1,6 @@
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook
from openpype.pipeline.colorspace import (
get_imageio_config
)
from openpype.pipeline.colorspace import get_imageio_config
from openpype.pipeline.template_data import get_template_data_with_names
@ -10,7 +8,7 @@ class OCIOEnvHook(PreLaunchHook):
"""Set OCIO environment variable for hosts that use OpenColorIO."""
order = 0
hosts = [
hosts = {
"substancepainter",
"fusion",
"blender",
@ -20,8 +18,9 @@ class OCIOEnvHook(PreLaunchHook):
"maya",
"nuke",
"hiero",
"resolve"
]
"resolve",
}
launch_types = set()
def execute(self):
"""Hook entry method."""


@ -1,11 +1,5 @@
import os
import sys
import six
from openpype.lib import (
get_ffmpeg_tool_path,
run_subprocess,
)
from openpype.pipeline import publish
from openpype.hosts.aftereffects.api import get_stub


@ -1,6 +1,6 @@
from pathlib import Path
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class AddPythonScriptToLaunchArgs(PreLaunchHook):
@ -8,9 +8,8 @@ class AddPythonScriptToLaunchArgs(PreLaunchHook):
# Append after file argument
order = 15
app_groups = [
"blender",
]
app_groups = {"blender"}
launch_types = {LaunchTypes.local}
def execute(self):
if not self.launch_context.data.get("python_scripts"):


@ -2,7 +2,7 @@ import os
import re
import subprocess
from platform import system
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class InstallPySideToBlender(PreLaunchHook):
@ -16,7 +16,8 @@ class InstallPySideToBlender(PreLaunchHook):
blender's python packages.
"""
app_groups = ["blender"]
app_groups = {"blender"}
launch_types = {LaunchTypes.local}
def execute(self):
# Prelaunch hook is not crucial


@ -1,5 +1,5 @@
import subprocess
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class BlenderConsoleWindows(PreLaunchHook):
@ -13,8 +13,9 @@ class BlenderConsoleWindows(PreLaunchHook):
# Should be as last hook because must change launch arguments to string
order = 1000
app_groups = ["blender"]
platforms = ["windows"]
app_groups = {"blender"}
platforms = {"windows"}
launch_types = {LaunchTypes.local}
def execute(self):
# Change `creationflags` to CREATE_NEW_CONSOLE


@ -2,20 +2,18 @@ import os
import shutil
import winreg
import subprocess
from openpype.lib import PreLaunchHook, get_openpype_execute_args
from openpype.hosts.celaction import scripts
CELACTION_SCRIPTS_DIR = os.path.dirname(
os.path.abspath(scripts.__file__)
)
from openpype.lib import get_openpype_execute_args
from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.hosts.celaction import CELACTION_ROOT_DIR
class CelactionPrelaunchHook(PreLaunchHook):
"""
Bootstrap celaction with pype
"""
app_groups = ["celaction"]
platforms = ["windows"]
app_groups = {"celaction"}
platforms = {"windows"}
launch_types = {LaunchTypes.local}
def execute(self):
asset_doc = self.data["asset_doc"]
@ -37,7 +35,9 @@ class CelactionPrelaunchHook(PreLaunchHook):
winreg.KEY_ALL_ACCESS
)
path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py")
path_to_cli = os.path.join(
CELACTION_ROOT_DIR, "scripts", "publish_cli.py"
)
subprocess_args = get_openpype_execute_args("run", path_to_cli)
openpype_executable = subprocess_args.pop(0)
workfile_settings = self.get_workfile_settings()
@ -122,9 +122,8 @@ class CelactionPrelaunchHook(PreLaunchHook):
if not os.path.exists(workfile_path):
# TODO add ability to set different template workfile path via
# settings
openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR)
template_path = os.path.join(
openpype_celaction_dir,
CELACTION_ROOT_DIR,
"resources",
"celaction_template_scene.scn"
)


@ -6,13 +6,10 @@ import socket
from pprint import pformat
from openpype.lib import (
PreLaunchHook,
get_openpype_username,
run_subprocess,
)
from openpype.lib.applications import (
ApplicationLaunchFailed
)
from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.hosts import flame as opflame
@ -22,11 +19,12 @@ class FlamePrelaunch(PreLaunchHook):
Will make sure flame_script_dirs are copied to user's folder defined
in environment var FLAME_SCRIPT_DIR.
"""
app_groups = ["flame"]
app_groups = {"flame"}
permissions = 0o777
wtc_script_path = os.path.join(
opflame.HOST_DIR, "api", "scripts", "wiretap_com.py")
launch_types = {LaunchTypes.local}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)


@ -2,12 +2,16 @@ import os
import shutil
import platform
from pathlib import Path
from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
from openpype.hosts.fusion import (
FUSION_HOST_DIR,
FUSION_VERSIONS_DICT,
get_fusion_version,
)
from openpype.lib.applications import (
PreLaunchHook,
LaunchTypes,
ApplicationLaunchFailed,
)
class FusionCopyPrefsPrelaunch(PreLaunchHook):
@ -21,8 +25,9 @@ class FusionCopyPrefsPrelaunch(PreLaunchHook):
Master.prefs is defined in openpype/hosts/fusion/deploy/fusion_shared.prefs
"""
app_groups = ["fusion"]
app_groups = {"fusion"}
order = 2
launch_types = {LaunchTypes.local}
def get_fusion_profile_name(self, profile_version) -> str:
# Returns 'Default', unless FUSION16_PROFILE is set


@ -1,5 +1,9 @@
import os
from openpype.lib import PreLaunchHook, ApplicationLaunchFailed
from openpype.lib.applications import (
PreLaunchHook,
LaunchTypes,
ApplicationLaunchFailed,
)
from openpype.hosts.fusion import (
FUSION_HOST_DIR,
FUSION_VERSIONS_DICT,
@ -17,8 +21,9 @@ class FusionPrelaunch(PreLaunchHook):
Fusion 18 : Python 3.6 - 3.10
"""
app_groups = ["fusion"]
app_groups = {"fusion"}
order = 1
launch_types = {LaunchTypes.local}
def execute(self):
# making sure python 3 is installed at provided path


@ -1,4 +1,4 @@
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class SetPath(PreLaunchHook):
@ -6,7 +6,8 @@ class SetPath(PreLaunchHook):
Hook `GlobalHostDataHook` must be executed before this hook.
"""
app_groups = ["houdini"]
app_groups = {"houdini"}
launch_types = {LaunchTypes.local}
def execute(self):
workdir = self.launch_context.env.get("AVALON_WORKDIR", "")


@ -8,7 +8,7 @@ from openpype.lib import EnumDef
class CreateBGEO(plugin.HoudiniCreator):
"""BGEO pointcache creator."""
identifier = "io.openpype.creators.houdini.bgeo"
label = "BGEO PointCache"
label = "PointCache (Bgeo)"
family = "pointcache"
icon = "gears"


@ -8,7 +8,7 @@ import hou
class CreatePointCache(plugin.HoudiniCreator):
"""Alembic ROP to pointcache"""
identifier = "io.openpype.creators.houdini.pointcache"
label = "Point Cache"
label = "PointCache (Abc)"
family = "pointcache"
icon = "gears"


@ -1,26 +0,0 @@
# -*- coding: utf-8 -*-
"""Validator plugin for SOP Path in bgeo isntance."""
import pyblish.api
from openpype.pipeline import PublishValidationError
class ValidateNoSOPPath(pyblish.api.InstancePlugin):
"""Validate if SOP Path in BGEO instance exists."""
order = pyblish.api.ValidatorOrder
families = ["bgeo"]
label = "Validate BGEO SOP Path"
def process(self, instance):
import hou
node = hou.node(instance.data.get("instance_node"))
sop_path = node.evalParm("soppath")
if not sop_path:
raise PublishValidationError(
("Empty SOP Path ('soppath' parameter) found in "
f"the BGEO instance Geometry - {node.path()}"))
if not isinstance(hou.node(sop_path), hou.SopNode):
raise PublishValidationError(
"SOP path is not pointing to valid SOP node.")


@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
"""Pre-launch to force 3ds max startup script."""
from openpype.lib import PreLaunchHook
import os
from openpype.hosts.max import MAX_HOST_DIR
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class ForceStartupScript(PreLaunchHook):
@ -13,12 +14,14 @@ class ForceStartupScript(PreLaunchHook):
Hook `GlobalHostDataHook` must be executed before this hook.
"""
app_groups = ["3dsmax"]
app_groups = {"3dsmax"}
order = 11
launch_types = {LaunchTypes.local}
def execute(self):
startup_args = [
"-U",
"MAXScript",
f"{os.getenv('OPENPYPE_ROOT')}\\openpype\\hosts\\max\\startup\\startup.ms"] # noqa
os.path.join(MAX_HOST_DIR, "startup", "startup.ms"),
]
self.launch_context.launch_args.append(startup_args)


@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Pre-launch hook to inject python environment."""
from openpype.lib import PreLaunchHook
import os
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class InjectPythonPath(PreLaunchHook):
@ -13,7 +13,8 @@ class InjectPythonPath(PreLaunchHook):
Hook `GlobalHostDataHook` must be executed before this hook.
"""
app_groups = ["3dsmax"]
app_groups = {"3dsmax"}
launch_types = {LaunchTypes.local}
def execute(self):
self.launch_context.env["MAX_PYTHONPATH"] = os.environ["PYTHONPATH"]


@ -1,4 +1,4 @@
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class SetPath(PreLaunchHook):
@ -6,7 +6,8 @@ class SetPath(PreLaunchHook):
Hook `GlobalHostDataHook` must be executed before this hook.
"""
app_groups = ["max"]
app_groups = {"max"}
launch_types = {LaunchTypes.local}
def execute(self):
workdir = self.launch_context.env.get("AVALON_WORKDIR", "")


@ -1,4 +1,4 @@
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class MayaPreAutoLoadPlugins(PreLaunchHook):
@ -6,7 +6,8 @@ class MayaPreAutoLoadPlugins(PreLaunchHook):
# Before AddLastWorkfileToLaunchArgs
order = 9
app_groups = ["maya"]
app_groups = {"maya"}
launch_types = {LaunchTypes.local}
def execute(self):


@ -1,4 +1,4 @@
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.hosts.maya.lib import create_workspace_mel
@ -7,7 +7,8 @@ class PreCopyMel(PreLaunchHook):
Hook `GlobalHostDataHook` must be executed before this hook.
"""
app_groups = ["maya"]
app_groups = {"maya"}
launch_types = {LaunchTypes.local}
def execute(self):
project_doc = self.data["project_doc"]


@ -1,4 +1,4 @@
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class MayaPreOpenWorkfilePostInitialization(PreLaunchHook):
@ -6,7 +6,8 @@ class MayaPreOpenWorkfilePostInitialization(PreLaunchHook):
# Before AddLastWorkfileToLaunchArgs.
order = 9
app_groups = ["maya"]
app_groups = {"maya"}
launch_types = {LaunchTypes.local}
def execute(self):


@ -548,6 +548,8 @@ def list_instances(creator_id=None):
"""
instances_by_order = defaultdict(list)
subset_instances = []
instance_ids = set()
for node in nuke.allNodes(recurseGroups=True):
if node.Class() in ["Viewer", "Dot"]:
@ -573,6 +575,14 @@ def list_instances(creator_id=None):
if creator_id and instance_data["creator_identifier"] != creator_id:
continue
if instance_data["instance_id"] in instance_ids:
instance_data.pop("instance_id")
else:
instance_ids.add(instance_data["instance_id"])
# node name could change, so update subset name data
_update_subset_name_data(instance_data, node)
if "render_order" not in node.knobs():
subset_instances.append((node, instance_data))
continue
@ -581,23 +591,43 @@ def list_instances(creator_id=None):
instances_by_order[order].append((node, instance_data))
# Sort instances based on order attribute or subset name.
# TODO: remove in future Publisher enhanced with sorting
ordered_instances = []
for key in sorted(instances_by_order.keys()):
instances_by_subset = {}
for node, data in instances_by_order[key]:
instances_by_subset[data["subset"]] = (node, data)
instances_by_subset = defaultdict(list)
for node, data_ in instances_by_order[key]:
instances_by_subset[data_["subset"]].append((node, data_))
for subkey in sorted(instances_by_subset.keys()):
ordered_instances.append(instances_by_subset[subkey])
ordered_instances.extend(instances_by_subset[subkey])
instances_by_subset = {}
for node, data in subset_instances:
instances_by_subset[data["subset"]] = (node, data)
instances_by_subset = defaultdict(list)
for node, data_ in subset_instances:
instances_by_subset[data_["subset"]].append((node, data_))
for key in sorted(instances_by_subset.keys()):
ordered_instances.append(instances_by_subset[key])
ordered_instances.extend(instances_by_subset[key])
return ordered_instances
def _update_subset_name_data(instance_data, node):
"""Update subset name data in instance data.
Args:
instance_data (dict): instance creator data
node (nuke.Node): nuke node
"""
# make sure node name is subset name
old_subset_name = instance_data["subset"]
old_variant = instance_data["variant"]
subset_name_root = old_subset_name.replace(old_variant, "")
new_subset_name = node.name()
new_variant = new_subset_name.replace(subset_name_root, "")
instance_data["subset"] = new_subset_name
instance_data["variant"] = new_variant
def remove_instance(instance):
"""Remove instance from current workfile metadata.


@ -212,9 +212,15 @@ class NukeCreator(NewCreator):
created_instance["creator_attributes"].pop(key)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
for created_inst, changes in update_list:
instance_node = created_inst.transient_data["node"]
# update instance node name if subset name changed
if "subset" in changes.changed_keys:
instance_node["name"].setValue(
changes["subset"].new_value
)
# in case node is not existing anymore (user erased it manually)
try:
instance_node.fullName()
@ -256,6 +262,17 @@ class NukeWriteCreator(NukeCreator):
family = "write"
icon = "sign-out"
def get_linked_knobs(self):
linked_knobs = []
if "channels" in self.instance_attributes:
linked_knobs.append("channels")
if "ordered" in self.instance_attributes:
linked_knobs.append("render_order")
if "use_range_limit" in self.instance_attributes:
linked_knobs.extend(["___", "first", "last", "use_limit"])
return linked_knobs
def integrate_links(self, node, outputs=True):
# skip if no selection
if not self.selected_node:
@ -921,7 +938,11 @@ class ExporterReviewMov(ExporterReview):
except Exception:
self.log.info("`mov64_codec` knob was not found")
write_node["mov64_write_timecode"].setValue(1)
try:
write_node["mov64_write_timecode"].setValue(1)
except Exception:
self.log.info("`mov64_write_timecode` knob was not found")
write_node["raw"].setValue(1)
# connect
write_node.setInput(0, self.previous_node)


@ -1,11 +1,12 @@
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook
class PrelaunchNukeAssistHook(PreLaunchHook):
"""
Adding flag when nukeassist
"""
app_groups = ["nukeassist"]
app_groups = {"nukeassist"}
launch_types = set()
def execute(self):
self.launch_context.env["NUKEASSIST"] = "1"


@ -64,9 +64,6 @@ class CreateWriteImage(napi.NukeWriteCreator):
)
def create_instance_node(self, subset_name, instance_data):
linked_knobs_ = []
if "use_range_limit" in self.instance_attributes:
linked_knobs_ = ["channels", "___", "first", "last", "use_limit"]
# add fpath_template
write_data = {
@ -81,7 +78,7 @@ class CreateWriteImage(napi.NukeWriteCreator):
write_data,
input=self.selected_node,
prenodes=self.prenodes,
linked_knobs=linked_knobs_,
linked_knobs=self.get_linked_knobs(),
**{
"frame": nuke.frame()
}


@ -45,12 +45,6 @@ class CreateWritePrerender(napi.NukeWriteCreator):
return attr_defs
def create_instance_node(self, subset_name, instance_data):
linked_knobs_ = []
if "use_range_limit" in self.instance_attributes:
linked_knobs_ = ["channels", "___", "first", "last", "use_limit"]
linked_knobs_.append("render_order")
# add fpath_template
write_data = {
"creator": self.__class__.__name__,
@ -73,7 +67,7 @@ class CreateWritePrerender(napi.NukeWriteCreator):
write_data,
input=self.selected_node,
prenodes=self.prenodes,
linked_knobs=linked_knobs_,
linked_knobs=self.get_linked_knobs(),
**{
"width": width,
"height": height


@ -39,10 +39,6 @@ class CreateWriteRender(napi.NukeWriteCreator):
return attr_defs
def create_instance_node(self, subset_name, instance_data):
linked_knobs_ = [
"channels", "___", "first", "last", "use_limit", "render_order"
]
# add fpath_template
write_data = {
"creator": self.__class__.__name__,
@ -60,12 +56,15 @@ class CreateWriteRender(napi.NukeWriteCreator):
actual_format = nuke.root().knob('format').value()
width, height = (actual_format.width(), actual_format.height())
self.log.debug(">>>>>>> : {}".format(self.instance_attributes))
self.log.debug(">>>>>>> : {}".format(self.get_linked_knobs()))
created_node = napi.create_write_node(
subset_name,
write_data,
input=self.selected_node,
prenodes=self.prenodes,
linked_knobs=linked_knobs_,
linked_knobs=self.get_linked_knobs(),
**{
"width": width,
"height": height


@ -91,14 +91,14 @@ class LoadClip(plugin.NukeLoader):
# reset container id so it is always unique for each instance
self.reset_container_id()
self.log.warning(self.extensions)
is_sequence = len(representation["files"]) > 1
if is_sequence:
representation = self._representation_with_hash_in_frame(
representation
context["representation"] = \
self._representation_with_hash_in_frame(
representation
)
filepath = self.filepath_from_context(context)
filepath = filepath.replace("\\", "/")
self.log.debug("_ filepath: {}".format(filepath))
@ -260,6 +260,7 @@ class LoadClip(plugin.NukeLoader):
representation = self._representation_with_hash_in_frame(
representation
)
filepath = get_representation_path(representation).replace("\\", "/")
self.log.debug("_ filepath: {}".format(filepath))


@ -193,4 +193,10 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
if not instance.data.get("review"):
instance.data["useSequenceForReview"] = False
# TODO temporarily set stagingDir as persistent for backward
# compatibility. This is mainly focused on `renders` folders which
# were previously not cleaned up (and could be used in Read nodes)
# this logic should be removed and replaced with custom staging dir
instance.data["stagingDir_persistent"] = True
self.log.debug("instance.data: {}".format(pformat(instance.data)))


@ -1,5 +1,5 @@
import os
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class PreLaunchResolveLastWorkfile(PreLaunchHook):
@ -9,7 +9,8 @@ class PreLaunchResolveLastWorkfile(PreLaunchHook):
workfile. This property is set explicitly in Launcher.
"""
order = 10
app_groups = ["resolve"]
app_groups = {"resolve"}
launch_types = {LaunchTypes.local}
def execute(self):
if not self.data.get("start_last_workfile"):


@ -1,7 +1,7 @@
import os
from pathlib import Path
import platform
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.hosts.resolve.utils import setup
@ -30,7 +30,8 @@ class PreLaunchResolveSetup(PreLaunchHook):
"""
app_groups = ["resolve"]
app_groups = {"resolve"}
launch_types = {LaunchTypes.local}
def execute(self):
current_platform = platform.system().lower()


@ -1,6 +1,6 @@
import os
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook, LaunchTypes
import openpype.hosts.resolve
@ -9,7 +9,8 @@ class PreLaunchResolveStartup(PreLaunchHook):
"""
order = 11
app_groups = ["resolve"]
app_groups = {"resolve"}
launch_types = {LaunchTypes.local}
def execute(self):
# Set the openpype prelaunch startup script path for easy access


@ -1,7 +1,5 @@
from openpype.lib import (
PreLaunchHook,
get_openpype_execute_args
)
from openpype.lib import get_openpype_execute_args
from openpype.lib.applications import PreLaunchHook, LaunchTypes
class TvpaintPrelaunchHook(PreLaunchHook):
@ -13,7 +11,8 @@ class TvpaintPrelaunchHook(PreLaunchHook):
Existence of last workfile is checked. If the workfile does not exist, it tries
to copy templated workfile from predefined path.
"""
app_groups = ["tvpaint"]
app_groups = {"tvpaint"}
launch_types = {LaunchTypes.local}
def execute(self):
# Pop tvpaint executable


@ -12,6 +12,11 @@ class UnrealAddon(OpenPypeModule, IHostAddon):
def initialize(self, module_settings):
self.enabled = True
def get_global_environments(self):
return {
"AYON_UNREAL_ROOT": UNREAL_ROOT_DIR,
}
def add_implementation_envs(self, env, app):
"""Modify environments to contain all required for implementation."""
# Set AYON_UNREAL_PLUGIN required for Unreal implementation


@ -7,9 +7,10 @@ from pathlib import Path
from qtpy import QtCore
from openpype import resources
from openpype.lib import (
from openpype.lib.applications import (
PreLaunchHook,
ApplicationLaunchFailed,
LaunchTypes,
)
from openpype.pipeline.workfile import get_workfile_template_key
import openpype.hosts.unreal.lib as unreal_lib
@ -29,6 +30,8 @@ class UnrealPrelaunchHook(PreLaunchHook):
shell script.
"""
app_groups = {"unreal"}
launch_types = {LaunchTypes.local}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)

@ -1 +1 @@
Subproject commit ff15c700771e719cc5f3d561ac5d6f7590623986
Subproject commit 63266607ceb972a61484f046634ddfc9eb0b5757


@ -369,11 +369,11 @@ def get_compatible_integration(
def get_path_to_cmdlet_project(ue_version: str) -> Path:
cmd_project = Path(
os.path.abspath(os.getenv("OPENPYPE_ROOT")))
os.path.dirname(os.path.abspath(__file__)))
# For now, only tested on Windows (For Linux and Mac
# it has to be implemented)
cmd_project /= f"openpype/hosts/unreal/integration/UE_{ue_version}"
cmd_project /= f"integration/UE_{ue_version}"
# if the integration doesn't exist for current engine version
# try to find the closest to it.


@ -40,17 +40,34 @@ def retrieve_exit_code(line: str):
return None
class UEProjectGenerationWorker(QtCore.QObject):
class UEWorker(QtCore.QObject):
finished = QtCore.Signal(str)
failed = QtCore.Signal(str)
failed = QtCore.Signal(str, int)
progress = QtCore.Signal(int)
log = QtCore.Signal(str)
engine_path: Path = None
env = None
def execute(self):
raise NotImplementedError("Please implement this method!")
def run(self):
try:
self.execute()
except Exception as e:
import traceback
self.log.emit(str(e))
self.log.emit(traceback.format_exc())
self.failed.emit(str(e), 1)
raise e
class UEProjectGenerationWorker(UEWorker):
stage_begin = QtCore.Signal(str)
ue_version: str = None
project_name: str = None
env = None
engine_path: Path = None
project_dir: Path = None
dev_mode = False
@ -87,7 +104,7 @@ class UEProjectGenerationWorker(QtCore.QObject):
self.project_name = unreal_project_name
self.engine_path = engine_path
def run(self):
def execute(self):
# engine_path should be the location of UE_X.X folder
ue_editor_exe = ue_lib.get_editor_exe_path(self.engine_path,
@ -298,15 +315,8 @@ class UEProjectGenerationWorker(QtCore.QObject):
self.finished.emit("Project successfully built!")
class UEPluginInstallWorker(QtCore.QObject):
finished = QtCore.Signal(str)
class UEPluginInstallWorker(UEWorker):
installing = QtCore.Signal(str)
failed = QtCore.Signal(str, int)
progress = QtCore.Signal(int)
log = QtCore.Signal(str)
engine_path: Path = None
env = None
def setup(self, engine_path: Path, env: dict = None, ):
self.engine_path = engine_path
@ -374,7 +384,7 @@ class UEPluginInstallWorker(QtCore.QObject):
dir_util.remove_tree(temp_dir.as_posix())
def run(self):
def execute(self):
src_plugin_dir = Path(self.env.get("AYON_UNREAL_PLUGIN", ""))
if not os.path.isdir(src_plugin_dir):


@ -6,7 +6,7 @@ import pyblish.util
from openpype.lib import Logger
from openpype.lib.applications import (
ApplicationManager,
get_app_environments_for_context,
LaunchTypes,
)
from openpype.pipeline import install_host
from openpype.hosts.webpublisher.api import WebpublisherHost
@ -156,22 +156,31 @@ def cli_publish_from_app(
found_variant_key = find_variant_key(application_manager, host_name)
app_name = "{}/{}".format(host_name, found_variant_key)
data = {
"last_workfile_path": workfile_path,
"start_last_workfile": True,
"project_name": project_name,
"asset_name": asset_name,
"task_name": task_name,
"launch_type": LaunchTypes.automated,
}
launch_context = application_manager.create_launch_context(
app_name, **data)
launch_context.run_prelaunch_hooks()
# must have for proper launch of app
env = get_app_environments_for_context(
project_name,
asset_name,
task_name,
app_name
)
env = launch_context.env
print("env:: {}".format(env))
env["OPENPYPE_PUBLISH_DATA"] = batch_path
# must pass identifier to update log lines for a batch
env["BATCH_LOG_ID"] = str(_id)
env["HEADLESS_PUBLISH"] = 'true' # to use in app lib
env["USER_EMAIL"] = user_email
os.environ.update(env)
os.environ["OPENPYPE_PUBLISH_DATA"] = batch_path
# must pass identifier to update log lines for a batch
os.environ["BATCH_LOG_ID"] = str(_id)
os.environ["HEADLESS_PUBLISH"] = 'true' # to use in app lib
os.environ["USER_EMAIL"] = user_email
# Why is this here? Registered host in this process does not affect
# registered host in launched process.
pyblish.api.register_host(host_name)
if targets:
if isinstance(targets, str):
@ -184,15 +193,7 @@ def cli_publish_from_app(
os.environ["PYBLISH_TARGETS"] = os.pathsep.join(
set(current_targets))
data = {
"last_workfile_path": workfile_path,
"start_last_workfile": True,
"project_name": project_name,
"asset_name": asset_name,
"task_name": task_name
}
launched_app = application_manager.launch(app_name, **data)
launched_app = application_manager.launch_with_context(launch_context)
timeout = get_timeout(project_name, host_name, task_type)


@ -12,10 +12,6 @@ from abc import ABCMeta, abstractmethod
import six
from openpype import AYON_SERVER_ENABLED, PACKAGE_DIR
from openpype.client import (
get_project,
get_asset_by_name,
)
from openpype.settings import (
get_system_settings,
get_project_settings,
@ -47,6 +43,25 @@ CUSTOM_LAUNCH_APP_GROUPS = {
}
class LaunchTypes:
"""Launch types are filters for pre/post-launch hooks.
Please use these variables in case they'll change values.
"""
# Local launch - application is launched on local machine
local = "local"
# Farm render job - application is on farm
farm_render = "farm-render"
# Farm publish job - integration post-render job
farm_publish = "farm-publish"
# Remote launch - application is launched on remote machine from which
# can be started publishing
remote = "remote"
# Automated launch - application is launched with automated publishing
automated = "automated"
def parse_environments(env_data, env_group=None, platform_name=None):
"""Parse environment values from settings byt group and platform.
@ -483,6 +498,42 @@ class ApplicationManager:
break
return output
def create_launch_context(self, app_name, **data):
"""Prepare launch context for application.
Args:
app_name (str): Name of application that should be launched.
**data (Any): Any additional data. Data may be used during
Returns:
ApplicationLaunchContext: Launch context for application.
Raises:
ApplicationNotFound: Application was not found by entered name.
"""
app = self.applications.get(app_name)
if not app:
raise ApplicationNotFound(app_name)
executable = app.find_executable()
return ApplicationLaunchContext(
app, executable, **data
)
def launch_with_context(self, launch_context):
"""Launch application using existing launch context.
Args:
launch_context (ApplicationLaunchContext): Prepared launch
context.
"""
if not launch_context.executable:
raise ApplictionExecutableNotFound(launch_context.application)
return launch_context.launch()
def launch(self, app_name, **data):
"""Launch procedure.
@ -503,18 +554,10 @@ class ApplicationManager:
failed. Exception should contain explanation message,
traceback should not be needed.
"""
app = self.applications.get(app_name)
if not app:
raise ApplicationNotFound(app_name)
executable = app.find_executable()
if not executable:
raise ApplictionExecutableNotFound(app)
context = self.create_launch_context(app_name, **data)
return self.launch_with_context(context)
context = ApplicationLaunchContext(
app, executable, **data
)
return context.launch()
class EnvironmentToolGroup:
@ -736,13 +779,17 @@ class LaunchHook:
# Order of prelaunch hook, will be executed as last if set to None.
order = None
# List of host implementations, skipped if empty.
hosts = []
# List of application groups
app_groups = []
# List of specific application names
app_names = []
# List of platform availability, skipped if empty.
platforms = []
hosts = set()
# Set of application groups
app_groups = set()
# Set of specific application names
app_names = set()
# Set of platform availability
platforms = set()
# Set of launch types for which is available
# - if empty then is available for all launch types
# - by default has 'local' which is the most common reason for launch hooks
launch_types = {LaunchTypes.local}
def __init__(self, launch_context):
"""Constructor of launch hook.
@ -790,6 +837,10 @@ class LaunchHook:
if launch_context.app_name not in cls.app_names:
return False
if cls.launch_types:
if launch_context.launch_type not in cls.launch_types:
return False
return True
@property
@ -859,9 +910,9 @@ class PostLaunchHook(LaunchHook):
class ApplicationLaunchContext:
"""Context of launching application.
Main purpose of context is to prepare launch arguments and keyword arguments
for new process. Most important part of keyword arguments preparations
are environment variables.
Main purpose of context is to prepare launch arguments and keyword
arguments for new process. Most important part of keyword arguments
preparations are environment variables.
During the whole process is possible to use `data` attribute to store
object usable in multiple places.
@ -874,14 +925,30 @@ class ApplicationLaunchContext:
insert argument between `nuke.exe` and `--NukeX`. To keep them together
it is better to wrap them in another list: `[["nuke.exe", "--NukeX"]]`.
Notes:
It is possible to use launch context only to prepare environment
variables. In that case `executable` may be None and can be used
'run_prelaunch_hooks' method to run prelaunch hooks which prepare
them.
Args:
application (Application): Application definition.
executable (ApplicationExecutable): Object with path to executable.
env_group (Optional[str]): Environment variable group. If not set
'DEFAULT_ENV_SUBGROUP' is used.
launch_type (Optional[str]): Launch type. If not set 'local' is used.
**data (dict): Any additional data. Data may be used during
preparation to store objects usable in multiple places.
"""
def __init__(self, application, executable, env_group=None, **data):
def __init__(
self,
application,
executable,
env_group=None,
launch_type=None,
**data
):
from openpype.modules import ModulesManager
# Application object
@ -896,6 +963,10 @@ class ApplicationLaunchContext:
self.executable = executable
if launch_type is None:
launch_type = LaunchTypes.local
self.launch_type = launch_type
if env_group is None:
env_group = DEFAULT_ENV_SUBGROUP
@ -903,8 +974,11 @@ class ApplicationLaunchContext:
self.data = dict(data)
launch_args = []
if executable is not None:
launch_args = executable.as_args()
# subprocess.Popen launch arguments (first argument in constructor)
self.launch_args = executable.as_args()
self.launch_args = launch_args
self.launch_args.extend(application.arguments)
if self.data.get("app_args"):
self.launch_args.extend(self.data.pop("app_args"))
@ -946,6 +1020,7 @@ class ApplicationLaunchContext:
self.postlaunch_hooks = None
self.process = None
self._prelaunch_hooks_executed = False
@property
def env(self):
@ -1215,6 +1290,27 @@ class ApplicationLaunchContext:
# Return process which is already terminated
return process
def run_prelaunch_hooks(self):
"""Run prelaunch hooks.
This method will be executed only once, any future calls will skip
the processing.
"""
if self._prelaunch_hooks_executed:
self.log.warning("Prelaunch hooks were already executed.")
return
# Discover launch hooks
self.discover_launch_hooks()
# Execute prelaunch hooks
for prelaunch_hook in self.prelaunch_hooks:
self.log.debug("Executing prelaunch hook: {}".format(
str(prelaunch_hook.__class__.__name__)
))
prelaunch_hook.execute()
self._prelaunch_hooks_executed = True
def launch(self):
"""Collect data for new process and then create it.
@ -1227,15 +1323,8 @@ class ApplicationLaunchContext:
self.log.warning("Application was already launched.")
return
# Discover launch hooks
self.discover_launch_hooks()
# Execute prelaunch hooks
for prelaunch_hook in self.prelaunch_hooks:
self.log.debug("Executing prelaunch hook: {}".format(
str(prelaunch_hook.__class__.__name__)
))
prelaunch_hook.execute()
if not self._prelaunch_hooks_executed:
self.run_prelaunch_hooks()
self.log.debug("All prelaunch hook executed. Starting new process.")
@ -1353,6 +1442,7 @@ def get_app_environments_for_context(
task_name,
app_name,
env_group=None,
launch_type=None,
env=None,
modules_manager=None
):
@ -1363,54 +1453,33 @@ def get_app_environments_for_context(
task_name (str): Name of task.
app_name (str): Name of application that is launched and can be found
by ApplicationManager.
env (dict): Initial environment variables. `os.environ` is used when
not passed.
modules_manager (ModulesManager): Initialized modules manager.
env_group (Optional[str]): Name of environment group. If not passed
default group is used.
launch_type (Optional[str]): Type for which prelaunch hooks are
executed.
env (Optional[dict[str, str]]): Initial environment variables.
`os.environ` is used when not passed.
modules_manager (Optional[ModulesManager]): Initialized modules
manager.
Returns:
dict: Environments for passed context and application.
"""
from openpype.modules import ModulesManager
from openpype.pipeline import Anatomy
from openpype.lib.openpype_version import is_running_staging
# Project document
project_doc = get_project(project_name)
asset_doc = get_asset_by_name(project_name, asset_name)
if modules_manager is None:
modules_manager = ModulesManager()
# Prepare app object which can be obtained only from ApplciationManager
# Prepare app object which can be obtained only from ApplicationManager
app_manager = ApplicationManager()
app = app_manager.applications[app_name]
# Project's anatomy
anatomy = Anatomy(project_name)
data = EnvironmentPrepData({
"project_name": project_name,
"asset_name": asset_name,
"task_name": task_name,
"app": app,
"project_doc": project_doc,
"asset_doc": asset_doc,
"anatomy": anatomy,
"env": env
})
data["env"].update(anatomy.root_environments())
if is_running_staging():
data["env"]["OPENPYPE_IS_STAGING"] = "1"
prepare_app_environments(data, env_group, modules_manager)
prepare_context_environments(data, env_group, modules_manager)
return data["env"]
context = app_manager.create_launch_context(
app_name,
project_name=project_name,
asset_name=asset_name,
task_name=task_name,
env_group=env_group,
launch_type=launch_type,
env=env,
modules_manager=modules_manager,
)
context.run_prelaunch_hooks()
return context.env
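Taken together, the new ApplicationManager entry points above can be used roughly like this; a minimal sketch in which the application name and context values are illustrative:

    from openpype.lib.applications import ApplicationManager, LaunchTypes

    manager = ApplicationManager()
    # Prepare a launch context without starting the application, e.g. to only
    # collect environment variables through the prelaunch hooks.
    context = manager.create_launch_context(
        "maya/2023",                      # hypothetical "host/variant" name
        project_name="demo_project",
        asset_name="sh010",
        task_name="animation",
        launch_type=LaunchTypes.automated,
    )
    context.run_prelaunch_hooks()         # prelaunch hooks prepare context.env
    env = context.env

    # Or actually start the application using the prepared context.
    manager.launch_with_context(context)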
def _merge_env(env, current_env):


@ -25,6 +25,7 @@ from openpype.pipeline.publish import (
from openpype.pipeline.publish.lib import (
replace_with_published_scene_path
)
from openpype import AYON_SERVER_ENABLED
JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)
@ -397,6 +398,15 @@ class DeadlineJobInfo(object):
for key, value in data.items():
setattr(self, key, value)
def add_render_job_env_var(self):
"""Check if in OP or AYON mode and use appropriate env var."""
if AYON_SERVER_ENABLED:
self.EnvironmentKeyValue["AYON_RENDER_JOB"] = "1"
self.EnvironmentKeyValue["AYON_BUNDLE_NAME"] = (
os.environ["AYON_BUNDLE_NAME"])
else:
self.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"
@six.add_metaclass(AbstractMetaInstancePlugin)
class AbstractSubmitDeadline(pyblish.api.InstancePlugin,


@ -21,6 +21,8 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
def process(self, instance):
instance.data["deadlineUrl"] = self._collect_deadline_url(instance)
instance.data["deadlineUrl"] = \
instance.data["deadlineUrl"].strip().rstrip("/")
self.log.info(
"Using {} for submission.".format(instance.data["deadlineUrl"]))


@ -48,3 +48,6 @@ class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
context.data["defaultDeadline"] = deadline_webservice
self.log.debug("Overriding from project settings with {}".format( # noqa: E501
deadline_webservice))
context.data["defaultDeadline"] = \
context.data["defaultDeadline"].strip().rstrip("/")


@ -106,8 +106,8 @@ class AfterEffectsSubmitDeadline(
if value:
dln_job_info.EnvironmentKeyValue[key] = value
# to recognize job from PYPE for turning Event On/Off
dln_job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"
# to recognize render jobs
dln_job_info.add_render_job_env_var()
return dln_job_info


@ -299,8 +299,8 @@ class HarmonySubmitDeadline(
if value:
job_info.EnvironmentKeyValue[key] = value
# to recognize job from PYPE for turning Event On/Off
job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"
# to recognize render jobs
job_info.add_render_job_env_var()
return job_info


@ -105,8 +105,8 @@ class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
if value:
job_info.EnvironmentKeyValue[key] = value
# to recognize job from PYPE for turning Event On/Off
job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"
# to recognize render jobs
job_info.add_render_job_env_var(job_info)
for i, filepath in enumerate(instance.data["files"]):
dirname = os.path.dirname(filepath)


@ -131,8 +131,8 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
continue
job_info.EnvironmentKeyValue[key] = value
# to recognize job from PYPE for turning Event On/Off
job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"
# to recognize render jobs
job_info.add_render_job_env_var(job_info)
job_info.EnvironmentKeyValue["OPENPYPE_LOG_NO_COLORS"] = "1"
# Add list of expected files to job


@ -226,8 +226,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
continue
job_info.EnvironmentKeyValue[key] = value
# to recognize job from PYPE for turning Event On/Off
job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"
# to recognize render jobs
job_info.add_render_job_env_var()
job_info.EnvironmentKeyValue["OPENPYPE_LOG_NO_COLORS"] = "1"
# Adding file dependencies.


@ -4,6 +4,7 @@ from datetime import datetime
from maya import cmds
from openpype import AYON_SERVER_ENABLED
from openpype.pipeline import legacy_io, PublishXmlValidationError
from openpype.tests.lib import is_in_tests
from openpype.lib import is_running_from_build
@ -114,11 +115,14 @@ class MayaSubmitRemotePublishDeadline(
environment["AVALON_TASK"] = instance.context.data["task"]
environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
environment["OPENPYPE_LOG_NO_COLORS"] = "1"
environment["OPENPYPE_REMOTE_JOB"] = "1"
environment["OPENPYPE_USERNAME"] = instance.context.data["user"]
environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
environment["OPENPYPE_REMOTE_PUBLISH"] = "1"
if AYON_SERVER_ENABLED:
environment["AYON_REMOTE_PUBLISH"] = "1"
else:
environment["OPENPYPE_REMOTE_PUBLISH"] = "1"
for key, value in environment.items():
job_info.EnvironmentKeyValue[key] = value


@ -8,6 +8,8 @@ import requests
import pyblish.api
import nuke
from openpype import AYON_SERVER_ENABLED
from openpype.pipeline import legacy_io
from openpype.pipeline.publish import (
OpenPypePyblishPluginMixin
@ -337,8 +339,14 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
if _path.lower().startswith('openpype_'):
environment[_path] = os.environ[_path]
# to recognize job from PYPE for turning Event On/Off
environment["OPENPYPE_RENDER_JOB"] = "1"
# to recognize render jobs
if AYON_SERVER_ENABLED:
environment["AYON_BUNDLE_NAME"] = os.environ["AYON_BUNDLE_NAME"]
render_job_label = "AYON_RENDER_JOB"
else:
render_job_label = "OPENPYPE_RENDER_JOB"
environment[render_job_label] = "1"
# finally search replace in values of any key
if self.env_search_replace_values:


@ -9,16 +9,13 @@ import clique
import pyblish.api
from openpype import AYON_SERVER_ENABLED
from openpype.client import (
get_last_version_by_subset_name,
)
from openpype.pipeline import (
legacy_io,
)
from openpype.pipeline import publish
from openpype.lib import EnumDef
from openpype.pipeline import publish, legacy_io
from openpype.lib import EnumDef, is_running_from_build
from openpype.tests.lib import is_in_tests
from openpype.lib import is_running_from_build
from openpype.pipeline.farm.pyblish_functions import (
create_skeleton_instance,
@ -94,7 +91,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
label = "Submit image sequence jobs to Deadline or Muster"
order = pyblish.api.IntegratorOrder + 0.2
icon = "tractor"
deadline_plugin = "OpenPype"
targets = ["local"]
hosts = ["fusion", "max", "maya", "nuke", "houdini",
@ -126,10 +123,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
"OPENPYPE_SG_USER"
]
# Add OpenPype version if we are running from build.
if is_running_from_build():
environ_keys.append("OPENPYPE_VERSION")
# custom deadline attributes
deadline_department = ""
deadline_pool = ""
@ -189,7 +182,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
instance.data.get("asset"),
instances[0]["subset"],
instance.context,
'render',
instances[0]["family"],
override_version
)
@ -203,13 +196,25 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
"AVALON_ASSET": instance.context.data["asset"],
"AVALON_TASK": instance.context.data["task"],
"OPENPYPE_USERNAME": instance.context.data["user"],
"OPENPYPE_PUBLISH_JOB": "1",
"OPENPYPE_RENDER_JOB": "0",
"OPENPYPE_REMOTE_JOB": "0",
"OPENPYPE_LOG_NO_COLORS": "1",
"IS_TEST": str(int(is_in_tests()))
}
if AYON_SERVER_ENABLED:
environment["AYON_PUBLISH_JOB"] = "1"
environment["AYON_RENDER_JOB"] = "0"
environment["AYON_REMOTE_PUBLISH"] = "0"
environment["AYON_BUNDLE_NAME"] = os.environ["AYON_BUNDLE_NAME"]
deadline_plugin = "Ayon"
else:
environment["OPENPYPE_PUBLISH_JOB"] = "1"
environment["OPENPYPE_RENDER_JOB"] = "0"
environment["OPENPYPE_REMOTE_PUBLISH"] = "0"
deadline_plugin = "Openpype"
# Add OpenPype version if we are running from build.
if is_running_from_build():
self.environ_keys.append("OPENPYPE_VERSION")
# add environments from self.environ_keys
for env_key in self.environ_keys:
if os.getenv(env_key):
@ -252,7 +257,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
)
payload = {
"JobInfo": {
"Plugin": self.deadline_plugin,
"Plugin": deadline_plugin,
"BatchName": job["Props"]["Batch"],
"Name": job_name,
"UserName": job["Props"]["User"],
@ -563,11 +568,22 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
else:
version = 1
host_name = context.data["hostName"]
task_info = template_data.get("task") or {}
template_name = publish.get_publish_template_name(
project_name,
host_name,
family,
task_info.get("name"),
task_info.get("type"),
)
template_data["subset"] = subset
template_data["family"] = family
template_data["version"] = version
render_templates = anatomy.templates_obj["render"]
render_templates = anatomy.templates_obj[template_name]
if "folder" in render_templates:
publish_folder = render_templates["folder"].format_strict(
template_data

Binary file not shown (new image, 7.5 KiB).


@ -0,0 +1,9 @@
[Arguments]
Type=string
Label=Arguments
Category=Python Options
CategoryOrder=0
Index=1
Description=The arguments to pass to the script. If no arguments are required, leave this blank.
Required=false
DisableIfBlank=true


@ -0,0 +1,35 @@
[About]
Type=label
Label=About
Category=About Plugin
CategoryOrder=-1
Index=0
Default=Ayon Plugin for Deadline
Description=Not configurable
[AyonExecutable]
Type=multilinemultifilename
Label=Ayon Executable
Category=Ayon Executables
CategoryOrder=1
Index=0
Default=
Description=The path to the Ayon executable. Enter alternative paths on separate lines.
[AyonServerUrl]
Type=string
Label=Ayon Server Url
Category=Ayon Credentials
CategoryOrder=2
Index=0
Default=
Description=Url to Ayon server
[AyonApiKey]
Type=password
Label=Ayon API key
Category=Ayon Credentials
CategoryOrder=2
Index=0
Default=
Description=API key for service account on Ayon Server


@ -0,0 +1,149 @@
#!/usr/bin/env python3
from System.IO import Path
from System.Text.RegularExpressions import Regex
from Deadline.Plugins import PluginType, DeadlinePlugin
from Deadline.Scripting import (
StringUtils,
FileUtils,
DirectoryUtils,
RepositoryUtils
)
import re
import os
import platform
######################################################################
# This is the function that Deadline calls to get an instance of the
# main DeadlinePlugin class.
######################################################################
def GetDeadlinePlugin():
return AyonDeadlinePlugin()
def CleanupDeadlinePlugin(deadlinePlugin):
deadlinePlugin.Cleanup()
class AyonDeadlinePlugin(DeadlinePlugin):
"""
Standalone plugin for publishing from Ayon
Calls Ayon executable 'ayon_console' from the first correctly found
file based on plugin configuration. Uses 'publish' command and passes
path to metadata json file, which contains all needed information
for publish process.
"""
def __init__(self):
self.InitializeProcessCallback += self.InitializeProcess
self.RenderExecutableCallback += self.RenderExecutable
self.RenderArgumentCallback += self.RenderArgument
def Cleanup(self):
for stdoutHandler in self.StdoutHandlers:
del stdoutHandler.HandleCallback
del self.InitializeProcessCallback
del self.RenderExecutableCallback
del self.RenderArgumentCallback
def InitializeProcess(self):
self.PluginType = PluginType.Simple
self.StdoutHandling = True
self.SingleFramesOnly = self.GetBooleanPluginInfoEntryWithDefault(
"SingleFramesOnly", False)
self.LogInfo("Single Frames Only: %s" % self.SingleFramesOnly)
self.AddStdoutHandlerCallback(
".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress
def RenderExecutable(self):
job = self.GetJob()
# set required env vars for Ayon
# cannot be in InitializeProcess as it is too soon
config = RepositoryUtils.GetPluginConfig("Ayon")
ayon_server_url = (
job.GetJobEnvironmentKeyValue("AYON_SERVER_URL") or
config.GetConfigEntryWithDefault("AyonServerUrl", "")
)
ayon_api_key = (
job.GetJobEnvironmentKeyValue("AYON_API_KEY") or
config.GetConfigEntryWithDefault("AyonApiKey", "")
)
ayon_bundle_name = job.GetJobEnvironmentKeyValue("AYON_BUNDLE_NAME")
environment = {
"AYON_SERVER_URL": ayon_server_url,
"AYON_API_KEY": ayon_api_key,
"AYON_BUNDLE_NAME": ayon_bundle_name,
}
for env, val in environment.items():
self.SetProcessEnvironmentVariable(env, val)
exe_list = self.GetConfigEntry("AyonExecutable")
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
exe = FileUtils.SearchFileList(exe_list)
if exe == "":
self.FailRender(
"Ayon executable was not found " +
"in the semicolon separated list " +
"\"" + ";".join(exe_list) + "\". " +
"The path to the render executable can be configured " +
"from the Plugin Configuration in the Deadline Monitor.")
return exe
def RenderArgument(self):
arguments = str(self.GetPluginInfoEntryWithDefault("Arguments", ""))
arguments = RepositoryUtils.CheckPathMapping(arguments)
arguments = re.sub(r"<(?i)STARTFRAME>", str(self.GetStartFrame()),
arguments)
arguments = re.sub(r"<(?i)ENDFRAME>", str(self.GetEndFrame()),
arguments)
arguments = re.sub(r"<(?i)QUOTE>", "\"", arguments)
arguments = self.ReplacePaddedFrame(arguments,
"<(?i)STARTFRAME%([0-9]+)>",
self.GetStartFrame())
arguments = self.ReplacePaddedFrame(arguments,
"<(?i)ENDFRAME%([0-9]+)>",
self.GetEndFrame())
count = 0
for filename in self.GetAuxiliaryFilenames():
localAuxFile = Path.Combine(self.GetJobsDataDirectory(), filename)
arguments = re.sub(r"<(?i)AUXFILE" + str(count) + r">",
localAuxFile.replace("\\", "/"), arguments)
count += 1
return arguments
def ReplacePaddedFrame(self, arguments, pattern, frame):
frameRegex = Regex(pattern)
while True:
frameMatch = frameRegex.Match(arguments)
if not frameMatch.Success:
break
paddingSize = int(frameMatch.Groups[1].Value)
if paddingSize > 0:
padding = StringUtils.ToZeroPaddedString(
frame, paddingSize, False)
else:
padding = str(frame)
arguments = arguments.replace(
frameMatch.Groups[0].Value, padding)
return arguments
def HandleProgress(self):
progress = float(self.GetRegexMatch(1))
self.SetProgress(progress)
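As an illustration of the token handling in RenderArgument above, with a hypothetical plugin-info value and a start frame of 7:

    Arguments:  <QUOTE><STARTFRAME%4><QUOTE> --metadata <AUXFILE0>
    expands to: "0007" --metadata C:/path/to/jobs_data/metadata.json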


@ -355,6 +355,13 @@ def inject_openpype_environment(deadlinePlugin):
" AVALON_TASK, AVALON_APP_NAME"
))
openpype_mongo = job.GetJobEnvironmentKeyValue("OPENPYPE_MONGO")
if openpype_mongo:
# inject env var for OP extractenvironments
# SetEnvironmentVariable is important, not SetProcessEnv...
deadlinePlugin.SetEnvironmentVariable("OPENPYPE_MONGO",
openpype_mongo)
if not os.environ.get("OPENPYPE_MONGO"):
print(">>> Missing OPENPYPE_MONGO env var, process won't work")
@ -398,6 +405,151 @@ def inject_openpype_environment(deadlinePlugin):
raise
def inject_ayon_environment(deadlinePlugin):
""" Pull env vars from Ayon and push them to rendering process.
Used for correct paths, configuration from AYON etc.
"""
job = deadlinePlugin.GetJob()
print(">>> Injecting Ayon environments ...")
try:
exe_list = get_ayon_executable()
exe = FileUtils.SearchFileList(exe_list)
if not exe:
raise RuntimeError((
"Ayon executable was not found in the semicolon "
"separated list \"{}\"."
"The path to the render executable can be configured"
" from the Plugin Configuration in the Deadline Monitor."
).format(";".join(exe_list)))
print("--- Ayon executable: {}".format(exe))
ayon_bundle_name = job.GetJobEnvironmentKeyValue("AYON_BUNDLE_NAME")
if not ayon_bundle_name:
raise RuntimeError("Missing env var in job properties "
"AYON_BUNDLE_NAME")
config = RepositoryUtils.GetPluginConfig("Ayon")
ayon_server_url = (
job.GetJobEnvironmentKeyValue("AYON_SERVER_URL") or
config.GetConfigEntryWithDefault("AyonServerUrl", "")
)
ayon_api_key = (
job.GetJobEnvironmentKeyValue("AYON_API_KEY") or
config.GetConfigEntryWithDefault("AyonApiKey", "")
)
if not all([ayon_server_url, ayon_api_key]):
raise RuntimeError((
"Missing required values for server url and api key. "
"Please fill in Ayon Deadline plugin or provide by "
"AYON_SERVER_URL and AYON_API_KEY"
))
# tempfile.TemporaryFile cannot be used because of locking
temp_file_name = "{}_{}.json".format(
datetime.utcnow().strftime('%Y%m%d%H%M%S%f'),
str(uuid.uuid1())
)
export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
print(">>> Temporary path: {}".format(export_url))
args = [
"--headless",
"extractenvironments",
export_url
]
add_kwargs = {
"project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"),
"asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"),
"task": job.GetJobEnvironmentKeyValue("AVALON_TASK"),
"app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"),
"envgroup": "farm",
}
if job.GetJobEnvironmentKeyValue('IS_TEST'):
args.append("--automatic-tests")
if all(add_kwargs.values()):
for key, value in add_kwargs.items():
args.extend(["--{}".format(key), value])
else:
raise RuntimeError((
"Missing required env vars: AVALON_PROJECT, AVALON_ASSET,"
" AVALON_TASK, AVALON_APP_NAME"
))
environment = {
"AYON_SERVER_URL": ayon_server_url,
"AYON_API_KEY": ayon_api_key,
"AYON_BUNDLE_NAME": ayon_bundle_name,
}
for env, val in environment.items():
deadlinePlugin.SetEnvironmentVariable(env, val)
args_str = subprocess.list2cmdline(args)
print(">>> Executing: {} {}".format(exe, args_str))
process_exitcode = deadlinePlugin.RunProcess(
exe, args_str, os.path.dirname(exe), -1
)
if process_exitcode != 0:
raise RuntimeError(
"Failed to run Ayon process to extract environments."
)
print(">>> Loading file ...")
with open(export_url) as fp:
contents = json.load(fp)
for key, value in contents.items():
deadlinePlugin.SetProcessEnvironmentVariable(key, value)
script_url = job.GetJobPluginInfoKeyValue("ScriptFilename")
if script_url:
script_url = script_url.format(**contents).replace("\\", "/")
print(">>> Setting script path {}".format(script_url))
job.SetJobPluginInfoKeyValue("ScriptFilename", script_url)
print(">>> Removing temporary file")
os.remove(export_url)
print(">> Injection end.")
except Exception as e:
if hasattr(e, "output"):
print(">>> Exception {}".format(e.output))
import traceback
print(traceback.format_exc())
print("!!! Injection failed.")
RepositoryUtils.FailJob(job)
raise
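For illustration, the command assembled above ends up looking roughly like the line below; the executable path, context values and temp file name are made up, and the real temp file name is generated at runtime:

<ayon executable> --headless extractenvironments /tmp/20230803_xxx.json \
    --project myProject --asset sh010 --task compositing \
    --app maya/2023 --envgroup farm

The exported file is expected to be a flat JSON object mapping environment variable names to values; the loop above applies each pair with SetProcessEnvironmentVariable and uses the same mapping to format ScriptFilename.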
def get_ayon_executable():
"""Return OpenPype Executable from Event Plug-in Settings
Returns:
(list) of paths
Raises:
(RuntimeError) if no path configured at all
"""
config = RepositoryUtils.GetPluginConfig("Ayon")
exe_list = config.GetConfigEntryWithDefault("AyonExecutable", "")
if not exe_list:
raise RuntimeError("Path to Ayon executable not configured."
"Please set it in Ayon Deadline Plugin.")
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
return exe_list
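FileUtils.SearchFileList then receives that semicolon separated string and returns the first path that exists on the machine. A rough pure-Python equivalent for readers without the Deadline scripting API at hand (the behaviour is inferred from how it is used here, so treat it as an assumption):

import os

def search_file_list(paths):
    # Return the first existing file from a semicolon separated list,
    # or an empty string when none of the entries exist.
    for path in paths.split(";"):
        path = path.strip()
        if path and os.path.isfile(path):
            return path
    return ""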
def inject_render_job_id(deadlinePlugin):
"""Inject dependency ids to publish process as env var for validation."""
print(">>> Injecting render job id ...")
@ -422,16 +574,29 @@ def __main__(deadlinePlugin):
openpype_publish_job = \
job.GetJobEnvironmentKeyValue('OPENPYPE_PUBLISH_JOB') or '0'
openpype_remote_job = \
job.GetJobEnvironmentKeyValue('OPENPYPE_REMOTE_JOB') or '0'
job.GetJobEnvironmentKeyValue('OPENPYPE_REMOTE_PUBLISH') or '0'
print("--- Job type - render {}".format(openpype_render_job))
print("--- Job type - publish {}".format(openpype_publish_job))
print("--- Job type - remote {}".format(openpype_remote_job))
if openpype_publish_job == '1' and openpype_render_job == '1':
raise RuntimeError("Misconfiguration. Job couldn't be both " +
"render and publish.")
if openpype_publish_job == '1':
inject_render_job_id(deadlinePlugin)
elif openpype_render_job == '1' or openpype_remote_job == '1':
if openpype_render_job == '1' or openpype_remote_job == '1':
inject_openpype_environment(deadlinePlugin)
ayon_render_job = \
job.GetJobEnvironmentKeyValue('AYON_RENDER_JOB') or '0'
ayon_publish_job = \
job.GetJobEnvironmentKeyValue('AYON_PUBLISH_JOB') or '0'
ayon_remote_job = \
job.GetJobEnvironmentKeyValue('AYON_REMOTE_PUBLISH') or '0'
if ayon_publish_job == '1' and ayon_render_job == '1':
raise RuntimeError("Misconfiguration. Job couldn't be both " +
"render and publish.")
if ayon_publish_job == '1':
inject_render_job_id(deadlinePlugin)
if ayon_render_job == '1' or ayon_remote_job == '1':
inject_ayon_environment(deadlinePlugin)
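Summarised, the flag handling above boils down to the mapping in this sketch; the flags are '0'/'1' strings stored in the job environment, and `get_flag` is a hypothetical helper returning True when a flag equals '1':

def _resolve_injections(get_flag):
    # Mirrors the dispatch above: publish jobs only get the dependency
    # job ids, render/remote jobs get the full environment injection.
    actions = []
    if get_flag("OPENPYPE_PUBLISH_JOB"):
        actions.append("inject_render_job_id")
    if get_flag("OPENPYPE_RENDER_JOB") or get_flag("OPENPYPE_REMOTE_PUBLISH"):
        actions.append("inject_openpype_environment")
    if get_flag("AYON_PUBLISH_JOB"):
        actions.append("inject_render_job_id")
    if get_flag("AYON_RENDER_JOB") or get_flag("AYON_REMOTE_PUBLISH"):
        actions.append("inject_ayon_environment")
    return actions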

View file

@ -2,11 +2,12 @@ import os
import ftrack_api
from openpype.settings import get_project_settings
from openpype.lib import PostLaunchHook
from openpype.lib.applications import PostLaunchHook, LaunchTypes
class PostFtrackHook(PostLaunchHook):
order = None
launch_types = {LaunchTypes.local}
def execute(self):
project_name = self.data.get("project_name")

View file

@ -1,5 +1,5 @@
import os
from openpype.lib import PreLaunchHook
from openpype.lib.applications import PreLaunchHook
from openpype_modules.slack import SLACK_MODULE_DIR
@ -8,6 +8,7 @@ class PrePython2Support(PreLaunchHook):
Path to vendor modules is added to the beginning of PYTHONPATH.
"""
launch_types = set()
def execute(self):
if not self.application.use_python_2:

View file

@ -1,12 +1,8 @@
import os
import shutil
from openpype.client.entities import (
get_representations,
get_project
)
from openpype.lib import PreLaunchHook
from openpype.client.entities import get_representations
from openpype.lib.applications import PreLaunchHook, LaunchTypes
from openpype.lib.profiles_filtering import filter_profiles
from openpype.modules.sync_server.sync_server import (
download_last_published_workfile,
@ -32,6 +28,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
"nuke", "nukeassist", "nukex", "hiero", "nukestudio",
"maya", "harmony", "celaction", "flame", "fusion",
"houdini", "tvpaint"]
launch_types = {LaunchTypes.local}
def execute(self):
"""Check if local workfile doesn't exist, else copy it.

View file

@ -1,4 +1,4 @@
from openpype.lib import PostLaunchHook
from openpype.lib.applications import PostLaunchHook, LaunchTypes
class PostStartTimerHook(PostLaunchHook):
@ -7,6 +7,7 @@ class PostStartTimerHook(PostLaunchHook):
This module requires enabled TimerManager module.
"""
order = None
launch_types = {LaunchTypes.local}
def execute(self):
project_name = self.data.get("project_name")
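The launch_types attribute added across these hooks restricts when a hook runs. As a sketch only, a hypothetical studio hook opting into both local and farm-render launches could look like this (the class name and body are made up; the import and LaunchTypes values come from the changes above):

from openpype.lib.applications import PreLaunchHook, LaunchTypes


class LogLaunchContextHook(PreLaunchHook):
    """Hypothetical example hook, not part of the codebase."""
    order = 20
    app_groups = {"maya", "nuke"}
    launch_types = {LaunchTypes.local, LaunchTypes.farm_render}

    def execute(self):
        # Launch context data is available on 'self.data'.
        self.log.info(
            "Launching for project: %s", self.data.get("project_name")
        )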

View file

@ -268,6 +268,9 @@ def create_skeleton_instance(
instance_skeleton_data["representations"] = []
instance_skeleton_data["representations"] += representations
persistent = instance.data.get("stagingDir_persistent") is True
instance_skeleton_data["stagingDir_persistent"] = persistent
return instance_skeleton_data
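The stagingDir_persistent flag carried over here is what the add_repre_files_for_cleanup change below checks before queuing files for deletion. A minimal sketch of a collector setting the flag, purely illustrative (plugin name and order offset are made up):

import pyblish.api


class CollectPersistentStaging(pyblish.api.InstancePlugin):
    """Hypothetical collector that keeps an instance's staging dir."""
    order = pyblish.api.CollectorOrder + 0.49
    label = "Keep Staging Dir"

    def process(self, instance):
        # Representations in this staging dir won't be added to cleanup.
        instance.data["stagingDir_persistent"] = True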

View file

@ -981,7 +981,7 @@ def add_repre_files_for_cleanup(instance, repre):
"""
files = repre["files"]
staging_dir = repre.get("stagingDir")
if not staging_dir:
if not staging_dir or instance.data.get("stagingDir_persistent"):
return
if isinstance(files, str):

View file

@ -88,7 +88,10 @@ class PypeCommands:
"""
from openpype.lib import Logger
from openpype.lib.applications import get_app_environments_for_context
from openpype.lib.applications import (
get_app_environments_for_context,
LaunchTypes,
)
from openpype.modules import ModulesManager
from openpype.pipeline import (
install_openpype_plugins,
@ -122,7 +125,8 @@ class PypeCommands:
context["project_name"],
context["asset_name"],
context["task_name"],
app_full_name
app_full_name,
launch_type=LaunchTypes.farm_publish,
)
os.environ.update(env)
@ -237,11 +241,19 @@ class PypeCommands:
Called by Deadline plugin to propagate environment into render jobs.
"""
from openpype.lib.applications import get_app_environments_for_context
from openpype.lib.applications import (
get_app_environments_for_context,
LaunchTypes,
)
if all((project, asset, task, app)):
env = get_app_environments_for_context(
project, asset, task, app, env_group
project,
asset,
task,
app,
env_group=env_group,
launch_type=LaunchTypes.farm_render,
)
else:
env = os.environ.copy()
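For reference, a call with the new launch_type argument looks like the snippet below; the context values are illustrative only:

from openpype.lib.applications import (
    get_app_environments_for_context,
    LaunchTypes,
)

env = get_app_environments_for_context(
    "myProject",          # project name
    "sh010",              # asset name
    "compositing",        # task name
    "nuke/14-0",          # full application name
    env_group="farm",
    launch_type=LaunchTypes.farm_render,
)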

View file

@ -267,6 +267,7 @@ def _convert_modules_system(
):
func(ayon_settings, output, addon_versions, default_settings)
modules_settings = output["modules"]
for module_name in (
"sync_server",
"log_viewer",
@ -279,7 +280,16 @@ def _convert_modules_system(
settings = default_settings["modules"][module_name]
if "enabled" in settings:
settings["enabled"] = False
output["modules"][module_name] = settings
modules_settings[module_name] = settings
for key, value in ayon_settings.items():
if key not in output:
output[key] = value
# Make sure addons have access to settings in initialization
# - ModulesManager passes only modules settings into initialization
if key not in modules_settings:
modules_settings[key] = value
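A toy illustration of what that mirroring loop does (the addon key is made up):

ayon_settings = {"my_addon": {"enabled": True}}
output = {"modules": {}}
modules_settings = output["modules"]

for key, value in ayon_settings.items():
    if key not in output:
        output[key] = value
    # Mirror into "modules" so ModulesManager can hand the settings
    # to the addon during initialization.
    if key not in modules_settings:
        modules_settings[key] = value

# output -> {"modules": {"my_addon": {"enabled": True}},
#            "my_addon": {"enabled": True}}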
def convert_system_settings(ayon_settings, default_settings, addon_versions):
@ -293,15 +303,16 @@ def convert_system_settings(ayon_settings, default_settings, addon_versions):
if "core" in ayon_settings:
_convert_general(ayon_settings, output, default_settings)
for key, value in default_settings.items():
if key not in output:
output[key] = value
_convert_modules_system(
ayon_settings,
output,
addon_versions,
default_settings
)
for key, value in default_settings.items():
if key not in output:
output[key] = value
return output

View file

@ -13,6 +13,12 @@
},
{
"use_range_limit": "Use range limit"
},
{
"ordered": "Defined order"
},
{
"channels": "Channels override"
}
]
}

View file

@ -28,12 +28,14 @@ class OverviewWidget(QtWidgets.QFrame):
self._refreshing_instances = False
self._controller = controller
create_widget = CreateWidget(controller, self)
subset_content_widget = QtWidgets.QWidget(self)
create_widget = CreateWidget(controller, subset_content_widget)
# --- Created Subsets/Instances ---
# Common widget for creation and overview
subset_views_widget = BorderedLabelWidget(
"Subsets to publish", self
"Subsets to publish", subset_content_widget
)
subset_view_cards = InstanceCardView(controller, subset_views_widget)
@ -45,14 +47,14 @@ class OverviewWidget(QtWidgets.QFrame):
subset_views_layout.setCurrentWidget(subset_view_cards)
# Buttons at the bottom of subset view
create_btn = CreateInstanceBtn(self)
delete_btn = RemoveInstanceBtn(self)
change_view_btn = ChangeViewBtn(self)
create_btn = CreateInstanceBtn(subset_views_widget)
delete_btn = RemoveInstanceBtn(subset_views_widget)
change_view_btn = ChangeViewBtn(subset_views_widget)
# --- Overview ---
# Subset details widget
subset_attributes_wrap = BorderedLabelWidget(
"Publish options", self
"Publish options", subset_content_widget
)
subset_attributes_widget = SubsetAttributesWidget(
controller, subset_attributes_wrap
@ -81,7 +83,6 @@ class OverviewWidget(QtWidgets.QFrame):
subset_views_widget.set_center_widget(subset_view_widget)
# Whole subset layout with attributes and details
subset_content_widget = QtWidgets.QWidget(self)
subset_content_layout = QtWidgets.QHBoxLayout(subset_content_widget)
subset_content_layout.setContentsMargins(0, 0, 0, 0)
subset_content_layout.addWidget(create_widget, 7)
@ -161,44 +162,62 @@ class OverviewWidget(QtWidgets.QFrame):
self._change_anim = change_anim
# Start in create mode
self._create_widget_policy = create_widget.sizePolicy()
self._subset_views_widget_policy = subset_views_widget.sizePolicy()
self._subset_attributes_wrap_policy = (
subset_attributes_wrap.sizePolicy()
)
self._max_widget_width = None
self._current_state = "create"
subset_attributes_wrap.setVisible(False)
def make_sure_animation_is_finished(self):
if self._change_anim.state() == QtCore.QAbstractAnimation.Running:
self._change_anim.stop()
self._on_change_anim_finished()
def set_state(self, new_state, animate):
if new_state == self._current_state:
return
self._current_state = new_state
anim_is_running = (
self._change_anim.state() == QtCore.QAbstractAnimation.Running
)
if not animate:
self._change_visibility_for_state()
if anim_is_running:
self._change_anim.stop()
self.make_sure_animation_is_finished()
return
if self._max_widget_width is None:
self._max_widget_width = self._subset_views_widget.maximumWidth()
if new_state == "create":
direction = QtCore.QAbstractAnimation.Backward
else:
direction = QtCore.QAbstractAnimation.Forward
self._change_anim.setDirection(direction)
if not anim_is_running:
view_width = self._subset_views_widget.width()
self._subset_views_widget.setMinimumWidth(view_width)
self._subset_views_widget.setMaximumWidth(view_width)
if (
self._change_anim.state() != QtCore.QAbstractAnimation.Running
):
self._start_animation()
def _start_animation(self):
views_geo = self._subset_views_widget.geometry()
layout_spacing = self._subset_content_layout.spacing()
if self._create_widget.isVisible():
create_geo = self._create_widget.geometry()
subset_geo = QtCore.QRect(create_geo)
subset_geo.moveTop(views_geo.top())
subset_geo.moveLeft(views_geo.right() + layout_spacing)
self._subset_attributes_wrap.setVisible(True)
elif self._subset_attributes_wrap.isVisible():
subset_geo = self._subset_attributes_wrap.geometry()
create_geo = QtCore.QRect(subset_geo)
create_geo.moveTop(views_geo.top())
create_geo.moveRight(views_geo.left() - (layout_spacing + 1))
self._create_widget.setVisible(True)
else:
self._change_anim.start()
return
while self._subset_content_layout.count():
self._subset_content_layout.takeAt(0)
self._subset_views_widget.setGeometry(views_geo)
self._create_widget.setGeometry(create_geo)
self._subset_attributes_wrap.setGeometry(subset_geo)
self._change_anim.start()
def get_subset_views_geo(self):
parent = self._subset_views_widget.parent()
@ -281,41 +300,39 @@ class OverviewWidget(QtWidgets.QFrame):
def _on_change_anim(self, value):
self._create_widget.setVisible(True)
self._subset_attributes_wrap.setVisible(True)
width = (
self._subset_content_widget.width()
- (
self._subset_views_widget.width()
+ (self._subset_content_layout.spacing() * 2)
)
)
subset_attrs_width = int((float(width) / self.anim_end_value) * value)
if subset_attrs_width > width:
subset_attrs_width = width
layout_spacing = self._subset_content_layout.spacing()
content_width = (
self._subset_content_widget.width() - (layout_spacing * 2)
)
content_height = self._subset_content_widget.height()
views_width = max(
int(content_width * 0.3),
self._subset_views_widget.minimumWidth()
)
width = content_width - views_width
# Visible widths of other widgets
subset_attrs_width = int((float(width) / self.anim_end_value) * value)
create_width = width - subset_attrs_width
self._create_widget.setMinimumWidth(create_width)
self._create_widget.setMaximumWidth(create_width)
self._subset_attributes_wrap.setMinimumWidth(subset_attrs_width)
self._subset_attributes_wrap.setMaximumWidth(subset_attrs_width)
views_geo = QtCore.QRect(
create_width + layout_spacing, 0,
views_width, content_height
)
create_geo = QtCore.QRect(0, 0, width, content_height)
subset_attrs_geo = QtCore.QRect(create_geo)
create_geo.moveRight(views_geo.left() - (layout_spacing + 1))
subset_attrs_geo.moveLeft(views_geo.right() + layout_spacing)
self._subset_views_widget.setGeometry(views_geo)
self._create_widget.setGeometry(create_geo)
self._subset_attributes_wrap.setGeometry(subset_attrs_geo)
def _on_change_anim_finished(self):
self._change_visibility_for_state()
self._create_widget.setMinimumWidth(0)
self._create_widget.setMaximumWidth(self._max_widget_width)
self._subset_attributes_wrap.setMinimumWidth(0)
self._subset_attributes_wrap.setMaximumWidth(self._max_widget_width)
self._subset_views_widget.setMinimumWidth(0)
self._subset_views_widget.setMaximumWidth(self._max_widget_width)
self._create_widget.setSizePolicy(
self._create_widget_policy
)
self._subset_attributes_wrap.setSizePolicy(
self._subset_attributes_wrap_policy
)
self._subset_views_widget.setSizePolicy(
self._subset_views_widget_policy
)
self._subset_content_layout.addWidget(self._create_widget, 7)
self._subset_content_layout.addWidget(self._subset_views_widget, 3)
self._subset_content_layout.addWidget(self._subset_attributes_wrap, 7)
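The resize logic above is driven by a value animation whose 0..1 value is mapped to widget widths. A standalone sketch of that pattern, independent of the publisher widgets and assuming a QVariantAnimation (whether the publisher uses exactly that class is not shown in this hunk):

from qtpy import QtCore


class WidthAnimator(QtCore.QObject):
    """Standalone sketch: map an animated 0..1 value to two widths."""

    def __init__(self, total_width, parent=None):
        super().__init__(parent)
        self._total_width = total_width
        anim = QtCore.QVariantAnimation(self)
        anim.setStartValue(0.0)
        anim.setEndValue(1.0)
        anim.setDuration(220)
        anim.valueChanged.connect(self._on_value_changed)
        self._anim = anim

    def start(self, forward=True):
        # Reuse one animation for both directions, like the widget above.
        direction = (
            QtCore.QAbstractAnimation.Forward
            if forward
            else QtCore.QAbstractAnimation.Backward
        )
        self._anim.setDirection(direction)
        self._anim.start()

    def _on_value_changed(self, value):
        # Requires a running Qt event loop to actually tick.
        right_width = int(self._total_width * value)
        left_width = self._total_width - right_width
        print("left:", left_width, "right:", right_width)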
def _change_visibility_for_state(self):
self._create_widget.setVisible(

View file

@ -634,16 +634,7 @@ class PublisherWindow(QtWidgets.QDialog):
if old_tab == "details":
self._publish_details_widget.close_details_popup()
if new_tab in ("create", "publish"):
animate = True
if old_tab not in ("create", "publish"):
animate = False
self._content_stacked_layout.setCurrentWidget(
self._overview_widget
)
self._overview_widget.set_state(new_tab, animate)
elif new_tab == "details":
if new_tab == "details":
self._content_stacked_layout.setCurrentWidget(
self._publish_details_widget
)
@ -654,6 +645,21 @@ class PublisherWindow(QtWidgets.QDialog):
self._report_widget
)
old_on_overview = old_tab in ("create", "publish")
if new_tab in ("create", "publish"):
self._content_stacked_layout.setCurrentWidget(
self._overview_widget
)
# Overview state is animated only when switching between
# 'create' and 'publish' tab
self._overview_widget.set_state(new_tab, old_on_overview)
elif old_on_overview:
# Make sure the animation is finished if the previous tab was
# 'create' or 'publish'. This is purely a safety measure to avoid
# a stuck animation when the user switches tabs too quickly.
self._overview_widget.make_sure_animation_is_finished()
is_create = new_tab == "create"
if is_create:
self._install_app_event_listener()

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.16.3-nightly.2"
__version__ = "3.16.3-nightly.3"