Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00

Merge branch 'develop' of github.com:pypeclub/OpenPype into chore/testing_fixes

This commit is contained in: commit ef20efc37a
485 changed files with 45256 additions and 3963 deletions

.gitmodules (vendored): 8 lines changed
@@ -3,10 +3,4 @@
	url = https://github.com/pypeclub/avalon-core.git
[submodule "repos/avalon-unreal-integration"]
	path = repos/avalon-unreal-integration
	url = https://github.com/pypeclub/avalon-unreal-integration.git
[submodule "openpype/modules/default_modules/ftrack/python2_vendor/arrow"]
	path = openpype/modules/default_modules/ftrack/python2_vendor/arrow
	url = https://github.com/arrow-py/arrow.git
[submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"]
	path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api
	url = https://bitbucket.org/ftrack/ftrack-python-api.git
	url = https://github.com/pypeclub/avalon-unreal-integration.git
@@ -42,6 +42,12 @@ def standalonepublisher():
    PypeCommands().launch_standalone_publisher()


@main.command()
def traypublisher():
    """Show new OpenPype Standalone publisher UI."""
    PypeCommands().launch_traypublisher()


@main.command()
@click.option("-d", "--debug",
              is_flag=True, help=("Run pype tray in debug mode"))
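Note: the commands above follow the standard click group/command layout. A minimal, self-contained sketch of that pattern (the command name mirrors the snippet, everything else is illustrative, not the real OpenPype CLI):

    import click

    @click.group()
    def main():
        """Top-level command group (stand-in for the OpenPype CLI)."""

    @main.command()
    def traypublisher():
        """Show new OpenPype Standalone publisher UI."""
        click.echo("launching tray publisher...")

    if __name__ == "__main__":
        main()  # `python cli.py traypublisher` dispatches to the command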
@@ -17,6 +17,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
        "nuke",
        "nukex",
        "hiero",
        "houdini",
        "nukestudio",
        "blender",
        "photoshop",
@@ -2,7 +2,7 @@ from openpype.api import Anatomy
from openpype.lib import (
    PreLaunchHook,
    EnvironmentPrepData,
    prepare_host_environments,
    prepare_app_environments,
    prepare_context_environments
)
@@ -14,14 +14,6 @@ class GlobalHostDataHook(PreLaunchHook):

    def execute(self):
        """Prepare global objects to `data` that will be used for sure."""
        if not self.application.is_host:
            self.log.info(
                "Skipped hook {}. Application is not marked as host.".format(
                    self.__class__.__name__
                )
            )
            return

        self.prepare_global_data()

        if not self.data.get("asset_doc"):
@@ -49,7 +41,7 @@ class GlobalHostDataHook(PreLaunchHook):
            "log": self.log
        })

        prepare_host_environments(temp_data, self.launch_context.env_group)
        prepare_app_environments(temp_data, self.launch_context.env_group)
        prepare_context_environments(temp_data)

        temp_data.pop("log")
@@ -527,6 +527,7 @@ def get_segment_attributes(segment):

    # Add timeline segment to tree
    clip_data = {
        "shot_name": segment.shot_name.get_value(),
        "segment_name": segment.name.get_value(),
        "segment_comment": segment.comment.get_value(),
        "tape_name": segment.tape_name,
@@ -361,6 +361,7 @@ class PublishableClip:
    vertical_sync_default = False
    driving_layer_default = ""
    index_from_segment_default = False
    use_shot_name_default = False

    def __init__(self, segment, **kwargs):
        self.rename_index = kwargs["rename_index"]
@@ -376,6 +377,7 @@ class PublishableClip:
        # segment (clip) main attributes
        self.cs_name = self.clip_data["segment_name"]
        self.cs_index = int(self.clip_data["segment"])
        self.shot_name = self.clip_data["shot_name"]

        # get track name and index
        self.track_index = int(self.clip_data["track"])
@@ -419,18 +421,21 @@ class PublishableClip:
        # deal with clip name
        new_name = self.marker_data.pop("newClipName")

        if self.rename:
        if self.rename and not self.use_shot_name:
            # rename segment
            self.current_segment.name = str(new_name)
            self.marker_data["asset"] = str(new_name)
        elif self.use_shot_name:
            self.marker_data["asset"] = self.shot_name
            self.marker_data["hierarchyData"]["shot"] = self.shot_name
        else:
            self.marker_data["asset"] = self.cs_name
            self.marker_data["hierarchyData"]["shot"] = self.cs_name

        if self.marker_data["heroTrack"] and self.review_layer:
            self.marker_data.update({"reviewTrack": self.review_layer})
            self.marker_data["reviewTrack"] = self.review_layer
        else:
            self.marker_data.update({"reviewTrack": None})
            self.marker_data["reviewTrack"] = None

        # create pype tag on track_item and add data
        fpipeline.imprint(self.current_segment, self.marker_data)
@@ -463,6 +468,8 @@ class PublishableClip:
        # ui_inputs data or default values if gui was not used
        self.rename = self.ui_inputs.get(
            "clipRename", {}).get("value") or self.rename_default
        self.use_shot_name = self.ui_inputs.get(
            "useShotName", {}).get("value") or self.use_shot_name_default
        self.clip_name = self.ui_inputs.get(
            "clipName", {}).get("value") or self.clip_name_default
        self.hierarchy = self.ui_inputs.get(
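Note: the `.get(..., {}).get("value") or default` pattern above relies on truthiness, so an explicit `False` coming from the UI is indistinguishable from a missing value. A minimal sketch of that behavior, with hypothetical values:

    ui_inputs = {"clipRename": {"value": False}}
    rename_default = True

    # `or` falls back to the default for any falsy value, including False
    rename = ui_inputs.get("clipRename", {}).get("value") or rename_default
    print(rename)  # True -- the explicit False from the UI is ignored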
@@ -87,41 +87,48 @@ class CreateShotClip(opfapi.Creator):
            "target": "tag",
            "toolTip": "Parents folder for shot root folder, Template filled with `Hierarchy Data` section",  # noqa
            "order": 0},
        "useShotName": {
            "value": True,
            "type": "QCheckBox",
            "label": "Use Shot Name",
            "target": "ui",
            "toolTip": "Use name from Shot name clip attribute",  # noqa
            "order": 1},
        "clipRename": {
            "value": False,
            "type": "QCheckBox",
            "label": "Rename clips",
            "target": "ui",
            "toolTip": "Renaming selected clips on the fly",  # noqa
            "order": 1},
            "order": 2},
        "clipName": {
            "value": "{sequence}{shot}",
            "type": "QLineEdit",
            "label": "Clip Name Template",
            "target": "ui",
            "toolTip": "Template for creating shot names, used for renaming (use rename: on)",  # noqa
            "order": 2},
            "order": 3},
        "segmentIndex": {
            "value": True,
            "type": "QCheckBox",
            "label": "Segment index",
            "target": "ui",
            "toolTip": "Take number from segment index",  # noqa
            "order": 3},
            "order": 4},
        "countFrom": {
            "value": 10,
            "type": "QSpinBox",
            "label": "Count sequence from",
            "target": "ui",
            "toolTip": "Set where the sequence number starts from",  # noqa
            "order": 4},
            "order": 5},
        "countSteps": {
            "value": 10,
            "type": "QSpinBox",
            "label": "Stepping number",
            "target": "ui",
            "toolTip": "What number is added with every new step",  # noqa
            "order": 5},
            "order": 6},
    }
},
"hierarchyData": {
@@ -29,7 +29,7 @@
    <fileType>Jpeg</fileType>
    <codec>923688</codec>
    <codecProfile></codecProfile>
    <namePattern><segment name></namePattern>
    <namePattern><shot name></namePattern>
    <compressionQuality>100</compressionQuality>
    <transferCharacteristic>2</transferCharacteristic>
    <colorimetricSpecification>4</colorimetricSpecification>
@@ -27,7 +27,7 @@
    </sequence>
    <movie>
    <fileType>QuickTime</fileType>
    <namePattern><segment name></namePattern>
    <namePattern><shot name></namePattern>
    <yuvHeadroom>0</yuvHeadroom>
    <yuvColourSpace>PCS_709</yuvColourSpace>
    <operationalPattern>None</operationalPattern>
@@ -43,7 +43,7 @@
    <targetVersion>2021</targetVersion>
    <pathSuffix>/profiles/.33622016/HDTV_720p_8Mbits.cdxprof</pathSuffix>
    </codecProfile>
    <namePattern><segment name>_<video codec></namePattern>
    <namePattern><shot name>_<video codec></namePattern>
    <compressionQuality>50</compressionQuality>
    <transferCharacteristic>2</transferCharacteristic>
    <colorimetricSpecification>4</colorimetricSpecification>
@@ -8,7 +8,7 @@ PLUGIN_DIR = os.path.dirname(os.path.dirname(__file__))
EXPORT_PRESETS_DIR = os.path.join(PLUGIN_DIR, "export_preset")

CONFIG_DIR = os.path.join(os.path.expanduser(
    "~/.openpype"), "openpype_flame_to_ftrack")
    "~/.openpype"), "openpype_babypublisher")


@contextmanager
@@ -360,6 +360,8 @@ class FtrackComponentCreator:


class FtrackEntityOperator:
    existing_tasks = []

    def __init__(self, session, project_entity):
        self.session = session
        self.project_entity = project_entity
@@ -392,10 +394,7 @@ class FtrackEntityOperator:
        query = '{} where name is "{}" and project_id is "{}"'.format(
            type, name, self.project_entity["id"])

        try:
            entity = session.query(query).one()
        except Exception:
            entity = None
        entity = session.query(query).first()

        # if entity doesn't exist then create one
        if not entity:
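Note: in the ftrack API, `query(...).first()` returns `None` when nothing matches, which is what makes the removed try/except around `one()` unnecessary. A sketch of the equivalent lookup, assuming a `session` object exposing the ftrack_api query interface:

    def find_entity(session, type_name, name, project_id):
        """Return the first matching entity, or None if there is no match."""
        query = '{} where name is "{}" and project_id is "{}"'.format(
            type_name, name, project_id)
        return session.query(query).first()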
@@ -430,10 +429,21 @@ class FtrackEntityOperator:
        return parents

    def create_task(self, task_type, task_types, parent):
        existing_task = [
        _existing_tasks = [
            child for child in parent['children']
            if child.entity_type.lower() == 'task'
            if child['name'].lower() in task_type.lower()
        ]

        # add tasks into existing tasks if they are not already there
        for _t in _existing_tasks:
            if _t in self.existing_tasks:
                continue
            self.existing_tasks.append(_t)

        existing_task = [
            task for task in self.existing_tasks
            if task['name'].lower() in task_type.lower()
            if task['parent'] == parent
        ]

        if existing_task:
@@ -445,4 +455,5 @@ class FtrackEntityOperator:
        })
        task["type"] = task_types[task_type]

        self.existing_tasks.append(task)
        return task
@@ -1,4 +1,4 @@
from PySide2 import QtWidgets, QtCore
from Qt import QtWidgets, QtCore

import uiwidgets
import app_utils
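Note: swapping the direct PySide2 import for the Qt.py shim keeps the module binding-agnostic. A minimal sketch of the idea, assuming the Qt.py package is installed:

    # Qt.py resolves to whichever binding is available (PySide2, PyQt5, ...),
    # so the same import works across hosts that ship different bindings.
    from Qt import QtWidgets

    app = QtWidgets.QApplication.instance() or QtWidgets.QApplication([])
    label = QtWidgets.QLabel("Hello from a binding-agnostic widget")
    label.show()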
@@ -33,11 +33,12 @@ class MainWindow(QtWidgets.QWidget):
        self.panel_class.clear_temp_data()
        self.panel_class.close()
        clear_inner_modules()
        ftrack_lib.FtrackEntityOperator.existing_tasks = []
        # now the panel can be closed
        event.accept()


class FlameToFtrackPanel(object):
class FlameBabyPublisherPanel(object):
    session = None
    temp_data_dir = None
    processed_components = []
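Note: the explicit reset of `FtrackEntityOperator.existing_tasks` on close is needed because it is a class-level list, shared by every instance. A minimal sketch of that behavior (illustrative class, not the real one):

    class Operator:
        existing_tasks = []  # class attribute: one list shared by all instances

    op_a, op_b = Operator(), Operator()
    op_a.existing_tasks.append("task")
    print(op_b.existing_tasks)  # ['task'] -- same underlying list object

    Operator.existing_tasks = []  # rebind on the class to reset for everyone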
@@ -78,7 +79,7 @@ class FlameToFtrackPanel(object):

        # creating ui
        self.window.setMinimumSize(1500, 600)
        self.window.setWindowTitle('Sequence Shots to Ftrack')
        self.window.setWindowTitle('OpenPype: Baby-publisher')
        self.window.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
        self.window.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.window.setFocusPolicy(QtCore.Qt.StrongFocus)
@@ -469,10 +470,14 @@ class FlameToFtrackPanel(object):
        for sequence in self.selection:
            frame_rate = float(str(sequence.frame_rate)[:-4])
            for ver in sequence.versions:
                for tracks in ver.tracks:
                    for segment in tracks.segments:
                for track in ver.tracks:
                    if len(track.segments) == 0 and track.hidden:
                        continue
                    for segment in track.segments:
                        print(segment.attributes)
                        if str(segment.name)[1:-1] == "":
                        if segment.name.get_value() == "":
                            continue
                        if segment.hidden.get_value() is True:
                            continue
                        # get clip frame duration
                        record_duration = str(segment.record_duration)[1:-1]
@@ -492,11 +497,11 @@ class FlameToFtrackPanel(object):

            # Add timeline segment to tree
            QtWidgets.QTreeWidgetItem(self.tree, [
                str(sequence.name)[1:-1],  # seq
                str(segment.name)[1:-1],  # shot
                sequence.name.get_value(),  # seq name
                segment.shot_name.get_value(),  # shot name
                str(clip_duration),  # clip duration
                shot_description,  # shot description
                str(segment.comment)[1:-1]  # task description
                segment.comment.get_value()  # task description
            ]).setFlags(
                QtCore.Qt.ItemIsEditable
                | QtCore.Qt.ItemIsEnabled
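Note: the recurring change in these hunks replaces string-slicing of Flame attribute reprs with the attribute's own accessor. A sketch of why, using a stand-in class whose repr wraps the value in quotes (as the Flame attribute objects here appear to do):

    class FakeAttribute:
        """Stand-in for a Flame attribute: repr wraps the value in quotes."""
        def __init__(self, value):
            self._value = value

        def __repr__(self):
            return "'{}'".format(self._value)

        def get_value(self):
            return self._value

    name = FakeAttribute("sh010")
    print(str(name)[1:-1])   # sh010 -- works, but depends on the repr format
    print(name.get_value())  # sh010 -- explicit and robust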
@@ -1,4 +1,4 @@
from PySide2 import QtWidgets, QtCore
from Qt import QtWidgets, QtCore


class FlameLabel(QtWidgets.QLabel):
@@ -16,10 +16,11 @@ def flame_panel_executor(selection):
    if "panel_app" in sys.modules.keys():
        print("panel_app module is already loaded")
        del sys.modules["panel_app"]
        import panel_app
        reload(panel_app)  # noqa
        print("panel_app module removed from sys.modules")

    import panel_app
    panel_app.FlameToFtrackPanel(selection)
    panel_app.FlameBabyPublisherPanel(selection)


def scope_sequence(selection):
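Note: the hook above relies on the hot-reload trick of dropping a module from sys.modules so the next import re-executes it. A minimal sketch of the pattern:

    import sys

    def fresh_import(module_name):
        """Drop any cached module so the next import re-executes it."""
        if module_name in sys.modules:
            del sys.modules[module_name]
        return __import__(module_name)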
@@ -30,7 +31,7 @@ def scope_sequence(selection):
def get_media_panel_custom_ui_actions():
    return [
        {
            "name": "OpenPype: Ftrack",
            "name": "OpenPype: Baby-publisher",
            "actions": [
                {
                    "name": "Create Shots",
@@ -1,77 +0,0 @@
import pyblish.api


class ValidateOutputNode(pyblish.api.InstancePlugin):
    """Validate the instance SOP Output Node.

    This will ensure:
        - The SOP Path is set.
        - The SOP Path refers to an existing object.
        - The SOP Path node is a SOP node.
        - The SOP Path node has at least one input connection (has an input)
        - The SOP Path has geometry data.

    """

    order = pyblish.api.ValidatorOrder
    families = ["pointcache", "vdbcache"]
    hosts = ["houdini"]
    label = "Validate Output Node"

    def process(self, instance):

        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                "Output node(s) `%s` are incorrect. "
                "See plug-in log for details." % invalid
            )

    @classmethod
    def get_invalid(cls, instance):

        import hou

        output_node = instance.data["output_node"]

        if output_node is None:
            node = instance[0]
            cls.log.error(
                "SOP Output node in '%s' does not exist. "
                "Ensure a valid SOP output path is set." % node.path()
            )

            return [node.path()]

        # Output node must be a Sop node.
        if not isinstance(output_node, hou.SopNode):
            cls.log.error(
                "Output node %s is not a SOP node. "
                "SOP Path must point to a SOP node, "
                "instead found category type: %s"
                % (output_node.path(), output_node.type().category().name())
            )
            return [output_node.path()]

        # For the sake of completeness also assert the category type
        # is Sop to avoid potential edge case scenarios even though
        # the isinstance check above should be stricter than this category
        assert output_node.type().category().name() == "Sop", (
            "Output node %s is not of category Sop. This is a bug.."
            % output_node.path()
        )

        # Check if output node has incoming connections
        if not output_node.inputConnections():
            cls.log.error(
                "Output node `%s` has no incoming connections"
                % output_node.path()
            )
            return [output_node.path()]

        # Ensure the output node has at least Geometry data
        if not output_node.geometry():
            cls.log.error(
                "Output node `%s` has no geometry data." % output_node.path()
            )
            return [output_node.path()]
@@ -10,12 +10,6 @@ from .pipeline import (

    ls,
    containerise,

    lock,
    unlock,
    is_locked,
    lock_ignored,

)
from .plugin import (
    Creator,
@@ -38,11 +32,9 @@ from .lib import (
    read,

    apply_shaders,
    without_extension,
    maintained_selection,
    suspended_refresh,

    unique_name,
    unique_namespace,
)
@@ -54,11 +46,6 @@ __all__ = [
    "ls",
    "containerise",

    "lock",
    "unlock",
    "is_locked",
    "lock_ignored",

    "Creator",
    "Loader",
@@ -76,11 +63,9 @@ __all__ = [
    "lsattrs",
    "read",

    "unique_name",
    "unique_namespace",

    "apply_shaders",
    "without_extension",
    "maintained_selection",
    "suspended_refresh",
@@ -2,7 +2,6 @@

import os
import sys
import re
import platform
import uuid
import math
@@ -154,53 +153,9 @@ def maintained_selection():
        cmds.select(clear=True)


def unique_name(name, format="%02d", namespace="", prefix="", suffix=""):
    """Return unique `name`

    The function takes into consideration an optional `namespace`
    and `suffix`. The suffix is included in evaluating whether a
    name exists - such as `name` + "_GRP" - but isn't included
    in the returned value.

    If a namespace is provided, only names within that namespace
    are considered when evaluating whether the name is unique.

    Arguments:
        format (str, optional): The `name` is given a number, this determines
            how this number is formatted. Defaults to a padding of 2.
            E.g. my_name01, my_name02.
        namespace (str, optional): Only consider names within this namespace.
        suffix (str, optional): Only consider names with this suffix.

    Example:
        >>> name = cmds.createNode("transform", name="MyName")
        >>> cmds.objExists(name)
        True
        >>> unique = unique_name(name)
        >>> cmds.objExists(unique)
        False

    """

    iteration = 1
    unique = prefix + (name + format % iteration) + suffix

    while cmds.objExists(namespace + ":" + unique):
        iteration += 1
        unique = prefix + (name + format % iteration) + suffix

    if suffix:
        return unique[:-len(suffix)]

    return unique


def unique_namespace(namespace, format="%02d", prefix="", suffix=""):
    """Return unique namespace

    Similar to :func:`unique_name` but evaluating namespaces
    as opposed to object names.

    Arguments:
        namespace (str): Name of namespace to consider
        format (str, optional): Formatting of the given iteration number
@@ -316,153 +271,6 @@ def pairwise(iterable):
    return itertools.izip(a, a)


def unique(name):
    assert isinstance(name, string_types), "`name` must be string"

    while cmds.objExists(name):
        matches = re.findall(r"\d+$", name)

        if matches:
            match = matches[-1]
            name = name.rstrip(match)
            number = int(match) + 1
        else:
            number = 1

        name = name + str(number)

    return name


def uv_from_element(element):
    """Return the UV coordinate of given 'element'

    Supports components, meshes, nurbs.

    """

    supported = ["mesh", "nurbsSurface"]

    uv = [0.5, 0.5]

    if "." not in element:
        type = cmds.nodeType(element)
        if type == "transform":
            geometry_shape = cmds.listRelatives(element, shapes=True)

            if len(geometry_shape) >= 1:
                geometry_shape = geometry_shape[0]
            else:
                return

        elif type in supported:
            geometry_shape = element

        else:
            cmds.error("Could not do what you wanted..")
            return
    else:
        # If it is indeed a component - get the current Mesh
        try:
            parent = element.split(".", 1)[0]

            # Maya is funny in that when the transform of the shape
            # of the component element has children, the name returned
            # by that elementection is the shape. Otherwise, it is
            # the transform. So lets see what type we're dealing with here.
            if cmds.nodeType(parent) in supported:
                geometry_shape = parent
            else:
                geometry_shape = cmds.listRelatives(parent, shapes=1)[0]

            if not geometry_shape:
                cmds.error("Skipping %s: Could not find shape." % element)
                return

            if len(cmds.ls(geometry_shape)) > 1:
                cmds.warning("Multiple shapes with identical "
                             "names found. This might not work")

        except TypeError as e:
            cmds.warning("Skipping %s: Didn't find a shape "
                         "for component elementection. %s" % (element, e))
            return

    try:
        type = cmds.nodeType(geometry_shape)

        if type == "nurbsSurface":
            # If a surfacePoint is elementected on a nurbs surface
            root, u, v = element.rsplit("[", 2)
            uv = [float(u[:-1]), float(v[:-1])]

        if type == "mesh":
            # -----------
            # Average the U and V values
            # ===========
            uvs = cmds.polyListComponentConversion(element, toUV=1)
            if not uvs:
                cmds.warning("Couldn't derive any UV's from "
                             "component, reverting to default U and V")
                raise TypeError

            # Flatten list of Uv's as sometimes it returns
            # neighbors like this [2:3] instead of [2], [3]
            flattened = []

            for uv in uvs:
                flattened.extend(cmds.ls(uv, flatten=True))

            uvs = flattened

            sumU = 0
            sumV = 0
            for uv in uvs:
                try:
                    u, v = cmds.polyEditUV(uv, query=True)
                except Exception:
                    cmds.warning("Couldn't find any UV coordinated, "
                                 "reverting to default U and V")
                    raise TypeError

                sumU += u
                sumV += v

            averagedU = sumU / len(uvs)
            averagedV = sumV / len(uvs)

            uv = [averagedU, averagedV]
    except TypeError:
        pass

    return uv


def shape_from_element(element):
    """Return shape of given 'element'

    Supports components, meshes, and surfaces

    """

    try:
        # Get either shape or transform, based on element-type
        node = cmds.ls(element, objectsOnly=True)[0]
    except Exception:
        cmds.warning("Could not find node in %s" % element)
        return None

    if cmds.nodeType(node) == 'transform':
        try:
            return cmds.listRelatives(node, shapes=True)[0]
        except Exception:
            cmds.warning("Could not find shape in %s" % element)
            return None

    else:
        return node


def export_alembic(nodes,
                   file,
                   frame_range=None,
@@ -607,115 +415,6 @@ def imprint(node, data):
    cmds.setAttr(node + "." + key, value, **set_type)


def serialise_shaders(nodes):
    """Generate a shader set dictionary

    Arguments:
        nodes (list): Absolute paths to nodes

    Returns:
        dictionary of (shader: id) pairs

    Schema:
        {
            "shader1": ["id1", "id2"],
            "shader2": ["id3", "id1"]
        }

    Example:
        {
            "Bazooka_Brothers01_:blinn4SG": [
                "f9520572-ac1d-11e6-b39e-3085a99791c9.f[4922:5001]",
                "f9520572-ac1d-11e6-b39e-3085a99791c9.f[4587:4634]",
                "f9520572-ac1d-11e6-b39e-3085a99791c9.f[1120:1567]",
                "f9520572-ac1d-11e6-b39e-3085a99791c9.f[4251:4362]"
            ],
            "lambert2SG": [
                "f9520571-ac1d-11e6-9dbb-3085a99791c9"
            ]
        }

    """

    valid_nodes = cmds.ls(
        nodes,
        long=True,
        recursive=True,
        showType=True,
        objectsOnly=True,
        type="transform"
    )

    meshes_by_id = {}
    for mesh in valid_nodes:
        shapes = cmds.listRelatives(valid_nodes[0],
                                    shapes=True,
                                    fullPath=True) or list()

        if shapes:
            shape = shapes[0]
            if not cmds.nodeType(shape):
                continue

        try:
            id_ = cmds.getAttr(mesh + ".mbID")

            if id_ not in meshes_by_id:
                meshes_by_id[id_] = list()

            meshes_by_id[id_].append(mesh)

        except ValueError:
            continue

    meshes_by_shader = dict()
    for mesh in meshes_by_id.values():
        shape = cmds.listRelatives(mesh,
                                   shapes=True,
                                   fullPath=True) or list()

        for shader in cmds.listConnections(shape,
                                           type="shadingEngine") or list():

            # Objects in this group are those that haven't got
            # any shaders. These are expected to be managed
            # elsewhere, such as by the default model loader.
            if shader == "initialShadingGroup":
                continue

            if shader not in meshes_by_shader:
                meshes_by_shader[shader] = list()

            shaded = cmds.sets(shader, query=True) or list()
            meshes_by_shader[shader].extend(shaded)

    shader_by_id = {}
    for shader, shaded in meshes_by_shader.items():

        if shader not in shader_by_id:
            shader_by_id[shader] = list()

        for mesh in shaded:

            # Enable shader assignment to faces.
            name = mesh.split(".f[")[0]

            transform = name
            if cmds.objectType(transform) == "mesh":
                transform = cmds.listRelatives(name, parent=True)[0]

            try:
                id_ = cmds.getAttr(transform + ".mbID")
                shader_by_id[shader].append(mesh.replace(name, id_))
            except KeyError:
                continue

        # Remove duplicates
        shader_by_id[shader] = list(set(shader_by_id[shader]))

    return shader_by_id


def lsattr(attr, value=None):
    """Return nodes matching `key` and `value`

@@ -794,17 +493,6 @@ def lsattrs(attrs):
    return list(matches)


@contextlib.contextmanager
def without_extension():
    """Use cmds.file with defaultExtensions=False"""
    previous_setting = cmds.file(defaultExtensions=True, query=True)
    try:
        cmds.file(defaultExtensions=False)
        yield
    finally:
        cmds.file(defaultExtensions=previous_setting)


@contextlib.contextmanager
def attribute_values(attr_values):
    """Remaps node attributes to values during context.
@@ -883,26 +571,6 @@ def evaluation(mode="off"):
    cmds.evaluationManager(mode=original)


@contextlib.contextmanager
def no_refresh():
    """Temporarily disables Maya's UI updates

    Note:
        This only disabled the main pane and will sometimes still
        trigger updates in torn off panels.

    """

    pane = _get_mel_global('gMainPane')
    state = cmds.paneLayout(pane, query=True, manage=True)
    cmds.paneLayout(pane, edit=True, manage=False)

    try:
        yield
    finally:
        cmds.paneLayout(pane, edit=True, manage=state)


@contextlib.contextmanager
def empty_sets(sets, force=False):
    """Remove all members of the sets during the context"""
@@ -1569,15 +1237,6 @@ def extract_alembic(file,
    return file


def maya_temp_folder():
    scene_dir = os.path.dirname(cmds.file(query=True, sceneName=True))
    tmp_dir = os.path.abspath(os.path.join(scene_dir, "..", "tmp"))
    if not os.path.isdir(tmp_dir):
        os.makedirs(tmp_dir)

    return tmp_dir


# region ID
def get_id_required_nodes(referenced_nodes=False, nodes=None):
    """Filter out any node which are locked (reference) or readOnly
@@ -1762,22 +1421,6 @@ def set_id(node, unique_id, overwrite=False):
    cmds.setAttr(attr, unique_id, type="string")


def remove_id(node):
    """Remove the id attribute from the input node.

    Args:
        node (str): The node name

    Returns:
        bool: Whether an id attribute was deleted

    """
    if cmds.attributeQuery("cbId", node=node, exists=True):
        cmds.deleteAttr("{}.cbId".format(node))
        return True
    return False


# endregion ID
def get_reference_node(path):
    """
@@ -2453,6 +2096,7 @@ def reset_scene_resolution():

    set_scene_resolution(width, height, pixelAspect)


def set_context_settings():
    """Apply the project settings from the project definition

@@ -2911,7 +2555,7 @@ def get_attr_in_layer(attr, layer):


def fix_incompatible_containers():
    """Return whether the current scene has any outdated content"""
    """Backwards compatibility: old containers to use new ReferenceLoader"""

    host = api.registered_host()
    for container in host.ls():
@@ -3150,7 +2794,7 @@ class RenderSetupListObserver:
        cmds.delete(render_layer_set_name)


class RenderSetupItemObserver():
class RenderSetupItemObserver:
    """Handle changes in render setup items."""

    def __init__(self, item):
@@ -3386,7 +3030,7 @@ def set_colorspace():
@contextlib.contextmanager
def root_parent(nodes):
    # type: (list) -> list
    """Context manager to un-parent provided nodes and return then back."""
    """Context manager to un-parent provided nodes and return them back."""
    import pymel.core as pm  # noqa

    node_parents = []
@@ -1,924 +0,0 @@
[
    {
        "type": "action",
        "command": "$OPENPYPE_SCRIPTS\\others\\save_scene_incremental.py",
        "sourcetype": "file",
        "title": "# Version Up",
        "tooltip": "Incremental save with a specific format"
    },
    {
        "type": "action",
        "command": "$OPENPYPE_SCRIPTS\\others\\open_current_folder.py",
        "sourcetype": "file",
        "title": "Open working folder..",
        "tooltip": "Show current scene in Explorer"
    },
    {
        "type": "action",
        "command": "$OPENPYPE_SCRIPTS\\avalon\\launch_manager.py",
        "sourcetype": "file",
        "title": "# Project Manager",
        "tooltip": "Add assets to the project"
    },
    {
        "type": "action",
        "command": "from openpype.tools.assetcreator import app as assetcreator; assetcreator.show(context='maya')",
        "sourcetype": "python",
        "title": "Asset Creator",
        "tooltip": "Open the Asset Creator"
    },
    {
        "type": "separator"
    },
    {
        "type": "menu",
        "title": "Modeling",
        "items": [
            {
                "type": "action",
                "command": "import easyTreezSource; reload(easyTreezSource); easyTreezSource.easyTreez()",
                "sourcetype": "python",
                "tags": ["modeling", "trees", "generate", "create", "plants"],
                "title": "EasyTreez",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\modeling\\separateMeshPerShader.py",
                "sourcetype": "file",
                "tags": ["modeling", "separateMeshPerShader"],
                "title": "# Separate Mesh Per Shader",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\modeling\\polyDetachSeparate.py",
                "sourcetype": "file",
                "tags": ["modeling", "poly", "detach", "separate"],
                "title": "# Polygon Detach and Separate",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\modeling\\polySelectEveryNthEdgeUI.py",
                "sourcetype": "file",
                "tags": ["modeling", "select", "nth", "edge", "ui"],
                "title": "# Select Every Nth Edge"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\modeling\\djPFXUVs.py",
                "sourcetype": "file",
                "tags": ["modeling", "djPFX", "UVs"],
                "title": "# dj PFX UVs",
                "tooltip": ""
            }
        ]
    },
    {
        "type": "menu",
        "title": "Rigging",
        "items": [
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\rigging\\advancedSkeleton.py",
                "sourcetype": "file",
                "tags": [
                    "rigging",
                    "autorigger",
                    "advanced",
                    "skeleton",
                    "advancedskeleton",
                    "file"
                ],
                "title": "Advanced Skeleton"
            }
        ]
    },
    {
        "type": "menu",
        "title": "Shading",
        "items": [
            {
                "type": "menu",
                "title": "# VRay",
                "items": [
                    {
                        "type": "action",
                        "title": "# Import Proxies",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\vrayImportProxies.py",
                        "sourcetype": "file",
                        "tags": ["shading", "vray", "import", "proxies"],
                        "tooltip": ""
                    },
                    {
                        "type": "separator"
                    },
                    {
                        "type": "action",
                        "title": "# Select All GES",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\selectAllGES.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "select All GES"]
                    },
                    {
                        "type": "action",
                        "title": "# Select All GES Under Selection",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\selectAllGESUnderSelection.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "select", "all", "GES"]
                    },
                    {
                        "type": "separator"
                    },
                    {
                        "type": "action",
                        "title": "# Selection To VRay Mesh",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\selectionToVrayMesh.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "selection", "vraymesh"]
                    },
                    {
                        "type": "action",
                        "title": "# Add VRay Round Edges Attribute",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\addVrayRoundEdgesAttribute.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "round edges", "attribute"]
                    },
                    {
                        "type": "action",
                        "title": "# Add Gamma",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\vrayAddGamma.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "add gamma"]
                    },
                    {
                        "type": "separator"
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\select_vraymesh_materials_with_unconnected_shader_slots.py",
                        "sourcetype": "file",
                        "title": "# Select Unconnected Shader Materials",
                        "tags": [
                            "shading",
                            "vray",
                            "select",
                            "vraymesh",
                            "materials",
                            "unconnected shader slots"
                        ],
                        "tooltip": ""
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\vrayMergeSimilarVRayMeshMaterials.py",
                        "sourcetype": "file",
                        "title": "# Merge Similar VRay Mesh Materials",
                        "tags": [
                            "shading",
                            "vray",
                            "Merge",
                            "VRayMesh",
                            "Materials"
                        ],
                        "tooltip": ""
                    },
                    {
                        "type": "action",
                        "title": "# Create Two Sided Material",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\vrayCreate2SidedMtlForSelectedMtlRenamed.py",
                        "sourcetype": "file",
                        "tooltip": "Creates two sided material for selected material and renames it",
                        "tags": ["shading", "vray", "two sided", "material"]
                    },
                    {
                        "type": "action",
                        "title": "# Create Two Sided Material For Selected",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\vrayCreate2SidedMtlForSelectedMtl.py",
                        "sourcetype": "file",
                        "tooltip": "Select material to create a two sided version from it",
                        "tags": [
                            "shading",
                            "vray",
                            "Create2SidedMtlForSelectedMtl.py"
                        ]
                    },
                    {
                        "type": "separator"
                    },
                    {
                        "type": "action",
                        "title": "# Add OpenSubdiv Attribute",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\addVrayOpenSubdivAttribute.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": [
                            "shading",
                            "vray",
                            "add",
                            "open subdiv",
                            "attribute"
                        ]
                    },
                    {
                        "type": "action",
                        "title": "# Remove OpenSubdiv Attribute",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\removeVrayOpenSubdivAttribute.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": [
                            "shading",
                            "vray",
                            "remove",
                            "opensubdiv",
                            "attributee"
                        ]
                    },
                    {
                        "type": "separator"
                    },
                    {
                        "type": "action",
                        "title": "# Add Subdivision Attribute",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\addVraySubdivisionAttribute.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": [
                            "shading",
                            "vray",
                            "addVraySubdivisionAttribute"
                        ]
                    },
                    {
                        "type": "action",
                        "title": "# Remove Subdivision Attribute.py",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\removeVraySubdivisionAttribute.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": [
                            "shading",
                            "vray",
                            "remove",
                            "subdivision",
                            "attribute"
                        ]
                    },
                    {
                        "type": "separator"
                    },
                    {
                        "type": "action",
                        "title": "# Add Vray Object Ids",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\addVrayObjectIds.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "add", "object id"]
                    },
                    {
                        "type": "action",
                        "title": "# Add Vray Material Ids",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\addVrayMaterialIds.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "addVrayMaterialIds.py"]
                    },
                    {
                        "type": "separator"
                    },
                    {
                        "type": "action",
                        "title": "# Set Physical DOF Depth",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\vrayPhysicalDOFSetDepth.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "physical", "DOF ", "Depth"]
                    },
                    {
                        "type": "action",
                        "title": "# Magic Vray Proxy UI",
                        "command": "$OPENPYPE_SCRIPTS\\shading\\vray\\magicVrayProxyUI.py",
                        "sourcetype": "file",
                        "tooltip": "",
                        "tags": ["shading", "vray", "magicVrayProxyUI"]
                    }
                ]
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\pyblish\\lighting\\set_filename_prefix.py",
                "sourcetype": "file",
                "tags": [
                    "shading",
                    "lookdev",
                    "assign",
                    "shaders",
                    "prefix",
                    "filename",
                    "render"
                ],
                "title": "# Set filename prefix",
                "tooltip": "Set the render file name prefix."
            },
            {
                "type": "action",
                "command": "import mayalookassigner; mayalookassigner.show()",
                "sourcetype": "python",
                "tags": ["shading", "look", "assign", "shaders", "auto"],
                "title": "Look Manager",
                "tooltip": "Open the Look Manager UI for look assignment"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\LightLinkUi.py",
                "sourcetype": "file",
                "tags": ["shading", "light", "link", "ui"],
                "title": "# Light Link UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\vdviewer_ui.py",
                "sourcetype": "file",
                "tags": [
                    "shading",
                    "look",
                    "vray",
                    "displacement",
                    "shaders",
                    "auto"
                ],
                "title": "# VRay Displ Viewer",
                "tooltip": "Open the VRay Displacement Viewer, select and control the content of the set"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\setTexturePreviewToCLRImage.py",
                "sourcetype": "file",
                "tags": ["shading", "CLRImage", "textures", "preview"],
                "title": "# Set Texture Preview To CLRImage",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\fixDefaultShaderSetBehavior.py",
                "sourcetype": "file",
                "tags": ["shading", "fix", "DefaultShaderSet", "Behavior"],
                "title": "# Fix Default Shader Set Behavior",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\fixSelectedShapesReferenceAssignments.py",
                "sourcetype": "file",
                "tags": [
                    "shading",
                    "fix",
                    "Selected",
                    "Shapes",
                    "Reference",
                    "Assignments"
                ],
                "title": "# Fix Shapes Reference Assignments",
                "tooltip": "Select shapes to fix the reference assignments"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\selectLambert1Members.py",
                "sourcetype": "file",
                "tags": ["shading", "selectLambert1Members"],
                "title": "# Select Lambert1 Members",
                "tooltip": "Selects all objects which have the Lambert1 shader assigned"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\selectShapesWithoutShader.py",
                "sourcetype": "file",
                "tags": ["shading", "selectShapesWithoutShader"],
                "title": "# Select Shapes Without Shader",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\fixRenderLayerOutAdjustmentErrors.py",
                "sourcetype": "file",
                "tags": ["shading", "fixRenderLayerOutAdjustmentErrors"],
                "title": "# Fix RenderLayer Out Adjustment Errors",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\shading\\fix_renderlayer_missing_node_override.py",
                "sourcetype": "file",
                "tags": [
                    "shading",
                    "renderlayer",
                    "missing",
                    "reference",
                    "switch",
                    "layer"
                ],
                "title": "# Fix RenderLayer Missing Referenced Nodes Overrides",
                "tooltip": ""
            },
            {
                "type": "action",
                "title": "# Image 2 Tiled EXR",
                "command": "$OPENPYPE_SCRIPTS\\shading\\open_img2exr.py",
                "sourcetype": "file",
                "tooltip": "",
                "tags": ["shading", "vray", "exr"]
            }
        ]
    },
    {
        "type": "menu",
        "title": "# Rendering",
        "items": [
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\pyblish\\open_deadline_submission_settings.py",
                "sourcetype": "file",
                "tags": ["settings", "deadline", "globals", "render"],
                "title": "# DL Submission Settings UI",
                "tooltip": "Open the Deadline Submission Settings UI"
            }
        ]
    },
    {
        "type": "menu",
        "title": "Animation",
        "items": [
            {
                "type": "menu",
                "title": "# Attributes",
                "tooltip": "",
                "items": [
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\attributes\\copyValues.py",
                        "sourcetype": "file",
                        "tags": ["animation", "copy", "attributes"],
                        "title": "# Copy Values",
                        "tooltip": "Copy attribute values"
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\attributes\\copyInConnections.py",
                        "sourcetype": "file",
                        "tags": [
                            "animation",
                            "copy",
                            "attributes",
                            "connections",
                            "incoming"
                        ],
                        "title": "# Copy In Connections",
                        "tooltip": "Copy incoming connections"
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\attributes\\copyOutConnections.py",
                        "sourcetype": "file",
                        "tags": [
                            "animation",
                            "copy",
                            "attributes",
                            "connections",
                            "out"
                        ],
                        "title": "# Copy Out Connections",
                        "tooltip": "Copy outgoing connections"
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\attributes\\copyTransformLocal.py",
                        "sourcetype": "file",
                        "tags": [
                            "animation",
                            "copy",
                            "attributes",
                            "transforms",
                            "local"
                        ],
                        "title": "# Copy Local Transforms",
                        "tooltip": "Copy local transforms"
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\attributes\\copyTransformMatrix.py",
                        "sourcetype": "file",
                        "tags": [
                            "animation",
                            "copy",
                            "attributes",
                            "transforms",
                            "matrix"
                        ],
                        "title": "# Copy Matrix Transforms",
                        "tooltip": "Copy Matrix transforms"
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\attributes\\copyTransformUI.py",
                        "sourcetype": "file",
                        "tags": [
                            "animation",
                            "copy",
                            "attributes",
                            "transforms",
                            "UI"
                        ],
                        "title": "# Copy Transforms UI",
                        "tooltip": "Open the Copy Transforms UI"
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\attributes\\simpleCopyUI.py",
                        "sourcetype": "file",
                        "tags": [
                            "animation",
                            "copy",
                            "attributes",
                            "transforms",
                            "UI",
                            "simple"
                        ],
                        "title": "# Simple Copy UI",
                        "tooltip": "Open the simple Copy Transforms UI"
                    }
                ]
            },
            {
                "type": "menu",
                "title": "# Optimize",
                "tooltip": "Optimization scripts",
                "items": [
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\optimize\\toggleFreezeHierarchy.py",
                        "sourcetype": "file",
                        "tags": ["animation", "hierarchy", "toggle", "freeze"],
                        "title": "# Toggle Freeze Hierarchy",
                        "tooltip": "Freeze and unfreeze hierarchy"
                    },
                    {
                        "type": "action",
                        "command": "$OPENPYPE_SCRIPTS\\animation\\optimize\\toggleParallelNucleus.py",
                        "sourcetype": "file",
                        "tags": ["animation", "nucleus", "toggle", "parallel"],
                        "title": "# Toggle Parallel Nucleus",
                        "tooltip": "Toggle parallel nucleus"
                    }
                ]
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\bakeSelectedToWorldSpace.py",
                "tags": ["animation", "bake", "selection", "worldspace.py"],
                "title": "# Bake Selected To Worldspace",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\timeStepper.py",
                "tags": ["animation", "time", "stepper"],
                "title": "# Time Stepper",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\capture_ui.py",
                "tags": [
                    "animation",
                    "capture",
                    "ui",
                    "screen",
                    "movie",
                    "image"
                ],
                "title": "# Capture UI",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\simplePlayblastUI.py",
                "tags": ["animation", "simple", "playblast", "ui"],
                "title": "# Simple Playblast UI",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\tweenMachineUI.py",
                "tags": ["animation", "tween", "machine"],
                "title": "# Tween Machine UI",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\selectAllAnimationCurves.py",
                "tags": ["animation", "select", "curves"],
                "title": "# Select All Animation Curves",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\pathAnimation.py",
                "tags": ["animation", "path", "along"],
                "title": "# Path Animation",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\offsetSelectedObjectsUI.py",
                "tags": ["animation", "offsetSelectedObjectsUI.py"],
                "title": "# Offset Selected Objects UI",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\key_amplifier_ui.py",
                "tags": ["animation", "key", "amplifier"],
                "title": "# Key Amplifier UI",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\anim_scene_optimizer.py",
                "tags": ["animation", "anim_scene_optimizer.py"],
                "title": "# Anim_Scene_Optimizer",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\zvParentMaster.py",
                "tags": ["animation", "zvParentMaster.py"],
                "title": "# ZV Parent Master",
                "type": "action"
            },
            {
                "sourcetype": "file",
                "command": "$OPENPYPE_SCRIPTS\\animation\\animLibrary.py",
                "tags": ["animation", "studiolibrary.py"],
                "title": "Anim Library",
                "type": "action"
            }
        ]
    },
    {
        "type": "menu",
        "title": "# Layout",
        "items": [
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\alignDistributeUI.py",
                "sourcetype": "file",
                "tags": ["layout", "align", "Distribute", "UI"],
                "title": "# Align Distribute UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\alignSimpleUI.py",
                "sourcetype": "file",
                "tags": ["layout", "align", "UI", "Simple"],
                "title": "# Align Simple UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\center_locator.py",
                "sourcetype": "file",
                "tags": ["layout", "center", "locator"],
                "title": "# Center Locator",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\average_locator.py",
                "sourcetype": "file",
                "tags": ["layout", "average", "locator"],
                "title": "# Average Locator",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\selectWithinProximityUI.py",
                "sourcetype": "file",
                "tags": ["layout", "select", "proximity", "ui"],
                "title": "# Select Within Proximity UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\dupCurveUI.py",
                "sourcetype": "file",
                "tags": ["layout", "Duplicate", "Curve", "UI"],
                "title": "# Duplicate Curve UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\randomDeselectUI.py",
                "sourcetype": "file",
                "tags": ["layout", "random", "Deselect", "UI"],
                "title": "# Random Deselect UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\multiReferencerUI.py",
                "sourcetype": "file",
                "tags": ["layout", "multi", "reference"],
                "title": "# Multi Referencer UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\duplicateOffsetUI.py",
                "sourcetype": "file",
                "tags": ["layout", "duplicate", "offset", "UI"],
                "title": "# Duplicate Offset UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\spPaint3d.py",
                "sourcetype": "file",
                "tags": ["layout", "spPaint3d", "paint", "tool"],
                "title": "# SP Paint 3d",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\randomizeUI.py",
                "sourcetype": "file",
                "tags": ["layout", "randomize", "UI"],
                "title": "# Randomize UI",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\layout\\distributeWithinObjectUI.py",
                "sourcetype": "file",
                "tags": ["layout", "distribute", "ObjectUI", "within"],
                "title": "# Distribute Within Object UI",
                "tooltip": ""
            }
        ]
    },
    {
        "type": "menu",
        "title": "# Particles",
        "items": [
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\particles\\instancerToObjects.py",
                "sourcetype": "file",
                "tags": ["particles", "instancerToObjects"],
                "title": "# Instancer To Objects",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\particles\\instancerToObjectsInstances.py",
                "sourcetype": "file",
                "tags": ["particles", "instancerToObjectsInstances"],
                "title": "# Instancer To Objects Instances",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\particles\\instancerToObjectsInstancesWithAnimation.py",
                "sourcetype": "file",
                "tags": [
                    "particles",
                    "instancerToObjectsInstancesWithAnimation"
                ],
                "title": "# Instancer To Objects Instances With Animation",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\particles\\instancerToObjectsWithAnimation.py",
                "sourcetype": "file",
                "tags": ["particles", "instancerToObjectsWithAnimation"],
                "title": "# Instancer To Objects With Animation",
                "tooltip": ""
            }
        ]
    },
    {
        "type": "menu",
        "title": "Cleanup",
        "items": [
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\repair_faulty_containers.py",
                "sourcetype": "file",
                "tags": ["cleanup", "repair", "containers"],
                "title": "# Find and Repair Containers",
                "tooltip": ""
            },
            {
                "type": "separator"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\removeNamespaces.py",
                "sourcetype": "file",
                "tags": ["cleanup", "remove", "namespaces"],
                "title": "# Remove Namespaces",
                "tooltip": "Remove all namespaces"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\remove_user_defined_attributes.py",
                "sourcetype": "file",
                "tags": ["cleanup", "remove_user_defined_attributes"],
                "title": "# Remove User Defined Attributes",
                "tooltip": "Remove all user-defined attributes from all nodes"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\removeUnknownNodes.py",
                "sourcetype": "file",
                "tags": ["cleanup", "removeUnknownNodes"],
                "title": "# Remove Unknown Nodes",
                "tooltip": "Remove all unknown nodes"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\removeUnloadedReferences.py",
                "sourcetype": "file",
                "tags": ["cleanup", "removeUnloadedReferences"],
                "title": "# Remove Unloaded References",
                "tooltip": "Remove all unloaded references"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\removeReferencesFailedEdits.py",
                "sourcetype": "file",
                "tags": ["cleanup", "removeReferencesFailedEdits"],
                "title": "# Remove References Failed Edits",
                "tooltip": "Remove failed edits for all references"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\remove_unused_looks.py",
                "sourcetype": "file",
                "tags": ["cleanup", "removeUnusedLooks"],
                "title": "# Remove Unused Looks",
                "tooltip": "Remove all loaded yet unused Avalon look containers"
            },
            {
                "type": "separator"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\uniqifyNodeNames.py",
                "sourcetype": "file",
                "tags": ["cleanup", "uniqifyNodeNames"],
                "title": "# Uniqify Node Names",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\autoRenameFileNodes.py",
                "sourcetype": "file",
                "tags": ["cleanup", "auto", "rename", "filenodes"],
                "title": "# Auto Rename File Nodes",
                "tooltip": ""
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\update_asset_id.py",
                "sourcetype": "file",
                "tags": ["cleanup", "update", "database", "asset", "id"],
                "title": "# Update Asset ID",
                "tooltip": "Will replace the Colorbleed ID with a new one (asset ID : Unique number)"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\ccRenameReplace.py",
                "sourcetype": "file",
                "tags": ["cleanup", "rename", "ui"],
                "title": "Renamer",
                "tooltip": "Rename UI"
            },
            {
                "type": "action",
                "command": "$OPENPYPE_SCRIPTS\\cleanup\\renameShapesToTransform.py",
                "sourcetype": "file",
                "tags": ["cleanup", "renameShapesToTransform"],
                "title": "# Rename Shapes To Transform",
                "tooltip": ""
            }
        ]
    }
]
File diff suppressed because it is too large
@ -184,76 +184,6 @@ def uninstall():
    menu.uninstall()


def lock():
    """Lock scene

    Add an invisible node to your Maya scene with the name of the
    current file, indicating that this file is "locked" and cannot
    be modified any further.

    """

    if not cmds.objExists("lock"):
        with lib.maintained_selection():
            cmds.createNode("objectSet", name="lock")
            cmds.addAttr("lock", ln="basename", dataType="string")

            # Permanently hide from outliner
            cmds.setAttr("lock.verticesOnlySet", True)

    fname = cmds.file(query=True, sceneName=True)
    basename = os.path.basename(fname)
    cmds.setAttr("lock.basename", basename, type="string")


def unlock():
    """Permanently unlock a locked scene

    Doesn't throw an error if scene is already unlocked.

    """

    try:
        cmds.delete("lock")
    except ValueError:
        pass


def is_locked():
    """Query whether current scene is locked"""
    fname = cmds.file(query=True, sceneName=True)
    basename = os.path.basename(fname)

    if self._ignore_lock:
        return False

    try:
        return cmds.getAttr("lock.basename") == basename
    except ValueError:
        return False


@contextlib.contextmanager
def lock_ignored():
    """Context manager for temporarily ignoring the lock of a scene

    The purpose of this function is to enable locking a scene and
    saving it with the lock still in place.

    Example:
        >>> with lock_ignored():
        ...     pass  # Do things without lock

    """

    self._ignore_lock = True

    try:
        yield
    finally:
        self._ignore_lock = False


def parse_container(container):
    """Return the container node's full container data.
@ -344,6 +344,28 @@ class PhotoshopServerStub:
            )
        )

    def hide_all_others_layers(self, layers):
        """hides all layers that are not part of the list or that are not
        children of this list

        Args:
            layers (list): list of PSItem - highest hierarchy
        """
        extract_ids = set([ll.id for ll in self.get_layers_in_layers(layers)])

        self.hide_all_others_layers_ids(extract_ids)

    def hide_all_others_layers_ids(self, extract_ids):
        """hides all layers that are not part of the list or that are not
        children of this list

        Args:
            extract_ids (list): list of integer that should be visible
        """
        for layer in self.get_layers():
            if layer.visible and layer.id not in extract_ids:
                self.set_visible(layer.id, False)

    def get_layers_metadata(self):
        """Reads layers metadata from Headline from active document in PS.
        (Headline accessible by File > File Info)
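For orientation, a minimal sketch of how these new stub helpers might be used from an extractor, assuming a connected Photoshop session; the layer-name filter is illustrative (only `id` and `visible` attributes are shown in the diff above):

```python
from openpype.hosts.photoshop import api as photoshop

stub = photoshop.stub()

# Keep only the chosen layers (and their children) visible while saving.
# Filtering by a hypothetical "beauty" name is just for the example.
published_layers = [
    layer for layer in stub.get_layers()
    if getattr(layer, "name", "") == "beauty"
]
with photoshop.maintained_visibility():
    stub.hide_all_others_layers(published_layers)
    stub.saveAs("/tmp/beauty.jpg", "jpg", True)
```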
@ -26,7 +26,6 @@ class ExtractImage(openpype.api.Extractor):
        with photoshop.maintained_selection():
            self.log.info("Extracting %s" % str(list(instance)))
            with photoshop.maintained_visibility():
                # Hide all other layers.
                layer = instance.data.get("layer")
                ids = set([layer.id])
                add_ids = instance.data.pop("ids", None)

@ -34,11 +33,7 @@ class ExtractImage(openpype.api.Extractor):
                    ids.update(set(add_ids))
                extract_ids = set([ll.id for ll in stub.
                                   get_layers_in_layers_ids(ids)])

                for layer in stub.get_layers():
                    # limit unnecessary calls to client
                    if layer.visible and layer.id not in extract_ids:
                        stub.set_visible(layer.id, False)
                stub.hide_all_others_layers_ids(extract_ids)

                file_basename = os.path.splitext(
                    stub.get_active_document_name()
@ -1,4 +1,5 @@
import os
import shutil

import openpype.api
import openpype.lib

@ -7,7 +8,7 @@ from openpype.hosts.photoshop import api as photoshop

class ExtractReview(openpype.api.Extractor):
    """
    Produce a flattened image file from all 'image' instances.
    Produce a flattened or sequence image file from all 'image' instances.

    If no 'image' instance is created, it produces flattened image from
    all visible layers.

@ -20,54 +21,58 @@ class ExtractReview(openpype.api.Extractor):
    # Extract Options
    jpg_options = None
    mov_options = None
    make_image_sequence = None

    def process(self, instance):
        staging_dir = self.staging_dir(instance)
        self.log.info("Outputting image to {}".format(staging_dir))

        fps = instance.data.get("fps", 25)
        stub = photoshop.stub()
        self.output_seq_filename = os.path.splitext(
            stub.get_active_document_name())[0] + ".%04d.jpg"

        layers = []
        for image_instance in instance.context:
            if image_instance.data["family"] != "image":
                continue
            layers.append(image_instance.data.get("layer"))
        layers = self._get_layers_from_image_instances(instance)
        self.log.info("Layers image instance found: {}".format(layers))

        # Perform extraction
        output_image = "{}.jpg".format(
            os.path.splitext(stub.get_active_document_name())[0]
        )
        output_image_path = os.path.join(staging_dir, output_image)
        with photoshop.maintained_visibility():
            if layers:
                # Hide all other layers.
                extract_ids = set([ll.id for ll in stub.
                                   get_layers_in_layers(layers)])
                self.log.debug("extract_ids {}".format(extract_ids))
                for layer in stub.get_layers():
                    # limit unnecessary calls to client
                    if layer.visible and layer.id not in extract_ids:
                        stub.set_visible(layer.id, False)
        if self.make_image_sequence and len(layers) > 1:
            self.log.info("Extract layers to image sequence.")
            img_list = self._saves_sequences_layers(staging_dir, layers)

            stub.saveAs(output_image_path, 'jpg', True)
            instance.data["representations"].append({
                "name": "jpg",
                "ext": "jpg",
                "files": img_list,
                "frameStart": 0,
                "frameEnd": len(img_list),
                "fps": fps,
                "stagingDir": staging_dir,
                "tags": self.jpg_options['tags'],
            })

        else:
            self.log.info("Extract layers to flatten image.")
            img_list = self._saves_flattened_layers(staging_dir, layers)

            instance.data["representations"].append({
                "name": "jpg",
                "ext": "jpg",
                "files": img_list,
                "stagingDir": staging_dir,
                "tags": self.jpg_options['tags']
            })

        ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")

        instance.data["representations"].append({
            "name": "jpg",
            "ext": "jpg",
            "files": output_image,
            "stagingDir": staging_dir,
            "tags": self.jpg_options['tags']
        })
        instance.data["stagingDir"] = staging_dir

        # Generate thumbnail.
        thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
        self.log.info(f"Generate thumbnail {thumbnail_path}")
        args = [
            ffmpeg_path,
            "-y",
            "-i", output_image_path,
            "-i", os.path.join(staging_dir, self.output_seq_filename),
            "-vf", "scale=300:-1",
            "-vframes", "1",
            thumbnail_path

@ -81,14 +86,17 @@ class ExtractReview(openpype.api.Extractor):
            "stagingDir": staging_dir,
            "tags": ["thumbnail"]
        })

        # Generate mov.
        mov_path = os.path.join(staging_dir, "review.mov")
        self.log.info(f"Generate mov review: {mov_path}")
        img_number = len(img_list)
        args = [
            ffmpeg_path,
            "-y",
            "-i", output_image_path,
            "-i", os.path.join(staging_dir, self.output_seq_filename),
            "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2",
            "-vframes", "1",
            "-vframes", str(img_number),
            mov_path
        ]
        output = openpype.lib.run_subprocess(args)

@ -99,15 +107,86 @@ class ExtractReview(openpype.api.Extractor):
            "files": os.path.basename(mov_path),
            "stagingDir": staging_dir,
            "frameStart": 1,
            "frameEnd": 1,
            "fps": 25,
            "frameEnd": img_number,
            "fps": fps,
            "preview": True,
            "tags": self.mov_options['tags']
        })

        # Required for extract_review plugin (L222 onwards).
        instance.data["frameStart"] = 1
        instance.data["frameEnd"] = 1
        instance.data["frameEnd"] = img_number
        instance.data["fps"] = 25

        self.log.info(f"Extracted {instance} to {staging_dir}")

    def _get_image_path_from_instances(self, instance):
        img_list = []

        for instance in sorted(instance.context):
            if instance.data["family"] != "image":
                continue

            for rep in instance.data["representations"]:
                img_path = os.path.join(
                    rep["stagingDir"],
                    rep["files"]
                )
                img_list.append(img_path)

        return img_list

    def _copy_image_to_staging_dir(self, staging_dir, img_list):
        copy_files = []
        for i, img_src in enumerate(img_list):
            img_filename = self.output_seq_filename % i
            img_dst = os.path.join(staging_dir, img_filename)

            self.log.debug(
                "Copying file .. {} -> {}".format(img_src, img_dst)
            )
            shutil.copy(img_src, img_dst)
            copy_files.append(img_filename)

        return copy_files

    def _get_layers_from_image_instances(self, instance):
        layers = []
        for image_instance in instance.context:
            if image_instance.data["family"] != "image":
                continue
            layers.append(image_instance.data.get("layer"))

        return sorted(layers)

    def _saves_flattened_layers(self, staging_dir, layers):
        img_filename = self.output_seq_filename % 0
        output_image_path = os.path.join(staging_dir, img_filename)
        stub = photoshop.stub()

        with photoshop.maintained_visibility():
            self.log.info("Extracting {}".format(layers))
            if layers:
                stub.hide_all_others_layers(layers)

            stub.saveAs(output_image_path, 'jpg', True)

        return img_filename

    def _saves_sequences_layers(self, staging_dir, layers):
        stub = photoshop.stub()

        list_img_filename = []
        with photoshop.maintained_visibility():
            for i, layer in enumerate(layers):
                self.log.info("Extracting {}".format(layer))

                img_filename = self.output_seq_filename % i
                output_image_path = os.path.join(staging_dir, img_filename)
                list_img_filename.append(img_filename)

                with photoshop.maintained_visibility():
                    stub.hide_all_others_layers([layer])
                    stub.saveAs(output_image_path, 'jpg', True)

        return list_img_filename
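The review pass above feeds a `%04d`-numbered JPEG sequence into ffmpeg twice: once for a one-frame thumbnail and once for the full-length mov. A standalone sketch of the second call, with made-up paths (the pattern matches `output_seq_filename` in the diff):

```python
import subprocess

ffmpeg_path = "ffmpeg"  # assumed to be on PATH for this sketch
staging_dir = "/tmp/staging"
img_number = 24  # number of rendered JPEG frames

args = [
    ffmpeg_path,
    "-y",
    "-i", f"{staging_dir}/document.%04d.jpg",
    # Pad to even dimensions; many codecs reject odd widths/heights.
    "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2",
    "-vframes", str(img_number),
    f"{staging_dir}/review.mov",
]
subprocess.run(args, check=True)
```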
@ -70,9 +70,9 @@ def get_resolve_module():
        sys.exit()
    # assign global var and return
    bmdvr = bmd.scriptapp("Resolve")
    # bmdvf = bmd.scriptapp("Fusion")
    bmdvf = bmd.scriptapp("Fusion")
    resolve.api.bmdvr = bmdvr
    resolve.api.bmdvf = bmdvr.Fusion()
    resolve.api.bmdvf = bmdvf
    log.info(("Assigning resolve module to "
              f"`pype.hosts.resolve.api.bmdvr`: {resolve.api.bmdvr}"))
    log.info(("Assigning resolve module to "
@ -8,7 +8,7 @@
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "myVariant",
        "uuid": "a485f148-9121-46a5-8157-aa64df0fb449",
        "instance_id": "a485f148-9121-46a5-8157-aa64df0fb449",
        "creator_attributes": {
            "number_key": 10,
            "ha": 10

@ -29,8 +29,8 @@
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "myVariant2",
        "uuid": "a485f148-9121-46a5-8157-aa64df0fb444",
        "creator_attributes": {},
        "instance_id": "a485f148-9121-46a5-8157-aa64df0fb444",
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true

@ -47,8 +47,8 @@
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "Main",
        "uuid": "3607bc95-75f6-4648-a58d-e699f413d09f",
        "creator_attributes": {},
        "instance_id": "3607bc95-75f6-4648-a58d-e699f413d09f",
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true

@ -65,7 +65,7 @@
        "asset": "sq01_sh0020",
        "task": "Compositing",
        "variant": "Main2",
        "uuid": "4ccf56f6-9982-4837-967c-a49695dbe8eb",
        "instance_id": "4ccf56f6-9982-4837-967c-a49695dbe8eb",
        "creator_attributes": {},
        "publish_attributes": {
            "CollectFtrackApi": {

@ -83,7 +83,7 @@
        "asset": "sq01_sh0020",
        "task": "Compositing",
        "variant": "Main2",
        "uuid": "4ccf56f6-9982-4837-967c-a49695dbe8ec",
        "instance_id": "4ccf56f6-9982-4837-967c-a49695dbe8ec",
        "creator_attributes": {},
        "publish_attributes": {
            "CollectFtrackApi": {

@ -101,7 +101,7 @@
        "asset": "Alpaca_01",
        "task": "modeling",
        "variant": "Main",
        "uuid": "7c9ddfc7-9f9c-4c1c-b233-38c966735fb6",
        "instance_id": "7c9ddfc7-9f9c-4c1c-b233-38c966735fb6",
        "creator_attributes": {},
        "publish_attributes": {}
    }
@ -114,7 +114,7 @@ def update_instances(update_list):

    instances = HostContext.get_instances()
    for instance_data in instances:
        instance_id = instance_data["uuid"]
        instance_id = instance_data["instance_id"]
        if instance_id in updated_instances:
            new_instance_data = updated_instances[instance_id]
            old_keys = set(instance_data.keys())

@ -132,10 +132,10 @@ def remove_instances(instances):

    current_instances = HostContext.get_instances()
    for instance in instances:
        instance_id = instance.data["uuid"]
        instance_id = instance.data["instance_id"]
        found_idx = None
        for idx, _instance in enumerate(current_instances):
            if instance_id == _instance["uuid"]:
            if instance_id == _instance["instance_id"]:
                found_idx = idx
                break
20
openpype/hosts/traypublisher/api/__init__.py
Normal file
@ -0,0 +1,20 @@
from .pipeline import (
    install,
    ls,

    set_project_name,
    get_context_title,
    get_context_data,
    update_context_data,
)


__all__ = (
    "install",
    "ls",

    "set_project_name",
    "get_context_title",
    "get_context_data",
    "update_context_data",
)
180
openpype/hosts/traypublisher/api/pipeline.py
Normal file
@ -0,0 +1,180 @@
import os
import json
import tempfile
import atexit

from avalon import io
import avalon.api
import pyblish.api

from openpype.pipeline import BaseCreator

ROOT_DIR = os.path.dirname(os.path.dirname(
    os.path.abspath(__file__)
))
PUBLISH_PATH = os.path.join(ROOT_DIR, "plugins", "publish")
CREATE_PATH = os.path.join(ROOT_DIR, "plugins", "create")


class HostContext:
    _context_json_path = None

    @staticmethod
    def _on_exit():
        if (
            HostContext._context_json_path
            and os.path.exists(HostContext._context_json_path)
        ):
            os.remove(HostContext._context_json_path)

    @classmethod
    def get_context_json_path(cls):
        if cls._context_json_path is None:
            output_file = tempfile.NamedTemporaryFile(
                mode="w", prefix="traypub_", suffix=".json"
            )
            output_file.close()
            cls._context_json_path = output_file.name
            atexit.register(HostContext._on_exit)
            print(cls._context_json_path)
        return cls._context_json_path

    @classmethod
    def _get_data(cls, group=None):
        json_path = cls.get_context_json_path()
        data = {}
        if not os.path.exists(json_path):
            with open(json_path, "w") as json_stream:
                json.dump(data, json_stream)
        else:
            with open(json_path, "r") as json_stream:
                content = json_stream.read()
            if content:
                data = json.loads(content)
        if group is None:
            return data
        return data.get(group)

    @classmethod
    def _save_data(cls, group, new_data):
        json_path = cls.get_context_json_path()
        data = cls._get_data()
        data[group] = new_data
        with open(json_path, "w") as json_stream:
            json.dump(data, json_stream)

    @classmethod
    def add_instance(cls, instance):
        instances = cls.get_instances()
        instances.append(instance)
        cls.save_instances(instances)

    @classmethod
    def get_instances(cls):
        return cls._get_data("instances") or []

    @classmethod
    def save_instances(cls, instances):
        cls._save_data("instances", instances)

    @classmethod
    def get_context_data(cls):
        return cls._get_data("context") or {}

    @classmethod
    def save_context_data(cls, data):
        cls._save_data("context", data)

    @classmethod
    def get_project_name(cls):
        return cls._get_data("project_name")

    @classmethod
    def set_project_name(cls, project_name):
        cls._save_data("project_name", project_name)

    @classmethod
    def get_data_to_store(cls):
        return {
            "project_name": cls.get_project_name(),
            "instances": cls.get_instances(),
            "context": cls.get_context_data(),
        }


def list_instances():
    return HostContext.get_instances()


def update_instances(update_list):
    updated_instances = {}
    for instance, _changes in update_list:
        updated_instances[instance.id] = instance.data_to_store()

    instances = HostContext.get_instances()
    for instance_data in instances:
        instance_id = instance_data["instance_id"]
        if instance_id in updated_instances:
            new_instance_data = updated_instances[instance_id]
            old_keys = set(instance_data.keys())
            new_keys = set(new_instance_data.keys())
            instance_data.update(new_instance_data)
            for key in (old_keys - new_keys):
                instance_data.pop(key)

    HostContext.save_instances(instances)


def remove_instances(instances):
    if not isinstance(instances, (tuple, list)):
        instances = [instances]

    current_instances = HostContext.get_instances()
    for instance in instances:
        instance_id = instance.data["instance_id"]
        found_idx = None
        for idx, _instance in enumerate(current_instances):
            if instance_id == _instance["instance_id"]:
                found_idx = idx
                break

        if found_idx is not None:
            current_instances.pop(found_idx)
    HostContext.save_instances(current_instances)


def get_context_data():
    return HostContext.get_context_data()


def update_context_data(data, changes):
    HostContext.save_context_data(data)


def get_context_title():
    return HostContext.get_project_name()


def ls():
    """Probably will never return loaded containers."""
    return []


def install():
    """This is called before a project is known.

    Project is defined with 'set_project_name'.
    """
    os.environ["AVALON_APP"] = "traypublisher"

    pyblish.api.register_host("traypublisher")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    avalon.api.register_plugin_path(BaseCreator, CREATE_PATH)


def set_project_name(project_name):
    # TODO Deregister project specific plugins and register new project plugins
    os.environ["AVALON_PROJECT"] = project_name
    avalon.api.Session["AVALON_PROJECT"] = project_name
    io.install()
    HostContext.set_project_name(project_name)
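A minimal sketch of the round-trip this file implements: instance dicts are persisted into a throwaway JSON file keyed by group ("instances", "context", "project_name"), so any later step in the same process can read them back. The id and family values here are illustrative:

```python
from openpype.hosts.traypublisher.api import pipeline

# Store one instance; HostContext serializes it into its temp JSON file.
pipeline.HostContext.add_instance({
    "instance_id": "0000-1111",  # illustrative id
    "family": "workfile",
    "subset": "workfileMain",
})

# Any later call in the same process reads it back from the same file.
for instance_data in pipeline.list_instances():
    print(instance_data["instance_id"], instance_data["family"])
```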
@ -0,0 +1,97 @@
from openpype.hosts.traypublisher.api import pipeline
from openpype.pipeline import (
    Creator,
    CreatedInstance,
    lib
)


class WorkfileCreator(Creator):
    identifier = "workfile"
    label = "Workfile"
    family = "workfile"
    description = "Publish backup of workfile"

    create_allow_context_change = True

    extensions = [
        # Maya
        ".ma", ".mb",
        # Nuke
        ".nk",
        # Hiero
        ".hrox",
        # Houdini
        ".hip", ".hiplc", ".hipnc",
        # Blender
        ".blend",
        # Celaction
        ".scn",
        # TVPaint
        ".tvpp",
        # Fusion
        ".comp",
        # Harmony
        ".zip",
        # Premiere
        ".prproj",
        # Resolve
        ".drp",
        # Photoshop
        ".psd", ".psb",
        # Aftereffects
        ".aep"
    ]

    def get_icon(self):
        return "fa.file"

    def collect_instances(self):
        for instance_data in pipeline.list_instances():
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                instance = CreatedInstance.from_existing(
                    instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        pipeline.update_instances(update_list)

    def remove_instances(self, instances):
        pipeline.remove_instances(instances)
        for instance in instances:
            self._remove_instance_from_context(instance)

    def create(self, subset_name, data, pre_create_data):
        # Pass precreate data to creator attributes
        data["creator_attributes"] = pre_create_data
        # Create new instance
        new_instance = CreatedInstance(self.family, subset_name, data, self)
        # Host implementation of storing metadata about instance
        pipeline.HostContext.add_instance(new_instance.data_to_store())
        # Add instance to current context
        self._add_instance_to_context(new_instance)

    def get_default_variants(self):
        return [
            "Main"
        ]

    def get_instance_attr_defs(self):
        output = [
            lib.FileDef(
                "filepath",
                folders=False,
                extensions=self.extensions,
                label="Filepath"
            )
        ]
        return output

    def get_pre_create_attr_defs(self):
        # Use same attributes as for instance attributes
        return self.get_instance_attr_defs()

    def get_detail_description(self):
        return """# Publish workfile backup"""
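The `FileDef` above doubles as both the pre-create and instance attribute definition, so the filepath picked in the UI lands directly in the instance data. A minimal sketch of that flow, mirroring `create()` above with illustrative values (plain dicts stand in for the UI and `CreatedInstance`):

```python
# Value a user would pick through the FileDef widget (illustrative path).
pre_create_data = {"filepath": "/projects/shot010/work/scene_v001.ma"}

data = {
    "asset": "shot010",
    "task": "animation",
    "variant": "Main",
}
# create() copies the pre-create values into creator_attributes, which
# is what CollectWorkfile later reads during publishing.
data["creator_attributes"] = pre_create_data
print(data["creator_attributes"]["filepath"])
```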
@ -0,0 +1,24 @@
import pyblish.api


class CollectSource(pyblish.api.ContextPlugin):
    """Collecting instances from traypublisher host."""

    label = "Collect source"
    order = pyblish.api.CollectorOrder - 0.49
    hosts = ["traypublisher"]

    def process(self, context):
        # get json paths from os and load them
        source_name = "traypublisher"
        for instance in context:
            source = instance.data.get("source")
            if not source:
                instance.data["source"] = source_name
                self.log.info((
                    "Source of instance \"{}\" is changed to \"{}\""
                ).format(instance.data["name"], source_name))
            else:
                self.log.info((
                    "Source of instance \"{}\" was already set to \"{}\""
                ).format(instance.data["name"], source))
|
|||
import os
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectWorkfile(pyblish.api.InstancePlugin):
|
||||
"""Collect representation of workfile instances."""
|
||||
|
||||
label = "Collect Workfile"
|
||||
order = pyblish.api.CollectorOrder - 0.49
|
||||
families = ["workfile"]
|
||||
hosts = ["traypublisher"]
|
||||
|
||||
def process(self, instance):
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
repres = instance.data["representations"]
|
||||
|
||||
creator_attributes = instance.data["creator_attributes"]
|
||||
filepath = creator_attributes["filepath"]
|
||||
instance.data["sourceFilepath"] = filepath
|
||||
|
||||
staging_dir = os.path.dirname(filepath)
|
||||
filename = os.path.basename(filepath)
|
||||
ext = os.path.splitext(filename)[-1]
|
||||
|
||||
repres.append({
|
||||
"ext": ext,
|
||||
"name": ext,
|
||||
"stagingDir": staging_dir,
|
||||
"files": filename
|
||||
})
|
||||
|
|
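A standalone sketch of the representation this collector builds, using an illustrative filepath; it mirrors the splitting logic above exactly:

```python
import os

# Illustrative input; in the plugin this comes from creator_attributes.
filepath = "/projects/shot010/work/scene_v001.ma"

staging_dir = os.path.dirname(filepath)
filename = os.path.basename(filepath)
ext = os.path.splitext(filename)[-1]

representation = {
    "ext": ext,                 # ".ma" (note: includes the dot here)
    "name": ext,
    "stagingDir": staging_dir,  # "/projects/shot010/work"
    "files": filename,          # "scene_v001.ma"
}
print(representation)
```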
@ -0,0 +1,24 @@
import os
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateWorkfilePath(pyblish.api.InstancePlugin):
    """Validate existence of a workfile instance's source file."""

    label = "Collect Workfile"
    order = pyblish.api.ValidatorOrder - 0.49
    families = ["workfile"]
    hosts = ["traypublisher"]

    def process(self, instance):
        filepath = instance.data["sourceFilepath"]
        if not filepath:
            raise PublishValidationError((
                "Filepath of 'workfile' instance \"{}\" is not set"
            ).format(instance.data["name"]))

        if not os.path.exists(filepath):
            raise PublishValidationError((
                "Filepath of 'workfile' instance \"{}\" does not exist: {}"
            ).format(instance.data["name"], filepath))
@ -29,6 +29,7 @@ from .execute import (
    get_linux_launcher_args,
    execute,
    run_subprocess,
    run_detached_process,
    run_openpype_process,
    clean_envs_for_openpype_process,
    path_to_subprocess_arg,

@ -130,7 +131,7 @@ from .applications import (
    PostLaunchHook,

    EnvironmentPrepData,
    prepare_host_environments,
    prepare_app_environments,
    prepare_context_environments,
    get_app_environments_for_context,
    apply_project_environments_value

@ -188,6 +189,7 @@ __all__ = [
    "get_linux_launcher_args",
    "execute",
    "run_subprocess",
    "run_detached_process",
    "run_openpype_process",
    "clean_envs_for_openpype_process",
    "path_to_subprocess_arg",

@ -261,7 +263,7 @@ __all__ = [
    "PreLaunchHook",
    "PostLaunchHook",
    "EnvironmentPrepData",
    "prepare_host_environments",
    "prepare_app_environments",
    "prepare_context_environments",
    "get_app_environments_for_context",
    "apply_project_environments_value",
@ -1295,7 +1295,7 @@ def get_app_environments_for_context(
        "env": env
    })

    prepare_host_environments(data, env_group)
    prepare_app_environments(data, env_group)
    prepare_context_environments(data, env_group)

    # Discard avalon connection

@ -1316,7 +1316,7 @@ def _merge_env(env, current_env):
    return result


def prepare_host_environments(data, env_group=None, implementation_envs=True):
def prepare_app_environments(data, env_group=None, implementation_envs=True):
    """Modify launch environments based on launched app and context.

    Args:

@ -1474,6 +1474,22 @@ def prepare_context_environments(data, env_group=None):
        )

    app = data["app"]
    context_env = {
        "AVALON_PROJECT": project_doc["name"],
        "AVALON_ASSET": asset_doc["name"],
        "AVALON_TASK": task_name,
        "AVALON_APP_NAME": app.full_name
    }

    log.debug(
        "Context environments set:\n{}".format(
            json.dumps(context_env, indent=4)
        )
    )
    data["env"].update(context_env)
    if not app.is_host:
        return

    workdir_data = get_workdir_data(
        project_doc, asset_doc, task_name, app.host_name
    )

@ -1504,20 +1520,8 @@ def prepare_context_environments(data, env_group=None):
            "Couldn't create workdir because: {}".format(str(exc))
        )

    context_env = {
        "AVALON_PROJECT": project_doc["name"],
        "AVALON_ASSET": asset_doc["name"],
        "AVALON_TASK": task_name,
        "AVALON_APP": app.host_name,
        "AVALON_APP_NAME": app.full_name,
        "AVALON_WORKDIR": workdir
    }
    log.debug(
        "Context environments set:\n{}".format(
            json.dumps(context_env, indent=4)
        )
    )
    data["env"].update(context_env)
    data["env"]["AVALON_APP"] = app.host_name
    data["env"]["AVALON_WORKDIR"] = workdir

    _prepare_last_workfile(data, workdir)
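The net effect of the hunks above: context variables that make sense for any launched application are set first, and only applications marked as hosts additionally get `AVALON_APP` and `AVALON_WORKDIR`. A condensed sketch with illustrative values:

```python
# Illustrative values; in the diff these come from project/asset docs.
env = {}
env.update({
    "AVALON_PROJECT": "demo_project",
    "AVALON_ASSET": "sq01_sh0010",
    "AVALON_TASK": "Compositing",
    "AVALON_APP_NAME": "nuke/13-0",
})

is_host = True  # app.is_host in the real code
if is_host:
    # Only host applications get workdir-related variables.
    env["AVALON_APP"] = "nuke"
    env["AVALON_WORKDIR"] = "/projects/demo/sq01_sh0010/work/Compositing"
```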
@ -17,7 +17,7 @@ def collect_frames(files):
    Returns:
        (dict): {'/asset/subset_v001.0001.png': '0001', ....}
    """
    collections, remainder = clique.assemble(files)
    collections, remainder = clique.assemble(files, minimum_items=1)

    sources_and_frames = {}
    if collections:
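The reason for `minimum_items=1`: clique's default of two items means a single rendered frame never forms a collection and ends up in the remainder, so its frame number would be lost. A small self-contained demonstration:

```python
import clique

files = ["render.0001.exr"]

# Default: a lone frame is not treated as a sequence.
collections, remainder = clique.assemble(files)
print(collections, remainder)  # [] ['render.0001.exr']

# With minimum_items=1 the single frame still yields a collection,
# so its frame number can be extracted like any other sequence.
collections, remainder = clique.assemble(files, minimum_items=1)
for collection in collections:
    print(collection.head, list(collection.indexes))  # render. [1]
```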
@ -1,5 +1,9 @@
import os
import sys
import subprocess
import platform
import json
import tempfile
import distutils.spawn

from .log import PypeLogger as Logger

@ -181,6 +185,80 @@ def run_openpype_process(*args, **kwargs):
    return run_subprocess(args, env=env, **kwargs)


def run_detached_process(args, **kwargs):
    """Execute process with passed arguments as separated process.

    Values from 'os.environ' are used for environments if they are not
    passed. They are cleaned using 'clean_envs_for_openpype_process'
    function.

    Example:
        ```
        run_detached_process("run", "<path to .py script>")
        ```

    Args:
        *args (tuple): OpenPype cli arguments.
        **kwargs (dict): Keyword arguments for subprocess.Popen.

    Returns:
        subprocess.Popen: Pointer to launched process, but it is possible
            that the launched process is already killed (on linux).
    """
    env = kwargs.pop("env", None)
    # Keep env untouched if are passed and not empty
    if not env:
        env = os.environ

    # Create copy of passed env
    kwargs["env"] = {k: v for k, v in env.items()}

    low_platform = platform.system().lower()
    if low_platform == "darwin":
        new_args = ["open", "-na", args.pop(0), "--args"]
        new_args.extend(args)
        args = new_args

    elif low_platform == "windows":
        flags = (
            subprocess.CREATE_NEW_PROCESS_GROUP
            | subprocess.DETACHED_PROCESS
        )
        kwargs["creationflags"] = flags

        if not sys.stdout:
            kwargs["stdout"] = subprocess.DEVNULL
            kwargs["stderr"] = subprocess.DEVNULL

    elif low_platform == "linux" and get_linux_launcher_args() is not None:
        json_data = {
            "args": args,
            "env": kwargs.pop("env")
        }
        json_temp = tempfile.NamedTemporaryFile(
            mode="w", prefix="op_app_args", suffix=".json", delete=False
        )
        json_temp.close()
        json_temp_filpath = json_temp.name
        with open(json_temp_filpath, "w") as stream:
            json.dump(json_data, stream)

        new_args = get_linux_launcher_args()
        new_args.append(json_temp_filpath)

        # Create mid-process which will launch application
        process = subprocess.Popen(new_args, **kwargs)
        # Wait until the process finishes
        # - This is important! The process would stay in "open" state.
        process.wait()
        # Remove the temp file
        os.remove(json_temp_filpath)
        # Return process which is already terminated
        return process

    process = subprocess.Popen(args, **kwargs)
    return process


def path_to_subprocess_arg(path):
    """Prepare path for subprocess arguments.
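A usage sketch under the assumption that an OpenPype executable is on PATH; note the function takes a single argument list (it calls `args.pop(0)` on macOS), so it should be called with a list rather than separate positional arguments:

```python
from openpype.lib import run_detached_process

# Launch the tray as its own process; the caller does not wait on it.
process = run_detached_process(["openpype_gui", "tray"])

# On Linux (with a launcher script) the returned Popen has already
# finished: it was only the mid-process that re-launched the app.
print("spawned pid:", process.pid)
```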
@ -49,11 +49,13 @@ class Terminal:
        """

        from openpype.lib import env_value_to_bool
        use_colors = env_value_to_bool(
            "OPENPYPE_LOG_NO_COLORS", default=Terminal.use_colors
        log_no_colors = env_value_to_bool(
            "OPENPYPE_LOG_NO_COLORS", default=None
        )
        if not use_colors:
            Terminal.use_colors = use_colors
        if log_no_colors is not None:
            Terminal.use_colors = not log_no_colors

        if not Terminal.use_colors:
            Terminal._initialized = True
            return
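The change above makes the override tri-state: unset leaves the class default alone, an explicitly truthy value disables colors, an explicitly falsy value enables them. A standalone sketch of that logic (the helper below is a simplified stand-in for OpenPype's `env_value_to_bool`):

```python
import os

def env_to_bool_or_none(name):
    """Simplified stand-in for openpype's env_value_to_bool."""
    value = os.environ.get(name)
    if value is None:
        return None
    return value.lower() in ("1", "true", "yes", "on")

use_colors = True  # class default
log_no_colors = env_to_bool_or_none("OPENPYPE_LOG_NO_COLORS")
if log_no_colors is not None:
    # Only an explicitly set variable overrides the default.
    use_colors = not log_no_colors
print(use_colors)
```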
@ -33,16 +33,21 @@ DEFAULT_OPENPYPE_MODULES = (
    "avalon_apps",
    "clockify",
    "log_viewer",
    "deadline",
    "muster",
    "royalrender",
    "python_console_interpreter",
    "ftrack",
    "slack",
    "webserver",
    "launcher_action",
    "project_manager_action",
    "settings_action",
    "standalonepublish_action",
    "traypublish_action",
    "job_queue",
    "timers_manager",
    "sync_server",
)


@ -218,8 +223,6 @@ def load_interfaces(force=False):

def _load_interfaces():
    # Key under which will be modules imported in `sys.modules`
    from openpype.lib import import_filepath

    modules_key = "openpype_interfaces"

    sys.modules[modules_key] = openpype_interfaces = (

@ -844,6 +847,7 @@ class TrayModulesManager(ModulesManager):
    "avalon",
    "clockify",
    "standalonepublish_tool",
    "traypublish_tool",
    "log_viewer",
    "local_settings",
    "settings"
@ -15,7 +15,7 @@ import attr
import requests

import pyblish.api
from .abstract_metaplugins import AbstractMetaInstancePlugin
from openpype.lib.abstract_metaplugins import AbstractMetaInstancePlugin


def requests_post(*args, **kwargs):
@ -5,9 +5,9 @@ import pyblish.api

from avalon import api

from openpype.lib import abstract_submit_deadline
from openpype.lib.abstract_submit_deadline import DeadlineJobInfo
from openpype.lib import env_value_to_bool
from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo


@attr.s

@ -24,7 +24,9 @@ class DeadlinePluginInfo():
    MultiProcess = attr.ib(default=None)


class AfterEffectsSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
class AfterEffectsSubmitDeadline(
    abstract_submit_deadline.AbstractSubmitDeadline
):

    label = "Submit AE to Deadline"
    order = pyblish.api.IntegratorOrder + 0.1
@ -8,11 +8,11 @@ import re

import attr
import pyblish.api

import openpype.lib.abstract_submit_deadline
from openpype.lib.abstract_submit_deadline import DeadlineJobInfo
from avalon import api

from openpype_modules.deadline import abstract_submit_deadline
from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo


class _ZipFile(ZipFile):
    """Extended check for windows invalid characters."""

@ -217,7 +217,8 @@ class PluginInfo(object):


class HarmonySubmitDeadline(
    openpype.lib.abstract_submit_deadline.AbstractSubmitDeadline):
    abstract_submit_deadline.AbstractSubmitDeadline
):
    """Submit render write of Harmony scene to Deadline.

    Renders are submitted to a Deadline Web Service as
@ -1,11 +1,10 @@
import os
import json
import requests

import pyblish.api

from openpype.lib.abstract_submit_deadline import requests_get
from openpype.lib.delivery import collect_frames
from openpype_modules.deadline.abstract_submit_deadline import requests_get


class ValidateExpectedFiles(pyblish.api.InstancePlugin):

@ -30,47 +29,58 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
            staging_dir = repre["stagingDir"]
            existing_files = self._get_existing_files(staging_dir)

            expected_non_existent = expected_files.difference(
                existing_files)
            if len(expected_non_existent) != 0:
                self.log.info("Some expected files missing {}".format(
                    expected_non_existent))
            if self.allow_user_override:
                # We always check for user override because the user might have
                # also overridden the Job frame list to be longer than the
                # originally submitted frame range
                # todo: We should first check if Job frame range was overridden
                #       at all so we don't unnecessarily override anything
                file_name_template, frame_placeholder = \
                    self._get_file_name_template_and_placeholder(
                        expected_files)

                if self.allow_user_override:
                    file_name_template, frame_placeholder = \
                        self._get_file_name_template_and_placeholder(
                            expected_files)
                if not file_name_template:
                    raise RuntimeError("Unable to retrieve file_name template"
                                       "from files: {}".format(expected_files))

                    if not file_name_template:
                        return
                job_expected_files = self._get_job_expected_files(
                    file_name_template,
                    frame_placeholder,
                    frame_list)

                    real_expected_rendered = self._get_real_render_expected(
                        file_name_template,
                        frame_placeholder,
                        frame_list)
                job_files_diff = job_expected_files.difference(expected_files)
                if job_files_diff:
                    self.log.debug(
                        "Detected difference in expected output files from "
                        "Deadline job. Assuming an updated frame list by the "
                        "user. Difference: {}".format(sorted(job_files_diff))
                    )

                    real_expected_non_existent = \
                        real_expected_rendered.difference(existing_files)
                    if len(real_expected_non_existent) != 0:
                        raise RuntimeError("Still missing some files {}".
                                           format(real_expected_non_existent))
                    self.log.info("Update range from actual job range")
                    repre["files"] = sorted(list(real_expected_rendered))
                else:
                    raise RuntimeError("Some expected files missing {}".format(
                        expected_non_existent))
                    # Update the representation expected files
                    self.log.info("Update range from actual job range "
                                  "to frame list: {}".format(frame_list))
                    repre["files"] = sorted(job_expected_files)

                    # Update the expected files
                    expected_files = job_expected_files

            # We don't use set.difference because we do allow other existing
            # files to be in the folder that we might not want to use.
            missing = expected_files - existing_files
            if missing:
                raise RuntimeError("Missing expected files: {}".format(
                    sorted(missing)))

    def _get_frame_list(self, original_job_id):
        """
        Returns list of frame ranges from all render job.
        """Returns list of frame ranges from all render job.

        Render job might be requeried so job_id in metadata.json is invalid
        GlobalJobPreload injects current ids to RENDER_JOB_IDS.
        Render job might be re-submitted so job_id in metadata.json could be
        invalid. GlobalJobPreload injects current job id to RENDER_JOB_IDS.

        Args:
            original_job_id (str)
        Returns:
            (list)
        Args:
            original_job_id (str)
        Returns:
            (list)
        """
        all_frame_lists = []
        render_job_ids = os.environ.get("RENDER_JOB_IDS")

@ -87,13 +97,15 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):

        return all_frame_lists

    def _get_real_render_expected(self, file_name_template, frame_placeholder,
                                  frame_list):
        """
        Calculates list of names of expected rendered files.
    def _get_job_expected_files(self,
                                file_name_template,
                                frame_placeholder,
                                frame_list):
        """Calculates list of names of expected rendered files.

        Might be different from expected files from submission if user
        explicitly and manually changed the frame list on the Deadline job.

        Might be different from job expected files if user explicitly and
        manually change frame list on Deadline job.
        """
        real_expected_rendered = set()
        src_padding_exp = "%0{}d".format(len(frame_placeholder))

@ -115,6 +127,14 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):

        file_name_template = frame_placeholder = None
        for file_name, frame in sources_and_frames.items():

            # There might be cases where clique was unable to collect
            # collections in `collect_frames` - thus we capture that case
            if frame is None:
                self.log.warning("Unable to detect frame from filename: "
                                 "{}".format(file_name))
                continue

            frame_placeholder = "#" * len(frame)
            file_name_template = os.path.basename(
                file_name.replace(frame, frame_placeholder))

@ -123,11 +143,11 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
        return file_name_template, frame_placeholder

    def _get_job_info(self, job_id):
        """
        Calls DL for actual job info for 'job_id'
        """Calls DL for actual job info for 'job_id'

        Might be different than job info saved in metadata.json if user
        manually changes job pre/during rendering.

        Might be different than job info saved in metadata.json if user
        manually changes job pre/during rendering.
        """
        # get default deadline webservice url from deadline module
        deadline_url = self.instance.context.data["defaultDeadline"]

@ -140,8 +160,8 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
        try:
            response = requests_get(url)
        except requests.exceptions.ConnectionError:
            print("Deadline is not accessible at {}".format(deadline_url))
            # self.log("Deadline is not accessible at {}".format(deadline_url))
            self.log.error("Deadline is not accessible at "
                           "{}".format(deadline_url))
            return {}

        if not response.ok:

@ -155,29 +175,26 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
            return json_content.pop()
        return {}

    def _parse_metadata_json(self, json_path):
        if not os.path.exists(json_path):
            msg = "Metadata file {} doesn't exist".format(json_path)
            raise RuntimeError(msg)

        with open(json_path) as fp:
            try:
                return json.load(fp)
            except Exception as exc:
                self.log.error(
                    "Error loading json: "
                    "{} - Exception: {}".format(json_path, exc)
                )

    def _get_existing_files(self, out_dir):
        """Returns set of existing file names from 'out_dir'"""
    def _get_existing_files(self, staging_dir):
        """Returns set of existing file names from 'staging_dir'"""
        existing_files = set()
        for file_name in os.listdir(out_dir):
        for file_name in os.listdir(staging_dir):
            existing_files.add(file_name)
        return existing_files

    def _get_expected_files(self, repre):
        """Returns set of file names from metadata.json"""
        """Returns set of file names in representation['files']

        The representations are collected from `CollectRenderedFiles` using
        the metadata.json file submitted along with the render job.

        Args:
            repre (dict): The representation containing 'files'

        Returns:
            set: Set of expected file_names in the staging directory.

        """
        expected_files = set()

        files = repre["files"]
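The `_get_job_expected_files` logic above expands a `####`-style template with every frame from the Deadline job's frame list. A standalone sketch of that expansion, with made-up names (the plugin derives the template and frames from rendered file names and the job info):

```python
# Illustrative template and frames.
file_name_template = "shot010_beauty.####.exr"
frame_placeholder = "####"
frames = [1001, 1002, 1003]

src_padding_exp = "%0{}d".format(len(frame_placeholder))  # "%04d"
expected = {
    file_name_template.replace(frame_placeholder, src_padding_exp % frame)
    for frame in frames
}
print(sorted(expected))
# ['shot010_beauty.1001.exr', 'shot010_beauty.1002.exr',
#  'shot010_beauty.1003.exr']
```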
@ -1 +0,0 @@
Subproject commit b746fedf7286c3755a46f07ab72f4c414cd41fc0

@ -1 +0,0 @@
Subproject commit d277f474ab016e7b53479c36af87cb861d0cc53e
@ -3,8 +3,9 @@ import uuid
from datetime import datetime

from bson.objectid import ObjectId
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from avalon.api import AvalonMongoDB
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype_modules.ftrack.lib.avalon_sync import create_chunks


class DeleteAssetSubset(BaseAction):

@ -554,8 +555,8 @@ class DeleteAssetSubset(BaseAction):
            ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)
        ))

        entities_by_link_len = (
            self._filter_entities_to_delete(ftrack_ids_to_delete, session)
        entities_by_link_len = self._prepare_entities_before_delete(
            ftrack_ids_to_delete, session
        )
        for link_len in sorted(entities_by_link_len.keys(), reverse=True):
            for entity in entities_by_link_len[link_len]:

@ -609,7 +610,7 @@ class DeleteAssetSubset(BaseAction):

        return self.report_handle(report_messages, project_name, event)

    def _filter_entities_to_delete(self, ftrack_ids_to_delete, session):
    def _prepare_entities_before_delete(self, ftrack_ids_to_delete, session):
        """Filter children entities to avoid CircularDependencyError."""
        joined_ids_to_delete = ", ".join(
            ["\"{}\"".format(id) for id in ftrack_ids_to_delete]

@ -638,6 +639,21 @@ class DeleteAssetSubset(BaseAction):
                parent_ids_to_delete.append(entity["id"])
                to_delete_entities.append(entity)

        # Unset 'task_id' from AssetVersion entities
        # - when task is deleted the asset version is not marked for deletion
        task_ids = set(
            entity["id"]
            for entity in to_delete_entities
            if entity.entity_type.lower() == "task"
        )
        for chunk in create_chunks(task_ids):
            asset_versions = session.query((
                "select id, task_id from AssetVersion where task_id in ({})"
            ).format(self.join_query_keys(chunk))).all()
            for asset_version in asset_versions:
                asset_version["task_id"] = None
        session.commit()

        entities_by_link_len = collections.defaultdict(list)
        for entity in to_delete_entities:
            entities_by_link_len[len(entity["link"])].append(entity)
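`create_chunks` (imported above) keeps the ftrack query's `in (...)` clause bounded when many tasks are deleted at once. A sketch of the pattern with a hypothetical stand-in for the helper, since only its call shape (one iterable argument) appears in the diff:

```python
# Hypothetical stand-in for openpype's create_chunks helper.
def create_chunks(items, chunk_size=200):
    items = list(items)
    for start in range(0, len(items), chunk_size):
        yield items[start:start + chunk_size]

task_ids = {"id-{}".format(i) for i in range(500)}
for chunk in create_chunks(task_ids):
    # Each chunk becomes one bounded "where task_id in (...)" query.
    print(len(chunk))
```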
Some files were not shown because too many files have changed in this diff