Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Commit ab4526a40f
Merge branch 'develop' into enhancement/OP-2930_New-publisher-Develop-docs

105 changed files with 2383 additions and 825 deletions
@@ -5,15 +5,15 @@ from Qt import QtWidgets
from bson.objectid import ObjectId

import pyblish.api
import avalon.api
from avalon import io

from openpype import lib
from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
import openpype.hosts.aftereffects

@@ -73,7 +73,7 @@ def install():
    pyblish.api.register_plugin_path(PUBLISH_PATH)

    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    log.info(PUBLISH_PATH)

    pyblish.api.register_callback(

@@ -86,7 +86,7 @@ def install():
def uninstall():
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)


def on_pyblish_instance_toggled(instance, old_value, new_value):
@@ -5,14 +5,6 @@ from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
from .launch_logic import get_stub


-def _active_document():
-    document_name = get_stub().get_active_document_name()
-    if not document_name:
-        return None
-
-    return document_name
-
-
def file_extensions():
    return HOST_WORKFILE_EXTENSIONS["aftereffects"]

@@ -39,7 +31,8 @@ def current_file():
        full_name = get_stub().get_active_document_full_name()
        if full_name and full_name != "null":
            return os.path.normpath(full_name).replace("\\", "/")
-    except Exception:
+    except ValueError:
+        print("Nothing opened")
        pass

    return None

@@ -47,3 +40,15 @@ def current_file():
def work_root(session):
    return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")


+def _active_document():
+    # TODO merge with current_file - even in extension
+    document_name = None
+    try:
+        document_name = get_stub().get_active_document_name()
+    except ValueError:
+        print("Nothing opened")
+        pass
+
+    return document_name
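Note: the TODO in the relocated `_active_document` suggests merging it with `current_file`, since both now swallow the same `ValueError` from the stub. A minimal sketch of what that merge could look like — the helper name `_get_document_name_safe` is hypothetical, not part of this commit:

import os


def _get_document_name_safe(full=False):
    # Hypothetical merged helper: one place to handle the
    # "nothing opened" ValueError for both call sites.
    stub = get_stub()
    try:
        if full:
            return stub.get_active_document_full_name()
        return stub.get_active_document_name()
    except ValueError:
        print("Nothing opened")
        return None


def current_file():
    full_name = _get_document_name_safe(full=True)
    if full_name and full_name != "null":
        return os.path.normpath(full_name).replace("\\", "/")
    return None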
@@ -1,12 +1,14 @@
-from openpype.pipeline import create
-from openpype.pipeline import CreatorError
+from openpype.pipeline import (
+    CreatorError,
+    LegacyCreator
+)
from openpype.hosts.aftereffects.api import (
    get_stub,
    list_instances
)


-class CreateRender(create.LegacyCreator):
+class CreateRender(LegacyCreator):
    """Render folder for publish.

    Creates subsets in format 'familyTaskSubsetname',
@@ -14,9 +14,10 @@ import avalon.api
from avalon import io, schema

from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from openpype.api import Logger

@@ -54,7 +55,7 @@ def install():
    pyblish.api.register_plugin_path(str(PUBLISH_PATH))

    register_loader_plugin_path(str(LOAD_PATH))
-    avalon.api.register_plugin_path(LegacyCreator, str(CREATE_PATH))
+    register_creator_plugin_path(str(CREATE_PATH))

    lib.append_user_scripts()


@@ -76,7 +77,7 @@ def uninstall():
    pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))

    deregister_loader_plugin_path(str(LOAD_PATH))
-    avalon.api.deregister_plugin_path(LegacyCreator, str(CREATE_PATH))
+    deregister_creator_plugin_path(str(CREATE_PATH))

    if not IS_HEADLESS:
        ops.unregister()
@@ -18,6 +18,7 @@ log = Logger.get_logger(__name__)

+FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]")


class CTX:
    # singleton used for passing data between api modules
    app_framework = None

@@ -538,9 +539,17 @@ def get_segment_attributes(segment):

    # head and tail with forward compatibility
    if segment.head:
-        clip_data["segment_head"] = int(segment.head)
+        # `infinite` can be also returned
+        if isinstance(segment.head, str):
+            clip_data["segment_head"] = 0
+        else:
+            clip_data["segment_head"] = int(segment.head)
    if segment.tail:
-        clip_data["segment_tail"] = int(segment.tail)
+        # `infinite` can be also returned
+        if isinstance(segment.tail, str):
+            clip_data["segment_tail"] = 0
+        else:
+            clip_data["segment_tail"] = int(segment.tail)

    # add all available shot tokens
    shot_tokens = _get_shot_tokens_values(segment, [
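Note: per the comments in this hunk, Flame can report `head`/`tail` either as a number or as the string `infinite`, so the new branches coerce strings to 0 before calling `int()`. The same guard as a standalone sketch — the helper name is an illustration, not part of the commit:

def _segment_edge_to_int(value):
    # Flame may return the string "infinite" instead of a number;
    # treat any string as "no usable handle" and fall back to 0.
    if isinstance(value, str):
        return 0
    return int(value)


# usage mirroring the hunk above:
# if segment.head:
#     clip_data["segment_head"] = _segment_edge_to_int(segment.head)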
@@ -3,14 +3,14 @@ Basic avalon integration
"""
import os
import contextlib
from avalon import api as avalon
from pyblish import api as pyblish

from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from .lib import (

@@ -37,7 +37,7 @@ def install():
    pyblish.register_host("flame")
    pyblish.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    log.info("OpenPype Flame plug-ins registred ...")

    # register callback for switching publishable

@@ -52,7 +52,7 @@ def uninstall():
    log.info("Deregistering Flame plug-ins..")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)

    # register callback for switching publishable
    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
@@ -422,7 +422,13 @@ class WireTapCom(object):
        color_policy = color_policy or "Legacy"

        # check if the colour policy in custom dir
        if not os.path.exists(color_policy):
+            if "/" in color_policy:
+                # if unlikelly full path was used make it redundant
+                color_policy = color_policy.replace("/syncolor/policies/", "")
+                # expecting input is `Shared/NameOfPolicy`
+                color_policy = "/syncolor/policies/{}".format(
+                    color_policy)
+            else:
                color_policy = "/syncolor/policies/Autodesk/{}".format(
                    color_policy)
@@ -34,119 +34,125 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
    def process(self, context):
        project = context.data["flameProject"]
        sequence = context.data["flameSequence"]
+        selected_segments = context.data["flameSelectedSegments"]
+        self.log.debug("__ selected_segments: {}".format(selected_segments))

        self.otio_timeline = context.data["otioTimeline"]
        self.clips_in_reels = opfapi.get_clips_in_reels(project)
        self.fps = context.data["fps"]

-        # process all sellected
-        with opfapi.maintained_segment_selection(sequence) as segments:
-            for segment in segments:
-                comment_attributes = self._get_comment_attributes(segment)
-                self.log.debug("_ comment_attributes: {}".format(
-                    pformat(comment_attributes)))
+        for segment in selected_segments:
+            # get openpype tag data
+            marker_data = opfapi.get_segment_data_marker(segment)
+            self.log.debug("__ marker_data: {}".format(
+                pformat(marker_data)))

-                clip_data = opfapi.get_segment_attributes(segment)
-                clip_name = clip_data["segment_name"]
-                self.log.debug("clip_name: {}".format(clip_name))
+            if not marker_data:
+                continue

-                # get openpype tag data
-                marker_data = opfapi.get_segment_data_marker(segment)
-                self.log.debug("__ marker_data: {}".format(
-                    pformat(marker_data)))
+            if marker_data.get("id") != "pyblish.avalon.instance":
+                continue

-                if not marker_data:
-                    continue
+            self.log.debug("__ segment.name: {}".format(
+                segment.name
+            ))

-                if marker_data.get("id") != "pyblish.avalon.instance":
-                    continue
+            comment_attributes = self._get_comment_attributes(segment)

-                # get file path
-                file_path = clip_data["fpath"]
+            self.log.debug("_ comment_attributes: {}".format(
+                pformat(comment_attributes)))

-                # get source clip
-                source_clip = self._get_reel_clip(file_path)
+            clip_data = opfapi.get_segment_attributes(segment)
+            clip_name = clip_data["segment_name"]
+            self.log.debug("clip_name: {}".format(clip_name))

-                first_frame = opfapi.get_frame_from_filename(file_path) or 0
+            # get file path
+            file_path = clip_data["fpath"]

-                head, tail = self._get_head_tail(clip_data, first_frame)
+            # get source clip
+            source_clip = self._get_reel_clip(file_path)

-                # solve handles length
-                marker_data["handleStart"] = min(
-                    marker_data["handleStart"], head)
-                marker_data["handleEnd"] = min(
-                    marker_data["handleEnd"], tail)
+            first_frame = opfapi.get_frame_from_filename(file_path) or 0

-                with_audio = bool(marker_data.pop("audio"))
+            head, tail = self._get_head_tail(clip_data, first_frame)

-                # add marker data to instance data
-                inst_data = dict(marker_data.items())
+            # solve handles length
+            marker_data["handleStart"] = min(
+                marker_data["handleStart"], head)
+            marker_data["handleEnd"] = min(
+                marker_data["handleEnd"], tail)

-                asset = marker_data["asset"]
-                subset = marker_data["subset"]
+            with_audio = bool(marker_data.pop("audio"))

-                # insert family into families
-                family = marker_data["family"]
-                families = [str(f) for f in marker_data["families"]]
-                families.insert(0, str(family))
+            # add marker data to instance data
+            inst_data = dict(marker_data.items())

-                # form label
-                label = asset
-                if asset != clip_name:
-                    label += " ({})".format(clip_name)
-                label += " {}".format(subset)
-                label += " {}".format("[" + ", ".join(families) + "]")
+            asset = marker_data["asset"]
+            subset = marker_data["subset"]

-                inst_data.update({
-                    "name": "{}_{}".format(asset, subset),
-                    "label": label,
-                    "asset": asset,
-                    "item": segment,
-                    "families": families,
-                    "publish": marker_data["publish"],
-                    "fps": self.fps,
-                    "flameSourceClip": source_clip,
-                    "sourceFirstFrame": int(first_frame),
-                    "path": file_path
-                })
+            # insert family into families
+            family = marker_data["family"]
+            families = [str(f) for f in marker_data["families"]]
+            families.insert(0, str(family))

-                # get otio clip data
-                otio_data = self._get_otio_clip_instance_data(clip_data) or {}
-                self.log.debug("__ otio_data: {}".format(pformat(otio_data)))
+            # form label
+            label = asset
+            if asset != clip_name:
+                label += " ({})".format(clip_name)
+            label += " {} [{}]".format(subset, ", ".join(families))

-                # add to instance data
-                inst_data.update(otio_data)
-                self.log.debug("__ inst_data: {}".format(pformat(inst_data)))
+            inst_data.update({
+                "name": "{}_{}".format(asset, subset),
+                "label": label,
+                "asset": asset,
+                "item": segment,
+                "families": families,
+                "publish": marker_data["publish"],
+                "fps": self.fps,
+                "flameSourceClip": source_clip,
+                "sourceFirstFrame": int(first_frame),
+                "path": file_path
+            })

-                # add resolution
-                self._get_resolution_to_data(inst_data, context)
+            # get otio clip data
+            otio_data = self._get_otio_clip_instance_data(clip_data) or {}
+            self.log.debug("__ otio_data: {}".format(pformat(otio_data)))

-                # add comment attributes if any
-                inst_data.update(comment_attributes)
+            # add to instance data
+            inst_data.update(otio_data)
+            self.log.debug("__ inst_data: {}".format(pformat(inst_data)))

-                # create instance
-                instance = context.create_instance(**inst_data)
+            # add resolution
+            self._get_resolution_to_data(inst_data, context)

-                # add colorspace data
-                instance.data.update({
-                    "versionData": {
-                        "colorspace": clip_data["colour_space"],
-                    }
-                })
+            # add comment attributes if any
+            inst_data.update(comment_attributes)

-                # create shot instance for shot attributes create/update
-                self._create_shot_instance(context, clip_name, **inst_data)
+            # create instance
+            instance = context.create_instance(**inst_data)

-                self.log.info("Creating instance: {}".format(instance))
-                self.log.info(
-                    "_ instance.data: {}".format(pformat(instance.data)))
+            # add colorspace data
+            instance.data.update({
+                "versionData": {
+                    "colorspace": clip_data["colour_space"],
+                }
+            })

-                if not with_audio:
-                    continue
+            # create shot instance for shot attributes create/update
+            self._create_shot_instance(context, clip_name, **inst_data)

-                # add audioReview attribute to plate instance data
-                # if reviewTrack is on
-                if marker_data.get("reviewTrack") is not None:
-                    instance.data["reviewAudio"] = True
+            self.log.info("Creating instance: {}".format(instance))
+            self.log.info(
+                "_ instance.data: {}".format(pformat(instance.data)))

+            if not with_audio:
+                continue

+            # add audioReview attribute to plate instance data
+            # if reviewTrack is on
+            if marker_data.get("reviewTrack") is not None:
+                instance.data["reviewAudio"] = True

    def _get_comment_attributes(self, segment):
        comment = segment.comment.get_value()

@@ -188,7 +194,7 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):

        # get pattern defined by type
        pattern = TXT_PATERN
-        if a_type in ("number" , "float"):
+        if a_type in ("number", "float"):
            pattern = NUM_PATERN

        res_goup = pattern.findall(value)
@@ -31,27 +31,28 @@ class CollecTimelineOTIO(pyblish.api.ContextPlugin):
        )

        # adding otio timeline to context
-        with opfapi.maintained_segment_selection(sequence):
+        with opfapi.maintained_segment_selection(sequence) as selected_seg:
            otio_timeline = flame_export.create_otio_timeline(sequence)

-        instance_data = {
-            "name": subset_name,
-            "asset": asset_doc["name"],
-            "subset": subset_name,
-            "family": "workfile"
-        }
+            instance_data = {
+                "name": subset_name,
+                "asset": asset_doc["name"],
+                "subset": subset_name,
+                "family": "workfile"
+            }

-        # create instance with workfile
-        instance = context.create_instance(**instance_data)
-        self.log.info("Creating instance: {}".format(instance))
+            # create instance with workfile
+            instance = context.create_instance(**instance_data)
+            self.log.info("Creating instance: {}".format(instance))

-        # update context with main project attributes
-        context.data.update({
-            "flameProject": project,
-            "flameSequence": sequence,
-            "otioTimeline": otio_timeline,
-            "currentFile": "Flame/{}/{}".format(
-                project.name, sequence.name
-            ),
-            "fps": float(str(sequence.frame_rate)[:-4])
-        })
+            # update context with main project attributes
+            context.data.update({
+                "flameProject": project,
+                "flameSequence": sequence,
+                "otioTimeline": otio_timeline,
+                "currentFile": "Flame/{}/{}".format(
+                    project.name, sequence.name
+                ),
+                "flameSelectedSegments": selected_seg,
+                "fps": float(str(sequence.frame_rate)[:-4])
+            })
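Note: the `as selected_seg` change only works if `maintained_segment_selection` is a context manager that yields the captured selection, which the collector hunk above then reads back from `context.data["flameSelectedSegments"]`. A self-contained sketch of that pattern, assuming stand-in selection calls (`_get_selection`/`_set_selection` are not the real Flame API):

import contextlib


def _get_selection(sequence):
    # Stand-in for the real Flame selection query.
    return list(getattr(sequence, "selected_segments", []))


def _set_selection(sequence, selection):
    # Stand-in for the real Flame selection restore.
    sequence.selected_segments = selection


@contextlib.contextmanager
def maintained_segment_selection(sequence):
    # Capture the selection, hand it to the caller via `yield`,
    # and restore it on exit -- which is what makes
    # `with maintained_segment_selection(seq) as selected_seg:` work.
    selection = _get_selection(sequence)
    try:
        yield selection
    finally:
        _set_selection(sequence, selection)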
@@ -7,14 +7,14 @@ import logging
import contextlib

import pyblish.api
import avalon.api

from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
+    register_creator_plugin_path,
+    register_inventory_action_path,
    deregister_loader_plugin_path,
+    deregister_creator_plugin_path,
+    deregister_inventory_action_path,
    AVALON_CONTAINER_ID,
)

@@ -70,7 +70,7 @@ def install():
    log.info("Registering Fusion plug-ins..")

    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    register_inventory_action_path(INVENTORY_PATH)

    pyblish.api.register_callback(

@@ -94,7 +94,7 @@ def uninstall():
    log.info("Deregistering Fusion plug-ins..")

    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)
    deregister_inventory_action_path(INVENTORY_PATH)

    pyblish.api.deregister_callback(
@@ -1,13 +1,13 @@
import os

-from openpype.pipeline import create
+from openpype.pipeline import LegacyCreator
from openpype.hosts.fusion.api import (
    get_current_comp,
    comp_lock_and_undo_chunk
)


-class CreateOpenEXRSaver(create.LegacyCreator):
+class CreateOpenEXRSaver(LegacyCreator):

    name = "openexrDefault"
    label = "Create OpenEXR Saver"
@@ -6,14 +6,14 @@ from bson.objectid import ObjectId
import pyblish.api

from avalon import io
import avalon.api

from openpype import lib
from openpype.lib import register_event_callback
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
import openpype.hosts.harmony

@@ -108,9 +108,8 @@ def check_inventory():
    if not lib.any_outdated():
        return

-    host = avalon.api.registered_host()
    outdated_containers = []
-    for container in host.ls():
+    for container in ls():
        representation = container['representation']
        representation_doc = io.find_one(
            {

@@ -186,7 +185,7 @@ def install():
    pyblish.api.register_host("harmony")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    log.info(PUBLISH_PATH)

    # Register callbacks.

@@ -200,7 +199,7 @@ def install():
def uninstall():
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)


def on_pyblish_instance_toggled(instance, old_value, new_value):
@@ -5,13 +5,13 @@ import os
import contextlib
from collections import OrderedDict

from avalon import api as avalon
from avalon import schema
from pyblish import api as pyblish
from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_creator_plugin_path,
    register_loader_plugin_path,
    deregister_creator_plugin_path,
    deregister_loader_plugin_path,
    AVALON_CONTAINER_ID,
)

@@ -50,7 +50,7 @@ def install():
    pyblish.register_host("hiero")
    pyblish.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)

    # register callback for switching publishable
    pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

@@ -71,7 +71,7 @@ def uninstall():
    pyblish.deregister_host("hiero")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)

    # register callback for switching publishable
    pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)
@@ -11,7 +11,7 @@ import avalon.api
from avalon.lib import find_submodule

from openpype.pipeline import (
    LegacyCreator,
    register_creator_plugin_path,
    register_loader_plugin_path,
    AVALON_CONTAINER_ID,
)

@@ -54,7 +54,7 @@ def install():

    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)

    log.info("Installing callbacks ... ")
    # register_event_callback("init", on_init)
@@ -1511,7 +1511,7 @@ def get_container_members(container):

    members = cmds.sets(container, query=True) or []
    members = cmds.ls(members, long=True, objectsOnly=True) or []
-    members = set(members)
+    all_members = set(members)

    # Include any referenced nodes from any reference in the container
    # This is required since we've removed adding ALL nodes of a reference

@@ -1530,9 +1530,9 @@ def get_container_members(container):
        reference_members = cmds.ls(reference_members,
                                    long=True,
                                    objectsOnly=True)
-        members.update(reference_members)
+        all_members.update(reference_members)

-    return members
+    return list(all_members)


# region LOOKDEV
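Note: renaming the accumulator to `all_members` and returning `list(all_members)` keeps the set purely as an internal deduplication detail while restoring the list return type callers likely expect. The same idea in isolation, as a minimal sketch:

def collect_unique(*groups):
    # Accumulate into a set to deduplicate across groups, but hand back
    # a list so callers that index or rely on list semantics keep working.
    all_members = set()
    for group in groups:
        all_members.update(group)
    return list(all_members)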
@@ -23,8 +23,10 @@ from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_inventory_action_path,
+    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_inventory_action_path,
+    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from openpype.hosts.maya.lib import copy_workspace_mel

@@ -60,7 +62,7 @@ def install():
    pyblish.api.register_host("maya")

    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    register_inventory_action_path(INVENTORY_PATH)
    log.info(PUBLISH_PATH)


@@ -189,7 +191,7 @@ def uninstall():
    pyblish.api.deregister_host("maya")

    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)
    deregister_inventory_action_path(INVENTORY_PATH)

    menu.uninstall()
@@ -22,4 +22,6 @@ class CreateLook(plugin.Creator):
        self.data["maketx"] = self.make_tx

        # Enable users to force a copy.
        # - on Windows is "forceCopy" always changed to `True` because of
        #   windows implementation of hardlinks
        self.data["forceCopy"] = False

openpype/hosts/maya/plugins/create/create_multiverse_usd.py (new file, 51 lines)
@@ -0,0 +1,51 @@
from openpype.hosts.maya.api import plugin, lib


class CreateMultiverseUsd(plugin.Creator):
    """Multiverse USD data"""

    name = "usdMain"
    label = "Multiverse USD"
    family = "usd"
    icon = "cubes"

    def __init__(self, *args, **kwargs):
        super(CreateMultiverseUsd, self).__init__(*args, **kwargs)

        # Add animation data first, since it maintains order.
        self.data.update(lib.collect_animation_data(True))

        self.data["stripNamespaces"] = False
        self.data["mergeTransformAndShape"] = False
        self.data["writeAncestors"] = True
        self.data["flattenParentXforms"] = False
        self.data["writeSparseOverrides"] = False
        self.data["useMetaPrimPath"] = False
        self.data["customRootPath"] = ''
        self.data["customAttributes"] = ''
        self.data["nodeTypesToIgnore"] = ''
        self.data["writeMeshes"] = True
        self.data["writeCurves"] = True
        self.data["writeParticles"] = True
        self.data["writeCameras"] = False
        self.data["writeLights"] = False
        self.data["writeJoints"] = False
        self.data["writeCollections"] = False
        self.data["writePositions"] = True
        self.data["writeNormals"] = True
        self.data["writeUVs"] = True
        self.data["writeColorSets"] = False
        self.data["writeTangents"] = False
        self.data["writeRefPositions"] = False
        self.data["writeBlendShapes"] = False
        self.data["writeDisplayColor"] = False
        self.data["writeSkinWeights"] = False
        self.data["writeMaterialAssignment"] = False
        self.data["writeHardwareShader"] = False
        self.data["writeShadingNetworks"] = False
        self.data["writeTransformMatrix"] = True
        self.data["writeUsdAttributes"] = False
        self.data["timeVaryingTopology"] = False
        self.data["customMaterialNamespace"] = ''
        self.data["numTimeSamples"] = 1
        self.data["timeSamplesSpan"] = 0.0
@@ -0,0 +1,23 @@
from openpype.hosts.maya.api import plugin, lib


class CreateMultiverseUsdComp(plugin.Creator):
    """Create Multiverse USD Composition"""

    name = "usdCompositionMain"
    label = "Multiverse USD Composition"
    family = "usdComposition"
    icon = "cubes"

    def __init__(self, *args, **kwargs):
        super(CreateMultiverseUsdComp, self).__init__(*args, **kwargs)

        # Add animation data first, since it maintains order.
        self.data.update(lib.collect_animation_data(True))

        self.data["stripNamespaces"] = False
        self.data["mergeTransformAndShape"] = False
        self.data["flattenContent"] = False
        self.data["writePendingOverrides"] = False
        self.data["numTimeSamples"] = 1
        self.data["timeSamplesSpan"] = 0.0
@@ -0,0 +1,28 @@
from openpype.hosts.maya.api import plugin, lib


class CreateMultiverseUsdOver(plugin.Creator):
    """Multiverse USD data"""

    name = "usdOverrideMain"
    label = "Multiverse USD Override"
    family = "usdOverride"
    icon = "cubes"

    def __init__(self, *args, **kwargs):
        super(CreateMultiverseUsdOver, self).__init__(*args, **kwargs)

        # Add animation data first, since it maintains order.
        self.data.update(lib.collect_animation_data(True))

        self.data["writeAll"] = False
        self.data["writeTransforms"] = True
        self.data["writeVisibility"] = True
        self.data["writeAttributes"] = True
        self.data["writeMaterials"] = True
        self.data["writeVariants"] = True
        self.data["writeVariantsDefinition"] = True
        self.data["writeActiveState"] = True
        self.data["writeNamespaces"] = False
        self.data["numTimeSamples"] = 1
        self.data["timeSamplesSpan"] = 0.0
@@ -15,6 +15,14 @@ class CreateReview(plugin.Creator):
    keepImages = False
    isolate = False
    imagePlane = True
+    transparency = [
+        "preset",
+        "simple",
+        "object sorting",
+        "weighted average",
+        "depth peeling",
+        "alpha cut"
+    ]

    def __init__(self, *args, **kwargs):
        super(CreateReview, self).__init__(*args, **kwargs)

@@ -28,5 +36,6 @@ class CreateReview(plugin.Creator):
        data["isolate"] = self.isolate
        data["keepImages"] = self.keepImages
        data["imagePlane"] = self.imagePlane
+        data["transparency"] = self.transparency

        self.data = data

openpype/hosts/maya/plugins/load/load_multiverse_usd.py (new file, 102 lines)
@@ -0,0 +1,102 @@
# -*- coding: utf-8 -*-
import maya.cmds as cmds

from openpype.pipeline import (
    load,
    get_representation_path
)
from openpype.hosts.maya.api.lib import (
    maintained_selection,
    namespaced,
    unique_namespace
)
from openpype.hosts.maya.api.pipeline import containerise


class MultiverseUsdLoader(load.LoaderPlugin):
    """Load the USD by Multiverse"""

    families = ["model", "usd", "usdComposition", "usdOverride",
                "pointcache", "animation"]
    representations = ["usd", "usda", "usdc", "usdz", "abc"]

    label = "Read USD by Multiverse"
    order = -10
    icon = "code-fork"
    color = "orange"

    def load(self, context, name=None, namespace=None, options=None):

        asset = context['asset']['name']
        namespace = namespace or unique_namespace(
            asset + "_",
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        # Create the shape
        cmds.loadPlugin("MultiverseForMaya", quiet=True)

        shape = None
        transform = None
        with maintained_selection():
            cmds.namespace(addNamespace=namespace)
            with namespaced(namespace, new=False):
                import multiverse
                shape = multiverse.CreateUsdCompound(self.fname)
                transform = cmds.listRelatives(
                    shape, parent=True, fullPath=True)[0]

        # Lock the shape node so the user cannot delete it.
        cmds.lockNode(shape, lock=True)

        nodes = [transform, shape]
        self[:] = nodes

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def update(self, container, representation):
        # type: (dict, dict) -> None
        """Update container with specified representation."""
        node = container['objectName']
        assert cmds.objExists(node), "Missing container"

        members = cmds.sets(node, query=True) or []
        shapes = cmds.ls(members, type="mvUsdCompoundShape")
        assert shapes, "Cannot find mvUsdCompoundShape in container"

        path = get_representation_path(representation)

        import multiverse
        for shape in shapes:
            multiverse.SetUsdCompoundAssetPaths(shape, [path])

        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        # type: (dict) -> None
        """Remove loaded container."""
        # Delete container and its contents
        if cmds.objExists(container['objectName']):
            members = cmds.sets(container['objectName'], query=True) or []
            cmds.delete([container['objectName']] + members)

        # Remove the namespace, if empty
        namespace = container['namespace']
        if cmds.namespace(exists=namespace):
            members = cmds.namespaceInfo(namespace, listNamespace=True)
            if not members:
                cmds.namespace(removeNamespace=namespace)
            else:
                self.log.warning("Namespace not deleted because it "
                                 "still has members: %s", namespace)
@@ -4,6 +4,7 @@ import os
import sys
import json
import tempfile
+import platform
import contextlib
import subprocess
from collections import OrderedDict

@@ -334,7 +335,14 @@ class ExtractLook(openpype.api.Extractor):
        transfers = []
        hardlinks = []
        hashes = {}
-        force_copy = instance.data.get("forceCopy", False)
+        # Temporary fix to NOT create hardlinks on windows machines
+        if platform.system().lower() == "windows":
+            self.log.info(
+                "Forcing copy instead of hardlink due to issues on Windows..."
+            )
+            force_copy = True
+        else:
+            force_copy = instance.data.get("forceCopy", False)

        for filepath in files_metadata:

openpype/hosts/maya/plugins/publish/extract_multiverse_usd.py (new file, 210 lines)
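Note: this pairs with the new `forceCopy` default in `CreateLook` above — on Windows the flag is overridden to always copy, per the "Temporary fix" comment. The decision, reduced to a self-contained sketch:

import platform


def should_force_copy(instance_data):
    # Temporary behaviour from the hunk above: always copy on Windows
    # (hardlinks reportedly caused issues there), otherwise respect the
    # instance's "forceCopy" flag.
    if platform.system().lower() == "windows":
        return True
    return instance_data.get("forceCopy", False)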
@@ -0,0 +1,210 @@
import os
import six

from maya import cmds

import openpype.api
from openpype.hosts.maya.api.lib import maintained_selection


class ExtractMultiverseUsd(openpype.api.Extractor):
    """Extractor for USD by Multiverse."""

    label = "Extract Multiverse USD"
    hosts = ["maya"]
    families = ["usd"]

    @property
    def options(self):
        """Overridable options for Multiverse USD Export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.

        """

        return {
            "stripNamespaces": bool,
            "mergeTransformAndShape": bool,
            "writeAncestors": bool,
            "flattenParentXforms": bool,
            "writeSparseOverrides": bool,
            "useMetaPrimPath": bool,
            "customRootPath": str,
            "customAttributes": str,
            "nodeTypesToIgnore": str,
            "writeMeshes": bool,
            "writeCurves": bool,
            "writeParticles": bool,
            "writeCameras": bool,
            "writeLights": bool,
            "writeJoints": bool,
            "writeCollections": bool,
            "writePositions": bool,
            "writeNormals": bool,
            "writeUVs": bool,
            "writeColorSets": bool,
            "writeTangents": bool,
            "writeRefPositions": bool,
            "writeBlendShapes": bool,
            "writeDisplayColor": bool,
            "writeSkinWeights": bool,
            "writeMaterialAssignment": bool,
            "writeHardwareShader": bool,
            "writeShadingNetworks": bool,
            "writeTransformMatrix": bool,
            "writeUsdAttributes": bool,
            "timeVaryingTopology": bool,
            "customMaterialNamespace": str,
            "numTimeSamples": int,
            "timeSamplesSpan": float
        }

    @property
    def default_options(self):
        """The default options for Multiverse USD extraction."""

        return {
            "stripNamespaces": False,
            "mergeTransformAndShape": False,
            "writeAncestors": True,
            "flattenParentXforms": False,
            "writeSparseOverrides": False,
            "useMetaPrimPath": False,
            "customRootPath": str(),
            "customAttributes": str(),
            "nodeTypesToIgnore": str(),
            "writeMeshes": True,
            "writeCurves": True,
            "writeParticles": True,
            "writeCameras": False,
            "writeLights": False,
            "writeJoints": False,
            "writeCollections": False,
            "writePositions": True,
            "writeNormals": True,
            "writeUVs": True,
            "writeColorSets": False,
            "writeTangents": False,
            "writeRefPositions": False,
            "writeBlendShapes": False,
            "writeDisplayColor": False,
            "writeSkinWeights": False,
            "writeMaterialAssignment": False,
            "writeHardwareShader": False,
            "writeShadingNetworks": False,
            "writeTransformMatrix": True,
            "writeUsdAttributes": False,
            "timeVaryingTopology": False,
            "customMaterialNamespace": str(),
            "numTimeSamples": 1,
            "timeSamplesSpan": 0.0
        }

    def parse_overrides(self, instance, options):
        """Inspect data of instance to determine overridden options"""

        for key in instance.data:
            if key not in self.options:
                continue

            # Ensure the data is of correct type
            value = instance.data[key]
            if isinstance(value, six.text_type):
                value = str(value)
            if not isinstance(value, self.options[key]):
                self.log.warning(
                    "Overridden attribute {key} was of "
                    "the wrong type: {invalid_type} "
                    "- should have been {valid_type}".format(
                        key=key,
                        invalid_type=type(value).__name__,
                        valid_type=self.options[key].__name__))
                continue

            options[key] = value

        return options

    def process(self, instance):
        # Load plugin firstly
        cmds.loadPlugin("MultiverseForMaya", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        file_name = "{}.usd".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace('\\', '/')

        # Parse export options
        options = self.default_options
        options = self.parse_overrides(instance, options)
        self.log.info("Export options: {0}".format(options))

        # Perform extraction
        self.log.info("Performing extraction ...")

        with maintained_selection():
            members = instance.data("setMembers")
            members = cmds.ls(members,
                              dag=True,
                              shapes=True,
                              type=("mesh"),
                              noIntermediate=True,
                              long=True)
            self.log.info('Collected object {}'.format(members))

            import multiverse

            time_opts = None
            frame_start = instance.data['frameStart']
            frame_end = instance.data['frameEnd']
            handle_start = instance.data['handleStart']
            handle_end = instance.data['handleEnd']
            step = instance.data['step']
            fps = instance.data['fps']
            if frame_end != frame_start:
                time_opts = multiverse.TimeOptions()

                time_opts.writeTimeRange = True
                time_opts.frameRange = (
                    frame_start - handle_start, frame_end + handle_end)
                time_opts.frameIncrement = step
                time_opts.numTimeSamples = instance.data["numTimeSamples"]
                time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"]
                time_opts.framePerSecond = fps

            asset_write_opts = multiverse.AssetWriteOptions(time_opts)
            options_discard_keys = {
                'numTimeSamples',
                'timeSamplesSpan',
                'frameStart',
                'frameEnd',
                'handleStart',
                'handleEnd',
                'step',
                'fps'
            }
            for key, value in options.items():
                if key in options_discard_keys:
                    continue
                setattr(asset_write_opts, key, value)

            multiverse.WriteAsset(file_path, members, asset_write_opts)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'usd',
            'ext': 'usd',
            'files': file_name,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance {} to {}".format(
            instance.name, file_path))
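Note: the `options`/`default_options`/`parse_overrides` trio implements a simple typed-override scheme — a value from `instance.data` replaces a default only when its key is declared and its type matches. The core check, reduced to a self-contained sketch:

def apply_typed_overrides(defaults, declared_types, overrides):
    # Start from the defaults; accept an override only if the key is
    # declared and the value has the declared type.
    options = dict(defaults)
    for key, value in overrides.items():
        expected = declared_types.get(key)
        if expected is None or not isinstance(value, expected):
            continue
        options[key] = value
    return options


# e.g. apply_typed_overrides({"writeMeshes": True}, {"writeMeshes": bool},
#                            {"writeMeshes": "yes"}) keeps the default,
# because "yes" is not a bool.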
@@ -0,0 +1,151 @@
import os

from maya import cmds

import openpype.api
from openpype.hosts.maya.api.lib import maintained_selection


class ExtractMultiverseUsdComposition(openpype.api.Extractor):
    """Extractor of Multiverse USD Composition."""

    label = "Extract Multiverse USD Composition"
    hosts = ["maya"]
    families = ["usdComposition"]

    @property
    def options(self):
        """Overridable options for Multiverse USD Export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.

        """

        return {
            "stripNamespaces": bool,
            "mergeTransformAndShape": bool,
            "flattenContent": bool,
            "writePendingOverrides": bool,
            "numTimeSamples": int,
            "timeSamplesSpan": float
        }

    @property
    def default_options(self):
        """The default options for Multiverse USD extraction."""

        return {
            "stripNamespaces": True,
            "mergeTransformAndShape": False,
            "flattenContent": False,
            "writePendingOverrides": False,
            "numTimeSamples": 1,
            "timeSamplesSpan": 0.0
        }

    def parse_overrides(self, instance, options):
        """Inspect data of instance to determine overridden options"""

        for key in instance.data:
            if key not in self.options:
                continue

            # Ensure the data is of correct type
            value = instance.data[key]
            if not isinstance(value, self.options[key]):
                self.log.warning(
                    "Overridden attribute {key} was of "
                    "the wrong type: {invalid_type} "
                    "- should have been {valid_type}".format(
                        key=key,
                        invalid_type=type(value).__name__,
                        valid_type=self.options[key].__name__))
                continue

            options[key] = value

        return options

    def process(self, instance):
        # Load plugin firstly
        cmds.loadPlugin("MultiverseForMaya", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        file_name = "{}.usd".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace('\\', '/')

        # Parse export options
        options = self.default_options
        options = self.parse_overrides(instance, options)
        self.log.info("Export options: {0}".format(options))

        # Perform extraction
        self.log.info("Performing extraction ...")

        with maintained_selection():
            members = instance.data("setMembers")
            members = cmds.ls(members,
                              dag=True,
                              shapes=True,
                              type="mvUsdCompoundShape",
                              noIntermediate=True,
                              long=True)
            self.log.info('Collected object {}'.format(members))

            import multiverse

            time_opts = None
            frame_start = instance.data['frameStart']
            frame_end = instance.data['frameEnd']
            handle_start = instance.data['handleStart']
            handle_end = instance.data['handleEnd']
            step = instance.data['step']
            fps = instance.data['fps']
            if frame_end != frame_start:
                time_opts = multiverse.TimeOptions()

                time_opts.writeTimeRange = True
                time_opts.frameRange = (
                    frame_start - handle_start, frame_end + handle_end)
                time_opts.frameIncrement = step
                time_opts.numTimeSamples = instance.data["numTimeSamples"]
                time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"]
                time_opts.framePerSecond = fps

            comp_write_opts = multiverse.CompositionWriteOptions()
            options_discard_keys = {
                'numTimeSamples',
                'timeSamplesSpan',
                'frameStart',
                'frameEnd',
                'handleStart',
                'handleEnd',
                'step',
                'fps'
            }
            for key, value in options.items():
                if key in options_discard_keys:
                    continue
                setattr(comp_write_opts, key, value)

            multiverse.WriteComposition(file_path, members, comp_write_opts)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'usd',
            'ext': 'usd',
            'files': file_name,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance {} to {}".format(
            instance.name, file_path))
@@ -0,0 +1,139 @@
import os

import openpype.api
from openpype.hosts.maya.api.lib import maintained_selection

from maya import cmds


class ExtractMultiverseUsdOverride(openpype.api.Extractor):
    """Extractor for USD Override by Multiverse."""

    label = "Extract Multiverse USD Override"
    hosts = ["maya"]
    families = ["usdOverride"]

    @property
    def options(self):
        """Overridable options for Multiverse USD Export

        Given in the following format
            - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.

        """

        return {
            "writeAll": bool,
            "writeTransforms": bool,
            "writeVisibility": bool,
            "writeAttributes": bool,
            "writeMaterials": bool,
            "writeVariants": bool,
            "writeVariantsDefinition": bool,
            "writeActiveState": bool,
            "writeNamespaces": bool,
            "numTimeSamples": int,
            "timeSamplesSpan": float
        }

    @property
    def default_options(self):
        """The default options for Multiverse USD extraction."""

        return {
            "writeAll": False,
            "writeTransforms": True,
            "writeVisibility": True,
            "writeAttributes": True,
            "writeMaterials": True,
            "writeVariants": True,
            "writeVariantsDefinition": True,
            "writeActiveState": True,
            "writeNamespaces": False,
            "numTimeSamples": 1,
            "timeSamplesSpan": 0.0
        }

    def process(self, instance):
        # Load plugin firstly
        cmds.loadPlugin("MultiverseForMaya", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        file_name = "{}.usda".format(instance.name)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace("\\", "/")

        # Parse export options
        options = self.default_options
        self.log.info("Export options: {0}".format(options))

        # Perform extraction
        self.log.info("Performing extraction ...")

        with maintained_selection():
            members = instance.data("setMembers")
            members = cmds.ls(members,
                              dag=True,
                              shapes=True,
                              type="mvUsdCompoundShape",
                              noIntermediate=True,
                              long=True)
            self.log.info("Collected object {}".format(members))

            # TODO: Deal with asset, composition, overide with options.
            import multiverse

            time_opts = None
            frame_start = instance.data["frameStart"]
            frame_end = instance.data["frameEnd"]
            handle_start = instance.data["handleStart"]
            handle_end = instance.data["handleEnd"]
            step = instance.data["step"]
            fps = instance.data["fps"]
            if frame_end != frame_start:
                time_opts = multiverse.TimeOptions()

                time_opts.writeTimeRange = True
                time_opts.frameRange = (
                    frame_start - handle_start, frame_end + handle_end)
                time_opts.frameIncrement = step
                time_opts.numTimeSamples = instance.data["numTimeSamples"]
                time_opts.timeSamplesSpan = instance.data["timeSamplesSpan"]
                time_opts.framePerSecond = fps

            over_write_opts = multiverse.OverridesWriteOptions(time_opts)
            options_discard_keys = {
                "numTimeSamples",
                "timeSamplesSpan",
                "frameStart",
                "frameEnd",
                "handleStart",
                "handleEnd",
                "step",
                "fps"
            }
            for key, value in options.items():
                if key in options_discard_keys:
                    continue
                setattr(over_write_opts, key, value)

            for member in members:
                multiverse.WriteOverrides(file_path, member, over_write_opts)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            "name": "usd",
            "ext": "usd",
            "files": file_name,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance {} to {}".format(
            instance.name, file_path))
@@ -73,6 +73,11 @@ class ExtractPlayblast(openpype.api.Extractor):
        pm.currentTime(refreshFrameInt - 1, edit=True)
        pm.currentTime(refreshFrameInt, edit=True)

+        # Override transparency if requested.
+        transparency = instance.data.get("transparency", 0)
+        if transparency != 0:
+            preset["viewport2_options"]["transparencyAlgorithm"] = transparency
+
        # Isolate view is requested by having objects in the set besides a
        # camera.
        if preset.pop("isolate_view", False) and instance.data.get("isolate"):
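Note: this consumes the `transparency` value that `CreateReview` now puts on the instance (see the creator hunk earlier). Presumably the publisher UI turns the creator's option list into a selected index, so only a non-zero value overrides the preset's `transparencyAlgorithm`; that index interpretation is an assumption here, not stated in the diff. A reduced sketch of the hand-off:

# Sketch: the creator stores a transparency choice on the instance;
# the extractor applies it only when it differs from the default (0).
preset = {"viewport2_options": {"transparencyAlgorithm": 0}}
instance_data = {"transparency": 3}  # hypothetical selected index

transparency = instance_data.get("transparency", 0)
if transparency != 0:
    preset["viewport2_options"]["transparencyAlgorithm"] = transparency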
@@ -26,6 +26,7 @@ from openpype.tools.utils import host_tools
from openpype.lib.path_tools import HostDirmap
from openpype.settings import get_project_settings
from openpype.modules import ModulesManager
+from openpype.pipeline import discover_legacy_creator_plugins

from .workio import (
    save_file,

@@ -1902,7 +1903,7 @@ def recreate_instance(origin_node, avalon_data=None):
    # create new node
    # get appropriate plugin class
    creator_plugin = None
-    for Creator in api.discover(api.Creator):
+    for Creator in discover_legacy_creator_plugins():
        if Creator.__name__ == data["creator"]:
            creator_plugin = Creator
            break
@@ -5,7 +5,6 @@ from collections import OrderedDict
import nuke

import pyblish.api
-import avalon.api

import openpype
from openpype.api import (

@@ -15,10 +14,11 @@ from openpype.api import (
)
from openpype.lib import register_event_callback
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    register_inventory_action_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    deregister_inventory_action_path,
    AVALON_CONTAINER_ID,
)

@@ -106,7 +106,7 @@ def install():
    log.info("Registering Nuke plug-ins..")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_loader_plugin_path(LOAD_PATH)
-    avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
+    register_creator_plugin_path(CREATE_PATH)
    register_inventory_action_path(INVENTORY_PATH)

    # Register Avalon event for workfiles loading.

@@ -132,7 +132,7 @@ def uninstall():
    pyblish.deregister_host("nuke")
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
-    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
+    deregister_creator_plugin_path(CREATE_PATH)
    deregister_inventory_action_path(INVENTORY_PATH)

    pyblish.api.deregister_callback(
@@ -450,6 +450,7 @@ class ExporterReviewMov(ExporterReview):

    def generate_mov(self, farm=False, **kwargs):
        self.publish_on_farm = farm
+        read_raw = kwargs["read_raw"]
        reformat_node_add = kwargs["reformat_node_add"]
        reformat_node_config = kwargs["reformat_node_config"]
        bake_viewer_process = kwargs["bake_viewer_process"]

@@ -484,6 +485,9 @@ class ExporterReviewMov(ExporterReview):
            r_node["origlast"].setValue(self.last_frame)
            r_node["colorspace"].setValue(self.write_colorspace)

+            if read_raw:
+                r_node["raw"].setValue(1)
+
            # connect
            self._temp_nodes[subset].append(r_node)
            self.previous_node = r_node
@@ -72,7 +72,7 @@ class LoadEffects(load.LoaderPlugin):
        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
-                      for key, value in json.load(f).iteritems()}
+                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)

@@ -188,7 +188,7 @@ class LoadEffects(load.LoaderPlugin):
        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
-                      for key, value in json.load(f).iteritems()}
+                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)

@@ -330,11 +330,11 @@ class LoadEffects(load.LoaderPlugin):

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
-                    for key, value in input.iteritems()}
+                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, unicode):
            return input.encode('utf-8')
        elif isinstance(input, str):
            return str(input)
        else:
            return input
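Note: these `iteritems()` → `items()` swaps (here and in the two sibling loaders below) are Python 3 fixes — `dict.iteritems()` no longer exists in Python 3, while `items()` works on both versions. The surrounding `byteify` helper still references the Python 2-only name `unicode`, so that branch is only reachable on Python 2. A sketch of a version-agnostic variant, as an illustration rather than what the commit ships:

import sys


def byteify(value):
    # Sketch: dict/list recursion mirrors the loader's helper; the
    # `unicode` name is only looked up on Python 2 thanks to the
    # short-circuiting version check.
    if isinstance(value, dict):
        return {byteify(k): byteify(v) for k, v in value.items()}
    if isinstance(value, list):
        return [byteify(e) for e in value]
    if sys.version_info[0] == 2 and isinstance(value, unicode):  # noqa: F821
        return value.encode("utf-8")
    return value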
@@ -74,7 +74,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
-                      for key, value in json.load(f).iteritems()}
+                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)

@@ -194,7 +194,7 @@ class LoadEffectsInputProcess(load.LoaderPlugin):
        # getting data from json file with unicode conversion
        with open(file, "r") as f:
            json_f = {self.byteify(key): self.byteify(value)
-                      for key, value in json.load(f).iteritems()}
+                      for key, value in json.load(f).items()}

        # get correct order of nodes by positions on track and subtrack
        nodes_order = self.reorder_nodes(json_f)

@@ -350,11 +350,11 @@ class LoadEffectsInputProcess(load.LoaderPlugin):

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
-                    for key, value in input.iteritems()}
+                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, unicode):
            return input.encode('utf-8')
        elif isinstance(input, str):
            return str(input)
        else:
            return input
@@ -240,7 +240,7 @@ class LoadGizmoInputProcess(load.LoaderPlugin):

        if isinstance(input, dict):
            return {self.byteify(key): self.byteify(value)
-                    for key, value in input.iteritems()}
+                    for key, value in input.items()}
        elif isinstance(input, list):
            return [self.byteify(element) for element in input]
        elif isinstance(input, unicode):
@ -24,7 +24,11 @@ class ExtractReviewDataMov(openpype.api.Extractor):
|
|||
outputs = {}
|
||||
|
||||
def process(self, instance):
|
||||
families = instance.data["families"]
|
||||
families = set(instance.data["families"])
|
||||
|
||||
# add main family to make sure all families are compared
|
||||
families.add(instance.data["family"])
|
||||
|
||||
task_type = instance.context.data["taskType"]
|
||||
subset = instance.data["subset"]
|
||||
self.log.info("Creating staging dir...")
|
||||
|
|
@ -50,51 +54,31 @@ class ExtractReviewDataMov(openpype.api.Extractor):

f_task_types = o_data["filter"]["task_types"]
f_subsets = o_data["filter"]["sebsets"]

self.log.debug(
    "f_families `{}` > families: {}".format(
        f_families, families))

self.log.debug(
    "f_task_types `{}` > task_type: {}".format(
        f_task_types, task_type))

self.log.debug(
    "f_subsets `{}` > subset: {}".format(
        f_subsets, subset))

# test if family found in context
test_families = any([
    # first if exact family set is matching
    # make sure only interesetion of list is correct
    bool(set(families).intersection(f_families)),
    # and if famiies are set at all
    # if not then return True because we want this preset
    # to be active if nothig is set
    bool(not f_families)
])
# using intersection to make sure all defined
# families are present in combination
if f_families and not families.intersection(f_families):
    continue

# test task types from filter
test_task_types = any([
    # check if actual task type is defined in task types
    # set in preset's filter
    bool(task_type in f_task_types),
    # and if taskTypes are defined in preset filter
    # if not then return True, because we want this filter
    # to be active if no taskType is set
    bool(not f_task_types)
])
if f_task_types and task_type not in f_task_types:
    continue

# test subsets from filter
test_subsets = any([
    # check if any of subset filter inputs
    # converted to regex patern is not found in subset
    # we keep strict case sensitivity
    bool(next((
        s for s in f_subsets
        if re.search(re.compile(s), subset)
    ), None)),
    # but if no subsets were set then make this acuntable too
    bool(not f_subsets)
])

# we need all filters to be positive for this
# preset to be activated
test_all = all([
    test_families,
    test_task_types,
    test_subsets
])

# if it is not positive then skip this preset
if not test_all:
if f_subsets and not any(
        re.search(s, subset) for s in f_subsets):
    continue

self.log.info(

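The rewrite collapses each `any([...])`-plus-`all([...])` test into an early `continue`, with an empty filter list acting as a wildcard. The same predicate, condensed into a hypothetical standalone function for clarity:

    import re

    def preset_matches(families, task_type, subset,
                       f_families, f_task_types, f_subsets):
        # an empty filter list places no constraint
        if f_families and not set(families).intersection(f_families):
            return False
        if f_task_types and task_type not in f_task_types:
            return False
        # subset filters are regex patterns, case sensitive
        if f_subsets and not any(re.search(s, subset) for s in f_subsets):
            return False
        return True
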
@ -25,7 +25,7 @@ class RepairNukeWriteDeadlineTab(pyblish.api.Action):

# Remove existing knobs.
knob_names = openpype.hosts.nuke.lib.get_deadline_knob_names()
for name, knob in group_node.knobs().iteritems():
for name, knob in group_node.knobs().items():
    if name in knob_names:
        group_node.removeKnob(knob)

@ -1,11 +1,10 @@

import os
import toml

import nuke

from avalon import api
import pyblish.api
import openpype.api
from openpype.pipeline import discover_creator_plugins
from openpype.hosts.nuke.api.lib import get_avalon_knob_data

@ -79,7 +78,7 @@ class ValidateWriteLegacy(pyblish.api.InstancePlugin):

# get appropriate plugin class
creator_plugin = None
for Creator in api.discover(api.Creator):
for Creator in discover_creator_plugins():
    if Creator.__name__ != Create_name:
        continue

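`discover_creator_plugins()` takes over from the generic `avalon.api.discover(api.Creator)` call; the loop still matches discovered classes by name. The lookup pattern on its own (with `Create_name` set to an example value, since the real one comes from the node data above):

    from openpype.pipeline import discover_creator_plugins

    Create_name = "CreateWriteRender"  # example expected class name

    creator_plugin = None
    for Creator in discover_creator_plugins():
        # pick the creator class whose name matches
        if Creator.__name__ == Create_name:
            creator_plugin = Creator
            break
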
@ -9,9 +9,10 @@ from avalon import io

from openpype.api import Logger
from openpype.lib import register_event_callback
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
import openpype.hosts.photoshop

@ -75,7 +76,7 @@ def install():

pyblish.api.register_plugin_path(PUBLISH_PATH)
register_loader_plugin_path(LOAD_PATH)
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
register_creator_plugin_path(CREATE_PATH)
log.info(PUBLISH_PATH)

pyblish.api.register_callback(

@ -88,7 +89,7 @@ def install():

def uninstall():
    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
    deregister_loader_plugin_path(LOAD_PATH)
    avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
    deregister_creator_plugin_path(CREATE_PATH)


def ls():

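The same substitution repeats for the Resolve, TVPaint, and Unreal hosts below: creator plug-in paths move from the avalon registry onto the dedicated helpers in `openpype.pipeline`. The before/after shape, with a placeholder path:

    from openpype.pipeline import (
        register_creator_plugin_path,
        deregister_creator_plugin_path,
    )

    CREATE_PATH = "/path/to/host/plugins/create"  # placeholder

    # was: avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
    register_creator_plugin_path(CREATE_PATH)

    # was: avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
    deregister_creator_plugin_path(CREATE_PATH)
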
@ -1,9 +1,9 @@

from Qt import QtWidgets
from openpype.pipeline import create
from openpype.pipeline import LegacyCreator
from openpype.hosts.photoshop import api as photoshop


class CreateImage(create.LegacyCreator):
class CreateImage(LegacyCreator):
    """Image folder for publish."""

    name = "imageDefault"

@ -29,7 +29,8 @@ class ValidateNamingRepair(pyblish.api.Action):

stub = photoshop.stub()
for instance in instances:
    self.log.info("validate_naming instance {}".format(instance))
    metadata = stub.read(instance[0])
    layer_item = instance.data["layer"]
    metadata = stub.read(layer_item)
    self.log.info("metadata instance {}".format(metadata))
    layer_name = None
    if metadata.get("uuid"):

@ -43,11 +44,11 @@ class ValidateNamingRepair(pyblish.api.Action):

stub.rename_layer(instance.data["uuid"], layer_name)

subset_name = re.sub(invalid_chars, replace_char,
                     instance.data["name"])
                     instance.data["subset"])

instance[0].Name = layer_name or subset_name
layer_item.name = layer_name or subset_name
metadata["subset"] = subset_name
stub.imprint(instance[0], metadata)
stub.imprint(layer_item, metadata)

return True

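After the change, the repair action addresses the layer through `instance.data["layer"]` instead of indexing the instance. The repaired flow reduces to roughly this (a sketch assembled from the lines above; `stub`, `invalid_chars`, `replace_char`, and `layer_name` come from the surrounding plugin):

    layer_item = instance.data["layer"]
    metadata = stub.read(layer_item)

    subset_name = re.sub(invalid_chars, replace_char,
                         instance.data["subset"])
    layer_item.name = layer_name or subset_name
    metadata["subset"] = subset_name
    stub.imprint(layer_item, metadata)
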
@ -4,14 +4,17 @@ Basic avalon integration

import os
import contextlib
from collections import OrderedDict

from avalon import api as avalon
from avalon import schema

from pyblish import api as pyblish

from avalon import schema

from openpype.api import Logger
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from . import lib

@ -46,7 +49,7 @@ def install():

log.info("Registering DaVinci Resovle plug-ins..")

register_loader_plugin_path(LOAD_PATH)
avalon.register_plugin_path(LegacyCreator, CREATE_PATH)
register_creator_plugin_path(CREATE_PATH)

# register callback for switching publishable
pyblish.register_callback("instanceToggled", on_pyblish_instance_toggled)

@ -70,7 +73,7 @@ def uninstall():

log.info("Deregistering DaVinci Resovle plug-ins..")

deregister_loader_plugin_path(LOAD_PATH)
avalon.deregister_plugin_path(LegacyCreator, CREATE_PATH)
deregister_creator_plugin_path(CREATE_PATH)

# register callback for switching publishable
pyblish.deregister_callback("instanceToggled", on_pyblish_instance_toggled)

@ -1,8 +1,8 @@

from openpype.hosts.traypublisher.api import pipeline
from openpype.lib import FileDef
from openpype.pipeline import (
    Creator,
    CreatedInstance,
    lib
    CreatedInstance
)

@ -80,7 +80,7 @@ class WorkfileCreator(Creator):

def get_instance_attr_defs(self):
    output = [
        lib.FileDef(
        FileDef(
            "filepath",
            folders=False,
            extensions=self.extensions,

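`FileDef` is now imported directly from `openpype.lib` rather than through the removed `lib` re-export of `openpype.pipeline`. Completing the truncated call with only the arguments visible in the diff (the closing brackets are assumed):

    from openpype.lib import FileDef

    def get_instance_attr_defs(self):
        return [
            FileDef(
                "filepath",
                folders=False,
                extensions=self.extensions,
            )
        ]
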
@ -6,7 +6,7 @@ from openpype.pipeline import PublishValidationError

class ValidateWorkfilePath(pyblish.api.InstancePlugin):
    """Validate existence of workfile instance existence."""

    label = "Collect Workfile"
    label = "Validate Workfile"
    order = pyblish.api.ValidatorOrder - 0.49
    families = ["workfile"]
    hosts = ["traypublisher"]

@ -15,9 +15,10 @@ from openpype.hosts import tvpaint

from openpype.api import get_current_project_settings
from openpype.lib import register_event_callback
from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)

@ -82,7 +83,7 @@ def install():

pyblish.api.register_host("tvpaint")
pyblish.api.register_plugin_path(PUBLISH_PATH)
register_loader_plugin_path(LOAD_PATH)
avalon.api.register_plugin_path(LegacyCreator, CREATE_PATH)
register_creator_plugin_path(CREATE_PATH)

registered_callbacks = (
    pyblish.api.registered_callbacks().get("instanceToggled") or []

@ -104,7 +105,7 @@ def uninstall():

pyblish.api.deregister_host("tvpaint")
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
deregister_loader_plugin_path(LOAD_PATH)
avalon.api.deregister_plugin_path(LegacyCreator, CREATE_PATH)
deregister_creator_plugin_path(CREATE_PATH)


def containerise(

@ -7,9 +7,10 @@ import pyblish.api

from avalon import api

from openpype.pipeline import (
    LegacyCreator,
    register_loader_plugin_path,
    register_creator_plugin_path,
    deregister_loader_plugin_path,
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from openpype.tools.utils import host_tools

@ -49,7 +50,7 @@ def install():

logger.info("installing OpenPype for Unreal")
pyblish.api.register_plugin_path(str(PUBLISH_PATH))
register_loader_plugin_path(str(LOAD_PATH))
api.register_plugin_path(LegacyCreator, str(CREATE_PATH))
register_creator_plugin_path(str(CREATE_PATH))
_register_callbacks()
_register_events()

@ -58,7 +59,7 @@ def uninstall():

"""Uninstall Unreal configuration for Avalon."""
pyblish.api.deregister_plugin_path(str(PUBLISH_PATH))
deregister_loader_plugin_path(str(LOAD_PATH))
api.deregister_plugin_path(LegacyCreator, str(CREATE_PATH))
deregister_creator_plugin_path(str(CREATE_PATH))


def _register_callbacks():