Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-26 13:52:15 +01:00)

Commit 06c843235d
Merge branch 'develop' into enhancement/maya_validate_node_ids_related_report

88 changed files with 501 additions and 413 deletions
@@ -14,3 +14,15 @@ AYON_SERVER_ENABLED = True
# Indicate if AYON entities should be used instead of OpenPype entities
USE_AYON_ENTITIES = True
# -------------------------


__all__ = (
    "__version__",

    # Deprecated
    "AYON_CORE_ROOT",
    "PACKAGE_DIR",
    "PLUGINS_DIR",
    "AYON_SERVER_ENABLED",
    "USE_AYON_ENTITIES",
)
@@ -3,7 +3,6 @@
import os
import sys
import json
import warnings


class Commands:
@@ -31,6 +31,7 @@ __all__ = [
    "get_stub",

    # pipeline
    "AfterEffectsHost",
    "ls",
    "containerise",
@@ -1,14 +1,11 @@
import os
import re
import tempfile
import attr

import attr
import pyblish.api

from ayon_core.settings import get_project_settings
from ayon_core.pipeline import publish
from ayon_core.pipeline.publish import RenderInstance

from ayon_core.hosts.aftereffects.api import get_stub
@@ -4,7 +4,6 @@ import bpy

from ayon_core.pipeline import publish
from ayon_core.hosts.blender.api import plugin
from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY


class ExtractCameraABC(publish.Extractor, publish.OptionalPyblishPluginMixin):
@@ -4,7 +4,6 @@ import bpy

from ayon_core.pipeline import publish
from ayon_core.hosts.blender.api import plugin
from ayon_core.hosts.blender.api.pipeline import AVALON_PROPERTY


class ExtractFBX(publish.Extractor, publish.OptionalPyblishPluginMixin):
@@ -23,7 +23,7 @@ from .lib import (
    reset_segment_selection,
    get_segment_attributes,
    get_clips_in_reels,
    get_reformated_filename,
    get_reformatted_filename,
    get_frame_from_filename,
    get_padding_from_filename,
    maintained_object_duplication,
@@ -101,7 +101,7 @@ __all__ = [
    "reset_segment_selection",
    "get_segment_attributes",
    "get_clips_in_reels",
    "get_reformated_filename",
    "get_reformatted_filename",
    "get_frame_from_filename",
    "get_padding_from_filename",
    "maintained_object_duplication",
@@ -607,7 +607,7 @@ def get_clips_in_reels(project):
    return output_clips


def get_reformated_filename(filename, padded=True):
def get_reformatted_filename(filename, padded=True):
    """
    Return fixed python expression path

@@ -618,7 +618,7 @@ def get_reformated_filename(filename, padded=True):
        type: string with reformatted path

    Example:
        get_reformated_filename("plate.1001.exr") > plate.%04d.exr
        get_reformatted_filename("plate.1001.exr") > plate.%04d.exr

    """
    found = FRAME_PATTERN.search(filename)
@@ -644,13 +644,13 @@ class PublishableClip:
            "families": [self.base_product_type, self.product_type]
        }

    def _convert_to_entity(self, type, template):
    def _convert_to_entity(self, src_type, template):
        """ Converting input key to key with type. """
        # convert to entity type
        entity_type = self.types.get(type, None)
        folder_type = self.types.get(src_type, None)

        assert entity_type, "Missing entity type for `{}`".format(
            type
        assert folder_type, "Missing folder type for `{}`".format(
            src_type
        )

        # first collect formatting data to use for formatting template
@@ -661,7 +661,7 @@ class PublishableClip:
            formatting_data[_k] = value

        return {
            "entity_type": entity_type,
            "folder_type": folder_type,
            "entity_name": template.format(
                **formatting_data
            )
@@ -256,7 +256,7 @@ def create_otio_reference(clip_data, fps=None):

    if not otio_ex_ref_item:
        dirname, file_name = os.path.split(path)
        file_name = utils.get_reformated_filename(file_name, padded=False)
        file_name = utils.get_reformatted_filename(file_name, padded=False)
        reformated_path = os.path.join(dirname, file_name)
        # in case old OTIO or video file create `ExternalReference`
        otio_ex_ref_item = otio.schema.ExternalReference(
@@ -21,7 +21,7 @@ def frames_to_seconds(frames, framerate):
    return otio.opentime.to_seconds(rt)


def get_reformated_filename(filename, padded=True):
def get_reformatted_filename(filename, padded=True):
    """
    Return fixed python expression path

@@ -32,7 +32,7 @@ def get_reformated_filename(filename, padded=True):
        type: string with reformatted path

    Example:
        get_reformated_filename("plate.1001.exr") > plate.%04d.exr
        get_reformatted_filename("plate.1001.exr") > plate.%04d.exr

    """
    found = FRAME_PATTERN.search(filename)
@@ -100,6 +100,12 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
            marker_data["handleEnd"] = min(
                marker_data["handleEnd"], tail)

            # Backward compatibility fix of 'entity_type' > 'folder_type'
            if "parents" in marker_data:
                for parent in marker_data["parents"]:
                    if "entity_type" in parent:
                        parent["folder_type"] = parent.pop("entity_type")

            workfile_start = self._set_workfile_start(marker_data)

            with_audio = bool(marker_data.pop("audio"))
@@ -28,7 +28,6 @@ from ayon_core.tools.utils import host_tools

from .lib import (
    get_current_comp,
    comp_lock_and_undo_chunk,
    validate_comp_prefs
)
@@ -1,6 +1,11 @@
from ayon_core.lib import EnumDef
from ayon_core.lib import (
    UILabelDef,
    NumberDef,
    EnumDef
)

from ayon_core.hosts.fusion.api.plugin import GenericCreateSaver
from ayon_core.hosts.fusion.api.lib import get_current_comp


class CreateSaver(GenericCreateSaver):
@@ -45,6 +50,7 @@ class CreateSaver(GenericCreateSaver):
            self._get_reviewable_bool(),
            self._get_frame_range_enum(),
            self._get_image_format_enum(),
            *self._get_custom_frame_range_attribute_defs()
        ]
        return attr_defs

@@ -53,6 +59,7 @@ class CreateSaver(GenericCreateSaver):
            "current_folder": "Current Folder context",
            "render_range": "From render in/out",
            "comp_range": "From composition timeline",
            "custom_range": "Custom frame range",
        }

        return EnumDef(
@@ -61,3 +68,82 @@ class CreateSaver(GenericCreateSaver):
            label="Frame range source",
            default=self.default_frame_range_option
        )

    @staticmethod
    def _get_custom_frame_range_attribute_defs() -> list:
        # Define custom frame range defaults based on current comp
        # timeline settings (if a comp is currently open)
        comp = get_current_comp()
        if comp is not None:
            attrs = comp.GetAttrs()
            frame_defaults = {
                "frameStart": int(attrs["COMPN_GlobalStart"]),
                "frameEnd": int(attrs["COMPN_GlobalEnd"]),
                "handleStart": int(
                    attrs["COMPN_RenderStart"] - attrs["COMPN_GlobalStart"]
                ),
                "handleEnd": int(
                    attrs["COMPN_GlobalEnd"] - attrs["COMPN_RenderEnd"]
                ),
            }
        else:
            frame_defaults = {
                "frameStart": 1001,
                "frameEnd": 1100,
                "handleStart": 0,
                "handleEnd": 0
            }

        return [
            UILabelDef(
                label="<br><b>Custom Frame Range</b><br>"
                      "<i>only used with 'Custom frame range' source</i>"
            ),
            NumberDef(
                "custom_frameStart",
                label="Frame Start",
                default=frame_defaults["frameStart"],
                minimum=0,
                decimals=0,
                tooltip=(
                    "Set the start frame for the export.\n"
                    "Only used if frame range source is 'Custom frame range'."
                )
            ),
            NumberDef(
                "custom_frameEnd",
                label="Frame End",
                default=frame_defaults["frameEnd"],
                minimum=0,
                decimals=0,
                tooltip=(
                    "Set the end frame for the export.\n"
                    "Only used if frame range source is 'Custom frame range'."
                )
            ),
            NumberDef(
                "custom_handleStart",
                label="Handle Start",
                default=frame_defaults["handleStart"],
                minimum=0,
                decimals=0,
                tooltip=(
                    "Set the start handles for the export, this will be "
                    "added before the start frame.\n"
                    "Only used if frame range source is 'Custom frame range'."
                )
            ),
            NumberDef(
                "custom_handleEnd",
                label="Handle End",
                default=frame_defaults["handleEnd"],
                minimum=0,
                decimals=0,
                tooltip=(
                    "Set the end handles for the export, this will be added "
                    "after the end frame.\n"
                    "Only used if frame range source is 'Custom frame range'."
                )
            )
        ]
@@ -57,6 +57,14 @@ class CollectInstanceData(pyblish.api.InstancePlugin):
        start_with_handle = comp_start
        end_with_handle = comp_end

        if frame_range_source == "custom_range":
            start = int(instance.data["custom_frameStart"])
            end = int(instance.data["custom_frameEnd"])
            handle_start = int(instance.data["custom_handleStart"])
            handle_end = int(instance.data["custom_handleEnd"])
            start_with_handle = start - handle_start
            end_with_handle = end + handle_end

        frame = instance.data["creator_attributes"].get("frame")
        # explicitly publishing only single frame
        if frame is not None:
@@ -21,12 +21,12 @@ class CreateFarmRender(plugin.Creator):
        path = "render/{0}/{0}.".format(node.split("/")[-1])
        harmony.send(
            {
                "function": f"PypeHarmony.Creators.CreateRender.create",
                "function": "PypeHarmony.Creators.CreateRender.create",
                "args": [node, path]
            })
        harmony.send(
            {
                "function": f"PypeHarmony.color",
                "function": "PypeHarmony.color",
                "args": [[0.9, 0.75, 0.3, 1.0]]
            }
        )
@@ -1,8 +1,8 @@
import os
import pyblish.api

import pyblish.api


class CollectAudio(pyblish.api.InstancePlugin):
    """
    Collect relative path for audio file to instance.
@@ -17,7 +17,7 @@ class CollectScene(pyblish.api.ContextPlugin):
        """Plugin entry point."""
        result = harmony.send(
            {
                f"function": "PypeHarmony.getSceneSettings",
                "function": "PypeHarmony.getSceneSettings",
                "args": []}
        )["result"]

@@ -62,7 +62,7 @@ class CollectScene(pyblish.api.ContextPlugin):

        result = harmony.send(
            {
                f"function": "PypeHarmony.getVersion",
                "function": "PypeHarmony.getVersion",
                "args": []}
        )["result"]
        context.data["harmonyVersion"] = "{}.{}".format(result[0], result[1])
@@ -1,10 +1,12 @@
import os

import hiero.core.events

from ayon_core.lib import Logger, register_event_callback

from .lib import (
    sync_avalon_data_to_workfile,
    launch_workfiles_app,
    selection_changed_timeline,
    before_project_save,
)
from .tags import add_tags_to_workfile
@@ -101,7 +101,7 @@ def apply_transition(otio_track, otio_item, track):
    if transition_type == 'dissolve':
        transition_func = getattr(
            hiero.core.Transition,
            'create{kind}DissolveTransition'.format(kind=kind)
            "create{kind}DissolveTransition".format(kind=kind)
        )

    try:
@@ -109,7 +109,7 @@ def apply_transition(otio_track, otio_item, track):
            item_in,
            item_out,
            otio_item.in_offset.value,
            otio_item.out_offset.value
            otio_item.out_offset.value,
        )

        # Catch error raised if transition is bigger than TrackItem source
@@ -134,7 +134,7 @@ def apply_transition(otio_track, otio_item, track):

            transition = transition_func(
                item_out,
                otio_item.out_offset.value
                otio_item.out_offset.value,
            )

        elif transition_type == 'fade_out':
@@ -183,9 +183,7 @@ def prep_url(url_in):
def create_offline_mediasource(otio_clip, path=None):
    global _otio_old

    hiero_rate = hiero.core.TimeBase(
        otio_clip.source_range.start_time.rate
    )
    hiero_rate = hiero.core.TimeBase(otio_clip.source_range.start_time.rate)

    try:
        legal_media_refs = (
@@ -212,7 +210,7 @@ def create_offline_mediasource(otio_clip, path=None):
        source_range.start_time.value,
        source_range.duration.value,
        hiero_rate,
        source_range.start_time.value
        source_range.start_time.value,
    )

    return media
@@ -385,7 +383,8 @@ def create_trackitem(playhead, track, otio_clip, clip):
            # Only reverse effect can be applied here
            if abs(time_scalar) == 1.:
                trackitem.setPlaybackSpeed(
                    trackitem.playbackSpeed() * time_scalar)
                    trackitem.playbackSpeed() * time_scalar
                )

        elif isinstance(effect, otio.schema.FreezeFrame):
            # For freeze frame, playback speed must be set after range
@@ -397,28 +396,21 @@ def create_trackitem(playhead, track, otio_clip, clip):
        source_in = source_range.end_time_inclusive().value

        timeline_in = playhead + source_out
        timeline_out = (
            timeline_in +
            source_range.duration.value
        ) - 1
        timeline_out = (timeline_in + source_range.duration.value) - 1
    else:
        # Normal playback speed
        source_in = source_range.start_time.value
        source_out = source_range.end_time_inclusive().value

        timeline_in = playhead
        timeline_out = (
            timeline_in +
            source_range.duration.value
        ) - 1
        timeline_out = (timeline_in + source_range.duration.value) - 1

    # Set source and timeline in/out points
    trackitem.setTimes(
        timeline_in,
        timeline_out,
        source_in,
        source_out
        source_out,
    )

    # Apply playback speed for freeze frames
@@ -435,7 +427,8 @@ def create_trackitem(playhead, track, otio_clip, clip):


def build_sequence(
        otio_timeline, project=None, sequence=None, track_kind=None):
    otio_timeline, project=None, sequence=None, track_kind=None
):
    if project is None:
        if sequence:
            project = sequence.project()
@@ -509,10 +502,7 @@ def build_sequence(

            # Create TrackItem
            trackitem = create_trackitem(
                playhead,
                track,
                otio_clip,
                clip
                playhead, track, otio_clip, clip
            )

            # Add markers
@@ -449,7 +449,6 @@ class ClipLoader:
        repr = self.context["representation"]
        repr_cntx = repr["context"]
        folder_path = self.context["folder"]["path"]
        folder_name = self.context["folder"]["name"]
        product_name = self.context["product"]["name"]
        representation = repr["name"]
        self.data["clip_name"] = self.clip_name_template.format(**repr_cntx)
@@ -906,16 +905,16 @@ class PublishClip:
            "hierarchyData": hierarchy_formatting_data,
            "productName": self.product_name,
            "productType": self.product_type,
            "families": [self.product_type, self.data["family"]]
            "families": [self.product_type, self.data["productType"]]
        }

    def _convert_to_entity(self, type, template):
    def _convert_to_entity(self, src_type, template):
        """ Converting input key to key with type. """
        # convert to entity type
        entity_type = self.types.get(type, None)
        folder_type = self.types.get(src_type, None)

        assert entity_type, "Missing entity type for `{}`".format(
            type
        assert folder_type, "Missing folder type for `{}`".format(
            src_type
        )

        # first collect formatting data to use for formatting template
@@ -926,7 +925,7 @@ class PublishClip:
            formatting_data[_k] = value

        return {
            "entity_type": entity_type,
            "folder_type": folder_type,
            "entity_name": template.format(
                **formatting_data
            )
@@ -3,9 +3,11 @@
# Note: This only prints the text data that is visible in the active Spreadsheet View.
# If you've filtered text, only the visible text will be printed to the CSV file
# Usage: Copy to ~/.hiero/Python/StartupUI
import os
import csv

import hiero.core.events
import hiero.ui
import os, csv
try:
    from PySide.QtGui import *
    from PySide.QtCore import *
@@ -641,7 +641,7 @@ def _setStatus(self, status):
    global gStatusTags

    # Get a valid Tag object from the Global list of statuses
    if not status in gStatusTags.keys():
    if status not in gStatusTags.keys():
        print("Status requested was not a valid Status string.")
        return
@ -101,14 +101,14 @@ def apply_transition(otio_track, otio_item, track):
|
|||
if transition_type == "dissolve":
|
||||
transition_func = getattr(
|
||||
hiero.core.Transition,
|
||||
'create{kind}DissolveTransition'.format(kind=kind)
|
||||
"create{kind}DissolveTransition".format(kind=kind)
|
||||
)
|
||||
|
||||
transition = transition_func(
|
||||
item_in,
|
||||
item_out,
|
||||
otio_item.in_offset.value,
|
||||
otio_item.out_offset.value
|
||||
otio_item.out_offset.value,
|
||||
)
|
||||
|
||||
elif transition_type == "fade_in":
|
||||
|
|
@ -116,20 +116,14 @@ def apply_transition(otio_track, otio_item, track):
|
|||
hiero.core.Transition,
|
||||
'create{kind}FadeInTransition'.format(kind=kind)
|
||||
)
|
||||
transition = transition_func(
|
||||
item_out,
|
||||
otio_item.out_offset.value
|
||||
)
|
||||
transition = transition_func(item_out, otio_item.out_offset.value)
|
||||
|
||||
elif transition_type == "fade_out":
|
||||
transition_func = getattr(
|
||||
hiero.core.Transition,
|
||||
'create{kind}FadeOutTransition'.format(kind=kind)
|
||||
)
|
||||
transition = transition_func(
|
||||
item_in,
|
||||
otio_item.in_offset.value
|
||||
"create{kind}FadeOutTransition".format(kind=kind)
|
||||
)
|
||||
transition = transition_func(item_in, otio_item.in_offset.value)
|
||||
|
||||
else:
|
||||
# Unknown transition
|
||||
|
|
@ -138,11 +132,10 @@ def apply_transition(otio_track, otio_item, track):
|
|||
# Apply transition to track
|
||||
track.addTransition(transition)
|
||||
|
||||
except Exception, e:
|
||||
except Exception as e:
|
||||
sys.stderr.write(
|
||||
'Unable to apply transition "{t}": "{e}"\n'.format(
|
||||
t=otio_item,
|
||||
e=e
|
||||
t=otio_item, e=e
|
||||
)
|
||||
)
|
||||
|
||||
|
|
@ -153,18 +146,14 @@ def prep_url(url_in):
|
|||
if url.startswith("file://localhost/"):
|
||||
return url.replace("file://localhost/", "")
|
||||
|
||||
url = '{url}'.format(
|
||||
sep=url.startswith(os.sep) and "" or os.sep,
|
||||
url=url.startswith(os.sep) and url[1:] or url
|
||||
)
|
||||
if url.startswith(os.sep):
|
||||
url = url[1:]
|
||||
|
||||
return url
|
||||
|
||||
|
||||
def create_offline_mediasource(otio_clip, path=None):
|
||||
hiero_rate = hiero.core.TimeBase(
|
||||
otio_clip.source_range.start_time.rate
|
||||
)
|
||||
hiero_rate = hiero.core.TimeBase(otio_clip.source_range.start_time.rate)
|
||||
|
||||
if isinstance(otio_clip.media_reference, otio.schema.ExternalReference):
|
||||
source_range = otio_clip.available_range()
|
||||
|
|
@ -180,7 +169,7 @@ def create_offline_mediasource(otio_clip, path=None):
|
|||
source_range.start_time.value,
|
||||
source_range.duration.value,
|
||||
hiero_rate,
|
||||
source_range.start_time.value
|
||||
source_range.start_time.value,
|
||||
)
|
||||
|
||||
return media
|
||||
|
|
@ -203,7 +192,7 @@ marker_color_map = {
|
|||
"MAGENTA": "Magenta",
|
||||
"BLACK": "Blue",
|
||||
"WHITE": "Green",
|
||||
"MINT": "Cyan"
|
||||
"MINT": "Cyan",
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -254,12 +243,6 @@ def add_markers(otio_item, hiero_item, tagsbin):
|
|||
if _tag is None:
|
||||
_tag = hiero.core.Tag(marker_color_map[marker.color])
|
||||
|
||||
start = marker.marked_range.start_time.value
|
||||
end = (
|
||||
marker.marked_range.start_time.value +
|
||||
marker.marked_range.duration.value
|
||||
)
|
||||
|
||||
tag = hiero_item.addTag(_tag)
|
||||
tag.setName(marker.name or marker_color_map[marker_color])
|
||||
|
||||
|
|
@ -275,12 +258,12 @@ def create_track(otio_track, tracknum, track_kind):
|
|||
# Create a Track
|
||||
if otio_track.kind == otio.schema.TrackKind.Video:
|
||||
track = hiero.core.VideoTrack(
|
||||
otio_track.name or 'Video{n}'.format(n=tracknum)
|
||||
otio_track.name or "Video{n}".format(n=tracknum)
|
||||
)
|
||||
|
||||
else:
|
||||
track = hiero.core.AudioTrack(
|
||||
otio_track.name or 'Audio{n}'.format(n=tracknum)
|
||||
otio_track.name or "Audio{n}".format(n=tracknum)
|
||||
)
|
||||
|
||||
return track
|
||||
|
|
@ -315,34 +298,25 @@ def create_trackitem(playhead, track, otio_clip, clip, tagsbin):
|
|||
for effect in otio_clip.effects:
|
||||
if isinstance(effect, otio.schema.LinearTimeWarp):
|
||||
trackitem.setPlaybackSpeed(
|
||||
trackitem.playbackSpeed() *
|
||||
effect.time_scalar
|
||||
trackitem.playbackSpeed() * effect.time_scalar
|
||||
)
|
||||
|
||||
# If reverse playback speed swap source in and out
|
||||
if trackitem.playbackSpeed() < 0:
|
||||
source_out = source_range.start_time.value
|
||||
source_in = (
|
||||
source_range.start_time.value +
|
||||
source_range.duration.value
|
||||
source_range.start_time.value + source_range.duration.value
|
||||
) - 1
|
||||
timeline_in = playhead + source_out
|
||||
timeline_out = (
|
||||
timeline_in +
|
||||
source_range.duration.value
|
||||
) - 1
|
||||
timeline_out = (timeline_in + source_range.duration.value) - 1
|
||||
else:
|
||||
# Normal playback speed
|
||||
source_in = source_range.start_time.value
|
||||
source_out = (
|
||||
source_range.start_time.value +
|
||||
source_range.duration.value
|
||||
source_range.start_time.value + source_range.duration.value
|
||||
) - 1
|
||||
timeline_in = playhead
|
||||
timeline_out = (
|
||||
timeline_in +
|
||||
source_range.duration.value
|
||||
) - 1
|
||||
timeline_out = (timeline_in + source_range.duration.value) - 1
|
||||
|
||||
# Set source and timeline in/out points
|
||||
trackitem.setSourceIn(source_in)
|
||||
|
|
@ -357,7 +331,8 @@ def create_trackitem(playhead, track, otio_clip, clip, tagsbin):
|
|||
|
||||
|
||||
def build_sequence(
|
||||
otio_timeline, project=None, sequence=None, track_kind=None):
|
||||
otio_timeline, project=None, sequence=None, track_kind=None
|
||||
):
|
||||
|
||||
if project is None:
|
||||
if sequence:
|
||||
|
|
@ -414,8 +389,7 @@ def build_sequence(
|
|||
if isinstance(otio_clip, otio.schema.Stack):
|
||||
bar = hiero.ui.mainWindow().statusBar()
|
||||
bar.showMessage(
|
||||
"Nested sequences are created separately.",
|
||||
timeout=3000
|
||||
"Nested sequences are created separately.", timeout=3000
|
||||
)
|
||||
build_sequence(otio_clip, project, otio_track.kind)
|
||||
|
||||
|
|
@ -428,11 +402,7 @@ def build_sequence(
|
|||
|
||||
# Create TrackItem
|
||||
trackitem = create_trackitem(
|
||||
playhead,
|
||||
track,
|
||||
otio_clip,
|
||||
clip,
|
||||
tagsbin
|
||||
playhead, track, otio_clip, clip, tagsbin
|
||||
)
|
||||
|
||||
# Add trackitem to track
|
||||
|
|
|
|||
|
|
@ -89,7 +89,7 @@ def update_tag(tag, data):
|
|||
# set all data metadata to tag metadata
|
||||
for _k, _v in data_mtd.items():
|
||||
value = str(_v)
|
||||
if type(_v) == dict:
|
||||
if isinstance(_v, dict):
|
||||
value = json.dumps(_v)
|
||||
|
||||
# set the value
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from itertools import product
|
||||
import re
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -84,6 +84,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
k: v for k, v in tag_data.items()
|
||||
if k not in ("id", "applieswhole", "label")
|
||||
})
|
||||
# Backward compatibility fix of 'entity_type' > 'folder_type'
|
||||
if "parents" in data:
|
||||
for parent in data["parents"]:
|
||||
if "entity_type" in parent:
|
||||
parent["folder_type"] = parent.pop("entity_type")
|
||||
|
||||
asset, asset_name = self._get_folder_data(tag_data)
|
||||
|
||||
|
|
@ -378,12 +383,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
# collect all subtrack items
|
||||
sub_track_items = {}
|
||||
for track in tracks:
|
||||
items = track.items()
|
||||
|
||||
effet_items = track.subTrackItems()
|
||||
effect_items = track.subTrackItems()
|
||||
|
||||
# skip if no clips on track > need track with effect only
|
||||
if not effet_items:
|
||||
if not effect_items:
|
||||
continue
|
||||
|
||||
# skip all disabled tracks
|
||||
|
|
@ -391,7 +394,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
continue
|
||||
|
||||
track_index = track.trackIndex()
|
||||
_sub_track_items = phiero.flatten(effet_items)
|
||||
_sub_track_items = phiero.flatten(effect_items)
|
||||
|
||||
_sub_track_items = list(_sub_track_items)
|
||||
# continue only if any subtrack items are collected
|
||||
|
|
|
|||
|
|
@ -35,10 +35,6 @@ class PrecollectRetime(api.InstancePlugin):
|
|||
source_out = int(track_item.sourceOut())
|
||||
speed = track_item.playbackSpeed()
|
||||
|
||||
# calculate available material before retime
|
||||
available_in = int(track_item.handleInLength() * speed)
|
||||
available_out = int(track_item.handleOutLength() * speed)
|
||||
|
||||
self.log.debug((
|
||||
"_BEFORE: \n timeline_in: `{0}`,\n timeline_out: `{1}`, \n "
|
||||
"source_in: `{2}`,\n source_out: `{3}`,\n speed: `{4}`,\n "
|
||||
|
|
|
|||
|
|
@ -447,7 +447,7 @@ def maintained_selection():
|
|||
node.setSelected(on=True)
|
||||
|
||||
|
||||
def reset_framerange():
|
||||
def reset_framerange(fps=True, frame_range=True):
|
||||
"""Set frame range and FPS to current folder."""
|
||||
|
||||
project_name = get_current_project_name()
|
||||
|
|
@ -456,29 +456,32 @@ def reset_framerange():
|
|||
folder_entity = ayon_api.get_folder_by_path(project_name, folder_path)
|
||||
folder_attributes = folder_entity["attrib"]
|
||||
|
||||
# Get FPS
|
||||
fps = get_folder_fps(folder_entity)
|
||||
# Set FPS
|
||||
if fps:
|
||||
fps = get_folder_fps(folder_entity)
|
||||
print("Setting scene FPS to {}".format(int(fps)))
|
||||
set_scene_fps(fps)
|
||||
|
||||
# Get Start and End Frames
|
||||
frame_start = folder_attributes.get("frameStart")
|
||||
frame_end = folder_attributes.get("frameEnd")
|
||||
if frame_range:
|
||||
|
||||
if frame_start is None or frame_end is None:
|
||||
log.warning("No edit information found for '{}'".format(folder_path))
|
||||
return
|
||||
# Set Start and End Frames
|
||||
frame_start = folder_attributes.get("frameStart")
|
||||
frame_end = folder_attributes.get("frameEnd")
|
||||
|
||||
handle_start = folder_attributes.get("handleStart", 0)
|
||||
handle_end = folder_attributes.get("handleEnd", 0)
|
||||
if frame_start is None or frame_end is None:
|
||||
log.warning("No edit information found for '%s'", folder_path)
|
||||
return
|
||||
|
||||
frame_start -= int(handle_start)
|
||||
frame_end += int(handle_end)
|
||||
handle_start = folder_attributes.get("handleStart", 0)
|
||||
handle_end = folder_attributes.get("handleEnd", 0)
|
||||
|
||||
# Set frame range and FPS
|
||||
print("Setting scene FPS to {}".format(int(fps)))
|
||||
set_scene_fps(fps)
|
||||
hou.playbar.setFrameRange(frame_start, frame_end)
|
||||
hou.playbar.setPlaybackRange(frame_start, frame_end)
|
||||
hou.setFrame(frame_start)
|
||||
frame_start -= int(handle_start)
|
||||
frame_end += int(handle_end)
|
||||
|
||||
# Set frame range and FPS
|
||||
hou.playbar.setFrameRange(frame_start, frame_end)
|
||||
hou.playbar.setPlaybackRange(frame_start, frame_end)
|
||||
hou.setFrame(frame_start)
|
||||
|
||||
|
||||
def get_main_window():
|
||||
|
|
@ -993,3 +996,84 @@ def add_self_publish_button(node):
|
|||
template = node.parmTemplateGroup()
|
||||
template.insertBefore((0,), button_parm)
|
||||
node.setParmTemplateGroup(template)
|
||||
|
||||
|
||||
def update_content_on_context_change():
|
||||
"""Update all Creator instances to current asset"""
|
||||
host = registered_host()
|
||||
context = host.get_current_context()
|
||||
|
||||
folder_path = context["folder_path"]
|
||||
task = context["task_name"]
|
||||
|
||||
create_context = CreateContext(host, reset=True)
|
||||
|
||||
for instance in create_context.instances:
|
||||
instance_folder_path = instance.get("folderPath")
|
||||
if instance_folder_path and instance_folder_path != folder_path:
|
||||
instance["folderPath"] = folder_path
|
||||
instance_task = instance.get("task")
|
||||
if instance_task and instance_task != task:
|
||||
instance["task"] = task
|
||||
|
||||
create_context.save_changes()
|
||||
|
||||
|
||||
def prompt_reset_context():
|
||||
"""Prompt the user what context settings to reset.
|
||||
This prompt is used on saving to a different task to allow the scene to
|
||||
get matched to the new context.
|
||||
"""
|
||||
# TODO: Cleanup this prototyped mess of imports and odd dialog
|
||||
from ayon_core.tools.attribute_defs.dialog import (
|
||||
AttributeDefinitionsDialog
|
||||
)
|
||||
from ayon_core.style import load_stylesheet
|
||||
from ayon_core.lib import BoolDef, UILabelDef
|
||||
|
||||
definitions = [
|
||||
UILabelDef(
|
||||
label=(
|
||||
"You are saving your workfile into a different folder or task."
|
||||
"\n\n"
|
||||
"Would you like to update some settings to the new context?\n"
|
||||
)
|
||||
),
|
||||
BoolDef(
|
||||
"fps",
|
||||
label="FPS",
|
||||
tooltip="Reset workfile FPS",
|
||||
default=True
|
||||
),
|
||||
BoolDef(
|
||||
"frame_range",
|
||||
label="Frame Range",
|
||||
tooltip="Reset workfile start and end frame ranges",
|
||||
default=True
|
||||
),
|
||||
BoolDef(
|
||||
"instances",
|
||||
label="Publish instances",
|
||||
tooltip="Update all publish instance's folder and task to match "
|
||||
"the new folder and task",
|
||||
default=True
|
||||
),
|
||||
]
|
||||
|
||||
dialog = AttributeDefinitionsDialog(definitions)
|
||||
dialog.setWindowTitle("Saving to different context.")
|
||||
dialog.setStyleSheet(load_stylesheet())
|
||||
if not dialog.exec_():
|
||||
return None
|
||||
|
||||
options = dialog.get_values()
|
||||
if options["fps"] or options["frame_range"]:
|
||||
reset_framerange(
|
||||
fps=options["fps"],
|
||||
frame_range=options["frame_range"]
|
||||
)
|
||||
|
||||
if options["instances"]:
|
||||
update_content_on_context_change()
|
||||
|
||||
dialog.deleteLater()
|
||||
|
|
@ -1,7 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Pipeline tools for OpenPype Houdini integration."""
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
|
||||
import hou # noqa
|
||||
|
|
@ -39,6 +38,9 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
|
|||
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
|
||||
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
|
||||
|
||||
# Track whether the workfile tool is about to save
|
||||
ABOUT_TO_SAVE = False
|
||||
|
||||
|
||||
class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
||||
name = "houdini"
|
||||
|
|
@ -61,10 +63,12 @@ class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
|
|||
log.info("Installing callbacks ... ")
|
||||
# register_event_callback("init", on_init)
|
||||
self._register_callbacks()
|
||||
register_event_callback("workfile.save.before", before_workfile_save)
|
||||
register_event_callback("before.save", before_save)
|
||||
register_event_callback("save", on_save)
|
||||
register_event_callback("open", on_open)
|
||||
register_event_callback("new", on_new)
|
||||
register_event_callback("taskChanged", on_task_changed)
|
||||
|
||||
self._has_been_setup = True
|
||||
|
||||
|
|
@ -287,6 +291,11 @@ def ls():
|
|||
yield parse_container(container)
|
||||
|
||||
|
||||
def before_workfile_save(event):
|
||||
global ABOUT_TO_SAVE
|
||||
ABOUT_TO_SAVE = True
|
||||
|
||||
|
||||
def before_save():
|
||||
return lib.validate_fps()
|
||||
|
||||
|
|
@ -298,6 +307,21 @@ def on_save():
|
|||
# update houdini vars
|
||||
lib.update_houdini_vars_context_dialog()
|
||||
|
||||
nodes = lib.get_id_required_nodes()
|
||||
for node, new_id in lib.generate_ids(nodes):
|
||||
lib.set_id(node, new_id, overwrite=False)
|
||||
|
||||
# We are now starting the actual save directly
|
||||
global ABOUT_TO_SAVE
|
||||
ABOUT_TO_SAVE = False
|
||||
|
||||
|
||||
def on_task_changed():
|
||||
global ABOUT_TO_SAVE
|
||||
if not IS_HEADLESS and ABOUT_TO_SAVE:
|
||||
# Let's prompt the user to update the context settings or not
|
||||
lib.prompt_reset_context()
|
||||
|
||||
|
||||
def _show_outdated_content_popup():
|
||||
# Get main window
|
||||
|
|
|
|||
|
|
@ -59,7 +59,7 @@ class AbcLoader(load.LoaderPlugin):
|
|||
|
||||
normal_node.setInput(0, unpack)
|
||||
|
||||
null = container.createNode("null", node_name="OUT".format(name))
|
||||
null = container.createNode("null", node_name="OUT")
|
||||
null.setInput(0, normal_node)
|
||||
|
||||
# Ensure display flag is on the Alembic input node and not on the OUT
|
||||
|
|
|
|||
|
|
@ -71,6 +71,8 @@ class ValidateCopOutputNode(pyblish.api.InstancePlugin):
|
|||
# the isinstance check above should be stricter than this category
|
||||
if output_node.type().category().name() != "Cop2":
|
||||
raise PublishValidationError(
|
||||
("Output node %s is not of category Cop2. "
|
||||
"This is a bug...").format(output_node.path()),
|
||||
(
|
||||
"Output node {} is not of category Cop2."
|
||||
" This is a bug..."
|
||||
).format(output_node.path()),
|
||||
title=cls.label)
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ from ayon_core.hosts.max.api.lib import (
|
|||
maintained_selection,
|
||||
object_transform_set
|
||||
)
|
||||
from ayon_core.hosts.max.api.lib import maintained_selection
|
||||
from ayon_core.hosts.max.api.pipeline import (
|
||||
containerise,
|
||||
get_previous_loaded_object,
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.pipeline import PublishValidationError
|
||||
from pymxs import runtime as rt
|
||||
|
||||
|
||||
class ValidateCameraContent(pyblish.api.InstancePlugin):
|
||||
|
|
|
|||
|
|
@ -140,7 +140,7 @@ class ValidateRenderPasses(OptionalPyblishPluginMixin,
|
|||
invalid = []
|
||||
if instance.name not in file_name:
|
||||
cls.log.error("The renderpass filename should contain the instance name.")
|
||||
invalid.append((f"Invalid instance name",
|
||||
invalid.append(("Invalid instance name",
|
||||
file_name))
|
||||
if renderpass is not None:
|
||||
if not file_name.rstrip(".").endswith(renderpass):
|
||||
|
|
|
|||
|
|
@ -2152,9 +2152,13 @@ def get_related_sets(node):
|
|||
sets = cmds.ls(sets)
|
||||
|
||||
# Ignore `avalon.container`
|
||||
sets = [s for s in sets if
|
||||
not cmds.attributeQuery("id", node=s, exists=True) or
|
||||
not cmds.getAttr("%s.id" % s) in ignored]
|
||||
sets = [
|
||||
s for s in sets
|
||||
if (
|
||||
not cmds.attributeQuery("id", node=s, exists=True)
|
||||
or cmds.getAttr(f"{s}.id") not in ignored
|
||||
)
|
||||
]
|
||||
|
||||
# Exclude deformer sets (`type=2` for `maya.cmds.listSets`)
|
||||
deformer_sets = cmds.listSets(object=node,
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ from ayon_core.hosts.maya.api.lib import (
|
|||
unique_namespace,
|
||||
get_attribute_input,
|
||||
maintained_selection,
|
||||
convert_to_maya_fps
|
||||
)
|
||||
from ayon_core.hosts.maya.api.pipeline import containerise
|
||||
from ayon_core.hosts.maya.api.plugin import get_load_color_for_product_type
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
import os
|
||||
|
||||
import maya.cmds as cmds
|
||||
|
||||
from ayon_core.hosts.maya.api.pipeline import containerise
|
||||
|
|
|
|||
|
|
@ -1,10 +1,8 @@
|
|||
import os
|
||||
import copy
|
||||
|
||||
from ayon_core.lib import EnumDef
|
||||
from ayon_core.pipeline import (
|
||||
load,
|
||||
get_representation_context,
|
||||
get_current_host_name,
|
||||
)
|
||||
from ayon_core.pipeline.load.utils import get_representation_path_from_context
|
||||
|
|
|
|||
|
|
@ -78,7 +78,6 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
|
|||
layer = instance.data["transientData"]["layer"]
|
||||
objset = instance.data.get("instance_node")
|
||||
filepath = context.data["currentFile"].replace("\\", "/")
|
||||
workspace = context.data["workspaceDir"]
|
||||
|
||||
# check if layer is renderable
|
||||
if not layer.isRenderable():
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Create Unreal Skeletal Mesh data to be extracted as FBX."""
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
|
||||
from maya import cmds # noqa
|
||||
|
||||
|
|
|
|||
|
|
@ -74,7 +74,7 @@ class ExtractUnrealSkeletalMeshFbx(publish.Extractor):
|
|||
renamed_to_extract.append("|".join(node_path))
|
||||
|
||||
with renamed(original_parent, parent_node):
|
||||
self.log.debug("Extracting: {}".format(renamed_to_extract, path))
|
||||
self.log.debug("Extracting: {}".format(renamed_to_extract))
|
||||
fbx_exporter.export(renamed_to_extract, path)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ from maya import cmds
|
|||
import pyblish.api
|
||||
from ayon_core.hosts.maya.api.lib import extract_alembic
|
||||
from ayon_core.pipeline import publish
|
||||
from ayon_core.lib import StringTemplate
|
||||
|
||||
|
||||
class ExtractWorkfileXgen(publish.Extractor):
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ from ayon_core.pipeline import publish
|
|||
from ayon_core.hosts.maya.api.lib import (
|
||||
maintained_selection, attribute_values, write_xgen_file, delete_after
|
||||
)
|
||||
from ayon_core.lib import StringTemplate
|
||||
|
||||
|
||||
class ExtractXgen(publish.Extractor):
|
||||
|
|
|
|||
|
|
@ -29,7 +29,8 @@ class AssetModel(models.TreeModel):
|
|||
self.beginResetModel()
|
||||
|
||||
# Add the items sorted by label
|
||||
sorter = lambda x: x["label"]
|
||||
def sorter(x):
|
||||
return x["label"]
|
||||
|
||||
for item in sorted(items, key=sorter):
|
||||
|
||||
|
|
|
|||
|
|
@ -30,13 +30,11 @@ from ayon_core.tools.utils import host_tools
|
|||
from ayon_core.hosts.nuke import NUKE_ROOT_DIR
|
||||
from ayon_core.tools.workfile_template_build import open_template_ui
|
||||
|
||||
from .command import viewer_update_and_undo_stop
|
||||
from .lib import (
|
||||
Context,
|
||||
ROOT_DATA_KNOB,
|
||||
INSTANCE_DATA_KNOB,
|
||||
get_main_window,
|
||||
add_publish_knob,
|
||||
WorkfileSettings,
|
||||
# TODO: remove this once workfile builder will be removed
|
||||
process_workfile_builder,
|
||||
|
|
|
|||
|
|
@ -6,7 +6,6 @@ import six
|
|||
import random
|
||||
import string
|
||||
from collections import OrderedDict, defaultdict
|
||||
from abc import abstractmethod
|
||||
|
||||
from ayon_core.settings import get_current_project_settings
|
||||
from ayon_core.lib import (
|
||||
|
|
@ -14,7 +13,6 @@ from ayon_core.lib import (
|
|||
EnumDef
|
||||
)
|
||||
from ayon_core.pipeline import (
|
||||
LegacyCreator,
|
||||
LoaderPlugin,
|
||||
CreatorError,
|
||||
Creator as NewCreator,
|
||||
|
|
@ -34,18 +32,13 @@ from ayon_core.lib.transcoding import (
|
|||
from .lib import (
|
||||
INSTANCE_DATA_KNOB,
|
||||
Knobby,
|
||||
check_product_name_exists,
|
||||
maintained_selection,
|
||||
get_avalon_knob_data,
|
||||
set_avalon_knob_data,
|
||||
add_publish_knob,
|
||||
get_nuke_imageio_settings,
|
||||
set_node_knobs_from_settings,
|
||||
set_node_data,
|
||||
get_node_data,
|
||||
get_view_process_node,
|
||||
get_viewer_config_from_string,
|
||||
deprecated,
|
||||
get_filenames_without_hash,
|
||||
link_knobs
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
import math
|
||||
from pprint import pformat
|
||||
|
||||
import nuke
|
||||
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import json
|
||||
|
||||
import nuke
|
||||
import six
|
||||
import pyblish.api
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
import clique
|
||||
|
||||
from ayon_core.pipeline import PublishXmlValidationError
|
||||
from ayon_core.pipeline.publish import get_errored_instances_from_context
|
||||
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ from wsrpc_aiohttp import (
|
|||
import ayon_api
|
||||
from qtpy import QtCore
|
||||
|
||||
from ayon_core.lib import Logger, StringTemplate
|
||||
from ayon_core.lib import Logger
|
||||
from ayon_core.pipeline import (
|
||||
registered_host,
|
||||
Anatomy,
|
||||
|
|
|
|||
|
|
@ -1,7 +1,5 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Close PS after publish. For Webpublishing only."""
|
||||
import os
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.hosts.photoshop import api as photoshop
|
||||
|
|
|
|||
|
|
@ -875,14 +875,14 @@ class PublishClip:
|
|||
def _convert_to_entity(self, key):
|
||||
""" Converting input key to key with type. """
|
||||
# convert to entity type
|
||||
entity_type = self.types.get(key)
|
||||
folder_type = self.types.get(key)
|
||||
|
||||
assert entity_type, "Missing entity type for `{}`".format(
|
||||
assert folder_type, "Missing folder type for `{}`".format(
|
||||
key
|
||||
)
|
||||
|
||||
return {
|
||||
"entity_type": entity_type,
|
||||
"folder_type": folder_type,
|
||||
"entity_name": self.hierarchy_data[key]["value"].format(
|
||||
**self.timeline_item_default_data
|
||||
)
|
||||
|
|
|
|||
|
|
@ -64,6 +64,11 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
|
|||
})
|
||||
|
||||
folder_path = tag_data["folder_path"]
|
||||
# Backward compatibility fix of 'entity_type' > 'folder_type'
|
||||
if "parents" in data:
|
||||
for parent in data["parents"]:
|
||||
if "entity_type" in parent:
|
||||
parent["folder_type"] = parent.pop("entity_type")
|
||||
|
||||
# TODO: remove backward compatibility
|
||||
product_name = tag_data.get("productName")
|
||||
|
|
|
|||
|
|
@ -12,17 +12,14 @@ import substance_painter.project
|
|||
import pyblish.api
|
||||
|
||||
from ayon_core.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost
|
||||
from ayon_core.settings import (
|
||||
get_current_project_settings,
|
||||
get_project_settings,
|
||||
)
|
||||
from ayon_core.settings import get_current_project_settings
|
||||
|
||||
from ayon_core.pipeline.template_data import get_template_data_with_names
|
||||
from ayon_core.pipeline import (
|
||||
register_creator_plugin_path,
|
||||
register_loader_plugin_path,
|
||||
AVALON_CONTAINER_ID,
|
||||
Anatomy
|
||||
Anatomy,
|
||||
)
|
||||
from ayon_core.lib import (
|
||||
StringTemplate,
|
||||
|
|
|
|||
|
|
@ -186,14 +186,15 @@ class ShotMetadataSolver:
|
|||
# in case first parent is project then start parents from start
|
||||
if (
|
||||
_index == 0
|
||||
and parent_token_type == "project"
|
||||
and parent_token_type.lower() == "project"
|
||||
):
|
||||
project_parent = parents[0]
|
||||
parents = [project_parent]
|
||||
continue
|
||||
|
||||
parents.append({
|
||||
"entity_type": parent_token_type,
|
||||
"entity_type": "folder",
|
||||
"folder_type": parent_token_type.lower(),
|
||||
"entity_name": parent_name
|
||||
})
|
||||
|
||||
|
|
@ -264,7 +265,8 @@ class ShotMetadataSolver:
|
|||
}]
|
||||
for entity in folders_hierarchy:
|
||||
output.append({
|
||||
"entity_type": entity["folderType"],
|
||||
"entity_type": "folder",
|
||||
"folder_type": entity["folderType"],
|
||||
"entity_name": entity["name"]
|
||||
})
|
||||
return output
|
||||
|
|
|
|||
|
|
@ -675,7 +675,7 @@ or updating already created. Publishing will create OTIO file.
|
|||
base_instance_data = {
|
||||
"shotName": shot_name,
|
||||
"variant": variant_name,
|
||||
"task": "",
|
||||
"task": None,
|
||||
"newAssetPublishing": True,
|
||||
"trackStartFrame": track_start_frame,
|
||||
"timelineOffset": timeline_offset,
|
||||
|
|
|
|||
|
|
@ -154,8 +154,9 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
|
|||
handle_end = int(instance.data["handleEnd"])
|
||||
|
||||
in_info = {
|
||||
"entity_type": "Shot",
|
||||
"custom_attributes": {
|
||||
"entity_type": "folder",
|
||||
"folder_type": "Shot",
|
||||
"attributes": {
|
||||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end,
|
||||
"frameStart": instance.data["frameStart"],
|
||||
|
|
@ -174,13 +175,13 @@ class CollectShotInstance(pyblish.api.InstancePlugin):
|
|||
|
||||
for parent in reversed(parents):
|
||||
parent_name = parent["entity_name"]
|
||||
next_dict = {
|
||||
parent_name: {
|
||||
"entity_type": parent["entity_type"],
|
||||
"childs": actual
|
||||
}
|
||||
parent_info = {
|
||||
"entity_type": parent["entity_type"],
|
||||
"children": actual,
|
||||
}
|
||||
actual = next_dict
|
||||
if parent_info["entity_type"] == "folder":
|
||||
parent_info["folder_type"] = parent["folder_type"]
|
||||
actual = {parent_name: parent_info}
|
||||
|
||||
final_context = self._update_dict(final_context, actual)
|
||||
|
||||
|
|
|
|||
|
|
@ -599,7 +599,7 @@ class CreateRenderPass(TVPaintCreator):
|
|||
if filtered_layers:
|
||||
self.log.info((
|
||||
"Changing group of "
|
||||
f"{','.join([l['name'] for l in filtered_layers])}"
|
||||
f"{','.join([layer['name'] for layer in filtered_layers])}"
|
||||
f" to {group_id}"
|
||||
))
|
||||
george_lines = [
|
||||
|
|
@ -760,7 +760,9 @@ class TVPaintAutoDetectRenderCreator(TVPaintCreator):
|
|||
grg_lines: list[str] = []
|
||||
for group_id, group_name in new_group_name_by_id.items():
|
||||
group: dict[str, Any] = groups_by_id[group_id]
|
||||
grg_line: str = "tv_layercolor \"setcolor\" {} {} {} {} {}".format(
|
||||
grg_line: str = (
|
||||
"tv_layercolor \"setcolor\" {} {} {} {} {} \"{}\""
|
||||
).format(
|
||||
group["clip_id"],
|
||||
group_id,
|
||||
group["red"],
|
||||
|
|
|
|||
|
|
@ -216,10 +216,8 @@ def create_unreal_project(project_name: str,
|
|||
since 3.16.0
|
||||
|
||||
"""
|
||||
env = env or os.environ
|
||||
|
||||
preset = get_project_settings(project_name)["unreal"]["project_setup"]
|
||||
ue_id = ".".join(ue_version.split(".")[:2])
|
||||
# get unreal engine identifier
|
||||
# -------------------------------------------------------------------------
|
||||
# FIXME (antirotor): As of 4.26 this is problem with UE4 built from
|
||||
|
|
@ -238,10 +236,12 @@ def create_unreal_project(project_name: str,
|
|||
project_file = pr_dir / f"{unreal_project_name}.uproject"
|
||||
|
||||
print("--- Generating a new project ...")
|
||||
commandlet_cmd = [f'{ue_editor_exe.as_posix()}',
|
||||
f'{cmdlet_project.as_posix()}',
|
||||
f'-run=AyonGenerateProject',
|
||||
f'{project_file.resolve().as_posix()}']
|
||||
commandlet_cmd = [
|
||||
ue_editor_exe.as_posix(),
|
||||
cmdlet_project.as_posix(),
|
||||
"-run=AyonGenerateProject",
|
||||
project_file.resolve().as_posix()
|
||||
]
|
||||
|
||||
if dev_mode or preset["dev_mode"]:
|
||||
commandlet_cmd.append('-GenerateCode')
|
||||
|
|
@ -268,7 +268,7 @@ def create_unreal_project(project_name: str,
|
|||
pf.seek(0)
|
||||
json.dump(pf_json, pf, indent=4)
|
||||
pf.truncate()
|
||||
print(f'--- Engine ID has been written into the project file')
|
||||
print("--- Engine ID has been written into the project file")
|
||||
|
||||
if dev_mode or preset["dev_mode"]:
|
||||
u_build_tool = get_path_to_ubt(engine_path, ue_version)
|
||||
|
|
@ -282,17 +282,25 @@ def create_unreal_project(project_name: str,
|
|||
# we need to test this out
|
||||
arch = "Mac"
|
||||
|
||||
command1 = [u_build_tool.as_posix(), "-projectfiles",
|
||||
f"-project={project_file}", "-progress"]
|
||||
command1 = [
|
||||
u_build_tool.as_posix(),
|
||||
"-projectfiles",
|
||||
f"-project={project_file}",
|
||||
"-progress"
|
||||
]
|
||||
|
||||
subprocess.run(command1)
|
||||
|
||||
command2 = [u_build_tool.as_posix(),
|
||||
f"-ModuleWithSuffix={unreal_project_name},3555", arch,
|
||||
"Development", "-TargetType=Editor",
|
||||
f'-Project={project_file}',
|
||||
f'{project_file}',
|
||||
"-IgnoreJunk"]
|
||||
command2 = [
|
||||
u_build_tool.as_posix(),
|
||||
f"-ModuleWithSuffix={unreal_project_name},3555",
|
||||
arch,
|
||||
"Development",
|
||||
"-TargetType=Editor",
|
||||
f"-Project={project_file}",
|
||||
project_file,
|
||||
"-IgnoreJunk"
|
||||
]
|
||||
|
||||
subprocess.run(command2)
|
||||
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ class CreateRender(UnrealAssetCreator):
|
|||
# If the option to create a new level sequence is selected,
|
||||
# create a new level sequence and a master level.
|
||||
|
||||
root = f"/Game/Ayon/Sequences"
|
||||
root = "/Game/Ayon/Sequences"
|
||||
|
||||
# Create a new folder for the sequence in root
|
||||
sequence_dir_name = create_folder(root, product_name)
|
||||
|
|
@ -166,7 +166,7 @@ class CreateRender(UnrealAssetCreator):
|
|||
master_lvl = levels[0].get_asset().get_path_name()
|
||||
except IndexError:
|
||||
raise RuntimeError(
|
||||
f"Could not find the hierarchy for the selected sequence.")
|
||||
"Could not find the hierarchy for the selected sequence.")
|
||||
|
||||
# If the selected asset is the master sequence, we get its data
|
||||
# and then we create the instance for the master sequence.
|
||||
|
|
|
|||
|
|
@ -1,12 +1,11 @@
|
|||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import unreal
|
||||
import pyblish.api
|
||||
|
||||
from ayon_core.pipeline import get_current_project_name
|
||||
from ayon_core.pipeline import Anatomy
|
||||
from ayon_core.hosts.unreal.api import pipeline
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectRenderInstances(pyblish.api.InstancePlugin):
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
# Backwards compatibility support
|
||||
# - TODO should be removed before release 1.0.0
|
||||
from ayon_core.addon import (
|
||||
AYONAddon,
|
||||
AddonsManager,
|
||||
|
|
@ -12,3 +14,16 @@ from ayon_core.addon.base import (
|
|||
ModulesManager = AddonsManager
|
||||
TrayModulesManager = TrayAddonsManager
|
||||
load_modules = load_addons
|
||||
|
||||
|
||||
__all__ = (
|
||||
"AYONAddon",
|
||||
"AddonsManager",
|
||||
"TrayAddonsManager",
|
||||
"load_addons",
|
||||
"OpenPypeModule",
|
||||
"OpenPypeAddOn",
|
||||
"ModulesManager",
|
||||
"TrayModulesManager",
|
||||
"load_modules",
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,6 +1,4 @@
|
|||
import os
|
||||
import re
|
||||
import time
|
||||
import json
|
||||
import datetime
|
||||
import requests
|
||||
|
|
|
|||
|
|
@ -651,7 +651,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
|
|||
return job_info, attr.asdict(plugin_info)
|
||||
|
||||
def _get_arnold_render_payload(self, data):
|
||||
from maya import cmds
|
||||
# Job Info
|
||||
job_info = copy.deepcopy(self.job_info)
|
||||
job_info.Name = self._job_info_label("Render")
|
||||
|
|
@ -856,10 +855,10 @@ def _format_tiles(
|
|||
"""
|
||||
# Math used requires integers for correct output - as such
|
||||
# we ensure our inputs are correct.
|
||||
assert type(tiles_x) is int, "tiles_x must be an integer"
|
||||
assert type(tiles_y) is int, "tiles_y must be an integer"
|
||||
assert type(width) is int, "width must be an integer"
|
||||
assert type(height) is int, "height must be an integer"
|
||||
assert isinstance(tiles_x, int), "tiles_x must be an integer"
|
||||
assert isinstance(tiles_y, int), "tiles_y must be an integer"
|
||||
assert isinstance(width, int), "width must be an integer"
|
||||
assert isinstance(height, int), "height must be an integer"
|
||||
|
||||
out = {"JobInfo": {}, "PluginInfo": {}}
|
||||
cfg = OrderedDict()
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ from Deadline.Plugins import PluginType, DeadlinePlugin
|
|||
from Deadline.Scripting import (
|
||||
StringUtils,
|
||||
FileUtils,
|
||||
DirectoryUtils,
|
||||
RepositoryUtils
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ from Deadline.Scripting import (
|
|||
RepositoryUtils,
|
||||
FileUtils,
|
||||
DirectoryUtils,
|
||||
ProcessUtils,
|
||||
)
|
||||
__version__ = "1.0.1"
|
||||
VERSION_REGEX = re.compile(
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@
|
|||
"""Submitting render job to RoyalRender."""
|
||||
import os
|
||||
import json
|
||||
import platform
|
||||
import re
|
||||
import tempfile
|
||||
import uuid
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Submit jobs to RoyalRender."""
|
||||
import tempfile
|
||||
import platform
|
||||
|
||||
import pyblish.api
|
||||
from ayon_core.modules.royalrender.api import (
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
import ayon_api
|
||||
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.lib import filter_profiles, prepare_template_data
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import pyblish.api
|
||||
from ayon_core.pipeline import Anatomy
|
||||
from typing import Tuple, Union, List
|
||||
from typing import Tuple, List
|
||||
|
||||
|
||||
class TimeData:
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@ import os
|
|||
import logging
|
||||
|
||||
from ayon_core.settings import get_project_settings
|
||||
from ayon_core.pipeline import schema
|
||||
from ayon_core.pipeline.plugin_discover import (
|
||||
discover,
|
||||
register_plugin,
|
||||
|
|
|
|||
|
|
@ -2,7 +2,6 @@ import inspect
|
|||
from abc import ABCMeta
|
||||
import pyblish.api
|
||||
from pyblish.plugin import MetaPlugin, ExplicitMetaPlugin
|
||||
from ayon_core.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS
|
||||
from ayon_core.lib import BoolDef
|
||||
|
||||
from .lib import (
|
||||
|
|
|
|||
|
|
@ -465,7 +465,11 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
|
|||
current_data = hierarchy_context.get(project_name, {})
|
||||
for key in folder_path.split("/"):
|
||||
if key:
|
||||
current_data = current_data.get("childs", {}).get(key, {})
|
||||
current_data = (
|
||||
current_data
|
||||
.get("children", {})
|
||||
.get(key, {})
|
||||
)
|
||||
tasks_info = current_data.get("tasks", {})
|
||||
|
||||
task_info = tasks_info.get(task_name, {})
|
||||
|
|
@ -529,5 +533,5 @@ class CollectAnatomyInstanceData(pyblish.api.ContextPlugin):
|
|||
return item[folder_name].get("tasks") or {}
|
||||
|
||||
for subitem in item.values():
|
||||
hierarchy_queue.extend(subitem.get("childs") or [])
|
||||
hierarchy_queue.extend(subitem.get("children") or [])
|
||||
return {}
|
||||
|
|
|
|||
|
|
@ -17,17 +17,18 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
|
|||
hosts = ["resolve", "hiero", "flame"]
|
||||
|
||||
def process(self, context):
|
||||
temp_context = {}
|
||||
project_name = context.data["projectName"]
|
||||
final_context = {}
|
||||
final_context[project_name] = {}
|
||||
final_context[project_name]["entity_type"] = "project"
|
||||
|
||||
final_context = {
|
||||
project_name: {
|
||||
"entity_type": "project",
|
||||
"children": {}
|
||||
},
|
||||
}
|
||||
temp_context = {}
|
||||
for instance in context:
|
||||
self.log.debug("Processing instance: `{}` ...".format(instance))
|
||||
|
||||
# shot data dict
|
||||
shot_data = {}
|
||||
product_type = instance.data["productType"]
|
||||
families = instance.data["families"]
|
||||
|
||||
|
|
@@ -41,34 +42,38 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
if not instance.data.get("heroTrack"):
    continue

# suppose that all instances are Shots
shot_data['entity_type'] = 'Shot'
shot_data['tasks'] = instance.data.get("tasks") or {}
shot_data["comments"] = instance.data.get("comments", [])

shot_data['custom_attributes'] = {
    "handleStart": instance.data["handleStart"],
    "handleEnd": instance.data["handleEnd"],
    "frameStart": instance.data["frameStart"],
    "frameEnd": instance.data["frameEnd"],
    "clipIn": instance.data["clipIn"],
    "clipOut": instance.data["clipOut"],
    "fps": instance.data["fps"],
    "resolutionWidth": instance.data["resolutionWidth"],
    "resolutionHeight": instance.data["resolutionHeight"],
    "pixelAspect": instance.data["pixelAspect"]
shot_data = {
    "entity_type": "folder",
    # WARNING Default folder type is hardcoded
    # suppose that all instances are Shots
    "folder_type": "Shot",
    "tasks": instance.data.get("tasks") or {},
    "comments": instance.data.get("comments", []),
    "attributes": {
        "handleStart": instance.data["handleStart"],
        "handleEnd": instance.data["handleEnd"],
        "frameStart": instance.data["frameStart"],
        "frameEnd": instance.data["frameEnd"],
        "clipIn": instance.data["clipIn"],
        "clipOut": instance.data["clipOut"],
        "fps": instance.data["fps"],
        "resolutionWidth": instance.data["resolutionWidth"],
        "resolutionHeight": instance.data["resolutionHeight"],
        "pixelAspect": instance.data["pixelAspect"],
    },
}
# Split by '/' for AYON where asset is a path
name = instance.data["folderPath"].split("/")[-1]
actual = {name: shot_data}

for parent in reversed(instance.data["parents"]):
    next_dict = {}
    parent_name = parent["entity_name"]
    next_dict[parent_name] = {}
    next_dict[parent_name]["entity_type"] = parent[
        "entity_type"].capitalize()
    next_dict[parent_name]["childs"] = actual
    next_dict = {
        parent["entity_name"]: {
            "entity_type": "folder",
            "folder_type": parent["folder_type"],
            "children": actual,
        }
    }
    actual = next_dict

temp_context = self._update_dict(temp_context, actual)
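After these changes the collector emits folder entries keyed by "children", "folder_type" and "attributes" instead of "childs", capitalized entity types and "custom_attributes". A minimal sketch of the resulting context.data["hierarchyContext"] shape, with illustrative project, folder and attribute values:

# Illustrative only: shape of the hierarchy context after this collector runs.
hierarchy_context = {
    "MyProject": {
        "entity_type": "project",
        "children": {
            "sequences": {
                "entity_type": "folder",
                "folder_type": "Folder",
                "children": {
                    "sh010": {
                        "entity_type": "folder",
                        "folder_type": "Shot",
                        "tasks": {"compositing": {"type": "Compositing"}},
                        "comments": [],
                        "attributes": {
                            "handleStart": 10,
                            "handleEnd": 10,
                            "frameStart": 1001,
                            "frameEnd": 1050,
                            "clipIn": 86400,
                            "clipOut": 86449,
                            "fps": 25.0,
                            "resolutionWidth": 1920,
                            "resolutionHeight": 1080,
                            "pixelAspect": 1.0,
                        },
                    }
                },
            }
        },
    }
}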
@@ -77,7 +82,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
if not temp_context:
    return

final_context[project_name]['childs'] = temp_context
final_context[project_name]["children"] = temp_context

# adding hierarchy context to context
context.data["hierarchyContext"] = final_context

@@ -85,8 +90,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
    context.data["hierarchyContext"]))

def _update_dict(self, parent_dict, child_dict):
    """
    Nesting each children into its parent.
    """Nesting each child into its parent.

    Args:
        parent_dict (dict): parent dict wich should be nested with children

@@ -115,6 +115,10 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):

entity_hub = EntityHub(project_name)
project = entity_hub.project_entity
folder_type_name_by_low_name = {
    folder_type_item["name"].lower(): folder_type_item["name"]
    for folder_type_item in project.get_folder_types()
}

hierarchy_match_queue = collections.deque()
hierarchy_match_queue.append((project, hierarchy_context))

@@ -167,8 +171,18 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):
# TODO check if existing entity have 'folder' type
child_entity = children_by_low_name.get(child_name.lower())
if child_entity is None:
    folder_type = folder_type_name_by_low_name.get(
        child_info["folder_type"].lower()
    )
    if folder_type is None:
        # TODO add validator for folder type validations
        self.log.warning((
            "Couldn't find folder type '{}'"
        ).format(child_info["folder_type"]))
        folder_type = "Folder"

    child_entity = entity_hub.add_new_folder(
        child_info["entity_type"],
        folder_type,
        parent_id=entity.id,
        name=child_name
    )
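The folder type resolution above can be reduced to a small standalone sketch. The folder type list and the resolve_folder_type helper below are illustrative stand-ins; in the plugin the names come from project.get_folder_types() and the warning goes through self.log.

# Case-insensitive folder type resolution with a "Folder" fallback.
folder_types = [{"name": "Folder"}, {"name": "Shot"}, {"name": "Sequence"}]

folder_type_name_by_low_name = {
    folder_type_item["name"].lower(): folder_type_item["name"]
    for folder_type_item in folder_types
}


def resolve_folder_type(requested):
    # Match regardless of capitalization; fall back to the generic "Folder".
    folder_type = folder_type_name_by_low_name.get(requested.lower())
    if folder_type is None:
        print("Couldn't find folder type '{}'".format(requested))
        folder_type = "Folder"
    return folder_type


print(resolve_folder_type("shot"))     # -> "Shot"
print(resolve_folder_type("Episode"))  # -> "Folder" (after the warning)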
@@ -223,12 +237,11 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):
# filter only the active publishing instances
active_folder_paths = set()
for instance in context:
    if instance.data.get("publish") is not False:
    if instance.data.get("publish", True) is not False:
        active_folder_paths.add(instance.data.get("folderPath"))

active_folder_paths.discard(None)

self.log.debug("Active folder paths: {}".format(active_folder_paths))
if not active_folder_paths:
    return None

@@ -237,11 +250,11 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):
hierarchy_context = copy.deepcopy(context.data["hierarchyContext"])
for key, value in hierarchy_context.items():
    project_item = copy.deepcopy(value)
    project_children_context = project_item.pop("childs", None)
    project_children_context = project_item.pop("children", None)
    project_item["name"] = key
    project_item["tasks"] = []
    project_item["attributes"] = project_item.pop(
        "custom_attributes", {}
        "attributes", {}
    )
    project_item["children"] = []
@@ -265,22 +278,23 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):
folder_path = "{}/{}".format(parent_path, folder_name)
if (
    folder_path not in active_folder_paths
    and not folder_info.get("childs")
    and not folder_info.get("children")
):
    continue

item_id = uuid.uuid4().hex
new_item = copy.deepcopy(folder_info)
new_children_context = new_item.pop("children", None)
tasks = new_item.pop("tasks", {})

new_item["name"] = folder_name
new_item["children"] = []
new_children_context = new_item.pop("childs", None)
tasks = new_item.pop("tasks", {})
task_items = []
for task_name, task_info in tasks.items():
    task_info["name"] = task_name
    task_items.append(task_info)
new_item["tasks"] = task_items
new_item["attributes"] = new_item.pop("custom_attributes", {})
new_item["attributes"] = new_item.pop("attributes", {})

items_by_id[item_id] = new_item
parent_id_by_item_id[item_id] = parent_id
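The flattening step converts each folder entry into a standalone item: "children" and "tasks" are popped, tasks become a list of dicts carrying a "name" key, and the already-renamed "attributes" dict is kept. A self-contained sketch of that conversion, with an illustrative input entry that is not taken from the repository:

import copy
import uuid

# Illustrative folder entry in the new hierarchy-context format.
folder_name = "sh010"
folder_info = {
    "entity_type": "folder",
    "folder_type": "Shot",
    "tasks": {"compositing": {"type": "Compositing"}},
    "attributes": {"frameStart": 1001, "frameEnd": 1050},
    "children": {},
}

item_id = uuid.uuid4().hex
new_item = copy.deepcopy(folder_info)
new_children_context = new_item.pop("children", None)
tasks = new_item.pop("tasks", {})

new_item["name"] = folder_name
new_item["children"] = []
task_items = []
for task_name, task_info in tasks.items():
    # Tasks are converted from a name -> info mapping into a flat list.
    task_info["name"] = task_name
    task_items.append(task_info)
new_item["tasks"] = task_items
new_item["attributes"] = new_item.pop("attributes", {})

# new_item now contains:
# {"entity_type": "folder", "folder_type": "Shot", "name": "sh010",
#  "children": [], "tasks": [{"type": "Compositing", "name": "compositing"}],
#  "attributes": {"frameStart": 1001, "frameEnd": 1050}}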
@@ -1,122 +0,0 @@
from pprint import pformat

import ayon_api
import pyblish.api

from ayon_core.pipeline import KnownPublishError


class ValidateEditorialAssetName(pyblish.api.ContextPlugin):
    """ Validating if editorial's folder names are not already created in db.

    Checking variations of names with different size of caps or with
    or without underscores.
    """

    order = pyblish.api.ValidatorOrder
    label = "Validate Editorial Folder Name"
    hosts = [
        "hiero",
        "resolve",
        "flame",
        "traypublisher"
    ]

    def process(self, context):

        folder_and_parents = self.get_parents(context)
        self.log.debug("__ folder_and_parents: {}".format(folder_and_parents))

        project_name = context.data["projectName"]
        folder_entities = list(ayon_api.get_folders(
            project_name, fields={"path"}
        ))
        self.log.debug("__ folder_entities: {}".format(folder_entities))

        existing_folder_paths = {
            folder_entity["path"]: (
                folder_entity["path"].lstrip("/").rsplit("/")[0]
            )
            for folder_entity in folder_entities
        }

        self.log.debug("__ project_entities: {}".format(
            pformat(existing_folder_paths)))

        folders_missing_name = {}
        folders_wrong_parent = {}
        for folder_path in folder_and_parents.keys():
            if folder_path not in existing_folder_paths.keys():
                # add to some nonexistent list for next layer of check
                folders_missing_name[folder_path] = (
                    folder_and_parents[folder_path]
                )
                continue

            existing_parents = existing_folder_paths[folder_path]
            if folder_and_parents[folder_path] != existing_parents:
                # add to some nonexistent list for next layer of check
                folders_wrong_parent[folder_path] = {
                    "required": folder_and_parents[folder_path],
                    "already_in_db": existing_folder_paths[folder_path]
                }
                continue

            self.log.debug("correct folder: {}".format(folder_path))

        if folders_missing_name:
            wrong_names = {}
            self.log.debug(
                ">> folders_missing_name: {}".format(folders_missing_name))

            # This will create set of folder paths
            folder_paths = {
                folder_path.lower().replace("_", "")
                for folder_path in existing_folder_paths
            }

            for folder_path in folders_missing_name:
                _folder_path = folder_path.lower().replace("_", "")
                if _folder_path in folder_paths:
                    wrong_names[folder_path].update(
                        {
                            "required_name": folder_path,
                            "used_variants_in_db": [
                                p
                                for p in existing_folder_paths
                                if p.lower().replace("_", "") == _folder_path
                            ]
                        }
                    )

            if wrong_names:
                self.log.debug(
                    ">> wrong_names: {}".format(wrong_names))
                raise Exception(
                    "Some already existing folder name variants `{}`".format(
                        wrong_names))

        if folders_wrong_parent:
            self.log.debug(
                ">> folders_wrong_parent: {}".format(folders_wrong_parent))
            raise KnownPublishError(
                "Wrong parents on folders `{}`".format(folders_wrong_parent))

    def get_parents(self, context):
        output = {}
        for instance in context:
            folder_path = instance.data["folderPath"]
            families = instance.data.get("families", []) + [
                instance.data["family"]
            ]
            # filter out non-shot families
            if "shot" not in families:
                continue

            parents = instance.data["parents"]

            output[folder_path] = [
                str(p["entity_name"]) for p in parents
                if p["entity_type"].lower() != "project"
            ]
        return output
@@ -13,3 +13,21 @@ from .items import (
)
from .lib import create_slates
from .example import example


__all__ = (
    "FontFactory",
    "BaseObj",
    "load_default_style",
    "MainFrame",
    "Layer",
    "BaseItem",
    "ItemImage",
    "ItemRectangle",
    "ItemPlaceHolder",
    "ItemText",
    "ItemTable",
    "TableField",
    "create_slates",
    "example",
)

@@ -1,4 +1,4 @@
from qtpy import QtWidgets, QtCore, QtGui
from qtpy import QtWidgets, QtCore

from ayon_core.lib.events import QueuedEventSystem
from ayon_core.tools.utils import PlaceholderLineEdit, GoToCurrentButton

@@ -1,4 +1,4 @@
from qtpy import QtWidgets, QtCore, QtGui
from qtpy import QtWidgets

from ayon_core.lib.events import QueuedEventSystem
from ayon_core.tools.utils import PlaceholderLineEdit, FoldersWidget

@@ -1,7 +1,3 @@
import os
import json
import time

from qtpy import QtWidgets, QtCore

from .widgets import (

@@ -1,4 +1,4 @@
from qtpy import QtWidgets, QtCore, QtGui
from qtpy import QtCore, QtGui

from ayon_core.style import get_default_entity_icon_color
from ayon_core.tools.utils import get_qt_icon

@@ -7,8 +7,6 @@ from __future__ import (

import os
import sys
import numbers
import copy
import collections

from qtpy import QtCore

@@ -7,7 +7,6 @@ from qtpy import QtWidgets, QtCore, QtGui
import qtawesome

from ayon_core.style import (
    get_default_entity_icon_color,
    get_objected_colors,
    get_app_icon_path,
)

@@ -2,7 +2,7 @@ import re
import logging

import qtpy
from qtpy import QtCore, QtGui
from qtpy import QtCore

log = logging.getLogger(__name__)

@@ -1,7 +1,6 @@
import os
import sys
import re
import json
import shutil
import argparse
import zipfile

@@ -220,7 +219,6 @@ def main(
    addons=None,
):
    current_dir = Path(os.path.dirname(os.path.abspath(__file__)))
    root_dir = current_dir.parent
    create_zip = not skip_zip

    if output_dir:

@@ -1,7 +1,6 @@
from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
    ensure_unique_names
)

from .general import (

@@ -1,7 +1,6 @@
from ayon_server.settings import (
    BaseSettingsModel,
    SettingsField,
    ensure_unique_names,
)

from .imageio import TVPaintImageIOModel