Commit 0ac3cacc35: adding a Qt lockfile dialog for lockfile tasks
Mirror of https://github.com/ynput/ayon-core.git
66 changed files with 2066 additions and 1813 deletions
@@ -14,6 +14,8 @@ from bson.objectid import ObjectId
 from .mongo import get_project_database, get_project_connection

+PatternType = type(re.compile(""))
+

 def _prepare_fields(fields, required_fields=None):
     if not fields:
@@ -1054,11 +1056,11 @@ def _regex_filters(filters):
     for key, value in filters.items():
         regexes = []
        a_values = []
-        if isinstance(value, re.Pattern):
+        if isinstance(value, PatternType):
             regexes.append(value)
         elif isinstance(value, (list, tuple, set)):
             for item in value:
-                if isinstance(item, re.Pattern):
+                if isinstance(item, PatternType):
                     regexes.append(item)
                 else:
                     a_values.append(item)
@@ -1194,7 +1196,7 @@ def get_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter. Filter
             ignored if 'None' is passed.
-        context_filters (Dict[str, List[str, re.Pattern]]): Filter by
+        context_filters (Dict[str, List[str, PatternType]]): Filter by
             representation context fields.
         names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
             using version ids and list of names under the version.
@@ -1240,7 +1242,7 @@ def get_archived_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter. Filter
             ignored if 'None' is passed.
-        context_filters (Dict[str, List[str, re.Pattern]]): Filter by
+        context_filters (Dict[str, List[str, PatternType]]): Filter by
             representation context fields.
         names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
             using version ids and list of names under the version.
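
Aside: the added `PatternType = type(re.compile(""))` alias fills the role of `re.Pattern`, which only became a public name in Python 3.8; a minimal sketch of the equivalence:

    import re

    # type(re.compile("")) resolves to the compiled-pattern class on any
    # Python version; the public alias `re.Pattern` is newer (3.8+).
    PatternType = type(re.compile(""))
    assert isinstance(re.compile(r"\d+"), PatternType)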
@@ -1,8 +1,6 @@
 import os
-from openpype.lib import (
-    PreLaunchHook,
-    create_workdir_extra_folders
-)
+from openpype.lib import PreLaunchHook
+from openpype.pipeline.workfile import create_workdir_extra_folders


 class AddLastWorkfileToLaunchArgs(PreLaunchHook):
@@ -1,113 +0,0 @@
-import os
-import collections
-from pprint import pformat
-
-import pyblish.api
-
-from openpype.client import (
-    get_subsets,
-    get_last_versions,
-    get_representations
-)
-from openpype.pipeline import legacy_io
-
-
-class AppendCelactionAudio(pyblish.api.ContextPlugin):
-
-    label = "Colect Audio for publishing"
-    order = pyblish.api.CollectorOrder + 0.1
-
-    def process(self, context):
-        self.log.info('Collecting Audio Data')
-        asset_doc = context.data["assetEntity"]
-
-        # get all available representations
-        subsets = self.get_subsets(
-            asset_doc,
-            representations=["audio", "wav"]
-        )
-        self.log.info(f"subsets is: {pformat(subsets)}")
-
-        if not subsets.get("audioMain"):
-            raise AttributeError("`audioMain` subset does not exist")
-
-        reprs = subsets.get("audioMain", {}).get("representations", [])
-        self.log.info(f"reprs is: {pformat(reprs)}")
-
-        repr = next((r for r in reprs), None)
-        if not repr:
-            raise "Missing `audioMain` representation"
-        self.log.info(f"representation is: {repr}")
-
-        audio_file = repr.get('data', {}).get('path', "")
-
-        if os.path.exists(audio_file):
-            context.data["audioFile"] = audio_file
-            self.log.info(
-                'audio_file: {}, has been added to context'.format(audio_file))
-        else:
-            self.log.warning("Couldn't find any audio file on Ftrack.")
-
-    def get_subsets(self, asset_doc, representations):
-        """
-        Query subsets with filter on name.
-
-        The method will return all found subsets and its defined version
-        and subsets. Version could be specified with number. Representation
-        can be filtered.
-
-        Arguments:
-            asset_doct (dict): Asset (shot) mongo document
-            representations (list): list for all representations
-
-        Returns:
-            dict: subsets with version and representations in keys
-        """
-
-        # Query all subsets for asset
-        project_name = legacy_io.active_project()
-        subset_docs = get_subsets(
-            project_name, asset_ids=[asset_doc["_id"]], fields=["_id"]
-        )
-        # Collect all subset ids
-        subset_ids = [
-            subset_doc["_id"]
-            for subset_doc in subset_docs
-        ]
-
-        # Check if we found anything
-        assert subset_ids, (
-            "No subsets found. Check correct filter. "
-            "Try this for start `r'.*'`: asset: `{}`"
-        ).format(asset_doc["name"])
-
-        last_versions_by_subset_id = get_last_versions(
-            project_name, subset_ids, fields=["_id", "parent"]
-        )
-
-        version_docs_by_id = {}
-        for version_doc in last_versions_by_subset_id.values():
-            version_docs_by_id[version_doc["_id"]] = version_doc
-
-        repre_docs = get_representations(
-            project_name,
-            version_ids=version_docs_by_id.keys(),
-            representation_names=representations
-        )
-        repre_docs_by_version_id = collections.defaultdict(list)
-        for repre_doc in repre_docs:
-            version_id = repre_doc["parent"]
-            repre_docs_by_version_id[version_id].append(repre_doc)
-
-        output_dict = {}
-        for version_id, repre_docs in repre_docs_by_version_id.items():
-            version_doc = version_docs_by_id[version_id]
-            subset_id = version_doc["parent"]
-            subset_doc = last_versions_by_subset_id[subset_id]
-            # Store queried docs by subset name
-            output_dict[subset_doc["name"]] = {
-                "representations": repre_docs,
-                "version": version_doc
-            }
-
-        return output_dict
@@ -51,7 +51,8 @@ from .pipeline import (
 )
 from .menu import (
     FlameMenuProjectConnect,
-    FlameMenuTimeline
+    FlameMenuTimeline,
+    FlameMenuUniversal
 )
 from .plugin import (
     Creator,
@@ -131,6 +132,7 @@ __all__ = [
     # menu
     "FlameMenuProjectConnect",
     "FlameMenuTimeline",
+    "FlameMenuUniversal",

     # plugin
     "Creator",
@@ -201,3 +201,53 @@ class FlameMenuTimeline(_FlameMenuApp):
         if self.flame:
             self.flame.execute_shortcut('Rescan Python Hooks')
             self.log.info('Rescan Python Hooks')
+
+
+class FlameMenuUniversal(_FlameMenuApp):
+
+    # flameMenuProjectconnect app takes care of the preferences dialog as well
+
+    def __init__(self, framework):
+        _FlameMenuApp.__init__(self, framework)
+
+    def __getattr__(self, name):
+        def method(*args, **kwargs):
+            project = self.dynamic_menu_data.get(name)
+            if project:
+                self.link_project(project)
+        return method
+
+    def build_menu(self):
+        if not self.flame:
+            return []
+
+        menu = deepcopy(self.menu)
+
+        menu['actions'].append({
+            "name": "Load...",
+            "execute": lambda x: self.tools_helper.show_loader()
+        })
+        menu['actions'].append({
+            "name": "Manage...",
+            "execute": lambda x: self.tools_helper.show_scene_inventory()
+        })
+        menu['actions'].append({
+            "name": "Library...",
+            "execute": lambda x: self.tools_helper.show_library_loader()
+        })
+        return menu
+
+    def refresh(self, *args, **kwargs):
+        self.rescan()
+
+    def rescan(self, *args, **kwargs):
+        if not self.flame:
+            try:
+                import flame
+                self.flame = flame
+            except ImportError:
+                self.flame = None
+
+        if self.flame:
+            self.flame.execute_shortcut('Rescan Python Hooks')
+            self.log.info('Rescan Python Hooks')
@@ -361,6 +361,8 @@ class PublishableClip:
     index_from_segment_default = False
     use_shot_name_default = False
     include_handles_default = False
+    retimed_handles_default = True
+    retimed_framerange_default = True

     def __init__(self, segment, **kwargs):
         self.rename_index = kwargs["rename_index"]
@@ -496,6 +498,14 @@ class PublishableClip:
             "audio", {}).get("value") or False
         self.include_handles = self.ui_inputs.get(
             "includeHandles", {}).get("value") or self.include_handles_default
+        self.retimed_handles = (
+            self.ui_inputs.get("retimedHandles", {}).get("value")
+            or self.retimed_handles_default
+        )
+        self.retimed_framerange = (
+            self.ui_inputs.get("retimedFramerange", {}).get("value")
+            or self.retimed_framerange_default
+        )

         # build subset name from layer name
         if self.subset_name == "[ track name ]":
@@ -276,6 +276,22 @@ class CreateShotClip(opfapi.Creator):
                 "target": "tag",
                 "toolTip": "By default handles are excluded",  # noqa
                 "order": 3
            },
+            "retimedHandles": {
+                "value": True,
+                "type": "QCheckBox",
+                "label": "Retimed handles",
+                "target": "tag",
+                "toolTip": "By default handles are retimed.",  # noqa
+                "order": 4
+            },
+            "retimedFramerange": {
+                "value": True,
+                "type": "QCheckBox",
+                "label": "Retimed framerange",
+                "target": "tag",
+                "toolTip": "By default framerange is retimed.",  # noqa
+                "order": 5
+            }
         }
     }
@@ -131,6 +131,10 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
                 "fps": self.fps,
                 "workfileFrameStart": workfile_start,
                 "sourceFirstFrame": int(first_frame),
+                "notRetimedHandles": (
+                    not marker_data.get("retimedHandles")),
+                "notRetimedFramerange": (
+                    not marker_data.get("retimedFramerange")),
                 "path": file_path,
                 "flameAddTasks": self.add_tasks,
                 "tasks": {
@@ -90,26 +90,38 @@ class ExtractSubsetResources(openpype.api.Extractor):
         handle_end = instance.data["handleEnd"]
         handles = max(handle_start, handle_end)
         include_handles = instance.data.get("includeHandles")
+        retimed_handles = instance.data.get("retimedHandles")

         # get media source range with handles
         source_start_handles = instance.data["sourceStartH"]
         source_end_handles = instance.data["sourceEndH"]

         # retime if needed
         if r_speed != 1.0:
-            source_start_handles = (
-                instance.data["sourceStart"] - r_handle_start)
-            source_end_handles = (
-                source_start_handles
-                + (r_source_dur - 1)
-                + r_handle_start
-                + r_handle_end
-            )
+            if retimed_handles:
+                # handles are retimed
+                source_start_handles = (
+                    instance.data["sourceStart"] - r_handle_start)
+                source_end_handles = (
+                    source_start_handles
+                    + (r_source_dur - 1)
+                    + r_handle_start
+                    + r_handle_end
+                )
+            else:
+                # handles are not retimed
+                source_end_handles = (
+                    source_start_handles
+                    + (r_source_dur - 1)
+                    + handle_start
+                    + handle_end
+                )

         # get frame range with handles for representation range
         frame_start_handle = frame_start - handle_start
+        repre_frame_start = frame_start_handle
         if include_handles:
-            if r_speed == 1.0:
+            if r_speed == 1.0 or not retimed_handles:
                 frame_start_handle = frame_start
             else:
                 frame_start_handle = (
@@ -73,6 +73,8 @@ def load_apps():
         opfapi.FlameMenuProjectConnect(opfapi.CTX.app_framework))
     opfapi.CTX.flame_apps.append(
         opfapi.FlameMenuTimeline(opfapi.CTX.app_framework))
+    opfapi.CTX.flame_apps.append(
+        opfapi.FlameMenuUniversal(opfapi.CTX.app_framework))
     opfapi.CTX.app_framework.log.info("Apps are loaded")


@@ -191,3 +193,27 @@ def get_timeline_custom_ui_actions():
     openpype_install()

     return _build_app_menu("FlameMenuTimeline")
+
+
+def get_batch_custom_ui_actions():
+    """Hook to create submenu in batch
+
+    Returns:
+        list: menu object
+    """
+    # install openpype and the host
+    openpype_install()
+
+    return _build_app_menu("FlameMenuUniversal")
+
+
+def get_media_panel_custom_ui_actions():
+    """Hook to create submenu in desktop
+
+    Returns:
+        list: menu object
+    """
+    # install openpype and the host
+    openpype_install()
+
+    return _build_app_menu("FlameMenuUniversal")
@@ -0,0 +1,10 @@
+from .addon import (
+    FusionAddon,
+    FUSION_HOST_DIR,
+)
+
+
+__all__ = (
+    "FusionAddon",
+    "FUSION_HOST_DIR",
+)

openpype/hosts/fusion/addon.py (new file, 23 lines)
@@ -0,0 +1,23 @@
+import os
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import IHostAddon
+
+FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class FusionAddon(OpenPypeModule, IHostAddon):
+    name = "fusion"
+    host_name = "fusion"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(FUSION_HOST_DIR, "hooks")
+        ]
+
+    def get_workfile_extensions(self):
+        return [".comp"]
@@ -18,12 +18,11 @@ from openpype.pipeline import (
     deregister_inventory_action_path,
     AVALON_CONTAINER_ID,
 )
-import openpype.hosts.fusion
+from openpype.hosts.fusion import FUSION_HOST_DIR

 log = Logger.get_logger(__name__)

-HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__))
-PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
+PLUGINS_DIR = os.path.join(FUSION_HOST_DIR, "plugins")

 PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
 LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
@@ -2,13 +2,11 @@
 import sys
 import os

-from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
-
 from .pipeline import get_current_comp


 def file_extensions():
-    return HOST_WORKFILE_EXTENSIONS["fusion"]
+    return [".comp"]


 def has_unsaved_changes():
@@ -318,10 +318,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin):

     @staticmethod
     def create_otio_time_range_from_timeline_item_data(track_item):
-        speed = track_item.playbackSpeed()
         timeline = phiero.get_current_sequence()
         frame_start = int(track_item.timelineIn())
-        frame_duration = int((track_item.duration() - 1) / speed)
+        frame_duration = int(track_item.duration())
         fps = timeline.framerate().toFloat()

         return hiero_export.create_otio_time_range(
@@ -16,6 +16,7 @@ from openpype.host import (
     HostDirmap,
 )
 from openpype.tools.utils import host_tools
+from openpype.tools.workfiles.lock_dialog import WorkfileLockDialog
 from openpype.lib import (
     register_event_callback,
     emit_event
@@ -38,7 +39,7 @@ from openpype.pipeline.workfile.lock_workfile import (
     is_workfile_lock_enabled
 )
 from openpype.hosts.maya import MAYA_ROOT_DIR
-from openpype.hosts.maya.lib import copy_workspace_mel
+from openpype.hosts.maya.lib import create_workspace_mel

 from . import menu, lib
 from .workio import (
@@ -69,7 +70,7 @@ class MayaHost(HostBase, IWorkfileHost, ILoadHost):
         self._op_events = {}

     def install(self):
-        project_name = os.getenv("AVALON_PROJECT")
+        project_name = legacy_io.active_project()
         project_settings = get_project_settings(project_name)
         # process path mapping
         dirmap_processor = MayaDirmap("maya", project_name, project_settings)
@@ -488,20 +489,18 @@ def check_lock_on_current_file():
     # add the lock file when opening the file
     filepath = current_file()

-    if not is_workfile_locked(filepath):
-        create_workfile_lock(filepath)
-        return
-
-    # add lockfile dialog
-    from Qt import QtWidgets
-    from openpype.tools.workfiles.lock_dialog import WorkfileLockDialog
-
-    top_level_widgets = {w.objectName(): w for w in
-                         QtWidgets.QApplication.topLevelWidgets()}
-    parent = top_level_widgets.get("MayaWindow", None)
-    workfile_dialog = WorkfileLockDialog(filepath, parent=parent)
-    workfile_dialog.show()
+    if is_workfile_locked(filepath):
+        # add lockfile dialog
+        from Qt import QtWidgets
+        top_level_widgets = {w.objectName(): w for w in
+                             QtWidgets.QApplication.topLevelWidgets()}
+        parent = top_level_widgets.get("MayaWindow", None)
+        workfile_dialog = WorkfileLockDialog(filepath, parent=parent)
+        if not workfile_dialog.exec_():
+            cmds.file(new=True)
+            return
+
+    create_workfile_lock(filepath)


 def on_before_close():
     """Delete the lock file after user quitting the Maya Scene"""
@@ -631,7 +630,7 @@ def on_task_changed():
     lib.update_content_on_context_change()

     msg = " project: {}\n asset: {}\n task:{}".format(
-        legacy_io.Session["AVALON_PROJECT"],
+        legacy_io.active_project(),
         legacy_io.Session["AVALON_ASSET"],
         legacy_io.Session["AVALON_TASK"]
     )
@@ -647,10 +646,11 @@ def before_workfile_open():


 def before_workfile_save(event):
+    project_name = legacy_io.active_project()
     _remove_workfile_lock()
     workdir_path = event["workdir_path"]
     if workdir_path:
-        copy_workspace_mel(workdir_path)
+        create_workspace_mel(workdir_path, project_name)


 class MayaDirmap(HostDirmap):
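
A condensed sketch of the new lock-check flow above; `maya_main_window` stands in for the MayaWindow lookup shown in the diff:

    def check_lock_on_current_file():
        # Modal exec_() replaces the old non-blocking show(): the user can
        # now refuse to open an already locked workfile.
        filepath = current_file()
        if is_workfile_locked(filepath):
            dialog = WorkfileLockDialog(filepath, parent=maya_main_window)
            if not dialog.exec_():
                cmds.file(new=True)  # user backed out: open an empty scene
                return
        create_workfile_lock(filepath)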
@@ -1,5 +1,5 @@
 from openpype.lib import PreLaunchHook
-from openpype.hosts.maya.lib import copy_workspace_mel
+from openpype.hosts.maya.lib import create_workspace_mel


 class PreCopyMel(PreLaunchHook):
@@ -10,9 +10,10 @@ class PreCopyMel(PreLaunchHook):
     app_groups = ["maya"]

     def execute(self):
+        project_name = self.launch_context.env.get("AVALON_PROJECT")
         workdir = self.launch_context.env.get("AVALON_WORKDIR")
         if not workdir:
             self.log.warning("BUG: Workdir is not filled.")
             return

-        copy_workspace_mel(workdir)
+        create_workspace_mel(workdir, project_name)
@@ -1,26 +1,24 @@
 import os
-import shutil
+from openpype.settings import get_project_settings
+from openpype.lib import Logger


-def copy_workspace_mel(workdir):
-    # Check that source mel exists
-    current_dir = os.path.dirname(os.path.abspath(__file__))
-    src_filepath = os.path.join(current_dir, "resources", "workspace.mel")
-    if not os.path.exists(src_filepath):
-        print("Source mel file does not exist. {}".format(src_filepath))
-        return
-
-    # Skip if workspace.mel already exists
+def create_workspace_mel(workdir, project_name):
     dst_filepath = os.path.join(workdir, "workspace.mel")
     if os.path.exists(dst_filepath):
         return

     # Create workdir if does not exists yet
     if not os.path.exists(workdir):
         os.makedirs(workdir)

-    # Copy file
-    print("Copying workspace mel \"{}\" -> \"{}\"".format(
-        src_filepath, dst_filepath
-    ))
-    shutil.copy(src_filepath, dst_filepath)
+    project_setting = get_project_settings(project_name)
+    mel_script = project_setting["maya"].get("mel_workspace")
+
+    # Skip if mel script in settings is empty
+    if not mel_script:
+        log = Logger.get_logger("create_workspace_mel")
+        log.debug("File 'workspace.mel' not created. Settings value is empty.")
+        return
+
+    with open(dst_filepath, "w") as mel_file:
+        mel_file.write(mel_script)
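
In short, workspace.mel is now generated from the `mel_workspace` project setting instead of being copied from a bundled resource; a minimal usage sketch (the workdir path and project name are hypothetical):

    from openpype.hosts.maya.lib import create_workspace_mel

    # Writes <workdir>/workspace.mel from
    # project_settings["maya"]["mel_workspace"]; existing files and empty
    # settings values are skipped.
    create_workspace_mel("/projects/demo/work/shot010", "demo")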
@@ -70,7 +70,7 @@ class CollectAssembly(pyblish.api.InstancePlugin):
             data[representation_id].append(instance_data)

         instance.data["scenedata"] = dict(data)
-        instance.data["hierarchy"] = list(set(hierarchy_nodes))
+        instance.data["nodesHierarchy"] = list(set(hierarchy_nodes))

     def get_file_rule(self, rule):
         return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))
@@ -293,6 +293,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
             "source": filepath,
             "expectedFiles": full_exp_files,
+            "publishRenderMetadataFolder": common_publish_meta_path,
             "renderProducts": layer_render_products,
             "resolutionWidth": lib.get_attr_in_layer(
                 "defaultResolution.width", layer=layer_name
             ),
@@ -359,7 +360,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
         instance.data["label"] = label
         instance.data["farm"] = True
         instance.data.update(data)
-        self.log.debug("data: {}".format(json.dumps(data, indent=4)))

     def parse_options(self, render_globals):
         """Get all overrides with a value, skip those without.
@@ -33,7 +33,7 @@ class ExtractAssembly(openpype.api.Extractor):
         json.dump(instance.data["scenedata"], filepath, ensure_ascii=False)

         self.log.info("Extracting point cache ..")
-        cmds.select(instance.data["hierarchy"])
+        cmds.select(instance.data["nodesHierarchy"])

         # Run basic alembic exporter
         extract_alembic(file=hierarchy_path,
@@ -48,7 +48,7 @@ class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin):
         from openpype.hosts.maya.api import lib

         # Get all transforms in the loaded containers
-        container_roots = cmds.listRelatives(instance.data["hierarchy"],
+        container_roots = cmds.listRelatives(instance.data["nodesHierarchy"],
                                              children=True,
                                              type="transform",
                                              fullPath=True)
@@ -1,11 +0,0 @@
-//Maya 2018 Project Definition
-
-workspace -fr "shaders" "renderData/shaders";
-workspace -fr "alembicCache" "cache/alembic";
-workspace -fr "mayaAscii" "";
-workspace -fr "mayaBinary" "";
-workspace -fr "renderData" "renderData";
-workspace -fr "fileCache" "cache/nCache";
-workspace -fr "scene" "";
-workspace -fr "sourceImages" "sourceimages";
-workspace -fr "images" "renders";
@@ -201,34 +201,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
         if not instance.data["review"]:
             instance.data["useSequenceForReview"] = False

-        project_name = legacy_io.active_project()
-        asset_name = instance.data["asset"]
-        # * Add audio to instance if exists.
-        # Find latest versions document
-        last_version_doc = get_last_version_by_subset_name(
-            project_name, "audioMain", asset_name=asset_name, fields=["_id"]
-        )
-
-        repre_doc = None
-        if last_version_doc:
-            # Try to find it's representation (Expected there is only one)
-            repre_docs = list(get_representations(
-                project_name, version_ids=[last_version_doc["_id"]]
-            ))
-            if not repre_docs:
-                self.log.warning(
-                    "Version document does not contain any representations"
-                )
-            else:
-                repre_doc = repre_docs[0]
-
-        # Add audio to instance if representation was found
-        if repre_doc:
-            instance.data["audio"] = [{
-                "offset": 0,
-                "filename": get_representation_path(repre_doc)
-            }]
-
         self.log.debug("instance.data: {}".format(pformat(instance.data)))

     def is_prerender(self, families):
@@ -1,21 +1,60 @@
 import os
 import re
-import abc
 import json
 import logging
-import six
-import platform
+import functools
+import warnings

 import clique

-from openpype.client import get_project
 from openpype.settings import get_project_settings

-from .profiles_filtering import filter_profiles
-
 log = logging.getLogger(__name__)


+class PathToolsDeprecatedWarning(DeprecationWarning):
+    pass
+
+
+def deprecated(new_destination):
+    """Mark functions as deprecated.
+
+    It will result in a warning being emitted when the function is used.
+    """
+
+    func = None
+    if callable(new_destination):
+        func = new_destination
+        new_destination = None
+
+    def _decorator(decorated_func):
+        if new_destination is None:
+            warning_message = (
+                " Please check content of deprecated function to figure out"
+                " possible replacement."
+            )
+        else:
+            warning_message = " Please replace your usage with '{}'.".format(
+                new_destination
+            )
+
+        @functools.wraps(decorated_func)
+        def wrapper(*args, **kwargs):
+            warnings.simplefilter("always", PathToolsDeprecatedWarning)
+            warnings.warn(
+                (
+                    "Call to deprecated function '{}'"
+                    "\nFunction was moved or removed.{}"
+                ).format(decorated_func.__name__, warning_message),
+                category=PathToolsDeprecatedWarning,
+                stacklevel=4
+            )
+            return decorated_func(*args, **kwargs)
+        return wrapper
+
+    if func is None:
+        return _decorator
+    return _decorator(func)
+
+
 def format_file_size(file_size, suffix=None):
     """Returns formatted string with size in appropriate unit.
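
The decorator supports both bare and parametrized use, as the `callable(new_destination)` check above implies; a short sketch of the two call styles:

    # Bare form: the decorated function itself is passed in.
    @deprecated
    def legacy_helper():
        pass

    # Parametrized form: the replacement path is passed first and the
    # emitted warning points users at it.
    @deprecated("openpype.pipeline.project_folders.fill_paths")
    def legacy_fill_paths(path_list, anatomy):
        pass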
@@ -232,107 +271,69 @@ def get_last_version_from_path(path_dir, filter):
     return None


+@deprecated("openpype.pipeline.project_folders.concatenate_splitted_paths")
 def concatenate_splitted_paths(split_paths, anatomy):
-    pattern_array = re.compile(r"\[.*\]")
-    output = []
-    for path_items in split_paths:
-        clean_items = []
-        if isinstance(path_items, str):
-            path_items = [path_items]
-
-        for path_item in path_items:
-            if not re.match(r"{.+}", path_item):
-                path_item = re.sub(pattern_array, "", path_item)
-            clean_items.append(path_item)
-
-        # backward compatibility
-        if "__project_root__" in path_items:
-            for root, root_path in anatomy.roots.items():
-                if not os.path.exists(str(root_path)):
-                    log.debug("Root {} path path {} not exist on \
-                        computer!".format(root, root_path))
-                    continue
-                clean_items = ["{{root[{}]}}".format(root),
-                               r"{project[name]}"] + clean_items[1:]
-                output.append(os.path.normpath(os.path.sep.join(clean_items)))
-                continue
-
-        output.append(os.path.normpath(os.path.sep.join(clean_items)))
-
-    return output
+    """
+    Deprecated:
+        Function will be removed after release version 3.16.*
+    """
+
+    from openpype.pipeline.project_folders import concatenate_splitted_paths
+
+    return concatenate_splitted_paths(split_paths, anatomy)


+@deprecated
 def get_format_data(anatomy):
-    project_doc = get_project(anatomy.project_name, fields=["data.code"])
-    project_code = project_doc["data"]["code"]
-    return {
-        "root": anatomy.roots,
-        "project": {
-            "name": anatomy.project_name,
-            "code": project_code
-        },
-    }
+    """
+    Deprecated:
+        Function will be removed after release version 3.16.*
+    """
+
+    from openpype.pipeline.template_data import get_project_template_data
+
+    data = get_project_template_data(project_name=anatomy.project_name)
+    data["root"] = anatomy.roots
+    return data


+@deprecated("openpype.pipeline.project_folders.fill_paths")
 def fill_paths(path_list, anatomy):
-    format_data = get_format_data(anatomy)
-    filled_paths = []
-    for path in path_list:
-        new_path = path.format(**format_data)
-        filled_paths.append(new_path)
-
-    return filled_paths
+    """
+    Deprecated:
+        Function will be removed after release version 3.16.*
+    """
+
+    from openpype.pipeline.project_folders import fill_paths
+
+    return fill_paths(path_list, anatomy)


+@deprecated("openpype.pipeline.project_folders.create_project_folders")
 def create_project_folders(basic_paths, project_name):
-    from openpype.pipeline import Anatomy
-    anatomy = Anatomy(project_name)
-    concat_paths = concatenate_splitted_paths(basic_paths, anatomy)
-    filled_paths = fill_paths(concat_paths, anatomy)
-
-    # Create folders
-    for path in filled_paths:
-        if os.path.exists(path):
-            log.debug("Folder already exists: {}".format(path))
-        else:
-            log.debug("Creating folder: {}".format(path))
-            os.makedirs(path)
-
-
-def _list_path_items(folder_structure):
-    output = []
-    for key, value in folder_structure.items():
-        if not value:
-            output.append(key)
-        else:
-            paths = _list_path_items(value)
-            for path in paths:
-                if not isinstance(path, (list, tuple)):
-                    path = [path]
-
-                item = [key]
-                item.extend(path)
-                output.append(item)
-
-    return output
+    """
+    Deprecated:
+        Function will be removed after release version 3.16.*
+    """
+
+    from openpype.pipeline.project_folders import create_project_folders
+
+    return create_project_folders(project_name, basic_paths)


+@deprecated("openpype.pipeline.project_folders.get_project_basic_paths")
 def get_project_basic_paths(project_name):
-    project_settings = get_project_settings(project_name)
-    folder_structure = (
-        project_settings["global"]["project_folder_structure"]
-    )
-    if not folder_structure:
-        return []
-
-    if isinstance(folder_structure, str):
-        folder_structure = json.loads(folder_structure)
-    return _list_path_items(folder_structure)
+    """
+    Deprecated:
+        Function will be removed after release version 3.16.*
+    """
+
+    from openpype.pipeline.project_folders import get_project_basic_paths
+
+    return get_project_basic_paths(project_name)


+@deprecated("openpype.pipeline.workfile.create_workdir_extra_folders")
 def create_workdir_extra_folders(
         workdir, host_name, task_type, task_name, project_name,
         project_settings=None
@@ -349,37 +350,18 @@ def create_workdir_extra_folders(
         project_name (str): Name of project on which task is.
         project_settings (dict): Prepared project settings. Are loaded if not
             passed.
+
+    Deprecated:
+        Function will be removed after release version 3.16.*
     """
-    # Load project settings if not set
-    if not project_settings:
-        project_settings = get_project_settings(project_name)
-
-    # Load extra folders profiles
-    extra_folders_profiles = (
-        project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
-    )
-    # Skip if are empty
-    if not extra_folders_profiles:
-        return
-
-    # Prepare profiles filters
-    filter_data = {
-        "task_types": task_type,
-        "task_names": task_name,
-        "hosts": host_name
-    }
-    profile = filter_profiles(extra_folders_profiles, filter_data)
-    if profile is None:
-        return
-
-    for subfolder in profile["folders"]:
-        # Make sure backslashes are converted to forwards slashes
-        # and does not start with slash
-        subfolder = subfolder.replace("\\", "/").lstrip("/")
-        # Skip empty strings
-        if not subfolder:
-            continue
-
-        fullpath = os.path.join(workdir, subfolder)
-        if not os.path.exists(fullpath):
-            os.makedirs(fullpath)
+    from openpype.pipeline.project_folders import create_workdir_extra_folders
+
+    return create_workdir_extra_folders(
+        workdir,
+        host_name,
+        task_type,
+        task_name,
+        project_name,
+        project_settings
+    )
@@ -3,7 +3,6 @@
 import os
 import logging
-import re
 import json

 import warnings
 import functools
@@ -9,6 +9,7 @@ import os
 from abc import abstractmethod
 import platform
 import getpass
+from functools import partial
 from collections import OrderedDict

 import six
@@ -66,6 +67,96 @@ def requests_get(*args, **kwargs):
     return requests.get(*args, **kwargs)


+class DeadlineKeyValueVar(dict):
+    """
+
+    Serializes dictionary key values as "{key}={value}" like Deadline uses
+    for EnvironmentKeyValue.
+
+    As an example:
+        EnvironmentKeyValue0="A_KEY=VALUE_A"
+        EnvironmentKeyValue1="OTHER_KEY=VALUE_B"
+
+    The keys are serialized in alphabetical order (sorted).
+
+    Example:
+        >>> var = DeadlineKeyValueVar("EnvironmentKeyValue")
+        >>> var["my_var"] = "hello"
+        >>> var["my_other_var"] = "hello2"
+        >>> var.serialize()
+
+    """
+    def __init__(self, key):
+        super(DeadlineKeyValueVar, self).__init__()
+        self.__key = key
+
+    def serialize(self):
+        key = self.__key
+
+        # Allow custom location for index in serialized string
+        if "{}" not in key:
+            key = key + "{}"
+
+        return {
+            key.format(index): "{}={}".format(var_key, var_value)
+            for index, (var_key, var_value) in enumerate(sorted(self.items()))
+        }
+
+
+class DeadlineIndexedVar(dict):
+    """
+
+    Allows to set and query values by integer indices:
+        Query: var[1] or var.get(1)
+        Set: var[1] = "my_value"
+        Append: var += "value"
+
+    Note: Iterating the instance is not guarantueed to be the order of the
+        indices. To do so iterate with `sorted()`
+
+    """
+    def __init__(self, key):
+        super(DeadlineIndexedVar, self).__init__()
+        self.__key = key
+
+    def serialize(self):
+        key = self.__key
+
+        # Allow custom location for index in serialized string
+        if "{}" not in key:
+            key = key + "{}"
+
+        return {
+            key.format(index): value for index, value in sorted(self.items())
+        }
+
+    def next_available_index(self):
+        # Add as first unused entry
+        i = 0
+        while i in self.keys():
+            i += 1
+        return i
+
+    def update(self, data):
+        # Force the integer key check
+        for key, value in data.items():
+            self.__setitem__(key, value)
+
+    def __iadd__(self, other):
+        index = self.next_available_index()
+        self[index] = other
+        return self
+
+    def __setitem__(self, key, value):
+        if not isinstance(key, int):
+            raise TypeError("Key must be an integer: {}".format(key))
+
+        if key < 0:
+            raise ValueError("Negative index can't be set: {}".format(key))
+        dict.__setitem__(self, key, value)
+
+
 @attr.s
 class DeadlineJobInfo(object):
     """Mapping of all Deadline *JobInfo* attributes.
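
A short sketch of how these containers serialize, following the `serialize()` implementations above (the values are illustrative):

    env = DeadlineKeyValueVar("EnvironmentKeyValue")
    env["my_var"] = "hello"
    env["my_other_var"] = "hello2"
    # Keys are sorted alphabetically before being indexed.
    print(env.serialize())
    # {'EnvironmentKeyValue0': 'my_other_var=hello2',
    #  'EnvironmentKeyValue1': 'my_var=hello'}

    out = DeadlineIndexedVar("OutputFilename")
    out += "beauty.0001.exr"   # __iadd__ stores at the next free index
    out[1] = "depth.0001.exr"  # integer keys are enforced by __setitem__
    print(out.serialize())
    # {'OutputFilename0': 'beauty.0001.exr',
    #  'OutputFilename1': 'depth.0001.exr'}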
@@ -218,24 +309,8 @@ class DeadlineJobInfo(object):

     # Environment
     # ----------------------------------------------
-    _environmentKeyValue = attr.ib(factory=list)
-
-    @property
-    def EnvironmentKeyValue(self):  # noqa: N802
-        """Return all environment key values formatted for Deadline.
-
-        Returns:
-            dict: as `{'EnvironmentKeyValue0', 'key=value'}`
-
-        """
-        out = {}
-        for index, v in enumerate(self._environmentKeyValue):
-            out["EnvironmentKeyValue{}".format(index)] = v
-        return out
-
-    @EnvironmentKeyValue.setter
-    def EnvironmentKeyValue(self, val):  # noqa: N802
-        self._environmentKeyValue.append(val)
+    EnvironmentKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar,
+                                                  "EnvironmentKeyValue"))

     IncludeEnvironment = attr.ib(default=None)  # Default: false
     UseJobEnvironmentOnly = attr.ib(default=None)  # Default: false
@@ -243,121 +318,29 @@ class DeadlineJobInfo(object):

     # Job Extra Info
     # ----------------------------------------------
-    _extraInfos = attr.ib(factory=list)
-    _extraInfoKeyValues = attr.ib(factory=list)
-
-    @property
-    def ExtraInfo(self):  # noqa: N802
-        """Return all ExtraInfo values formatted for Deadline.
-
-        Returns:
-            dict: as `{'ExtraInfo0': 'value'}`
-
-        """
-        out = {}
-        for index, v in enumerate(self._extraInfos):
-            out["ExtraInfo{}".format(index)] = v
-        return out
-
-    @ExtraInfo.setter
-    def ExtraInfo(self, val):  # noqa: N802
-        self._extraInfos.append(val)
-
-    @property
-    def ExtraInfoKeyValue(self):  # noqa: N802
-        """Return all ExtraInfoKeyValue values formatted for Deadline.
-
-        Returns:
-            dict: as {'ExtraInfoKeyValue0': 'key=value'}`
-
-        """
-        out = {}
-        for index, v in enumerate(self._extraInfoKeyValues):
-            out["ExtraInfoKeyValue{}".format(index)] = v
-        return out
-
-    @ExtraInfoKeyValue.setter
-    def ExtraInfoKeyValue(self, val):  # noqa: N802
-        self._extraInfoKeyValues.append(val)
+    ExtraInfo = attr.ib(factory=partial(DeadlineIndexedVar, "ExtraInfo"))
+    ExtraInfoKeyValue = attr.ib(factory=partial(DeadlineKeyValueVar,
+                                                "ExtraInfoKeyValue"))

     # Task Extra Info Names
     # ----------------------------------------------
     OverrideTaskExtraInfoNames = attr.ib(default=None)  # Default: false
-    _taskExtraInfos = attr.ib(factory=list)
-
-    @property
-    def TaskExtraInfoName(self):  # noqa: N802
-        """Return all TaskExtraInfoName values formatted for Deadline.
-
-        Returns:
-            dict: as `{'TaskExtraInfoName0': 'value'}`
-
-        """
-        out = {}
-        for index, v in enumerate(self._taskExtraInfos):
-            out["TaskExtraInfoName{}".format(index)] = v
-        return out
-
-    @TaskExtraInfoName.setter
-    def TaskExtraInfoName(self, val):  # noqa: N802
-        self._taskExtraInfos.append(val)
+    TaskExtraInfoName = attr.ib(factory=partial(DeadlineIndexedVar,
+                                                "TaskExtraInfoName"))

     # Output
     # ----------------------------------------------
-    _outputFilename = attr.ib(factory=list)
-    _outputFilenameTile = attr.ib(factory=list)
-    _outputDirectory = attr.ib(factory=list)
-
-    @property
-    def OutputFilename(self):  # noqa: N802
-        """Return all OutputFilename values formatted for Deadline.
-
-        Returns:
-            dict: as `{'OutputFilename0': 'filename'}`
-
-        """
-        out = {}
-        for index, v in enumerate(self._outputFilename):
-            out["OutputFilename{}".format(index)] = v
-        return out
-
-    @OutputFilename.setter
-    def OutputFilename(self, val):  # noqa: N802
-        self._outputFilename.append(val)
-
-    @property
-    def OutputFilenameTile(self):  # noqa: N802
-        """Return all OutputFilename#Tile values formatted for Deadline.
-
-        Returns:
-            dict: as `{'OutputFilenme#Tile': 'tile'}`
-
-        """
-        out = {}
-        for index, v in enumerate(self._outputFilenameTile):
-            out["OutputFilename{}Tile".format(index)] = v
-        return out
-
-    @OutputFilenameTile.setter
-    def OutputFilenameTile(self, val):  # noqa: N802
-        self._outputFilenameTile.append(val)
-
-    @property
-    def OutputDirectory(self):  # noqa: N802
-        """Return all OutputDirectory values formatted for Deadline.
-
-        Returns:
-            dict: as `{'OutputDirectory0': 'dir'}`
-
-        """
-        out = {}
-        for index, v in enumerate(self._outputDirectory):
-            out["OutputDirectory{}".format(index)] = v
-        return out
-
-    @OutputDirectory.setter
-    def OutputDirectory(self, val):  # noqa: N802
-        self._outputDirectory.append(val)
+    OutputFilename = attr.ib(factory=partial(DeadlineIndexedVar,
+                                             "OutputFilename"))
+    OutputFilenameTile = attr.ib(factory=partial(DeadlineIndexedVar,
+                                                 "OutputFilename{}Tile"))
+    OutputDirectory = attr.ib(factory=partial(DeadlineIndexedVar,
+                                              "OutputDirectory"))
+
+    # Asset Dependency
+    # ----------------------------------------------
+    AssetDependency = attr.ib(factory=partial(DeadlineIndexedVar,
+                                              "AssetDependency"))

     # Tile Job
     # ----------------------------------------------
@@ -381,7 +364,7 @@ class DeadlineJobInfo(object):

     """
     def filter_data(a, v):
-        if a.name.startswith("_"):
+        if isinstance(v, (DeadlineIndexedVar, DeadlineKeyValueVar)):
             return False
         if v is None:
             return False
@@ -389,15 +372,27 @@ class DeadlineJobInfo(object):

         serialized = attr.asdict(
             self, dict_factory=OrderedDict, filter=filter_data)
-        serialized.update(self.EnvironmentKeyValue)
-        serialized.update(self.ExtraInfo)
-        serialized.update(self.ExtraInfoKeyValue)
-        serialized.update(self.TaskExtraInfoName)
-        serialized.update(self.OutputFilename)
-        serialized.update(self.OutputFilenameTile)
-        serialized.update(self.OutputDirectory)
+
+        # Custom serialize these attributes
+        for attribute in [
+            self.EnvironmentKeyValue,
+            self.ExtraInfo,
+            self.ExtraInfoKeyValue,
+            self.TaskExtraInfoName,
+            self.OutputFilename,
+            self.OutputFilenameTile,
+            self.OutputDirectory,
+            self.AssetDependency
+        ]:
+            serialized.update(attribute.serialize())
+
         return serialized

+    def update(self, data):
+        """Update instance with data dict"""
+        for key, value in data.items():
+            setattr(self, key, value)
+

 @six.add_metaclass(AbstractMetaInstancePlugin)
 class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
@@ -521,68 +516,72 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
         published.

         """
-        anatomy = self._instance.context.data['anatomy']
-        file_path = None
-        for i in self._instance.context:
-            if "workfile" in i.data["families"] \
-                    or i.data["family"] == "workfile":
-                # test if there is instance of workfile waiting
-                # to be published.
-                assert i.data["publish"] is True, (
-                    "Workfile (scene) must be published along")
-                # determine published path from Anatomy.
-                template_data = i.data.get("anatomyData")
-                rep = i.data.get("representations")[0].get("ext")
-                template_data["representation"] = rep
-                template_data["ext"] = rep
-                template_data["comment"] = None
-                anatomy_filled = anatomy.format(template_data)
-                template_filled = anatomy_filled["publish"]["path"]
-                file_path = os.path.normpath(template_filled)
-
-                self.log.info("Using published scene for render {}".format(
-                    file_path))
-
-        if not os.path.exists(file_path):
-            self.log.error("published scene does not exist!")
-            raise
-
-        if not replace_in_path:
-            return file_path
-
-        # now we need to switch scene in expected files
-        # because <scene> token will now point to published
-        # scene file and that might differ from current one
-        new_scene = os.path.splitext(
-            os.path.basename(file_path))[0]
-        orig_scene = os.path.splitext(
-            os.path.basename(
-                self._instance.context.data["currentFile"]))[0]
-        exp = self._instance.data.get("expectedFiles")
-
-        if isinstance(exp[0], dict):
-            # we have aovs and we need to iterate over them
-            new_exp = {}
-            for aov, files in exp[0].items():
-                replaced_files = []
-                for f in files:
-                    replaced_files.append(
-                        str(f).replace(orig_scene, new_scene)
-                    )
-                new_exp[aov] = replaced_files
-            # [] might be too much here, TODO
-            self._instance.data["expectedFiles"] = [new_exp]
-        else:
-            new_exp = []
-            for f in exp:
-                new_exp.append(
-                    str(f).replace(orig_scene, new_scene)
-                )
-            self._instance.data["expectedFiles"] = new_exp
-
-        self.log.info("Scene name was switched {} -> {}".format(
-            orig_scene, new_scene
-        ))
+        instance = self._instance
+        workfile_instance = self._get_workfile_instance(instance.context)
+        if workfile_instance is None:
+            return
+
+        # determine published path from Anatomy.
+        template_data = workfile_instance.data.get("anatomyData")
+        rep = workfile_instance.data.get("representations")[0]
+        template_data["representation"] = rep.get("name")
+        template_data["ext"] = rep.get("ext")
+        template_data["comment"] = None
+
+        anatomy = instance.context.data['anatomy']
+        anatomy_filled = anatomy.format(template_data)
+        template_filled = anatomy_filled["publish"]["path"]
+        file_path = os.path.normpath(template_filled)
+
+        self.log.info("Using published scene for render {}".format(file_path))
+
+        if not os.path.exists(file_path):
+            self.log.error("published scene does not exist!")
+            raise
+
+        if not replace_in_path:
+            return file_path
+
+        # now we need to switch scene in expected files
+        # because <scene> token will now point to published
+        # scene file and that might differ from current one
+        def _clean_name(path):
+            return os.path.splitext(os.path.basename(path))[0]
+
+        new_scene = _clean_name(file_path)
+        orig_scene = _clean_name(instance.context.data["currentFile"])
+        expected_files = instance.data.get("expectedFiles")
+
+        if isinstance(expected_files[0], dict):
+            # we have aovs and we need to iterate over them
+            new_exp = {}
+            for aov, files in expected_files[0].items():
+                replaced_files = []
+                for f in files:
+                    replaced_files.append(
+                        str(f).replace(orig_scene, new_scene)
+                    )
+                new_exp[aov] = replaced_files
+            # [] might be too much here, TODO
+            instance.data["expectedFiles"] = [new_exp]
+        else:
+            new_exp = []
+            for f in expected_files:
+                new_exp.append(
+                    str(f).replace(orig_scene, new_scene)
+                )
+            instance.data["expectedFiles"] = new_exp
+
+        metadata_folder = instance.data.get("publishRenderMetadataFolder")
+        if metadata_folder:
+            metadata_folder = metadata_folder.replace(orig_scene,
+                                                      new_scene)
+            instance.data["publishRenderMetadataFolder"] = metadata_folder
+
+        self.log.info("Scene name was switched {} -> {}".format(
+            orig_scene, new_scene
+        ))

         return file_path
@@ -645,3 +644,22 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
         self._instance.data["deadlineSubmissionJob"] = result

         return result["_id"]
+
+    @staticmethod
+    def _get_workfile_instance(context):
+        """Find workfile instance in context"""
+        for i in context:
+
+            is_workfile = (
+                "workfile" in i.data.get("families", []) or
+                i.data["family"] == "workfile"
+            )
+            if not is_workfile:
+                continue
+
+            # test if there is instance of workfile waiting
+            # to be published.
+            assert i.data["publish"] is True, (
+                "Workfile (scene) must be published along")
+
+            return i
@@ -13,7 +13,7 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):

     order = pyblish.api.CollectorOrder + 0.415
     label = "Deadline Webservice from the Instance"
-    families = ["rendering"]
+    families = ["rendering", "renderlayer"]

     def process(self, instance):
         instance.data["deadlineUrl"] = self._collect_deadline_url(instance)
@@ -67,9 +67,9 @@ class AfterEffectsSubmitDeadline(
         dln_job_info.Group = self.group
         dln_job_info.Department = self.department
         dln_job_info.ChunkSize = self.chunk_size
-        dln_job_info.OutputFilename = \
+        dln_job_info.OutputFilename += \
             os.path.basename(self._instance.data["expectedFiles"][0])
-        dln_job_info.OutputDirectory = \
+        dln_job_info.OutputDirectory += \
             os.path.dirname(self._instance.data["expectedFiles"][0])
         dln_job_info.JobDelay = "00:00:00"

@@ -92,13 +92,12 @@ class AfterEffectsSubmitDeadline(
         environment = dict({key: os.environ[key] for key in keys
                             if key in os.environ}, **legacy_io.Session)
         for key in keys:
-            val = environment.get(key)
-            if val:
-                dln_job_info.EnvironmentKeyValue = "{key}={value}".format(
-                    key=key,
-                    value=val)
+            value = environment.get(key)
+            if value:
+                dln_job_info.EnvironmentKeyValue[key] = value

         # to recognize job from PYPE for turning Event On/Off
-        dln_job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1"
+        dln_job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"

         return dln_job_info
@@ -284,14 +284,12 @@ class HarmonySubmitDeadline(
         environment = dict({key: os.environ[key] for key in keys
                             if key in os.environ}, **legacy_io.Session)
         for key in keys:
-            val = environment.get(key)
-            if val:
-                job_info.EnvironmentKeyValue = "{key}={value}".format(
-                    key=key,
-                    value=val)
+            value = environment.get(key)
+            if value:
+                job_info.EnvironmentKeyValue[key] = value

         # to recognize job from PYPE for turning Event On/Off
-        job_info.EnvironmentKeyValue = "OPENPYPE_RENDER_JOB=1"
+        job_info.EnvironmentKeyValue["OPENPYPE_RENDER_JOB"] = "1"

         return job_info
(File diff suppressed because it is too large.)
@@ -1,7 +1,10 @@
 import re

+from openpype.pipeline.project_folders import (
+    get_project_basic_paths,
+    create_project_folders,
+)
 from openpype_modules.ftrack.lib import BaseAction, statics_icon
-from openpype.api import get_project_basic_paths, create_project_folders


 class CreateProjectFolders(BaseAction):
@@ -81,7 +84,7 @@ class CreateProjectFolders(BaseAction):
             }

         # Invoking OpenPype API to create the project folders
-        create_project_folders(basic_paths, project_name)
+        create_project_folders(project_name, basic_paths)
         self.create_ftrack_entities(basic_paths, project_entity)

         self.trigger_event(
@@ -1,5 +1,8 @@
 """Loads publishing context from json and continues in publish process.

+Should run before 'CollectAnatomyContextData' so the user on context is
+changed before it's stored to context anatomy data or instance anatomy data.
+
 Requires:
     anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)

@@ -13,7 +16,7 @@ import os
 import pyblish.api


-class CollectUsername(pyblish.api.ContextPlugin):
+class CollectUsernameForWebpublish(pyblish.api.ContextPlugin):
     """
         Translates user email to Ftrack username.

@@ -32,10 +35,8 @@ class CollectUsername(pyblish.api.ContextPlugin):
     hosts = ["webpublisher", "photoshop"]
     targets = ["remotepublish", "filespublish", "tvpaint_worker"]

-    _context = None
-
     def process(self, context):
-        self.log.info("CollectUsername")
+        self.log.info("{}".format(self.__class__.__name__))
         os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"]
         os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"]

@@ -54,12 +55,14 @@ class CollectUsername(pyblish.api.ContextPlugin):
             return

         session = ftrack_api.Session(auto_connect_event_hub=False)
-        user = session.query("User where email like '{}'".format(user_email))
+        user = session.query(
+            "User where email like '{}'".format(user_email)
+        ).first()

         if not user:
             raise ValueError(
                 "Couldn't find user with {} email".format(user_email))
-        user = user[0]

         username = user.get("username")
         self.log.debug("Resolved ftrack username:: {}".format(username))
         os.environ["FTRACK_API_USER"] = username
@@ -67,5 +70,4 @@ class CollectUsername(pyblish.api.ContextPlugin):
         burnin_name = username
         if '@' in burnin_name:
             burnin_name = burnin_name[:burnin_name.index('@')]
-        os.environ["WEBPUBLISH_OPENPYPE_USERNAME"] = burnin_name
+        context.data["user"] = burnin_name
@@ -289,13 +289,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
     )


-def sync_all_projects(login: str, password: str):
+def sync_all_projects(login: str, password: str, ignore_projects: list = None):
     """Update all OP projects in DB with Zou data.

     Args:
         login (str): Kitsu user login
         password (str): Kitsu user password
-
+        ignore_projects (list): List of unsynced project names
     Raises:
         gazu.exception.AuthFailedException: Wrong user login and/or password
     """
@@ -311,6 +311,8 @@ def sync_all_projects(login: str, password: str):
     dbcon.install()
     all_projects = gazu.project.all_open_projects()
     for project in all_projects:
+        if ignore_projects and project["name"] in ignore_projects:
+            continue
         sync_project_from_kitsu(dbcon, project)
@@ -95,13 +95,15 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
         Reviews might be large, so allow only adding link to message instead of
         uploading only.
         """
         fill_data = copy.deepcopy(instance.context.data["anatomyData"])
+
+        username = fill_data.get("user")
         fill_pairs = [
             ("asset", instance.data.get("asset", fill_data.get("asset"))),
             ("subset", instance.data.get("subset", fill_data.get("subset"))),
-            ("username", instance.data.get("username",
-                                           fill_data.get("username"))),
+            ("user", username),
+            ("username", username),
             ("app", instance.data.get("app", fill_data.get("app"))),
             ("family", instance.data.get("family", fill_data.get("family"))),
             ("version", str(instance.data.get("version",
@@ -110,13 +112,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
         if review_path:
             fill_pairs.append(("review_filepath", review_path))

-        task_data = instance.data.get("task")
-        if not task_data:
-            task_data = fill_data.get("task")
-        for key, value in task_data.items():
-            fill_key = "task[{}]".format(key)
-            fill_pairs.append((fill_key, value))
-        fill_pairs.append(("task", task_data["name"]))
+        task_data = fill_data.get("task")
+        if task_data:
+            if (
+                "{task}" in message_templ
+                or "{Task}" in message_templ
+                or "{TASK}" in message_templ
+            ):
+                fill_pairs.append(("task", task_data["name"]))
+
+            else:
+                for key, value in task_data.items():
+                    fill_key = "task[{}]".format(key)
+                    fill_pairs.append((fill_key, value))

         self.log.debug("fill_pairs ::{}".format(fill_pairs))
         multiple_case_variants = prepare_template_data(fill_pairs)
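
For context, `prepare_template_data` expands each fill pair into multiple case variants, which is why the plugin checks `{task}`, `{Task}` and `{TASK}`; a hedged sketch of the expected shape (inferred from those checks, not from the function's source):

    fill_pairs = [("task", "compositing")]
    # prepare_template_data(fill_pairs) is expected to yield keys for all
    # three casings used in the message template, roughly:
    # {"task": "compositing", "Task": "Compositing", "TASK": "COMPOSITING"}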
107
openpype/pipeline/project_folders.py
Normal file
107
openpype/pipeline/project_folders.py
Normal file
|
|
@ -0,0 +1,107 @@
|
|||
import os
import re
import json

import six

from openpype.settings import get_project_settings
from openpype.lib import Logger

from .anatomy import Anatomy
from .template_data import get_project_template_data


def concatenate_splitted_paths(split_paths, anatomy):
    log = Logger.get_logger("concatenate_splitted_paths")
    pattern_array = re.compile(r"\[.*\]")
    output = []
    for path_items in split_paths:
        clean_items = []
        if isinstance(path_items, str):
            path_items = [path_items]

        for path_item in path_items:
            if not re.match(r"{.+}", path_item):
                path_item = re.sub(pattern_array, "", path_item)
            clean_items.append(path_item)

        # backward compatibility
        if "__project_root__" in path_items:
            for root, root_path in anatomy.roots.items():
                if not os.path.exists(str(root_path)):
                    log.debug(
                        "Root {} path {} does not exist on this"
                        " computer!".format(root, root_path)
                    )
                    continue
                clean_items = ["{{root[{}]}}".format(root),
                               r"{project[name]}"] + clean_items[1:]
                output.append(os.path.normpath(os.path.sep.join(clean_items)))
            continue

        output.append(os.path.normpath(os.path.sep.join(clean_items)))

    return output


def fill_paths(path_list, anatomy):
    format_data = get_project_template_data(project_name=anatomy.project_name)
    format_data["root"] = anatomy.roots
    filled_paths = []

    for path in path_list:
        new_path = path.format(**format_data)
        filled_paths.append(new_path)

    return filled_paths


def create_project_folders(project_name, basic_paths=None):
    log = Logger.get_logger("create_project_folders")
    anatomy = Anatomy(project_name)
    if basic_paths is None:
        basic_paths = get_project_basic_paths(project_name)

    if not basic_paths:
        return

    concat_paths = concatenate_splitted_paths(basic_paths, anatomy)
    filled_paths = fill_paths(concat_paths, anatomy)

    # Create folders
    for path in filled_paths:
        if os.path.exists(path):
            log.debug("Folder already exists: {}".format(path))
        else:
            log.debug("Creating folder: {}".format(path))
            os.makedirs(path)


def _list_path_items(folder_structure):
    output = []
    for key, value in folder_structure.items():
        if not value:
            output.append(key)
            continue

        paths = _list_path_items(value)
        for path in paths:
            if not isinstance(path, (list, tuple)):
                path = [path]

            item = [key]
            item.extend(path)
            output.append(item)

    return output


def get_project_basic_paths(project_name):
    project_settings = get_project_settings(project_name)
    folder_structure = (
        project_settings["global"]["project_folder_structure"]
    )
    if not folder_structure:
        return []

    if isinstance(folder_structure, six.string_types):
        folder_structure = json.loads(folder_structure)
    return _list_path_items(folder_structure)
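A hedged usage sketch of the new module, tying it to the
"project_folder_structure" setting that appears later in this diff (the
project name is invented):

    from openpype.pipeline.project_folders import (
        get_project_basic_paths,
        create_project_folders,
    )

    # Reads project_settings/global/project_folder_structure, expands the
    # nested dict into path item lists, fills anatomy roots and the project
    # name, then creates any folders that do not exist yet.
    paths = get_project_basic_paths("MyProject")
    create_project_folders("MyProject", basic_paths=paths)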
@@ -22,6 +22,8 @@ from .publish_plugins import (
 )

 from .lib import (
+    get_publish_template_name,
+
     DiscoverResult,
     publish_plugins_discover,
     load_help_content_from_plugin,

@@ -62,6 +64,8 @@ __all__ = (

     "Extractor",

+    "get_publish_template_name",
+
     "DiscoverResult",
     "publish_plugins_discover",
     "load_help_content_from_plugin",
openpype/pipeline/publish/contants.py (new file, 2 lines)

@@ -0,0 +1,2 @@
DEFAULT_PUBLISH_TEMPLATE = "publish"
DEFAULT_HERO_PUBLISH_TEMPLATE = "hero"
@@ -2,6 +2,7 @@ import os
 import sys
+import types
 import inspect
 import copy
 import tempfile
 import xml.etree.ElementTree
@@ -9,8 +10,190 @@ import six
 import pyblish.plugin
 import pyblish.api

-from openpype.lib import Logger
-from openpype.settings import get_project_settings, get_system_settings
+from openpype.lib import Logger, filter_profiles
+from openpype.settings import (
+    get_project_settings,
+    get_system_settings,
+)
+
+from .contants import (
+    DEFAULT_PUBLISH_TEMPLATE,
+    DEFAULT_HERO_PUBLISH_TEMPLATE,
+)
+
+
+def get_template_name_profiles(
+    project_name, project_settings=None, logger=None
+):
+    """Receive profiles for publish template keys.
+
+    At least one of the arguments must be passed.
+
+    Args:
+        project_name (str): Name of project where to look for templates.
+        project_settings (Dict[str, Any]): Prepared project settings.
+
+    Returns:
+        List[Dict[str, Any]]: Publish template profiles.
+    """
+
+    if not project_name and not project_settings:
+        raise ValueError((
+            "Both project name and project settings are missing."
+            " At least one must be entered."
+        ))
+
+    if not project_settings:
+        project_settings = get_project_settings(project_name)
+
+    profiles = (
+        project_settings
+        ["global"]
+        ["tools"]
+        ["publish"]
+        ["template_name_profiles"]
+    )
+    if profiles:
+        return copy.deepcopy(profiles)
+
+    # Use the legacy location for projects where the new settings
+    # are not filled yet
+    legacy_profiles = (
+        project_settings
+        ["global"]
+        ["publish"]
+        ["IntegrateAssetNew"]
+        ["template_name_profiles"]
+    )
+    if legacy_profiles:
+        if not logger:
+            logger = Logger.get_logger("get_template_name_profiles")
+
+        logger.warning((
+            "Project \"{}\" is using legacy access to publish template."
+            " It is recommended to move settings to new location"
+            " 'project_settings/global/tools/publish/template_name_profiles'."
+        ).format(project_name))
+
+    # Replace "tasks" key with "task_names"
+    profiles = []
+    for profile in copy.deepcopy(legacy_profiles):
+        profile["task_names"] = profile.pop("tasks", [])
+        profiles.append(profile)
+    return profiles
+
+
+def get_hero_template_name_profiles(
+    project_name, project_settings=None, logger=None
+):
+    """Receive profiles for hero publish template keys.
+
+    At least one of the arguments must be passed.
+
+    Args:
+        project_name (str): Name of project where to look for templates.
+        project_settings (Dict[str, Any]): Prepared project settings.
+
+    Returns:
+        List[Dict[str, Any]]: Publish template profiles.
+    """
+
+    if not project_name and not project_settings:
+        raise ValueError((
+            "Both project name and project settings are missing."
+            " At least one must be entered."
+        ))
+
+    if not project_settings:
+        project_settings = get_project_settings(project_name)
+
+    profiles = (
+        project_settings
+        ["global"]
+        ["tools"]
+        ["publish"]
+        ["hero_template_name_profiles"]
+    )
+    if profiles:
+        return copy.deepcopy(profiles)
+
+    # Use the legacy location for projects where the new settings
+    # are not filled yet
+    legacy_profiles = copy.deepcopy(
+        project_settings
+        ["global"]
+        ["publish"]
+        ["IntegrateHeroVersion"]
+        ["template_name_profiles"]
+    )
+    if legacy_profiles:
+        if not logger:
+            logger = Logger.get_logger("get_hero_template_name_profiles")
+
+        logger.warning((
+            "Project \"{}\" is using legacy access to hero publish template."
+            " It is recommended to move settings to new location"
+            " 'project_settings/global/tools/publish/"
+            "hero_template_name_profiles'."
+        ).format(project_name))
+    return legacy_profiles
+
+
+def get_publish_template_name(
+    project_name,
+    host_name,
+    family,
+    task_name,
+    task_type,
+    project_settings=None,
+    hero=False,
+    logger=None
+):
+    """Get template name which should be used for passed context.
+
+    Publish templates are filtered by host name, family, task name and
+    task type.
+
+    The default template, used when profiles are not available or the matched
+    profile has an empty value, is defined by the 'DEFAULT_PUBLISH_TEMPLATE'
+    constant.
+
+    Args:
+        project_name (str): Name of project where to look for settings.
+        host_name (str): Name of host integration.
+        family (str): Family for which the template should be found.
+        task_name (str): Task name on which the instance is working.
+        task_type (str): Task type on which the instance is working.
+        project_settings (Dict[str, Any]): Prepared project settings.
+        logger (logging.Logger): Custom logger used for 'filter_profiles'
+            function.
+
+    Returns:
+        str: Template name which should be used for integration.
+    """
+
+    template = None
+    filter_criteria = {
+        "hosts": host_name,
+        "families": family,
+        "task_names": task_name,
+        "task_types": task_type,
+    }
+    if hero:
+        default_template = DEFAULT_HERO_PUBLISH_TEMPLATE
+        profiles = get_hero_template_name_profiles(
+            project_name, project_settings, logger
+        )
+
+    else:
+        profiles = get_template_name_profiles(
+            project_name, project_settings, logger
+        )
+        default_template = DEFAULT_PUBLISH_TEMPLATE
+
+    profile = filter_profiles(profiles, filter_criteria, logger=logger)
+    if profile:
+        template = profile["template_name"]
+    return template or default_template
+
+
 class DiscoverResult:
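A hedged usage sketch of the resolver added above (project, host and task
values are invented):

    from openpype.pipeline.publish import get_publish_template_name

    # Falls back to DEFAULT_PUBLISH_TEMPLATE ("publish") when no profile
    # matches, or to DEFAULT_HERO_PUBLISH_TEMPLATE ("hero") with hero=True.
    template_name = get_publish_template_name(
        project_name="MyProject",
        host_name="maya",
        family="render",
        task_name="lighting",
        task_type="Lighting",
    )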
@@ -53,7 +53,7 @@ def get_project_template_data(project_doc=None, project_name=None):
         project_name = project_doc["name"]

     if not project_doc:
-        project_code = get_project(project_name, fields=["data.code"])
+        project_doc = get_project(project_name, fields=["data.code"])

     project_code = project_doc.get("data", {}).get("code")
     return {
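This one-word rename reads as a bug fix: the old line stored the fetched
project document in 'project_code', leaving 'project_doc' as None, so the
following 'project_doc.get("data", {})' call would fail whenever only a
project name was passed in.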
@@ -9,6 +9,8 @@ from .path_resolving import (
     get_custom_workfile_template,
     get_custom_workfile_template_by_string_context,
+
+    create_workdir_extra_folders,
 )

 from .build_workfile import BuildWorkfile

@@ -26,5 +28,7 @@ __all__ = (
     "get_custom_workfile_template",
     "get_custom_workfile_template_by_string_context",

+    "create_workdir_extra_folders",
+
     "BuildWorkfile",
 )
@@ -467,3 +467,60 @@ def get_custom_workfile_template_by_string_context(
     return get_custom_workfile_template(
         project_doc, asset_doc, task_name, host_name, anatomy, project_settings
     )
+
+
+def create_workdir_extra_folders(
+    workdir,
+    host_name,
+    task_type,
+    task_name,
+    project_name,
+    project_settings=None
+):
+    """Create extra folders in work directory based on context.
+
+    Args:
+        workdir (str): Path to workdir where workfiles are stored.
+        host_name (str): Name of host implementation.
+        task_type (str): Type of task for which the extra folders should be
+            created.
+        task_name (str): Name of task for which the extra folders should be
+            created.
+        project_name (str): Name of project the task belongs to.
+        project_settings (dict): Prepared project settings. Loaded if not
+            passed.
+    """
+
+    # Load project settings if not set
+    if not project_settings:
+        project_settings = get_project_settings(project_name)
+
+    # Load extra folders profiles
+    extra_folders_profiles = (
+        project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
+    )
+    # Skip if there are no profiles
+    if not extra_folders_profiles:
+        return
+
+    # Prepare profile filters
+    filter_data = {
+        "task_types": task_type,
+        "task_names": task_name,
+        "hosts": host_name
+    }
+    profile = filter_profiles(extra_folders_profiles, filter_data)
+    if profile is None:
+        return
+
+    for subfolder in profile["folders"]:
+        # Make sure backslashes are converted to forward slashes
+        # and the path does not start with a slash
+        subfolder = subfolder.replace("\\", "/").lstrip("/")
+        # Skip empty strings
+        if not subfolder:
+            continue
+
+        fullpath = os.path.join(workdir, subfolder)
+        if not os.path.exists(fullpath):
+            os.makedirs(fullpath)
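A hedged usage sketch of the relocated helper (paths and names are invented;
the import path matches the workfile package exports added above):

    from openpype.pipeline.workfile import create_workdir_extra_folders

    # Creates e.g. <workdir>/renders and <workdir>/cache if a matching
    # "extra_folders" profile exists in the project settings.
    create_workdir_extra_folders(
        workdir="/proj/MyProject/shots/sh010/work/lighting",
        host_name="maya",
        task_type="Lighting",
        task_name="lighting",
        project_name="MyProject",
    )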
openpype/plugins/publish/collect_audio.py (new file, 105 lines)

@@ -0,0 +1,105 @@
import pyblish.api

from openpype.client import (
    get_last_version_by_subset_name,
    get_representations,
)
from openpype.pipeline import (
    legacy_io,
    get_representation_path,
)


class CollectAudio(pyblish.api.InstancePlugin):
    """Collect the asset's last published audio.

    The audio subset name searched for is defined in:
        project settings > Collect Audio
    """

    label = "Collect Asset Audio"
    order = pyblish.api.CollectorOrder + 0.1
    families = ["review"]
    hosts = [
        "nuke",
        "maya",
        "shell",
        "hiero",
        "premiere",
        "harmony",
        "traypublisher",
        "standalonepublisher",
        "fusion",
        "tvpaint",
        "resolve",
        "webpublisher",
        "aftereffects",
        "flame",
        "unreal"
    ]

    audio_subset_name = "audioMain"

    def process(self, instance):
        if instance.data.get("audio"):
            self.log.info(
                "Skipping audio collection. It is already collected."
            )
            return

        # Add audio to instance if it exists.
        self.log.info((
            "Searching for audio subset '{subset}'"
            " in asset '{asset}'"
        ).format(
            subset=self.audio_subset_name,
            asset=instance.data["asset"]
        ))

        repre_doc = self._get_repre_doc(instance)

        # Add audio to instance if a representation was found
        if repre_doc:
            instance.data["audio"] = [{
                "offset": 0,
                "filename": get_representation_path(repre_doc)
            }]
            self.log.info("Audio data added to instance.")

    def _get_repre_doc(self, instance):
        cache = instance.context.data.get("__cache_asset_audio")
        if cache is None:
            cache = {}
            instance.context.data["__cache_asset_audio"] = cache
        asset_name = instance.data["asset"]

        # First try to get it from cache
        if asset_name in cache:
            return cache[asset_name]

        project_name = legacy_io.active_project()

        # Find the latest version document
        last_version_doc = get_last_version_by_subset_name(
            project_name,
            self.audio_subset_name,
            asset_name=asset_name,
            fields=["_id"]
        )

        repre_doc = None
        if last_version_doc:
            # Try to find its representation (expected there is only one)
            repre_docs = list(get_representations(
                project_name, version_ids=[last_version_doc["_id"]]
            ))
            if not repre_docs:
                self.log.warning(
                    "Version document does not contain any representations"
                )
            else:
                repre_doc = repre_docs[0]

        # Update cache
        cache[asset_name] = repre_doc

        return repre_doc
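For reference, a hedged sketch of what the plugin leaves on a publish
instance when a representation is found (structure taken from the code above,
the path value is invented):

    instance.data["audio"] = [{
        "offset": 0,
        "filename": "/proj/MyProject/publish/audio/audioMain/v003/sh010.wav",
    }]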
@@ -29,6 +29,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin):
         # get basic variables
         otio_clip = instance.data["otioClip"]
         workfile_start = instance.data["workfileFrameStart"]
+        workfile_source_duration = instance.data.get("notRetimedFramerange")

         # get ranges
         otio_tl_range = otio_clip.range_in_parent()

@@ -54,6 +55,11 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin):
         frame_end = frame_start + otio.opentime.to_frames(
             otio_tl_range.duration, otio_tl_range.duration.rate) - 1

+        # in case of a retimed clip whose frame range should not be retimed
+        if workfile_source_duration:
+            frame_end = frame_start + otio.opentime.to_frames(
+                otio_src_range.duration, otio_src_range.duration.rate) - 1
+
         data = {
             "frameStart": frame_start,
             "frameEnd": frame_end,
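A worked example of the range arithmetic above (all values invented): with
workfile_start = 1001 and a 48-frame timeline range at 24 fps,
otio.opentime.to_frames returns 48, so frame_end = 1001 + 48 - 1 = 1048; when
"notRetimedFramerange" is set, the source-range duration is used in the same
formula instead of the timeline-range duration.

    import opentimelineio as otio

    duration = otio.opentime.RationalTime(48, 24)  # 48 frames at 24 fps
    frame_start = 1001
    frame_end = frame_start + otio.opentime.to_frames(
        duration, duration.rate) - 1
    assert frame_end == 1048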
@@ -488,12 +488,6 @@ class ExtractBurnin(publish.Extractor):
             "frame_end_handle": frame_end_handle
         }

-        # use explicit username for webpublishes as rewriting
-        # OPENPYPE_USERNAME might have side effects
-        webpublish_user_name = os.environ.get("WEBPUBLISH_OPENPYPE_USERNAME")
-        if webpublish_user_name:
-            burnin_data["username"] = webpublish_user_name
-
         self.log.debug(
             "Basic burnin_data: {}".format(json.dumps(burnin_data, indent=4))
         )
@@ -5,6 +5,9 @@ import copy
 import clique
 import six

+from bson.objectid import ObjectId
+import pyblish.api
+
 from openpype.client.operations import (
     OperationsSession,
     new_subset_document,

@@ -14,8 +17,6 @@ from openpype.client.operations import (
     prepare_version_update_data,
     prepare_representation_update_data,
 )
-from bson.objectid import ObjectId
-import pyblish.api

 from openpype.client import (
     get_representations,

@@ -23,10 +24,12 @@ from openpype.client import (
     get_version_by_name,
 )
+from openpype.lib import source_hash
 from openpype.lib.profiles_filtering import filter_profiles
 from openpype.lib.file_transaction import FileTransaction
 from openpype.pipeline import legacy_io
-from openpype.pipeline.publish import KnownPublishError
+from openpype.pipeline.publish import (
+    KnownPublishError,
+    get_publish_template_name,
+)

 log = logging.getLogger(__name__)
@@ -135,7 +138,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
         # the database even if not used by the destination template
         db_representation_context_keys = [
             "project", "asset", "task", "subset", "version", "representation",
-            "family", "hierarchy", "username", "output"
+            "family", "hierarchy", "username", "user", "output"
         ]
         skip_host_families = []
@@ -792,52 +795,26 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

     def get_template_name(self, instance):
         """Return anatomy template name to use for integration"""
-        # Define publish template name from profiles
-        filter_criteria = self.get_profile_filter_criteria(instance)
-        template_name_profiles = self._get_template_name_profiles(instance)
-        profile = filter_profiles(
-            template_name_profiles,
-            filter_criteria,
-            logger=self.log
-        )
-
-        if profile:
-            return profile["template_name"]
-        return self.default_template_name
-
-    def _get_template_name_profiles(self, instance):
-        """Receive profiles for publish template keys.
-
-        Reuse template name profiles from the legacy integrator. The goal is
-        to move the profile settings out of plugin settings, but until that
-        happens we want to be able to set them in one place without breaking
-        backwards compatibility (more than once).
-        """
-
-        return (
-            instance.context.data["project_settings"]
-            ["global"]
-            ["publish"]
-            ["IntegrateAssetNew"]
-            ["template_name_profiles"]
-        )
-
-    def get_profile_filter_criteria(self, instance):
-        """Return filter criteria for `filter_profiles`"""
-
-        # Anatomy data is pre-filled by Collectors
-        anatomy_data = instance.data["anatomyData"]
-
-        # Task can be optional in anatomy data
-        task = anatomy_data.get("task", {})
-
-        # Return filter criteria
-        return {
-            "families": anatomy_data["family"],
-            "tasks": task.get("name"),
-            "task_types": task.get("type"),
-            "hosts": instance.context.data["hostName"],
-        }
+        project_name = legacy_io.active_project()
+        host_name = instance.context.data["hostName"]
+        anatomy_data = instance.data["anatomyData"]
+        family = anatomy_data["family"]
+        task_info = anatomy_data.get("task") or {}
+
+        return get_publish_template_name(
+            project_name,
+            host_name,
+            family,
+            task_name=task_info.get("name"),
+            task_type=task_info.get("type"),
+            project_settings=instance.context.data["project_settings"],
+            logger=self.log
+        )

     def get_rootless_path(self, anatomy, path):
         """Returns, if possible, path without absolute portion from root
@@ -14,14 +14,12 @@ from openpype.client import (
     get_archived_representations,
     get_representations,
 )
-from openpype.lib import (
-    create_hard_link,
-    filter_profiles
-)
+from openpype.lib import create_hard_link
 from openpype.pipeline import (
     schema,
     legacy_io,
 )
+from openpype.pipeline.publish import get_publish_template_name


 class IntegrateHeroVersion(pyblish.api.InstancePlugin):
@@ -46,7 +44,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
     ignored_representation_names = []
     db_representation_context_keys = [
         "project", "asset", "task", "subset", "representation",
-        "family", "hierarchy", "task", "username"
+        "family", "hierarchy", "task", "username", "user"
     ]
     # QUESTION/TODO this process should happen on server if crashed due to
     # permissions error on files (files were used or user didn't have perms)
@@ -68,10 +66,11 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
             )
             return

-        template_key = self._get_template_key(instance)
-
         anatomy = instance.context.data["anatomy"]
+        project_name = anatomy.project_name
+
+        template_key = self._get_template_key(project_name, instance)

         if template_key not in anatomy.templates:
             self.log.warning((
                 "!!! Anatomy of project \"{}\" does not have set"
@@ -527,30 +526,24 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):

         return publish_folder

-    def _get_template_key(self, instance):
+    def _get_template_key(self, project_name, instance):
         anatomy_data = instance.data["anatomyData"]
-        task_data = anatomy_data.get("task") or {}
-        task_name = task_data.get("name")
-        task_type = task_data.get("type")
+        task_info = anatomy_data.get("task") or {}
         host_name = instance.context.data["hostName"]

         # TODO raise error if Hero not set?
         family = self.main_family_from_instance(instance)
-        key_values = {
-            "families": family,
-            "task_names": task_name,
-            "task_types": task_type,
-            "hosts": host_name
-        }
-        profile = filter_profiles(
-            self.template_name_profiles,
-            key_values,
-        )
-        if profile:
-            template_name = profile["template_name"]
-        else:
-            template_name = self._default_template_name
-        return template_name
+
+        return get_publish_template_name(
+            project_name,
+            host_name,
+            family,
+            task_info.get("name"),
+            task_info.get("type"),
+            project_settings=instance.context.data["project_settings"],
+            hero=True,
+            logger=self.log
+        )

     def main_family_from_instance(self, instance):
         """Returns main family of entered instance."""
@@ -15,7 +15,6 @@ from bson.objectid import ObjectId
 from pymongo import DeleteOne, InsertOne
 import pyblish.api

-import openpype.api
 from openpype.client import (
     get_asset_by_name,
     get_subset_by_id,
@@ -25,14 +24,17 @@ from openpype.client import (
     get_representations,
     get_archived_representations,
 )
-from openpype.lib.profiles_filtering import filter_profiles
 from openpype.lib import (
     prepare_template_data,
     create_hard_link,
     StringTemplate,
-    TemplateUnsolved
+    TemplateUnsolved,
+    source_hash,
+    filter_profiles,
+    get_local_site_id,
 )
 from openpype.pipeline import legacy_io
+from openpype.pipeline.publish import get_publish_template_name

 # this is needed until speedcopy for linux is fixed
 if sys.platform == "win32":
@@ -127,7 +129,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
     exclude_families = ["render.farm"]
     db_representation_context_keys = [
         "project", "asset", "task", "subset", "version", "representation",
-        "family", "hierarchy", "task", "username"
+        "family", "hierarchy", "task", "username", "user"
     ]
     default_template_name = "publish"
@@ -138,7 +140,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
     integrated_file_sizes = {}

     # Attributes set by settings
-    template_name_profiles = None
     subset_grouping_profiles = None

     def process(self, instance):
@@ -388,22 +389,16 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

         family = self.main_family_from_instance(instance)

-        key_values = {
-            "families": family,
-            "tasks": task_name,
-            "hosts": instance.context.data["hostName"],
-            "task_types": task_type
-        }
-        profile = filter_profiles(
-            self.template_name_profiles,
-            key_values,
-            logger=self.log
-        )
-
-        template_name = "publish"
-        if profile:
-            template_name = profile["template_name"]
+        template_name = get_publish_template_name(
+            project_name,
+            instance.context.data["hostName"],
+            family,
+            task_name=task_info.get("name"),
+            task_type=task_info.get("type"),
+            project_settings=instance.context.data["project_settings"],
+            logger=self.log
+        )

         published_representations = {}
         for idx, repre in enumerate(repres):
             published_files = []
@@ -1058,7 +1053,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         for _src, dest in resources:
             path = self.get_rootless_path(anatomy, dest)
             dest = self.get_dest_temp_url(dest)
-            file_hash = openpype.api.source_hash(dest)
+            file_hash = source_hash(dest)
             if self.TMP_FILE_EXT and \
                     ',{}'.format(self.TMP_FILE_EXT) in file_hash:
                 file_hash = file_hash.replace(',{}'.format(self.TMP_FILE_EXT),
@@ -1168,7 +1163,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

     def _get_sites(self, sync_project_presets):
         """Returns tuple (local_site, remote_site)"""
-        local_site_id = openpype.api.get_local_site_id()
+        local_site_id = get_local_site_id()
         local_site = sync_project_presets["config"]. \
             get("active_site", "studio").strip()
@@ -3,6 +3,10 @@
     "CollectAnatomyInstanceData": {
         "follow_workfile_version": false
     },
+    "CollectAudio": {
+        "enabled": false,
+        "audio_subset_name": "audioMain"
+    },
     "CollectSceneVersion": {
         "hosts": [
             "aftereffects",
@@ -415,6 +419,10 @@
                 "filter_families": []
             }
         ]
+    },
+    "publish": {
+        "template_name_profiles": [],
+        "hero_template_name_profiles": []
     }
 },
 "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}",
@@ -1,4 +1,5 @@
 {
+    "mel_workspace": "workspace -fr \"shaders\" \"renderData/shaders\";\nworkspace -fr \"images\" \"renders\";\nworkspace -fr \"particles\" \"particles\";\nworkspace -fr \"mayaAscii\" \"\";\nworkspace -fr \"mayaBinary\" \"\";\nworkspace -fr \"scene\" \"\";\nworkspace -fr \"alembicCache\" \"cache/alembic\";\nworkspace -fr \"renderData\" \"renderData\";\nworkspace -fr \"sourceImages\" \"sourceimages\";\nworkspace -fr \"fileCache\" \"cache/nCache\";\n",
     "ext_mapping": {
         "model": "ma",
         "mayaAscii": "ma",
@@ -5,6 +5,13 @@
     "label": "Maya",
     "is_file": true,
     "children": [
+        {
+            "type": "text",
+            "multiline": true,
+            "use_label_wrap": true,
+            "key": "mel_workspace",
+            "label": "Maya MEL Workspace"
+        },
         {
             "type": "dict-modifiable",
             "key": "ext_mapping",
@@ -18,6 +18,27 @@
             }
         ]
     },
+    {
+        "type": "dict",
+        "collapsible": true,
+        "checkbox_key": "enabled",
+        "key": "CollectAudio",
+        "label": "Collect Audio",
+        "is_group": true,
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
+            },
+            {
+                "key": "audio_subset_name",
+                "label": "Name of audio variant",
+                "type": "text",
+                "placeholder": "audioMain"
+            }
+        ]
+    },
     {
         "type": "dict",
         "collapsible": true,
@@ -642,10 +663,14 @@
                 ]
             }
         },
+        {
+            "type": "label",
+            "label": "<b>NOTE:</b> Publish template profiles settings were moved to <a href=\"settings://project_settings/global/tools/publish/template_name_profiles\"><b>Tools/Publish/Template name profiles</b></a>. Please move values there."
+        },
         {
             "type": "list",
             "key": "template_name_profiles",
-            "label": "Template name profiles",
+            "label": "Template name profiles (DEPRECATED)",
             "use_label_wrap": true,
             "object_type": {
                 "type": "dict",
@@ -750,10 +775,14 @@
             "type": "list",
             "object_type": "text"
         },
+        {
+            "type": "label",
+            "label": "<b>NOTE:</b> Hero publish template profiles settings were moved to <a href=\"settings://project_settings/global/tools/publish/hero_template_name_profiles\"><b>Tools/Publish/Hero template name profiles</b></a>. Please move values there."
+        },
         {
             "type": "list",
             "key": "template_name_profiles",
-            "label": "Template name profiles",
+            "label": "Template name profiles (DEPRECATED)",
             "use_label_wrap": true,
             "object_type": {
                 "type": "dict",
@@ -309,6 +309,102 @@
             }
         }
     ]
 },
+{
+    "type": "dict",
+    "key": "publish",
+    "label": "Publish",
+    "children": [
+        {
+            "type": "label",
+            "label": "<b>NOTE:</b> For backwards compatibility these values can be left empty, in which case the values from <a href=\"settings://project_settings/global/publish/IntegrateAssetNew\"><b>IntegrateAssetNew</b></a> are used. This will change in the future, so please move all values here as soon as possible."
+        },
+        {
+            "type": "list",
+            "key": "template_name_profiles",
+            "label": "Template name profiles",
+            "use_label_wrap": true,
+            "object_type": {
+                "type": "dict",
+                "children": [
+                    {
+                        "key": "families",
+                        "label": "Families",
+                        "type": "list",
+                        "object_type": "text"
+                    },
+                    {
+                        "type": "hosts-enum",
+                        "key": "hosts",
+                        "label": "Hosts",
+                        "multiselection": true
+                    },
+                    {
+                        "key": "task_types",
+                        "label": "Task types",
+                        "type": "task-types-enum"
+                    },
+                    {
+                        "key": "task_names",
+                        "label": "Task names",
+                        "type": "list",
+                        "object_type": "text"
+                    },
+                    {
+                        "type": "separator"
+                    },
+                    {
+                        "type": "text",
+                        "key": "template_name",
+                        "label": "Template name"
+                    }
+                ]
+            }
+        },
+        {
+            "type": "list",
+            "key": "hero_template_name_profiles",
+            "label": "Hero template name profiles",
+            "use_label_wrap": true,
+            "object_type": {
+                "type": "dict",
+                "children": [
+                    {
+                        "key": "families",
+                        "label": "Families",
+                        "type": "list",
+                        "object_type": "text"
+                    },
+                    {
+                        "type": "hosts-enum",
+                        "key": "hosts",
+                        "label": "Hosts",
+                        "multiselection": true
+                    },
+                    {
+                        "key": "task_types",
+                        "label": "Task types",
+                        "type": "task-types-enum"
+                    },
+                    {
+                        "key": "task_names",
+                        "label": "Task names",
+                        "type": "list",
+                        "object_type": "text"
+                    },
+                    {
+                        "type": "separator"
+                    },
+                    {
+                        "type": "text",
+                        "key": "template_name",
+                        "label": "Template name",
+                        "tooltip": "Name of template from Anatomy templates"
+                    }
+                ]
+            }
+        }
+    ]
+}
 ]
 }
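A hedged example of a single profile entry as the new lists above would store
it (shown as a Python dict to match the code examples in this document; all
values are invented):

    profile = {
        "families": ["render", "review"],
        "hosts": ["maya", "nuke"],
        "task_types": ["Compositing"],
        "task_names": [],          # empty list means "any task name"
        "template_name": "publish",
    }
    # get_publish_template_name() feeds such entries to filter_profiles()
    # and returns the matched entry's "template_name".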
@@ -281,18 +281,25 @@ class ActionModel(QtGui.QStandardItemModel):
         if not action_item:
             return

-        action = action_item.data(ACTION_ROLE)
-        actual_data = self._prepare_compare_data(action)
+        actions = action_item.data(ACTION_ROLE)
+        if not isinstance(actions, list):
+            actions = [actions]
+
+        action_actions_data = [
+            self._prepare_compare_data(action)
+            for action in actions
+        ]

         stored = self.launcher_registry.get_item("force_not_open_workfile")
-        if is_checked:
-            stored.append(actual_data)
-        else:
-            final_values = []
-            for config in stored:
-                if config != actual_data:
-                    final_values.append(config)
-            stored = final_values
+        for actual_data in action_actions_data:
+            if is_checked:
+                stored.append(actual_data)
+            else:
+                final_values = []
+                for config in stored:
+                    if config != actual_data:
+                        final_values.append(config)
+                stored = final_values

         self.launcher_registry.set_item("force_not_open_workfile", stored)
         self.launcher_registry._get_item.cache_clear()
@@ -329,21 +336,24 @@ class ActionModel(QtGui.QStandardItemModel):
             item (QStandardItem)
             stored (list) of dict
         """
-        action = item.data(ACTION_ROLE)
-        if not self.is_application_action(action):
+
+        actions = item.data(ACTION_ROLE)
+        if not isinstance(actions, list):
+            actions = [actions]
+
+        if not self.is_application_action(actions[0]):
             return False

-        actual_data = self._prepare_compare_data(action)
+        action_actions_data = [
+            self._prepare_compare_data(action)
+            for action in actions
+        ]
         for config in stored:
-            if config == actual_data:
+            if config in action_actions_data:
                 return True

         return False

     def _prepare_compare_data(self, action):
+        if isinstance(action, list) and action:
+            action = action[0]
+
         compare_data = {}
         if action and action.label:
             compare_data = {
@@ -312,11 +312,12 @@ class ActionBar(QtWidgets.QWidget):

         is_group = index.data(GROUP_ROLE)
         is_variant_group = index.data(VARIANT_GROUP_ROLE)
+        force_not_open_workfile = index.data(FORCE_NOT_OPEN_WORKFILE_ROLE)
         if not is_group and not is_variant_group:
             action = index.data(ACTION_ROLE)
             # Change data of application action
             if issubclass(action, ApplicationAction):
-                if index.data(FORCE_NOT_OPEN_WORKFILE_ROLE):
+                if force_not_open_workfile:
                     action.data["start_last_workfile"] = False
                 else:
                     action.data.pop("start_last_workfile", None)
@@ -385,10 +386,18 @@ class ActionBar(QtWidgets.QWidget):
             menu.addMenu(sub_menu)

         result = menu.exec_(QtGui.QCursor.pos())
-        if result:
-            action = actions_mapping[result]
-            self._start_animation(index)
-            self.action_clicked.emit(action)
+        if not result:
+            return
+
+        action = actions_mapping[result]
+        if issubclass(action, ApplicationAction):
+            if force_not_open_workfile:
+                action.data["start_last_workfile"] = False
+            else:
+                action.data.pop("start_last_workfile", None)
+
+        self._start_animation(index)
+        self.action_clicked.emit(action)


 class ActionHistory(QtWidgets.QPushButton):
@@ -1,5 +1,12 @@
 from Qt import QtWidgets, QtCore, QtGui

+from openpype import resources
+from openpype.style import load_stylesheet
+from openpype.widgets import PasswordDialog
+from openpype.lib import is_admin_password_required, Logger
+from openpype.pipeline import AvalonMongoDB
+from openpype.pipeline.project_folders import create_project_folders
+
 from . import (
     ProjectModel,
     ProjectProxyFilter,
@@ -13,17 +20,6 @@ from . import (
 )
 from .widgets import ConfirmProjectDeletion
 from .style import ResourceCache
-from openpype.style import load_stylesheet
-from openpype.lib import is_admin_password_required
-from openpype.widgets import PasswordDialog
-from openpype.pipeline import AvalonMongoDB
-
-from openpype import resources
-from openpype.api import (
-    get_project_basic_paths,
-    create_project_folders,
-    Logger
-)


 class ProjectManagerWindow(QtWidgets.QWidget):
@@ -259,12 +255,8 @@ class ProjectManagerWindow(QtWidgets.QWidget):
                              qm.Yes | qm.No)
         if ans == qm.Yes:
             try:
-                # Get paths based on presets
-                basic_paths = get_project_basic_paths(project_name)
-                if not basic_paths:
-                    pass
                 # Invoking OpenPype API to create the project folders
-                create_project_folders(basic_paths, project_name)
+                create_project_folders(project_name)
             except Exception as exc:
                 self.log.warning(
                     "Cannot create starting folders: {}".format(exc),
@@ -34,7 +34,8 @@ from .lib import (
 class InventoryModel(TreeModel):
     """The model for the inventory"""

-    Columns = ["Name", "version", "count", "family", "loader", "objectName"]
+    Columns = ["Name", "version", "count", "family",
+               "group", "loader", "objectName"]

     OUTDATED_COLOR = QtGui.QColor(235, 30, 30)
     CHILD_OUTDATED_COLOR = QtGui.QColor(200, 160, 30)
@@ -157,8 +158,13 @@ class InventoryModel(TreeModel):
                 # Family icon
                 return item.get("familyIcon", None)

+            column_name = self.Columns[index.column()]
+
+            if column_name == "group" and item.get("group"):
+                return qtawesome.icon("fa.object-group",
+                                      color=get_default_entity_icon_color())
+
             if item.get("isGroupNode"):
-                column_name = self.Columns[index.column()]
                 if column_name == "active_site":
                     provider = item.get("active_site_provider")
                     return self._site_icons.get(provider)
@@ -423,6 +429,7 @@ class InventoryModel(TreeModel):
             group_node["familyIcon"] = family_icon
             group_node["count"] = len(group_items)
             group_node["isGroupNode"] = True
+            group_node["group"] = subset["data"].get("subsetGroup")

             if self.sync_enabled:
                 progress = get_progress_for_repre(
@@ -89,7 +89,8 @@ class SceneInventoryWindow(QtWidgets.QDialog):
         view.setColumnWidth(1, 55)   # version
         view.setColumnWidth(2, 55)   # count
         view.setColumnWidth(3, 150)  # family
-        view.setColumnWidth(4, 100)  # namespace
+        view.setColumnWidth(4, 120)  # group
+        view.setColumnWidth(5, 150)  # loader

         # apply delegates
         version_delegate = VersionDelegate(legacy_io, self)
@@ -9,11 +9,11 @@ import platform
 from Qt import QtCore, QtGui, QtWidgets

 import openpype.version
-from openpype.api import (
-    resources,
-    get_system_settings
-)
-from openpype.lib import get_openpype_execute_args, Logger
+from openpype import resources, style
+from openpype.lib import (
+    get_openpype_execute_args,
+    Logger,
+)
 from openpype.lib.openpype_version import (
     op_version_control_available,
     get_expected_version,
@@ -25,8 +25,8 @@ from openpype.lib.openpype_version import (
     get_openpype_version,
 )
 from openpype.modules import TrayModulesManager
-from openpype import style
 from openpype.settings import (
+    get_system_settings,
     SystemSettings,
     ProjectSettings,
     DefaultsNotDefined
@@ -774,10 +774,24 @@ class PypeTrayStarter(QtCore.QObject):


 def main():
+    log = Logger.get_logger(__name__)
     app = QtWidgets.QApplication.instance()
     if not app:
         app = QtWidgets.QApplication([])

+    for attr_name in (
+        "AA_EnableHighDpiScaling",
+        "AA_UseHighDpiPixmaps"
+    ):
+        attr = getattr(QtCore.Qt, attr_name, None)
+        if attr is None:
+            log.debug((
+                "Missing QtCore.Qt attribute \"{}\"."
+                " UI quality may be affected."
+            ).format(attr_name))
+        else:
+            app.setAttribute(attr)
+
     starter = PypeTrayStarter(app)

     # TODO remove when pype.exe will have an icon
@@ -15,10 +15,7 @@ from openpype.pipeline.workfile.lock_workfile import (
 )
 from openpype.tools.utils import PlaceholderLineEdit
 from openpype.tools.utils.delegates import PrettyTimeDelegate
-from openpype.lib import (
-    emit_event,
-    create_workdir_extra_folders,
-)
+from openpype.lib import emit_event
 from openpype.tools.workfiles.lock_dialog import WorkfileLockDialog
 from openpype.pipeline import (
     registered_host,

@@ -29,8 +26,10 @@ from openpype.pipeline.context_tools import (
     compute_session_changes,
     change_current_context
 )
-from openpype.pipeline.workfile import get_workfile_template_key
-
+from openpype.pipeline.workfile import (
+    get_workfile_template_key,
+    create_workdir_extra_folders,
+)

 from .model import (
     WorkAreaFilesModel,
@@ -470,7 +469,9 @@ class FilesWidget(QtWidgets.QWidget):
         host = self.host
         if self._is_workfile_locked(filepath):
             # add lockfile dialog
-            WorkfileLockDialog(filepath, parent=self)
+            dialog = WorkfileLockDialog(filepath, parent=self)
+            if not dialog.exec_():
+                return

         if isinstance(host, IWorkfileHost):
             has_unsaved_changes = host.workfile_has_unsaved_changes()
@@ -38,18 +38,10 @@ class WorkfileLockDialog(QtWidgets.QDialog):
         main_layout.addSpacing(10)
         main_layout.addWidget(btns_widget, 0)

-        cancel_btn.clicked.connect(self._on_cancel_click)
-        ignore_btn.clicked.connect(self._on_ignore_click)
+        cancel_btn.clicked.connect(self.reject)
+        ignore_btn.clicked.connect(self.accept)

     def showEvent(self, event):
         super(WorkfileLockDialog, self).showEvent(event)

         self.setStyleSheet(load_stylesheet())
-
-    def _on_ignore_click(self):
-        # Result is '1'
-        self.accept()
-
-    def _on_cancel_click(self):
-        # Result is '0'
-        self.reject()
@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
 """Package declaring Pype version."""
-__version__ = "3.14.2-nightly.3"
+__version__ = "3.14.2"