Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-25 05:14:40 +01:00)

Commit e81fd2d4cf: Merge branch 'develop' into feature/OP-4201_Load-effectPlates-from-Hiero
119 changed files with 2483 additions and 685 deletions
.github/workflows/milestone_assign.yml (2 changes, vendored)

@@ -2,7 +2,7 @@ name: Milestone - assign to PRs
on:
  pull_request_target:
    types: [opened, reopened, edited, synchronize]
    types: [closed]

jobs:
  run_if_release:
@@ -63,7 +63,8 @@ class OpenPypeVersion(semver.VersionInfo):
    """
    staging = False
    path = None
    _VERSION_REGEX = re.compile(r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?")  # noqa: E501
    # this should match any string complying with https://semver.org/
    _VERSION_REGEX = re.compile(r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?")  # noqa: E501
    _installed_version = None

    def __init__(self, *args, **kwargs):

@@ -211,6 +212,8 @@ class OpenPypeVersion(semver.VersionInfo):
            OpenPypeVersion: of detected or None.

        """
        # strip .zip ext if present
        string = re.sub(r"\.zip$", "", string, flags=re.IGNORECASE)
        m = re.search(OpenPypeVersion._VERSION_REGEX, string)
        if not m:
            return None
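As a quick illustration of what the relaxed pattern accepts, here is a small sketch that assumes the regex above is used verbatim; it still yields the named groups the class relies on.

import re

VERSION_REGEX = re.compile(
    r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)"
    r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?"
    r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?"
)

match = VERSION_REGEX.search("3.14.0-nightly.1+staging")
assert match.group("major", "minor", "patch") == ("3", "14", "0")
assert match.group("prerelease") == "nightly.1"
assert match.group("buildmetadata") == "staging"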
openpype/hooks/pre_copy_last_published_workfile.py (new file, 177 lines)

@@ -0,0 +1,177 @@
import os
import shutil
from time import sleep
from openpype.client.entities import (
    get_last_version_by_subset_id,
    get_representations,
    get_subsets,
)
from openpype.lib import PreLaunchHook
from openpype.lib.local_settings import get_local_site_id
from openpype.lib.profiles_filtering import filter_profiles
from openpype.pipeline.load.utils import get_representation_path
from openpype.settings.lib import get_project_settings


class CopyLastPublishedWorkfile(PreLaunchHook):
    """Copy last published workfile as first workfile.

    Prelaunch hook works only if last workfile leads to not existing file.
    - That is possible only if it's first version.
    """

    # Before `AddLastWorkfileToLaunchArgs`
    order = -1
    app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"]

    def execute(self):
        """Check if local workfile doesn't exist, else copy it.

        1- Check if setting for this feature is enabled
        2- Check if workfile in work area doesn't exist
        3- Check if published workfile exists and is copied locally in publish
        4- Substitute copied published workfile as first workfile

        Returns:
            None: This is a void method.
        """

        sync_server = self.modules_manager.get("sync_server")
        if not sync_server or not sync_server.enabled:
            self.log.debug("Sync server module is not enabled or available")
            return

        # Check there is no workfile available
        last_workfile = self.data.get("last_workfile_path")
        if os.path.exists(last_workfile):
            self.log.debug(
                "Last workfile exists. Skipping {} process.".format(
                    self.__class__.__name__
                )
            )
            return

        # Get data
        project_name = self.data["project_name"]
        task_name = self.data["task_name"]
        task_type = self.data["task_type"]
        host_name = self.application.host_name

        # Check settings has enabled it
        project_settings = get_project_settings(project_name)
        profiles = project_settings["global"]["tools"]["Workfiles"][
            "last_workfile_on_startup"
        ]
        filter_data = {
            "tasks": task_name,
            "task_types": task_type,
            "hosts": host_name,
        }
        last_workfile_settings = filter_profiles(profiles, filter_data)
        use_last_published_workfile = last_workfile_settings.get(
            "use_last_published_workfile"
        )
        if use_last_published_workfile is None:
            self.log.info(
                (
                    "Seems like old version of settings is used."
                    ' Can\'t access custom templates in host "{}".'.format(
                        host_name
                    )
                )
            )
            return
        elif use_last_published_workfile is False:
            self.log.info(
                (
                    'Project "{}" has turned off to use last published'
                    ' workfile as first workfile for host "{}"'.format(
                        project_name, host_name
                    )
                )
            )
            return

        self.log.info("Trying to fetch last published workfile...")

        project_doc = self.data.get("project_doc")
        asset_doc = self.data.get("asset_doc")
        anatomy = self.data.get("anatomy")

        # Check it can proceed
        if not project_doc and not asset_doc:
            return

        # Get subset id
        subset_id = next(
            (
                subset["_id"]
                for subset in get_subsets(
                    project_name,
                    asset_ids=[asset_doc["_id"]],
                    fields=["_id", "data.family", "data.families"],
                )
                if subset["data"].get("family") == "workfile"
                # Legacy compatibility
                or "workfile" in subset["data"].get("families", {})
            ),
            None,
        )
        if not subset_id:
            self.log.debug(
                'No any workfile for asset "{}".'.format(asset_doc["name"])
            )
            return

        # Get workfile representation
        last_version_doc = get_last_version_by_subset_id(
            project_name, subset_id, fields=["_id"]
        )
        if not last_version_doc:
            self.log.debug("Subset does not have any versions")
            return

        workfile_representation = next(
            (
                representation
                for representation in get_representations(
                    project_name, version_ids=[last_version_doc["_id"]]
                )
                if representation["context"]["task"]["name"] == task_name
            ),
            None,
        )

        if not workfile_representation:
            self.log.debug(
                'No published workfile for task "{}" and host "{}".'.format(
                    task_name, host_name
                )
            )
            return

        local_site_id = get_local_site_id()
        sync_server.add_site(
            project_name,
            workfile_representation["_id"],
            local_site_id,
            force=True,
            priority=99,
            reset_timer=True,
        )

        while not sync_server.is_representation_on_site(
            project_name, workfile_representation["_id"], local_site_id
        ):
            sleep(5)

        # Get paths
        published_workfile_path = get_representation_path(
            workfile_representation, root=anatomy.roots
        )
        local_workfile_dir = os.path.dirname(last_workfile)

        # Copy file and substitute path
        self.data["last_workfile_path"] = shutil.copy(
            published_workfile_path, local_workfile_dir
        )
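To make the settings dependency concrete, here is a minimal sketch of a "last_workfile_on_startup" profile that filter_profiles could match for this hook. The exact schema lives in project settings, so any key beyond the ones read above is an assumption.

from openpype.lib.profiles_filtering import filter_profiles

# Hypothetical profile entry; only the keys read by the hook above are certain.
profiles = [{
    "tasks": [],                          # empty list usually means "any task"
    "task_types": [],                     # any task type
    "hosts": ["blender", "photoshop"],    # hosts this rule applies to
    "use_last_published_workfile": True,  # the flag the hook checks
}]
filter_data = {"tasks": "animation", "task_types": "Animation", "hosts": "blender"}
matched = filter_profiles(profiles, filter_data)
# matched.get("use_last_published_workfile") would be True, so the hook continues
# and asks the sync server to download the published workfile.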
@ -1,5 +1,4 @@
|
|||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
|
||||
class AfterEffectsAddon(OpenPypeModule, IHostAddon):
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
BLENDER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
HOST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -225,7 +225,8 @@ class FlameMenuUniversal(_FlameMenuApp):
|
|||
|
||||
menu['actions'].append({
|
||||
"name": "Load...",
|
||||
"execute": lambda x: self.tools_helper.show_loader()
|
||||
"execute": lambda x: callback_selection(
|
||||
x, self.tools_helper.show_loader)
|
||||
})
|
||||
menu['actions'].append({
|
||||
"name": "Manage...",
|
||||
|
|
|
|||
|
|
@@ -4,13 +4,13 @@ import shutil
from copy import deepcopy
from xml.etree import ElementTree as ET

import qargparse
from Qt import QtCore, QtWidgets

import qargparse
from openpype import style
from openpype.settings import get_current_project_settings
from openpype.lib import Logger
from openpype.pipeline import LegacyCreator, LoaderPlugin
from openpype.settings import get_current_project_settings

from . import constants
from . import lib as flib

@@ -690,6 +690,54 @@ class ClipLoader(LoaderPlugin):
        )
    ]

    _mapping = None

    def get_colorspace(self, context):
        """Get colorspace name

        Look either to version data or representation data.

        Args:
            context (dict): version context data

        Returns:
            str: colorspace name or None
        """
        version = context['version']
        version_data = version.get("data", {})
        colorspace = version_data.get(
            "colorspace", None
        )

        if (
            not colorspace
            or colorspace == "Unknown"
        ):
            colorspace = context["representation"]["data"].get(
                "colorspace", None)

        return colorspace

    @classmethod
    def get_native_colorspace(cls, input_colorspace):
        """Return native colorspace name.

        Args:
            input_colorspace (str | None): colorspace name

        Returns:
            str: native colorspace name defined in mapping or None
        """
        if not cls._mapping:
            settings = get_current_project_settings()["flame"]
            mapping = settings["imageio"]["profilesMapping"]["inputs"]
            cls._mapping = {
                input["ocioName"]: input["flameName"]
                for input in mapping
            }

        return cls._mapping.get(input_colorspace)


class OpenClipSolver(flib.MediaInfoFile):
    create_new_clip = False
@@ -36,14 +36,15 @@ class LoadClip(opfapi.ClipLoader):
        version = context['version']
        version_data = version.get("data", {})
        version_name = version.get("name", None)
        colorspace = version_data.get("colorspace", None)
        colorspace = self.get_colorspace(context)

        clip_name = StringTemplate(self.clip_name_template).format(
            context["representation"]["context"])

        # TODO: settings in imageio
        # convert colorspace with ocio to flame mapping
        # in imageio flame section
        colorspace = colorspace
        colorspace = self.get_native_colorspace(colorspace)
        self.log.info("Loading with colorspace: `{}`".format(colorspace))

        # create workfile path
        workfile_dir = os.environ["AVALON_WORKDIR"]
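The OCIO-to-Flame lookup used above reads the project "imageio" settings. Here is a hedged sketch of the mapping data it expects; only the "ocioName" and "flameName" keys are confirmed by the comprehension in get_native_colorspace, the values are illustrative.

# Illustrative shape of settings["flame"]["imageio"]["profilesMapping"]["inputs"].
inputs = [
    {"ocioName": "ACES - ACEScg", "flameName": "ACEScg"},
    {"ocioName": "Output - Rec.709", "flameName": "Rec.709 video"},
]
mapping = {item["ocioName"]: item["flameName"] for item in inputs}
assert mapping.get("ACES - ACEScg") == "ACEScg"
assert mapping.get("not-in-mapping") is None  # falls back to None, as in get_native_colorspace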
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
from copy import deepcopy
|
||||
import os
|
||||
import flame
|
||||
from pprint import pformat
|
||||
|
|
@ -22,7 +23,7 @@ class LoadClipBatch(opfapi.ClipLoader):
|
|||
|
||||
# settings
|
||||
reel_name = "OP_LoadedReel"
|
||||
clip_name_template = "{asset}_{subset}<_{output}>"
|
||||
clip_name_template = "{batch}_{asset}_{subset}<_{output}>"
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
|
||||
|
|
@ -34,19 +35,22 @@ class LoadClipBatch(opfapi.ClipLoader):
|
|||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
version_name = version.get("name", None)
|
||||
colorspace = version_data.get("colorspace", None)
|
||||
colorspace = self.get_colorspace(context)
|
||||
|
||||
# in case output is not in context replace key to representation
|
||||
if not context["representation"]["context"].get("output"):
|
||||
self.clip_name_template.replace("output", "representation")
|
||||
|
||||
clip_name = StringTemplate(self.clip_name_template).format(
|
||||
context["representation"]["context"])
|
||||
formating_data = deepcopy(context["representation"]["context"])
|
||||
formating_data["batch"] = self.batch.name.get_value()
|
||||
|
||||
clip_name = StringTemplate(self.clip_name_template).format(
|
||||
formating_data)
|
||||
|
||||
# TODO: settings in imageio
|
||||
# convert colorspace with ocio to flame mapping
|
||||
# in imageio flame section
|
||||
colorspace = colorspace
|
||||
colorspace = self.get_native_colorspace(colorspace)
|
||||
self.log.info("Loading with colorspace: `{}`".format(colorspace))
|
||||
|
||||
# create workfile path
|
||||
workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"]
|
||||
|
|
@ -56,6 +60,7 @@ class LoadClipBatch(opfapi.ClipLoader):
|
|||
openclip_path = os.path.join(
|
||||
openclip_dir, clip_name + ".clip"
|
||||
)
|
||||
|
||||
if not os.path.exists(openclip_dir):
|
||||
os.makedirs(openclip_dir)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
HARMONY_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import os
|
||||
import platform
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
HIERO_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -170,7 +170,10 @@ class CreatorWidget(QtWidgets.QDialog):
|
|||
for func, val in kwargs.items():
|
||||
if getattr(item, func):
|
||||
func_attr = getattr(item, func)
|
||||
func_attr(val)
|
||||
if isinstance(val, tuple):
|
||||
func_attr(*val)
|
||||
else:
|
||||
func_attr(val)
|
||||
|
||||
# add to layout
|
||||
layout.addRow(label, item)
|
||||
|
|
@ -273,8 +276,8 @@ class CreatorWidget(QtWidgets.QDialog):
|
|||
elif v["type"] == "QSpinBox":
|
||||
data[k]["value"] = self.create_row(
|
||||
content_layout, "QSpinBox", v["label"],
|
||||
setValue=v["value"], setMinimum=0,
|
||||
setMaximum=100000, setToolTip=tool_tip)
|
||||
setRange=(1, 9999999), setValue=v["value"],
|
||||
setToolTip=tool_tip)
|
||||
return data
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
HOUDINI_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
132
openpype/hosts/maya/plugins/load/load_abc_to_standin.py
Normal file
132
openpype/hosts/maya/plugins/load/load_abc_to_standin.py
Normal file
|
|
@ -0,0 +1,132 @@
|
|||
import os
|
||||
|
||||
from openpype.pipeline import (
|
||||
legacy_io,
|
||||
load,
|
||||
get_representation_path
|
||||
)
|
||||
from openpype.settings import get_project_settings
|
||||
|
||||
|
||||
class AlembicStandinLoader(load.LoaderPlugin):
|
||||
"""Load Alembic as Arnold Standin"""
|
||||
|
||||
families = ["animation", "model", "pointcache"]
|
||||
representations = ["abc"]
|
||||
|
||||
label = "Import Alembic as Arnold Standin"
|
||||
order = -5
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
|
||||
import maya.cmds as cmds
|
||||
import mtoa.ui.arnoldmenu
|
||||
from openpype.hosts.maya.api.pipeline import containerise
|
||||
from openpype.hosts.maya.api.lib import unique_namespace
|
||||
|
||||
version = context["version"]
|
||||
version_data = version.get("data", {})
|
||||
family = version["data"]["families"]
|
||||
self.log.info("version_data: {}\n".format(version_data))
|
||||
self.log.info("family: {}\n".format(family))
|
||||
frameStart = version_data.get("frameStart", None)
|
||||
|
||||
asset = context["asset"]["name"]
|
||||
namespace = namespace or unique_namespace(
|
||||
asset + "_",
|
||||
prefix="_" if asset[0].isdigit() else "",
|
||||
suffix="_",
|
||||
)
|
||||
|
||||
# Root group
|
||||
label = "{}:{}".format(namespace, name)
|
||||
root = cmds.group(name=label, empty=True)
|
||||
|
||||
settings = get_project_settings(os.environ['AVALON_PROJECT'])
|
||||
colors = settings["maya"]["load"]["colors"]
|
||||
fps = legacy_io.Session["AVALON_FPS"]
|
||||
c = colors.get(family[0])
|
||||
if c is not None:
|
||||
r = (float(c[0]) / 255)
|
||||
g = (float(c[1]) / 255)
|
||||
b = (float(c[2]) / 255)
|
||||
cmds.setAttr(root + ".useOutlinerColor", 1)
|
||||
cmds.setAttr(root + ".outlinerColor",
|
||||
r, g, b)
|
||||
|
||||
transform_name = label + "_ABC"
|
||||
|
||||
standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0]
|
||||
standin = cmds.listRelatives(standinShape, parent=True,
|
||||
typ="transform")
|
||||
standin = cmds.rename(standin, transform_name)
|
||||
standinShape = cmds.listRelatives(standin, children=True)[0]
|
||||
|
||||
cmds.parent(standin, root)
|
||||
|
||||
# Set the standin filepath
|
||||
cmds.setAttr(standinShape + ".dso", self.fname, type="string")
|
||||
cmds.setAttr(standinShape + ".abcFPS", float(fps))
|
||||
|
||||
if frameStart is None:
|
||||
cmds.setAttr(standinShape + ".useFrameExtension", 0)
|
||||
|
||||
elif "model" in family:
|
||||
cmds.setAttr(standinShape + ".useFrameExtension", 0)
|
||||
|
||||
else:
|
||||
cmds.setAttr(standinShape + ".useFrameExtension", 1)
|
||||
|
||||
nodes = [root, standin]
|
||||
self[:] = nodes
|
||||
|
||||
return containerise(
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
nodes=nodes,
|
||||
context=context,
|
||||
loader=self.__class__.__name__)
|
||||
|
||||
def update(self, container, representation):
|
||||
|
||||
import pymel.core as pm
|
||||
|
||||
path = get_representation_path(representation)
|
||||
fps = legacy_io.Session["AVALON_FPS"]
|
||||
# Update the standin
|
||||
standins = list()
|
||||
members = pm.sets(container['objectName'], query=True)
|
||||
self.log.info("container:{}".format(container))
|
||||
for member in members:
|
||||
shape = member.getShape()
|
||||
if (shape and shape.type() == "aiStandIn"):
|
||||
standins.append(shape)
|
||||
|
||||
for standin in standins:
|
||||
standin.dso.set(path)
|
||||
standin.abcFPS.set(float(fps))
|
||||
if "modelMain" in container['objectName']:
|
||||
standin.useFrameExtension.set(0)
|
||||
else:
|
||||
standin.useFrameExtension.set(1)
|
||||
|
||||
container = pm.PyNode(container["objectName"])
|
||||
container.representation.set(str(representation["_id"]))
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
||||
def remove(self, container):
|
||||
import maya.cmds as cmds
|
||||
members = cmds.sets(container['objectName'], query=True)
|
||||
cmds.lockNode(members, lock=False)
|
||||
cmds.delete([container['objectName']] + members)
|
||||
|
||||
# Clean up the namespace
|
||||
try:
|
||||
cmds.namespace(removeNamespace=container['namespace'],
|
||||
deleteNamespaceContent=True)
|
||||
except RuntimeError:
|
||||
pass
|
||||
|
|
@ -73,8 +73,8 @@ class YetiCacheLoader(load.LoaderPlugin):
|
|||
|
||||
c = colors.get(family)
|
||||
if c is not None:
|
||||
cmds.setAttr(group_name + ".useOutlinerColor", 1)
|
||||
cmds.setAttr(group_name + ".outlinerColor",
|
||||
cmds.setAttr(group_node + ".useOutlinerColor", 1)
|
||||
cmds.setAttr(group_node + ".outlinerColor",
|
||||
(float(c[0])/255),
|
||||
(float(c[1])/255),
|
||||
(float(c[2])/255)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import os
|
||||
import platform
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,7 +1,8 @@
import os
import nuke
import qargparse

from pprint import pformat
from copy import deepcopy
from openpype.lib import Logger
from openpype.client import (
    get_version_by_id,
    get_last_version_by_subset_id,

@@ -28,6 +29,7 @@ class LoadClip(plugin.NukeLoader):

    Either it is image sequence or video file.
    """
    log = Logger.get_logger(__name__)

    families = [
        "source",

@@ -85,24 +87,19 @@ class LoadClip(plugin.NukeLoader):
        + plugin.get_review_presets_config()
    )

    def _fix_path_for_knob(self, filepath, repre_cont):
        basename = os.path.basename(filepath)
        dirname = os.path.dirname(filepath)
        frame = repre_cont.get("frame")
        assert frame, "Representation is not sequence"

        padding = len(str(frame))
        basename = basename.replace(frame, "#" * padding)
        return os.path.join(dirname, basename).replace("\\", "/")

    def load(self, context, name, namespace, options):
        repre = context["representation"]
        representation = context["representation"]
        # reset container id so it is always unique for each instance
        self.reset_container_id()

        is_sequence = len(repre["files"]) > 1
        is_sequence = len(representation["files"]) > 1

        filepath = self.fname.replace("\\", "/")
        if is_sequence:
            representation = self._representation_with_hash_in_frame(
                representation
            )
        filepath = get_representation_path(representation).replace("\\", "/")
        self.log.debug("_ filepath: {}".format(filepath))

        start_at_workfile = options.get(
            "start_at_workfile", self.options_defaults["start_at_workfile"])

@@ -112,11 +109,10 @@ class LoadClip(plugin.NukeLoader):

        version = context['version']
        version_data = version.get("data", {})
        repre_id = repre["_id"]
        repre_id = representation["_id"]

        repre_cont = repre["context"]

        self.log.info("version_data: {}\n".format(version_data))
        self.log.debug("_ version_data: {}\n".format(
            pformat(version_data)))
        self.log.debug(
            "Representation id `{}` ".format(repre_id))

@@ -132,8 +128,6 @@ class LoadClip(plugin.NukeLoader):
            duration = last - first
            first = 1
            last = first + duration
        elif "#" not in filepath:
            filepath = self._fix_path_for_knob(filepath, repre_cont)

        # Fallback to asset name when namespace is None
        if namespace is None:

@@ -144,7 +138,7 @@ class LoadClip(plugin.NukeLoader):
                "Representation id `{}` is failing to load".format(repre_id))
            return

        read_name = self._get_node_name(repre)
        read_name = self._get_node_name(representation)

        # Create the Loader with the filename path set
        read_node = nuke.createNode(

@@ -157,7 +151,7 @@ class LoadClip(plugin.NukeLoader):
            read_node["file"].setValue(filepath)

            used_colorspace = self._set_colorspace(
                read_node, version_data, repre["data"])
                read_node, version_data, representation["data"])

            self._set_range_to_node(read_node, first, last, start_at_workfile)

@@ -179,7 +173,7 @@ class LoadClip(plugin.NukeLoader):
                    data_imprint[k] = version

                elif k == 'colorspace':
                    colorspace = repre["data"].get(k)
                    colorspace = representation["data"].get(k)
                    colorspace = colorspace or version_data.get(k)
                    data_imprint["db_colorspace"] = colorspace
                    if used_colorspace:

@@ -213,6 +207,20 @@ class LoadClip(plugin.NukeLoader):
    def switch(self, container, representation):
        self.update(container, representation)

    def _representation_with_hash_in_frame(self, representation):
        """Convert frame key value to padded hash

        Args:
            representation (dict): representation data

        Returns:
            dict: altered representation data
        """
        representation = deepcopy(representation)
        frame = representation["context"]["frame"]
        representation["context"]["frame"] = "#" * len(str(frame))
        return representation

    def update(self, container, representation):
        """Update the Loader's path

@@ -225,7 +233,13 @@ class LoadClip(plugin.NukeLoader):
        is_sequence = len(representation["files"]) > 1

        read_node = nuke.toNode(container['objectName'])

        if is_sequence:
            representation = self._representation_with_hash_in_frame(
                representation
            )
        filepath = get_representation_path(representation).replace("\\", "/")
        self.log.debug("_ filepath: {}".format(filepath))

        start_at_workfile = "start at" in read_node['frame_mode'].value()

@@ -240,8 +254,6 @@ class LoadClip(plugin.NukeLoader):
        version_data = version_doc.get("data", {})
        repre_id = representation["_id"]

        repre_cont = representation["context"]

        # colorspace profile
        colorspace = representation["data"].get("colorspace")
        colorspace = colorspace or version_data.get("colorspace")

@@ -258,8 +270,6 @@ class LoadClip(plugin.NukeLoader):
            duration = last - first
            first = 1
            last = first + duration
        elif "#" not in filepath:
            filepath = self._fix_path_for_knob(filepath, repre_cont)

        if not filepath:
            self.log.warning(

@@ -348,8 +358,10 @@ class LoadClip(plugin.NukeLoader):
        time_warp_nodes = version_data.get('timewarps', [])
        last_node = None
        source_id = self.get_container_id(parent_node)
        self.log.info("__ source_id: {}".format(source_id))
        self.log.info("__ members: {}".format(self.get_members(parent_node)))
        self.log.debug("__ source_id: {}".format(source_id))
        self.log.debug("__ members: {}".format(
            self.get_members(parent_node)))

        dependent_nodes = self.clear_members(parent_node)

        with maintained_selection():
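A tiny sketch of what _representation_with_hash_in_frame does to the context, using only the logic shown above; the sample representation dict is illustrative.

from copy import deepcopy

# Illustrative representation; real documents carry many more keys.
representation = {"context": {"frame": "0001"}, "files": ["a.0001.exr", "a.0002.exr"]}

altered = deepcopy(representation)
frame = altered["context"]["frame"]
altered["context"]["frame"] = "#" * len(str(frame))
print(altered["context"]["frame"])         # "####", so the resolved path becomes a.####.exr
print(representation["context"]["frame"])  # original stays "0001" thanks to deepcopy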
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
PHOTOSHOP_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import os
|
||||
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
from .utils import RESOLVE_ROOT_DIR
|
||||
|
||||
|
|
|
|||
|
|
@ -4,8 +4,7 @@ import click
|
|||
|
||||
from openpype.lib import get_openpype_execute_args
|
||||
from openpype.lib.execute import run_detached_process
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import ITrayAction, IHostAddon
|
||||
from openpype.modules import OpenPypeModule, ITrayAction, IHostAddon
|
||||
|
||||
STANDALONEPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -4,8 +4,7 @@ import click
|
|||
|
||||
from openpype.lib import get_openpype_execute_args
|
||||
from openpype.lib.execute import run_detached_process
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import ITrayAction, IHostAddon
|
||||
from openpype.modules import OpenPypeModule, ITrayAction, IHostAddon
|
||||
|
||||
TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,49 +1,33 @@
|
|||
from openpype.lib.attribute_definitions import FileDef
|
||||
from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS
|
||||
from openpype.pipeline.create import (
|
||||
Creator,
|
||||
HiddenCreator,
|
||||
CreatedInstance
|
||||
CreatedInstance,
|
||||
cache_and_get_instances,
|
||||
PRE_CREATE_THUMBNAIL_KEY,
|
||||
)
|
||||
|
||||
from .pipeline import (
|
||||
list_instances,
|
||||
update_instances,
|
||||
remove_instances,
|
||||
HostContext,
|
||||
)
|
||||
from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS
|
||||
|
||||
|
||||
REVIEW_EXTENSIONS = IMAGE_EXTENSIONS + VIDEO_EXTENSIONS
|
||||
|
||||
|
||||
def _cache_and_get_instances(creator):
|
||||
"""Cache instances in shared data.
|
||||
|
||||
Args:
|
||||
creator (Creator): Plugin which would like to get instances from host.
|
||||
|
||||
Returns:
|
||||
List[Dict[str, Any]]: Cached instances list from host implementation.
|
||||
"""
|
||||
|
||||
shared_key = "openpype.traypublisher.instances"
|
||||
if shared_key not in creator.collection_shared_data:
|
||||
creator.collection_shared_data[shared_key] = list_instances()
|
||||
return creator.collection_shared_data[shared_key]
|
||||
REVIEW_EXTENSIONS = set(IMAGE_EXTENSIONS) | set(VIDEO_EXTENSIONS)
|
||||
SHARED_DATA_KEY = "openpype.traypublisher.instances"
|
||||
|
||||
|
||||
class HiddenTrayPublishCreator(HiddenCreator):
|
||||
host_name = "traypublisher"
|
||||
|
||||
def collect_instances(self):
|
||||
for instance_data in _cache_and_get_instances(self):
|
||||
creator_id = instance_data.get("creator_identifier")
|
||||
if creator_id == self.identifier:
|
||||
instance = CreatedInstance.from_existing(
|
||||
instance_data, self
|
||||
)
|
||||
self._add_instance_to_context(instance)
|
||||
instances_by_identifier = cache_and_get_instances(
|
||||
self, SHARED_DATA_KEY, list_instances
|
||||
)
|
||||
for instance_data in instances_by_identifier[self.identifier]:
|
||||
instance = CreatedInstance.from_existing(instance_data, self)
|
||||
self._add_instance_to_context(instance)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
update_instances(update_list)
|
||||
|
|
@ -74,13 +58,12 @@ class TrayPublishCreator(Creator):
|
|||
host_name = "traypublisher"
|
||||
|
||||
def collect_instances(self):
|
||||
for instance_data in _cache_and_get_instances(self):
|
||||
creator_id = instance_data.get("creator_identifier")
|
||||
if creator_id == self.identifier:
|
||||
instance = CreatedInstance.from_existing(
|
||||
instance_data, self
|
||||
)
|
||||
self._add_instance_to_context(instance)
|
||||
instances_by_identifier = cache_and_get_instances(
|
||||
self, SHARED_DATA_KEY, list_instances
|
||||
)
|
||||
for instance_data in instances_by_identifier[self.identifier]:
|
||||
instance = CreatedInstance.from_existing(instance_data, self)
|
||||
self._add_instance_to_context(instance)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
update_instances(update_list)
|
||||
|
|
@ -110,11 +93,14 @@ class TrayPublishCreator(Creator):
|
|||
|
||||
class SettingsCreator(TrayPublishCreator):
|
||||
create_allow_context_change = True
|
||||
create_allow_thumbnail = True
|
||||
|
||||
extensions = []
|
||||
|
||||
def create(self, subset_name, data, pre_create_data):
|
||||
# Pass precreate data to creator attributes
|
||||
thumbnail_path = pre_create_data.pop(PRE_CREATE_THUMBNAIL_KEY, None)
|
||||
|
||||
data["creator_attributes"] = pre_create_data
|
||||
data["settings_creator"] = True
|
||||
# Create new instance
|
||||
|
|
@ -122,6 +108,9 @@ class SettingsCreator(TrayPublishCreator):
|
|||
|
||||
self._store_new_instance(new_instance)
|
||||
|
||||
if thumbnail_path:
|
||||
self.set_instance_thumbnail_path(new_instance.id, thumbnail_path)
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
FileDef(
|
||||
|
|
|
|||
|
|
@ -40,7 +40,8 @@ class CollectMovieBatch(
|
|||
if creator_attributes["add_review_family"]:
|
||||
repre["tags"].append("review")
|
||||
instance.data["families"].append("review")
|
||||
instance.data["thumbnailSource"] = file_url
|
||||
if not instance.data.get("thumbnailSource"):
|
||||
instance.data["thumbnailSource"] = file_url
|
||||
|
||||
instance.data["source"] = file_url
|
||||
|
||||
|
|
|
|||
|
|
@ -188,7 +188,8 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin):
|
|||
if "review" not in instance.data["families"]:
|
||||
instance.data["families"].append("review")
|
||||
|
||||
instance.data["thumbnailSource"] = first_filepath
|
||||
if not instance.data.get("thumbnailSource"):
|
||||
instance.data["thumbnailSource"] = first_filepath
|
||||
|
||||
review_representation["tags"].append("review")
|
||||
self.log.debug("Representation {} was marked for review. {}".format(
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
TVPAINT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import qargparse
|
||||
from openpype.lib.attribute_definitions import BoolDef
|
||||
from openpype.hosts.tvpaint.api import plugin
|
||||
from openpype.hosts.tvpaint.api.lib import execute_george_through_file
|
||||
|
||||
|
|
@ -27,26 +27,28 @@ class ImportImage(plugin.Loader):
|
|||
"preload": True
|
||||
}
|
||||
|
||||
options = [
|
||||
qargparse.Boolean(
|
||||
"stretch",
|
||||
label="Stretch to project size",
|
||||
default=True,
|
||||
help="Stretch loaded image/s to project resolution?"
|
||||
),
|
||||
qargparse.Boolean(
|
||||
"timestretch",
|
||||
label="Stretch to timeline length",
|
||||
default=True,
|
||||
help="Clip loaded image/s to timeline length?"
|
||||
),
|
||||
qargparse.Boolean(
|
||||
"preload",
|
||||
label="Preload loaded image/s",
|
||||
default=True,
|
||||
help="Preload image/s?"
|
||||
)
|
||||
]
|
||||
@classmethod
|
||||
def get_options(cls, contexts):
|
||||
return [
|
||||
BoolDef(
|
||||
"stretch",
|
||||
label="Stretch to project size",
|
||||
default=cls.defaults["stretch"],
|
||||
tooltip="Stretch loaded image/s to project resolution?"
|
||||
),
|
||||
BoolDef(
|
||||
"timestretch",
|
||||
label="Stretch to timeline length",
|
||||
default=cls.defaults["timestretch"],
|
||||
tooltip="Clip loaded image/s to timeline length?"
|
||||
),
|
||||
BoolDef(
|
||||
"preload",
|
||||
label="Preload loaded image/s",
|
||||
default=cls.defaults["preload"],
|
||||
tooltip="Preload image/s?"
|
||||
)
|
||||
]
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
stretch = options.get("stretch", self.defaults["stretch"])
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import collections
|
||||
|
||||
import qargparse
|
||||
|
||||
from openpype.lib.attribute_definitions import BoolDef
|
||||
from openpype.pipeline import (
|
||||
get_representation_context,
|
||||
register_host,
|
||||
|
|
@ -42,26 +41,28 @@ class LoadImage(plugin.Loader):
|
|||
"preload": True
|
||||
}
|
||||
|
||||
options = [
|
||||
qargparse.Boolean(
|
||||
"stretch",
|
||||
label="Stretch to project size",
|
||||
default=True,
|
||||
help="Stretch loaded image/s to project resolution?"
|
||||
),
|
||||
qargparse.Boolean(
|
||||
"timestretch",
|
||||
label="Stretch to timeline length",
|
||||
default=True,
|
||||
help="Clip loaded image/s to timeline length?"
|
||||
),
|
||||
qargparse.Boolean(
|
||||
"preload",
|
||||
label="Preload loaded image/s",
|
||||
default=True,
|
||||
help="Preload image/s?"
|
||||
)
|
||||
]
|
||||
@classmethod
|
||||
def get_options(cls, contexts):
|
||||
return [
|
||||
BoolDef(
|
||||
"stretch",
|
||||
label="Stretch to project size",
|
||||
default=cls.defaults["stretch"],
|
||||
tooltip="Stretch loaded image/s to project resolution?"
|
||||
),
|
||||
BoolDef(
|
||||
"timestretch",
|
||||
label="Stretch to timeline length",
|
||||
default=cls.defaults["timestretch"],
|
||||
tooltip="Clip loaded image/s to timeline length?"
|
||||
),
|
||||
BoolDef(
|
||||
"preload",
|
||||
label="Preload loaded image/s",
|
||||
default=cls.defaults["preload"],
|
||||
tooltip="Preload image/s?"
|
||||
)
|
||||
]
|
||||
|
||||
def load(self, context, name, namespace, options):
|
||||
stretch = options.get("stretch", self.defaults["stretch"])
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ class CollectOutputFrameRange(pyblish.api.ContextPlugin):
|
|||
|
||||
When instances are collected context does not contain `frameStart` and
|
||||
`frameEnd` keys yet. They are collected in global plugin
|
||||
`CollectAvalonEntities`.
|
||||
`CollectContextEntities`.
|
||||
"""
|
||||
label = "Collect output frame range"
|
||||
order = pyblish.api.CollectorOrder
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ class ValidateMarks(pyblish.api.ContextPlugin):
|
|||
def get_expected_data(context):
|
||||
scene_mark_in = context.data["sceneMarkIn"]
|
||||
|
||||
# Data collected in `CollectAvalonEntities`
|
||||
# Data collected in `CollectContextEntities`
|
||||
frame_end = context.data["frameEnd"]
|
||||
frame_start = context.data["frameStart"]
|
||||
handle_start = context.data["handleStart"]
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -2,8 +2,7 @@ import os
|
|||
|
||||
import click
|
||||
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IHostAddon
|
||||
from openpype.modules import OpenPypeModule, IHostAddon
|
||||
|
||||
WEBPUBLISHER_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
|
|
@ -83,8 +83,9 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
|
|||
self.log.info("task_data:: {}".format(task_data))
|
||||
|
||||
is_sequence = len(task_data["files"]) > 1
|
||||
first_file = task_data["files"][0]
|
||||
|
||||
_, extension = os.path.splitext(task_data["files"][0])
|
||||
_, extension = os.path.splitext(first_file)
|
||||
family, families, tags = self._get_family(
|
||||
self.task_type_to_family,
|
||||
task_type,
|
||||
|
|
@ -149,10 +150,13 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
|
|||
self.log.warning("Unable to count frames "
|
||||
"duration {}".format(no_of_frames))
|
||||
|
||||
# raise ValueError("STOP")
|
||||
instance.data["handleStart"] = asset_doc["data"]["handleStart"]
|
||||
instance.data["handleEnd"] = asset_doc["data"]["handleEnd"]
|
||||
|
||||
if "review" in tags:
|
||||
first_file_path = os.path.join(task_dir, first_file)
|
||||
instance.data["thumbnailSource"] = first_file_path
|
||||
|
||||
instances.append(instance)
|
||||
self.log.info("instance.data:: {}".format(instance.data))
|
||||
|
||||
|
|
|
|||
|
|
@ -1,137 +0,0 @@
|
|||
import os
|
||||
import shutil
|
||||
|
||||
import pyblish.api
|
||||
from openpype.lib import (
|
||||
get_ffmpeg_tool_path,
|
||||
|
||||
run_subprocess,
|
||||
|
||||
get_transcode_temp_directory,
|
||||
convert_input_paths_for_ffmpeg,
|
||||
should_convert_for_ffmpeg
|
||||
)
|
||||
|
||||
|
||||
class ExtractThumbnail(pyblish.api.InstancePlugin):
|
||||
"""Create jpg thumbnail from input using ffmpeg."""
|
||||
|
||||
label = "Extract Thumbnail"
|
||||
order = pyblish.api.ExtractorOrder
|
||||
families = [
|
||||
"render",
|
||||
"image"
|
||||
]
|
||||
hosts = ["webpublisher"]
|
||||
targets = ["filespublish"]
|
||||
|
||||
def process(self, instance):
|
||||
self.log.info("subset {}".format(instance.data['subset']))
|
||||
|
||||
filtered_repres = self._get_filtered_repres(instance)
|
||||
for repre in filtered_repres:
|
||||
repre_files = repre["files"]
|
||||
if not isinstance(repre_files, (list, tuple)):
|
||||
input_file = repre_files
|
||||
else:
|
||||
file_index = int(float(len(repre_files)) * 0.5)
|
||||
input_file = repre_files[file_index]
|
||||
|
||||
stagingdir = os.path.normpath(repre["stagingDir"])
|
||||
|
||||
full_input_path = os.path.join(stagingdir, input_file)
|
||||
self.log.info("Input filepath: {}".format(full_input_path))
|
||||
|
||||
do_convert = should_convert_for_ffmpeg(full_input_path)
|
||||
# If result is None the requirement of conversion can't be
|
||||
# determined
|
||||
if do_convert is None:
|
||||
self.log.info((
|
||||
"Can't determine if representation requires conversion."
|
||||
" Skipped."
|
||||
))
|
||||
continue
|
||||
|
||||
# Do conversion if needed
|
||||
# - change staging dir of source representation
|
||||
# - must be set back after output definitions processing
|
||||
convert_dir = None
|
||||
if do_convert:
|
||||
convert_dir = get_transcode_temp_directory()
|
||||
filename = os.path.basename(full_input_path)
|
||||
convert_input_paths_for_ffmpeg(
|
||||
[full_input_path],
|
||||
convert_dir,
|
||||
self.log
|
||||
)
|
||||
full_input_path = os.path.join(convert_dir, filename)
|
||||
|
||||
filename = os.path.splitext(input_file)[0]
|
||||
while filename.endswith("."):
|
||||
filename = filename[:-1]
|
||||
thumbnail_filename = filename + "_thumbnail.jpg"
|
||||
full_output_path = os.path.join(stagingdir, thumbnail_filename)
|
||||
|
||||
self.log.info("output {}".format(full_output_path))
|
||||
|
||||
ffmpeg_args = [
|
||||
get_ffmpeg_tool_path("ffmpeg"),
|
||||
"-y",
|
||||
"-i", full_input_path,
|
||||
"-vframes", "1",
|
||||
full_output_path
|
||||
]
|
||||
|
||||
# run subprocess
|
||||
self.log.debug("{}".format(" ".join(ffmpeg_args)))
|
||||
try: # temporary until oiiotool is supported cross platform
|
||||
run_subprocess(
|
||||
ffmpeg_args, logger=self.log
|
||||
)
|
||||
except RuntimeError as exp:
|
||||
if "Compression" in str(exp):
|
||||
self.log.debug(
|
||||
"Unsupported compression on input files. Skipping!!!"
|
||||
)
|
||||
return
|
||||
self.log.warning("Conversion crashed", exc_info=True)
|
||||
raise
|
||||
|
||||
new_repre = {
|
||||
"name": "thumbnail",
|
||||
"ext": "jpg",
|
||||
"files": thumbnail_filename,
|
||||
"stagingDir": stagingdir,
|
||||
"thumbnail": True,
|
||||
"tags": ["thumbnail"]
|
||||
}
|
||||
|
||||
# adding representation
|
||||
self.log.debug("Adding: {}".format(new_repre))
|
||||
instance.data["representations"].append(new_repre)
|
||||
|
||||
# Cleanup temp folder
|
||||
if convert_dir is not None and os.path.exists(convert_dir):
|
||||
shutil.rmtree(convert_dir)
|
||||
|
||||
def _get_filtered_repres(self, instance):
|
||||
filtered_repres = []
|
||||
repres = instance.data.get("representations") or []
|
||||
for repre in repres:
|
||||
self.log.debug(repre)
|
||||
tags = repre.get("tags") or []
|
||||
# Skip instance if already has thumbnail representation
|
||||
if "thumbnail" in tags:
|
||||
return []
|
||||
|
||||
if "review" not in tags:
|
||||
continue
|
||||
|
||||
if not repre.get("files"):
|
||||
self.log.info((
|
||||
"Representation \"{}\" don't have files. Skipping"
|
||||
).format(repre["name"]))
|
||||
continue
|
||||
|
||||
filtered_repres.append(repre)
|
||||
return filtered_repres
|
||||
|
|
@ -13,7 +13,7 @@ class ValidateWorkfileData(pyblish.api.ContextPlugin):
|
|||
targets = ["tvpaint_worker"]
|
||||
|
||||
def process(self, context):
|
||||
# Data collected in `CollectAvalonEntities`
|
||||
# Data collected in `CollectContextEntities`
|
||||
frame_start = context.data["frameStart"]
|
||||
frame_end = context.data["frameEnd"]
|
||||
handle_start = context.data["handleStart"]
|
||||
|
|
|
|||
|
|
@ -91,7 +91,7 @@ class AbstractAttrDefMeta(ABCMeta):
|
|||
|
||||
|
||||
@six.add_metaclass(AbstractAttrDefMeta)
|
||||
class AbtractAttrDef:
|
||||
class AbtractAttrDef(object):
|
||||
"""Abstraction of attribute definiton.
|
||||
|
||||
Each attribute definition must have implemented validation and
|
||||
|
|
@ -541,6 +541,13 @@ class FileDefItem(object):
|
|||
return ext
|
||||
return None
|
||||
|
||||
@property
|
||||
def lower_ext(self):
|
||||
ext = self.ext
|
||||
if ext is not None:
|
||||
return ext.lower()
|
||||
return ext
|
||||
|
||||
@property
|
||||
def is_dir(self):
|
||||
if self.is_empty:
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ XML_CHAR_REF_REGEX_HEX = re.compile(r"&#x?[0-9a-fA-F]+;")
|
|||
# Regex to parse array attributes
|
||||
ARRAY_TYPE_REGEX = re.compile(r"^(int|float|string)\[\d+\]$")
|
||||
|
||||
IMAGE_EXTENSIONS = [
|
||||
IMAGE_EXTENSIONS = {
|
||||
".ani", ".anim", ".apng", ".art", ".bmp", ".bpg", ".bsave", ".cal",
|
||||
".cin", ".cpc", ".cpt", ".dds", ".dpx", ".ecw", ".exr", ".fits",
|
||||
".flic", ".flif", ".fpx", ".gif", ".hdri", ".hevc", ".icer",
|
||||
|
|
@ -54,15 +54,15 @@ IMAGE_EXTENSIONS = [
|
|||
".rgbe", ".logluv", ".tiff", ".sgi", ".tga", ".tiff", ".tiff/ep",
|
||||
".tiff/it", ".ufo", ".ufp", ".wbmp", ".webp", ".xbm", ".xcf",
|
||||
".xpm", ".xwd"
|
||||
]
|
||||
}
|
||||
|
||||
VIDEO_EXTENSIONS = [
|
||||
VIDEO_EXTENSIONS = {
|
||||
".3g2", ".3gp", ".amv", ".asf", ".avi", ".drc", ".f4a", ".f4b",
|
||||
".f4p", ".f4v", ".flv", ".gif", ".gifv", ".m2v", ".m4p", ".m4v",
|
||||
".mkv", ".mng", ".mov", ".mp2", ".mp4", ".mpe", ".mpeg", ".mpg",
|
||||
".mpv", ".mxf", ".nsv", ".ogg", ".ogv", ".qt", ".rm", ".rmvb",
|
||||
".roq", ".svi", ".vob", ".webm", ".wmv", ".yuv"
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
def get_transcode_temp_directory():
|
||||
|
|
|
|||
|
|
@ -1,4 +1,14 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from .interfaces import (
|
||||
ILaunchHookPaths,
|
||||
IPluginPaths,
|
||||
ITrayModule,
|
||||
ITrayAction,
|
||||
ITrayService,
|
||||
ISettingsChangeListener,
|
||||
IHostAddon,
|
||||
)
|
||||
|
||||
from .base import (
|
||||
OpenPypeModule,
|
||||
OpenPypeAddOn,
|
||||
|
|
@ -17,6 +27,14 @@ from .base import (
|
|||
|
||||
|
||||
__all__ = (
|
||||
"ILaunchHookPaths",
|
||||
"IPluginPaths",
|
||||
"ITrayModule",
|
||||
"ITrayAction",
|
||||
"ITrayService",
|
||||
"ISettingsChangeListener",
|
||||
"IHostAddon",
|
||||
|
||||
"OpenPypeModule",
|
||||
"OpenPypeAddOn",
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import os
|
||||
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayModule
|
||||
from openpype.modules import OpenPypeModule, ITrayModule
|
||||
|
||||
|
||||
class AvalonModule(OpenPypeModule, ITrayModule):
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import logging
|
|||
import platform
|
||||
import threading
|
||||
import collections
|
||||
import traceback
|
||||
from uuid import uuid4
|
||||
from abc import ABCMeta, abstractmethod
|
||||
import six
|
||||
|
|
@ -139,6 +140,15 @@ class _InterfacesClass(_ModuleClass):
|
|||
"cannot import name '{}' from 'openpype_interfaces'"
|
||||
).format(attr_name))
|
||||
|
||||
if _LoadCache.interfaces_loaded and attr_name != "log":
|
||||
stack = list(traceback.extract_stack())
|
||||
stack.pop(-1)
|
||||
self.log.warning((
|
||||
"Using deprecated import of \"{}\" from 'openpype_interfaces'."
|
||||
" Please switch to use import"
|
||||
" from 'openpype.modules.interfaces'"
|
||||
" (will be removed after 3.16.x).{}"
|
||||
).format(attr_name, "".join(traceback.format_list(stack))))
|
||||
return self.__attributes__[attr_name]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -2,16 +2,17 @@ import os
|
|||
import threading
|
||||
import time
|
||||
|
||||
from openpype.modules import (
|
||||
OpenPypeModule,
|
||||
ITrayModule,
|
||||
IPluginPaths
|
||||
)
|
||||
|
||||
from .clockify_api import ClockifyAPI
|
||||
from .constants import (
|
||||
CLOCKIFY_FTRACK_USER_PATH,
|
||||
CLOCKIFY_FTRACK_SERVER_PATH
|
||||
)
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import (
|
||||
ITrayModule,
|
||||
IPluginPaths
|
||||
)
|
||||
|
||||
|
||||
class ClockifyModule(
|
||||
|
|
|
|||
|
|
@ -4,8 +4,7 @@ import six
|
|||
import sys
|
||||
|
||||
from openpype.lib import requests_get, Logger
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import IPluginPaths
|
||||
from openpype.modules import OpenPypeModule, IPluginPaths
|
||||
|
||||
|
||||
class DeadlineWebserviceError(Exception):
|
||||
|
|
|
|||
|
|
@ -457,9 +457,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
|
||||
cam = [c for c in cameras if c in col.head]
|
||||
if cam:
|
||||
subset_name = '{}_{}_{}'.format(group_name, cam, aov)
|
||||
if aov:
|
||||
subset_name = '{}_{}_{}'.format(group_name, cam, aov)
|
||||
else:
|
||||
subset_name = '{}_{}'.format(group_name, cam)
|
||||
else:
|
||||
subset_name = '{}_{}'.format(group_name, aov)
|
||||
if aov:
|
||||
subset_name = '{}_{}'.format(group_name, aov)
|
||||
else:
|
||||
subset_name = '{}'.format(group_name)
|
||||
|
||||
if isinstance(col, (list, tuple)):
|
||||
staging = os.path.dirname(col[0])
|
||||
|
|
|
|||
|
|
@ -7,7 +7,12 @@ import json
|
|||
import platform
|
||||
import uuid
|
||||
import re
|
||||
from Deadline.Scripting import RepositoryUtils, FileUtils, DirectoryUtils
|
||||
from Deadline.Scripting import (
|
||||
RepositoryUtils,
|
||||
FileUtils,
|
||||
DirectoryUtils,
|
||||
ProcessUtils,
|
||||
)
|
||||
|
||||
|
||||
def get_openpype_version_from_path(path, build=True):
|
||||
|
|
@ -162,9 +167,8 @@ def inject_openpype_environment(deadlinePlugin):
|
|||
print(">>> Temporary path: {}".format(export_url))
|
||||
|
||||
args = [
|
||||
exe,
|
||||
"--headless",
|
||||
'extractenvironments',
|
||||
"extractenvironments",
|
||||
export_url
|
||||
]
|
||||
|
||||
|
|
@ -188,15 +192,18 @@ def inject_openpype_environment(deadlinePlugin):
|
|||
if not os.environ.get("OPENPYPE_MONGO"):
|
||||
print(">>> Missing OPENPYPE_MONGO env var, process won't work")
|
||||
|
||||
env = os.environ
|
||||
env["OPENPYPE_HEADLESS_MODE"] = "1"
|
||||
env["AVALON_TIMEOUT"] = "5000"
|
||||
os.environ["AVALON_TIMEOUT"] = "5000"
|
||||
|
||||
print(">>> Executing: {}".format(" ".join(args)))
|
||||
std_output = subprocess.check_output(args,
|
||||
cwd=os.path.dirname(exe),
|
||||
env=env)
|
||||
print(">>> Process result {}".format(std_output))
|
||||
args_str = subprocess.list2cmdline(args)
|
||||
print(">>> Executing: {} {}".format(exe, args_str))
|
||||
process = ProcessUtils.SpawnProcess(
|
||||
exe, args_str, os.path.dirname(exe)
|
||||
)
|
||||
ProcessUtils.WaitForExit(process, -1)
|
||||
if process.ExitCode != 0:
|
||||
raise RuntimeError(
|
||||
"Failed to run OpenPype process to extract environments."
|
||||
)
|
||||
|
||||
print(">>> Loading file ...")
|
||||
with open(export_url) as fp:
|
||||
|
|
|
|||
|
|
@ -13,10 +13,7 @@ import click
|
|||
from openpype.modules import (
|
||||
JsonFilesSettingsDef,
|
||||
OpenPypeAddOn,
|
||||
ModulesManager
|
||||
)
|
||||
# Import interface defined by this addon to be able find other addons using it
|
||||
from openpype_interfaces import (
|
||||
ModulesManager,
|
||||
IPluginPaths,
|
||||
ITrayAction
|
||||
)
|
||||
|
|
|
|||
|
|
@ -5,8 +5,8 @@ import platform
|
|||
|
||||
import click
|
||||
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import (
|
||||
from openpype.modules import (
|
||||
OpenPypeModule,
|
||||
ITrayModule,
|
||||
IPluginPaths,
|
||||
ISettingsChangeListener
|
||||
|
|
|
|||
|
|
@ -3,8 +3,11 @@
|
|||
import click
|
||||
import os
|
||||
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import IPluginPaths, ITrayAction
|
||||
from openpype.modules import (
|
||||
OpenPypeModule,
|
||||
IPluginPaths,
|
||||
ITrayAction,
|
||||
)
|
||||
|
||||
|
||||
class KitsuModule(OpenPypeModule, IPluginPaths, ITrayAction):
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
import os
import re

import pyblish.api


class CollectKitsuUsername(pyblish.api.ContextPlugin):
    """Collect Kitsu username from the kitsu login"""

    order = pyblish.api.CollectorOrder + 0.499
    label = "Kitsu username"

    def process(self, context):
        kitsu_login = os.environ.get('KITSU_LOGIN')

        if not kitsu_login:
            return

        kitsu_username = kitsu_login.split("@")[0].replace('.', ' ')
        new_username = re.sub('[^a-zA-Z]', ' ', kitsu_username).title()

        for instance in context:
            # Don't override customData if it already exists
            if 'customData' not in instance.data:
                instance.data['customData'] = {}

            instance.data['customData']["kitsuUsername"] = new_username
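For example, with the transformation above an invented login such as jane.doe-smith@studio.tld would be collected as follows.

import re

kitsu_login = "jane.doe-smith@studio.tld"  # illustrative value of KITSU_LOGIN
kitsu_username = kitsu_login.split("@")[0].replace('.', ' ')     # "jane doe-smith"
new_username = re.sub('[^a-zA-Z]', ' ', kitsu_username).title()  # "Jane Doe Smith"
print(new_username)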
|
@ -1,5 +1,7 @@
|
|||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayAction
|
||||
from openpype.modules import (
|
||||
OpenPypeModule,
|
||||
ITrayAction,
|
||||
)
|
||||
|
||||
|
||||
class LauncherAction(OpenPypeModule, ITrayAction):
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayModule
|
||||
from openpype.modules import OpenPypeModule, ITrayModule
|
||||
|
||||
|
||||
class LogViewModule(OpenPypeModule, ITrayModule):
|
||||
|
|
|
|||
|
|
@ -2,8 +2,7 @@ import os
|
|||
import json
|
||||
import appdirs
|
||||
import requests
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayModule
|
||||
from openpype.modules import OpenPypeModule, ITrayModule
|
||||
|
||||
|
||||
class MusterModule(OpenPypeModule, ITrayModule):
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayAction
|
||||
from openpype.modules import OpenPypeModule, ITrayAction
|
||||
|
||||
|
||||
class ProjectManagerAction(OpenPypeModule, ITrayAction):
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayAction
|
||||
from openpype.modules import OpenPypeModule, ITrayAction
|
||||
|
||||
|
||||
class PythonInterpreterAction(OpenPypeModule, ITrayAction):
|
||||
|
|
|
|||
|
|
@ -2,8 +2,7 @@
|
|||
"""Module providing support for Royal Render."""
|
||||
import os
|
||||
import openpype.modules
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import IPluginPaths
|
||||
from openpype.modules import OpenPypeModule, IPluginPaths
|
||||
|
||||
|
||||
class RoyalRenderModule(OpenPypeModule, IPluginPaths):
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayAction
|
||||
from openpype.modules import OpenPypeModule, ITrayAction
|
||||
|
||||
|
||||
class SettingsAction(OpenPypeModule, ITrayAction):
|
||||
|
|
|
|||
|
|
@ -1,12 +1,11 @@
|
|||
import os
|
||||
|
||||
from openpype_interfaces import (
|
||||
from openpype.modules import (
|
||||
OpenPypeModule,
|
||||
ITrayModule,
|
||||
IPluginPaths,
|
||||
)
|
||||
|
||||
from openpype.modules import OpenPypeModule
|
||||
|
||||
SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype.modules.interfaces import IPluginPaths
|
||||
from openpype.modules import OpenPypeModule, IPluginPaths
|
||||
|
||||
SLACK_MODULE_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
|
|
|||
37
openpype/modules/sync_server/rest_api.py
Normal file
37
openpype/modules/sync_server/rest_api.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
from aiohttp.web_response import Response
|
||||
from openpype.lib import Logger
|
||||
|
||||
|
||||
class SyncServerModuleRestApi:
|
||||
"""
|
||||
REST API endpoint used for calling from hosts when context change
|
||||
happens in Workfile app.
|
||||
"""
|
||||
|
||||
def __init__(self, user_module, server_manager):
|
||||
self._log = None
|
||||
self.module = user_module
|
||||
self.server_manager = server_manager
|
||||
|
||||
self.prefix = "/sync_server"
|
||||
|
||||
self.register()
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
def register(self):
|
||||
self.server_manager.add_route(
|
||||
"POST",
|
||||
self.prefix + "/reset_timer",
|
||||
self.reset_timer,
|
||||
)
|
||||
|
||||
async def reset_timer(self, _request):
|
||||
"""Force timer to run immediately."""
|
||||
self.module.reset_timer()
|
||||
|
||||
return Response(status=200)
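For orientation, a minimal sketch of how a host process could call this new route; it assumes the tray webserver is running and that OPENPYPE_WEBSERVER_URL points to it (both names come from the module code later in this diff, the snippet itself is only illustrative):

import os
import requests

webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL")
if webserver_url:
    # Empty POST body; the endpoint only triggers the timer reset
    response = requests.post(webserver_url + "/sync_server/reset_timer")
    print(response.status_code)  # 200 when the reset was triggered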
|
||||
|
|
@ -236,6 +236,7 @@ class SyncServerThread(threading.Thread):
|
|||
"""
|
||||
def __init__(self, module):
|
||||
self.log = Logger.get_logger(self.__class__.__name__)
|
||||
|
||||
super(SyncServerThread, self).__init__()
|
||||
self.module = module
|
||||
self.loop = None
|
||||
|
|
|
|||
|
|
@ -11,9 +11,12 @@ from collections import deque, defaultdict
|
|||
import click
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
from openpype.client import get_projects
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayModule
|
||||
from openpype.client import (
|
||||
get_projects,
|
||||
get_representations,
|
||||
get_representation_by_id,
|
||||
)
|
||||
from openpype.modules import OpenPypeModule, ITrayModule
|
||||
from openpype.settings import (
|
||||
get_project_settings,
|
||||
get_system_settings,
|
||||
|
|
@ -30,9 +33,6 @@ from .providers import lib
|
|||
|
||||
from .utils import time_function, SyncStatus, SiteAlreadyPresentError
|
||||
|
||||
from openpype.client import get_representations, get_representation_by_id
|
||||
|
||||
|
||||
log = Logger.get_logger("SyncServer")
|
||||
|
||||
|
||||
|
|
@ -136,14 +136,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
|
||||
""" Start of Public API """
|
||||
def add_site(self, project_name, representation_id, site_name=None,
|
||||
force=False):
|
||||
force=False, priority=None, reset_timer=False):
|
||||
"""
|
||||
Adds new site to representation to be synced.
|
||||
|
||||
'project_name' must have synchronization enabled (globally or
|
||||
project only)
|
||||
|
||||
Used as a API endpoint from outside applications (Loader etc).
|
||||
Used as an API endpoint from outside applications (Loader etc).
|
||||
|
||||
Use 'force' to reset existing site.
|
||||
|
||||
|
|
@ -152,6 +152,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
representation_id (string): MongoDB _id value
|
||||
site_name (string): name of configured and active site
|
||||
force (bool): reset site if exists
|
||||
priority (int): set priority
|
||||
reset_timer (bool): if delay timer should be reset, e.g. when a user
marks a representation to be synced manually
|
||||
|
||||
Throws:
|
||||
SiteAlreadyPresentError - if adding already existing site and
|
||||
|
|
@ -167,7 +170,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
self.reset_site_on_representation(project_name,
|
||||
representation_id,
|
||||
site_name=site_name,
|
||||
force=force)
|
||||
force=force,
|
||||
priority=priority)
|
||||
|
||||
if reset_timer:
|
||||
self.reset_timer()
|
||||
|
||||
def remove_site(self, project_name, representation_id, site_name,
|
||||
remove_local_files=False):
|
||||
|
|
@ -911,7 +918,59 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
|
||||
In case of user's involvement (reset site), start that right away.
|
||||
"""
|
||||
self.sync_server_thread.reset_timer()
|
||||
|
||||
if not self.enabled:
|
||||
return
|
||||
|
||||
if self.sync_server_thread is None:
|
||||
self._reset_timer_with_rest_api()
|
||||
else:
|
||||
self.sync_server_thread.reset_timer()
|
||||
|
||||
def is_representation_on_site(
|
||||
self, project_name, representation_id, site_name
|
||||
):
|
||||
"""Checks if 'representation_id' has all files avail. on 'site_name'"""
|
||||
representation = get_representation_by_id(project_name,
|
||||
representation_id,
|
||||
fields=["_id", "files"])
|
||||
if not representation:
|
||||
return False
|
||||
|
||||
on_site = False
|
||||
for file_info in representation.get("files", []):
|
||||
for site in file_info.get("sites", []):
|
||||
if site["name"] != site_name:
|
||||
continue
|
||||
|
||||
if (site.get("progress") or site.get("error") or
|
||||
not site.get("created_dt")):
|
||||
return False
|
||||
on_site = True
|
||||
|
||||
return on_site
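A hedged usage sketch for this new helper, assuming a configured sync server module; the project name, representation id and site name below are placeholders:

from openpype.modules import ModulesManager

manager = ModulesManager()
sync_server = manager.modules_by_name["sync_server"]

# True only when every file of the representation is present on the site
if sync_server.is_representation_on_site(
    "my_project", "633c1b8f9d5b2c6f1a2e4d7b", "studio"
):
    print("Representation is fully available on 'studio'.")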
|
||||
|
||||
def _reset_timer_with_rest_api(self):
|
||||
# POST to webserver sites to add to representations
|
||||
webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL")
|
||||
if not webserver_url:
|
||||
self.log.warning("Couldn't find webserver url")
|
||||
return
|
||||
|
||||
rest_api_url = "{}/sync_server/reset_timer".format(
|
||||
webserver_url
|
||||
)
|
||||
|
||||
try:
|
||||
import requests
|
||||
except Exception:
|
||||
self.log.warning(
|
||||
"Couldn't add sites to representations "
|
||||
"('requests' is not available)"
|
||||
)
|
||||
return
|
||||
|
||||
requests.post(rest_api_url)
|
||||
|
||||
def get_enabled_projects(self):
|
||||
"""Returns list of projects which have SyncServer enabled."""
|
||||
|
|
@ -1544,12 +1603,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
Args:
|
||||
project_name (string): name of project - force to db connection as
|
||||
each file might come from different collection
|
||||
new_file_id (string):
|
||||
new_file_id (string): only present if file synced successfully
|
||||
file (dictionary): info about processed file (pulled from DB)
|
||||
representation (dictionary): parent repr of file (from DB)
|
||||
site (string): label ('gdrive', 'S3')
|
||||
error (string): exception message
|
||||
progress (float): 0-1 of progress of upload/download
|
||||
progress (float): 0-0.99 of progress of upload/download
|
||||
priority (int): 0-100 set priority
|
||||
|
||||
Returns:
|
||||
|
|
@ -1655,7 +1714,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
|
||||
def reset_site_on_representation(self, project_name, representation_id,
|
||||
side=None, file_id=None, site_name=None,
|
||||
remove=False, pause=None, force=False):
|
||||
remove=False, pause=None, force=False,
|
||||
priority=None):
|
||||
"""
|
||||
Reset information about synchronization for particular 'file_id'
|
||||
and provider.
|
||||
|
|
@ -1678,6 +1738,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
remove (bool): if True remove site altogether
|
||||
pause (bool or None): if True - pause, False - unpause
|
||||
force (bool): hard reset - currently only for add_site
|
||||
priority (int): set priority
|
||||
|
||||
Raises:
|
||||
SiteAlreadyPresentError - if adding already existing site and
|
||||
|
|
@ -1705,6 +1766,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
|
||||
elem = {"name": site_name}
|
||||
|
||||
# Add priority
|
||||
if priority:
|
||||
elem["priority"] = priority
|
||||
|
||||
if file_id: # reset site for particular file
|
||||
self._reset_site_for_file(project_name, representation_id,
|
||||
elem, file_id, site_name)
|
||||
|
|
@ -2089,6 +2154,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
def cli(self, click_group):
|
||||
click_group.add_command(cli_main)
|
||||
|
||||
# Webserver module implementation
|
||||
def webserver_initialization(self, server_manager):
|
||||
"""Add routes for syncs."""
|
||||
if self.tray_initialized:
|
||||
from .rest_api import SyncServerModuleRestApi
|
||||
self.rest_api_obj = SyncServerModuleRestApi(
|
||||
self, server_manager
|
||||
)
|
||||
|
||||
|
||||
@click.group(SyncServerModule.name, help="SyncServer module related commands.")
|
||||
def cli_main():
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ class TimersManagerModuleRestApi:
|
|||
@property
|
||||
def log(self):
|
||||
if self._log is None:
|
||||
self._log = Logger.get_logger(self.__ckass__.__name__)
|
||||
self._log = Logger.get_logger(self.__class__.__name__)
|
||||
return self._log
|
||||
|
||||
def register(self):
|
||||
|
|
|
|||
|
|
@ -3,8 +3,8 @@ import platform
|
|||
|
||||
|
||||
from openpype.client import get_asset_by_name
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import (
|
||||
from openpype.modules import (
|
||||
OpenPypeModule,
|
||||
ITrayService,
|
||||
IPluginPaths
|
||||
)
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ import logging
|
|||
from concurrent.futures import CancelledError
|
||||
from Qt import QtWidgets
|
||||
|
||||
from openpype_interfaces import ITrayService
|
||||
from openpype.modules import ITrayService
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
|||
|
|
@ -24,8 +24,7 @@ import os
|
|||
import socket
|
||||
|
||||
from openpype import resources
|
||||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import ITrayService
|
||||
from openpype.modules import OpenPypeModule, ITrayService
|
||||
|
||||
|
||||
class WebServerModule(OpenPypeModule, ITrayService):
|
||||
|
|
|
|||
|
|
@ -85,6 +85,7 @@ from .context_tools import (
|
|||
register_host,
|
||||
registered_host,
|
||||
deregister_host,
|
||||
get_process_id,
|
||||
)
|
||||
install = install_host
|
||||
uninstall = uninstall_host
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import json
|
|||
import types
|
||||
import logging
|
||||
import platform
|
||||
import uuid
|
||||
|
||||
import pyblish.api
|
||||
from pyblish.lib import MessageHandler
|
||||
|
|
@ -37,6 +38,7 @@ from . import (
|
|||
|
||||
|
||||
_is_installed = False
|
||||
_process_id = None
|
||||
_registered_root = {"_": ""}
|
||||
_registered_host = {"_": None}
|
||||
# Keep modules manager (and its modules) in memory
|
||||
|
|
@ -546,3 +548,18 @@ def change_current_context(asset_doc, task_name, template_key=None):
|
|||
emit_event("taskChanged", data)
|
||||
|
||||
return changes
|
||||
|
||||
|
||||
def get_process_id():
|
||||
"""Fake process id created on demand using uuid.
|
||||
|
||||
Can be used to create process specific folders in temp directory.
|
||||
|
||||
Returns:
|
||||
str: Process id.
|
||||
"""
|
||||
|
||||
global _process_id
|
||||
if _process_id is None:
|
||||
_process_id = str(uuid.uuid4())
|
||||
return _process_id
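A small sketch of the use case hinted at in the docstring, creating a process specific temp folder; the directory name is purely illustrative:

import os
import tempfile

from openpype.pipeline import get_process_id

process_dir = os.path.join(
    tempfile.gettempdir(), "openpype_{}".format(get_process_id())
)
if not os.path.exists(process_dir):
    os.makedirs(process_dir)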
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
from .constants import (
|
||||
SUBSET_NAME_ALLOWED_SYMBOLS,
|
||||
DEFAULT_SUBSET_TEMPLATE,
|
||||
PRE_CREATE_THUMBNAIL_KEY,
|
||||
)
|
||||
|
||||
from .subset_name import (
|
||||
|
|
@ -24,6 +25,8 @@ from .creator_plugins import (
|
|||
deregister_creator_plugin,
|
||||
register_creator_plugin_path,
|
||||
deregister_creator_plugin_path,
|
||||
|
||||
cache_and_get_instances,
|
||||
)
|
||||
|
||||
from .context import (
|
||||
|
|
@ -40,6 +43,7 @@ from .legacy_create import (
|
|||
__all__ = (
|
||||
"SUBSET_NAME_ALLOWED_SYMBOLS",
|
||||
"DEFAULT_SUBSET_TEMPLATE",
|
||||
"PRE_CREATE_THUMBNAIL_KEY",
|
||||
|
||||
"TaskNotSetError",
|
||||
"get_subset_name",
|
||||
|
|
|
|||
|
|
@ -1,8 +1,10 @@
|
|||
SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_."
|
||||
DEFAULT_SUBSET_TEMPLATE = "{family}{Variant}"
|
||||
PRE_CREATE_THUMBNAIL_KEY = "thumbnail_source"
|
||||
|
||||
|
||||
__all__ = (
|
||||
"SUBSET_NAME_ALLOWED_SYMBOLS",
|
||||
"DEFAULT_SUBSET_TEMPLATE",
|
||||
"PRE_CREATE_THUMBNAIL_KEY",
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1077,6 +1077,8 @@ class CreateContext:
|
|||
# Shared data across creators during collection phase
|
||||
self._collection_shared_data = None
|
||||
|
||||
self.thumbnail_paths_by_instance_id = {}
|
||||
|
||||
# Trigger reset if was enabled
|
||||
if reset:
|
||||
self.reset(discover_publish_plugins)
|
||||
|
|
@ -1146,6 +1148,29 @@ class CreateContext:
|
|||
|
||||
self.reset_finalization()
|
||||
|
||||
def refresh_thumbnails(self):
|
||||
"""Cleanup thumbnail paths.
|
||||
|
||||
Remove all thumbnail filepaths that are empty, lead to files which
do not exist, or belong to instances that are not available anymore.
|
||||
"""
|
||||
|
||||
invalid = set()
|
||||
for instance_id, path in self.thumbnail_paths_by_instance_id.items():
|
||||
instance_available = True
|
||||
if instance_id is not None:
|
||||
instance_available = instance_id in self._instances_by_id
|
||||
|
||||
if (
|
||||
not instance_available
|
||||
or not path
|
||||
or not os.path.exists(path)
|
||||
):
|
||||
invalid.add(instance_id)
|
||||
|
||||
for instance_id in invalid:
|
||||
self.thumbnail_paths_by_instance_id.pop(instance_id)
|
||||
|
||||
def reset_preparation(self):
|
||||
"""Prepare attributes that must be prepared/cleaned before reset."""
|
||||
|
||||
|
|
@ -1157,6 +1182,7 @@ class CreateContext:
|
|||
|
||||
# Stop access to collection shared data
|
||||
self._collection_shared_data = None
|
||||
self.refresh_thumbnails()
|
||||
|
||||
def reset_avalon_context(self):
|
||||
"""Give ability to reset avalon context.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import os
|
||||
import copy
|
||||
import collections
|
||||
|
||||
from abc import (
|
||||
ABCMeta,
|
||||
|
|
@ -442,6 +443,13 @@ class BaseCreator:
|
|||
|
||||
return self.create_context.collection_shared_data
|
||||
|
||||
def set_instance_thumbnail_path(self, instance_id, thumbnail_path=None):
|
||||
"""Set path to thumbnail for instance."""
|
||||
|
||||
self.create_context.thumbnail_paths_by_instance_id[instance_id] = (
|
||||
thumbnail_path
|
||||
)
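A hedged sketch of a creator opting into the new thumbnail support; everything except the imported names is illustrative and host specific:

from openpype.pipeline.create import (
    Creator,
    CreatedInstance,
    PRE_CREATE_THUMBNAIL_KEY,
)


class ExampleCreator(Creator):
    identifier = "example"
    family = "example"
    create_allow_thumbnail = True

    def create(self, subset_name, instance_data, pre_create_data):
        # Thumbnail path is sent with pre-create data only when the
        # creator allows thumbnails
        thumbnail_path = pre_create_data.pop(PRE_CREATE_THUMBNAIL_KEY, None)
        instance = CreatedInstance(
            self.family, subset_name, instance_data, self
        )
        self._add_instance_to_context(instance)
        if thumbnail_path:
            self.set_instance_thumbnail_path(instance.id, thumbnail_path)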
|
||||
|
||||
|
||||
class Creator(BaseCreator):
|
||||
"""Creator that has more information for artist to show in UI.
|
||||
|
|
@ -468,6 +476,13 @@ class Creator(BaseCreator):
|
|||
# - in some cases it may confuse artists because it would not be used
|
||||
# e.g. for bulk creators
|
||||
create_allow_context_change = True
|
||||
# A thumbnail can be passed in precreate attributes
|
||||
# - if set to True it should expect that a thumbnail path under key
|
||||
# PRE_CREATE_THUMBNAIL_KEY can be sent in data with precreate data
|
||||
# - is disabled by default because the feature was added in later stages
|
||||
# and creators who would not expect PRE_CREATE_THUMBNAIL_KEY could
|
||||
# cause issues with instance data
|
||||
create_allow_thumbnail = False
|
||||
|
||||
# Precreate attribute definitions showed before creation
|
||||
# - similar to instance attribute definitions
|
||||
|
|
@ -660,3 +675,34 @@ def deregister_creator_plugin_path(path):
|
|||
deregister_plugin_path(BaseCreator, path)
|
||||
deregister_plugin_path(LegacyCreator, path)
|
||||
deregister_plugin_path(SubsetConvertorPlugin, path)
|
||||
|
||||
|
||||
def cache_and_get_instances(creator, shared_key, list_instances_func):
|
||||
"""Common approach to cache instances in shared data.
|
||||
|
||||
This is a helper function which does not handle cases when a 'shared_key'
is used for different list-instances functions. The same approach of caching
instances into 'collection_shared_data' is not required, but it is so common
that we've decided to unify it to some degree.
|
||||
|
||||
Function 'list_instances_func' is called only if 'shared_key' is not
|
||||
available in 'collection_shared_data' on creator.
|
||||
|
||||
Args:
|
||||
creator (Creator): Plugin which would like to get instance data.
|
||||
shared_key (str): Key under which output of function will be stored.
|
||||
list_instances_func (Function): Function that will return instance data
|
||||
if data were not yet stored under 'shared_key'.
|
||||
|
||||
Returns:
|
||||
Dict[str, Dict[str, Any]]: Cached instances by creator identifier from
|
||||
result of passed function.
|
||||
"""
|
||||
|
||||
if shared_key not in creator.collection_shared_data:
|
||||
value = collections.defaultdict(list)
|
||||
for instance in list_instances_func():
|
||||
identifier = instance.get("creator_identifier")
|
||||
value[identifier].append(instance)
|
||||
creator.collection_shared_data[shared_key] = value
|
||||
return creator.collection_shared_data[shared_key]
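A minimal sketch of the intended call site inside a host creator's 'collect_instances'; 'list_instances' stands in for whatever host specific helper returns raw instance dictionaries, so the stub below is an assumption:

from openpype.pipeline.create import (
    Creator,
    CreatedInstance,
    cache_and_get_instances,
)

SHARED_DATA_KEY = "example.host.instances"


def list_instances():
    # Host specific - would query the scene / workfile for stored instances
    return []


class ExampleHostCreator(Creator):
    identifier = "example_host"
    family = "example"

    def collect_instances(self):
        cached = cache_and_get_instances(
            self, SHARED_DATA_KEY, list_instances
        )
        # Only data created by this plugin is turned into instances
        for instance_data in cached[self.identifier]:
            instance = CreatedInstance.from_existing(instance_data, self)
            self._add_instance_to_context(instance)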
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
import os
|
||||
import json
|
||||
from uuid import uuid4
|
||||
from openpype.lib import Logger, filter_profiles
|
||||
from openpype.lib.pype_info import get_workstation_info
|
||||
from openpype.settings import get_project_settings
|
||||
from openpype.pipeline import get_process_id
|
||||
|
||||
|
||||
def _read_lock_file(lock_filepath):
|
||||
|
|
@ -37,7 +37,7 @@ def is_workfile_locked_for_current_process(filepath):
|
|||
|
||||
lock_filepath = _get_lock_file(filepath)
|
||||
data = _read_lock_file(lock_filepath)
|
||||
return data["process_id"] == _get_process_id()
|
||||
return data["process_id"] == get_process_id()
|
||||
|
||||
|
||||
def delete_workfile_lock(filepath):
|
||||
|
|
@ -49,7 +49,7 @@ def delete_workfile_lock(filepath):
|
|||
def create_workfile_lock(filepath):
|
||||
lock_filepath = _get_lock_file(filepath)
|
||||
info = get_workstation_info()
|
||||
info["process_id"] = _get_process_id()
|
||||
info["process_id"] = get_process_id()
|
||||
with open(lock_filepath, "w") as stream:
|
||||
json.dump(info, stream)
|
||||
|
||||
|
|
@ -59,14 +59,6 @@ def remove_workfile_lock(filepath):
|
|||
delete_workfile_lock(filepath)
|
||||
|
||||
|
||||
def _get_process_id():
|
||||
process_id = os.environ.get("OPENPYPE_PROCESS_ID")
|
||||
if not process_id:
|
||||
process_id = str(uuid4())
|
||||
os.environ["OPENPYPE_PROCESS_ID"] = process_id
|
||||
return process_id
|
||||
|
||||
|
||||
def is_workfile_lock_enabled(host_name, project_name, project_setting=None):
|
||||
if project_setting is None:
|
||||
project_setting = get_project_settings(project_name)
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@ Provides:
|
|||
import json
|
||||
import pyblish.api
|
||||
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.pipeline.template_data import get_template_data
|
||||
|
||||
|
||||
|
|
@ -53,7 +52,7 @@ class CollectAnatomyContextData(pyblish.api.ContextPlugin):
|
|||
asset_entity = context.data.get("assetEntity")
|
||||
task_name = None
|
||||
if asset_entity:
|
||||
task_name = legacy_io.Session["AVALON_TASK"]
|
||||
task_name = context.data["task"]
|
||||
|
||||
anatomy_data = get_template_data(
|
||||
project_entity, asset_entity, task_name, host_name, system_settings
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
Requires:
|
||||
session -> AVALON_ASSET
|
||||
context -> projectName
|
||||
context -> asset
|
||||
context -> task
|
||||
|
||||
Provides:
|
||||
context -> projectEntity - Project document from database.
|
||||
|
|
@ -13,20 +15,19 @@ Provides:
|
|||
import pyblish.api
|
||||
|
||||
from openpype.client import get_project, get_asset_by_name
|
||||
from openpype.pipeline import legacy_io, KnownPublishError
|
||||
from openpype.pipeline import KnownPublishError
|
||||
|
||||
|
||||
class CollectAvalonEntities(pyblish.api.ContextPlugin):
|
||||
"""Collect Anatomy into Context."""
|
||||
class CollectContextEntities(pyblish.api.ContextPlugin):
|
||||
"""Collect entities into Context."""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.1
|
||||
label = "Collect Avalon Entities"
|
||||
label = "Collect Context Entities"
|
||||
|
||||
def process(self, context):
|
||||
legacy_io.install()
|
||||
project_name = context.data["projectName"]
|
||||
asset_name = legacy_io.Session["AVALON_ASSET"]
|
||||
task_name = legacy_io.Session["AVALON_TASK"]
|
||||
asset_name = context.data["asset"]
|
||||
task_name = context.data["task"]
|
||||
|
||||
project_entity = get_project(project_name)
|
||||
if not project_entity:
|
||||
|
|
@ -19,14 +19,28 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
|
|||
if not create_context:
|
||||
return
|
||||
|
||||
thumbnail_paths_by_instance_id = (
|
||||
create_context.thumbnail_paths_by_instance_id
|
||||
)
|
||||
context.data["thumbnailSource"] = (
|
||||
thumbnail_paths_by_instance_id.get(None)
|
||||
)
|
||||
|
||||
project_name = create_context.project_name
|
||||
if project_name:
|
||||
context.data["projectName"] = project_name
|
||||
|
||||
for created_instance in create_context.instances:
|
||||
instance_data = created_instance.data_to_store()
|
||||
if instance_data["active"]:
|
||||
thumbnail_path = thumbnail_paths_by_instance_id.get(
|
||||
created_instance.id
|
||||
)
|
||||
self.create_instance(
|
||||
context, instance_data, created_instance.transient_data
|
||||
context,
|
||||
instance_data,
|
||||
created_instance.transient_data,
|
||||
thumbnail_path
|
||||
)
|
||||
|
||||
# Update global data to context
|
||||
|
|
@ -39,7 +53,13 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
|
|||
legacy_io.Session[key] = value
|
||||
os.environ[key] = value
|
||||
|
||||
def create_instance(self, context, in_data, transient_data):
|
||||
def create_instance(
|
||||
self,
|
||||
context,
|
||||
in_data,
|
||||
transient_data,
|
||||
thumbnail_path
|
||||
):
|
||||
subset = in_data["subset"]
|
||||
# If instance data already contains families then use them
|
||||
instance_families = in_data.get("families") or []
|
||||
|
|
@ -53,7 +73,8 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
|
|||
"name": subset,
|
||||
"family": in_data["family"],
|
||||
"families": instance_families,
|
||||
"representations": []
|
||||
"representations": [],
|
||||
"thumbnailSource": thumbnail_path
|
||||
})
|
||||
for key, value in in_data.items():
|
||||
if key not in instance.data:
|
||||
|
|
|
|||
|
|
@ -3,26 +3,26 @@ import re
|
|||
import copy
|
||||
import json
|
||||
import shutil
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
import six
|
||||
|
||||
import clique
|
||||
|
||||
import speedcopy
|
||||
import pyblish.api
|
||||
|
||||
from openpype.lib import (
|
||||
get_ffmpeg_tool_path,
|
||||
get_ffprobe_streams,
|
||||
|
||||
path_to_subprocess_arg,
|
||||
run_subprocess,
|
||||
|
||||
)
|
||||
from openpype.lib.transcoding import (
|
||||
IMAGE_EXTENSIONS,
|
||||
get_ffprobe_streams,
|
||||
should_convert_for_ffmpeg,
|
||||
convert_input_paths_for_ffmpeg,
|
||||
get_transcode_temp_directory
|
||||
get_transcode_temp_directory,
|
||||
)
|
||||
import speedcopy
|
||||
|
||||
|
||||
class ExtractReview(pyblish.api.InstancePlugin):
|
||||
|
|
@ -175,6 +175,26 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
outputs_per_representations.append((repre, outputs))
|
||||
return outputs_per_representations
|
||||
|
||||
def _single_frame_filter(self, input_filepaths, output_defs):
|
||||
single_frame_image = False
|
||||
if len(input_filepaths) == 1:
|
||||
ext = os.path.splitext(input_filepaths[0])[-1]
|
||||
single_frame_image = ext in IMAGE_EXTENSIONS
|
||||
|
||||
filtered_defs = []
|
||||
for output_def in output_defs:
|
||||
output_filters = output_def.get("filter") or {}
|
||||
frame_filter = output_filters.get("single_frame_filter")
|
||||
if (
|
||||
(not single_frame_image and frame_filter == "single_frame")
|
||||
or (single_frame_image and frame_filter == "multi_frame")
|
||||
):
|
||||
continue
|
||||
|
||||
filtered_defs.append(output_def)
|
||||
|
||||
return filtered_defs
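To make the behaviour concrete, here is a standalone restatement (not the plugin code) of the decision rule that the new 'single_frame_filter' value applies to each output definition:

def keep_output(single_frame_image, frame_filter):
    """Return True when an output definition should stay in the list."""
    if frame_filter == "single_frame":
        return single_frame_image
    if frame_filter == "multi_frame":
        return not single_frame_image
    # "everytime" or no filter keeps the output unconditionally
    return True


print(keep_output(True, "single_frame"))   # True  - still image input
print(keep_output(True, "multi_frame"))    # False - still image input
print(keep_output(False, "multi_frame"))   # True  - sequence or video input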
|
||||
|
||||
@staticmethod
|
||||
def get_instance_label(instance):
|
||||
return (
|
||||
|
|
@ -195,7 +215,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
outputs_per_repres = self._get_outputs_per_representations(
|
||||
instance, profile_outputs
|
||||
)
|
||||
for repre, outpu_defs in outputs_per_repres:
|
||||
for repre, output_defs in outputs_per_repres:
|
||||
# Check if input should be preconverted before processing
|
||||
# Store original staging dir (its value may change)
|
||||
src_repre_staging_dir = repre["stagingDir"]
|
||||
|
|
@ -216,6 +236,16 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
if first_input_path is None:
|
||||
first_input_path = filepath
|
||||
|
||||
filtered_output_defs = self._single_frame_filter(
|
||||
input_filepaths, output_defs
|
||||
)
|
||||
if not filtered_output_defs:
|
||||
self.log.debug((
|
||||
"Repre: {} - All output definitions were filtered"
|
||||
" out by single frame filter. Skipping"
|
||||
).format(repre["name"]))
|
||||
continue
|
||||
|
||||
# Skip if file is not set
|
||||
if first_input_path is None:
|
||||
self.log.warning((
|
||||
|
|
@ -249,7 +279,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
|
||||
try:
|
||||
self._render_output_definitions(
|
||||
instance, repre, src_repre_staging_dir, outpu_defs
|
||||
instance,
|
||||
repre,
|
||||
src_repre_staging_dir,
|
||||
filtered_output_defs
|
||||
)
|
||||
|
||||
finally:
|
||||
|
|
@ -263,10 +296,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
shutil.rmtree(new_staging_dir)
|
||||
|
||||
def _render_output_definitions(
|
||||
self, instance, repre, src_repre_staging_dir, outpu_defs
|
||||
self, instance, repre, src_repre_staging_dir, output_defs
|
||||
):
|
||||
fill_data = copy.deepcopy(instance.data["anatomyData"])
|
||||
for _output_def in outpu_defs:
|
||||
for _output_def in output_defs:
|
||||
output_def = copy.deepcopy(_output_def)
|
||||
# Make sure output definition has "tags" key
|
||||
if "tags" not in output_def:
|
||||
|
|
@ -1659,9 +1692,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
return True
|
||||
return False
|
||||
|
||||
def filter_output_defs(
|
||||
self, profile, subset_name, families
|
||||
):
|
||||
def filter_output_defs(self, profile, subset_name, families):
|
||||
"""Return outputs matching input instance families.
|
||||
|
||||
Output definitions without families filter are marked as valid.
|
||||
|
|
|
|||
|
|
@ -34,28 +34,55 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
|||
label = "Extract Thumbnail (from source)"
|
||||
# Before 'ExtractThumbnail' in global plugins
|
||||
order = pyblish.api.ExtractorOrder - 0.00001
|
||||
hosts = ["traypublisher"]
|
||||
|
||||
def process(self, instance):
|
||||
self._create_context_thumbnail(instance.context)
|
||||
|
||||
subset_name = instance.data["subset"]
|
||||
self.log.info(
|
||||
"Processing instance with subset name {}".format(subset_name)
|
||||
)
|
||||
|
||||
thumbnail_source = instance.data.get("thumbnailSource")
|
||||
if not thumbnail_source:
|
||||
self.log.debug("Thumbnail source not filled. Skipping.")
|
||||
return
|
||||
|
||||
elif not os.path.exists(thumbnail_source):
|
||||
self.log.debug(
|
||||
"Thumbnail source file was not found {}. Skipping.".format(
|
||||
thumbnail_source))
|
||||
# Check if already has thumbnail created
|
||||
if self._instance_has_thumbnail(instance):
|
||||
self.log.info("Thumbnail representation already present.")
|
||||
return
|
||||
|
||||
# Check if already has thumbnail created
|
||||
if self._already_has_thumbnail(instance):
|
||||
self.log.info("Thumbnail representation already present.")
|
||||
dst_filepath = self._create_thumbnail(
|
||||
instance.context, thumbnail_source
|
||||
)
|
||||
if not dst_filepath:
|
||||
return
|
||||
|
||||
dst_staging, dst_filename = os.path.split(dst_filepath)
|
||||
new_repre = {
|
||||
"name": "thumbnail",
|
||||
"ext": "jpg",
|
||||
"files": dst_filename,
|
||||
"stagingDir": dst_staging,
|
||||
"thumbnail": True,
|
||||
"tags": ["thumbnail"]
|
||||
}
|
||||
|
||||
# adding representation
|
||||
self.log.debug(
|
||||
"Adding thumbnail representation: {}".format(new_repre)
|
||||
)
|
||||
instance.data["representations"].append(new_repre)
|
||||
|
||||
def _create_thumbnail(self, context, thumbnail_source):
|
||||
if not thumbnail_source:
|
||||
self.log.debug("Thumbnail source not filled. Skipping.")
|
||||
return
|
||||
|
||||
if not os.path.exists(thumbnail_source):
|
||||
self.log.debug((
|
||||
"Thumbnail source is set but file was not found {}. Skipping."
|
||||
).format(thumbnail_source))
|
||||
return
|
||||
|
||||
# Create temp directory for thumbnail
|
||||
|
|
@ -65,7 +92,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
|||
"Create temp directory {} for thumbnail".format(dst_staging)
|
||||
)
|
||||
# Store new staging to cleanup paths
|
||||
instance.context.data["cleanupFullPaths"].append(dst_staging)
|
||||
context.data["cleanupFullPaths"].append(dst_staging)
|
||||
|
||||
thumbnail_created = False
|
||||
oiio_supported = is_oiio_supported()
|
||||
|
|
@ -97,26 +124,12 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
|||
)
|
||||
|
||||
# Skip representation and try next one if wasn't created
|
||||
if not thumbnail_created:
|
||||
self.log.warning("Thumbanil has not been created.")
|
||||
return
|
||||
if thumbnail_created:
|
||||
return full_output_path
|
||||
|
||||
new_repre = {
|
||||
"name": "thumbnail",
|
||||
"ext": "jpg",
|
||||
"files": dst_filename,
|
||||
"stagingDir": dst_staging,
|
||||
"thumbnail": True,
|
||||
"tags": ["thumbnail"]
|
||||
}
|
||||
self.log.warning("Thumbanil has not been created.")
|
||||
|
||||
# adding representation
|
||||
self.log.debug(
|
||||
"Adding thumbnail representation: {}".format(new_repre)
|
||||
)
|
||||
instance.data["representations"].append(new_repre)
|
||||
|
||||
def _already_has_thumbnail(self, instance):
|
||||
def _instance_has_thumbnail(self, instance):
|
||||
if "representations" not in instance.data:
|
||||
self.log.warning(
|
||||
"Instance does not have 'representations' key filled"
|
||||
|
|
@ -171,3 +184,11 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
|||
exc_info=True
|
||||
)
|
||||
return False
|
||||
|
||||
def _create_context_thumbnail(self, context):
|
||||
if "thumbnailPath" in context.data:
|
||||
return
|
||||
|
||||
thumbnail_source = context.data.get("thumbnailSource")
|
||||
thumbnail_path = self._create_thumbnail(context, thumbnail_source)
|
||||
context.data["thumbnailPath"] = thumbnail_path
|
||||
|
|
@ -13,166 +13,279 @@ import sys
|
|||
import errno
|
||||
import shutil
|
||||
import copy
|
||||
import collections
|
||||
|
||||
import six
|
||||
import pyblish.api
|
||||
|
||||
from openpype.client import get_version_by_id
|
||||
from openpype.client import get_versions
|
||||
from openpype.client.operations import OperationsSession, new_thumbnail_doc
|
||||
|
||||
InstanceFilterResult = collections.namedtuple(
|
||||
"InstanceFilterResult",
|
||||
["instance", "thumbnail_path", "version_id"]
|
||||
)
|
||||
|
||||
class IntegrateThumbnails(pyblish.api.InstancePlugin):
|
||||
|
||||
class IntegrateThumbnails(pyblish.api.ContextPlugin):
|
||||
"""Integrate Thumbnails for Openpype use in Loaders."""
|
||||
|
||||
label = "Integrate Thumbnails"
|
||||
order = pyblish.api.IntegratorOrder + 0.01
|
||||
families = ["review"]
|
||||
|
||||
required_context_keys = [
|
||||
"project", "asset", "task", "subset", "version"
|
||||
]
|
||||
|
||||
def process(self, instance):
|
||||
def process(self, context):
|
||||
# Filter instances which can be used for integration
|
||||
filtered_instance_items = self._prepare_instances(context)
|
||||
if not filtered_instance_items:
|
||||
self.log.info(
|
||||
"All instances were filtered. Thumbnail integration skipped."
|
||||
)
|
||||
return
|
||||
|
||||
# Initial validation of available templates and required keys
|
||||
env_key = "AVALON_THUMBNAIL_ROOT"
|
||||
thumbnail_root_format_key = "{thumbnail_root}"
|
||||
thumbnail_root = os.environ.get(env_key) or ""
|
||||
|
||||
published_repres = instance.data.get("published_representations")
|
||||
if not published_repres:
|
||||
self.log.debug(
|
||||
"There are no published representations on the instance."
|
||||
)
|
||||
return
|
||||
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
anatomy = context.data["anatomy"]
|
||||
project_name = anatomy.project_name
|
||||
if "publish" not in anatomy.templates:
|
||||
self.log.warning("Anatomy is missing the \"publish\" key!")
|
||||
self.log.warning(
|
||||
"Anatomy is missing the \"publish\" key. Skipping."
|
||||
)
|
||||
return
|
||||
|
||||
if "thumbnail" not in anatomy.templates["publish"]:
|
||||
self.log.warning((
|
||||
"There is no \"thumbnail\" template set for the project \"{}\""
|
||||
"There is no \"thumbnail\" template set for the project"
|
||||
" \"{}\". Skipping."
|
||||
).format(project_name))
|
||||
return
|
||||
|
||||
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
|
||||
if not thumbnail_template:
|
||||
self.log.info("Thumbnail template is not filled. Skipping.")
|
||||
return
|
||||
|
||||
if (
|
||||
not thumbnail_root
|
||||
and thumbnail_root_format_key in thumbnail_template
|
||||
):
|
||||
self.log.warning((
|
||||
"{} is not set. Skipping thumbnail integration."
|
||||
).format(env_key))
|
||||
self.log.warning(("{} is not set. Skipping.").format(env_key))
|
||||
return
|
||||
|
||||
thumb_repre = None
|
||||
thumb_repre_anatomy_data = None
|
||||
for repre_info in published_repres.values():
|
||||
repre = repre_info["representation"]
|
||||
if repre["name"].lower() == "thumbnail":
|
||||
thumb_repre = repre
|
||||
thumb_repre_anatomy_data = repre_info["anatomy_data"]
|
||||
# Collect version ids from all filtered instances
|
||||
version_ids = {
|
||||
instance_items.version_id
|
||||
for instance_items in filtered_instance_items
|
||||
}
|
||||
# Query versions
|
||||
version_docs = get_versions(
|
||||
project_name,
|
||||
version_ids=version_ids,
|
||||
hero=True,
|
||||
fields=["_id", "type", "name"]
|
||||
)
|
||||
# Store version by their id (converted to string)
|
||||
version_docs_by_str_id = {
|
||||
str(version_doc["_id"]): version_doc
|
||||
for version_doc in version_docs
|
||||
}
|
||||
self._integrate_thumbnails(
|
||||
filtered_instance_items,
|
||||
version_docs_by_str_id,
|
||||
anatomy,
|
||||
thumbnail_root
|
||||
)
|
||||
|
||||
def _prepare_instances(self, context):
|
||||
context_thumbnail_path = context.data.get("thumbnailPath")
|
||||
valid_context_thumbnail = False
|
||||
if context_thumbnail_path and os.path.exists(context_thumbnail_path):
|
||||
valid_context_thumbnail = True
|
||||
|
||||
filtered_instances = []
|
||||
for instance in context:
|
||||
instance_label = self._get_instance_label(instance)
|
||||
# Skip instances without published representations
|
||||
# - there is no place where to put the thumbnail
|
||||
published_repres = instance.data.get("published_representations")
|
||||
if not published_repres:
|
||||
self.log.debug((
|
||||
"There are no published representations"
|
||||
" on the instance {}."
|
||||
).format(instance_label))
|
||||
continue
|
||||
|
||||
# Find thumbnail path on instance
|
||||
thumbnail_path = self._get_instance_thumbnail_path(
|
||||
published_repres)
|
||||
if thumbnail_path:
|
||||
self.log.debug((
|
||||
"Found thumbnail path for instance \"{}\"."
|
||||
" Thumbnail path: {}"
|
||||
).format(instance_label, thumbnail_path))
|
||||
|
||||
elif valid_context_thumbnail:
|
||||
# Use context thumbnail path if is available
|
||||
thumbnail_path = context_thumbnail_path
|
||||
self.log.debug((
|
||||
"Using context thumbnail path for instance \"{}\"."
|
||||
" Thumbnail path: {}"
|
||||
).format(instance_label, thumbnail_path))
|
||||
|
||||
# Skip instance if thumbnail path is not available for it
|
||||
if not thumbnail_path:
|
||||
self.log.info((
|
||||
"Skipping thumbnail integration for instance \"{}\"."
|
||||
" Instance and context"
|
||||
" thumbnail paths are not available."
|
||||
).format(instance_label))
|
||||
continue
|
||||
|
||||
version_id = str(self._get_version_id(published_repres))
|
||||
filtered_instances.append(
|
||||
InstanceFilterResult(instance, thumbnail_path, version_id)
|
||||
)
|
||||
return filtered_instances
|
||||
|
||||
def _get_version_id(self, published_representations):
|
||||
for repre_info in published_representations.values():
|
||||
return repre_info["representation"]["parent"]
|
||||
|
||||
def _get_instance_thumbnail_path(self, published_representations):
|
||||
thumb_repre_doc = None
|
||||
for repre_info in published_representations.values():
|
||||
repre_doc = repre_info["representation"]
|
||||
if repre_doc["name"].lower() == "thumbnail":
|
||||
thumb_repre_doc = repre_doc
|
||||
break
|
||||
|
||||
if not thumb_repre:
|
||||
if thumb_repre_doc is None:
|
||||
self.log.debug(
|
||||
"There is not representation with name \"thumbnail\""
|
||||
)
|
||||
return
|
||||
return None
|
||||
|
||||
version = get_version_by_id(project_name, thumb_repre["parent"])
|
||||
if not version:
|
||||
raise AssertionError(
|
||||
"There does not exist version with id {}".format(
|
||||
str(thumb_repre["parent"])
|
||||
)
|
||||
path = thumb_repre_doc["data"]["path"]
|
||||
if not os.path.exists(path):
|
||||
self.log.warning(
|
||||
"Thumbnail file cannot be found. Path: {}".format(path)
|
||||
)
|
||||
return None
|
||||
return os.path.normpath(path)
|
||||
|
||||
def _integrate_thumbnails(
|
||||
self,
|
||||
filtered_instance_items,
|
||||
version_docs_by_str_id,
|
||||
anatomy,
|
||||
thumbnail_root
|
||||
):
|
||||
op_session = OperationsSession()
|
||||
project_name = anatomy.project_name
|
||||
|
||||
for instance_item in filtered_instance_items:
|
||||
instance, thumbnail_path, version_id = instance_item
|
||||
instance_label = self._get_instance_label(instance)
|
||||
version_doc = version_docs_by_str_id.get(version_id)
|
||||
if not version_doc:
|
||||
self.log.warning((
|
||||
"Version entity for instance \"{}\" was not found."
|
||||
).format(instance_label))
|
||||
continue
|
||||
|
||||
filename, file_extension = os.path.splitext(thumbnail_path)
|
||||
# Create id for mongo entity now to fill anatomy template
|
||||
thumbnail_doc = new_thumbnail_doc()
|
||||
thumbnail_id = thumbnail_doc["_id"]
|
||||
|
||||
# Prepare anatomy template fill data
|
||||
template_data = copy.deepcopy(instance.data["anatomyData"])
|
||||
template_data.update({
|
||||
"_id": str(thumbnail_id),
|
||||
"ext": file_extension[1:],
|
||||
"name": "thumbnail",
|
||||
"thumbnail_root": thumbnail_root,
|
||||
"thumbnail_type": "thumbnail"
|
||||
})
|
||||
|
||||
anatomy_filled = anatomy.format(template_data)
|
||||
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
|
||||
template_filled = anatomy_filled["publish"]["thumbnail"]
|
||||
|
||||
dst_full_path = os.path.normpath(str(template_filled))
|
||||
self.log.debug("Copying file .. {} -> {}".format(
|
||||
thumbnail_path, dst_full_path
|
||||
))
|
||||
dirname = os.path.dirname(dst_full_path)
|
||||
try:
|
||||
os.makedirs(dirname)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
tp, value, tb = sys.exc_info()
|
||||
six.reraise(tp, value, tb)
|
||||
|
||||
shutil.copy(thumbnail_path, dst_full_path)
|
||||
|
||||
# Clean template data from keys that are dynamic
|
||||
for key in ("_id", "thumbnail_root"):
|
||||
template_data.pop(key, None)
|
||||
|
||||
repre_context = template_filled.used_values
|
||||
for key in self.required_context_keys:
|
||||
value = template_data.get(key)
|
||||
if not value:
|
||||
continue
|
||||
repre_context[key] = template_data[key]
|
||||
|
||||
thumbnail_doc["data"] = {
|
||||
"template": thumbnail_template,
|
||||
"template_data": repre_context
|
||||
}
|
||||
op_session.create_entity(
|
||||
project_name, thumbnail_doc["type"], thumbnail_doc
|
||||
)
|
||||
# Create thumbnail entity
|
||||
self.log.debug(
|
||||
"Creating entity in database {}".format(str(thumbnail_doc))
|
||||
)
|
||||
|
||||
# Get full path to thumbnail file from representation
|
||||
src_full_path = os.path.normpath(thumb_repre["data"]["path"])
|
||||
if not os.path.exists(src_full_path):
|
||||
self.log.warning("Thumbnail file was not found. Path: {}".format(
|
||||
src_full_path
|
||||
# Set thumbnail id for version
|
||||
op_session.update_entity(
|
||||
project_name,
|
||||
version_doc["type"],
|
||||
version_doc["_id"],
|
||||
{"data.thumbnail_id": thumbnail_id}
|
||||
)
|
||||
if version_doc["type"] == "hero_version":
|
||||
version_name = "Hero"
|
||||
else:
|
||||
version_name = version_doc["name"]
|
||||
self.log.debug("Setting thumbnail for version \"{}\" <{}>".format(
|
||||
version_name, version_id
|
||||
))
|
||||
return
|
||||
|
||||
filename, file_extension = os.path.splitext(src_full_path)
|
||||
# Create id for mongo entity now to fill anatomy template
|
||||
thumbnail_doc = new_thumbnail_doc()
|
||||
thumbnail_id = thumbnail_doc["_id"]
|
||||
|
||||
# Prepare anatomy template fill data
|
||||
template_data = copy.deepcopy(thumb_repre_anatomy_data)
|
||||
template_data.update({
|
||||
"_id": str(thumbnail_id),
|
||||
"ext": file_extension[1:],
|
||||
"thumbnail_root": thumbnail_root,
|
||||
"thumbnail_type": "thumbnail"
|
||||
})
|
||||
|
||||
anatomy_filled = anatomy.format(template_data)
|
||||
template_filled = anatomy_filled["publish"]["thumbnail"]
|
||||
|
||||
dst_full_path = os.path.normpath(str(template_filled))
|
||||
self.log.debug(
|
||||
"Copying file .. {} -> {}".format(src_full_path, dst_full_path)
|
||||
)
|
||||
dirname = os.path.dirname(dst_full_path)
|
||||
try:
|
||||
os.makedirs(dirname)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
tp, value, tb = sys.exc_info()
|
||||
six.reraise(tp, value, tb)
|
||||
|
||||
shutil.copy(src_full_path, dst_full_path)
|
||||
|
||||
# Clean template data from keys that are dynamic
|
||||
for key in ("_id", "thumbnail_root"):
|
||||
template_data.pop(key, None)
|
||||
|
||||
repre_context = template_filled.used_values
|
||||
for key in self.required_context_keys:
|
||||
value = template_data.get(key)
|
||||
if not value:
|
||||
continue
|
||||
repre_context[key] = template_data[key]
|
||||
|
||||
op_session = OperationsSession()
|
||||
|
||||
thumbnail_doc["data"] = {
|
||||
"template": thumbnail_template,
|
||||
"template_data": repre_context
|
||||
}
|
||||
op_session.create_entity(
|
||||
project_name, thumbnail_doc["type"], thumbnail_doc
|
||||
)
|
||||
# Create thumbnail entity
|
||||
self.log.debug(
|
||||
"Creating entity in database {}".format(str(thumbnail_doc))
|
||||
)
|
||||
|
||||
# Set thumbnail id for version
|
||||
op_session.update_entity(
|
||||
project_name,
|
||||
version["type"],
|
||||
version["_id"],
|
||||
{"data.thumbnail_id": thumbnail_id}
|
||||
)
|
||||
self.log.debug("Setting thumbnail for version \"{}\" <{}>".format(
|
||||
version["name"], str(version["_id"])
|
||||
))
|
||||
|
||||
asset_entity = instance.data["assetEntity"]
|
||||
op_session.update_entity(
|
||||
project_name,
|
||||
asset_entity["type"],
|
||||
asset_entity["_id"],
|
||||
{"data.thumbnail_id": thumbnail_id}
|
||||
)
|
||||
self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format(
|
||||
asset_entity["name"], str(version["_id"])
|
||||
))
|
||||
asset_entity = instance.data["assetEntity"]
|
||||
op_session.update_entity(
|
||||
project_name,
|
||||
asset_entity["type"],
|
||||
asset_entity["_id"],
|
||||
{"data.thumbnail_id": thumbnail_id}
|
||||
)
|
||||
self.log.debug("Setting thumbnail for asset \"{}\" <{}>".format(
|
||||
asset_entity["name"], version_id
|
||||
))
|
||||
|
||||
op_session.commit()
|
||||
|
||||
def _get_instance_label(self, instance):
|
||||
return (
|
||||
instance.data.get("label")
|
||||
or instance.data.get("name")
|
||||
or "N/A"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -21,9 +21,8 @@ class PreIntegrateThumbnails(pyblish.api.InstancePlugin):
|
|||
|
||||
label = "Override Integrate Thumbnail Representations"
|
||||
order = pyblish.api.IntegratorOrder - 0.1
|
||||
families = ["review"]
|
||||
|
||||
integrate_profiles = {}
|
||||
integrate_profiles = []
|
||||
|
||||
def process(self, instance):
|
||||
repres = instance.data.get("representations")
|
||||
|
|
|
|||
|
|
@ -142,7 +142,7 @@
|
|||
"exr16fpdwaa"
|
||||
],
|
||||
"reel_name": "OP_LoadedReel",
|
||||
"clip_name_template": "{asset}_{subset}<_{output}>"
|
||||
"clip_name_template": "{batch}_{asset}_{subset}<_{output}>"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -53,6 +53,62 @@
|
|||
"families": [],
|
||||
"hosts": [],
|
||||
"outputs": {
|
||||
"png": {
|
||||
"ext": "png",
|
||||
"tags": [
|
||||
"ftrackreview"
|
||||
],
|
||||
"burnins": [],
|
||||
"ffmpeg_args": {
|
||||
"video_filters": [],
|
||||
"audio_filters": [],
|
||||
"input": [],
|
||||
"output": []
|
||||
},
|
||||
"filter": {
|
||||
"families": [
|
||||
"render",
|
||||
"review",
|
||||
"ftrack"
|
||||
],
|
||||
"subsets": [],
|
||||
"custom_tags": [],
|
||||
"single_frame_filter": "single_frame"
|
||||
},
|
||||
"overscan_crop": "",
|
||||
"overscan_color": [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
255
|
||||
],
|
||||
"width": 1920,
|
||||
"height": 1080,
|
||||
"scale_pixel_aspect": true,
|
||||
"bg_color": [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0
|
||||
],
|
||||
"letter_box": {
|
||||
"enabled": false,
|
||||
"ratio": 0.0,
|
||||
"fill_color": [
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
255
|
||||
],
|
||||
"line_thickness": 0,
|
||||
"line_color": [
|
||||
255,
|
||||
0,
|
||||
0,
|
||||
255
|
||||
]
|
||||
}
|
||||
},
|
||||
"h264": {
|
||||
"ext": "mp4",
|
||||
"tags": [
|
||||
|
|
@ -79,7 +135,8 @@
|
|||
"ftrack"
|
||||
],
|
||||
"subsets": [],
|
||||
"custom_tags": []
|
||||
"custom_tags": [],
|
||||
"single_frame_filter": "multi_frame"
|
||||
},
|
||||
"overscan_crop": "",
|
||||
"overscan_color": [
|
||||
|
|
@ -401,7 +458,8 @@
|
|||
"hosts": [],
|
||||
"task_types": [],
|
||||
"tasks": [],
|
||||
"enabled": true
|
||||
"enabled": true,
|
||||
"use_last_published_workfile": false
|
||||
}
|
||||
],
|
||||
"open_workfile_tool_on_startup": [
|
||||
|
|
|
|||
|
|
@ -303,5 +303,12 @@
|
|||
"extensions": [
|
||||
".mov"
|
||||
]
|
||||
},
|
||||
"publish": {
|
||||
"ValidateFrameRange": {
|
||||
"enabled": true,
|
||||
"optional": true,
|
||||
"active": true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -311,6 +311,24 @@
|
|||
"object_type": "text"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "publish",
|
||||
"label": "Publish plugins",
|
||||
"children": [
|
||||
{
|
||||
"type": "schema_template",
|
||||
"name": "template_validate_plugin",
|
||||
"template_data": [
|
||||
{
|
||||
"key": "ValidateFrameRange",
|
||||
"label": "Validate frame range"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -16,22 +16,26 @@
|
|||
{
|
||||
"type": "number",
|
||||
"key": "frameStart",
|
||||
"label": "Frame Start"
|
||||
"label": "Frame Start",
|
||||
"maximum": 999999999
|
||||
},
|
||||
{
|
||||
"type": "number",
|
||||
"key": "frameEnd",
|
||||
"label": "Frame End"
|
||||
"label": "Frame End",
|
||||
"maximum": 999999999
|
||||
},
|
||||
{
|
||||
"type": "number",
|
||||
"key": "clipIn",
|
||||
"label": "Clip In"
|
||||
"label": "Clip In",
|
||||
"maximum": 999999999
|
||||
},
|
||||
{
|
||||
"type": "number",
|
||||
"key": "clipOut",
|
||||
"label": "Clip Out"
|
||||
"label": "Clip Out",
|
||||
"maximum": 999999999
|
||||
},
|
||||
{
|
||||
"type": "number",
|
||||
|
|
|
|||
|
|
@ -304,6 +304,20 @@
|
|||
"label": "Custom Tags",
|
||||
"type": "list",
|
||||
"object_type": "text"
|
||||
},
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Use output <b>always</b> / only if input <b>is 1 frame</b> image / only if has <b>2+ frames</b> or <b>is video</b>"
|
||||
},
|
||||
{
|
||||
"type": "enum",
|
||||
"key": "single_frame_filter",
|
||||
"default": "everytime",
|
||||
"enum_items": [
|
||||
{"everytime": "Always"},
|
||||
{"single_frame": "Only if input has 1 image frame"},
|
||||
{"multi_frame": "Only if input is video or sequence of frames"}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
|
|||
|
|
@ -149,6 +149,11 @@
|
|||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "use_last_published_workfile",
|
||||
"label": "Use last published workfile"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,26 @@
|
|||
[
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "{key}",
|
||||
"label": "{label}",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "optional",
|
||||
"label": "Optional"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "active",
|
||||
"label": "Active"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
|
|
@ -138,8 +138,7 @@ def save_studio_settings(data):
|
|||
SaveWarningExc: If any module raises the exception.
|
||||
"""
|
||||
# Notify Pype modules
|
||||
from openpype.modules import ModulesManager
|
||||
from openpype_interfaces import ISettingsChangeListener
|
||||
from openpype.modules import ModulesManager, ISettingsChangeListener
|
||||
|
||||
old_data = get_system_settings()
|
||||
default_values = get_default_settings()[SYSTEM_SETTINGS_KEY]
|
||||
|
|
@ -186,8 +185,7 @@ def save_project_settings(project_name, overrides):
|
|||
SaveWarningExc: If any module raises the exception.
|
||||
"""
|
||||
# Notify Pype modules
|
||||
from openpype.modules import ModulesManager
|
||||
from openpype_interfaces import ISettingsChangeListener
|
||||
from openpype.modules import ModulesManager, ISettingsChangeListener
|
||||
|
||||
default_values = get_default_settings()[PROJECT_SETTINGS_KEY]
|
||||
if project_name:
|
||||
|
|
@ -248,8 +246,7 @@ def save_project_anatomy(project_name, anatomy_data):
|
|||
SaveWarningExc: If any module raises the exception.
|
||||
"""
|
||||
# Notify Pype modules
|
||||
from openpype.modules import ModulesManager
|
||||
from openpype_interfaces import ISettingsChangeListener
|
||||
from openpype.modules import ModulesManager, ISettingsChangeListener
|
||||
|
||||
default_values = get_default_settings()[PROJECT_ANATOMY_KEY]
|
||||
if project_name:
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@
|
|||
"bg": "#2C313A",
|
||||
"bg-inputs": "#21252B",
|
||||
"bg-buttons": "#434a56",
|
||||
"bg-button-hover": "rgba(168, 175, 189, 0.3)",
|
||||
"bg-button-hover": "rgb(81, 86, 97)",
|
||||
"bg-inputs-disabled": "#2C313A",
|
||||
"bg-buttons-disabled": "#434a56",
|
||||
|
||||
|
|
|
|||
|
|
@ -884,6 +884,26 @@ PublisherTabBtn[active="1"]:hover {
|
|||
background: {color:bg};
|
||||
}
|
||||
|
||||
PixmapButton{
|
||||
border: 0px solid transparent;
|
||||
border-radius: 0.2em;
|
||||
background: {color:bg-buttons};
|
||||
}
|
||||
PixmapButton:hover {
|
||||
background: {color:bg-button-hover};
|
||||
}
|
||||
PixmapButton:disabled {
|
||||
background: {color:bg-buttons-disabled};
|
||||
}
|
||||
|
||||
#ThumbnailPixmapHoverButton {
|
||||
font-size: 11pt;
|
||||
background: {color:bg-view};
|
||||
}
|
||||
#ThumbnailPixmapHoverButton:hover {
|
||||
background: {color:bg-button-hover};
|
||||
}
|
||||
|
||||
#CreatorDetailedDescription {
|
||||
padding-left: 5px;
|
||||
padding-right: 5px;
|
||||
|
|
@ -911,11 +931,11 @@ PublisherTabBtn[active="1"]:hover {
|
|||
#PublishLogConsole {
|
||||
font-family: "Noto Sans Mono";
|
||||
}
|
||||
VariantInputsWidget QLineEdit {
|
||||
#VariantInputsWidget QLineEdit {
|
||||
border-bottom-right-radius: 0px;
|
||||
border-top-right-radius: 0px;
|
||||
}
|
||||
VariantInputsWidget QToolButton {
|
||||
#VariantInputsWidget QToolButton {
|
||||
border-bottom-left-radius: 0px;
|
||||
border-top-left-radius: 0px;
|
||||
padding-top: 0.5em;
|
||||
|
|
|
|||
|
|
@ -3,8 +3,14 @@ from .widgets import (
|
|||
AttributeDefinitionsWidget,
|
||||
)
|
||||
|
||||
from .dialog import (
|
||||
AttributeDefinitionsDialog,
|
||||
)
|
||||
|
||||
|
||||
__all__ = (
|
||||
"create_widget_for_attr_def",
|
||||
"AttributeDefinitionsWidget",
|
||||
|
||||
"AttributeDefinitionsDialog",
|
||||
)
|
||||
33
openpype/tools/attribute_defs/dialog.py
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
from Qt import QtWidgets
|
||||
|
||||
from .widgets import AttributeDefinitionsWidget
|
||||
|
||||
|
||||
class AttributeDefinitionsDialog(QtWidgets.QDialog):
|
||||
def __init__(self, attr_defs, parent=None):
|
||||
super(AttributeDefinitionsDialog, self).__init__(parent)
|
||||
|
||||
attrs_widget = AttributeDefinitionsWidget(attr_defs, self)
|
||||
|
||||
btns_widget = QtWidgets.QWidget(self)
|
||||
ok_btn = QtWidgets.QPushButton("OK", btns_widget)
|
||||
cancel_btn = QtWidgets.QPushButton("Cancel", btns_widget)
|
||||
|
||||
btns_layout = QtWidgets.QHBoxLayout(btns_widget)
|
||||
btns_layout.setContentsMargins(0, 0, 0, 0)
|
||||
btns_layout.addStretch(1)
|
||||
btns_layout.addWidget(ok_btn, 0)
|
||||
btns_layout.addWidget(cancel_btn, 0)
|
||||
|
||||
main_layout = QtWidgets.QVBoxLayout(self)
|
||||
main_layout.addWidget(attrs_widget, 0)
|
||||
main_layout.addStretch(1)
|
||||
main_layout.addWidget(btns_widget, 0)
|
||||
|
||||
ok_btn.clicked.connect(self.accept)
|
||||
cancel_btn.clicked.connect(self.reject)
|
||||
|
||||
self._attrs_widget = attrs_widget
|
||||
|
||||
def get_values(self):
|
||||
return self._attrs_widget.current_value()
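A hedged usage sketch for the new dialog, assuming a running QApplication and that the attribute definition classes from 'openpype.lib.attribute_definitions' accept the keyword arguments shown:

from Qt import QtWidgets

from openpype.lib.attribute_definitions import BoolDef, NumberDef
from openpype.tools.attribute_defs import AttributeDefinitionsDialog

app = QtWidgets.QApplication([])

attr_defs = [
    NumberDef("frame_start", default=1001, label="Frame start"),
    BoolDef("use_handles", default=True, label="Use handles"),
]
dialog = AttributeDefinitionsDialog(attr_defs)
if dialog.exec_():
    # Values are returned keyed by each attribute definition key
    print(dialog.get_values())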
|
||||
|
|
@ -349,7 +349,7 @@ class FilesModel(QtGui.QStandardItemModel):
|
|||
item.setData(file_item.filenames, FILENAMES_ROLE)
|
||||
item.setData(file_item.directory, DIRPATH_ROLE)
|
||||
item.setData(icon_pixmap, ITEM_ICON_ROLE)
|
||||
item.setData(file_item.ext, EXT_ROLE)
|
||||
item.setData(file_item.lower_ext, EXT_ROLE)
|
||||
item.setData(file_item.is_dir, IS_DIR_ROLE)
|
||||
item.setData(file_item.is_sequence, IS_SEQUENCE_ROLE)
|
||||
|
||||
|
|
@ -463,7 +463,7 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel):
|
|||
for filepath in filepaths:
|
||||
if os.path.isfile(filepath):
|
||||
_, ext = os.path.splitext(filepath)
|
||||
if ext in self._allowed_extensions:
|
||||
if ext.lower() in self._allowed_extensions:
|
||||
return True
|
||||
|
||||
elif self._allow_folders:
|
||||
|
|
@ -475,7 +475,7 @@ class FilesProxyModel(QtCore.QSortFilterProxyModel):
|
|||
for filepath in filepaths:
|
||||
if os.path.isfile(filepath):
|
||||
_, ext = os.path.splitext(filepath)
|
||||
if ext in self._allowed_extensions:
|
||||
if ext.lower() in self._allowed_extensions:
|
||||
filtered_paths.append(filepath)
|
||||
|
||||
elif self._allow_folders:
|
||||
|
|
@ -2,6 +2,8 @@ import inspect
|
|||
from Qt import QtGui
|
||||
import qtawesome
|
||||
|
||||
from openpype.lib.attribute_definitions import AbtractAttrDef
|
||||
from openpype.tools.attribute_defs import AttributeDefinitionsDialog
|
||||
from openpype.tools.utils.widgets import (
|
||||
OptionalAction,
|
||||
OptionDialog
|
||||
|
|
@ -34,21 +36,30 @@ def get_options(action, loader, parent, repre_contexts):
|
|||
None when dialog was closed or cancelled, in all other cases {}
|
||||
if no options
|
||||
"""
|
||||
|
||||
# Pop option dialog
|
||||
options = {}
|
||||
loader_options = loader.get_options(repre_contexts)
|
||||
if getattr(action, "optioned", False) and loader_options:
|
||||
if not getattr(action, "optioned", False) or not loader_options:
|
||||
return options
|
||||
|
||||
if isinstance(loader_options[0], AbtractAttrDef):
|
||||
qargparse_options = False
|
||||
dialog = AttributeDefinitionsDialog(loader_options, parent)
|
||||
else:
|
||||
qargparse_options = True
|
||||
dialog = OptionDialog(parent)
|
||||
dialog.setWindowTitle(action.label + " Options")
|
||||
dialog.create(loader_options)
|
||||
|
||||
if not dialog.exec_():
|
||||
return None
|
||||
dialog.setWindowTitle(action.label + " Options")
|
||||
|
||||
# Get option
|
||||
options = dialog.parse()
|
||||
if not dialog.exec_():
|
||||
return None
|
||||
|
||||
return options
|
||||
# Get option
|
||||
if qargparse_options:
|
||||
return dialog.parse()
|
||||
return dialog.get_values()
|
||||
|
||||
|
||||
def add_representation_loaders_to_menu(loaders, menu, repre_contexts):
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@ class NameDef:
|
|||
class NumberDef:
|
||||
def __init__(self, minimum=None, maximum=None, decimals=None):
|
||||
self.minimum = 0 if minimum is None else minimum
|
||||
self.maximum = 999999 if maximum is None else maximum
|
||||
self.maximum = 999999999 if maximum is None else maximum
|
||||
self.decimals = 0 if decimals is None else decimals
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -20,9 +20,10 @@ INSTANCE_ID_ROLE = QtCore.Qt.UserRole + 1
|
|||
SORT_VALUE_ROLE = QtCore.Qt.UserRole + 2
|
||||
IS_GROUP_ROLE = QtCore.Qt.UserRole + 3
|
||||
CREATOR_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 4
|
||||
FAMILY_ROLE = QtCore.Qt.UserRole + 5
|
||||
GROUP_ROLE = QtCore.Qt.UserRole + 6
|
||||
CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 7
|
||||
CREATOR_THUMBNAIL_ENABLED_ROLE = QtCore.Qt.UserRole + 5
|
||||
FAMILY_ROLE = QtCore.Qt.UserRole + 6
|
||||
GROUP_ROLE = QtCore.Qt.UserRole + 7
|
||||
CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 8
|
||||
|
||||
|
||||
__all__ = (
|
||||
|
|
|
|||
|
|
@ -4,6 +4,8 @@ import logging
|
|||
import traceback
|
||||
import collections
|
||||
import uuid
|
||||
import tempfile
|
||||
import shutil
|
||||
from abc import ABCMeta, abstractmethod, abstractproperty
|
||||
|
||||
import six
|
||||
|
|
@ -24,6 +26,7 @@ from openpype.pipeline import (
|
|||
KnownPublishError,
|
||||
registered_host,
|
||||
legacy_io,
|
||||
get_process_id,
|
||||
)
|
||||
from openpype.pipeline.create import (
|
||||
CreateContext,
|
||||
|
|
@@ -87,9 +90,9 @@ class AssetDocsCache:
return

project_name = self._controller.project_name
asset_docs = get_assets(
asset_docs = list(get_assets(
project_name, fields=self.projection.keys()
)
))
asset_docs_by_name = {}
task_names_by_asset_name = {}
for asset_doc in asset_docs:

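Wrapping the query in list() matters because get_assets returns an iterable cursor rather than a list (as the wrap itself suggests), and this cache keeps the result for repeated lookups; an exhausted iterator would silently yield nothing on the second pass. A pure-Python illustration with a stand-in generator:

    def fake_get_assets():
        # stand-in for a database cursor / generator
        yield {"name": "shot010"}
        yield {"name": "shot020"}

    docs = fake_get_assets()
    print([doc["name"] for doc in docs])  # ['shot010', 'shot020']
    print([doc["name"] for doc in docs])  # [] - generator already exhausted

    docs = list(fake_get_assets())
    print([doc["name"] for doc in docs])  # materialized list works on every pass
    print([doc["name"] for doc in docs])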
@@ -825,6 +828,7 @@ class CreatorItem:
default_variant,
default_variants,
create_allow_context_change,
create_allow_thumbnail,
pre_create_attributes_defs
):
self.identifier = identifier

@@ -838,6 +842,7 @@ class CreatorItem:
self.default_variant = default_variant
self.default_variants = default_variants
self.create_allow_context_change = create_allow_context_change
self.create_allow_thumbnail = create_allow_thumbnail
self.instance_attributes_defs = instance_attributes_defs
self.pre_create_attributes_defs = pre_create_attributes_defs

@@ -864,6 +869,7 @@ class CreatorItem:
default_variants = None
pre_create_attr_defs = None
create_allow_context_change = None
create_allow_thumbnail = None
if creator_type is CreatorTypes.artist:
description = creator.get_description()
detail_description = creator.get_detail_description()

@@ -871,6 +877,7 @@ class CreatorItem:
default_variants = creator.get_default_variants()
pre_create_attr_defs = creator.get_pre_create_attr_defs()
create_allow_context_change = creator.create_allow_context_change
create_allow_thumbnail = creator.create_allow_thumbnail

identifier = creator.identifier
return cls(

@@ -886,6 +893,7 @@ class CreatorItem:
default_variant,
default_variants,
create_allow_context_change,
create_allow_thumbnail,
pre_create_attr_defs
)

@@ -914,6 +922,7 @@ class CreatorItem:
"default_variant": self.default_variant,
"default_variants": self.default_variants,
"create_allow_context_change": self.create_allow_context_change,
"create_allow_thumbnail": self.create_allow_thumbnail,
"instance_attributes_defs": instance_attributes_defs,
"pre_create_attributes_defs": pre_create_attributes_defs,
}

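The create_allow_thumbnail flag has to be threaded through the constructor, the from-creator factory and the serialized dict consistently; if any of the three is missed, a deserialized item silently loses the flag. A toy round-trip illustrating that constraint (MiniCreatorItem is illustrative only, not the real class):

    class MiniCreatorItem:
        def __init__(self, identifier, create_allow_thumbnail):
            self.identifier = identifier
            self.create_allow_thumbnail = create_allow_thumbnail

        def to_data(self):
            # every constructor argument must have a matching key here
            return {
                "identifier": self.identifier,
                "create_allow_thumbnail": self.create_allow_thumbnail,
            }

        @classmethod
        def from_data(cls, data):
            return cls(data["identifier"], data["create_allow_thumbnail"])


    item = MiniCreatorItem("io.openpype.creators.example", True)
    restored = MiniCreatorItem.from_data(item.to_data())
    assert restored.create_allow_thumbnail is True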
@@ -1115,11 +1124,13 @@ class AbstractPublisherController(object):

pass

@abstractmethod
def save_changes(self):
"""Save changes in create context."""

pass

@abstractmethod
def remove_instances(self, instance_ids):
"""Remove list of instances from create context."""
# TODO expect instance ids

@@ -1256,6 +1267,14 @@ class AbstractPublisherController(object):
def trigger_convertor_items(self, convertor_identifiers):
pass

@abstractmethod
def get_thumbnail_paths_for_instances(self, instance_ids):
pass

@abstractmethod
def set_thumbnail_paths_for_instances(self, thumbnail_path_mapping):
pass

@abstractmethod
def set_comment(self, comment):
"""Set comment on pyblish context.

@@ -1283,6 +1302,22 @@ class AbstractPublisherController(object):

pass

@abstractmethod
def get_thumbnail_temp_dir_path(self):
"""Return path to directory where thumbnails can be temporary stored.

Returns:
str: Path to a directory.
"""

pass

@abstractmethod
def clear_thumbnail_temp_dir_path(self):
"""Remove content of thumbnail temp directory."""

pass


class BasePublisherController(AbstractPublisherController):
"""Implement common logic for controllers.

@@ -1523,6 +1558,26 @@ class BasePublisherController(AbstractPublisherController):
return creator_item.icon
return None

def get_thumbnail_temp_dir_path(self):
"""Return path to directory where thumbnails can be temporary stored.

Returns:
str: Path to a directory.
"""

return os.path.join(
tempfile.gettempdir(),
"publisher_thumbnails",
get_process_id()
)

def clear_thumbnail_temp_dir_path(self):
"""Remove content of thumbnail temp directory."""

dirpath = self.get_thumbnail_temp_dir_path()
if os.path.exists(dirpath):
shutil.rmtree(dirpath)


class PublisherController(BasePublisherController):
"""Middleware between UI, CreateContext and publish Context.

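The thumbnail temp directory appears to be namespaced per publisher process via get_process_id(), which keeps parallel sessions from clearing each other's files. A standalone sketch of the same scheme, substituting os.getpid() for openpype's get_process_id() helper:

    import os
    import shutil
    import tempfile


    def get_thumbnail_temp_dir_path():
        # one sub-directory per running process
        return os.path.join(
            tempfile.gettempdir(), "publisher_thumbnails", str(os.getpid())
        )


    def clear_thumbnail_temp_dir_path():
        dirpath = get_thumbnail_temp_dir_path()
        if os.path.exists(dirpath):
            shutil.rmtree(dirpath)


    path = get_thumbnail_temp_dir_path()
    os.makedirs(path, exist_ok=True)
    print(path)  # e.g. /tmp/publisher_thumbnails/12345
    clear_thumbnail_temp_dir_path()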
@@ -1778,6 +1833,29 @@ class PublisherController(BasePublisherController):

self._on_create_instance_change()

def get_thumbnail_paths_for_instances(self, instance_ids):
thumbnail_paths_by_instance_id = (
self._create_context.thumbnail_paths_by_instance_id
)
return {
instance_id: thumbnail_paths_by_instance_id.get(instance_id)
for instance_id in instance_ids
}

def set_thumbnail_paths_for_instances(self, thumbnail_path_mapping):
thumbnail_paths_by_instance_id = (
self._create_context.thumbnail_paths_by_instance_id
)
for instance_id, thumbnail_path in thumbnail_path_mapping.items():
thumbnail_paths_by_instance_id[instance_id] = thumbnail_path

self._emit_event(
"instance.thumbnail.changed",
{
"mapping": thumbnail_path_mapping
}
)

def emit_card_message(
self, message, message_type=CardMessageTypes.standard
):

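The getter above builds a plain dict keyed by the requested instance ids, returning None for ids without a stored thumbnail, and the setter mirrors the mapping back into the create context before emitting the "instance.thumbnail.changed" event. The lookup itself is just a dict comprehension (paths below are made up):

    thumbnail_paths_by_instance_id = {
        "instance-a": "/tmp/publisher_thumbnails/12345/a.png",
    }
    instance_ids = ["instance-a", "instance-b"]

    mapping = {
        instance_id: thumbnail_paths_by_instance_id.get(instance_id)
        for instance_id in instance_ids
    }
    print(mapping)
    # {'instance-a': '/tmp/publisher_thumbnails/12345/a.png', 'instance-b': None}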
@@ -115,6 +115,11 @@ class QtRemotePublishController(BasePublisherController):
super().__init__(*args, **kwargs)

self._created_instances = {}
self._thumbnail_paths_by_instance_id = None

def _reset_attributes(self):
super()._reset_attributes()
self._thumbnail_paths_by_instance_id = None

@abstractmethod
def _get_serialized_instances(self):

@@ -180,6 +185,11 @@ class QtRemotePublishController(BasePublisherController):
self.host_is_valid = event["value"]
return

# Don't skip because UI want know about it too
if event.topic == "instance.thumbnail.changed":
for instance_id, path in event["mapping"].items():
self.thumbnail_paths_by_instance_id[instance_id] = path

# Topics that can be just passed by because are not affecting
# controller itself
# - "show.card.message"

@@ -256,6 +266,42 @@ class QtRemotePublishController(BasePublisherController):
def get_existing_subset_names(self, asset_name):
pass

@property
def thumbnail_paths_by_instance_id(self):
if self._thumbnail_paths_by_instance_id is None:
self._thumbnail_paths_by_instance_id = (
self._collect_thumbnail_paths_by_instance_id()
)
return self._thumbnail_paths_by_instance_id

def get_thumbnail_path_for_instance(self, instance_id):
return self.thumbnail_paths_by_instance_id.get(instance_id)

def set_thumbnail_path_for_instance(self, instance_id, thumbnail_path):
self._set_thumbnail_path_on_context(self, instance_id, thumbnail_path)

@abstractmethod
def _collect_thumbnail_paths_by_instance_id(self):
"""Collect thumbnail paths by instance id in remote controller.

These should be collected from 'CreatedContext' there.

Returns:
Dict[str, str]: Mapping of thumbnail path by instance id.
"""

pass

@abstractmethod
def _set_thumbnail_path_on_context(self, instance_id, thumbnail_path):
"""Send change of thumbnail path in remote controller.

That should trigger event 'instance.thumbnail.changed' which is
captured and handled in default implementation in this class.
"""

pass

@abstractmethod
def get_subset_name(
self,

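The remote controller caches the collected mapping and only re-collects it after a reset; that lazy-property pattern is the core of the thumbnail_paths_by_instance_id property added above. A minimal standalone sketch (RemoteThumbnailCache and its return values are illustrative, not part of the codebase):

    class RemoteThumbnailCache:
        def __init__(self):
            self._thumbnail_paths_by_instance_id = None

        def _collect_thumbnail_paths_by_instance_id(self):
            # hypothetical stand-in for the remote-controller query
            return {"instance-a": "/tmp/a.png"}

        @property
        def thumbnail_paths_by_instance_id(self):
            # collected on first access, then reused until reset
            if self._thumbnail_paths_by_instance_id is None:
                self._thumbnail_paths_by_instance_id = (
                    self._collect_thumbnail_paths_by_instance_id()
                )
            return self._thumbnail_paths_by_instance_id

        def reset(self):
            self._thumbnail_paths_by_instance_id = None


    cache = RemoteThumbnailCache()
    print(cache.thumbnail_paths_by_instance_id)  # collected on first access
    cache.reset()                                # next access re-collects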
Some files were not shown because too many files have changed in this diff.