mirror of https://github.com/ynput/ayon-core.git
synced 2026-01-01 16:34:53 +01:00

Merge remote-tracking branch 'origin/bugfix/remove_auto_version_from_instance' into feature/OP-2765_AE-to-new-publisher

This commit is contained in:
commit 05867fb5c3

33 changed files with 612 additions and 328 deletions

@@ -202,13 +202,10 @@ def reload_pipeline(*args):
    avalon.api.uninstall()

    for module in (
        "avalon.io",
        "avalon.lib",
        "avalon.pipeline",
        "avalon.tools.creator.app",
        "avalon.tools.manager.app",
        "avalon.api",
        "avalon.tools",
        "avalon.io",
        "avalon.lib",
        "avalon.pipeline",
        "avalon.api",
    ):
        module = importlib.import_module(module)
        importlib.reload(module)

@@ -361,7 +361,7 @@ def zip_and_move(source, destination):
    log.debug(f"Saved '{source}' to '{destination}'")


def show(module_name):
def show(tool_name):
    """Call show on "module_name".

    This allows to make a QApplication ahead of time and always "exec_" to

@@ -375,13 +375,6 @@ def show(module_name):
    # requests to be received properly.
    time.sleep(1)

    # Get tool name from module name
    # TODO this is for backwards compatibility not sure if `TB_sceneOpened.js`
    # is automatically updated.
    # Previous javascript sent 'module_name' which contained whole tool import
    # string e.g. "avalon.tools.workfiles" now it should be only "workfiles"
    tool_name = module_name.split(".")[-1]

    kwargs = {}
    if tool_name == "loader":
        kwargs["use_context"] = True

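A minimal sketch of the backwards-compatibility mapping described in the comments above: older callers pass a whole import path such as "avalon.tools.workfiles" while newer ones pass only the tool name, and both resolve to the same value. The helper name below is illustrative; only the split logic is taken from the hunk.

    def _to_tool_name(module_name):
        # "avalon.tools.workfiles" -> "workfiles"; a bare "workfiles" is unchanged
        return module_name.split(".")[-1]

    assert _to_tool_name("avalon.tools.workfiles") == "workfiles"
    assert _to_tool_name("workfiles") == "workfiles"
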
@@ -37,17 +37,17 @@ class ToolWindows:


def edit_shader_definitions():
    from avalon.tools import lib
    from Qt import QtWidgets
    from openpype.hosts.maya.api.shader_definition_editor import (
        ShaderDefinitionsEditor
    )
    from openpype.tools.utils import qt_app_context

    top_level_widgets = QtWidgets.QApplication.topLevelWidgets()
    main_window = next(widget for widget in top_level_widgets
                       if widget.objectName() == "MayaWindow")

    with lib.application():
    with qt_app_context():
        window = ToolWindows.get_window("shader_definition_editor")
        if not window:
            window = ShaderDefinitionsEditor(parent=main_window)

@@ -36,7 +36,7 @@ def install():
        return

    def deferred():
        from avalon.tools import publish
        pyblish_icon = host_tools.get_pyblish_icon()
        parent_widget = get_main_window()
        cmds.menu(
            MENU_NAME,

@@ -80,7 +80,7 @@ def install():
            command=lambda *args: host_tools.show_publish(
                parent=parent_widget
            ),
            image=publish.ICON
            image=pyblish_icon
        )

        cmds.menuItem(

@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import os
import maya.cmds as cmds
import maya.cmds as cmds  # noqa
from avalon import api
from openpype.api import get_project_settings
from openpype.hosts.maya.api.lib import (

@@ -42,20 +43,20 @@ class VRaySceneLoader(api.Loader):
        with maintained_selection():
            cmds.namespace(addNamespace=namespace)
            with namespaced(namespace, new=False):
                nodes, group_node = self.create_vray_scene(name,
                                                           filename=self.fname)
                nodes, root_node = self.create_vray_scene(name,
                                                          filename=self.fname)

        self[:] = nodes
        if not nodes:
            return

        # colour the group node
        presets = get_project_settings(os.environ['AVALON_PROJECT'])
        colors = presets['maya']['load']['colors']
        settings = get_project_settings(os.environ['AVALON_PROJECT'])
        colors = settings['maya']['load']['colors']
        c = colors.get(family)
        if c is not None:
            cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
            cmds.setAttr("{0}.outlinerColor".format(group_node),
            cmds.setAttr("{0}.useOutlinerColor".format(root_node), 1)
            cmds.setAttr("{0}.outlinerColor".format(root_node),
                         (float(c[0])/255),
                         (float(c[1])/255),
                         (float(c[2])/255)

@@ -123,17 +124,21 @@ class VRaySceneLoader(api.Loader):
        mesh_node_name = "VRayScene_{}".format(name)

        trans = cmds.createNode(
            "transform", name="{}".format(mesh_node_name))
        mesh = cmds.createNode(
            "mesh", name="{}_Shape".format(mesh_node_name), parent=trans)
            "transform", name=mesh_node_name)
        vray_scene = cmds.createNode(
            "VRayScene", name="{}_VRSCN".format(mesh_node_name), parent=trans)
        mesh = cmds.createNode(
            "mesh", name="{}_Shape".format(mesh_node_name), parent=trans)

        cmds.connectAttr(
            "{}.outMesh".format(vray_scene), "{}.inMesh".format(mesh))

        cmds.setAttr("{}.FilePath".format(vray_scene), filename, type="string")

        # Lock the shape nodes so the user cannot delete these
        cmds.lockNode(mesh, lock=True)
        cmds.lockNode(vray_scene, lock=True)

        # Create important connections
        cmds.connectAttr("time1.outTime",
                         "{0}.inputTime".format(trans))

@@ -141,11 +146,9 @@ class VRaySceneLoader(api.Loader):
        # Connect mesh to initialShadingGroup
        cmds.sets([mesh], forceElement="initialShadingGroup")

        group_node = cmds.group(empty=True, name="{}_GRP".format(name))
        cmds.parent(trans, group_node)
        nodes = [trans, vray_scene, mesh, group_node]
        nodes = [trans, vray_scene, mesh]

        # Fix: Force refresh so the mesh shows correctly after creation
        cmds.refresh()

        return nodes, group_node
        return nodes, trans

@@ -1,6 +1,5 @@
import os
import re
import sys
import six
import platform
import contextlib

@@ -679,10 +678,10 @@ def get_render_path(node):
    }

    nuke_imageio_writes = get_created_node_imageio_setting(**data_preset)
    host_name = os.environ.get("AVALON_APP")

    application = lib.get_application(os.environ["AVALON_APP_NAME"])
    data.update({
        "application": application,
        "app": host_name,
        "nuke_imageio_writes": nuke_imageio_writes
    })

@@ -805,18 +804,14 @@ def create_write_node(name, data, input=None, prenodes=None,
    '''

    imageio_writes = get_created_node_imageio_setting(**data)
    app_manager = ApplicationManager()
    app_name = os.environ.get("AVALON_APP_NAME")
    if app_name:
        app = app_manager.applications.get(app_name)

    for knob in imageio_writes["knobs"]:
        if knob["name"] == "file_type":
            representation = knob["value"]

    host_name = os.environ.get("AVALON_APP")
    try:
        data.update({
            "app": app.host_name,
            "app": host_name,
            "imageio_writes": imageio_writes,
            "representation": representation,
        })

@@ -446,6 +446,8 @@ class ExporterReviewMov(ExporterReview):
        return path

    def generate_mov(self, farm=False, **kwargs):
        reformat_node_add = kwargs["reformat_node_add"]
        reformat_node_config = kwargs["reformat_node_config"]
        bake_viewer_process = kwargs["bake_viewer_process"]
        bake_viewer_input_process_node = kwargs[
            "bake_viewer_input_process"]

@@ -483,6 +485,30 @@ class ExporterReviewMov(ExporterReview):
            self.previous_node = r_node
            self.log.debug("Read... `{}`".format(self._temp_nodes[subset]))

            # add reformat node
            if reformat_node_add:
                # append reformated tag
                add_tags.append("reformated")

                rf_node = nuke.createNode("Reformat")
                for kn_conf in reformat_node_config:
                    _type = kn_conf["type"]
                    k_name = str(kn_conf["name"])
                    k_value = kn_conf["value"]

                    # to remove unicode as nuke doesn't like it
                    if _type == "string":
                        k_value = str(kn_conf["value"])

                    rf_node[k_name].setValue(k_value)

                # connect
                rf_node.setInput(0, self.previous_node)
                self._temp_nodes[subset].append(rf_node)
                self.previous_node = rf_node
                self.log.debug(
                    "Reformat... `{}`".format(self._temp_nodes[subset]))

            # only create colorspace baking if toggled on
            if bake_viewer_process:
                if bake_viewer_input_process_node:

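A self-contained sketch of how the knob list consumed above could be applied. The config shape ("type", "name", "value") mirrors the `reformat_node_config` entries added to the Nuke settings later in this diff; the `_FakeReformat` stand-in replaces `nuke.createNode("Reformat")` only so the loop can run outside Nuke and is not part of the change.

    reformat_node_config = [
        {"type": "string", "name": "type", "value": "to format"},
        {"type": "string", "name": "format", "value": "HD_1080"},
        {"type": "bool", "name": "black_outside", "value": True},
    ]

    class _Knob(object):
        def setValue(self, value):
            self.value = value

    class _FakeReformat(object):
        """Stand-in for a Nuke Reformat node: node[knob].setValue(value)."""
        def __init__(self):
            self.knobs = {}

        def __getitem__(self, name):
            return self.knobs.setdefault(name, _Knob())

    rf_node = _FakeReformat()
    for kn_conf in reformat_node_config:
        k_value = kn_conf["value"]
        if kn_conf["type"] == "string":
            # same unicode guard as in the plugin code above
            k_value = str(k_value)
        rf_node[str(kn_conf["name"])].setValue(k_value)

    print(rf_node["format"].value)  # HD_1080
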
@@ -1,4 +1,5 @@
import os
import re
import pyblish.api
import openpype
from openpype.hosts.nuke.api import plugin

@@ -25,6 +26,7 @@ class ExtractReviewDataMov(openpype.api.Extractor):
    def process(self, instance):
        families = instance.data["families"]
        task_type = instance.context.data["taskType"]
        subset = instance.data["subset"]
        self.log.info("Creating staging dir...")

        if "representations" not in instance.data:

@@ -46,6 +48,7 @@ class ExtractReviewDataMov(openpype.api.Extractor):
        for o_name, o_data in self.outputs.items():
            f_families = o_data["filter"]["families"]
            f_task_types = o_data["filter"]["task_types"]
            f_subsets = o_data["filter"]["sebsets"]

            # test if family found in context
            test_families = any([

@@ -69,11 +72,25 @@ class ExtractReviewDataMov(openpype.api.Extractor):
                bool(not f_task_types)
            ])

            # test subsets from filter
            test_subsets = any([
                # check if any of subset filter inputs
                # converted to regex patern is not found in subset
                # we keep strict case sensitivity
                bool(next((
                    s for s in f_subsets
                    if re.search(re.compile(s), subset)
                ), None)),
                # but if no subsets were set then make this acuntable too
                bool(not f_subsets)
            ])

            # we need all filters to be positive for this
            # preset to be activated
            test_all = all([
                test_families,
                test_task_types
                test_task_types,
                test_subsets
            ])

            # if it is not positive then skip this preset

@@ -120,6 +137,13 @@ class ExtractReviewDataMov(openpype.api.Extractor):
        if generated_repres:
            # assign to representations
            instance.data["representations"] += generated_repres
        else:
            instance.data["families"].remove("review")
            self.log.info((
                "Removing `review` from families. "
                "Not available baking profile."
            ))
            self.log.debug(instance.data["families"])

        self.log.debug(
            "_ representations: {}".format(

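A hedged, standalone illustration of the subset filter added above: an output preset passes when any of its patterns matches the subset name (case sensitive), or when no subset filter is set at all. The helper name and sample values are made up for the example.

    import re

    def subset_passes(f_subsets, subset):
        return any([
            bool(next(
                (s for s in f_subsets if re.search(re.compile(s), subset)),
                None
            )),
            bool(not f_subsets)
        ])

    print(subset_passes(["^renderMain"], "renderMain_beauty"))   # True
    print(subset_passes(["^renderMain"], "RenderMain_beauty"))   # False, case sensitive
    print(subset_passes([], "anySubset"))                        # True, empty filter passes
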
@@ -952,7 +952,7 @@ class BuildWorkfile:
        Returns:
            (dict): preset per entered task name
        """
        host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1]
        host_name = os.environ["AVALON_APP"]
        project_settings = get_project_settings(
            avalon.io.Session["AVALON_PROJECT"]
        )

@@ -1,37 +0,0 @@
[State]
Type=Enum
Items=Global Enabled;Opt-In;Disabled
Category=Options
CategoryOrder=0
CategoryIndex=0
Label=State
Default=Global Enabled
Description=How this event plug-in should respond to events. If Global, all jobs and slaves will trigger the events for this plugin. If Opt-In, jobs and slaves can choose to trigger the events for this plugin. If Disabled, no events are triggered for this plugin.

[PythonSearchPaths]
Type=MultiLineMultiFolder
Label=Additional Python Search Paths
Category=Options
CategoryOrder=0
CategoryIndex=1
Default=
Description=The list of paths to append to the PYTHONPATH environment variable. This allows the Python job to find custom modules in non-standard locations.

[LoggingLevel]
Type=Enum
Label=Logging Level
Category=Options
CategoryOrder=0
CategoryIndex=2
Items=DEBUG;INFO;WARNING;ERROR
Default=DEBUG
Description=Logging level where printing will start.

[OpenPypeExecutable]
Type=multilinemultifilename
Label=Path to OpenPype executable
Category=Job Plugins
CategoryOrder=1
CategoryIndex=1
Default=
Description=

@@ -1,191 +0,0 @@
import Deadline.Events
import Deadline.Scripting


def GetDeadlineEventListener():
    return OpenPypeEventListener()


def CleanupDeadlineEventListener(eventListener):
    eventListener.Cleanup()


class OpenPypeEventListener(Deadline.Events.DeadlineEventListener):
    """
    Called on every Deadline plugin event, used for injecting OpenPype
    environment variables into rendering process.

    Expects that job already contains env vars:
        AVALON_PROJECT
        AVALON_ASSET
        AVALON_TASK
        AVALON_APP_NAME
    Without these only global environment would be pulled from OpenPype

    Configure 'Path to OpenPype executable dir' in Deadlines
    'Tools > Configure Events > openpype '
    Only directory path is needed.

    """
    def __init__(self):
        self.OnJobSubmittedCallback += self.OnJobSubmitted
        self.OnJobStartedCallback += self.OnJobStarted
        self.OnJobFinishedCallback += self.OnJobFinished
        self.OnJobRequeuedCallback += self.OnJobRequeued
        self.OnJobFailedCallback += self.OnJobFailed
        self.OnJobSuspendedCallback += self.OnJobSuspended
        self.OnJobResumedCallback += self.OnJobResumed
        self.OnJobPendedCallback += self.OnJobPended
        self.OnJobReleasedCallback += self.OnJobReleased
        self.OnJobDeletedCallback += self.OnJobDeleted
        self.OnJobErrorCallback += self.OnJobError
        self.OnJobPurgedCallback += self.OnJobPurged

        self.OnHouseCleaningCallback += self.OnHouseCleaning
        self.OnRepositoryRepairCallback += self.OnRepositoryRepair

        self.OnSlaveStartedCallback += self.OnSlaveStarted
        self.OnSlaveStoppedCallback += self.OnSlaveStopped
        self.OnSlaveIdleCallback += self.OnSlaveIdle
        self.OnSlaveRenderingCallback += self.OnSlaveRendering
        self.OnSlaveStartingJobCallback += self.OnSlaveStartingJob
        self.OnSlaveStalledCallback += self.OnSlaveStalled

        self.OnIdleShutdownCallback += self.OnIdleShutdown
        self.OnMachineStartupCallback += self.OnMachineStartup
        self.OnThermalShutdownCallback += self.OnThermalShutdown
        self.OnMachineRestartCallback += self.OnMachineRestart

    def Cleanup(self):
        del self.OnJobSubmittedCallback
        del self.OnJobStartedCallback
        del self.OnJobFinishedCallback
        del self.OnJobRequeuedCallback
        del self.OnJobFailedCallback
        del self.OnJobSuspendedCallback
        del self.OnJobResumedCallback
        del self.OnJobPendedCallback
        del self.OnJobReleasedCallback
        del self.OnJobDeletedCallback
        del self.OnJobErrorCallback
        del self.OnJobPurgedCallback

        del self.OnHouseCleaningCallback
        del self.OnRepositoryRepairCallback

        del self.OnSlaveStartedCallback
        del self.OnSlaveStoppedCallback
        del self.OnSlaveIdleCallback
        del self.OnSlaveRenderingCallback
        del self.OnSlaveStartingJobCallback
        del self.OnSlaveStalledCallback

        del self.OnIdleShutdownCallback
        del self.OnMachineStartupCallback
        del self.OnThermalShutdownCallback
        del self.OnMachineRestartCallback

    def set_openpype_executable_path(self, job):
        """
        Sets configurable OpenPypeExecutable value to job extra infos.

        GlobalJobPreLoad takes this value, pulls env vars for each task
        from specific worker itself. GlobalJobPreLoad is not easily
        configured, so we are configuring Event itself.
        """
        openpype_execs = self.GetConfigEntryWithDefault("OpenPypeExecutable",
                                                        "")
        job.SetJobExtraInfoKeyValue("openpype_executables", openpype_execs)

        Deadline.Scripting.RepositoryUtils.SaveJob(job)

    def updateFtrackStatus(self, job, statusName, createIfMissing=False):
        """Updates version status on ftrack"""
        pass

    def OnJobSubmitted(self, job):
        # self.LogInfo("OnJobSubmitted LOGGING")
        # for 1st time submit
        self.set_openpype_executable_path(job)
        self.updateFtrackStatus(job, "Render Queued")

    def OnJobStarted(self, job):
        # self.LogInfo("OnJobStarted")
        self.set_openpype_executable_path(job)
        self.updateFtrackStatus(job, "Rendering")

    def OnJobFinished(self, job):
        # self.LogInfo("OnJobFinished")
        self.updateFtrackStatus(job, "Artist Review")

    def OnJobRequeued(self, job):
        # self.LogInfo("OnJobRequeued LOGGING")
        self.set_openpype_executable_path(job)

    def OnJobFailed(self, job):
        pass

    def OnJobSuspended(self, job):
        # self.LogInfo("OnJobSuspended LOGGING")
        self.updateFtrackStatus(job, "Render Queued")

    def OnJobResumed(self, job):
        # self.LogInfo("OnJobResumed LOGGING")
        self.set_openpype_executable_path(job)
        self.updateFtrackStatus(job, "Rendering")

    def OnJobPended(self, job):
        # self.LogInfo("OnJobPended LOGGING")
        pass

    def OnJobReleased(self, job):
        pass

    def OnJobDeleted(self, job):
        pass

    def OnJobError(self, job, task, report):
        # self.LogInfo("OnJobError LOGGING")
        pass

    def OnJobPurged(self, job):
        pass

    def OnHouseCleaning(self):
        pass

    def OnRepositoryRepair(self, job, *args):
        pass

    def OnSlaveStarted(self, job):
        # self.LogInfo("OnSlaveStarted LOGGING")
        pass

    def OnSlaveStopped(self, job):
        pass

    def OnSlaveIdle(self, job):
        pass

    def OnSlaveRendering(self, host_name, job):
        # self.LogInfo("OnSlaveRendering LOGGING")
        pass

    def OnSlaveStartingJob(self, host_name, job):
        # self.LogInfo("OnSlaveStartingJob LOGGING")
        self.set_openpype_executable_path(job)

    def OnSlaveStalled(self, job):
        pass

    def OnIdleShutdown(self, job):
        pass

    def OnMachineStartup(self, job):
        pass

    def OnThermalShutdown(self, job):
        pass

    def OnMachineRestart(self, job):
        pass

@@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
import os
import tempfile
import time
from datetime import datetime
import subprocess
import json
import platform
import uuid
from Deadline.Scripting import RepositoryUtils, FileUtils


@@ -36,9 +37,11 @@ def inject_openpype_environment(deadlinePlugin):
    print("--- OpenPype executable: {}".format(openpype_app))

    # tempfile.TemporaryFile cannot be used because of locking
    export_url = os.path.join(tempfile.gettempdir(),
                              time.strftime('%Y%m%d%H%M%S'),
                              'env.json')  # add HHMMSS + delete later
    temp_file_name = "{}_{}.json".format(
        datetime.utcnow().strftime('%Y%m%d%H%M%S%f'),
        str(uuid.uuid1())
    )
    export_url = os.path.join(tempfile.gettempdir(), temp_file_name)
    print(">>> Temporary path: {}".format(export_url))

    args = [

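A small sketch of the collision-safe export path built above; the timestamp-plus-UUID file name comes from the hunk, while the wrapper function is only illustrative.

    import os
    import tempfile
    import uuid
    from datetime import datetime

    def build_export_url():
        temp_file_name = "{}_{}.json".format(
            datetime.utcnow().strftime('%Y%m%d%H%M%S%f'),
            str(uuid.uuid1())
        )
        return os.path.join(tempfile.gettempdir(), temp_file_name)

    # Each call yields a unique path, so parallel Deadline tasks on one worker
    # no longer race for the same env.json file.
    print(build_export_url())
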
@@ -20,11 +20,16 @@ from openpype_modules.ftrack.lib import (
    query_custom_attributes,
    CUST_ATTR_ID_KEY,
    CUST_ATTR_AUTO_SYNC,
    FPS_KEYS,

    avalon_sync,

    BaseEvent
)
from openpype_modules.ftrack.lib.avalon_sync import (
    convert_to_fps,
    InvalidFpsValue
)
from openpype.lib import CURRENT_DOC_SCHEMAS


@@ -1149,12 +1154,31 @@ class SyncToAvalonEvent(BaseEvent):
                "description": ftrack_ent["description"]
            }
        }
        invalid_fps_items = []
        cust_attrs = self.get_cust_attr_values(ftrack_ent)
        for key, val in cust_attrs.items():
            if key.startswith("avalon_"):
                continue

            if key in FPS_KEYS:
                try:
                    val = convert_to_fps(val)
                except InvalidFpsValue:
                    invalid_fps_items.append((ftrack_ent["id"], val))
                    continue

            final_entity["data"][key] = val

        if invalid_fps_items:
            fps_msg = (
                "These entities have invalid fps value in custom attributes"
            )
            items = []
            for entity_id, value in invalid_fps_items:
                ent_path = self.get_ent_path(entity_id)
                items.append("{} - \"{}\"".format(ent_path, value))
            self.report_items["error"][fps_msg] = items

        _mongo_id_str = cust_attrs.get(CUST_ATTR_ID_KEY)
        if _mongo_id_str:
            try:

@@ -2155,11 +2179,19 @@ class SyncToAvalonEvent(BaseEvent):
            )

            convert_types_by_id[attr_id] = convert_type
            default_value = attr["default"]
            if key in FPS_KEYS:
                try:
                    default_value = convert_to_fps(default_value)
                except InvalidFpsValue:
                    pass

            entities_dict[ftrack_project_id]["hier_attrs"][key] = (
                attr["default"]
            )

        # PREPARE DATA BEFORE THIS
        invalid_fps_items = []
        avalon_hier = []
        for item in values:
            value = item["value"]

@@ -2173,8 +2205,25 @@ class SyncToAvalonEvent(BaseEvent):

            if convert_type:
                value = convert_type(value)

            if key in FPS_KEYS:
                try:
                    value = convert_to_fps(value)
                except InvalidFpsValue:
                    invalid_fps_items.append((entity_id, value))
                    continue
            entities_dict[entity_id]["hier_attrs"][key] = value

        if invalid_fps_items:
            fps_msg = (
                "These entities have invalid fps value in custom attributes"
            )
            items = []
            for entity_id, value in invalid_fps_items:
                ent_path = self.get_ent_path(entity_id)
                items.append("{} - \"{}\"".format(ent_path, value))
            self.report_items["error"][fps_msg] = items

        # Get dictionary with not None hierarchical values to pull to childs
        project_values = {}
        for key, value in (

@@ -11,6 +11,7 @@ from openpype_modules.ftrack.lib import (
    CUST_ATTR_TOOLS,
    CUST_ATTR_APPLICATIONS,
    CUST_ATTR_INTENT,
    FPS_KEYS,

    default_custom_attributes_definition,
    app_definitions_from_app_manager,

@@ -519,20 +520,28 @@ class CustomAttributes(BaseAction):
            self.show_message(event, msg)

    def process_attribute(self, data):
        existing_attrs = self.session.query(
            "CustomAttributeConfiguration"
        ).all()
        existing_attrs = self.session.query((
            "select is_hierarchical, key, type, entity_type, object_type_id"
            " from CustomAttributeConfiguration"
        )).all()
        matching = []
        is_hierarchical = data.get("is_hierarchical", False)
        for attr in existing_attrs:
            if (
                attr["key"] != data["key"] or
                attr["type"]["name"] != data["type"]["name"]
                is_hierarchical != attr["is_hierarchical"]
                or attr["key"] != data["key"]
            ):
                continue

            if data.get("is_hierarchical") is True:
                if attr["is_hierarchical"] is True:
                    matching.append(attr)
            if attr["type"]["name"] != data["type"]["name"]:
                if data["key"] in FPS_KEYS and attr["type"]["name"] == "text":
                    self.log.info("Kept 'fps' as text custom attribute.")
                    return
                continue

            if is_hierarchical:
                matching.append(attr)

            elif "object_type_id" in data:
                if (
                    attr["entity_type"] == data["entity_type"] and

@@ -4,7 +4,8 @@ from .constants import (
    CUST_ATTR_GROUP,
    CUST_ATTR_TOOLS,
    CUST_ATTR_APPLICATIONS,
    CUST_ATTR_INTENT
    CUST_ATTR_INTENT,
    FPS_KEYS
)
from .settings import (
    get_ftrack_event_mongo_info

@@ -30,6 +31,8 @@ __all__ = (
    "CUST_ATTR_GROUP",
    "CUST_ATTR_TOOLS",
    "CUST_ATTR_APPLICATIONS",
    "CUST_ATTR_INTENT",
    "FPS_KEYS",

    "get_ftrack_event_mongo_info",

@@ -2,6 +2,9 @@ import re
import json
import collections
import copy
import numbers

import six

from avalon.api import AvalonMongoDB

@@ -14,7 +17,7 @@ from openpype.api import (
)
from openpype.lib import ApplicationManager

from .constants import CUST_ATTR_ID_KEY
from .constants import CUST_ATTR_ID_KEY, FPS_KEYS
from .custom_attributes import get_openpype_attr, query_custom_attributes

from bson.objectid import ObjectId

@@ -33,6 +36,106 @@ CURRENT_DOC_SCHEMAS = {
}


class InvalidFpsValue(Exception):
    pass


def is_string_number(value):
    """Can string value be converted to number (float)."""
    if not isinstance(value, six.string_types):
        raise TypeError("Expected {} got {}".format(
            ", ".join(str(t) for t in six.string_types), str(type(value))
        ))
    if value == ".":
        return False

    if value.startswith("."):
        value = "0" + value
    elif value.endswith("."):
        value = value + "0"

    if re.match(r"^\d+(\.\d+)?$", value) is None:
        return False
    return True


def convert_to_fps(source_value):
    """Convert value into fps value.

    Non string values are kept untouched. String is tried to convert.
    Valid values:
        "1000"
        "1000.05"
        "1000,05"
        ",05"
        ".05"
        "1000,"
        "1000."
        "1000/1000"
        "1000.05/1000"
        "1000/1000.05"
        "1000.05/1000.05"
        "1000,05/1000"
        "1000/1000,05"
        "1000,05/1000,05"

    Invalid values:
        "/"
        "/1000"
        "1000/"
        ","
        "."
        ...any other string

    Returns:
        float: Converted value.

    Raises:
        InvalidFpsValue: When value can't be converted to float.
    """
    if not isinstance(source_value, six.string_types):
        if isinstance(source_value, numbers.Number):
            return float(source_value)
        return source_value

    value = source_value.strip().replace(",", ".")
    if not value:
        raise InvalidFpsValue("Got empty value")

    subs = value.split("/")
    if len(subs) == 1:
        str_value = subs[0]
        if not is_string_number(str_value):
            raise InvalidFpsValue(
                "Value \"{}\" can't be converted to number.".format(value)
            )
        return float(str_value)

    elif len(subs) == 2:
        divident, divisor = subs
        if not divident or not is_string_number(divident):
            raise InvalidFpsValue(
                "Divident value \"{}\" can't be converted to number".format(
                    divident
                )
            )

        if not divisor or not is_string_number(divisor):
            raise InvalidFpsValue(
                "Divisor value \"{}\" can't be converted to number".format(
                    divident
                )
            )
        divisor_float = float(divisor)
        if divisor_float == 0.0:
            raise InvalidFpsValue("Can't divide by zero")
        return float(divident) / divisor_float

    raise InvalidFpsValue(
        "Value can't be converted to number \"{}\"".format(source_value)
    )


def create_chunks(iterable, chunk_size=None):
    """Separate iterable into multiple chunks by size.

@@ -980,6 +1083,7 @@ class SyncEntitiesFactory:
            sync_ids
        )

        invalid_fps_items = []
        for item in items:
            entity_id = item["entity_id"]
            attr_id = item["configuration_id"]

@@ -992,8 +1096,24 @@ class SyncEntitiesFactory:
            value = item["value"]
            if convert_type:
                value = convert_type(value)

            if key in FPS_KEYS:
                try:
                    value = convert_to_fps(value)
                except InvalidFpsValue:
                    invalid_fps_items.append((entity_id, value))
            self.entities_dict[entity_id][store_key][key] = value

        if invalid_fps_items:
            fps_msg = (
                "These entities have invalid fps value in custom attributes"
            )
            items = []
            for entity_id, value in invalid_fps_items:
                ent_path = self.get_ent_path(entity_id)
                items.append("{} - \"{}\"".format(ent_path, value))
            self.report_items["error"][fps_msg] = items

        # process hierarchical attributes
        self.set_hierarchical_attribute(
            hier_attrs, sync_ids, cust_attr_type_name_by_id

@@ -1026,8 +1146,15 @@ class SyncEntitiesFactory:
            if key.startswith("avalon_"):
                store_key = "avalon_attrs"

            default_value = attr["default"]
            if key in FPS_KEYS:
                try:
                    default_value = convert_to_fps(default_value)
                except InvalidFpsValue:
                    pass

            self.entities_dict[self.ft_project_id][store_key][key] = (
                attr["default"]
                default_value
            )

            # Add attribute ids to entities dictionary

@@ -1069,6 +1196,7 @@ class SyncEntitiesFactory:
            True
        )

        invalid_fps_items = []
        avalon_hier = []
        for item in items:
            value = item["value"]

@@ -1088,6 +1216,13 @@ class SyncEntitiesFactory:

            entity_id = item["entity_id"]
            key = attribute_key_by_id[attr_id]
            if key in FPS_KEYS:
                try:
                    value = convert_to_fps(value)
                except InvalidFpsValue:
                    invalid_fps_items.append((entity_id, value))
                    continue

            if key.startswith("avalon_"):
                store_key = "avalon_attrs"
                avalon_hier.append(key)

@@ -1095,6 +1230,16 @@ class SyncEntitiesFactory:
                store_key = "hier_attrs"
            self.entities_dict[entity_id][store_key][key] = value

        if invalid_fps_items:
            fps_msg = (
                "These entities have invalid fps value in custom attributes"
            )
            items = []
            for entity_id, value in invalid_fps_items:
                ent_path = self.get_ent_path(entity_id)
                items.append("{} - \"{}\"".format(ent_path, value))
            self.report_items["error"][fps_msg] = items

        # Get dictionary with not None hierarchical values to pull to childs
        top_id = self.ft_project_id
        project_values = {}

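A short usage sketch for the helpers added above, following the valid and invalid inputs listed in the `convert_to_fps` docstring. It assumes the function and exception are importable from the module in this hunk; the sample values are illustrative.

    print(convert_to_fps(25))            # 25.0 - numbers pass through as float
    print(convert_to_fps("25"))          # 25.0
    print(convert_to_fps("23,976"))      # 23.976 - comma treated as decimal point
    print(convert_to_fps("24000/1001"))  # 23.976023976023978 - fraction form

    try:
        convert_to_fps("not-a-number")
    except InvalidFpsValue as exc:
        print("invalid fps:", exc)
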
@@ -12,3 +12,9 @@ CUST_ATTR_APPLICATIONS = "applications"
CUST_ATTR_TOOLS = "tools_env"
# Intent custom attribute name
CUST_ATTR_INTENT = "intent"

FPS_KEYS = {
    "fps",
    # For development purposes
    "fps_string"
}

@@ -399,15 +399,6 @@ class CreatedInstance:
        self._data["active"] = data.get("active", True)
        self._data["creator_identifier"] = creator.identifier

        # QUESTION handle version of instance here or in creator?
        version = None
        if not new:
            version = data.get("version")

        if version is None:
            version = 1
        self._data["version"] = version

        # Pop from source data all keys that are defined in `_data` before
        # this moment and through their values away
        # - they should be the same and if are not then should not change

@@ -34,7 +34,12 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
        self.log.info("subset {}".format(instance.data['subset']))

        # skip crypto passes.
        if 'crypto' in instance.data['subset']:
        # TODO: This is just a quick fix and has its own side-effects - it is
        # affecting every subset name with `crypto` in its name.
        # This must be solved properly, maybe using tags on
        # representation that can be determined much earlier and
        # with better precision.
        if 'crypto' in instance.data['subset'].lower():
            self.log.info("Skipping crypto passes.")
            return

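The TODO above notes this is a plain substring match on the subset name; with the added `.lower()` it now also catches capitalised variants. A quick illustration with made-up subset names:

    for subset in ("renderCryptomatte", "renderCRYPTO_main", "renderBeauty"):
        print(subset, "->", "skipped" if "crypto" in subset.lower() else "processed")
    # renderCryptomatte -> skipped
    # renderCRYPTO_main -> skipped
    # renderBeauty -> processed
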
@@ -1171,6 +1171,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
        self.log.debug("input_width: `{}`".format(input_width))
        self.log.debug("input_height: `{}`".format(input_height))

        reformat_in_baking = bool("reformated" in new_repre["tags"])
        self.log.debug("reformat_in_baking: `{}`".format(reformat_in_baking))

        # Use instance resolution if output definition has not set it.
        if output_width is None or output_height is None:
            output_width = temp_data["resolution_width"]

@@ -1182,6 +1185,17 @@ class ExtractReview(pyblish.api.InstancePlugin):
            output_width = input_width
            output_height = input_height

        if reformat_in_baking:
            self.log.debug((
                "Using resolution from input. It is already "
                "reformated from baking process"
            ))
            output_width = input_width
            output_height = input_height
            pixel_aspect = 1
            new_repre["resolutionWidth"] = input_width
            new_repre["resolutionHeight"] = input_height

        output_width = int(output_width)
        output_height = int(output_height)

@@ -589,6 +589,12 @@
        12,
        255
    ],
    "vrayscene_layer": [
        255,
        150,
        12,
        255
    ],
    "yeticache": [
        99,
        206,

@@ -116,13 +116,42 @@
    "baking": {
        "filter": {
            "task_types": [],
            "families": []
            "families": [],
            "sebsets": []
        },
        "extension": "mov",
        "viewer_process_override": "",
        "bake_viewer_process": true,
        "bake_viewer_input_process": true,
        "add_tags": []
        "add_tags": [],
        "reformat_node_add": false,
        "reformat_node_config": [
            {
                "type": "string",
                "name": "type",
                "value": "to format"
            },
            {
                "type": "string",
                "name": "format",
                "value": "HD_1080"
            },
            {
                "type": "string",
                "name": "filter",
                "value": "Lanczos6"
            },
            {
                "type": "bool",
                "name": "black_outside",
                "value": true
            },
            {
                "type": "bool",
                "name": "pbb",
                "value": false
            }
        ]
    }
}
},

@@ -584,8 +584,9 @@ class DictConditionalEntity(ItemEntity):

        self.enum_entity.update_default_value(enum_value)
        for children_by_key in self.non_gui_children.values():
            value_copy = copy.deepcopy(value)
            for key, child_obj in children_by_key.items():
                child_value = value.get(key, NOT_SET)
                child_value = value_copy.get(key, NOT_SET)
                child_obj.update_default_value(child_value)

    def update_studio_value(self, value):

@@ -620,8 +621,9 @@ class DictConditionalEntity(ItemEntity):

        self.enum_entity.update_studio_value(enum_value)
        for children_by_key in self.non_gui_children.values():
            value_copy = copy.deepcopy(value)
            for key, child_obj in children_by_key.items():
                child_value = value.get(key, NOT_SET)
                child_value = value_copy.get(key, NOT_SET)
                child_obj.update_studio_value(child_value)

    def update_project_value(self, value):

@@ -656,8 +658,9 @@ class DictConditionalEntity(ItemEntity):

        self.enum_entity.update_project_value(enum_value)
        for children_by_key in self.non_gui_children.values():
            value_copy = copy.deepcopy(value)
            for key, child_obj in children_by_key.items():
                child_value = value.get(key, NOT_SET)
                child_value = value_copy.get(key, NOT_SET)
                child_obj.update_project_value(child_value)

    def _discard_changes(self, on_change_trigger):

@@ -75,6 +75,11 @@
    "label": "Vray Proxy:",
    "key": "vrayproxy"
},
{
    "type": "color",
    "label": "Vray Scene:",
    "key": "vrayscene_layer"
},
{
    "type": "color",
    "label": "Yeti Cache:",

@@ -195,6 +195,12 @@
    "label": "Families",
    "type": "list",
    "object_type": "text"
},
{
    "key": "sebsets",
    "label": "Subsets",
    "type": "list",
    "object_type": "text"
}
]
},

@@ -226,6 +232,121 @@
    "label": "Add additional tags to representations",
    "type": "list",
    "object_type": "text"
},
{
    "type": "separator"
},
{
    "type": "boolean",
    "key": "reformat_node_add",
    "label": "Add Reformat Node",
    "default": false
},
{
    "type": "collapsible-wrap",
    "label": "Reformat Node Knobs",
    "collapsible": true,
    "collapsed": false,
    "children": [
        {
            "type": "list",
            "key": "reformat_node_config",
            "object_type": {
                "type": "dict-conditional",
                "enum_key": "type",
                "enum_label": "Type",
                "enum_children": [
                    {
                        "key": "string",
                        "label": "String",
                        "children": [
                            {
                                "type": "text",
                                "key": "name",
                                "label": "Name"
                            },
                            {
                                "type": "text",
                                "key": "value",
                                "label": "Value"
                            }
                        ]
                    },
                    {
                        "key": "bool",
                        "label": "Boolean",
                        "children": [
                            {
                                "type": "text",
                                "key": "name",
                                "label": "Name"
                            },
                            {
                                "type": "boolean",
                                "key": "value",
                                "label": "Value"
                            }
                        ]
                    },
                    {
                        "key": "number",
                        "label": "Number",
                        "children": [
                            {
                                "type": "text",
                                "key": "name",
                                "label": "Name"
                            },
                            {
                                "type": "list-strict",
                                "key": "value",
                                "label": "Value",
                                "object_types": [
                                    {
                                        "type": "number",
                                        "key": "number",
                                        "default": 1,
                                        "decimal": 4
                                    }
                                ]
                            }

                        ]
                    },
                    {
                        "key": "list_numbers",
                        "label": "2 Numbers",
                        "children": [
                            {
                                "type": "text",
                                "key": "name",
                                "label": "Name"
                            },
                            {
                                "type": "list-strict",
                                "key": "value",
                                "label": "Value",
                                "object_types": [
                                    {
                                        "type": "number",
                                        "key": "x",
                                        "default": 1,
                                        "decimal": 4
                                    },
                                    {
                                        "type": "number",
                                        "key": "y",
                                        "default": 1,
                                        "decimal": 4
                                    }
                                ]
                            }
                        ]
                    }
                ]
            }
        }
    ]
}
]
}

@@ -3,9 +3,11 @@ from collections import defaultdict

from Qt import QtWidgets, QtCore

# TODO: expose this better in avalon core
from avalon.tools import lib
from avalon.tools.models import TreeModel
from openpype.tools.utils.models import TreeModel
from openpype.tools.utils.lib import (
    preserve_expanded_rows,
    preserve_selection,
)

from .models import (
    AssetModel,

@@ -88,8 +90,8 @@ class AssetOutliner(QtWidgets.QWidget):
        """Add all items from the current scene"""

        items = []
        with lib.preserve_expanded_rows(self.view):
            with lib.preserve_selection(self.view):
        with preserve_expanded_rows(self.view):
            with preserve_selection(self.view):
                self.clear()
                nodes = commands.get_all_asset_nodes()
                items = commands.create_items_from_nodes(nodes)

@@ -100,8 +102,8 @@ class AssetOutliner(QtWidgets.QWidget):
    def get_selected_assets(self):
        """Add all selected items from the current scene"""

        with lib.preserve_expanded_rows(self.view):
            with lib.preserve_selection(self.view):
        with preserve_expanded_rows(self.view):
            with preserve_selection(self.view):
                self.clear()
                nodes = commands.get_selected_nodes()
                items = commands.create_items_from_nodes(nodes)

@@ -8,7 +8,7 @@ from avalon import api, io, style, schema
from avalon.vendor import qtawesome

from avalon.lib import HeroVersionType
from avalon.tools.models import TreeModel, Item
from openpype.tools.utils.models import TreeModel, Item

from .lib import (
    get_site_icons,

@@ -7,9 +7,13 @@ from Qt import QtWidgets, QtCore
from avalon import io, api, style
from avalon.vendor import qtawesome
from avalon.lib import HeroVersionType
from avalon.tools import lib as tools_lib

from openpype.modules import ModulesManager
from openpype.tools.utils.lib import (
    get_progress_for_repre,
    iter_model_rows,
    format_version
)

from .switch_dialog import SwitchAssetDialog
from .model import InventoryModel

@@ -20,12 +24,12 @@ DEFAULT_COLOR = "#fb9c15"
log = logging.getLogger("SceneInventory")


class SceneInvetoryView(QtWidgets.QTreeView):
class SceneInventoryView(QtWidgets.QTreeView):
    data_changed = QtCore.Signal()
    hierarchy_view_changed = QtCore.Signal(bool)

    def __init__(self, parent=None):
        super(SceneInvetoryView, self).__init__(parent=parent)
        super(SceneInventoryView, self).__init__(parent=parent)

        # view settings
        self.setIndentation(12)

@@ -373,7 +377,7 @@ class SceneInvetoryView(QtWidgets.QTreeView):
            if not repre_doc:
                continue

            progress = tools_lib.get_progress_for_repre(
            progress = get_progress_for_repre(
                repre_doc,
                active_site,
                remote_site

@@ -544,7 +548,7 @@ class SceneInvetoryView(QtWidgets.QTreeView):
            "toggle": selection_model.Toggle,
        }[options.get("mode", "select")]

        for item in tools_lib.iter_model_rows(model, 0):
        for item in iter_model_rows(model, 0):
            item = item.data(InventoryModel.ItemRole)
            if item.get("isGroupNode"):
                continue

@@ -704,7 +708,7 @@ class SceneInvetoryView(QtWidgets.QTreeView):
        labels = []
        for version in all_versions:
            is_hero = version["type"] == "hero_version"
            label = tools_lib.format_version(version["name"], is_hero)
            label = format_version(version["name"], is_hero)
            labels.append(label)
            versions_by_label[label] = version["name"]

@@ -792,3 +796,40 @@ class SceneInvetoryView(QtWidgets.QTreeView):
        ).format(version_str)
        dialog.setText(msg)
        dialog.exec_()

    def update_all(self):
        """Update all items that are currently 'outdated' in the view"""
        # Get the source model through the proxy model
        model = self.model().sourceModel()

        # Get all items from outdated groups
        outdated_items = []
        for index in iter_model_rows(model,
                                     column=0,
                                     include_root=False):
            item = index.data(model.ItemRole)

            if not item.get("isGroupNode"):
                continue

            # Only the group nodes contain the "highest_version" data and as
            # such we find only the groups and take its children.
            if not model.outdated(item):
                continue

            # Collect all children which we want to update
            children = item.children()
            outdated_items.extend(children)

        if not outdated_items:
            log.info("Nothing to update.")
            return

        # Trigger update to latest
        for item in outdated_items:
            try:
                api.update(item, -1)
            except AssertionError:
                self._show_version_error_dialog(None, [item])
                log.warning("Update failed", exc_info=True)
        self.data_changed.emit()

@@ -18,7 +18,7 @@ from .model import (
    InventoryModel,
    FilterProxyModel
)
from .view import SceneInvetoryView
from .view import SceneInventoryView


module = sys.modules[__name__]

@@ -54,14 +54,21 @@ class SceneInventoryWindow(QtWidgets.QDialog):
        outdated_only_checkbox.setToolTip("Show outdated files only")
        outdated_only_checkbox.setChecked(False)

        icon = qtawesome.icon("fa.arrow-up", color="white")
        update_all_button = QtWidgets.QPushButton(self)
        update_all_button.setToolTip("Update all outdated to latest version")
        update_all_button.setIcon(icon)

        icon = qtawesome.icon("fa.refresh", color="white")
        refresh_button = QtWidgets.QPushButton(self)
        update_all_button.setToolTip("Refresh")
        refresh_button.setIcon(icon)

        control_layout = QtWidgets.QHBoxLayout()
        control_layout.addWidget(filter_label)
        control_layout.addWidget(text_filter)
        control_layout.addWidget(outdated_only_checkbox)
        control_layout.addWidget(update_all_button)
        control_layout.addWidget(refresh_button)

        # endregion control

@@ -73,7 +80,7 @@ class SceneInventoryWindow(QtWidgets.QDialog):
        proxy.setDynamicSortFilter(True)
        proxy.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)

        view = SceneInvetoryView(self)
        view = SceneInventoryView(self)
        view.setModel(proxy)

        # set some nice default widths for the view

@@ -98,11 +105,13 @@ class SceneInventoryWindow(QtWidgets.QDialog):
            self._on_outdated_state_change
        )
        view.hierarchy_view_changed.connect(
            self._on_hiearchy_view_change
            self._on_hierarchy_view_change
        )
        view.data_changed.connect(self.refresh)
        refresh_button.clicked.connect(self.refresh)
        update_all_button.clicked.connect(self._on_update_all)

        self._update_all_button = update_all_button
        self._outdated_only_checkbox = outdated_only_checkbox
        self._view = view
        self._model = model

@@ -146,7 +155,7 @@ class SceneInventoryWindow(QtWidgets.QDialog):
            kwargs["selected"] = self._view._selected
        self._model.refresh(**kwargs)

    def _on_hiearchy_view_change(self, enabled):
    def _on_hierarchy_view_change(self, enabled):
        self._proxy.set_hierarchy_view(enabled)
        self._model.set_hierarchy_view(enabled)

@@ -158,6 +167,9 @@ class SceneInventoryWindow(QtWidgets.QDialog):
            self._outdated_only_checkbox.isChecked()
        )

    def _on_update_all(self):
        self._view.update_all()


def show(root=None, debug=False, parent=None, items=None):
    """Display Scene Inventory GUI

@@ -3,10 +3,10 @@ import sys

import openpype
import pyblish.api
from openpype.tools.utils.host_tools import show_publish


def main(env):
    from avalon.tools import publish
    # Registers pype's Global pyblish plugins
    openpype.install()

@@ -19,7 +19,7 @@ def main(env):
            continue
        pyblish.api.register_plugin_path(path)

    return publish.show()
    return show_publish()


if __name__ == "__main__":

@@ -15,6 +15,7 @@ from .lib import (
    get_warning_pixmap,
    set_style_property,
    DynamicQThread,
    qt_app_context,
)

from .models import (

@@ -39,6 +40,7 @@ __all__ = (
    "get_warning_pixmap",
    "set_style_property",
    "DynamicQThread",
    "qt_app_context",

    "RecursiveSortFilterProxyModel",
)

@@ -3,8 +3,9 @@
It is possible to create `HostToolsHelper` in host implementation or
use singleton approach with global functions (using helper anyway).
"""

import os
import avalon.api
import pyblish.api
from .lib import qt_app_context


@@ -196,10 +197,29 @@ class HostToolsHelper:
        library_loader_tool.refresh()

    def show_publish(self, parent=None):
        """Publish UI."""
        from avalon.tools import publish
        """Try showing the most desirable publish GUI

        publish.show(parent)
        This function cycles through the currently registered
        graphical user interfaces, if any, and presents it to
        the user.
        """

        pyblish_show = self._discover_pyblish_gui()
        return pyblish_show(parent)

    def _discover_pyblish_gui(self):
        """Return the most desirable of the currently registered GUIs"""
        # Prefer last registered
        guis = list(reversed(pyblish.api.registered_guis()))
        for gui in guis:
            try:
                gui = __import__(gui).show
            except (ImportError, AttributeError):
                continue
            else:
                return gui

        raise ImportError("No Pyblish GUI found")

    def get_look_assigner_tool(self, parent):
        """Create, cache and return look assigner tool window."""

@@ -394,3 +414,11 @@ def show_publish(parent=None):

def show_experimental_tools_dialog(parent=None):
    _SingletonPoint.show_tool_by_name("experimental_tools", parent)


def get_pyblish_icon():
    pyblish_dir = os.path.abspath(os.path.dirname(pyblish.api.__file__))
    icon_path = os.path.join(pyblish_dir, "icons", "logo-32x32.svg")
    if os.path.exists(icon_path):
        return icon_path
    return None

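A hedged sketch of how the GUI discovery added above behaves. `pyblish.api.register_gui` and `pyblish.api.registered_guis` are standard pyblish-base calls; `pyblish_lite` is only an example of a GUI package a host might register.

    import pyblish.api

    # Hosts typically register one or more GUIs at startup; the helper above
    # prefers the one registered last.
    pyblish.api.register_gui("pyblish_lite")

    # HostToolsHelper.show_publish(parent) then effectively does:
    #   guis = list(reversed(pyblish.api.registered_guis()))  # ["pyblish_lite", ...]
    #   show = __import__("pyblish_lite").show
    #   show(parent)
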
@@ -1,11 +1,11 @@
import os
import logging

from Qt import QtCore, QtGui
from Qt import QtCore

from avalon import style
from avalon.vendor import qtawesome
from avalon.tools.models import TreeModel, Item
from openpype.tools.utils.models import TreeModel, Item

log = logging.getLogger(__name__)
