Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Merge branch 'develop' into 1175-add-ffmpeg-and-othe-binaries-to-build

Commit a28cf1fc17
69 changed files with 5507 additions and 3891 deletions

@@ -11,7 +11,7 @@ class PrePython2Vendor(PreLaunchHook):
     def execute(self):
         # Prepare vendor dir path
         self.log.info("adding global python 2 vendor")
-        pype_root = os.getenv("OPENPYPE_ROOT")
+        pype_root = os.getenv("OPENPYPE_REPOS_ROOT")
         python_2_vendor = os.path.join(
             pype_root,
             "openpype",

@@ -51,18 +51,38 @@ def set_start_end_frames():
         "name": asset_name
     })

-    # Default frame start/end
-    frameStart = 0
-    frameEnd = 100
+    scene = bpy.context.scene

-    # Check if frameStart/frameEnd are set
-    if asset_doc["data"]["frameStart"]:
-        frameStart = asset_doc["data"]["frameStart"]
-    if asset_doc["data"]["frameEnd"]:
-        frameEnd = asset_doc["data"]["frameEnd"]
+    # Default scene settings
+    frameStart = scene.frame_start
+    frameEnd = scene.frame_end
+    fps = scene.render.fps
+    resolution_x = scene.render.resolution_x
+    resolution_y = scene.render.resolution_y
+
+    # Check if settings are set
+    data = asset_doc.get("data")
+
+    if not data:
+        return
+
+    if data.get("frameStart"):
+        frameStart = data.get("frameStart")
+    if data.get("frameEnd"):
+        frameEnd = data.get("frameEnd")
+    if data.get("fps"):
+        fps = data.get("fps")
+    if data.get("resolutionWidth"):
+        resolution_x = data.get("resolutionWidth")
+    if data.get("resolutionHeight"):
+        resolution_y = data.get("resolutionHeight")
+
+    scene.frame_start = frameStart
+    scene.frame_end = frameEnd
+    scene.render.fps = fps
+    scene.render.resolution_x = resolution_x
+    scene.render.resolution_y = resolution_y

-    bpy.context.scene.frame_start = frameStart
-    bpy.context.scene.frame_end = frameEnd

 def on_new(arg1, arg2):
     set_start_end_frames()

@@ -0,0 +1,35 @@
from typing import List

import pyblish.api
import openpype.hosts.blender.api.action


class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin):
    """Validate that the current object is in Object Mode."""

    order = pyblish.api.ValidatorOrder - 0.01
    hosts = ["blender"]
    families = ["model", "rig"]
    category = "geometry"
    label = "Object is in Object Mode"
    actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
    optional = True

    @classmethod
    def get_invalid(cls, instance) -> List:
        invalid = []
        for obj in [obj for obj in instance]:
            try:
                if obj.type == 'MESH' or obj.type == 'ARMATURE':
                    # Check if the object is in object mode.
                    if not obj.mode == 'OBJECT':
                        invalid.append(obj)
            except Exception:
                continue
        return invalid

    def process(self, instance):
        invalid = self.get_invalid(instance)
        if invalid:
            raise RuntimeError(
                f"Object found in instance is not in Object Mode: {invalid}")

@@ -12,6 +12,7 @@ class CreateLook(plugin.Creator):
     family = "look"
     icon = "paint-brush"
     defaults = ['Main']
+    make_tx = True

     def __init__(self, *args, **kwargs):
         super(CreateLook, self).__init__(*args, **kwargs)

@@ -19,7 +20,7 @@ class CreateLook(plugin.Creator):
         self.data["renderlayer"] = lib.get_current_renderlayer()

         # Whether to automatically convert the textures to .tx upon publish.
-        self.data["maketx"] = True
+        self.data["maketx"] = self.make_tx

         # Enable users to force a copy.
         self.data["forceCopy"] = False

@@ -80,25 +80,31 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):

            # Add all nodes in group instances.
            if node.Class() == "Group":
-               # check if it is write node in family
-               if "write" in families:
+               # only alter families for render family
+               if "write" in families_ak:
                    target = node["render"].value()
                    if target == "Use existing frames":
                        # Local rendering
                        self.log.info("flagged for no render")
-                       families.append("render")
+                       families.append(family)
                    elif target == "Local":
                        # Local rendering
                        self.log.info("flagged for local render")
-                       families.append("{}.local".format("render"))
+                       families.append("{}.local".format(family))
                    elif target == "On farm":
                        # Farm rendering
                        self.log.info("flagged for farm render")
                        instance.data["transfer"] = False
-                       families.append("{}.farm".format("render"))
+                       families.append("{}.farm".format(family))

+               # suffle family to `write` as it is main family
+               # this will be changed later on in process
+               if "render" in families:
+                   families.remove("render")
+                   family = "write"
+               elif "prerender" in families:
+                   families.remove("prerender")
+                   family = "write"
+
                node.begin()
                for i in nuke.allNodes():

@@ -108,6 +108,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):

            # Add version data to instance
            version_data = {
+               "families": [f.replace(".local", "").replace(".farm", "")
+                            for f in families if "write" not in f],
                "colorspace": node["colorspace"].value(),
            }

@@ -1,14 +1,15 @@
 import os
 import logging

-from avalon.tvpaint.communication_server import register_localization_file
-from avalon.tvpaint import pipeline
+import avalon.api
 import pyblish.api
+from avalon.tvpaint import pipeline
+from avalon.tvpaint.communication_server import register_localization_file
+from .lib import set_context_settings

 from openpype.hosts import tvpaint

-log = logging.getLogger("openpype.hosts.tvpaint")
+log = logging.getLogger(__name__)

 HOST_DIR = os.path.dirname(os.path.abspath(tvpaint.__file__))
 PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")

@@ -34,6 +35,18 @@ def on_instance_toggle(instance, old_value, new_value):
     pipeline._write_instances(current_instances)


+def initial_launch():
+    # Setup project settings if its the template that's launched.
+    # TODO also check for template creation when it's possible to define
+    # templates
+    last_workfile = os.environ.get("AVALON_LAST_WORKFILE")
+    if not last_workfile or os.path.exists(last_workfile):
+        return
+
+    log.info("Setting up project...")
+    set_context_settings()
+
+
 def install():
     log.info("OpenPype - Installing TVPaint integration")
     localization_file = os.path.join(HOST_DIR, "resources", "avalon.loc")

@@ -49,6 +62,8 @@ def install():
     if on_instance_toggle not in registered_callbacks:
         pyblish.api.register_callback("instanceToggled", on_instance_toggle)

+    avalon.api.on("application.launched", initial_launch)
+

 def uninstall():
     log.info("OpenPype - Uninstalling TVPaint integration")

@@ -1,5 +1,8 @@
 from PIL import Image

+import avalon.io
+from avalon.tvpaint.lib import execute_george
+

 def composite_images(input_image_paths, output_filepath):
     """Composite images in order from passed list.

@@ -18,3 +21,64 @@ def composite_images(input_image_paths, output_filepath):
         else:
             img_obj.alpha_composite(_img_obj)
     img_obj.save(output_filepath)
+
+
+def set_context_settings(asset_doc=None):
+    """Set workfile settings by asset document data.
+
+    Change fps, resolution and frame start/end.
+    """
+    if asset_doc is None:
+        # Use current session asset if not passed
+        asset_doc = avalon.io.find_one({
+            "type": "asset",
+            "name": avalon.io.Session["AVALON_ASSET"]
+        })
+
+    project_doc = avalon.io.find_one({"type": "project"})
+
+    framerate = asset_doc["data"].get("fps")
+    if framerate is None:
+        framerate = project_doc["data"].get("fps")
+
+    if framerate is not None:
+        execute_george(
+            "tv_framerate {} \"timestretch\"".format(framerate)
+        )
+    else:
+        print("Framerate was not found!")
+
+    width_key = "resolutionWidth"
+    height_key = "resolutionHeight"
+
+    width = asset_doc["data"].get(width_key)
+    height = asset_doc["data"].get(height_key)
+    if width is None or height is None:
+        width = project_doc["data"].get(width_key)
+        height = project_doc["data"].get(height_key)
+
+    if width is None or height is None:
+        print("Resolution was not found!")
+    else:
+        execute_george("tv_resizepage {} {} 0".format(width, height))
+
+    frame_start = asset_doc["data"].get("frameStart")
+    frame_end = asset_doc["data"].get("frameEnd")
+
+    if frame_start is None or frame_end is None:
+        print("Frame range was not found!")
+        return
+
+    handles = asset_doc["data"].get("handles") or 0
+    handle_start = asset_doc["data"].get("handleStart")
+    handle_end = asset_doc["data"].get("handleEnd")
+
+    if handle_start is None or handle_end is None:
+        handle_start = handles
+        handle_end = handles
+
+    frame_start -= int(handle_start)
+    frame_end += int(handle_end)
+
+    execute_george("tv_markin {} set".format(frame_start - 1))
+    execute_george("tv_markout {} set".format(frame_end - 1))

@@ -18,7 +18,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
         ))

         for instance_data in workfile_instances:
-            instance_data["fps"] = context.data["fps"]
+            instance_data["fps"] = context.data["sceneFps"]

             # Store workfile instance data to instance data
             instance_data["originData"] = copy.deepcopy(instance_data)

@@ -32,6 +32,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
             subset_name = instance_data["subset"]
             name = instance_data.get("name", subset_name)
             instance_data["name"] = name
+            instance_data["label"] = "{} [{}-{}]".format(
+                name,
+                context.data["sceneMarkIn"] + 1,
+                context.data["sceneMarkOut"] + 1
+            )

             active = instance_data.get("active", True)
             instance_data["active"] = active

@@ -73,8 +78,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
             if instance is None:
                 continue

-            instance.data["frameStart"] = context.data["frameStart"]
-            instance.data["frameEnd"] = context.data["frameEnd"]
+            instance.data["frameStart"] = context.data["sceneMarkIn"] + 1
+            instance.data["frameEnd"] = context.data["sceneMarkOut"] + 1

             self.log.debug("Created instance: {}\n{}".format(
                 instance, json.dumps(instance.data, indent=4)

@@ -122,36 +122,26 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
         width = int(workfile_info_parts.pop(-1))
         workfile_path = " ".join(workfile_info_parts).replace("\"", "")

-        frame_start, frame_end = self.collect_clip_frames()
+        # Marks return as "{frame - 1} {state} ", example "0 set".
+        result = lib.execute_george("tv_markin")
+        mark_in_frame, mark_in_state, _ = result.split(" ")
+
+        result = lib.execute_george("tv_markout")
+        mark_out_frame, mark_out_state, _ = result.split(" ")
+
         scene_data = {
             "currentFile": workfile_path,
             "sceneWidth": width,
             "sceneHeight": height,
-            "pixelAspect": pixel_apsect,
-            "frameStart": frame_start,
-            "frameEnd": frame_end,
-            "fps": frame_rate,
-            "fieldOrder": field_order
+            "scenePixelAspect": pixel_apsect,
+            "sceneFps": frame_rate,
+            "sceneFieldOrder": field_order,
+            "sceneMarkIn": int(mark_in_frame),
+            "sceneMarkInState": mark_in_state == "set",
+            "sceneMarkOut": int(mark_out_frame),
+            "sceneMarkOutState": mark_out_state == "set"
         }
         self.log.debug(
             "Scene data: {}".format(json.dumps(scene_data, indent=4))
         )
         context.data.update(scene_data)
-
-    def collect_clip_frames(self):
-        clip_info_str = lib.execute_george("tv_clipinfo")
-        self.log.debug("Clip info: {}".format(clip_info_str))
-        clip_info_items = clip_info_str.split(" ")
-        # Color index - not used
-        clip_info_items.pop(-1)
-        clip_info_items.pop(-1)
-
-        mark_out = int(clip_info_items.pop(-1))
-        frame_end = mark_out + 1
-        clip_info_items.pop(-1)
-
-        mark_in = int(clip_info_items.pop(-1))
-        frame_start = mark_in + 1
-        clip_info_items.pop(-1)
-
-        return frame_start, frame_end

openpype/hosts/tvpaint/plugins/publish/validate_marks.py (new file, 64 lines)

@@ -0,0 +1,64 @@
import json

import pyblish.api
from avalon.tvpaint import lib


class ValidateMarksRepair(pyblish.api.Action):
    """Repair the marks."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        expected_data = ValidateMarks.get_expected_data(context)

        expected_data["markIn"] -= 1
        expected_data["markOut"] -= 1

        lib.execute_george("tv_markin {} set".format(expected_data["markIn"]))
        lib.execute_george(
            "tv_markout {} set".format(expected_data["markOut"])
        )


class ValidateMarks(pyblish.api.ContextPlugin):
    """Validate mark in and out are enabled."""

    label = "Validate Marks"
    order = pyblish.api.ValidatorOrder
    optional = True
    actions = [ValidateMarksRepair]

    @staticmethod
    def get_expected_data(context):
        return {
            "markIn": int(context.data["frameStart"]),
            "markInState": True,
            "markOut": int(context.data["frameEnd"]),
            "markOutState": True
        }

    def process(self, context):
        current_data = {
            "markIn": context.data["sceneMarkIn"] + 1,
            "markInState": context.data["sceneMarkInState"],
            "markOut": context.data["sceneMarkOut"] + 1,
            "markOutState": context.data["sceneMarkOutState"]
        }
        expected_data = self.get_expected_data(context)
        invalid = {}
        for k in current_data.keys():
            if current_data[k] != expected_data[k]:
                invalid[k] = {
                    "current": current_data[k],
                    "expected": expected_data[k]
                }

        if invalid:
            raise AssertionError(
                "Marks does not match database:\n{}".format(
                    json.dumps(invalid, sort_keys=True, indent=4)
                )
            )

@@ -0,0 +1,34 @@
import json

import pyblish.api


class ValidateProjectSettings(pyblish.api.ContextPlugin):
    """Validate project settings against database.
    """

    label = "Validate Project Settings"
    order = pyblish.api.ValidatorOrder
    optional = True

    def process(self, context):
        scene_data = {
            "fps": context.data.get("sceneFps"),
            "resolutionWidth": context.data.get("sceneWidth"),
            "resolutionHeight": context.data.get("sceneHeight"),
            "pixelAspect": context.data.get("scenePixelAspect")
        }
        invalid = {}
        for k in scene_data.keys():
            expected_value = context.data["assetEntity"]["data"][k]
            if scene_data[k] != expected_value:
                invalid[k] = {
                    "current": scene_data[k], "expected": expected_value
                }

        if invalid:
            raise AssertionError(
                "Project settings does not match database:\n{}".format(
                    json.dumps(invalid, sort_keys=True, indent=4)
                )
            )

@@ -9,7 +9,7 @@ import site
 # add Python version specific vendor folder
 site.addsitedir(
     os.path.join(
-        os.getenv("OPENPYPE_ROOT", ""),
+        os.getenv("OPENPYPE_REPOS_ROOT", ""),
         "vendor", "python", "python_{}".format(sys.version[0])))

 from .terminal import Terminal

@@ -8,7 +8,7 @@ log = Logger().get_logger(__name__)

 def discover_host_vendor_module(module_name):
     host = os.environ["AVALON_APP"]
-    pype_root = os.environ["OPENPYPE_ROOT"]
+    pype_root = os.environ["OPENPYPE_REPOS_ROOT"]
     main_module = module_name.split(".")[0]
     module_path = os.path.join(
         pype_root, "hosts", host, "vendor", main_module)

@@ -28,7 +28,7 @@ def get_pype_info():
         "version": get_pype_version(),
         "version_type": version_type,
         "executable": executable_args[-1],
-        "pype_root": os.environ["OPENPYPE_ROOT"],
+        "pype_root": os.environ["OPENPYPE_REPOS_ROOT"],
         "mongo_url": os.environ["OPENPYPE_MONGO"]
     }

@@ -41,7 +41,7 @@ from .log_viewer import LogViewModule
 from .muster import MusterModule
 from .deadline import DeadlineModule
 from .standalonepublish_action import StandAlonePublishAction
-from .sync_server import SyncServer
+from .sync_server import SyncServerModule


 __all__ = (

@@ -82,5 +82,5 @@ __all__ = (
     "DeadlineModule",
     "StandAlonePublishAction",

-    "SyncServer"
+    "SyncServerModule"
 )

@@ -102,7 +102,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

    hosts = ["fusion", "maya", "nuke", "celaction", "aftereffects", "harmony"]

-    families = ["render.farm", "prerender",
+    families = ["render.farm", "prerender.farm",
                "renderlayer", "imagesequence", "vrayscene"]

    aov_filter = {"maya": [r".+(?:\.|_)([Bb]eauty)(?:\.|_).*"],

@ -0,0 +1,365 @@
|
|||
import json
|
||||
|
||||
from openpype.api import ProjectSettings
|
||||
|
||||
from openpype.modules.ftrack.lib import ServerAction
|
||||
from openpype.modules.ftrack.lib.avalon_sync import (
|
||||
get_pype_attr,
|
||||
CUST_ATTR_AUTO_SYNC
|
||||
)
|
||||
|
||||
|
||||
class PrepareProjectServer(ServerAction):
|
||||
"""Prepare project attributes in Anatomy."""
|
||||
|
||||
identifier = "prepare.project.server"
|
||||
label = "OpenPype Admin"
|
||||
variant = "- Prepare Project (Server)"
|
||||
description = "Set basic attributes on the project"
|
||||
|
||||
settings_key = "prepare_project"
|
||||
|
||||
role_list = ["Pypeclub", "Administrator", "Project Manager"]
|
||||
|
||||
# Key to store info about trigerring create folder structure
|
||||
item_splitter = {"type": "label", "value": "---"}
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
"""Show only on project."""
|
||||
if (
|
||||
len(entities) != 1
|
||||
or entities[0].entity_type.lower() != "project"
|
||||
):
|
||||
return False
|
||||
|
||||
return self.valid_roles(session, entities, event)
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
if event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
# Inform user that this may take a while
|
||||
self.show_message(event, "Preparing data... Please wait", True)
|
||||
self.log.debug("Preparing data which will be shown")
|
||||
|
||||
self.log.debug("Loading custom attributes")
|
||||
|
||||
project_entity = entities[0]
|
||||
project_name = project_entity["full_name"]
|
||||
|
||||
try:
|
||||
project_settings = ProjectSettings(project_name)
|
||||
except ValueError:
|
||||
return {
|
||||
"message": "Project is not synchronized yet",
|
||||
"success": False
|
||||
}
|
||||
|
||||
project_anatom_settings = project_settings["project_anatomy"]
|
||||
root_items = self.prepare_root_items(project_anatom_settings)
|
||||
|
||||
ca_items, multiselect_enumerators = (
|
||||
self.prepare_custom_attribute_items(project_anatom_settings)
|
||||
)
|
||||
|
||||
self.log.debug("Heavy items are ready. Preparing last items group.")
|
||||
|
||||
title = "Prepare Project"
|
||||
items = []
|
||||
|
||||
# Add root items
|
||||
items.extend(root_items)
|
||||
|
||||
items.append(self.item_splitter)
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "<h3>Set basic Attributes:</h3>"
|
||||
})
|
||||
|
||||
items.extend(ca_items)
|
||||
|
||||
# This item will be last (before enumerators)
|
||||
# - sets value of auto synchronization
|
||||
auto_sync_name = "avalon_auto_sync"
|
||||
auto_sync_value = project_entity["custom_attributes"].get(
|
||||
CUST_ATTR_AUTO_SYNC, False
|
||||
)
|
||||
auto_sync_item = {
|
||||
"name": auto_sync_name,
|
||||
"type": "boolean",
|
||||
"value": auto_sync_value,
|
||||
"label": "AutoSync to Avalon"
|
||||
}
|
||||
# Add autosync attribute
|
||||
items.append(auto_sync_item)
|
||||
|
||||
# Add enumerator items at the end
|
||||
for item in multiselect_enumerators:
|
||||
items.append(item)
|
||||
|
||||
return {
|
||||
"items": items,
|
||||
"title": title
|
||||
}
|
||||
|
||||
def prepare_root_items(self, project_anatom_settings):
|
||||
self.log.debug("Root items preparation begins.")
|
||||
|
||||
root_items = []
|
||||
root_items.append({
|
||||
"type": "label",
|
||||
"value": "<h3>Check your Project root settings</h3>"
|
||||
})
|
||||
root_items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"<p><i>NOTE: Roots are <b>crutial</b> for path filling"
|
||||
" (and creating folder structure).</i></p>"
|
||||
)
|
||||
})
|
||||
root_items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"<p><i>WARNING: Do not change roots on running project,"
|
||||
" that <b>will cause workflow issues</b>.</i></p>"
|
||||
)
|
||||
})
|
||||
|
||||
empty_text = "Enter root path here..."
|
||||
|
||||
roots_entity = project_anatom_settings["roots"]
|
||||
for root_name, root_entity in roots_entity.items():
|
||||
root_items.append(self.item_splitter)
|
||||
root_items.append({
|
||||
"type": "label",
|
||||
"value": "Root: \"{}\"".format(root_name)
|
||||
})
|
||||
for platform_name, value_entity in root_entity.items():
|
||||
root_items.append({
|
||||
"label": platform_name,
|
||||
"name": "__root__{}__{}".format(root_name, platform_name),
|
||||
"type": "text",
|
||||
"value": value_entity.value,
|
||||
"empty_text": empty_text
|
||||
})
|
||||
|
||||
root_items.append({
|
||||
"type": "hidden",
|
||||
"name": "__rootnames__",
|
||||
"value": json.dumps(list(roots_entity.keys()))
|
||||
})
|
||||
|
||||
self.log.debug("Root items preparation ended.")
|
||||
return root_items
|
||||
|
||||
def _attributes_to_set(self, project_anatom_settings):
|
||||
attributes_to_set = {}
|
||||
|
||||
attribute_values_by_key = {}
|
||||
for key, entity in project_anatom_settings["attributes"].items():
|
||||
attribute_values_by_key[key] = entity.value
|
||||
|
||||
cust_attrs, hier_cust_attrs = get_pype_attr(self.session, True)
|
||||
|
||||
for attr in hier_cust_attrs:
|
||||
key = attr["key"]
|
||||
if key.startswith("avalon_"):
|
||||
continue
|
||||
attributes_to_set[key] = {
|
||||
"label": attr["label"],
|
||||
"object": attr,
|
||||
"default": attribute_values_by_key.get(key)
|
||||
}
|
||||
|
||||
for attr in cust_attrs:
|
||||
if attr["entity_type"].lower() != "show":
|
||||
continue
|
||||
key = attr["key"]
|
||||
if key.startswith("avalon_"):
|
||||
continue
|
||||
attributes_to_set[key] = {
|
||||
"label": attr["label"],
|
||||
"object": attr,
|
||||
"default": attribute_values_by_key.get(key)
|
||||
}
|
||||
|
||||
# Sort by label
|
||||
attributes_to_set = dict(sorted(
|
||||
attributes_to_set.items(),
|
||||
key=lambda x: x[1]["label"]
|
||||
))
|
||||
return attributes_to_set
|
||||
|
||||
def prepare_custom_attribute_items(self, project_anatom_settings):
|
||||
items = []
|
||||
multiselect_enumerators = []
|
||||
attributes_to_set = self._attributes_to_set(project_anatom_settings)
|
||||
|
||||
self.log.debug("Preparing interface for keys: \"{}\"".format(
|
||||
str([key for key in attributes_to_set])
|
||||
))
|
||||
|
||||
for key, in_data in attributes_to_set.items():
|
||||
attr = in_data["object"]
|
||||
|
||||
# initial item definition
|
||||
item = {
|
||||
"name": key,
|
||||
"label": in_data["label"]
|
||||
}
|
||||
|
||||
# cust attr type - may have different visualization
|
||||
type_name = attr["type"]["name"].lower()
|
||||
easy_types = ["text", "boolean", "date", "number"]
|
||||
|
||||
easy_type = False
|
||||
if type_name in easy_types:
|
||||
easy_type = True
|
||||
|
||||
elif type_name == "enumerator":
|
||||
|
||||
attr_config = json.loads(attr["config"])
|
||||
attr_config_data = json.loads(attr_config["data"])
|
||||
|
||||
if attr_config["multiSelect"] is True:
|
||||
multiselect_enumerators.append(self.item_splitter)
|
||||
multiselect_enumerators.append({
|
||||
"type": "label",
|
||||
"value": in_data["label"]
|
||||
})
|
||||
|
||||
default = in_data["default"]
|
||||
names = []
|
||||
for option in sorted(
|
||||
attr_config_data, key=lambda x: x["menu"]
|
||||
):
|
||||
name = option["value"]
|
||||
new_name = "__{}__{}".format(key, name)
|
||||
names.append(new_name)
|
||||
item = {
|
||||
"name": new_name,
|
||||
"type": "boolean",
|
||||
"label": "- {}".format(option["menu"])
|
||||
}
|
||||
if default:
|
||||
if isinstance(default, (list, tuple)):
|
||||
if name in default:
|
||||
item["value"] = True
|
||||
else:
|
||||
if name == default:
|
||||
item["value"] = True
|
||||
|
||||
multiselect_enumerators.append(item)
|
||||
|
||||
multiselect_enumerators.append({
|
||||
"type": "hidden",
|
||||
"name": "__hidden__{}".format(key),
|
||||
"value": json.dumps(names)
|
||||
})
|
||||
else:
|
||||
easy_type = True
|
||||
item["data"] = attr_config_data
|
||||
|
||||
else:
|
||||
self.log.warning((
|
||||
"Custom attribute \"{}\" has type \"{}\"."
|
||||
" I don't know how to handle"
|
||||
).format(key, type_name))
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"!!! Can't handle Custom attritubte type \"{}\""
|
||||
" (key: \"{}\")"
|
||||
).format(type_name, key)
|
||||
})
|
||||
|
||||
if easy_type:
|
||||
item["type"] = type_name
|
||||
|
||||
# default value in interface
|
||||
default = in_data["default"]
|
||||
if default is not None:
|
||||
item["value"] = default
|
||||
|
||||
items.append(item)
|
||||
|
||||
return items, multiselect_enumerators
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
if not event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
in_data = event['data']['values']
|
||||
|
||||
root_values = {}
|
||||
root_key = "__root__"
|
||||
for key in tuple(in_data.keys()):
|
||||
if key.startswith(root_key):
|
||||
_key = key[len(root_key):]
|
||||
root_values[_key] = in_data.pop(key)
|
||||
|
||||
root_names = in_data.pop("__rootnames__", None)
|
||||
root_data = {}
|
||||
for root_name in json.loads(root_names):
|
||||
root_data[root_name] = {}
|
||||
for key, value in tuple(root_values.items()):
|
||||
prefix = "{}__".format(root_name)
|
||||
if not key.startswith(prefix):
|
||||
continue
|
||||
|
||||
_key = key[len(prefix):]
|
||||
root_data[root_name][_key] = value
|
||||
|
||||
# Find hidden items for multiselect enumerators
|
||||
keys_to_process = []
|
||||
for key in in_data:
|
||||
if key.startswith("__hidden__"):
|
||||
keys_to_process.append(key)
|
||||
|
||||
self.log.debug("Preparing data for Multiselect Enumerators")
|
||||
enumerators = {}
|
||||
for key in keys_to_process:
|
||||
new_key = key.replace("__hidden__", "")
|
||||
enumerator_items = in_data.pop(key)
|
||||
enumerators[new_key] = json.loads(enumerator_items)
|
||||
|
||||
# find values set for multiselect enumerator
|
||||
for key, enumerator_items in enumerators.items():
|
||||
in_data[key] = []
|
||||
|
||||
name = "__{}__".format(key)
|
||||
|
||||
for item in enumerator_items:
|
||||
value = in_data.pop(item)
|
||||
if value is True:
|
||||
new_key = item.replace(name, "")
|
||||
in_data[key].append(new_key)
|
||||
|
||||
self.log.debug("Setting Custom Attribute values")
|
||||
|
||||
project_name = entities[0]["full_name"]
|
||||
project_settings = ProjectSettings(project_name)
|
||||
project_anatomy_settings = project_settings["project_anatomy"]
|
||||
project_anatomy_settings["roots"] = root_data
|
||||
|
||||
custom_attribute_values = {}
|
||||
attributes_entity = project_anatomy_settings["attributes"]
|
||||
for key, value in in_data.items():
|
||||
if key not in attributes_entity:
|
||||
custom_attribute_values[key] = value
|
||||
else:
|
||||
attributes_entity[key] = value
|
||||
|
||||
project_settings.save()
|
||||
|
||||
entity = entities[0]
|
||||
for key, value in custom_attribute_values.items():
|
||||
entity["custom_attributes"][key] = value
|
||||
self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def register(session):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
PrepareProjectServer(session).register()
|
||||
|
|
@ -1,31 +1,34 @@
|
|||
import os
|
||||
import json
|
||||
|
||||
from openpype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from openpype.api import config, Anatomy
|
||||
from openpype.modules.ftrack.lib.avalon_sync import get_pype_attr
|
||||
from openpype.api import ProjectSettings
|
||||
|
||||
from openpype.modules.ftrack.lib import (
|
||||
BaseAction,
|
||||
statics_icon
|
||||
)
|
||||
from openpype.modules.ftrack.lib.avalon_sync import (
|
||||
get_pype_attr,
|
||||
CUST_ATTR_AUTO_SYNC
|
||||
)
|
||||
|
||||
|
||||
class PrepareProject(BaseAction):
|
||||
'''Edit meta data action.'''
|
||||
class PrepareProjectLocal(BaseAction):
|
||||
"""Prepare project attributes in Anatomy."""
|
||||
|
||||
#: Action identifier.
|
||||
identifier = 'prepare.project'
|
||||
#: Action label.
|
||||
label = 'Prepare Project'
|
||||
#: Action description.
|
||||
description = 'Set basic attributes on the project'
|
||||
#: roles that are allowed to register this action
|
||||
identifier = "prepare.project.local"
|
||||
label = "Prepare Project"
|
||||
description = "Set basic attributes on the project"
|
||||
icon = statics_icon("ftrack", "action_icons", "PrepareProject.svg")
|
||||
|
||||
role_list = ["Pypeclub", "Administrator", "Project Manager"]
|
||||
|
||||
settings_key = "prepare_project"
|
||||
|
||||
# Key to store info about trigerring create folder structure
|
||||
create_project_structure_key = "create_folder_structure"
|
||||
item_splitter = {'type': 'label', 'value': '---'}
|
||||
item_splitter = {"type": "label", "value": "---"}
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
"""Show only on project."""
|
||||
if (
|
||||
len(entities) != 1
|
||||
or entities[0].entity_type.lower() != "project"
|
||||
|
|
@ -44,27 +47,22 @@ class PrepareProject(BaseAction):
|
|||
|
||||
self.log.debug("Loading custom attributes")
|
||||
|
||||
project_name = entities[0]["full_name"]
|
||||
project_entity = entities[0]
|
||||
project_name = project_entity["full_name"]
|
||||
|
||||
project_defaults = (
|
||||
config.get_presets(project_name)
|
||||
.get("ftrack", {})
|
||||
.get("project_defaults", {})
|
||||
)
|
||||
|
||||
anatomy = Anatomy(project_name)
|
||||
if not anatomy.roots:
|
||||
try:
|
||||
project_settings = ProjectSettings(project_name)
|
||||
except ValueError:
|
||||
return {
|
||||
"success": False,
|
||||
"message": (
|
||||
"Have issues with loading Roots for project \"{}\"."
|
||||
).format(anatomy.project_name)
|
||||
"message": "Project is not synchronized yet",
|
||||
"success": False
|
||||
}
|
||||
|
||||
root_items = self.prepare_root_items(anatomy)
|
||||
project_anatom_settings = project_settings["project_anatomy"]
|
||||
root_items = self.prepare_root_items(project_anatom_settings)
|
||||
|
||||
ca_items, multiselect_enumerators = (
|
||||
self.prepare_custom_attribute_items(project_defaults)
|
||||
self.prepare_custom_attribute_items(project_anatom_settings)
|
||||
)
|
||||
|
||||
self.log.debug("Heavy items are ready. Preparing last items group.")
|
||||
|
|
@ -74,19 +72,6 @@ class PrepareProject(BaseAction):
|
|||
|
||||
# Add root items
|
||||
items.extend(root_items)
|
||||
items.append(self.item_splitter)
|
||||
|
||||
# Ask if want to trigger Action Create Folder Structure
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "<h3>Want to create basic Folder Structure?</h3>"
|
||||
})
|
||||
items.append({
|
||||
"name": self.create_project_structure_key,
|
||||
"type": "boolean",
|
||||
"value": False,
|
||||
"label": "Check if Yes"
|
||||
})
|
||||
|
||||
items.append(self.item_splitter)
|
||||
items.append({
|
||||
|
|
@ -99,10 +84,13 @@ class PrepareProject(BaseAction):
|
|||
# This item will be last (before enumerators)
|
||||
# - sets value of auto synchronization
|
||||
auto_sync_name = "avalon_auto_sync"
|
||||
auto_sync_value = project_entity["custom_attributes"].get(
|
||||
CUST_ATTR_AUTO_SYNC, False
|
||||
)
|
||||
auto_sync_item = {
|
||||
"name": auto_sync_name,
|
||||
"type": "boolean",
|
||||
"value": project_defaults.get(auto_sync_name, False),
|
||||
"value": auto_sync_value,
|
||||
"label": "AutoSync to Avalon"
|
||||
}
|
||||
# Add autosync attribute
|
||||
|
|
@ -117,13 +105,10 @@ class PrepareProject(BaseAction):
|
|||
"title": title
|
||||
}
|
||||
|
||||
def prepare_root_items(self, anatomy):
|
||||
root_items = []
|
||||
def prepare_root_items(self, project_anatom_settings):
|
||||
self.log.debug("Root items preparation begins.")
|
||||
|
||||
root_names = anatomy.root_names()
|
||||
roots = anatomy.roots
|
||||
|
||||
root_items = []
|
||||
root_items.append({
|
||||
"type": "label",
|
||||
"value": "<h3>Check your Project root settings</h3>"
|
||||
|
|
@ -143,85 +128,40 @@ class PrepareProject(BaseAction):
|
|||
)
|
||||
})
|
||||
|
||||
default_roots = anatomy.roots
|
||||
while isinstance(default_roots, dict):
|
||||
key = tuple(default_roots.keys())[0]
|
||||
default_roots = default_roots[key]
|
||||
|
||||
empty_text = "Enter root path here..."
|
||||
|
||||
# Root names is None when anatomy templates contain "{root}"
|
||||
all_platforms = ["windows", "linux", "darwin"]
|
||||
if root_names is None:
|
||||
root_items.append(self.item_splitter)
|
||||
# find first possible key
|
||||
for platform in all_platforms:
|
||||
value = default_roots.raw_data.get(platform) or ""
|
||||
root_items.append({
|
||||
"label": platform,
|
||||
"name": "__root__{}".format(platform),
|
||||
"type": "text",
|
||||
"value": value,
|
||||
"empty_text": empty_text
|
||||
})
|
||||
return root_items
|
||||
|
||||
root_name_data = {}
|
||||
missing_roots = []
|
||||
for root_name in root_names:
|
||||
root_name_data[root_name] = {}
|
||||
if not isinstance(roots, dict):
|
||||
missing_roots.append(root_name)
|
||||
continue
|
||||
|
||||
root_item = roots.get(root_name)
|
||||
if not root_item:
|
||||
missing_roots.append(root_name)
|
||||
continue
|
||||
|
||||
for platform in all_platforms:
|
||||
root_name_data[root_name][platform] = (
|
||||
root_item.raw_data.get(platform) or ""
|
||||
)
|
||||
|
||||
if missing_roots:
|
||||
default_values = {}
|
||||
for platform in all_platforms:
|
||||
default_values[platform] = (
|
||||
default_roots.raw_data.get(platform) or ""
|
||||
)
|
||||
|
||||
for root_name in missing_roots:
|
||||
root_name_data[root_name] = default_values
|
||||
|
||||
root_names = list(root_name_data.keys())
|
||||
root_items.append({
|
||||
"type": "hidden",
|
||||
"name": "__rootnames__",
|
||||
"value": json.dumps(root_names)
|
||||
})
|
||||
|
||||
for root_name, values in root_name_data.items():
|
||||
roots_entity = project_anatom_settings["roots"]
|
||||
for root_name, root_entity in roots_entity.items():
|
||||
root_items.append(self.item_splitter)
|
||||
root_items.append({
|
||||
"type": "label",
|
||||
"value": "Root: \"{}\"".format(root_name)
|
||||
})
|
||||
for platform, value in values.items():
|
||||
for platform_name, value_entity in root_entity.items():
|
||||
root_items.append({
|
||||
"label": platform,
|
||||
"name": "__root__{}{}".format(root_name, platform),
|
||||
"label": platform_name,
|
||||
"name": "__root__{}__{}".format(root_name, platform_name),
|
||||
"type": "text",
|
||||
"value": value,
|
||||
"value": value_entity.value,
|
||||
"empty_text": empty_text
|
||||
})
|
||||
|
||||
root_items.append({
|
||||
"type": "hidden",
|
||||
"name": "__rootnames__",
|
||||
"value": json.dumps(list(roots_entity.keys()))
|
||||
})
|
||||
|
||||
self.log.debug("Root items preparation ended.")
|
||||
return root_items
|
||||
|
||||
def _attributes_to_set(self, project_defaults):
|
||||
def _attributes_to_set(self, project_anatom_settings):
|
||||
attributes_to_set = {}
|
||||
|
||||
attribute_values_by_key = {}
|
||||
for key, entity in project_anatom_settings["attributes"].items():
|
||||
attribute_values_by_key[key] = entity.value
|
||||
|
||||
cust_attrs, hier_cust_attrs = get_pype_attr(self.session, True)
|
||||
|
||||
for attr in hier_cust_attrs:
|
||||
|
|
@ -231,7 +171,7 @@ class PrepareProject(BaseAction):
|
|||
attributes_to_set[key] = {
|
||||
"label": attr["label"],
|
||||
"object": attr,
|
||||
"default": project_defaults.get(key)
|
||||
"default": attribute_values_by_key.get(key)
|
||||
}
|
||||
|
||||
for attr in cust_attrs:
|
||||
|
|
@ -243,7 +183,7 @@ class PrepareProject(BaseAction):
|
|||
attributes_to_set[key] = {
|
||||
"label": attr["label"],
|
||||
"object": attr,
|
||||
"default": project_defaults.get(key)
|
||||
"default": attribute_values_by_key.get(key)
|
||||
}
|
||||
|
||||
# Sort by label
|
||||
|
|
@ -253,10 +193,10 @@ class PrepareProject(BaseAction):
|
|||
))
|
||||
return attributes_to_set
|
||||
|
||||
def prepare_custom_attribute_items(self, project_defaults):
|
||||
def prepare_custom_attribute_items(self, project_anatom_settings):
|
||||
items = []
|
||||
multiselect_enumerators = []
|
||||
attributes_to_set = self._attributes_to_set(project_defaults)
|
||||
attributes_to_set = self._attributes_to_set(project_anatom_settings)
|
||||
|
||||
self.log.debug("Preparing interface for keys: \"{}\"".format(
|
||||
str([key for key in attributes_to_set])
|
||||
|
|
@ -363,24 +303,15 @@ class PrepareProject(BaseAction):
|
|||
|
||||
root_names = in_data.pop("__rootnames__", None)
|
||||
root_data = {}
|
||||
if root_names:
|
||||
for root_name in json.loads(root_names):
|
||||
root_data[root_name] = {}
|
||||
for key, value in tuple(root_values.items()):
|
||||
if key.startswith(root_name):
|
||||
_key = key[len(root_name):]
|
||||
root_data[root_name][_key] = value
|
||||
for root_name in json.loads(root_names):
|
||||
root_data[root_name] = {}
|
||||
for key, value in tuple(root_values.items()):
|
||||
prefix = "{}__".format(root_name)
|
||||
if not key.startswith(prefix):
|
||||
continue
|
||||
|
||||
else:
|
||||
for key, value in root_values.items():
|
||||
root_data[key] = value
|
||||
|
||||
# TODO implement creating of anatomy for new projects
|
||||
# project_name = entities[0]["full_name"]
|
||||
# anatomy = Anatomy(project_name)
|
||||
|
||||
# pop out info about creating project structure
|
||||
create_proj_struct = in_data.pop(self.create_project_structure_key)
|
||||
_key = key[len(prefix):]
|
||||
root_data[root_name][_key] = value
|
||||
|
||||
# Find hidden items for multiselect enumerators
|
||||
keys_to_process = []
|
||||
|
|
@ -407,54 +338,31 @@ class PrepareProject(BaseAction):
|
|||
new_key = item.replace(name, "")
|
||||
in_data[key].append(new_key)
|
||||
|
||||
self.log.debug("Setting Custom Attribute values:")
|
||||
entity = entities[0]
|
||||
self.log.debug("Setting Custom Attribute values")
|
||||
|
||||
project_name = entities[0]["full_name"]
|
||||
project_settings = ProjectSettings(project_name)
|
||||
project_anatomy_settings = project_settings["project_anatomy"]
|
||||
project_anatomy_settings["roots"] = root_data
|
||||
|
||||
custom_attribute_values = {}
|
||||
attributes_entity = project_anatomy_settings["attributes"]
|
||||
for key, value in in_data.items():
|
||||
if key not in attributes_entity:
|
||||
custom_attribute_values[key] = value
|
||||
else:
|
||||
attributes_entity[key] = value
|
||||
|
||||
project_settings.save()
|
||||
|
||||
entity = entities[0]
|
||||
for key, value in custom_attribute_values.items():
|
||||
entity["custom_attributes"][key] = value
|
||||
self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
|
||||
|
||||
session.commit()
|
||||
|
||||
# Create project structure
|
||||
self.create_project_specific_config(entities[0]["full_name"], in_data)
|
||||
|
||||
# Trigger Create Project Structure action
|
||||
if create_proj_struct is True:
|
||||
self.trigger_action("create.project.structure", event)
|
||||
|
||||
return True
|
||||
|
||||
def create_project_specific_config(self, project_name, json_data):
|
||||
self.log.debug("*** Creating project specifig configs ***")
|
||||
project_specific_path = project_overrides_dir_path(project_name)
|
||||
if not os.path.exists(project_specific_path):
|
||||
os.makedirs(project_specific_path)
|
||||
self.log.debug((
|
||||
"Project specific config folder for project \"{}\" created."
|
||||
).format(project_name))
|
||||
|
||||
# Presets ####################################
|
||||
self.log.debug("--- Processing Presets Begins: ---")
|
||||
|
||||
project_defaults_dir = os.path.normpath(os.path.join(
|
||||
project_specific_path, "presets", "ftrack"
|
||||
))
|
||||
project_defaults_path = os.path.normpath(os.path.join(
|
||||
project_defaults_dir, "project_defaults.json"
|
||||
))
|
||||
# Create folder if not exist
|
||||
if not os.path.exists(project_defaults_dir):
|
||||
self.log.debug("Creating Ftrack Presets folder: \"{}\"".format(
|
||||
project_defaults_dir
|
||||
))
|
||||
os.makedirs(project_defaults_dir)
|
||||
|
||||
with open(project_defaults_path, 'w') as file_stream:
|
||||
json.dump(json_data, file_stream, indent=4)
|
||||
|
||||
self.log.debug("*** Creating project specifig configs Finished ***")
|
||||
|
||||
|
||||
def register(session):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
PrepareProject(session).register()
|
||||
PrepareProjectLocal(session).register()
|
||||
|
|
|
|||
|
|
@ -891,6 +891,33 @@ class SyncEntitiesFactory:
|
|||
|
||||
self.entities_dict[parent_id]["children"].remove(id)
|
||||
|
||||
def _query_custom_attributes(self, session, conf_ids, entity_ids):
|
||||
output = []
|
||||
# Prepare values to query
|
||||
attributes_joined = join_query_keys(conf_ids)
|
||||
attributes_len = len(conf_ids)
|
||||
chunk_size = int(5000 / attributes_len)
|
||||
for idx in range(0, len(entity_ids), chunk_size):
|
||||
entity_ids_joined = join_query_keys(
|
||||
entity_ids[idx:idx + chunk_size]
|
||||
)
|
||||
|
||||
call_expr = [{
|
||||
"action": "query",
|
||||
"expression": (
|
||||
"select value, entity_id from ContextCustomAttributeValue "
|
||||
"where entity_id in ({}) and configuration_id in ({})"
|
||||
).format(entity_ids_joined, attributes_joined)
|
||||
}]
|
||||
if hasattr(session, "call"):
|
||||
[result] = session.call(call_expr)
|
||||
else:
|
||||
[result] = session._call(call_expr)
|
||||
|
||||
for item in result["data"]:
|
||||
output.append(item)
|
||||
return output
|
||||
|
||||
def set_cutom_attributes(self):
|
||||
self.log.debug("* Preparing custom attributes")
|
||||
# Get custom attributes and values
|
||||
|
|
@ -1000,31 +1027,13 @@ class SyncEntitiesFactory:
|
|||
copy.deepcopy(prepared_avalon_attr_ca_id)
|
||||
)
|
||||
|
||||
# TODO query custom attributes by entity_id
|
||||
entity_ids_joined = ", ".join([
|
||||
"\"{}\"".format(id) for id in sync_ids
|
||||
])
|
||||
attributes_joined = ", ".join([
|
||||
"\"{}\"".format(attr_id) for attr_id in attribute_key_by_id.keys()
|
||||
])
|
||||
|
||||
cust_attr_query = (
|
||||
"select value, configuration_id, entity_id"
|
||||
" from ContextCustomAttributeValue"
|
||||
" where entity_id in ({}) and configuration_id in ({})"
|
||||
items = self._query_custom_attributes(
|
||||
self.session,
|
||||
list(attribute_key_by_id.keys()),
|
||||
sync_ids
|
||||
)
|
||||
call_expr = [{
|
||||
"action": "query",
|
||||
"expression": cust_attr_query.format(
|
||||
entity_ids_joined, attributes_joined
|
||||
)
|
||||
}]
|
||||
if hasattr(self.session, "call"):
|
||||
[values] = self.session.call(call_expr)
|
||||
else:
|
||||
[values] = self.session._call(call_expr)
|
||||
|
||||
for item in values["data"]:
|
||||
for item in items:
|
||||
entity_id = item["entity_id"]
|
||||
attr_id = item["configuration_id"]
|
||||
key = attribute_key_by_id[attr_id]
|
||||
|
|
@ -1106,28 +1115,14 @@ class SyncEntitiesFactory:
|
|||
for key, val in prepare_dict_avalon.items():
|
||||
entity_dict["avalon_attrs"][key] = val
|
||||
|
||||
# Prepare values to query
|
||||
entity_ids_joined = ", ".join([
|
||||
"\"{}\"".format(id) for id in sync_ids
|
||||
])
|
||||
attributes_joined = ", ".join([
|
||||
"\"{}\"".format(attr_id) for attr_id in attribute_key_by_id.keys()
|
||||
])
|
||||
avalon_hier = []
|
||||
call_expr = [{
|
||||
"action": "query",
|
||||
"expression": (
|
||||
"select value, entity_id, configuration_id"
|
||||
" from ContextCustomAttributeValue"
|
||||
" where entity_id in ({}) and configuration_id in ({})"
|
||||
).format(entity_ids_joined, attributes_joined)
|
||||
}]
|
||||
if hasattr(self.session, "call"):
|
||||
[values] = self.session.call(call_expr)
|
||||
else:
|
||||
[values] = self.session._call(call_expr)
|
||||
items = self._query_custom_attributes(
|
||||
self.session,
|
||||
list(attribute_key_by_id.keys()),
|
||||
sync_ids
|
||||
)
|
||||
|
||||
for item in values["data"]:
|
||||
avalon_hier = []
|
||||
for item in items:
|
||||
value = item["value"]
|
||||
# WARNING It is not possible to propage enumerate hierachical
|
||||
# attributes with multiselection 100% right. Unseting all values
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from openpype.modules.sync_server.sync_server import SyncServer
|
||||
from openpype.modules.sync_server.sync_server_module import SyncServerModule
|
||||
|
||||
|
||||
def tray_init(tray_widget, main_widget):
|
||||
return SyncServer()
|
||||
return SyncServerModule()
|
||||
|
|
|
|||
0
openpype/modules/sync_server/providers/__init__.py
Normal file
0
openpype/modules/sync_server/providers/__init__.py
Normal file
|
|
@ -1,16 +1,23 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
import abc
|
||||
import six
|
||||
from openpype.api import Logger
|
||||
|
||||
log = Logger().get_logger("SyncServer")
|
||||
|
||||
|
||||
class AbstractProvider(metaclass=ABCMeta):
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AbstractProvider:
|
||||
|
||||
def __init__(self, site_name, tree=None, presets=None):
|
||||
def __init__(self, project_name, site_name, tree=None, presets=None):
|
||||
self.presets = None
|
||||
self.active = False
|
||||
self.site_name = site_name
|
||||
|
||||
self.presets = presets
|
||||
|
||||
@abstractmethod
|
||||
super(AbstractProvider, self).__init__()
|
||||
|
||||
@abc.abstractmethod
|
||||
def is_active(self):
|
||||
"""
|
||||
Returns True if provider is activated, eg. has working credentials.
|
||||
|
|
@ -18,36 +25,54 @@ class AbstractProvider(metaclass=ABCMeta):
|
|||
(boolean)
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def upload_file(self, source_path, target_path, overwrite=True):
|
||||
@abc.abstractmethod
|
||||
def upload_file(self, source_path, path,
|
||||
server, collection, file, representation, site,
|
||||
overwrite=False):
|
||||
"""
|
||||
Copy file from 'source_path' to 'target_path' on provider.
|
||||
Use 'overwrite' boolean to rewrite existing file on provider
|
||||
|
||||
Args:
|
||||
source_path (string): absolute path on local system
|
||||
target_path (string): absolute path on provider (GDrive etc.)
|
||||
overwrite (boolean): True if overwite existing
|
||||
source_path (string):
|
||||
path (string): absolute path with or without name of the file
|
||||
overwrite (boolean): replace existing file
|
||||
|
||||
arguments for saving progress:
|
||||
server (SyncServer): server instance to call update_db on
|
||||
collection (str): name of collection
|
||||
file (dict): info about uploaded file (matches structure from db)
|
||||
representation (dict): complete repre containing 'file'
|
||||
site (str): site name
|
||||
Returns:
|
||||
(string) file_id of created file, raises exception
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def download_file(self, source_path, local_path, overwrite=True):
|
||||
@abc.abstractmethod
|
||||
def download_file(self, source_path, local_path,
|
||||
server, collection, file, representation, site,
|
||||
overwrite=False):
|
||||
"""
|
||||
Download file from provider into local system
|
||||
|
||||
Args:
|
||||
source_path (string): absolute path on provider
|
||||
local_path (string): absolute path on local
|
||||
overwrite (bool): default set to True
|
||||
local_path (string): absolute path with or without name of the file
|
||||
overwrite (boolean): replace existing file
|
||||
|
||||
arguments for saving progress:
|
||||
server (SyncServer): server instance to call update_db on
|
||||
collection (str): name of collection
|
||||
file (dict): info about uploaded file (matches structure from db)
|
||||
representation (dict): complete repre containing 'file'
|
||||
site (str): site name
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
@abc.abstractmethod
|
||||
def delete_file(self, path):
|
||||
"""
|
||||
Deletes file from 'path'. Expects path to specific file.
|
||||
|
|
@ -60,7 +85,7 @@ class AbstractProvider(metaclass=ABCMeta):
|
|||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
@abc.abstractmethod
|
||||
def list_folder(self, folder_path):
|
||||
"""
|
||||
List all files and subfolders of particular path non-recursively.
|
||||
|
|
@ -72,7 +97,7 @@ class AbstractProvider(metaclass=ABCMeta):
|
|||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
@abc.abstractmethod
|
||||
def create_folder(self, folder_path):
|
||||
"""
|
||||
Create all nonexistent folders and subfolders in 'path'.
|
||||
|
|
@ -85,7 +110,7 @@ class AbstractProvider(metaclass=ABCMeta):
|
|||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
@abc.abstractmethod
|
||||
def get_tree(self):
|
||||
"""
|
||||
Creates folder structure for providers which do not provide
|
||||
|
|
@ -94,16 +119,50 @@ class AbstractProvider(metaclass=ABCMeta):
|
|||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def resolve_path(self, path, root_config, anatomy=None):
|
||||
@abc.abstractmethod
|
||||
def get_roots_config(self, anatomy=None):
|
||||
"""
|
||||
Replaces root placeholders with appropriate real value from
|
||||
'root_configs' (from Settings or Local Settings) or Anatomy
|
||||
(mainly for 'studio' site)
|
||||
Returns root values for path resolving
|
||||
|
||||
Args:
|
||||
path(string): path with '{root[work]}/...'
|
||||
root_config(dict): from Settings or Local Settings
|
||||
anatomy (Anatomy): prepared anatomy object for project
|
||||
Takes value from Anatomy which takes values from Settings
|
||||
overridden by Local Settings
|
||||
|
||||
Returns:
|
||||
(dict) - {"root": {"root": "/My Drive"}}
|
||||
OR
|
||||
{"root": {"root_ONE": "value", "root_TWO":"value}}
|
||||
Format is importing for usage of python's format ** approach
|
||||
"""
|
||||
pass
|
||||
|
||||
def resolve_path(self, path, root_config=None, anatomy=None):
|
||||
"""
|
||||
Replaces all root placeholders with proper values
|
||||
|
||||
Args:
|
||||
path(string): root[work]/folder...
|
||||
root_config (dict): {'work': "c:/..."...}
|
||||
anatomy (Anatomy): object of Anatomy
|
||||
Returns:
|
||||
(string): proper url
|
||||
"""
|
||||
if not root_config:
|
||||
root_config = self.get_roots_config(anatomy)
|
||||
|
||||
if root_config and not root_config.get("root"):
|
||||
root_config = {"root": root_config}
|
||||
|
||||
try:
|
||||
if not root_config:
|
||||
raise KeyError
|
||||
|
||||
path = path.format(**root_config)
|
||||
except KeyError:
|
||||
try:
|
||||
path = anatomy.fill_root(path)
|
||||
except KeyError:
|
||||
msg = "Error in resolving local root from anatomy"
|
||||
log.error(msg)
|
||||
raise ValueError(msg)
|
||||
|
||||
return path
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ from openpype.api import get_system_settings
|
|||
from ..utils import time_function
|
||||
import time
|
||||
|
||||
|
||||
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly',
|
||||
'https://www.googleapis.com/auth/drive.file',
|
||||
'https://www.googleapis.com/auth/drive.readonly'] # for write|delete
|
||||
|
|
@ -45,9 +46,10 @@ class GDriveHandler(AbstractProvider):
|
|||
MY_DRIVE_STR = 'My Drive' # name of root folder of regular Google drive
|
||||
CHUNK_SIZE = 2097152 # must be divisible by 256!
|
||||
|
||||
def __init__(self, site_name, tree=None, presets=None):
|
||||
def __init__(self, project_name, site_name, tree=None, presets=None):
|
||||
self.presets = None
|
||||
self.active = False
|
||||
self.project_name = project_name
|
||||
self.site_name = site_name
|
||||
|
||||
self.presets = presets
|
||||
|
|
@ -65,137 +67,6 @@ class GDriveHandler(AbstractProvider):
|
|||
self._tree = tree
|
||||
self.active = True
|
||||
|
||||
def _get_gd_service(self):
|
||||
"""
|
||||
Authorize client with 'credentials.json', uses service account.
|
||||
Service account needs to have target folder shared with.
|
||||
Produces service that communicates with GDrive API.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
creds = service_account.Credentials.from_service_account_file(
|
||||
self.presets["credentials_url"],
|
||||
scopes=SCOPES)
|
||||
service = build('drive', 'v3',
|
||||
credentials=creds, cache_discovery=False)
|
||||
return service
|
||||
|
||||
def _prepare_root_info(self):
|
||||
"""
|
||||
Prepare info about roots and theirs folder ids from 'presets'.
|
||||
Configuration might be for single or multiroot projects.
|
||||
Regular My Drive and Shared drives are implemented, their root
|
||||
folder ids need to be queried in slightly different way.
|
||||
|
||||
Returns:
|
||||
(dicts) of dicts where root folders are keys
|
||||
"""
|
||||
roots = {}
|
||||
for path in self.get_roots_config().values():
|
||||
if self.MY_DRIVE_STR in path:
|
||||
roots[self.MY_DRIVE_STR] = self.service.files()\
|
||||
.get(fileId='root').execute()
|
||||
else:
|
||||
shared_drives = []
|
||||
page_token = None
|
||||
|
||||
while True:
|
||||
response = self.service.drives().list(
|
||||
pageSize=100,
|
||||
pageToken=page_token).execute()
|
||||
shared_drives.extend(response.get('drives', []))
|
||||
page_token = response.get('nextPageToken', None)
|
||||
if page_token is None:
|
||||
break
|
||||
|
||||
folders = path.split('/')
|
||||
if len(folders) < 2:
|
||||
raise ValueError("Wrong root folder definition {}".
|
||||
format(path))
|
||||
|
||||
for shared_drive in shared_drives:
|
||||
if folders[1] in shared_drive["name"]:
|
||||
roots[shared_drive["name"]] = {
|
||||
"name": shared_drive["name"],
|
||||
"id": shared_drive["id"]}
|
||||
if self.MY_DRIVE_STR not in roots: # add My Drive always
|
||||
roots[self.MY_DRIVE_STR] = self.service.files() \
|
||||
.get(fileId='root').execute()
|
||||
|
||||
return roots
|
||||
|
||||
@time_function
|
||||
def _build_tree(self, folders):
|
||||
"""
|
||||
Create in-memory structure resolving paths to folder id as
|
||||
recursive querying might be slower.
|
||||
Initialized in the time of class initialization.
|
||||
Maybe should be persisted
|
||||
Tree is structure of path to id:
|
||||
'/ROOT': {'id': '1234567'}
|
||||
'/ROOT/PROJECT_FOLDER': {'id':'222222'}
|
||||
'/ROOT/PROJECT_FOLDER/Assets': {'id': '3434545'}
|
||||
Args:
|
||||
folders (list): list of dictionaries with folder metadata
|
||||
Returns:
|
||||
(dictionary) path as a key, folder id as a value
|
||||
"""
|
||||
log.debug("build_tree len {}".format(len(folders)))
|
||||
root_ids = []
|
||||
default_root_id = None
|
||||
tree = {}
|
||||
ending_by = {}
|
||||
for root_name, root in self.root.items(): # might be multiple roots
|
||||
if root["id"] not in root_ids:
|
||||
tree["/" + root_name] = {"id": root["id"]}
|
||||
ending_by[root["id"]] = "/" + root_name
|
||||
root_ids.append(root["id"])
|
||||
|
||||
if self.MY_DRIVE_STR == root_name:
|
||||
default_root_id = root["id"]
|
||||
|
||||
no_parents_yet = {}
|
||||
while folders:
|
||||
folder = folders.pop(0)
|
||||
parents = folder.get("parents", [])
|
||||
# weird cases, shared folders, etc, parent under root
|
||||
if not parents:
|
||||
parent = default_root_id
|
||||
else:
|
||||
parent = parents[0]
|
||||
|
||||
if folder["id"] in root_ids: # do not process root
|
||||
continue
|
||||
|
||||
if parent in ending_by:
|
||||
path_key = ending_by[parent] + "/" + folder["name"]
|
||||
ending_by[folder["id"]] = path_key
|
||||
tree[path_key] = {"id": folder["id"]}
|
||||
else:
|
||||
no_parents_yet.setdefault(parent, []).append((folder["id"],
|
||||
folder["name"]))
|
||||
loop_cnt = 0
|
||||
# break if looped more then X times - safety against infinite loop
|
||||
while no_parents_yet and loop_cnt < 20:
|
||||
|
||||
keys = list(no_parents_yet.keys())
|
||||
for parent in keys:
|
||||
if parent in ending_by.keys():
|
||||
subfolders = no_parents_yet.pop(parent)
|
||||
for folder_id, folder_name in subfolders:
|
||||
path_key = ending_by[parent] + "/" + folder_name
|
||||
ending_by[folder_id] = path_key
|
||||
tree[path_key] = {"id": folder_id}
|
||||
loop_cnt += 1
|
||||
|
||||
if len(no_parents_yet) > 0:
|
||||
log.debug("Some folders path are not resolved {}".
|
||||
format(no_parents_yet))
|
||||
log.debug("Remove deleted folders from trash.")
|
||||
|
||||
return tree
|
||||
|
||||
def is_active(self):
|
||||
"""
|
||||
Returns True if provider is activated, eg. has working credentials.
|
||||
|
|
@@ -204,6 +75,21 @@ class GDriveHandler(AbstractProvider):
        """
        return self.active

    def get_roots_config(self, anatomy=None):
        """
        Returns root values for path resolving

        Use only Settings as GDrive cannot be modified by Local Settings

        Returns:
            (dict) - {"root": {"root": "/My Drive"}}
            OR
            {"root": {"root_ONE": "value", "root_TWO":"value}}
        Format is importing for usage of python's format ** approach
        """
        # GDrive roots cannot be locally overridden
        return self.presets['root']

    def get_tree(self):
        """
        Building of the folder tree could be potentially expensive,
@@ -217,26 +103,6 @@ class GDriveHandler(AbstractProvider):
        self._tree = self._build_tree(self.list_folders())
        return self._tree

    def get_roots_config(self):
        """
        Returns value from presets of roots. It calculates with multi
        roots. Config should be simple key value, or dictionary.

        Examples:
            "root": "/My Drive"
            OR
            "root": {"root_ONE": "value", "root_TWO":"value}
        Returns:
            (dict) - {"root": {"root": "/My Drive"}}
            OR
            {"root": {"root_ONE": "value", "root_TWO":"value}}
        Format is importing for usage of python's format ** approach
        """
        roots = self.presets["root"]
        if isinstance(roots, str):
            roots = {"root": roots}
        return roots

    def create_folder(self, path):
        """
        Create all nonexistent folders and subfolders in 'path'.

@@ -510,20 +376,6 @@ class GDriveHandler(AbstractProvider):
            self.service.files().delete(fileId=file["id"],
                                        supportsAllDrives=True).execute()

    def _get_folder_metadata(self, path):
        """
        Get info about folder with 'path'
        Args:
            path (string):

        Returns:
            (dictionary) with metadata or raises ValueError
        """
        try:
            return self.get_tree()[path]
        except Exception:
            raise ValueError("Uknown folder id {}".format(id))

    def list_folder(self, folder_path):
        """
        List all files and subfolders of particular path non-recursively.

@@ -678,15 +530,151 @@ class GDriveHandler(AbstractProvider):
            return
        return provider_presets

    def resolve_path(self, path, root_config, anatomy=None):
        if not root_config.get("root"):
            root_config = {"root": root_config}
def _get_gd_service(self):
|
||||
"""
|
||||
Authorize client with 'credentials.json', uses service account.
|
||||
Service account needs to have target folder shared with.
|
||||
Produces service that communicates with GDrive API.
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
creds = service_account.Credentials.from_service_account_file(
|
||||
self.presets["credentials_url"],
|
||||
scopes=SCOPES)
|
||||
service = build('drive', 'v3',
|
||||
credentials=creds, cache_discovery=False)
|
||||
return service
|
||||
|
||||
def _prepare_root_info(self):
|
||||
"""
|
||||
Prepare info about roots and theirs folder ids from 'presets'.
|
||||
Configuration might be for single or multiroot projects.
|
||||
Regular My Drive and Shared drives are implemented, their root
|
||||
folder ids need to be queried in slightly different way.
|
||||
|
||||
Returns:
|
||||
(dicts) of dicts where root folders are keys
|
||||
"""
|
||||
roots = {}
|
||||
config_roots = self.get_roots_config()
|
||||
for path in config_roots.values():
|
||||
if self.MY_DRIVE_STR in path:
|
||||
roots[self.MY_DRIVE_STR] = self.service.files()\
|
||||
.get(fileId='root').execute()
|
||||
else:
|
||||
shared_drives = []
|
||||
page_token = None
|
||||
|
||||
while True:
|
||||
response = self.service.drives().list(
|
||||
pageSize=100,
|
||||
pageToken=page_token).execute()
|
||||
shared_drives.extend(response.get('drives', []))
|
||||
page_token = response.get('nextPageToken', None)
|
||||
if page_token is None:
|
||||
break
|
||||
|
||||
folders = path.split('/')
|
||||
if len(folders) < 2:
|
||||
raise ValueError("Wrong root folder definition {}".
|
||||
format(path))
|
||||
|
||||
for shared_drive in shared_drives:
|
||||
if folders[1] in shared_drive["name"]:
|
||||
roots[shared_drive["name"]] = {
|
||||
"name": shared_drive["name"],
|
||||
"id": shared_drive["id"]}
|
||||
if self.MY_DRIVE_STR not in roots: # add My Drive always
|
||||
roots[self.MY_DRIVE_STR] = self.service.files() \
|
||||
.get(fileId='root').execute()
|
||||
|
||||
return roots
|
||||
|
||||
@time_function
|
||||
def _build_tree(self, folders):
|
||||
"""
|
||||
Create in-memory structure resolving paths to folder id as
|
||||
recursive querying might be slower.
|
||||
Initialized in the time of class initialization.
|
||||
Maybe should be persisted
|
||||
Tree is structure of path to id:
|
||||
'/ROOT': {'id': '1234567'}
|
||||
'/ROOT/PROJECT_FOLDER': {'id':'222222'}
|
||||
'/ROOT/PROJECT_FOLDER/Assets': {'id': '3434545'}
|
||||
Args:
|
||||
folders (list): list of dictionaries with folder metadata
|
||||
Returns:
|
||||
(dictionary) path as a key, folder id as a value
|
||||
"""
|
||||
log.debug("build_tree len {}".format(len(folders)))
|
||||
root_ids = []
|
||||
default_root_id = None
|
||||
tree = {}
|
||||
ending_by = {}
|
||||
for root_name, root in self.root.items(): # might be multiple roots
|
||||
if root["id"] not in root_ids:
|
||||
tree["/" + root_name] = {"id": root["id"]}
|
||||
ending_by[root["id"]] = "/" + root_name
|
||||
root_ids.append(root["id"])
|
||||
|
||||
if self.MY_DRIVE_STR == root_name:
|
||||
default_root_id = root["id"]
|
||||
|
||||
no_parents_yet = {}
|
||||
while folders:
|
||||
folder = folders.pop(0)
|
||||
parents = folder.get("parents", [])
|
||||
# weird cases, shared folders, etc, parent under root
|
||||
if not parents:
|
||||
parent = default_root_id
|
||||
else:
|
||||
parent = parents[0]
|
||||
|
||||
if folder["id"] in root_ids: # do not process root
|
||||
continue
|
||||
|
||||
if parent in ending_by:
|
||||
path_key = ending_by[parent] + "/" + folder["name"]
|
||||
ending_by[folder["id"]] = path_key
|
||||
tree[path_key] = {"id": folder["id"]}
|
||||
else:
|
||||
no_parents_yet.setdefault(parent, []).append((folder["id"],
|
||||
folder["name"]))
|
||||
loop_cnt = 0
|
||||
# break if looped more then X times - safety against infinite loop
|
||||
while no_parents_yet and loop_cnt < 20:
|
||||
|
||||
keys = list(no_parents_yet.keys())
|
||||
for parent in keys:
|
||||
if parent in ending_by.keys():
|
||||
subfolders = no_parents_yet.pop(parent)
|
||||
for folder_id, folder_name in subfolders:
|
||||
path_key = ending_by[parent] + "/" + folder_name
|
||||
ending_by[folder_id] = path_key
|
||||
tree[path_key] = {"id": folder_id}
|
||||
loop_cnt += 1
|
||||
|
||||
if len(no_parents_yet) > 0:
|
||||
log.debug("Some folders path are not resolved {}".
|
||||
format(no_parents_yet))
|
||||
log.debug("Remove deleted folders from trash.")
|
||||
|
||||
return tree
|
||||
|
||||
def _get_folder_metadata(self, path):
|
||||
"""
|
||||
Get info about folder with 'path'
|
||||
Args:
|
||||
path (string):
|
||||
|
||||
Returns:
|
||||
(dictionary) with metadata or raises ValueError
|
||||
"""
|
||||
try:
|
||||
return path.format(**root_config)
|
||||
except KeyError:
|
||||
msg = "Error in resolving remote root, unknown key"
|
||||
log.error(msg)
|
||||
return self.get_tree()[path]
|
||||
except Exception:
|
||||
raise ValueError("Uknown folder id {}".format(id))
|
||||
|
||||
def _handle_q(self, q, trashed=False):
|
||||
""" API list call contain trashed and hidden files/folder by default.
|
||||
|
|
|
|||
|
|
@@ -1,4 +1,3 @@
from enum import Enum
from .gdrive import GDriveHandler
from .local_drive import LocalDriveHandler


@@ -25,7 +24,8 @@ class ProviderFactory:
        """
        self.providers[provider] = (creator, batch_limit)

    def get_provider(self, provider, site_name, tree=None, presets=None):
    def get_provider(self, provider, project_name, site_name,
                     tree=None, presets=None):
        """
        Returns new instance of provider client for specific site.
        One provider could have multiple sites.

@@ -37,6 +37,7 @@ class ProviderFactory:
            provider (string): 'gdrive','S3'
            site_name (string): descriptor of site, different service accounts
                must have different site name
            project_name (string): different projects could have diff. sites
            tree (dictionary): - folder paths to folder id structure
            presets (dictionary): config for provider and site (eg.
                "credentials_url"..)

@@ -44,7 +45,8 @@ class ProviderFactory:
            (implementation of AbstractProvider)
        """
        creator_info = self._get_creator_info(provider)
        site = creator_info[0](site_name, tree, presets)  # call init
        # call init
        site = creator_info[0](project_name, site_name, tree, presets)

        return site
|
||||
|
||||
|
|
|
|||
|
|
@@ -4,7 +4,7 @@ import shutil
import threading
import time

from openpype.api import Logger
from openpype.api import Logger, Anatomy
from .abstract_provider import AbstractProvider

log = Logger().get_logger("SyncServer")

@@ -12,6 +12,14 @@ log = Logger().get_logger("SyncServer")

class LocalDriveHandler(AbstractProvider):
    """ Handles required operations on mounted disks with OS """
    def __init__(self, project_name, site_name, tree=None, presets=None):
        self.presets = None
        self.active = False
        self.project_name = project_name
        self.site_name = site_name

        self.active = self.is_active()

    def is_active(self):
        return True

@@ -82,27 +90,37 @@ class LocalDriveHandler(AbstractProvider):
        os.makedirs(folder_path, exist_ok=True)
        return folder_path

    def get_roots_config(self, anatomy=None):
        """
        Returns root values for path resolving

        Takes value from Anatomy which takes values from Settings
        overridden by Local Settings

        Returns:
            (dict) - {"root": {"root": "/My Drive"}}
            OR
            {"root": {"root_ONE": "value", "root_TWO":"value}}
        Format is importing for usage of python's format ** approach
        """
        if not anatomy:
            anatomy = Anatomy(self.project_name,
                              self._normalize_site_name(self.site_name))

        return {'root': anatomy.roots}

    def get_tree(self):
        return

    def resolve_path(self, path, root_config, anatomy=None):
        if root_config and not root_config.get("root"):
            root_config = {"root": root_config}
    def get_configurable_items_for_site(self):
        """
        Returns list of items that should be configurable by User

        try:
            if not root_config:
                raise KeyError

            path = path.format(**root_config)
        except KeyError:
            try:
                path = anatomy.fill_root(path)
            except KeyError:
                msg = "Error in resolving local root from anatomy"
                log.error(msg)
                raise ValueError(msg)

        return path
        Returns:
            (list of dict)
            [{key:"root", label:"root", value:"valueFromSettings"}]
        """
        pass

    def _copy(self, source_path, target_path):
        print("copying {}->{}".format(source_path, target_path))

@@ -133,3 +151,9 @@ class LocalDriveHandler(AbstractProvider):
        )
        target_file_size = os.path.getsize(target_path)
        time.sleep(0.5)

    def _normalize_site_name(self, site_name):
        """Transform user id to 'local' for Local settings"""
        if site_name != 'studio':
            return 'local'
        return site_name
|
||||
|
|
|
|||
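The resolve_path logic above boils down to formatting an anatomy-style template with the configured roots. A minimal sketch of that behaviour, using an assumed root value and template path that are not part of this commit:

root_config = {"root": {"work": "C:/projects"}}        # assumed root mapping
path_template = "{root[work]}/MyProject/shots/sh010"   # assumed anatomy-style path
print(path_template.format(**root_config))             # C:/projects/MyProject/shots/sh010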
File diff suppressed because it is too large
1193  openpype/modules/sync_server/sync_server_module.py  (new file; diff suppressed because it is too large)
52  openpype/modules/sync_server/tray/lib.py  (new file)
|
|
@@ -0,0 +1,52 @@
from Qt import QtCore

from openpype.lib import PypeLogger


log = PypeLogger().get_logger("SyncServer")

STATUS = {
    0: 'In Progress',
    1: 'Queued',
    2: 'Failed',
    3: 'Paused',
    4: 'Synced OK',
    -1: 'Not available'
}

DUMMY_PROJECT = "No project configured"

ProviderRole = QtCore.Qt.UserRole + 2
ProgressRole = QtCore.Qt.UserRole + 4
DateRole = QtCore.Qt.UserRole + 6
FailedRole = QtCore.Qt.UserRole + 8


def pretty_size(value, suffix='B'):
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if abs(value) < 1024.0:
            return "%3.1f%s%s" % (value, unit, suffix)
        value /= 1024.0
    return "%.1f%s%s" % (value, 'Yi', suffix)


def convert_progress(value):
    try:
        progress = float(value)
    except (ValueError, TypeError):
        progress = 0.0

    return progress


def translate_provider_for_icon(sync_server, project, site):
    """
    Get provider for 'site'

    This is used for getting icon, 'studio' should have different icon
    then local sites, even the provider 'local_drive' is same

    """
    if site == sync_server.DEFAULT_SITE:
        return sync_server.DEFAULT_SITE
    return sync_server.get_provider_for_site(project, site)
|
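A minimal usage sketch for the helpers introduced in tray/lib.py above; the input values are illustrative assumptions only:

from openpype.modules.sync_server.tray import lib

print(lib.pretty_size(2097152))      # "2.0MiB"
print(lib.convert_progress("0.75"))  # 0.75
print(lib.convert_progress(None))    # 0.0 - invalid input falls back to zero
print(lib.STATUS[4])                 # "Synced OK"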
||||
1124  openpype/modules/sync_server/tray/models.py  (new file; diff suppressed because it is too large)
820  openpype/modules/sync_server/tray/widgets.py  (new file)
|
|
@ -0,0 +1,820 @@
|
|||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from Qt import QtWidgets, QtCore, QtGui
|
||||
from Qt.QtCore import Qt
|
||||
|
||||
from openpype.tools.settings import (
|
||||
ProjectListWidget,
|
||||
style
|
||||
)
|
||||
|
||||
from openpype.api import get_local_site_id
|
||||
from openpype.lib import PypeLogger
|
||||
|
||||
from avalon.tools.delegates import pretty_timestamp
|
||||
|
||||
from openpype.modules.sync_server.tray.models import (
|
||||
SyncRepresentationSummaryModel,
|
||||
SyncRepresentationDetailModel
|
||||
)
|
||||
|
||||
from openpype.modules.sync_server.tray import lib
|
||||
|
||||
log = PypeLogger().get_logger("SyncServer")
|
||||
|
||||
|
||||
class SyncProjectListWidget(ProjectListWidget):
|
||||
"""
|
||||
Lists all projects that are synchronized to choose from
|
||||
"""
|
||||
|
||||
def __init__(self, sync_server, parent):
|
||||
super(SyncProjectListWidget, self).__init__(parent)
|
||||
self.sync_server = sync_server
|
||||
self.project_list.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
|
||||
self.project_list.customContextMenuRequested.connect(
|
||||
self._on_context_menu)
|
||||
self.project_name = None
|
||||
self.local_site = None
|
||||
self.icons = {}
|
||||
|
||||
def validate_context_change(self):
|
||||
return True
|
||||
|
||||
def refresh(self):
|
||||
model = self.project_list.model()
|
||||
model.clear()
|
||||
|
||||
project_name = None
|
||||
for project_name in self.sync_server.sync_project_settings.\
|
||||
keys():
|
||||
if self.sync_server.is_paused() or \
|
||||
self.sync_server.is_project_paused(project_name):
|
||||
icon = self._get_icon("paused")
|
||||
else:
|
||||
icon = self._get_icon("synced")
|
||||
|
||||
model.appendRow(QtGui.QStandardItem(icon, project_name))
|
||||
|
||||
if len(self.sync_server.sync_project_settings.keys()) == 0:
|
||||
model.appendRow(QtGui.QStandardItem(lib.DUMMY_PROJECT))
|
||||
|
||||
self.current_project = self.project_list.currentIndex().data(
|
||||
QtCore.Qt.DisplayRole
|
||||
)
|
||||
if not self.current_project:
|
||||
self.current_project = self.project_list.model().item(0). \
|
||||
data(QtCore.Qt.DisplayRole)
|
||||
|
||||
if project_name:
|
||||
self.local_site = self.sync_server.get_active_site(project_name)
|
||||
|
||||
def _get_icon(self, status):
|
||||
if not self.icons.get(status):
|
||||
resource_path = os.path.dirname(__file__)
|
||||
resource_path = os.path.join(resource_path, "..",
|
||||
"resources")
|
||||
pix_url = "{}/{}.png".format(resource_path, status)
|
||||
icon = QtGui.QIcon(pix_url)
|
||||
self.icons[status] = icon
|
||||
else:
|
||||
icon = self.icons[status]
|
||||
return icon
|
||||
|
||||
def _on_context_menu(self, point):
|
||||
point_index = self.project_list.indexAt(point)
|
||||
if not point_index.isValid():
|
||||
return
|
||||
|
||||
self.project_name = point_index.data(QtCore.Qt.DisplayRole)
|
||||
|
||||
menu = QtWidgets.QMenu()
|
||||
menu.setStyleSheet(style.load_stylesheet())
|
||||
actions_mapping = {}
|
||||
|
||||
if self.sync_server.is_project_paused(self.project_name):
|
||||
action = QtWidgets.QAction("Unpause")
|
||||
actions_mapping[action] = self._unpause
|
||||
else:
|
||||
action = QtWidgets.QAction("Pause")
|
||||
actions_mapping[action] = self._pause
|
||||
menu.addAction(action)
|
||||
|
||||
if self.local_site == get_local_site_id():
|
||||
action = QtWidgets.QAction("Clear local project")
|
||||
actions_mapping[action] = self._clear_project
|
||||
menu.addAction(action)
|
||||
|
||||
result = menu.exec_(QtGui.QCursor.pos())
|
||||
if result:
|
||||
to_run = actions_mapping[result]
|
||||
if to_run:
|
||||
to_run()
|
||||
|
||||
def _pause(self):
|
||||
if self.project_name:
|
||||
self.sync_server.pause_project(self.project_name)
|
||||
self.project_name = None
|
||||
self.refresh()
|
||||
|
||||
def _unpause(self):
|
||||
if self.project_name:
|
||||
self.sync_server.unpause_project(self.project_name)
|
||||
self.project_name = None
|
||||
self.refresh()
|
||||
|
||||
def _clear_project(self):
|
||||
if self.project_name:
|
||||
self.sync_server.clear_project(self.project_name, self.local_site)
|
||||
self.project_name = None
|
||||
self.refresh()
|
||||
|
||||
|
||||
class SyncRepresentationWidget(QtWidgets.QWidget):
|
||||
"""
|
||||
Summary dialog with list of representations that matches current
|
||||
settings 'local_site' and 'remote_site'.
|
||||
"""
|
||||
active_changed = QtCore.Signal() # active index changed
|
||||
message_generated = QtCore.Signal(str)
|
||||
|
||||
default_widths = (
|
||||
("asset", 220),
|
||||
("subset", 190),
|
||||
("version", 55),
|
||||
("representation", 95),
|
||||
("local_site", 170),
|
||||
("remote_site", 170),
|
||||
("files_count", 50),
|
||||
("files_size", 60),
|
||||
("priority", 50),
|
||||
("state", 110)
|
||||
)
|
||||
|
||||
def __init__(self, sync_server, project=None, parent=None):
|
||||
super(SyncRepresentationWidget, self).__init__(parent)
|
||||
|
||||
self.sync_server = sync_server
|
||||
|
||||
self._selected_id = None # keep last selected _id
|
||||
self.representation_id = None
|
||||
self.site_name = None # to pause/unpause representation
|
||||
|
||||
self.filter = QtWidgets.QLineEdit()
|
||||
self.filter.setPlaceholderText("Filter representations..")
|
||||
|
||||
self._scrollbar_pos = None
|
||||
|
||||
top_bar_layout = QtWidgets.QHBoxLayout()
|
||||
top_bar_layout.addWidget(self.filter)
|
||||
|
||||
self.table_view = QtWidgets.QTableView()
|
||||
headers = [item[0] for item in self.default_widths]
|
||||
|
||||
model = SyncRepresentationSummaryModel(sync_server, headers, project)
|
||||
self.table_view.setModel(model)
|
||||
self.table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
|
||||
self.table_view.setSelectionMode(
|
||||
QtWidgets.QAbstractItemView.SingleSelection)
|
||||
self.table_view.setSelectionBehavior(
|
||||
QtWidgets.QAbstractItemView.SelectRows)
|
||||
self.table_view.horizontalHeader().setSortIndicator(
|
||||
-1, Qt.AscendingOrder)
|
||||
self.table_view.setSortingEnabled(True)
|
||||
self.table_view.horizontalHeader().setSortIndicatorShown(True)
|
||||
self.table_view.setAlternatingRowColors(True)
|
||||
self.table_view.verticalHeader().hide()
|
||||
|
||||
column = self.table_view.model().get_header_index("local_site")
|
||||
delegate = ImageDelegate(self)
|
||||
self.table_view.setItemDelegateForColumn(column, delegate)
|
||||
|
||||
column = self.table_view.model().get_header_index("remote_site")
|
||||
delegate = ImageDelegate(self)
|
||||
self.table_view.setItemDelegateForColumn(column, delegate)
|
||||
|
||||
for column_name, width in self.default_widths:
|
||||
idx = model.get_header_index(column_name)
|
||||
self.table_view.setColumnWidth(idx, width)
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
layout.addLayout(top_bar_layout)
|
||||
layout.addWidget(self.table_view)
|
||||
|
||||
self.table_view.doubleClicked.connect(self._double_clicked)
|
||||
self.filter.textChanged.connect(lambda: model.set_filter(
|
||||
self.filter.text()))
|
||||
self.table_view.customContextMenuRequested.connect(
|
||||
self._on_context_menu)
|
||||
|
||||
model.refresh_started.connect(self._save_scrollbar)
|
||||
model.refresh_finished.connect(self._set_scrollbar)
|
||||
self.table_view.model().modelReset.connect(self._set_selection)
|
||||
|
||||
self.selection_model = self.table_view.selectionModel()
|
||||
self.selection_model.selectionChanged.connect(self._selection_changed)
|
||||
|
||||
def _selection_changed(self, _new_selection):
|
||||
index = self.selection_model.currentIndex()
|
||||
self._selected_id = \
|
||||
self.table_view.model().data(index, Qt.UserRole)
|
||||
|
||||
def _set_selection(self):
|
||||
"""
|
||||
Sets selection to 'self._selected_id' if exists.
|
||||
|
||||
Keep selection during model refresh.
|
||||
"""
|
||||
if self._selected_id:
|
||||
index = self.table_view.model().get_index(self._selected_id)
|
||||
if index and index.isValid():
|
||||
mode = QtCore.QItemSelectionModel.Select | \
|
||||
QtCore.QItemSelectionModel.Rows
|
||||
self.selection_model.setCurrentIndex(index, mode)
|
||||
else:
|
||||
self._selected_id = None
|
||||
|
||||
def _double_clicked(self, index):
|
||||
"""
|
||||
Opens representation dialog with all files after doubleclick
|
||||
"""
|
||||
_id = self.table_view.model().data(index, Qt.UserRole)
|
||||
detail_window = SyncServerDetailWindow(
|
||||
self.sync_server, _id, self.table_view.model().project)
|
||||
detail_window.exec()
|
||||
|
||||
def _on_context_menu(self, point):
|
||||
"""
|
||||
Shows menu with loader actions on Right-click.
|
||||
"""
|
||||
point_index = self.table_view.indexAt(point)
|
||||
if not point_index.isValid():
|
||||
return
|
||||
|
||||
self.item = self.table_view.model()._data[point_index.row()]
|
||||
self.representation_id = self.item._id
|
||||
log.debug("menu representation _id:: {}".
|
||||
format(self.representation_id))
|
||||
|
||||
menu = QtWidgets.QMenu()
|
||||
menu.setStyleSheet(style.load_stylesheet())
|
||||
actions_mapping = {}
|
||||
actions_kwargs_mapping = {}
|
||||
|
||||
local_site = self.item.local_site
|
||||
local_progress = self.item.local_progress
|
||||
remote_site = self.item.remote_site
|
||||
remote_progress = self.item.remote_progress
|
||||
|
||||
for site, progress in {local_site: local_progress,
|
||||
remote_site: remote_progress}.items():
|
||||
project = self.table_view.model().project
|
||||
provider = self.sync_server.get_provider_for_site(project,
|
||||
site)
|
||||
if provider == 'local_drive':
|
||||
if 'studio' in site:
|
||||
txt = " studio version"
|
||||
else:
|
||||
txt = " local version"
|
||||
action = QtWidgets.QAction("Open in explorer" + txt)
|
||||
if progress == 1.0:
|
||||
actions_mapping[action] = self._open_in_explorer
|
||||
actions_kwargs_mapping[action] = {'site': site}
|
||||
menu.addAction(action)
|
||||
|
||||
# progress smaller then 1.0 --> in progress or queued
|
||||
if local_progress < 1.0:
|
||||
self.site_name = local_site
|
||||
else:
|
||||
self.site_name = remote_site
|
||||
|
||||
if self.item.state in [lib.STATUS[0], lib.STATUS[1]]:
|
||||
action = QtWidgets.QAction("Pause")
|
||||
actions_mapping[action] = self._pause
|
||||
menu.addAction(action)
|
||||
|
||||
if self.item.state == lib.STATUS[3]:
|
||||
action = QtWidgets.QAction("Unpause")
|
||||
actions_mapping[action] = self._unpause
|
||||
menu.addAction(action)
|
||||
|
||||
# if self.item.state == lib.STATUS[1]:
|
||||
# action = QtWidgets.QAction("Open error detail")
|
||||
# actions_mapping[action] = self._show_detail
|
||||
# menu.addAction(action)
|
||||
|
||||
if remote_progress == 1.0:
|
||||
action = QtWidgets.QAction("Re-sync Active site")
|
||||
actions_mapping[action] = self._reset_local_site
|
||||
menu.addAction(action)
|
||||
|
||||
if local_progress == 1.0:
|
||||
action = QtWidgets.QAction("Re-sync Remote site")
|
||||
actions_mapping[action] = self._reset_remote_site
|
||||
menu.addAction(action)
|
||||
|
||||
if local_site != self.sync_server.DEFAULT_SITE:
|
||||
action = QtWidgets.QAction("Completely remove from local")
|
||||
actions_mapping[action] = self._remove_site
|
||||
menu.addAction(action)
|
||||
else:
|
||||
action = QtWidgets.QAction("Mark for sync to local")
|
||||
actions_mapping[action] = self._add_site
|
||||
menu.addAction(action)
|
||||
|
||||
if not actions_mapping:
|
||||
action = QtWidgets.QAction("< No action >")
|
||||
actions_mapping[action] = None
|
||||
menu.addAction(action)
|
||||
|
||||
result = menu.exec_(QtGui.QCursor.pos())
|
||||
if result:
|
||||
to_run = actions_mapping[result]
|
||||
to_run_kwargs = actions_kwargs_mapping.get(result, {})
|
||||
if to_run:
|
||||
to_run(**to_run_kwargs)
|
||||
|
||||
self.table_view.model().refresh()
|
||||
|
||||
def _pause(self):
|
||||
self.sync_server.pause_representation(self.table_view.model().project,
|
||||
self.representation_id,
|
||||
self.site_name)
|
||||
self.site_name = None
|
||||
self.message_generated.emit("Paused {}".format(self.representation_id))
|
||||
|
||||
def _unpause(self):
|
||||
self.sync_server.unpause_representation(
|
||||
self.table_view.model().project,
|
||||
self.representation_id,
|
||||
self.site_name)
|
||||
self.site_name = None
|
||||
self.message_generated.emit("Unpaused {}".format(
|
||||
self.representation_id))
|
||||
|
||||
# temporary here for testing, will be removed TODO
|
||||
def _add_site(self):
|
||||
log.info(self.representation_id)
|
||||
project_name = self.table_view.model().project
|
||||
local_site_name = get_local_site_id()
|
||||
try:
|
||||
self.sync_server.add_site(
|
||||
project_name,
|
||||
self.representation_id,
|
||||
local_site_name
|
||||
)
|
||||
self.message_generated.emit(
|
||||
"Site {} added for {}".format(local_site_name,
|
||||
self.representation_id))
|
||||
except ValueError as exp:
|
||||
self.message_generated.emit("Error {}".format(str(exp)))
|
||||
|
||||
def _remove_site(self):
|
||||
"""
|
||||
Removes site record AND files.
|
||||
|
||||
This is ONLY for representations stored on local site, which
|
||||
cannot be same as SyncServer.DEFAULT_SITE.
|
||||
|
||||
This could only happen when artist work on local machine, not
|
||||
connected to studio mounted drives.
|
||||
"""
|
||||
log.info("Removing {}".format(self.representation_id))
|
||||
try:
|
||||
local_site = get_local_site_id()
|
||||
self.sync_server.remove_site(
|
||||
self.table_view.model().project,
|
||||
self.representation_id,
|
||||
local_site,
|
||||
True)
|
||||
self.message_generated.emit("Site {} removed".format(local_site))
|
||||
except ValueError as exp:
|
||||
self.message_generated.emit("Error {}".format(str(exp)))
|
||||
self.table_view.model().refresh(
|
||||
load_records=self.table_view.model()._rec_loaded)
|
||||
|
||||
def _reset_local_site(self):
|
||||
"""
|
||||
Removes errors or success metadata for particular file >> forces
|
||||
redo of upload/download
|
||||
"""
|
||||
self.sync_server.reset_provider_for_file(
|
||||
self.table_view.model().project,
|
||||
self.representation_id,
|
||||
'local')
|
||||
self.table_view.model().refresh(
|
||||
load_records=self.table_view.model()._rec_loaded)
|
||||
|
||||
def _reset_remote_site(self):
|
||||
"""
|
||||
Removes errors or success metadata for particular file >> forces
|
||||
redo of upload/download
|
||||
"""
|
||||
self.sync_server.reset_provider_for_file(
|
||||
self.table_view.model().project,
|
||||
self.representation_id,
|
||||
'remote')
|
||||
self.table_view.model().refresh(
|
||||
load_records=self.table_view.model()._rec_loaded)
|
||||
|
||||
def _open_in_explorer(self, site):
|
||||
if not self.item:
|
||||
return
|
||||
|
||||
fpath = self.item.path
|
||||
project = self.table_view.model().project
|
||||
fpath = self.sync_server.get_local_file_path(project,
|
||||
site,
|
||||
fpath)
|
||||
|
||||
fpath = os.path.normpath(os.path.dirname(fpath))
|
||||
if os.path.isdir(fpath):
|
||||
if 'win' in sys.platform: # windows
|
||||
subprocess.Popen('explorer "%s"' % fpath)
|
||||
elif sys.platform == 'darwin': # macOS
|
||||
subprocess.Popen(['open', fpath])
|
||||
else: # linux
|
||||
try:
|
||||
subprocess.Popen(['xdg-open', fpath])
|
||||
except OSError:
|
||||
raise OSError('unsupported xdg-open call??')
|
||||
|
||||
def _save_scrollbar(self):
|
||||
self._scrollbar_pos = self.table_view.verticalScrollBar().value()
|
||||
|
||||
def _set_scrollbar(self):
|
||||
if self._scrollbar_pos:
|
||||
self.table_view.verticalScrollBar().setValue(self._scrollbar_pos)
|
||||
|
||||
|
||||
class SyncRepresentationDetailWidget(QtWidgets.QWidget):
|
||||
"""
|
||||
Widget to display list of synchronizable files for single repre.
|
||||
|
||||
Args:
|
||||
_id (str): representation _id
|
||||
project (str): name of project with repre
|
||||
parent (QDialog): SyncServerDetailWindow
|
||||
"""
|
||||
active_changed = QtCore.Signal() # active index changed
|
||||
|
||||
default_widths = (
|
||||
("file", 290),
|
||||
("local_site", 185),
|
||||
("remote_site", 185),
|
||||
("size", 60),
|
||||
("priority", 25),
|
||||
("state", 110)
|
||||
)
|
||||
|
||||
def __init__(self, sync_server, _id=None, project=None, parent=None):
|
||||
super(SyncRepresentationDetailWidget, self).__init__(parent)
|
||||
|
||||
log.debug("Representation_id:{}".format(_id))
|
||||
self.representation_id = _id
|
||||
self.item = None # set to item that mouse was clicked over
|
||||
self.project = project
|
||||
|
||||
self.sync_server = sync_server
|
||||
|
||||
self._selected_id = None
|
||||
|
||||
self.filter = QtWidgets.QLineEdit()
|
||||
self.filter.setPlaceholderText("Filter representation..")
|
||||
|
||||
self._scrollbar_pos = None
|
||||
|
||||
top_bar_layout = QtWidgets.QHBoxLayout()
|
||||
top_bar_layout.addWidget(self.filter)
|
||||
|
||||
self.table_view = QtWidgets.QTableView()
|
||||
headers = [item[0] for item in self.default_widths]
|
||||
|
||||
model = SyncRepresentationDetailModel(sync_server, headers, _id,
|
||||
project)
|
||||
self.table_view.setModel(model)
|
||||
self.table_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
|
||||
self.table_view.setSelectionMode(
|
||||
QtWidgets.QAbstractItemView.SingleSelection)
|
||||
self.table_view.setSelectionBehavior(
|
||||
QtWidgets.QTableView.SelectRows)
|
||||
self.table_view.horizontalHeader().setSortIndicator(-1,
|
||||
Qt.AscendingOrder)
|
||||
self.table_view.setSortingEnabled(True)
|
||||
self.table_view.horizontalHeader().setSortIndicatorShown(True)
|
||||
self.table_view.setAlternatingRowColors(True)
|
||||
self.table_view.verticalHeader().hide()
|
||||
|
||||
column = self.table_view.model().get_header_index("local_site")
|
||||
delegate = ImageDelegate(self)
|
||||
self.table_view.setItemDelegateForColumn(column, delegate)
|
||||
|
||||
column = self.table_view.model().get_header_index("remote_site")
|
||||
delegate = ImageDelegate(self)
|
||||
self.table_view.setItemDelegateForColumn(column, delegate)
|
||||
|
||||
for column_name, width in self.default_widths:
|
||||
idx = model.get_header_index(column_name)
|
||||
self.table_view.setColumnWidth(idx, width)
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
layout.addLayout(top_bar_layout)
|
||||
layout.addWidget(self.table_view)
|
||||
|
||||
self.filter.textChanged.connect(lambda: model.set_filter(
|
||||
self.filter.text()))
|
||||
self.table_view.customContextMenuRequested.connect(
|
||||
self._on_context_menu)
|
||||
|
||||
model.refresh_started.connect(self._save_scrollbar)
|
||||
model.refresh_finished.connect(self._set_scrollbar)
|
||||
self.table_view.model().modelReset.connect(self._set_selection)
|
||||
|
||||
self.selection_model = self.table_view.selectionModel()
|
||||
self.selection_model.selectionChanged.connect(self._selection_changed)
|
||||
|
||||
def _selection_changed(self):
|
||||
index = self.selection_model.currentIndex()
|
||||
self._selected_id = self.table_view.model().data(index, Qt.UserRole)
|
||||
|
||||
def _set_selection(self):
|
||||
"""
|
||||
Sets selection to 'self._selected_id' if exists.
|
||||
|
||||
Keep selection during model refresh.
|
||||
"""
|
||||
if self._selected_id:
|
||||
index = self.table_view.model().get_index(self._selected_id)
|
||||
if index and index.isValid():
|
||||
mode = QtCore.QItemSelectionModel.Select | \
|
||||
QtCore.QItemSelectionModel.Rows
|
||||
self.selection_model.setCurrentIndex(index, mode)
|
||||
else:
|
||||
self._selected_id = None
|
||||
|
||||
def _show_detail(self):
|
||||
"""
|
||||
Shows windows with error message for failed sync of a file.
|
||||
"""
|
||||
dt = max(self.item.created_dt, self.item.sync_dt)
|
||||
detail_window = SyncRepresentationErrorWindow(self.item._id,
|
||||
self.project,
|
||||
dt,
|
||||
self.item.tries,
|
||||
self.item.error)
|
||||
detail_window.exec()
|
||||
|
||||
def _on_context_menu(self, point):
|
||||
"""
|
||||
Shows menu with loader actions on Right-click.
|
||||
"""
|
||||
point_index = self.table_view.indexAt(point)
|
||||
if not point_index.isValid():
|
||||
return
|
||||
|
||||
self.item = self.table_view.model()._data[point_index.row()]
|
||||
|
||||
menu = QtWidgets.QMenu()
|
||||
menu.setStyleSheet(style.load_stylesheet())
|
||||
actions_mapping = {}
|
||||
actions_kwargs_mapping = {}
|
||||
|
||||
local_site = self.item.local_site
|
||||
local_progress = self.item.local_progress
|
||||
remote_site = self.item.remote_site
|
||||
remote_progress = self.item.remote_progress
|
||||
|
||||
for site, progress in {local_site: local_progress,
|
||||
remote_site: remote_progress}.items():
|
||||
project = self.table_view.model().project
|
||||
provider = self.sync_server.get_provider_for_site(project,
|
||||
site)
|
||||
if provider == 'local_drive':
|
||||
if 'studio' in site:
|
||||
txt = " studio version"
|
||||
else:
|
||||
txt = " local version"
|
||||
action = QtWidgets.QAction("Open in explorer" + txt)
|
||||
if progress == 1:
|
||||
actions_mapping[action] = self._open_in_explorer
|
||||
actions_kwargs_mapping[action] = {'site': site}
|
||||
menu.addAction(action)
|
||||
|
||||
if self.item.state == lib.STATUS[2]:
|
||||
action = QtWidgets.QAction("Open error detail")
|
||||
actions_mapping[action] = self._show_detail
|
||||
menu.addAction(action)
|
||||
|
||||
if float(remote_progress) == 1.0:
|
||||
action = QtWidgets.QAction("Re-sync active site")
|
||||
actions_mapping[action] = self._reset_local_site
|
||||
menu.addAction(action)
|
||||
|
||||
if float(local_progress) == 1.0:
|
||||
action = QtWidgets.QAction("Re-sync remote site")
|
||||
actions_mapping[action] = self._reset_remote_site
|
||||
menu.addAction(action)
|
||||
|
||||
if not actions_mapping:
|
||||
action = QtWidgets.QAction("< No action >")
|
||||
actions_mapping[action] = None
|
||||
menu.addAction(action)
|
||||
|
||||
result = menu.exec_(QtGui.QCursor.pos())
|
||||
if result:
|
||||
to_run = actions_mapping[result]
|
||||
to_run_kwargs = actions_kwargs_mapping.get(result, {})
|
||||
if to_run:
|
||||
to_run(**to_run_kwargs)
|
||||
|
||||
def _reset_local_site(self):
|
||||
"""
|
||||
Removes errors or success metadata for particular file >> forces
|
||||
redo of upload/download
|
||||
"""
|
||||
self.sync_server.reset_provider_for_file(
|
||||
self.table_view.model().project,
|
||||
self.representation_id,
|
||||
'local',
|
||||
self.item._id)
|
||||
self.table_view.model().refresh(
|
||||
load_records=self.table_view.model()._rec_loaded)
|
||||
|
||||
def _reset_remote_site(self):
|
||||
"""
|
||||
Removes errors or success metadata for particular file >> forces
|
||||
redo of upload/download
|
||||
"""
|
||||
self.sync_server.reset_provider_for_file(
|
||||
self.table_view.model().project,
|
||||
self.representation_id,
|
||||
'remote',
|
||||
self.item._id)
|
||||
self.table_view.model().refresh(
|
||||
load_records=self.table_view.model()._rec_loaded)
|
||||
|
||||
def _open_in_explorer(self, site):
|
||||
if not self.item:
|
||||
return
|
||||
|
||||
fpath = self.item.path
|
||||
project = self.project
|
||||
fpath = self.sync_server.get_local_file_path(project, site, fpath)
|
||||
|
||||
fpath = os.path.normpath(os.path.dirname(fpath))
|
||||
if os.path.isdir(fpath):
|
||||
if 'win' in sys.platform: # windows
|
||||
subprocess.Popen('explorer "%s"' % fpath)
|
||||
elif sys.platform == 'darwin': # macOS
|
||||
subprocess.Popen(['open', fpath])
|
||||
else: # linux
|
||||
try:
|
||||
subprocess.Popen(['xdg-open', fpath])
|
||||
except OSError:
|
||||
raise OSError('unsupported xdg-open call??')
|
||||
|
||||
def _save_scrollbar(self):
|
||||
self._scrollbar_pos = self.table_view.verticalScrollBar().value()
|
||||
|
||||
def _set_scrollbar(self):
|
||||
if self._scrollbar_pos:
|
||||
self.table_view.verticalScrollBar().setValue(self._scrollbar_pos)
|
||||
|
||||
|
||||
class SyncRepresentationErrorWidget(QtWidgets.QWidget):
|
||||
"""
|
||||
Dialog to show when sync error happened, prints error message
|
||||
"""
|
||||
|
||||
def __init__(self, _id, dt, tries, msg, parent=None):
|
||||
super(SyncRepresentationErrorWidget, self).__init__(parent)
|
||||
|
||||
layout = QtWidgets.QHBoxLayout(self)
|
||||
|
||||
txts = []
|
||||
txts.append("{}: {}".format("Last update date", pretty_timestamp(dt)))
|
||||
txts.append("{}: {}".format("Retries", str(tries)))
|
||||
txts.append("{}: {}".format("Error message", msg))
|
||||
|
||||
text_area = QtWidgets.QPlainTextEdit("\n\n".join(txts))
|
||||
text_area.setReadOnly(True)
|
||||
layout.addWidget(text_area)
|
||||
|
||||
|
||||
class ImageDelegate(QtWidgets.QStyledItemDelegate):
|
||||
"""
|
||||
Prints icon of site and progress of synchronization
|
||||
"""
|
||||
|
||||
def __init__(self, parent=None):
|
||||
super(ImageDelegate, self).__init__(parent)
|
||||
self.icons = {}
|
||||
|
||||
def paint(self, painter, option, index):
|
||||
super(ImageDelegate, self).paint(painter, option, index)
|
||||
option = QtWidgets.QStyleOptionViewItem(option)
|
||||
option.showDecorationSelected = True
|
||||
|
||||
provider = index.data(lib.ProviderRole)
|
||||
value = index.data(lib.ProgressRole)
|
||||
date_value = index.data(lib.DateRole)
|
||||
is_failed = index.data(lib.FailedRole)
|
||||
|
||||
if not self.icons.get(provider):
|
||||
resource_path = os.path.dirname(__file__)
|
||||
resource_path = os.path.join(resource_path, "..",
|
||||
"providers", "resources")
|
||||
pix_url = "{}/{}.png".format(resource_path, provider)
|
||||
pixmap = QtGui.QPixmap(pix_url)
|
||||
self.icons[provider] = pixmap
|
||||
else:
|
||||
pixmap = self.icons[provider]
|
||||
|
||||
padding = 10
|
||||
point = QtCore.QPoint(option.rect.x() + padding,
|
||||
option.rect.y() +
|
||||
(option.rect.height() - pixmap.height()) / 2)
|
||||
painter.drawPixmap(point, pixmap)
|
||||
|
||||
overlay_rect = option.rect.translated(0, 0)
|
||||
overlay_rect.setHeight(overlay_rect.height() * (1.0 - float(value)))
|
||||
painter.fillRect(overlay_rect,
|
||||
QtGui.QBrush(QtGui.QColor(0, 0, 0, 100)))
|
||||
text_rect = option.rect.translated(10, 0)
|
||||
painter.drawText(text_rect,
|
||||
QtCore.Qt.AlignCenter,
|
||||
date_value)
|
||||
|
||||
if is_failed:
|
||||
overlay_rect = option.rect.translated(0, 0)
|
||||
painter.fillRect(overlay_rect,
|
||||
QtGui.QBrush(QtGui.QColor(255, 0, 0, 35)))
|
||||
|
||||
|
||||
class SyncServerDetailWindow(QtWidgets.QDialog):
|
||||
def __init__(self, sync_server, _id, project, parent=None):
|
||||
log.debug(
|
||||
"!!! SyncServerDetailWindow _id:: {}".format(_id))
|
||||
super(SyncServerDetailWindow, self).__init__(parent)
|
||||
self.setWindowFlags(QtCore.Qt.Window)
|
||||
self.setFocusPolicy(QtCore.Qt.StrongFocus)
|
||||
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
self.setWindowIcon(QtGui.QIcon(style.app_icon_path()))
|
||||
self.resize(1000, 400)
|
||||
|
||||
body = QtWidgets.QWidget()
|
||||
footer = QtWidgets.QWidget()
|
||||
footer.setFixedHeight(20)
|
||||
|
||||
container = SyncRepresentationDetailWidget(sync_server, _id, project,
|
||||
parent=self)
|
||||
body_layout = QtWidgets.QHBoxLayout(body)
|
||||
body_layout.addWidget(container)
|
||||
body_layout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
self.message = QtWidgets.QLabel()
|
||||
self.message.hide()
|
||||
|
||||
footer_layout = QtWidgets.QVBoxLayout(footer)
|
||||
footer_layout.addWidget(self.message)
|
||||
footer_layout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.addWidget(body)
|
||||
layout.addWidget(footer)
|
||||
|
||||
self.setLayout(body_layout)
|
||||
self.setWindowTitle("Sync Representation Detail")
|
||||
|
||||
|
||||
class SyncRepresentationErrorWindow(QtWidgets.QDialog):
|
||||
def __init__(self, _id, project, dt, tries, msg, parent=None):
|
||||
super(SyncRepresentationErrorWindow, self).__init__(parent)
|
||||
self.setWindowFlags(QtCore.Qt.Window)
|
||||
self.setFocusPolicy(QtCore.Qt.StrongFocus)
|
||||
|
||||
self.setStyleSheet(style.load_stylesheet())
|
||||
self.setWindowIcon(QtGui.QIcon(style.app_icon_path()))
|
||||
self.resize(900, 150)
|
||||
|
||||
body = QtWidgets.QWidget()
|
||||
|
||||
container = SyncRepresentationErrorWidget(_id, dt, tries, msg,
|
||||
parent=self)
|
||||
body_layout = QtWidgets.QHBoxLayout(body)
|
||||
body_layout.addWidget(container)
|
||||
body_layout.setContentsMargins(0, 0, 0, 0)
|
||||
|
||||
message = QtWidgets.QLabel()
|
||||
message.hide()
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.addWidget(body)
|
||||
|
||||
self.setLayout(body_layout)
|
||||
self.setWindowTitle("Sync Representation Error Detail")
|
||||
|
|
@@ -1,8 +1,14 @@
import time
from openpype.api import Logger
from openpype.api import Logger

log = Logger().get_logger("SyncServer")


class SyncStatus:
    DO_NOTHING = 0
    DO_UPLOAD = 1
    DO_DOWNLOAD = 2


def time_function(method):
    """ Decorator to print how much time function took.
        For debugging.
|
||||
|
|
|
|||
33  openpype/plugins/load/add_site.py  (new file)
|
|
@@ -0,0 +1,33 @@
from avalon import api
from openpype.modules import ModulesManager


class AddSyncSite(api.Loader):
    """Add sync site to representation"""
    representations = ["*"]
    families = ["*"]

    label = "Add Sync Site"
    order = 2  # lower means better
    icon = "download"
    color = "#999999"

    def load(self, context, name=None, namespace=None, data=None):
        self.log.info("Adding {} to representation: {}".format(
            data["site_name"], data["_id"]))
        self.add_site_to_representation(data["project_name"],
                                        data["_id"],
                                        data["site_name"])
        self.log.debug("Site added.")

    @staticmethod
    def add_site_to_representation(project_name, representation_id, site_name):
        """Adds new site to representation_id, resets if exists"""
        manager = ModulesManager()
        sync_server = manager.modules_by_name["sync_server"]
        sync_server.add_site(project_name, representation_id, site_name,
                             force=True)

    def filepath_from_context(self, context):
        """No real file loading"""
        return ""
|
||||
|
|
@@ -15,11 +15,12 @@ from openpype.api import Anatomy


class DeleteOldVersions(api.Loader):

    """Deletes specific number of old version"""
    representations = ["*"]
    families = ["*"]

    label = "Delete Old Versions"
    order = 35
    icon = "trash"
    color = "#d8d8d8"

@@ -421,8 +422,9 @@ class DeleteOldVersions(api.Loader):


class CalculateOldVersions(DeleteOldVersions):

    """Calculate file size of old versions"""
    label = "Calculate Old Versions"
    order = 30

    options = [
        qargparse.Integer(
|
||||
|
|
|
|||
33  openpype/plugins/load/remove_site.py  (new file)
|
|
@@ -0,0 +1,33 @@
from avalon import api
from openpype.modules import ModulesManager


class RemoveSyncSite(api.Loader):
    """Remove sync site and its files on representation"""
    representations = ["*"]
    families = ["*"]

    label = "Remove Sync Site"
    order = 4
    icon = "download"
    color = "#999999"

    def load(self, context, name=None, namespace=None, data=None):
        self.log.info("Removing {} on representation: {}".format(
            data["site_name"], data["_id"]))
        self.remove_site_on_representation(data["project_name"],
                                           data["_id"],
                                           data["site_name"])
        self.log.debug("Site added.")

    @staticmethod
    def remove_site_on_representation(project_name, representation_id,
                                      site_name):
        manager = ModulesManager()
        sync_server = manager.modules_by_name["sync_server"]
        sync_server.remove_site(project_name, representation_id,
                                site_name, True)

    def filepath_from_context(self, context):
        """No real file loading"""
        return ""
|
||||
|
|
@@ -976,6 +976,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        local_site = local_site_id

        remote_site = sync_server_presets["config"].get("remote_site")
        if remote_site == local_site:
            remote_site = None

        if remote_site == 'local':
            remote_site = local_site_id
|
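A short sketch of the remote-site resolution added above; the standalone helper name is an assumption made for illustration, not part of the commit:

def resolve_remote_site(remote_site, local_site_id):
    # Syncing to the site we already are on makes no sense.
    if remote_site == local_site_id:
        return None
    # 'local' is an alias for the machine-specific site id.
    if remote_site == 'local':
        return local_site_id
    return remote_site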
||||
|
||||
|
|
|
|||
|
|
@@ -10,17 +10,17 @@
    "resolutionHeight": 1080,
    "pixelAspect": 1.0,
    "applications": [
        "maya_2020",
        "nuke_12-2",
        "nukex_12-2",
        "hiero_12-2",
        "resolve_16",
        "houdini_18-5",
        "blender_2-90",
        "harmony_20",
        "photoshop_2021",
        "aftereffects_2021",
        "unreal_4-24"
        "maya/2020",
        "nuke/12-2",
        "nukex/12-2",
        "hiero/12-2",
        "resolve/16",
        "houdini/18-5",
        "blender/2-90",
        "harmony/20",
        "photoshop/2021",
        "aftereffects/2021",
        "unreal/4-24"
    ],
    "tools_env": []
}
|
||||
|
|
@@ -7,6 +7,14 @@
            "not ready"
        ]
    },
    "prepare_project": {
        "enabled": true,
        "role_list": [
            "Pypeclub",
            "Administrator",
            "Project manager"
        ]
    },
    "sync_hier_entity_attributes": {
        "enabled": true,
        "interest_entity_types": [

@@ -195,7 +203,7 @@
    "publish": {
        "IntegrateFtrackNote": {
            "enabled": true,
            "note_with_intent_template": "",
            "note_with_intent_template": "{intent}: {comment}",
            "note_labels": []
        },
        "ValidateFtrackAttributes": {
|
||||
|
|
|
|||
|
|
@@ -6,7 +6,9 @@
    "ExtractJpegEXR": {
        "enabled": true,
        "ffmpeg_args": {
            "input": [],
            "input": [
                "-gamma 2.2"
            ],
            "output": []
        }
    },
|
||||
|
|
|
|||
|
|
@@ -313,8 +313,8 @@
        "rendererName": "vp2Renderer"
    },
    "Resolution": {
        "width": 1080,
        "height": 1920,
        "width": 1920,
        "height": 1080,
        "percent": 1.0,
        "mode": "Custom"
    },
|
||||
|
|
|
|||
|
|
@@ -116,7 +116,7 @@
    "ExtractThumbnailSP": {
        "ffmpeg_args": {
            "input": [
                "gamma 2.2"
                "-gamma 2.2"
            ],
            "output": []
        }
|
||||
|
|
|
|||
15  openpype/settings/defaults/project_settings/tvpaint.json  (new file)
|
|
@@ -0,0 +1,15 @@
{
    "publish": {
        "ValidateProjectSettings": {
            "enabled": true,
            "optional": true,
            "active": true
        },
        "ValidateMarks": {
            "enabled": true,
            "optional": true,
            "active": true
        }
    },
    "filters": {}
}
|
||||
|
|
@ -6,9 +6,9 @@
|
|||
"host_name": "maya",
|
||||
"environment": {
|
||||
"PYTHONPATH": [
|
||||
"{OPENPYPE_ROOT}/pype/hosts/maya/startup",
|
||||
"{OPENPYPE_ROOT}/repos/avalon-core/setup/maya",
|
||||
"{OPENPYPE_ROOT}/repos/maya-look-assigner",
|
||||
"{OPENPYPE_REPOS_ROOT}/openpype/hosts/maya/startup",
|
||||
"{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/maya",
|
||||
"{OPENPYPE_REPOS_ROOT}/repos/maya-look-assigner",
|
||||
"{PYTHONPATH}"
|
||||
],
|
||||
"MAYA_DISABLE_CLIC_IPM": "Yes",
|
||||
|
|
@ -85,8 +85,8 @@
|
|||
"host_name": "nuke",
|
||||
"environment": {
|
||||
"NUKE_PATH": [
|
||||
"{OPENPYPE_ROOT}/repos/avalon-core/setup/nuke/nuke_path",
|
||||
"{OPENPYPE_ROOT}/openpype/hosts/nuke/startup",
|
||||
"{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/nuke/nuke_path",
|
||||
"{OPENPYPE_REPOS_ROOT}/openpype/hosts/nuke/startup",
|
||||
"{OPENPYPE_STUDIO_PLUGINS}/nuke"
|
||||
],
|
||||
"PATH": {
|
||||
|
|
@ -175,8 +175,8 @@
|
|||
"host_name": "nuke",
|
||||
"environment": {
|
||||
"NUKE_PATH": [
|
||||
"{OPENPYPE_ROOT}/repos/avalon-core/setup/nuke/nuke_path",
|
||||
"{OPENPYPE_ROOT}/openpype/hosts/nuke/startup",
|
||||
"{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/nuke/nuke_path",
|
||||
"{OPENPYPE_REPOS_ROOT}/openpype/hosts/nuke/startup",
|
||||
"{OPENPYPE_STUDIO_PLUGINS}/nuke"
|
||||
],
|
||||
"PATH": {
|
||||
|
|
@ -290,7 +290,7 @@
|
|||
"host_name": "hiero",
|
||||
"environment": {
|
||||
"HIERO_PLUGIN_PATH": [
|
||||
"{OPENPYPE_ROOT}/openpype/hosts/hiero/startup"
|
||||
"{OPENPYPE_REPOS_ROOT}/openpype/hosts/hiero/startup"
|
||||
],
|
||||
"PATH": {
|
||||
"windows": "C:/Program Files (x86)/QuickTime/QTSystem/;{PATH}"
|
||||
|
|
@ -403,7 +403,7 @@
|
|||
"host_name": "hiero",
|
||||
"environment": {
|
||||
"HIERO_PLUGIN_PATH": [
|
||||
"{OPENPYPE_ROOT}/openpype/hosts/hiero/startup"
|
||||
"{OPENPYPE_REPOS_ROOT}/openpype/hosts/hiero/startup"
|
||||
],
|
||||
"PATH": {
|
||||
"windows": "C:/Program Files (x86)/QuickTime/QTSystem/;{PATH}"
|
||||
|
|
@ -614,7 +614,7 @@
|
|||
"{PYTHON36_RESOLVE}/Scripts",
|
||||
"{PATH}"
|
||||
],
|
||||
"PRE_PYTHON_SCRIPT": "{OPENPYPE_ROOT}/openpype/resolve/preload_console.py",
|
||||
"PRE_PYTHON_SCRIPT": "{OPENPYPE_REPOS_ROOT}/openpype/resolve/preload_console.py",
|
||||
"OPENPYPE_LOG_NO_COLORS": "True",
|
||||
"RESOLVE_DEV": "True"
|
||||
},
|
||||
|
|
@ -645,14 +645,14 @@
|
|||
"host_name": "houdini",
|
||||
"environment": {
|
||||
"HOUDINI_PATH": {
|
||||
"darwin": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup:&",
|
||||
"linux": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup:&",
|
||||
"windows": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup;&"
|
||||
"darwin": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup:&",
|
||||
"linux": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup:&",
|
||||
"windows": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup;&"
|
||||
},
|
||||
"HOUDINI_MENU_PATH": {
|
||||
"darwin": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup:&",
|
||||
"linux": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup:&",
|
||||
"windows": "{OPENPYPE_ROOT}/openpype/hosts/houdini/startup;&"
|
||||
"darwin": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup:&",
|
||||
"linux": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup:&",
|
||||
"windows": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/houdini/startup;&"
|
||||
}
|
||||
},
|
||||
"variants": {
|
||||
|
|
@ -710,12 +710,12 @@
|
|||
"icon": "{}/app_icons/blender.png",
|
||||
"host_name": "blender",
|
||||
"environment": {
|
||||
"BLENDER_USER_SCRIPTS": "{OPENPYPE_ROOT}/repos/avalon-core/setup/blender",
|
||||
"BLENDER_USER_SCRIPTS": "{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/blender",
|
||||
"PYTHONPATH": [
|
||||
"{OPENPYPE_ROOT}/repos/avalon-core/setup/blender",
|
||||
"{OPENPYPE_REPOS_ROOT}/repos/avalon-core/setup/blender",
|
||||
"{PYTHONPATH}"
|
||||
],
|
||||
"CREATE_NEW_CONSOLE": "yes"
|
||||
"QT_PREFERRED_BINDING": "PySide2"
|
||||
},
|
||||
"variants": {
|
||||
"2-83": {
|
||||
|
|
@ -773,7 +773,7 @@
|
|||
"host_name": "harmony",
|
||||
"environment": {
|
||||
"AVALON_HARMONY_WORKFILES_ON_LAUNCH": "1",
|
||||
"LIB_OPENHARMONY_PATH": "{OPENPYPE_ROOT}/pype/vendor/OpenHarmony"
|
||||
"LIB_OPENHARMONY_PATH": "{OPENPYPE_REPOS_ROOT}/pype/vendor/OpenHarmony"
|
||||
},
|
||||
"variants": {
|
||||
"20": {
|
||||
|
|
@ -957,7 +957,7 @@
|
|||
"icon": "app_icons/celaction.png",
|
||||
"host_name": "celaction",
|
||||
"environment": {
|
||||
"CELACTION_TEMPLATE": "{OPENPYPE_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn"
|
||||
"CELACTION_TEMPLATE": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn"
|
||||
},
|
||||
"variants": {
|
||||
"local": {
|
||||
|
|
@ -983,7 +983,7 @@
|
|||
"icon": "{}/app_icons/ue4.png'",
|
||||
"host_name": "unreal",
|
||||
"environment": {
|
||||
"AVALON_UNREAL_PLUGIN": "{OPENPYPE_ROOT}/repos/avalon-unreal-integration",
|
||||
"AVALON_UNREAL_PLUGIN": "{OPENPYPE_REPOS_ROOT}/repos/avalon-unreal-integration",
|
||||
"OPENPYPE_LOG_NO_COLORS": "True",
|
||||
"QT_PREFERRED_BINDING": "PySide"
|
||||
},
|
||||
|
|
|
|||
|
|
@ -14,4 +14,4 @@
|
|||
"darwin": [],
|
||||
"linux": []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -82,6 +82,10 @@
            "type": "schema",
            "name": "schema_project_harmony"
        },
        {
            "type": "schema",
            "name": "schema_project_tvpaint"
        },
        {
            "type": "schema",
            "name": "schema_project_celaction"
|
||||
|
|
|
|||
|
|
@@ -36,6 +36,25 @@
                }
            ]
        },
        {
            "type": "dict",
            "key": "prepare_project",
            "label": "Prepare Project",
            "checkbox_key": "enabled",
            "children": [
                {
                    "type": "boolean",
                    "key": "enabled",
                    "label": "Enabled"
                },
                {
                    "type": "list",
                    "key": "role_list",
                    "label": "Roles",
                    "object_type": "text"
                }
            ]
        },
        {
            "type": "dict",
            "key": "sync_hier_entity_attributes",
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,44 @@
{
    "type": "dict",
    "collapsible": true,
    "key": "tvpaint",
    "label": "TVPaint",
    "is_file": true,
    "children": [
        {
            "type": "dict",
            "collapsible": true,
            "key": "publish",
            "label": "Publish plugins",
            "is_file": true,
            "children": [
                {
                    "type": "schema_template",
                    "name": "template_publish_plugin",
                    "template_data": [
                        {
                            "key": "ValidateProjectSettings",
                            "label": "ValidateProjectSettings",
                            "docstring": "Validate if FPS and Resolution match shot data"
                        }
                    ]
                },
                {
                    "type": "schema_template",
                    "name": "template_publish_plugin",
                    "template_data": [
                        {
                            "key": "ValidateMarks",
                            "label": "Validate MarkIn/Out",
                            "docstring": "Validate MarkIn/Out match Frame start/end on shot data"
                        }
                    ]
                }
            ]
        },
        {
            "type": "schema",
            "name": "schema_publish_gui_filter"
        }
    ]
}
|
||||
|
|
@@ -4,6 +4,31 @@
    "key": "create",
    "label": "Creator plugins",
    "children": [
        {
            "type": "dict",
            "collapsible": true,
            "key": "CreateLook",
            "label": "Create Look",
            "checkbox_key": "enabled",
            "children": [
                {
                    "type": "boolean",
                    "key": "enabled",
                    "label": "Enabled"
                },
                {
                    "type": "boolean",
                    "key": "make_tx",
                    "label": "Make tx files"
                },
                {
                    "type": "list",
                    "key": "defaults",
                    "label": "Default Subsets",
                    "object_type": "text"
                }
            ]
        },
        {
            "type": "schema_template",
            "name": "template_create_plugin",

@@ -28,10 +53,6 @@
                    "key": "CreateLayout",
                    "label": "Create Layout"
                },
                {
                    "key": "CreateLook",
                    "label": "Create Look"
                },
                {
                    "key": "CreateMayaScene",
                    "label": "Create Maya Scene"
|
||||
|
|
|
|||
|
|
@@ -1,4 +1,5 @@
import os
from openpype import resources


def load_stylesheet():

@@ -9,4 +10,4 @@ def load_stylesheet():


def app_icon_path():
    return os.path.join(os.path.dirname(__file__), "openpype_icon.png")
    return resources.pype_icon_filepath()
|
||||
|
|
|
|||
Binary file not shown (before: 3.7 KiB)
|
@@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.0.0-beta"
__version__ = "3.0.0-beta2"