Merge remote-tracking branch 'origin/develop' into feature/1294-sync-queue-gui-issues

Petr Kalis 2021-04-13 18:59:16 +02:00
commit 5832e2bf61
38 changed files with 1043 additions and 586 deletions

View file

@ -51,18 +51,38 @@ def set_start_end_frames():
"name": asset_name
})
# Default frame start/end
frameStart = 0
frameEnd = 100
scene = bpy.context.scene
# Check if frameStart/frameEnd are set
if asset_doc["data"]["frameStart"]:
frameStart = asset_doc["data"]["frameStart"]
if asset_doc["data"]["frameEnd"]:
frameEnd = asset_doc["data"]["frameEnd"]
# Default scene settings
frameStart = scene.frame_start
frameEnd = scene.frame_end
fps = scene.render.fps
resolution_x = scene.render.resolution_x
resolution_y = scene.render.resolution_y
# Check if settings are set
data = asset_doc.get("data")
if not data:
return
if data.get("frameStart"):
frameStart = data.get("frameStart")
if data.get("frameEnd"):
frameEnd = data.get("frameEnd")
if data.get("fps"):
fps = data.get("fps")
if data.get("resolutionWidth"):
resolution_x = data.get("resolutionWidth")
if data.get("resolutionHeight"):
resolution_y = data.get("resolutionHeight")
scene.frame_start = frameStart
scene.frame_end = frameEnd
scene.render.fps = fps
scene.render.resolution_x = resolution_x
scene.render.resolution_y = resolution_y
bpy.context.scene.frame_start = frameStart
bpy.context.scene.frame_end = frameEnd
def on_new(arg1, arg2):
set_start_end_frames()
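For reference, a minimal sketch (not part of the commit, assuming asset_doc and bpy as in the hunk above) of the same "use the asset value when present, otherwise keep the scene default" pattern:

# Sketch only: equivalent fallback behaviour with `or`, so missing or falsy
# values keep the current scene defaults, matching the checks in the commit.
data = asset_doc.get("data") or {}
scene = bpy.context.scene
scene.frame_start = data.get("frameStart") or scene.frame_start
scene.frame_end = data.get("frameEnd") or scene.frame_end
scene.render.fps = data.get("fps") or scene.render.fps
scene.render.resolution_x = data.get("resolutionWidth") or scene.render.resolution_x
scene.render.resolution_y = data.get("resolutionHeight") or scene.render.resolution_y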

View file

@ -0,0 +1,35 @@
from typing import List
import pyblish.api
import openpype.hosts.blender.api.action
class ValidateObjectIsInObjectMode(pyblish.api.InstancePlugin):
"""Validate that the current object is in Object Mode."""
order = pyblish.api.ValidatorOrder - 0.01
hosts = ["blender"]
families = ["model", "rig"]
category = "geometry"
label = "Object is in Object Mode"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
optional = True
@classmethod
def get_invalid(cls, instance) -> List:
invalid = []
for obj in [obj for obj in instance]:
try:
if obj.type == 'MESH' or obj.type == 'ARMATURE':
# Check if the object is in object mode.
if not obj.mode == 'OBJECT':
invalid.append(obj)
except Exception:
continue
return invalid
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Object found in instance is not in Object Mode: {invalid}")

View file

@ -80,25 +80,31 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
# Add all nodes in group instances.
if node.Class() == "Group":
# check if it is write node in family
if "write" in families:
# only alter families for render family
if "write" in families_ak:
target = node["render"].value()
if target == "Use existing frames":
# Local rendering
self.log.info("flagged for no render")
families.append("render")
families.append(family)
elif target == "Local":
# Local rendering
self.log.info("flagged for local render")
families.append("{}.local".format("render"))
families.append("{}.local".format(family))
elif target == "On farm":
# Farm rendering
self.log.info("flagged for farm render")
instance.data["transfer"] = False
families.append("{}.farm".format("render"))
families.append("{}.farm".format(family))
# shuffle family to `write` as it is the main family
# this will be changed later on in process
if "render" in families:
families.remove("render")
family = "write"
elif "prerender" in families:
families.remove("prerender")
family = "write"
node.begin()
for i in nuke.allNodes():

View file

@ -108,6 +108,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
# Add version data to instance
version_data = {
"families": [f.replace(".local", "").replace(".farm", "")
for f in families if "write" not in f],
"colorspace": node["colorspace"].value(),
}
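The added comprehension normalizes family names before they are stored as version data: the local/farm suffixes are stripped and the write family itself is dropped. With illustrative values:

# Sketch only: what the comprehension produces for a typical families list.
families = ["write", "render.local", "prerender.farm"]
cleaned = [f.replace(".local", "").replace(".farm", "")
           for f in families if "write" not in f]
print(cleaned)  # ['render', 'prerender']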

View file

@ -11,7 +11,9 @@ from .api.pipeline import (
update_container,
publish,
launch_workfiles_app,
maintained_selection
maintained_selection,
remove_instance,
list_instances
)
from .api.lib import (
@ -73,6 +75,8 @@ __all__ = [
"publish",
"launch_workfiles_app",
"maintained_selection",
"remove_instance",
"list_instances",
# utils
"setup",

View file

@ -12,7 +12,8 @@ from avalon.tools import (
creator,
loader,
sceneinventory,
libraryloader
libraryloader,
subsetmanager
)
@ -64,8 +65,9 @@ class OpenPypeMenu(QtWidgets.QWidget):
publish_btn = QtWidgets.QPushButton("Publish ...", self)
load_btn = QtWidgets.QPushButton("Load ...", self)
inventory_btn = QtWidgets.QPushButton("Inventory ...", self)
subsetm_btn = QtWidgets.QPushButton("Subset Manager ...", self)
libload_btn = QtWidgets.QPushButton("Library ...", self)
# rename_btn = QtWidgets.QPushButton("Rename ...", self)
# rename_btn = QtWidgets.QPushButton("Rename", self)
# set_colorspace_btn = QtWidgets.QPushButton(
# "Set colorspace from presets", self
# )
@ -81,6 +83,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
layout.addWidget(publish_btn)
layout.addWidget(load_btn)
layout.addWidget(inventory_btn)
layout.addWidget(subsetm_btn)
layout.addWidget(Spacer(15, self))
@ -102,6 +105,7 @@ class OpenPypeMenu(QtWidgets.QWidget):
publish_btn.clicked.connect(self.on_publish_clicked)
load_btn.clicked.connect(self.on_load_clicked)
inventory_btn.clicked.connect(self.on_inventory_clicked)
subsetm_btn.clicked.connect(self.on_subsetm_clicked)
libload_btn.clicked.connect(self.on_libload_clicked)
# rename_btn.clicked.connect(self.on_rename_clicked)
# set_colorspace_btn.clicked.connect(self.on_set_colorspace_clicked)
@ -127,6 +131,10 @@ class OpenPypeMenu(QtWidgets.QWidget):
print("Clicked Inventory")
sceneinventory.show()
def on_subsetm_clicked(self):
print("Clicked Subset Manager")
subsetmanager.show()
def on_libload_clicked(self):
print("Clicked Library")
libraryloader.show()

View file

@ -258,3 +258,51 @@ def on_pyblish_instance_toggled(instance, old_value, new_value):
# Whether instances should be passthrough based on new value
timeline_item = instance.data["item"]
set_publish_attribute(timeline_item, new_value)
def remove_instance(instance):
"""Remove instance marker from track item."""
instance_id = instance.get("uuid")
selected_timeline_items = lib.get_current_timeline_items(
filter=True, selecting_color=lib.publish_clip_color)
found_ti = None
for timeline_item_data in selected_timeline_items:
timeline_item = timeline_item_data["clip"]["item"]
# get openpype tag data
tag_data = lib.get_timeline_item_pype_tag(timeline_item)
_ti_id = tag_data.get("uuid")
if _ti_id == instance_id:
found_ti = timeline_item
break
if found_ti is None:
return
# removing instance by marker color
print(f"Removing instance: {found_ti.GetName()}")
found_ti.DeleteMarkersByColor(lib.pype_marker_color)
def list_instances():
"""List all created instances from current workfile."""
listed_instances = []
selected_timeline_items = lib.get_current_timeline_items(
filter=True, selecting_color=lib.publish_clip_color)
for timeline_item_data in selected_timeline_items:
timeline_item = timeline_item_data["clip"]["item"]
ti_name = timeline_item.GetName().split(".")[0]
# get openpype tag data
tag_data = lib.get_timeline_item_pype_tag(timeline_item)
if tag_data:
asset = tag_data.get("asset")
subset = tag_data.get("subset")
tag_data["label"] = f"{ti_name} [{asset}-{subset}]"
listed_instances.append(tag_data)
return listed_instances
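These two host functions back the Subset Manager entry added to the Resolve menu above: list_instances() supplies the items (each tag_data dict now carries a label and uuid) and remove_instance() deletes the matching marker. A rough usage sketch, relying only on the names exported from the host __init__ shown earlier:

# Sketch only: consuming the new host functions.
from openpype.hosts import resolve

for instance_data in resolve.list_instances():
    print(instance_data["label"], instance_data.get("uuid"))

# remove_instance() expects the same dict back and matches it by "uuid":
# resolve.remove_instance(instance_data)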

View file

@ -1,4 +1,5 @@
import re
import uuid
from avalon import api
import openpype.api as pype
from openpype.hosts import resolve
@ -697,13 +698,13 @@ class PublishClip:
Populating the tag data into internal variable self.tag_data
"""
# define vertical sync attributes
master_layer = True
hero_track = True
self.review_layer = ""
if self.vertical_sync:
# check if track name is not in driving layer
if self.track_name not in self.driving_layer:
# if it is not then define vertical sync as None
master_layer = False
hero_track = False
# increasing steps by index of rename iteration
self.count_steps *= self.rename_index
@ -717,7 +718,7 @@ class PublishClip:
self.tag_data[_k] = _v["value"]
# driving layer is set as positive match
if master_layer or self.vertical_sync:
if hero_track or self.vertical_sync:
# mark review layer
if self.review_track and (
self.review_track not in self.review_track_default):
@ -751,35 +752,39 @@ class PublishClip:
hierarchy_formating_data
)
tag_hierarchy_data.update({"masterLayer": True})
if master_layer and self.vertical_sync:
# tag_hierarchy_data.update({"masterLayer": True})
tag_hierarchy_data.update({"heroTrack": True})
if hero_track and self.vertical_sync:
self.vertical_clip_match.update({
(self.clip_in, self.clip_out): tag_hierarchy_data
})
if not master_layer and self.vertical_sync:
if not hero_track and self.vertical_sync:
# driving layer is set as negative match
for (_in, _out), master_data in self.vertical_clip_match.items():
master_data.update({"masterLayer": False})
for (_in, _out), hero_data in self.vertical_clip_match.items():
hero_data.update({"heroTrack": False})
if _in == self.clip_in and _out == self.clip_out:
data_subset = master_data["subset"]
# add track index in case duplicity of names in master data
data_subset = hero_data["subset"]
# add track index in case of duplicate names in hero data
if self.subset in data_subset:
master_data["subset"] = self.subset + str(
hero_data["subset"] = self.subset + str(
self.track_index)
# in case track name and subset name are the same then add
if self.subset_name == self.track_name:
master_data["subset"] = self.subset
hero_data["subset"] = self.subset
# assign data to return hierarchy data to tag
tag_hierarchy_data = master_data
tag_hierarchy_data = hero_data
# add data to return data dict
self.tag_data.update(tag_hierarchy_data)
if master_layer and self.review_layer:
# add uuid to tag data
self.tag_data["uuid"] = str(uuid.uuid4())
# add review track only to hero track
if hero_track and self.review_layer:
self.tag_data.update({"reviewTrack": self.review_layer})
def _solve_tag_hierarchy_data(self, hierarchy_formating_data):
""" Solve tag data from hierarchy data and templates. """
# fill up clip name and hierarchy keys

View file

@ -117,7 +117,7 @@ class CreateShotClip(resolve.Creator):
"vSyncTrack": {
"value": gui_tracks, # noqa
"type": "QComboBox",
"label": "Master track",
"label": "Hero track",
"target": "ui",
"toolTip": "Select driving track name which should be mastering all others", # noqa
"order": 1}

View file

@ -5,11 +5,11 @@ from openpype.hosts import resolve
from pprint import pformat
class CollectInstances(pyblish.api.ContextPlugin):
class PrecollectInstances(pyblish.api.ContextPlugin):
"""Collect all Track items selection."""
order = pyblish.api.CollectorOrder - 0.59
label = "Collect Instances"
label = "Precollect Instances"
hosts = ["resolve"]
def process(self, context):
@ -26,7 +26,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
data = dict()
timeline_item = timeline_item_data["clip"]["item"]
# get openpype tag data
# get pype tag data
tag_data = resolve.get_timeline_item_pype_tag(timeline_item)
self.log.debug(f"__ tag_data: {pformat(tag_data)}")
@ -102,10 +102,10 @@ class CollectInstances(pyblish.api.ContextPlugin):
})
def create_shot_instance(self, context, timeline_item, **data):
master_layer = data.get("masterLayer")
hero_track = data.get("heroTrack")
hierarchy_data = data.get("hierarchyData")
if not master_layer:
if not hero_track:
return
if not hierarchy_data:

View file

@ -9,10 +9,10 @@ from openpype.hosts.resolve.otio import davinci_export
reload(davinci_export)
class CollectWorkfile(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
class PrecollectWorkfile(pyblish.api.ContextPlugin):
"""Precollect the current working file into context"""
label = "Collect Workfile"
label = "Precollect Workfile"
order = pyblish.api.CollectorOrder - 0.6
def process(self, context):
@ -21,8 +21,6 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
subset = "workfile"
project = resolve.get_current_project()
fps = project.GetSetting("timelineFrameRate")
active_timeline = resolve.get_current_timeline()
video_tracks = resolve.get_video_track_names()
# adding otio timeline to context

View file

@ -58,9 +58,8 @@ def _close_window(event):
def _export_button(event):
pm = resolve.GetProjectManager()
project = pm.GetCurrentProject()
fps = project.GetSetting("timelineFrameRate")
timeline = project.GetCurrentTimeline()
otio_timeline = otio_export.create_otio_timeline(timeline, fps)
otio_timeline = otio_export.create_otio_timeline(project)
otio_path = os.path.join(
itm["exportfilebttn"].Text,
timeline.GetName() + ".otio")

View file

@ -18,7 +18,7 @@ class CollectInstances(pyblish.api.ContextPlugin):
))
for instance_data in workfile_instances:
instance_data["fps"] = context.data["fps"]
instance_data["fps"] = context.data["sceneFps"]
# Store workfile instance data to instance data
instance_data["originData"] = copy.deepcopy(instance_data)
@ -32,6 +32,11 @@ class CollectInstances(pyblish.api.ContextPlugin):
subset_name = instance_data["subset"]
name = instance_data.get("name", subset_name)
instance_data["name"] = name
instance_data["label"] = "{} [{}-{}]".format(
name,
context.data["sceneFrameStart"],
context.data["sceneFrameEnd"]
)
active = instance_data.get("active", True)
instance_data["active"] = active
@ -73,8 +78,8 @@ class CollectInstances(pyblish.api.ContextPlugin):
if instance is None:
continue
instance.data["frameStart"] = context.data["frameStart"]
instance.data["frameEnd"] = context.data["frameEnd"]
instance.data["frameStart"] = context.data["sceneFrameStart"]
instance.data["frameEnd"] = context.data["sceneFrameEnd"]
self.log.debug("Created instance: {}\n{}".format(
instance, json.dumps(instance.data, indent=4)

View file

@ -127,11 +127,11 @@ class CollectWorkfileData(pyblish.api.ContextPlugin):
"currentFile": workfile_path,
"sceneWidth": width,
"sceneHeight": height,
"pixelAspect": pixel_apsect,
"frameStart": frame_start,
"frameEnd": frame_end,
"fps": frame_rate,
"fieldOrder": field_order
"scenePixelAspect": pixel_apsect,
"sceneFrameStart": frame_start,
"sceneFrameEnd": frame_end,
"sceneFps": frame_rate,
"sceneFieldOrder": field_order
}
self.log.debug(
"Scene data: {}".format(json.dumps(scene_data, indent=4))

View file

@ -0,0 +1,36 @@
import json
import pyblish.api
class ValidateProjectSettings(pyblish.api.ContextPlugin):
"""Validate project settings against database.
"""
label = "Validate Project Settings"
order = pyblish.api.ValidatorOrder
optional = True
def process(self, context):
scene_data = {
"frameStart": context.data.get("sceneFrameStart"),
"frameEnd": context.data.get("sceneFrameEnd"),
"fps": context.data.get("sceneFps"),
"resolutionWidth": context.data.get("sceneWidth"),
"resolutionHeight": context.data.get("sceneHeight"),
"pixelAspect": context.data.get("scenePixelAspect")
}
invalid = {}
for k in scene_data.keys():
expected_value = context.data["assetEntity"]["data"][k]
if scene_data[k] != expected_value:
invalid[k] = {
"current": scene_data[k], "expected": expected_value
}
if invalid:
raise AssertionError(
"Project settings does not match database:\n{}".format(
json.dumps(invalid, sort_keys=True, indent=4)
)
)
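Illustrative only (made-up values): the validator builds its report as a dict of mismatching keys, each holding the current scene value and the expected database value, and raises when that dict is non-empty:

# Sketch only, hypothetical values: the same comparison the plugin performs.
scene_data = {"fps": 24.0, "resolutionWidth": 1920}
expected = {"fps": 25.0, "resolutionWidth": 1920}  # stand-in for asset["data"]

invalid = {
    key: {"current": value, "expected": expected[key]}
    for key, value in scene_data.items()
    if value != expected[key]
}
print(invalid)  # {'fps': {'current': 24.0, 'expected': 25.0}}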

View file

@ -498,12 +498,12 @@ class OpenPypeSettingsRegistry(JSONSettingRegistry):
def _create_local_site_id(registry=None):
"""Create a local site identifier."""
from uuid import uuid4
from coolname import generate_slug
if registry is None:
registry = OpenPypeSettingsRegistry()
new_id = str(uuid4())
new_id = generate_slug(3)
print("Created local site id \"{}\"".format(new_id))

View file

@ -1,13 +1,16 @@
import os
import re
import time
import requests
import json
import datetime
import requests
from .constants import (
CLOCKIFY_ENDPOINT, ADMIN_PERMISSION_NAMES, CREDENTIALS_JSON_PATH
CLOCKIFY_ENDPOINT,
ADMIN_PERMISSION_NAMES
)
from openpype.lib.local_settings import OpenPypeSecureRegistry
def time_check(obj):
if obj.request_counter < 10:
@ -31,6 +34,8 @@ class ClockifyAPI:
self.request_counter = 0
self.request_time = time.time()
self.secure_registry = OpenPypeSecureRegistry("clockify")
@property
def headers(self):
return {"X-Api-Key": self.api_key}
@ -129,22 +134,10 @@ class ClockifyAPI:
return False
def get_api_key(self):
api_key = None
try:
file = open(CREDENTIALS_JSON_PATH, 'r')
api_key = json.load(file).get('api_key', None)
if api_key == '':
api_key = None
except Exception:
file = open(CREDENTIALS_JSON_PATH, 'w')
file.close()
return api_key
return self.secure_registry.get_item("api_key", None)
def save_api_key(self, api_key):
data = {'api_key': api_key}
file = open(CREDENTIALS_JSON_PATH, 'w')
file.write(json.dumps(data))
file.close()
self.secure_registry.set_item("api_key", api_key)
def get_workspaces(self):
action_url = 'workspaces/'
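The Clockify API key moves from a plain JSON file under appdirs into OpenPypeSecureRegistry, the keyring-backed store used elsewhere in OpenPype. Based only on the get_item/set_item calls visible in this hunk, the round trip looks like:

# Sketch only, based on the calls shown above.
from openpype.lib.local_settings import OpenPypeSecureRegistry

registry = OpenPypeSecureRegistry("clockify")
registry.set_item("api_key", "my-secret-key")   # what save_api_key() now does
api_key = registry.get_item("api_key", None)    # what get_api_key() now returns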

View file

@ -1,17 +1,12 @@
import os
import appdirs
CLOCKIFY_FTRACK_SERVER_PATH = os.path.join(
os.path.dirname(__file__), "ftrack", "server"
os.path.dirname(os.path.abspath(__file__)), "ftrack", "server"
)
CLOCKIFY_FTRACK_USER_PATH = os.path.join(
os.path.dirname(__file__), "ftrack", "user"
os.path.dirname(os.path.abspath(__file__)), "ftrack", "user"
)
CREDENTIALS_JSON_PATH = os.path.normpath(os.path.join(
appdirs.user_data_dir("pype-app", "pype"),
"clockify.json"
))
ADMIN_PERMISSION_NAMES = ["WORKSPACE_OWN", "WORKSPACE_ADMIN"]
CLOCKIFY_ENDPOINT = "https://api.clockify.me/api/"

View file

@ -102,7 +102,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
hosts = ["fusion", "maya", "nuke", "celaction", "aftereffects", "harmony"]
families = ["render.farm", "prerender",
families = ["render.farm", "prerender.farm",
"renderlayer", "imagesequence", "vrayscene"]
aov_filter = {"maya": [r".+(?:\.|_)([Bb]eauty)(?:\.|_).*"],

View file

@ -0,0 +1,365 @@
import json
from openpype.api import ProjectSettings
from openpype.modules.ftrack.lib import ServerAction
from openpype.modules.ftrack.lib.avalon_sync import (
get_pype_attr,
CUST_ATTR_AUTO_SYNC
)
class PrepareProjectServer(ServerAction):
"""Prepare project attributes in Anatomy."""
identifier = "prepare.project.server"
label = "OpenPype Admin"
variant = "- Prepare Project (Server)"
description = "Set basic attributes on the project"
settings_key = "prepare_project"
role_list = ["Pypeclub", "Administrator", "Project Manager"]
# Key to store info about triggering create folder structure
item_splitter = {"type": "label", "value": "---"}
def discover(self, session, entities, event):
"""Show only on project."""
if (
len(entities) != 1
or entities[0].entity_type.lower() != "project"
):
return False
return self.valid_roles(session, entities, event)
def interface(self, session, entities, event):
if event['data'].get('values', {}):
return
# Inform user that this may take a while
self.show_message(event, "Preparing data... Please wait", True)
self.log.debug("Preparing data which will be shown")
self.log.debug("Loading custom attributes")
project_entity = entities[0]
project_name = project_entity["full_name"]
try:
project_settings = ProjectSettings(project_name)
except ValueError:
return {
"message": "Project is not synchronized yet",
"success": False
}
project_anatom_settings = project_settings["project_anatomy"]
root_items = self.prepare_root_items(project_anatom_settings)
ca_items, multiselect_enumerators = (
self.prepare_custom_attribute_items(project_anatom_settings)
)
self.log.debug("Heavy items are ready. Preparing last items group.")
title = "Prepare Project"
items = []
# Add root items
items.extend(root_items)
items.append(self.item_splitter)
items.append({
"type": "label",
"value": "<h3>Set basic Attributes:</h3>"
})
items.extend(ca_items)
# This item will be last (before enumerators)
# - sets value of auto synchronization
auto_sync_name = "avalon_auto_sync"
auto_sync_value = project_entity["custom_attributes"].get(
CUST_ATTR_AUTO_SYNC, False
)
auto_sync_item = {
"name": auto_sync_name,
"type": "boolean",
"value": auto_sync_value,
"label": "AutoSync to Avalon"
}
# Add autosync attribute
items.append(auto_sync_item)
# Add enumerator items at the end
for item in multiselect_enumerators:
items.append(item)
return {
"items": items,
"title": title
}
def prepare_root_items(self, project_anatom_settings):
self.log.debug("Root items preparation begins.")
root_items = []
root_items.append({
"type": "label",
"value": "<h3>Check your Project root settings</h3>"
})
root_items.append({
"type": "label",
"value": (
"<p><i>NOTE: Roots are <b>crutial</b> for path filling"
" (and creating folder structure).</i></p>"
)
})
root_items.append({
"type": "label",
"value": (
"<p><i>WARNING: Do not change roots on running project,"
" that <b>will cause workflow issues</b>.</i></p>"
)
})
empty_text = "Enter root path here..."
roots_entity = project_anatom_settings["roots"]
for root_name, root_entity in roots_entity.items():
root_items.append(self.item_splitter)
root_items.append({
"type": "label",
"value": "Root: \"{}\"".format(root_name)
})
for platform_name, value_entity in root_entity.items():
root_items.append({
"label": platform_name,
"name": "__root__{}__{}".format(root_name, platform_name),
"type": "text",
"value": value_entity.value,
"empty_text": empty_text
})
root_items.append({
"type": "hidden",
"name": "__rootnames__",
"value": json.dumps(list(roots_entity.keys()))
})
self.log.debug("Root items preparation ended.")
return root_items
def _attributes_to_set(self, project_anatom_settings):
attributes_to_set = {}
attribute_values_by_key = {}
for key, entity in project_anatom_settings["attributes"].items():
attribute_values_by_key[key] = entity.value
cust_attrs, hier_cust_attrs = get_pype_attr(self.session, True)
for attr in hier_cust_attrs:
key = attr["key"]
if key.startswith("avalon_"):
continue
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
"default": attribute_values_by_key.get(key)
}
for attr in cust_attrs:
if attr["entity_type"].lower() != "show":
continue
key = attr["key"]
if key.startswith("avalon_"):
continue
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
"default": attribute_values_by_key.get(key)
}
# Sort by label
attributes_to_set = dict(sorted(
attributes_to_set.items(),
key=lambda x: x[1]["label"]
))
return attributes_to_set
def prepare_custom_attribute_items(self, project_anatom_settings):
items = []
multiselect_enumerators = []
attributes_to_set = self._attributes_to_set(project_anatom_settings)
self.log.debug("Preparing interface for keys: \"{}\"".format(
str([key for key in attributes_to_set])
))
for key, in_data in attributes_to_set.items():
attr = in_data["object"]
# initial item definition
item = {
"name": key,
"label": in_data["label"]
}
# cust attr type - may have different visualization
type_name = attr["type"]["name"].lower()
easy_types = ["text", "boolean", "date", "number"]
easy_type = False
if type_name in easy_types:
easy_type = True
elif type_name == "enumerator":
attr_config = json.loads(attr["config"])
attr_config_data = json.loads(attr_config["data"])
if attr_config["multiSelect"] is True:
multiselect_enumerators.append(self.item_splitter)
multiselect_enumerators.append({
"type": "label",
"value": in_data["label"]
})
default = in_data["default"]
names = []
for option in sorted(
attr_config_data, key=lambda x: x["menu"]
):
name = option["value"]
new_name = "__{}__{}".format(key, name)
names.append(new_name)
item = {
"name": new_name,
"type": "boolean",
"label": "- {}".format(option["menu"])
}
if default:
if isinstance(default, (list, tuple)):
if name in default:
item["value"] = True
else:
if name == default:
item["value"] = True
multiselect_enumerators.append(item)
multiselect_enumerators.append({
"type": "hidden",
"name": "__hidden__{}".format(key),
"value": json.dumps(names)
})
else:
easy_type = True
item["data"] = attr_config_data
else:
self.log.warning((
"Custom attribute \"{}\" has type \"{}\"."
" I don't know how to handle"
).format(key, type_name))
items.append({
"type": "label",
"value": (
"!!! Can't handle Custom attritubte type \"{}\""
" (key: \"{}\")"
).format(type_name, key)
})
if easy_type:
item["type"] = type_name
# default value in interface
default = in_data["default"]
if default is not None:
item["value"] = default
items.append(item)
return items, multiselect_enumerators
def launch(self, session, entities, event):
if not event['data'].get('values', {}):
return
in_data = event['data']['values']
root_values = {}
root_key = "__root__"
for key in tuple(in_data.keys()):
if key.startswith(root_key):
_key = key[len(root_key):]
root_values[_key] = in_data.pop(key)
root_names = in_data.pop("__rootnames__", None)
root_data = {}
for root_name in json.loads(root_names):
root_data[root_name] = {}
for key, value in tuple(root_values.items()):
prefix = "{}__".format(root_name)
if not key.startswith(prefix):
continue
_key = key[len(prefix):]
root_data[root_name][_key] = value
# Find hidden items for multiselect enumerators
keys_to_process = []
for key in in_data:
if key.startswith("__hidden__"):
keys_to_process.append(key)
self.log.debug("Preparing data for Multiselect Enumerators")
enumerators = {}
for key in keys_to_process:
new_key = key.replace("__hidden__", "")
enumerator_items = in_data.pop(key)
enumerators[new_key] = json.loads(enumerator_items)
# find values set for multiselect enumerator
for key, enumerator_items in enumerators.items():
in_data[key] = []
name = "__{}__".format(key)
for item in enumerator_items:
value = in_data.pop(item)
if value is True:
new_key = item.replace(name, "")
in_data[key].append(new_key)
self.log.debug("Setting Custom Attribute values")
project_name = entities[0]["full_name"]
project_settings = ProjectSettings(project_name)
project_anatomy_settings = project_settings["project_anatomy"]
project_anatomy_settings["roots"] = root_data
custom_attribute_values = {}
attributes_entity = project_anatomy_settings["attributes"]
for key, value in in_data.items():
if key not in attributes_entity:
custom_attribute_values[key] = value
else:
attributes_entity[key] = value
project_settings.save()
entity = entities[0]
for key, value in custom_attribute_values.items():
entity["custom_attributes"][key] = value
self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
return True
def register(session):
'''Register plugin. Called when used as a plugin.'''
PrepareProjectServer(session).register()

View file

@ -1,31 +1,34 @@
import os
import json
from openpype.modules.ftrack.lib import BaseAction, statics_icon
from openpype.api import config, Anatomy
from openpype.modules.ftrack.lib.avalon_sync import get_pype_attr
from openpype.api import ProjectSettings
from openpype.modules.ftrack.lib import (
BaseAction,
statics_icon
)
from openpype.modules.ftrack.lib.avalon_sync import (
get_pype_attr,
CUST_ATTR_AUTO_SYNC
)
class PrepareProject(BaseAction):
'''Edit meta data action.'''
class PrepareProjectLocal(BaseAction):
"""Prepare project attributes in Anatomy."""
#: Action identifier.
identifier = 'prepare.project'
#: Action label.
label = 'Prepare Project'
#: Action description.
description = 'Set basic attributes on the project'
#: roles that are allowed to register this action
identifier = "prepare.project.local"
label = "Prepare Project"
description = "Set basic attributes on the project"
icon = statics_icon("ftrack", "action_icons", "PrepareProject.svg")
role_list = ["Pypeclub", "Administrator", "Project Manager"]
settings_key = "prepare_project"
# Key to store info about triggering create folder structure
create_project_structure_key = "create_folder_structure"
item_splitter = {'type': 'label', 'value': '---'}
item_splitter = {"type": "label", "value": "---"}
def discover(self, session, entities, event):
''' Validation '''
"""Show only on project."""
if (
len(entities) != 1
or entities[0].entity_type.lower() != "project"
@ -44,27 +47,22 @@ class PrepareProject(BaseAction):
self.log.debug("Loading custom attributes")
project_name = entities[0]["full_name"]
project_entity = entities[0]
project_name = project_entity["full_name"]
project_defaults = (
config.get_presets(project_name)
.get("ftrack", {})
.get("project_defaults", {})
)
anatomy = Anatomy(project_name)
if not anatomy.roots:
try:
project_settings = ProjectSettings(project_name)
except ValueError:
return {
"success": False,
"message": (
"Have issues with loading Roots for project \"{}\"."
).format(anatomy.project_name)
"message": "Project is not synchronized yet",
"success": False
}
root_items = self.prepare_root_items(anatomy)
project_anatom_settings = project_settings["project_anatomy"]
root_items = self.prepare_root_items(project_anatom_settings)
ca_items, multiselect_enumerators = (
self.prepare_custom_attribute_items(project_defaults)
self.prepare_custom_attribute_items(project_anatom_settings)
)
self.log.debug("Heavy items are ready. Preparing last items group.")
@ -74,19 +72,6 @@ class PrepareProject(BaseAction):
# Add root items
items.extend(root_items)
items.append(self.item_splitter)
# Ask if want to trigger Action Create Folder Structure
items.append({
"type": "label",
"value": "<h3>Want to create basic Folder Structure?</h3>"
})
items.append({
"name": self.create_project_structure_key,
"type": "boolean",
"value": False,
"label": "Check if Yes"
})
items.append(self.item_splitter)
items.append({
@ -99,10 +84,13 @@ class PrepareProject(BaseAction):
# This item will be last (before enumerators)
# - sets value of auto synchronization
auto_sync_name = "avalon_auto_sync"
auto_sync_value = project_entity["custom_attributes"].get(
CUST_ATTR_AUTO_SYNC, False
)
auto_sync_item = {
"name": auto_sync_name,
"type": "boolean",
"value": project_defaults.get(auto_sync_name, False),
"value": auto_sync_value,
"label": "AutoSync to Avalon"
}
# Add autosync attribute
@ -117,13 +105,10 @@ class PrepareProject(BaseAction):
"title": title
}
def prepare_root_items(self, anatomy):
root_items = []
def prepare_root_items(self, project_anatom_settings):
self.log.debug("Root items preparation begins.")
root_names = anatomy.root_names()
roots = anatomy.roots
root_items = []
root_items.append({
"type": "label",
"value": "<h3>Check your Project root settings</h3>"
@ -143,85 +128,40 @@ class PrepareProject(BaseAction):
)
})
default_roots = anatomy.roots
while isinstance(default_roots, dict):
key = tuple(default_roots.keys())[0]
default_roots = default_roots[key]
empty_text = "Enter root path here..."
# Root names is None when anatomy templates contain "{root}"
all_platforms = ["windows", "linux", "darwin"]
if root_names is None:
root_items.append(self.item_splitter)
# find first possible key
for platform in all_platforms:
value = default_roots.raw_data.get(platform) or ""
root_items.append({
"label": platform,
"name": "__root__{}".format(platform),
"type": "text",
"value": value,
"empty_text": empty_text
})
return root_items
root_name_data = {}
missing_roots = []
for root_name in root_names:
root_name_data[root_name] = {}
if not isinstance(roots, dict):
missing_roots.append(root_name)
continue
root_item = roots.get(root_name)
if not root_item:
missing_roots.append(root_name)
continue
for platform in all_platforms:
root_name_data[root_name][platform] = (
root_item.raw_data.get(platform) or ""
)
if missing_roots:
default_values = {}
for platform in all_platforms:
default_values[platform] = (
default_roots.raw_data.get(platform) or ""
)
for root_name in missing_roots:
root_name_data[root_name] = default_values
root_names = list(root_name_data.keys())
root_items.append({
"type": "hidden",
"name": "__rootnames__",
"value": json.dumps(root_names)
})
for root_name, values in root_name_data.items():
roots_entity = project_anatom_settings["roots"]
for root_name, root_entity in roots_entity.items():
root_items.append(self.item_splitter)
root_items.append({
"type": "label",
"value": "Root: \"{}\"".format(root_name)
})
for platform, value in values.items():
for platform_name, value_entity in root_entity.items():
root_items.append({
"label": platform,
"name": "__root__{}{}".format(root_name, platform),
"label": platform_name,
"name": "__root__{}__{}".format(root_name, platform_name),
"type": "text",
"value": value,
"value": value_entity.value,
"empty_text": empty_text
})
root_items.append({
"type": "hidden",
"name": "__rootnames__",
"value": json.dumps(list(roots_entity.keys()))
})
self.log.debug("Root items preparation ended.")
return root_items
def _attributes_to_set(self, project_defaults):
def _attributes_to_set(self, project_anatom_settings):
attributes_to_set = {}
attribute_values_by_key = {}
for key, entity in project_anatom_settings["attributes"].items():
attribute_values_by_key[key] = entity.value
cust_attrs, hier_cust_attrs = get_pype_attr(self.session, True)
for attr in hier_cust_attrs:
@ -231,7 +171,7 @@ class PrepareProject(BaseAction):
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
"default": project_defaults.get(key)
"default": attribute_values_by_key.get(key)
}
for attr in cust_attrs:
@ -243,7 +183,7 @@ class PrepareProject(BaseAction):
attributes_to_set[key] = {
"label": attr["label"],
"object": attr,
"default": project_defaults.get(key)
"default": attribute_values_by_key.get(key)
}
# Sort by label
@ -253,10 +193,10 @@ class PrepareProject(BaseAction):
))
return attributes_to_set
def prepare_custom_attribute_items(self, project_defaults):
def prepare_custom_attribute_items(self, project_anatom_settings):
items = []
multiselect_enumerators = []
attributes_to_set = self._attributes_to_set(project_defaults)
attributes_to_set = self._attributes_to_set(project_anatom_settings)
self.log.debug("Preparing interface for keys: \"{}\"".format(
str([key for key in attributes_to_set])
@ -363,24 +303,15 @@ class PrepareProject(BaseAction):
root_names = in_data.pop("__rootnames__", None)
root_data = {}
if root_names:
for root_name in json.loads(root_names):
root_data[root_name] = {}
for key, value in tuple(root_values.items()):
if key.startswith(root_name):
_key = key[len(root_name):]
root_data[root_name][_key] = value
for root_name in json.loads(root_names):
root_data[root_name] = {}
for key, value in tuple(root_values.items()):
prefix = "{}__".format(root_name)
if not key.startswith(prefix):
continue
else:
for key, value in root_values.items():
root_data[key] = value
# TODO implement creating of anatomy for new projects
# project_name = entities[0]["full_name"]
# anatomy = Anatomy(project_name)
# pop out info about creating project structure
create_proj_struct = in_data.pop(self.create_project_structure_key)
_key = key[len(prefix):]
root_data[root_name][_key] = value
# Find hidden items for multiselect enumerators
keys_to_process = []
@ -407,54 +338,31 @@ class PrepareProject(BaseAction):
new_key = item.replace(name, "")
in_data[key].append(new_key)
self.log.debug("Setting Custom Attribute values:")
entity = entities[0]
self.log.debug("Setting Custom Attribute values")
project_name = entities[0]["full_name"]
project_settings = ProjectSettings(project_name)
project_anatomy_settings = project_settings["project_anatomy"]
project_anatomy_settings["roots"] = root_data
custom_attribute_values = {}
attributes_entity = project_anatomy_settings["attributes"]
for key, value in in_data.items():
if key not in attributes_entity:
custom_attribute_values[key] = value
else:
attributes_entity[key] = value
project_settings.save()
entity = entities[0]
for key, value in custom_attribute_values.items():
entity["custom_attributes"][key] = value
self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value))
session.commit()
# Create project structure
self.create_project_specific_config(entities[0]["full_name"], in_data)
# Trigger Create Project Structure action
if create_proj_struct is True:
self.trigger_action("create.project.structure", event)
return True
def create_project_specific_config(self, project_name, json_data):
self.log.debug("*** Creating project specifig configs ***")
project_specific_path = project_overrides_dir_path(project_name)
if not os.path.exists(project_specific_path):
os.makedirs(project_specific_path)
self.log.debug((
"Project specific config folder for project \"{}\" created."
).format(project_name))
# Presets ####################################
self.log.debug("--- Processing Presets Begins: ---")
project_defaults_dir = os.path.normpath(os.path.join(
project_specific_path, "presets", "ftrack"
))
project_defaults_path = os.path.normpath(os.path.join(
project_defaults_dir, "project_defaults.json"
))
# Create folder if not exist
if not os.path.exists(project_defaults_dir):
self.log.debug("Creating Ftrack Presets folder: \"{}\"".format(
project_defaults_dir
))
os.makedirs(project_defaults_dir)
with open(project_defaults_path, 'w') as file_stream:
json.dump(json_data, file_stream, indent=4)
self.log.debug("*** Creating project specifig configs Finished ***")
def register(session):
'''Register plugin. Called when used as a plugin.'''
PrepareProject(session).register()
PrepareProjectLocal(session).register()

View file

@ -210,3 +210,7 @@ class FtrackModule(
def tray_exit(self):
return self.tray_module.stop_action_server()
def set_credentials_to_env(self, username, api_key):
os.environ["FTRACK_API_USER"] = username or ""
os.environ["FTRACK_API_KEY"] = api_key or ""

View file

@ -891,6 +891,33 @@ class SyncEntitiesFactory:
self.entities_dict[parent_id]["children"].remove(id)
def _query_custom_attributes(self, session, conf_ids, entity_ids):
output = []
# Prepare values to query
attributes_joined = join_query_keys(conf_ids)
attributes_len = len(conf_ids)
chunk_size = int(5000 / attributes_len)
for idx in range(0, len(entity_ids), chunk_size):
entity_ids_joined = join_query_keys(
entity_ids[idx:idx + chunk_size]
)
call_expr = [{
"action": "query",
"expression": (
"select value, entity_id from ContextCustomAttributeValue "
"where entity_id in ({}) and configuration_id in ({})"
).format(entity_ids_joined, attributes_joined)
}]
if hasattr(session, "call"):
[result] = session.call(call_expr)
else:
[result] = session._call(call_expr)
for item in result["data"]:
output.append(item)
return output
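The new helper caps each ContextCustomAttributeValue query at roughly 5000 entity/attribute combinations: chunk_size = int(5000 / attributes_len), so with four attribute configurations each call covers at most 1250 entity ids. A standalone sketch of the batching arithmetic (hypothetical ids):

# Sketch only: the batching arithmetic used by _query_custom_attributes.
conf_ids = ["c1", "c2", "c3", "c4"]                   # hypothetical configuration ids
entity_ids = ["e{}".format(i) for i in range(3000)]   # hypothetical entity ids

chunk_size = int(5000 / len(conf_ids))   # 1250 entity ids per call
for idx in range(0, len(entity_ids), chunk_size):
    chunk = entity_ids[idx:idx + chunk_size]
    print(len(chunk))                    # 1250, 1250, 500 -> three server calls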
def set_cutom_attributes(self):
self.log.debug("* Preparing custom attributes")
# Get custom attributes and values
@ -1000,31 +1027,13 @@ class SyncEntitiesFactory:
copy.deepcopy(prepared_avalon_attr_ca_id)
)
# TODO query custom attributes by entity_id
entity_ids_joined = ", ".join([
"\"{}\"".format(id) for id in sync_ids
])
attributes_joined = ", ".join([
"\"{}\"".format(attr_id) for attr_id in attribute_key_by_id.keys()
])
cust_attr_query = (
"select value, configuration_id, entity_id"
" from ContextCustomAttributeValue"
" where entity_id in ({}) and configuration_id in ({})"
items = self._query_custom_attributes(
self.session,
list(attribute_key_by_id.keys()),
sync_ids
)
call_expr = [{
"action": "query",
"expression": cust_attr_query.format(
entity_ids_joined, attributes_joined
)
}]
if hasattr(self.session, "call"):
[values] = self.session.call(call_expr)
else:
[values] = self.session._call(call_expr)
for item in values["data"]:
for item in items:
entity_id = item["entity_id"]
attr_id = item["configuration_id"]
key = attribute_key_by_id[attr_id]
@ -1106,28 +1115,14 @@ class SyncEntitiesFactory:
for key, val in prepare_dict_avalon.items():
entity_dict["avalon_attrs"][key] = val
# Prepare values to query
entity_ids_joined = ", ".join([
"\"{}\"".format(id) for id in sync_ids
])
attributes_joined = ", ".join([
"\"{}\"".format(attr_id) for attr_id in attribute_key_by_id.keys()
])
avalon_hier = []
call_expr = [{
"action": "query",
"expression": (
"select value, entity_id, configuration_id"
" from ContextCustomAttributeValue"
" where entity_id in ({}) and configuration_id in ({})"
).format(entity_ids_joined, attributes_joined)
}]
if hasattr(self.session, "call"):
[values] = self.session.call(call_expr)
else:
[values] = self.session._call(call_expr)
items = self._query_custom_attributes(
self.session,
list(attribute_key_by_id.keys()),
sync_ids
)
for item in values["data"]:
avalon_hier = []
for item in items:
value = item["value"]
# WARNING It is not possible to propagate enumerate hierarchical
# attributes with multiselection 100% right. Unsetting all values
@ -1256,19 +1251,21 @@ class SyncEntitiesFactory:
if not msg or not items:
continue
self.report_items["warning"][msg] = items
tasks = {}
for task_type in task_types:
task_type_name = task_type["name"]
# Set short name to empty string
# QUESTION Maybe better would be to lower and remove spaces
# from task type name.
tasks[task_type_name] = {
"short_name": ""
}
current_project_anatomy_data = get_anatomy_settings(
project_name, exclude_locals=True
)
anatomy_tasks = current_project_anatomy_data["tasks"]
tasks = {}
default_type_data = {
"short_name": ""
}
for task_type in task_types:
task_type_name = task_type["name"]
tasks[task_type_name] = copy.deepcopy(
anatomy_tasks.get(task_type_name)
or default_type_data
)
project_config = {
"tasks": tasks,

View file

@ -1,23 +1,16 @@
import os
import json
import ftrack_api
import appdirs
import getpass
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
CONFIG_PATH = os.path.normpath(appdirs.user_data_dir("pype-app", "pype"))
CREDENTIALS_FILE_NAME = "ftrack_cred.json"
CREDENTIALS_PATH = os.path.join(CONFIG_PATH, CREDENTIALS_FILE_NAME)
CREDENTIALS_FOLDER = os.path.dirname(CREDENTIALS_PATH)
from openpype.lib import OpenPypeSecureRegistry
if not os.path.isdir(CREDENTIALS_FOLDER):
os.makedirs(CREDENTIALS_FOLDER)
USER_GETTER = None
USERNAME_KEY = "username"
API_KEY_KEY = "api_key"
def get_ftrack_hostname(ftrack_server=None):
@ -30,112 +23,73 @@ def get_ftrack_hostname(ftrack_server=None):
return urlparse(ftrack_server).hostname
def get_user():
if USER_GETTER:
return USER_GETTER()
return getpass.getuser()
def _get_ftrack_secure_key(hostname, key):
"""Secure item key for entered hostname."""
return "/".join(("ftrack", hostname, key))
def get_credentials(ftrack_server=None, user=None):
credentials = {}
if not os.path.exists(CREDENTIALS_PATH):
with open(CREDENTIALS_PATH, "w") as file:
file.write(json.dumps(credentials))
file.close()
return credentials
with open(CREDENTIALS_PATH, "r") as file:
content = file.read()
def get_credentials(ftrack_server=None):
hostname = get_ftrack_hostname(ftrack_server)
if not user:
user = get_user()
username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY)
api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY)
content_json = json.loads(content or "{}")
credentials = content_json.get(hostname, {}).get(user) or {}
username_registry = OpenPypeSecureRegistry(username_name)
api_key_registry = OpenPypeSecureRegistry(api_key_name)
return credentials
def save_credentials(ft_user, ft_api_key, ftrack_server=None, user=None):
hostname = get_ftrack_hostname(ftrack_server)
if not user:
user = get_user()
with open(CREDENTIALS_PATH, "r") as file:
content = file.read()
content_json = json.loads(content or "{}")
if hostname not in content_json:
content_json[hostname] = {}
content_json[hostname][user] = {
"username": ft_user,
"api_key": ft_api_key
return {
USERNAME_KEY: username_registry.get_item(USERNAME_KEY, None),
API_KEY_KEY: api_key_registry.get_item(API_KEY_KEY, None)
}
# Deprecated keys
if "username" in content_json:
content_json.pop("username")
if "apiKey" in content_json:
content_json.pop("apiKey")
with open(CREDENTIALS_PATH, "w") as file:
file.write(json.dumps(content_json, indent=4))
def clear_credentials(ft_user=None, ftrack_server=None, user=None):
if not ft_user:
ft_user = os.environ.get("FTRACK_API_USER")
if not ft_user:
return
def save_credentials(username, api_key, ftrack_server=None):
hostname = get_ftrack_hostname(ftrack_server)
if not user:
user = get_user()
username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY)
api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY)
with open(CREDENTIALS_PATH, "r") as file:
content = file.read()
# Clear credentials
clear_credentials(ftrack_server)
content_json = json.loads(content or "{}")
if hostname not in content_json:
content_json[hostname] = {}
username_registry = OpenPypeSecureRegistry(username_name)
api_key_registry = OpenPypeSecureRegistry(api_key_name)
content_json[hostname].pop(user, None)
with open(CREDENTIALS_PATH, "w") as file:
file.write(json.dumps(content_json))
username_registry.set_item(USERNAME_KEY, username)
api_key_registry.set_item(API_KEY_KEY, api_key)
def set_env(ft_user=None, ft_api_key=None):
os.environ["FTRACK_API_USER"] = ft_user or ""
os.environ["FTRACK_API_KEY"] = ft_api_key or ""
def clear_credentials(ftrack_server=None):
hostname = get_ftrack_hostname(ftrack_server)
username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY)
api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY)
username_registry = OpenPypeSecureRegistry(username_name)
api_key_registry = OpenPypeSecureRegistry(api_key_name)
current_username = username_registry.get_item(USERNAME_KEY, None)
current_api_key = api_key_registry.get_item(API_KEY_KEY, None)
if current_username is not None:
username_registry.delete_item(USERNAME_KEY)
if current_api_key is not None:
api_key_registry.delete_item(API_KEY_KEY)
def get_env_credentials():
return (
os.environ.get("FTRACK_API_USER"),
os.environ.get("FTRACK_API_KEY")
)
def check_credentials(ft_user, ft_api_key, ftrack_server=None):
def check_credentials(username, api_key, ftrack_server=None):
if not ftrack_server:
ftrack_server = os.environ["FTRACK_SERVER"]
if not ft_user or not ft_api_key:
if not username or not api_key:
return False
try:
session = ftrack_api.Session(
server_url=ftrack_server,
api_key=ft_api_key,
api_user=ft_user
api_key=api_key,
api_user=username
)
session.close()
except Exception:
return False
return True
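After this refactor the ftrack credentials live in per-hostname OpenPypeSecureRegistry entries (keys of the form "ftrack/<hostname>/username" and "ftrack/<hostname>/api_key") instead of ftrack_cred.json. A rough sketch of the new flow, using only the functions defined above (import path assumed):

# Sketch only, hypothetical values; `credentials` is this refactored module.
from openpype.modules.ftrack.lib import credentials  # assumed import path

if credentials.check_credentials("jane.doe", "xxxx-api-key"):
    credentials.save_credentials("jane.doe", "xxxx-api-key")

print(credentials.get_credentials())
# {'username': 'jane.doe', 'api_key': 'xxxx-api-key'}

credentials.clear_credentials()  # deletes both secure-registry items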

View file

@ -1,6 +1,7 @@
import os
from openpype.api import get_system_settings
def get_ftrack_settings():
return get_system_settings()["modules"]["ftrack"]
@ -10,7 +11,6 @@ def get_ftrack_url_from_settings():
def get_ftrack_event_mongo_info():
ftrack_settings = get_ftrack_settings()
database_name = os.environ["OPENPYPE_DATABASE_NAME"]
collection_name = "ftrack_events"
return database_name, collection_name

View file

@ -30,7 +30,7 @@ class FtrackTrayWrapper:
self.bool_action_thread_running = False
self.bool_timer_event = False
self.widget_login = login_dialog.CredentialsDialog()
self.widget_login = login_dialog.CredentialsDialog(module)
self.widget_login.login_changed.connect(self.on_login_change)
self.widget_login.logout_signal.connect(self.on_logout)
@ -56,7 +56,7 @@ class FtrackTrayWrapper:
validation = credentials.check_credentials(ft_user, ft_api_key)
if validation:
self.widget_login.set_credentials(ft_user, ft_api_key)
credentials.set_env(ft_user, ft_api_key)
self.module.set_credentials_to_env(ft_user, ft_api_key)
log.info("Connected to Ftrack successfully")
self.on_login_change()
@ -337,7 +337,7 @@ class FtrackTrayWrapper:
def changed_user(self):
self.stop_action_server()
credentials.set_env()
self.module.set_credentials_to_env(None, None)
self.validate()
def start_timer_manager(self, data):

View file

@ -14,11 +14,13 @@ class CredentialsDialog(QtWidgets.QDialog):
login_changed = QtCore.Signal()
logout_signal = QtCore.Signal()
def __init__(self, parent=None):
def __init__(self, module, parent=None):
super(CredentialsDialog, self).__init__(parent)
self.setWindowTitle("OpenPype - Ftrack Login")
self._module = module
self._login_server_thread = None
self._is_logged = False
self._in_advance_mode = False
@ -268,7 +270,7 @@ class CredentialsDialog(QtWidgets.QDialog):
verification = credentials.check_credentials(username, api_key)
if verification:
credentials.save_credentials(username, api_key, False)
credentials.set_env(username, api_key)
self._module.set_credentials_to_env(username, api_key)
self.set_credentials(username, api_key)
self.login_changed.emit()
return verification

View file

@ -40,7 +40,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
continue
# exclude if not heroTrack True
if not instance.data.get("masterLayer"):
if not instance.data.get("heroTrack"):
continue
# get asset build data if any available
@ -50,7 +50,7 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
# suppose that all instances are Shots
shot_data['entity_type'] = 'Shot'
shot_data['tasks'] = instance.data.get("tasks") or []
shot_data['tasks'] = instance.data.get("tasks") or {}
shot_data["comments"] = instance.data.get("comments", [])
shot_data['custom_attributes'] = {

View file

@ -7,6 +7,14 @@
"not ready"
]
},
"prepare_project": {
"enabled": true,
"role_list": [
"Pypeclub",
"Administrator",
"Project manager"
]
},
"sync_hier_entity_attributes": {
"enabled": true,
"interest_entity_types": [

View file

@ -0,0 +1,10 @@
{
"publish": {
"ValidateMissingLayers": {
"enabled": true,
"optional": true,
"active": true
}
},
"filters": {}
}

View file

@ -82,6 +82,10 @@
"type": "schema",
"name": "schema_project_harmony"
},
{
"type": "schema",
"name": "schema_project_tvpaint"
},
{
"type": "schema",
"name": "schema_project_celaction"

View file

@ -36,6 +36,25 @@
}
]
},
{
"type": "dict",
"key": "prepare_project",
"label": "Prepare Project",
"checkbox_key": "enabled",
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "list",
"key": "role_list",
"label": "Roles",
"object_type": "text"
}
]
},
{
"type": "dict",
"key": "sync_hier_entity_attributes",

View file

@ -0,0 +1,32 @@
{
"type": "dict",
"collapsible": true,
"key": "tvpaint",
"label": "TVPaint",
"is_file": true,
"children": [
{
"type": "dict",
"collapsible": true,
"key": "publish",
"label": "Publish plugins",
"is_file": true,
"children": [
{
"type": "schema_template",
"name": "template_publish_plugin",
"template_data": [
{
"key": "ValidateMissingLayers",
"label": "ValidateMissingLayers"
}
]
}
]
},
{
"type": "schema",
"name": "schema_publish_gui_filter"
}
]
}

View file

@ -5,28 +5,16 @@ class LocalGeneralWidgets(QtWidgets.QWidget):
def __init__(self, parent):
super(LocalGeneralWidgets, self).__init__(parent)
local_site_name_input = QtWidgets.QLineEdit(self)
layout = QtWidgets.QFormLayout(self)
layout.setContentsMargins(0, 0, 0, 0)
layout.addRow("Local site label", local_site_name_input)
self.local_site_name_input = local_site_name_input
def update_local_settings(self, value):
site_label = ""
if value:
site_label = value.get("site_label", site_label)
self.local_site_name_input.setText(site_label)
return
# RETURNING EARLY TO HIDE WIDGET WITHOUT CONTENT
def settings_value(self):
# Add changed
# If these have changed then
output = {}
local_site_name = self.local_site_name_input.text()
if local_site_name:
output["site_label"] = local_site_name
# Do not return output yet since we don't have mechanism to save or
# load these data through api calls
# TEMPORARILY EMPTY AS THERE IS NOTHING TO PUT HERE
return output

View file

@ -80,6 +80,7 @@ class LocalSettingsWidget(QtWidgets.QWidget):
general_widget = LocalGeneralWidgets(general_content)
general_layout.addWidget(general_widget)
general_expand_widget.hide()
self.main_layout.addWidget(general_expand_widget)
@ -126,9 +127,9 @@ class LocalSettingsWidget(QtWidgets.QWidget):
self.system_settings.reset()
self.project_settings.reset()
self.general_widget.update_local_settings(
value.get(LOCAL_GENERAL_KEY)
)
# self.general_widget.update_local_settings(
# value.get(LOCAL_GENERAL_KEY)
# )
self.app_widget.update_local_settings(
value.get(LOCAL_APPS_KEY)
)
@ -138,9 +139,9 @@ class LocalSettingsWidget(QtWidgets.QWidget):
def settings_value(self):
output = {}
general_value = self.general_widget.settings_value()
if general_value:
output[LOCAL_GENERAL_KEY] = general_value
# general_value = self.general_widget.settings_value()
# if general_value:
# output[LOCAL_GENERAL_KEY] = general_value
app_value = self.app_widget.settings_value()
if app_value:

View file

@ -363,7 +363,7 @@ class PypeInfoWidget(QtWidgets.QWidget):
"version_value": "OpenPype version:",
"executable": "OpenPype executable:",
"pype_root": "OpenPype location:",
"mongo_url": "OpenPype Mongo URL:"
"mongo_url": "OpenPype Mongo URL:"
}
# Prepare keys order
keys_order = ["version_value", "executable", "pype_root", "mongo_url"]