Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)
Merge branch 'hotfix/harmony-postpone-scene-settings' into develop
Commit df4c7d6375: 33 changed files with 834 additions and 512 deletions
126  pype/hooks/tvpaint/prelaunch.py  Normal file
@@ -0,0 +1,126 @@
import os
import shutil
from pype.lib import PypeHook
from pype.api import (
    Anatomy,
    Logger
)
import getpass
import avalon.api


class TvpaintPrelaunchHook(PypeHook):
    """
    Workfile preparation hook
    """
    host_name = "tvpaint"

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:
        if not env:
            env = os.environ

        # get context variables
        project_name = env["AVALON_PROJECT"]
        asset_name = env["AVALON_ASSET"]
        task_name = env["AVALON_TASK"]
        workdir = env["AVALON_WORKDIR"]
        extension = avalon.api.HOST_WORKFILE_EXTENSIONS[self.host_name][0]

        # get workfile path
        workfile_path = self.get_anatomy_filled(
            workdir, project_name, asset_name, task_name)

        # create workdir if it doesn't exist
        os.makedirs(workdir, exist_ok=True)
        self.log.info(f"Work dir is: `{workdir}`")

        # get last version of workfile
        workfile_last = env.get("AVALON_LAST_WORKFILE")
        self.log.debug(f"_ workfile_last: `{workfile_last}`")

        if workfile_last:
            workfile = workfile_last
            workfile_path = os.path.join(workdir, workfile)

        # copy workfile from template if none exists on the path yet
        if not os.path.isfile(workfile_path):
            # try to get the path from the environment or use the default
            # from the `pype.hosts.tvpaint` dir
            template_path = env.get("TVPAINT_TEMPLATE") or os.path.join(
                env.get("PYPE_MODULE_ROOT"),
                "pype/hosts/tvpaint/template.tvpp"
            )

            # try to get a template from the project config folder
            proj_config_path = os.path.join(
                env["PYPE_PROJECT_CONFIGS"], project_name)
            if os.path.exists(proj_config_path):

                template_file = None
                for f in os.listdir(proj_config_path):
                    if extension in os.path.splitext(f):
                        template_file = f

                if template_file:
                    template_path = os.path.join(
                        proj_config_path, template_file)
            self.log.info(
                f"Creating workfile from template: `{template_path}`")

            # copy the template to the new destination
            shutil.copy2(
                os.path.normpath(template_path),
                os.path.normpath(workfile_path)
            )

        self.log.info(f"Workfile to open: `{workfile_path}`")

        # add compulsory environment var for opening the file
        env["PYPE_TVPAINT_PROJECT_FILE"] = workfile_path

        return True

    def get_anatomy_filled(self, workdir, project_name, asset_name, task_name):
        dbcon = avalon.api.AvalonMongoDB()
        dbcon.install()
        dbcon.Session["AVALON_PROJECT"] = project_name
        project_document = dbcon.find_one({"type": "project"})
        asset_document = dbcon.find_one({
            "type": "asset",
            "name": asset_name
        })
        dbcon.uninstall()

        asset_doc_parents = asset_document["data"].get("parents")
        hierarchy = "/".join(asset_doc_parents)

        data = {
            "project": {
                "name": project_document["name"],
                "code": project_document["data"].get("code")
            },
            "task": task_name,
            "asset": asset_name,
            "app": self.host_name,
            "hierarchy": hierarchy
        }
        anatomy = Anatomy(project_name)
        extensions = avalon.api.HOST_WORKFILE_EXTENSIONS[self.host_name]
        file_template = anatomy.templates["work"]["file"]
        data.update({
            "version": 1,
            "user": os.environ.get("PYPE_USERNAME") or getpass.getuser(),
            "ext": extensions[0]
        })

        return avalon.api.last_workfile(
            workdir, file_template, data, extensions, True
        )
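The hook resolves the workfile in two steps: the Anatomy "work" file template is filled with context data, then avalon.api.last_workfile scans the work directory for the highest existing version, falling back to the filled template at version 1. A minimal sketch of the data-to-filename step, with an invented template string and values (real templates come from Anatomy presets):

# Hypothetical illustration only.
file_template = "{project[code]}_{asset}_{task}_v{version:0>3}{ext}"
data = {
    "project": {"name": "TestProject", "code": "tp"},
    "asset": "sh010",
    "task": "animation",
    "version": 1,
    "ext": ".tvpp",
}
filename = file_template.format(**data)  # -> "tp_sh010_animation_v001.tvpp"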
@@ -155,8 +155,11 @@ def check_inventory():


 def application_launch():
-    ensure_scene_settings()
-    check_inventory()
+    # FIXME: This is breaking server <-> client communication.
+    # It is now moved so it is manually called.
+    # ensure_scene_settings()
+    # check_inventory()
+    pass


 def export_template(backdrops, nodes, filepath):
@@ -389,24 +389,28 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
     # imprinting group node
     avalon.nuke.imprint(GN, data["avalon"])

-    divider = nuke.Text_Knob('')
-    GN.addKnob(divider)
+    # add divider
+    GN.addKnob(nuke.Text_Knob(''))

     add_rendering_knobs(GN)

     if review:
         add_review_knob(GN)

+    # add divider
+    GN.addKnob(nuke.Text_Knob(''))
+
+    # Add linked knobs.
+    linked_knob_names = ["Render", "use_limit", "first", "last"]
+    for name in linked_knob_names:
+        link = nuke.Link_Knob(name)
+        link.makeLink(write_node.name(), name)
+        link.setName(name)
+        link.setFlag(0x1000)
+        GN.addKnob(link)

-    divider = nuke.Text_Knob('')
-    GN.addKnob(divider)
+    # add divider
+    GN.addKnob(nuke.Text_Knob(''))

     # adding write to read button
     add_button_write_to_read(GN)

@@ -431,13 +435,9 @@ def add_rendering_knobs(node):
         node (obj): with added knobs
     '''
     if "render" not in node.knobs():
-        knob = nuke.Boolean_Knob("render", "Render")
-        knob.setValue(False)
+        knob = nuke.Enumeration_Knob("render", "Render", [
+            "Use existing frames", "Local", "On farm"])
+        knob.setFlag(0x1000)
         node.addKnob(knob)
-    if "render_farm" not in node.knobs():
-        knob = nuke.Boolean_Knob("render_farm", "Render on Farm")
-        knob.setValue(False)
-        node.addKnob(knob)
     return node
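The change in add_rendering_knobs collapses the old pair of boolean knobs ("render", "render_farm") into a single enumeration, so downstream code can switch on one value. A minimal sketch of reading it inside Nuke; the node name is hypothetical, not part of the commit:

import nuke  # only available inside a Nuke session

group_node = nuke.toNode("WriteGroup1")  # hypothetical node name
mode = group_node["render"].value()
if mode == "Use existing frames":
    print("reusing frames already on disk")
elif mode == "Local":
    nuke.execute(group_node, 1001, 1100)  # render a frame range in-session
else:  # "On farm"
    print("deferring to the farm submitter")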
1  pype/hosts/tvpaint/__init__.py  Normal file
@@ -0,0 +1 @@
kwargs = None
BIN  pype/hosts/tvpaint/template.tvpp  Normal file
Binary file not shown.
@@ -1,6 +1,6 @@
 from . import ftrack_server
 from .ftrack_server import FtrackServer, check_ftrack_url
-from .lib import BaseHandler, BaseEvent, BaseAction
+from .lib import BaseHandler, BaseEvent, BaseAction, ServerAction

 __all__ = (
     "ftrack_server",

@@ -8,5 +8,6 @@ __all__ = (
     "check_ftrack_url",
     "BaseHandler",
     "BaseEvent",
-    "BaseAction"
+    "BaseAction",
+    "ServerAction"
 )
@@ -46,7 +46,7 @@ class RVAction(BaseAction):
             return

         self.allowed_types = self.config_data.get(
-            'file_ext', ["img", "mov", "exr"]
+            'file_ext', ["img", "mov", "exr", "mp4"]
         )

     def discover(self, session, entities, event):
@@ -1,10 +1,10 @@
 import json
 import collections
 import ftrack_api
-from pype.modules.ftrack.lib import BaseAction
+from pype.modules.ftrack.lib import ServerAction


-class PushFrameValuesToTaskAction(BaseAction):
+class PushFrameValuesToTaskAction(ServerAction):
     """Action for testing purpose or as base for new actions."""

     # Ignore event handler by default

@@ -34,50 +34,14 @@ class PushFrameValuesToTaskAction(BaseAction):
         "frameStart": "fstart",
         "frameEnd": "fend"
     }
-    discover_role_list = {"Pypeclub", "Administrator", "Project Manager"}
-
-    def register(self):
-        modified_role_names = set()
-        for role_name in self.discover_role_list:
-            modified_role_names.add(role_name.lower())
-        self.discover_role_list = modified_role_names
-
-        self.session.event_hub.subscribe(
-            "topic=ftrack.action.discover",
-            self._discover,
-            priority=self.priority
-        )
-
-        launch_subscription = (
-            "topic=ftrack.action.launch and data.actionIdentifier={0}"
-        ).format(self.identifier)
-        self.session.event_hub.subscribe(launch_subscription, self._launch)
+    role_list = {"Pypeclub", "Administrator", "Project Manager"}

-    def discover(self, session, entities, event):
-        """ Validation """
-        # Check if selection is valid
-        valid_selection = False
-        for ent in event["data"]["selection"]:
-            # Ignore entities that are not tasks or projects
-            if ent["entityType"].lower() == "show":
-                valid_selection = True
-                break
-
-        if not valid_selection:
-            return False
-
-        # Get user and check his roles
-        user_id = event.get("source", {}).get("user", {}).get("id")
-        if not user_id:
-            return False
-
-        user = session.query("User where id is \"{}\"".format(user_id)).first()
-        if not user:
-            return False
-
-        for role in user["user_security_roles"]:
-            lowered_role = role["security_role"]["name"].lower()
-            if lowered_role in self.discover_role_list:
-                return True
-        return False
@@ -1,11 +1,11 @@
 import time
 import traceback

-from pype.modules.ftrack import BaseAction
+from pype.modules.ftrack import ServerAction
 from pype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory


-class SyncToAvalonServer(BaseAction):
+class SyncToAvalonServer(ServerAction):
     """
     Synchronizing data action - from Ftrack to Avalon DB

@@ -15,7 +15,7 @@ class SyncToAvalonServer(BaseAction):
     - Data(dictionary):
         - VisualParent(ObjectId) - Avalon Id of parent asset
         - Parents(array of string) - All parent names except project
-        - Tasks(array of string) - Tasks on asset
+        - Tasks(dictionary of dictionaries) - Tasks on asset
         - FtrackId(string)
         - entityType(string) - entity's type on Ftrack
     * All Custom attributes in group 'Avalon'

@@ -36,48 +36,18 @@ class SyncToAvalonServer(BaseAction):
     variant = "- Sync To Avalon (Server)"
     #: Action description.
     description = "Send data from Ftrack to Avalon"
+    role_list = {"Pypeclub", "Administrator", "Project Manager"}

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.entities_factory = SyncEntitiesFactory(self.log, self.session)
-
-    def register(self):
-        self.session.event_hub.subscribe(
-            "topic=ftrack.action.discover",
-            self._discover,
-            priority=self.priority
-        )
-
-        launch_subscription = (
-            "topic=ftrack.action.launch and data.actionIdentifier={0}"
-        ).format(self.identifier)
-        self.session.event_hub.subscribe(launch_subscription, self._launch)
-
-    def discover(self, session, entities, event):
-        """ Validation """
-        # Check if selection is valid
-        valid_selection = False
-        for ent in event["data"]["selection"]:
-            # Ignore entities that are not tasks or projects
-            if ent["entityType"].lower() in ["show", "task"]:
-                valid_selection = True
-                break
-
-        if not valid_selection:
-            return False
-
-        # Get user and check his roles
-        user_id = event.get("source", {}).get("user", {}).get("id")
-        if not user_id:
-            return False
-
-        user = session.query("User where id is \"{}\"".format(user_id)).first()
-        if not user:
-            return False
-
-        role_list = ["Pypeclub", "Administrator", "Project Manager"]
-        for role in user["user_security_roles"]:
-            if role["security_role"]["name"] in role_list:
-                return True
-        return False
@@ -40,6 +40,15 @@ class SyncToAvalonEvent(BaseEvent):
         "select id, name, parent_id, link, custom_attributes from TypedContext"
         " where project_id is \"{}\" and id in ({})"
     )
+
+    # useful for getting all tasks for an asset
+    task_entities_query_by_parent_id = (
+        "select id, name, parent_id, type_id from Task"
+        " where project_id is \"{}\" and parent_id in ({})"
+    )
+    task_types_query = (
+        "select id, name from Type"
+    )
     entities_name_query_by_name = (
         "select id, name from TypedContext"
         " where project_id is \"{}\" and name in ({})"
@@ -313,9 +322,6 @@ class SyncToAvalonEvent(BaseEvent):
         if self._avalon_archived_by_id is not None:
             self._avalon_archived_by_id[mongo_id] = entity

-        if mongo_id in self.task_changes_by_avalon_id:
-            self.task_changes_by_avalon_id.pop(mongo_id)
-
     def _bubble_changeability(self, unchangeable_ids):
         unchangeable_queue = queue.Queue()
         for entity_id in unchangeable_ids:
@@ -383,8 +389,6 @@ class SyncToAvalonEvent(BaseEvent):
         self._avalon_archived_by_id = None
         self._avalon_archived_by_name = None

-        self.task_changes_by_avalon_id = {}
-
         self._avalon_custom_attributes = None
         self._ent_types_by_name = None
@@ -398,6 +402,10 @@ class SyncToAvalonEvent(BaseEvent):
         self.ftrack_updated = {}
         self.ftrack_removed = {}

+        # set of ftrack ids with modified tasks
+        # handled separately by a full wipeout and replace from Ftrack
+        self.modified_tasks_ftrackids = set()
+
         self.moved_in_avalon = []
         self.renamed_in_avalon = []
         self.hier_cust_attrs_changes = collections.defaultdict(list)
@@ -472,6 +480,16 @@ class SyncToAvalonEvent(BaseEvent):
         return filtered_updates

     def get_ent_path(self, ftrack_id):
+        """
+        Looks for the entity with 'ftrack_id' in Ftrack. If found, returns
+        the concatenated names of its 'link' elements, which describe the
+        entity's location in the tree.
+        Args:
+            ftrack_id (string): entityId of the Ftrack entity
+
+        Returns:
+            (string) - example: "/test_project/assets/my_asset"
+        """
         entity = self.ftrack_ents_by_id.get(ftrack_id)
         if not entity:
             entity = self.process_session.query(
@@ -486,12 +504,24 @@ class SyncToAvalonEvent(BaseEvent):
         return "/".join([ent["name"] for ent in entity["link"]])

     def launch(self, session, event):
+        """
+        Main entry point for synchronization.
+        Goes through the event (which can contain multiple changes) and
+        decides whether the event is interesting for us (interest_entTypes).
+        It separates changes into add|remove|update.
+        All task changes are handled together by a refresh from Ftrack.
+        Args:
+            session (object): session to Ftrack
+            event (dictionary): event content
+
+        Returns:
+            (boolean or None)
+        """
         # Try to commit and if any error happens then recreate the session
         try:
             self.process_session.commit()
         except Exception:
             self.set_process_session(session)

         # Reset object values for each launch
         self.reset_variables()
         self._cur_event = event
@@ -527,9 +557,21 @@ class SyncToAvalonEvent(BaseEvent):
                 continue
             ftrack_id = ftrack_id[0]

+            # task modified, collect parent id of task, handle separately
+            if entity_type.lower() == "task":
+                changes = ent_info.get("changes") or {}
+                if action == "move":
+                    parent_changes = changes["parent_id"]
+                    self.modified_tasks_ftrackids.add(parent_changes["new"])
+                    self.modified_tasks_ftrackids.add(parent_changes["old"])
+
+                elif "typeid" in changes or "name" in changes:
+                    self.modified_tasks_ftrackids.add(ent_info["parentId"])
+                continue
+
             if action == "move":
                 ent_keys = ent_info["keys"]
-                # Seprate update info from move action
+                # Separate update info from move action
                 if len(ent_keys) > 1:
                     _ent_info = ent_info.copy()
                     for ent_key in ent_keys:
@@ -539,14 +581,13 @@ class SyncToAvalonEvent(BaseEvent):
                 else:
                     ent_info["changes"].pop(ent_key, None)
                     ent_info["keys"].remove(ent_key)

                 entities_by_action["update"][ftrack_id] = _ent_info

+            # the regular change process handles everything other than Tasks
             found_actions.add(action)
             entities_by_action[action][ftrack_id] = ent_info

         found_actions = list(found_actions)
-        if not found_actions:
+        if not found_actions and not self.modified_tasks_ftrackids:
             return True

         # Check if auto sync was turned on/off
@@ -585,9 +626,10 @@ class SyncToAvalonEvent(BaseEvent):

         # skip most of the events where nothing has changed for avalon
         if (
-            len(found_actions) == 1 and
-            found_actions[0] == "update" and
-            not updated
+            len(found_actions) == 1
+            and found_actions[0] == "update"
+            and not updated
+            and not self.modified_tasks_ftrackids
         ):
             return True
@@ -622,19 +664,14 @@ class SyncToAvalonEvent(BaseEvent):
             ft_project["full_name"], debug_msg
         ))
         # Get ftrack entities - find all ftrack ids first
-        ftrack_ids = []
-        for ftrack_id in updated:
-            ftrack_ids.append(ftrack_id)
+        ftrack_ids = set(updated.keys())

-        for action, ftrack_ids in entities_by_action.items():
+        for action, _ftrack_ids in entities_by_action.items():
+            # skip updated (already prepared) and removed (not exist in ftrack)
-            if action == "remove":
-                continue
-
-            for ftrack_id in ftrack_ids:
-                if ftrack_id not in ftrack_ids:
-                    ftrack_ids.append(ftrack_id)
+            if action not in ("remove", "update"):
+                ftrack_ids |= set(_ftrack_ids)

         # collect entity records data which might not be in the event
         if ftrack_ids:
             joined_ids = ", ".join(["\"{}\"".format(id) for id in ftrack_ids])
             ftrack_entities = self.process_session.query(
@@ -688,9 +725,11 @@ class SyncToAvalonEvent(BaseEvent):
             time_6 = time.time()
             # 6.) Process changes in hierarchy or hier custom attributes
             self.process_hier_cleanup()
+            time_7 = time.time()
+            self.process_task_updates()
             if self.updates:
                 self.update_entities()
-            time_7 = time.time()
+            time_8 = time.time()

             time_removed = time_2 - time_1
             time_renamed = time_3 - time_2
@@ -698,10 +737,14 @@ class SyncToAvalonEvent(BaseEvent):
             time_moved = time_5 - time_4
             time_updated = time_6 - time_5
             time_cleanup = time_7 - time_6
-            time_total = time_7 - time_1
-            self.log.debug("Process time: {} <{}, {}, {}, {}, {}, {}>".format(
-                time_total, time_removed, time_renamed, time_added, time_moved,
-                time_updated, time_cleanup
+            time_task_updates = time_8 - time_7
+            time_total = time_8 - time_1
+            self.log.debug((
+                "Process time: {:.2f} <{:.2f}, {:.2f}, {:.2f}, "
+                "{:.2f}, {:.2f}, {:.2f}, {:.2f}>"
+            ).format(
+                time_total, time_removed, time_renamed, time_added,
+                time_moved, time_updated, time_cleanup, time_task_updates
             ))

         except Exception:
@@ -714,6 +757,9 @@ class SyncToAvalonEvent(BaseEvent):
         return True

     def process_removed(self):
+        """
+        Handles removed entities (removed tasks are handled separately).
+        """
         if not self.ftrack_removed:
             return
         ent_infos = self.ftrack_removed
@@ -725,29 +771,11 @@ class SyncToAvalonEvent(BaseEvent):
         removed_names = []
         for ftrack_id, removed in ent_infos.items():
             entity_type = removed["entity_type"]
-            parent_id = removed["parentId"]
-            removed_name = removed["changes"]["name"]["old"]
-            if entity_type == "Task":
-                avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id)
-                if not avalon_ent:
-                    self.log.debug((
-                        "Parent entity of task was not found in avalon <{}>"
-                    ).format(self.get_ent_path(parent_id)))
-                    continue
-
-                mongo_id = avalon_ent["_id"]
-                if mongo_id not in self.task_changes_by_avalon_id:
-                    self.task_changes_by_avalon_id[mongo_id] = (
-                        avalon_ent["data"]["tasks"]
-                    )
-
-                if removed_name in self.task_changes_by_avalon_id[mongo_id]:
-                    self.task_changes_by_avalon_id[mongo_id].remove(
-                        removed_name
-                    )
-
+            if entity_type.lower() == "task":
                 continue

+            removed_name = removed["changes"]["name"]["old"]
+
             avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id)
             if not avalon_ent:
                 continue
@@ -1067,12 +1095,8 @@ class SyncToAvalonEvent(BaseEvent):
                 )
             )

-        # Tasks
-        tasks = []
-        for child in ftrack_ent["children"]:
-            if child.entity_type.lower() != "task":
-                continue
-            tasks.append(child["name"])
+        # Add entity to modified so tasks are added at the end
+        self.modified_tasks_ftrackids.add(ftrack_ent["id"])

         # Visual Parent
         vis_par = None
@@ -1092,7 +1116,7 @@ class SyncToAvalonEvent(BaseEvent):
             "entityType": ftrack_ent.entity_type,
             "parents": parents,
             "hierarchy": hierarchy,
-            "tasks": tasks,
+            "tasks": {},
             "visualParent": vis_par
         }
     }
@@ -1267,21 +1291,14 @@ class SyncToAvalonEvent(BaseEvent):
             "Processing renamed entities: {}".format(str(ent_infos))
         )

-        renamed_tasks = {}
-        not_found = {}
         changeable_queue = queue.Queue()
         for ftrack_id, ent_info in ent_infos.items():
             entity_type = ent_info["entity_type"]
+            if entity_type == "Task":
+                continue
+
             new_name = ent_info["changes"]["name"]["new"]
             old_name = ent_info["changes"]["name"]["old"]
-            if entity_type == "Task":
-                parent_id = ent_info["parentId"]
-                renamed_tasks[parent_id] = {
-                    "new": new_name,
-                    "old": old_name,
-                    "ent_info": ent_info
-                }
-                continue
-
             ent_path = self.get_ent_path(ftrack_id)
             avalon_ent = self.avalon_ents_by_ftrack_id.get(ftrack_id)
@@ -1400,60 +1417,6 @@ class SyncToAvalonEvent(BaseEvent):
         if old_names:
             self.check_names_synchronizable(old_names)

-        for parent_id, task_change in renamed_tasks.items():
-            avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id)
-            ent_info = task_change["ent_info"]
-            if not avalon_ent:
-                not_found[ent_info["entityId"]] = ent_info
-                continue
-
-            new_name = task_change["new"]
-            old_name = task_change["old"]
-            passed_regex = avalon_sync.check_regex(
-                new_name, "task", schema_patterns=self.regex_schemas
-            )
-            if not passed_regex:
-                ftrack_id = ent_info["enityId"]
-                self.regex_failed.append(ftrack_id)
-                continue
-
-            mongo_id = avalon_ent["_id"]
-            if mongo_id not in self.task_changes_by_avalon_id:
-                self.task_changes_by_avalon_id[mongo_id] = (
-                    avalon_ent["data"]["tasks"]
-                )
-
-            if old_name in self.task_changes_by_avalon_id[mongo_id]:
-                self.task_changes_by_avalon_id[mongo_id].remove(old_name)
-            else:
-                parent_ftrack_ent = self.ftrack_ents_by_id.get(parent_id)
-                if not parent_ftrack_ent:
-                    parent_ftrack_ent = self.process_session.query(
-                        self.entities_query_by_id.format(
-                            self.cur_project["id"], parent_id
-                        )
-                    ).first()
-
-                if parent_ftrack_ent:
-                    self.ftrack_ents_by_id[parent_id] = parent_ftrack_ent
-                    child_names = []
-                    for child in parent_ftrack_ent["children"]:
-                        if child.entity_type.lower() != "task":
-                            continue
-                        child_names.append(child["name"])
-
-                    tasks = [task for task in (
-                        self.task_changes_by_avalon_id[mongo_id]
-                    )]
-                    for task in tasks:
-                        if task not in child_names:
-                            self.task_changes_by_avalon_id[mongo_id].remove(
-                                task
-                            )
-
-            if new_name not in self.task_changes_by_avalon_id[mongo_id]:
-                self.task_changes_by_avalon_id[mongo_id].append(new_name)
-
-        # not_found are not processed since all not found are
-        # not found because they are not synchronizable
@@ -1471,7 +1434,6 @@ class SyncToAvalonEvent(BaseEvent):
         # Skip if it already exists in the avalon db or tasks entities
         # - happens when it was created by any sync event/action
         pop_out_ents = []
-        new_tasks_by_parent = collections.defaultdict(list)
         for ftrack_id, ent_info in ent_infos.items():
             if self.avalon_ents_by_ftrack_id.get(ftrack_id):
                 pop_out_ents.append(ftrack_id)
@@ -1484,9 +1446,6 @@ class SyncToAvalonEvent(BaseEvent):

             entity_type = ent_info["entity_type"]
             if entity_type == "Task":
-                parent_id = ent_info["parentId"]
-                new_tasks_by_parent[parent_id].append(ent_info)
-                pop_out_ents.append(ftrack_id)
                 continue

             name = (
@@ -1663,82 +1622,11 @@ class SyncToAvalonEvent(BaseEvent):

             self.create_entity_in_avalon(entity, parent_avalon)

-        for parent_id, ent_infos in new_tasks_by_parent.items():
-            avalon_ent = self.avalon_ents_by_ftrack_id.get(parent_id)
-            if not avalon_ent:
-                # TODO logging
-                self.log.debug((
-                    "Skipping synchronization of task"
-                    " because parent was not found in Avalon DB <{}>"
-                ).format(self.get_ent_path(parent_id)))
-                continue
-
-            mongo_id = avalon_ent["_id"]
-            if mongo_id not in self.task_changes_by_avalon_id:
-                self.task_changes_by_avalon_id[mongo_id] = (
-                    avalon_ent["data"]["tasks"]
-                )
-
-            for ent_info in ent_infos:
-                new_name = ent_info["changes"]["name"]["new"]
-                passed_regex = avalon_sync.check_regex(
-                    new_name, "task", schema_patterns=self.regex_schemas
-                )
-                if not passed_regex:
-                    self.regex_failed.append(ent_info["entityId"])
-                    continue
-
-                if new_name not in self.task_changes_by_avalon_id[mongo_id]:
-                    self.task_changes_by_avalon_id[mongo_id].append(new_name)
-
-    def _mongo_id_configuration(
-        self,
-        ent_info,
-        cust_attrs,
-        hier_attrs,
-        temp_dict
-    ):
-        # Use hierarchical mongo id attribute if possible.
-        if "_hierarchical" not in temp_dict:
-            hier_mongo_id_configuration_id = None
-            for attr in hier_attrs:
-                if attr["key"] == CUST_ATTR_ID_KEY:
-                    hier_mongo_id_configuration_id = attr["id"]
-                    break
-            temp_dict["_hierarchical"] = hier_mongo_id_configuration_id
-
-        hier_mongo_id_configuration_id = temp_dict.get("_hierarchical")
-        if hier_mongo_id_configuration_id is not None:
-            return hier_mongo_id_configuration_id
-
-        # Legacy part for cases that MongoID attribute is per entity type.
-        entity_type = ent_info["entity_type"]
-        mongo_id_configuration_id = temp_dict.get(entity_type)
-        if mongo_id_configuration_id is not None:
-            return mongo_id_configuration_id
-
-        for attr in cust_attrs:
-            key = attr["key"]
-            if key != CUST_ATTR_ID_KEY:
-                continue
-
-            if attr["entity_type"] != ent_info["entityType"]:
-                continue
-
-            if (
-                ent_info["entityType"] == "task" and
-                attr["object_type_id"] != ent_info["objectTypeId"]
-            ):
-                continue
-
-            mongo_id_configuration_id = attr["id"]
-            break
-
-        temp_dict[entity_type] = mongo_id_configuration_id
-
-        return mongo_id_configuration_id
-
     def process_moved(self):
         """
         Handles entities moved to a different place in the hierarchy
         (tasks are handled separately).
         """
         if not self.ftrack_moved:
             return
@@ -1872,7 +1760,9 @@ class SyncToAvalonEvent(BaseEvent):
         )

     def process_updated(self):
-        # Only custom attributes changes should get here
+        """
+        Only custom attributes changes should get here
+        """
         if not self.ftrack_updated:
             return
@@ -1970,8 +1860,7 @@ class SyncToAvalonEvent(BaseEvent):
         if (
             not self.moved_in_avalon and
             not self.renamed_in_avalon and
-            not self.hier_cust_attrs_changes and
-            not self.task_changes_by_avalon_id
+            not self.hier_cust_attrs_changes
         ):
             return
@@ -2000,14 +1889,6 @@ class SyncToAvalonEvent(BaseEvent):
             if not all_keys and key not in hier_cust_attrs_keys:
                 hier_cust_attrs_keys.append(key)

-        # Tasks preparation ****
-        for mongo_id, tasks in self.task_changes_by_avalon_id.items():
-            avalon_ent = self.avalon_ents_by_id[mongo_id]
-            if "data" not in self.updates[mongo_id]:
-                self.updates[mongo_id]["data"] = {}
-
-            self.updates[mongo_id]["data"]["tasks"] = tasks
-
         # Parents preparation ***
         mongo_to_ftrack_parents = {}
         missing_ftrack_ents = {}
@@ -2289,11 +2170,96 @@ class SyncToAvalonEvent(BaseEvent):

             self.update_entities()

+    def process_task_updates(self):
+        """
+        Pull task information for the selected ftrack ids and replace
+        what is stored in Avalon.
+        This solves changing the type (and possibly, in the future, the
+        status) of a task without storing the task's ftrack id in the DB,
+        which currently brings little advantage and could be troublesome
+        for all hosts or plugins (for example Nuke) to collect and store.
+        Returns:
+            None
+        """
+        self.log.debug(
+            "Processing task changes for parents: {}".format(
+                self.modified_tasks_ftrackids
+            )
+        )
+        if not self.modified_tasks_ftrackids:
+            return
+
+        joined_ids = ", ".join([
+            "\"{}\"".format(ftrack_id)
+            for ftrack_id in self.modified_tasks_ftrackids
+        ])
+        task_entities = self.process_session.query(
+            self.task_entities_query_by_parent_id.format(
+                self.cur_project["id"], joined_ids
+            )
+        ).all()
+
+        ftrack_mongo_mapping_found = {}
+        not_found_ids = []
+        # Make sure all parents have updated tasks, as they may not have any
+        tasks_per_ftrack_id = {
+            ftrack_id: {}
+            for ftrack_id in self.modified_tasks_ftrackids
+        }
+
+        # Query all task types at once
+        task_types = self.process_session.query(self.task_types_query).all()
+        task_types_by_id = {
+            task_type["id"]: task_type
+            for task_type in task_types
+        }
+
+        # prepare all tasks per parentId, e.g. an Avalon asset record
+        for task_entity in task_entities:
+            task_type = task_types_by_id[task_entity["type_id"]]
+            ftrack_id = task_entity["parent_id"]
+            if ftrack_id not in tasks_per_ftrack_id:
+                tasks_per_ftrack_id[ftrack_id] = {}
+
+            passed_regex = avalon_sync.check_regex(
+                task_entity["name"], "task",
+                schema_patterns=self.regex_schemas
+            )
+            if not passed_regex:
+                self.regex_failed.append(task_entity["id"])
+                continue
+
+            tasks_per_ftrack_id[ftrack_id][task_entity["name"]] = {
+                "type": task_type["name"]
+            }
+
+        # find the avalon entity by parentId
+        # it should be there because create was run first
+        for ftrack_id in tasks_per_ftrack_id.keys():
+            avalon_entity = self.avalon_ents_by_ftrack_id.get(ftrack_id)
+            if not avalon_entity:
+                not_found_ids.append(ftrack_id)
+                continue
+            ftrack_mongo_mapping_found[ftrack_id] = avalon_entity["_id"]
+
+        self._update_avalon_tasks(
+            ftrack_mongo_mapping_found,
+            tasks_per_ftrack_id
+        )
+
     def update_entities(self):
         """
         Update Avalon entities by mongo bulk changes.
         Expects self.updates, which is transferred to the $set part of the
         update command.
         Resets self.updates afterwards.
         """
         mongo_changes_bulk = []
         for mongo_id, changes in self.updates.items():
             filter = {"_id": mongo_id}
-            change_data = avalon_sync.from_dict_to_set(changes)
+            avalon_ent = self.avalon_ents_by_id[mongo_id]
+            is_project = avalon_ent["type"] == "project"
+            change_data = avalon_sync.from_dict_to_set(changes, is_project)
             mongo_changes_bulk.append(UpdateOne(filter, change_data))

         if not mongo_changes_bulk:
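Together with the hunks above, this refactor changes the stored "tasks" value from a list of names to a dictionary keyed by task name, which is what process_task_updates builds. A before/after sketch with invented values:

# Old shape: task names only.
old_tasks = ["animation", "compositing"]

# New shape: a dictionary per task, currently carrying the task type;
# more keys (e.g. status) could be added later without a schema change.
new_tasks = {
    "animation": {"type": "Animation"},
    "compositing": {"type": "Compositing"},
}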
@@ -2477,6 +2443,77 @@ class SyncToAvalonEvent(BaseEvent):
             )
         return True

+    def _update_avalon_tasks(
+        self, ftrack_mongo_mapping_found, tasks_per_ftrack_id
+    ):
+        """
+        Prepare and write new "tasks" content for existing records in Avalon.
+        Args:
+            ftrack_mongo_mapping_found (dictionary): ftrack parentId to
+                Avalon _id mapping
+            tasks_per_ftrack_id (dictionary): task dictionaries per ftrack
+                parentId
+
+        Returns:
+            None
+        """
+        mongo_changes_bulk = []
+        for ftrack_id, mongo_id in ftrack_mongo_mapping_found.items():
+            filter = {"_id": mongo_id}
+            change_data = {"$set": {}}
+            change_data["$set"]["data.tasks"] = tasks_per_ftrack_id[ftrack_id]
+            mongo_changes_bulk.append(UpdateOne(filter, change_data))
+
+        if mongo_changes_bulk:
+            self.dbcon.bulk_write(mongo_changes_bulk)
+
+    def _mongo_id_configuration(
+        self,
+        ent_info,
+        cust_attrs,
+        hier_attrs,
+        temp_dict
+    ):
+        # Use hierarchical mongo id attribute if possible.
+        if "_hierarchical" not in temp_dict:
+            hier_mongo_id_configuration_id = None
+            for attr in hier_attrs:
+                if attr["key"] == CUST_ATTR_ID_KEY:
+                    hier_mongo_id_configuration_id = attr["id"]
+                    break
+            temp_dict["_hierarchical"] = hier_mongo_id_configuration_id
+
+        hier_mongo_id_configuration_id = temp_dict.get("_hierarchical")
+        if hier_mongo_id_configuration_id is not None:
+            return hier_mongo_id_configuration_id
+
+        # Legacy part for cases that MongoID attribute is per entity type.
+        entity_type = ent_info["entity_type"]
+        mongo_id_configuration_id = temp_dict.get(entity_type)
+        if mongo_id_configuration_id is not None:
+            return mongo_id_configuration_id
+
+        for attr in cust_attrs:
+            key = attr["key"]
+            if key != CUST_ATTR_ID_KEY:
+                continue
+
+            if attr["entity_type"] != ent_info["entityType"]:
+                continue
+
+            if (
+                ent_info["entityType"] == "task" and
+                attr["object_type_id"] != ent_info["objectTypeId"]
+            ):
+                continue
+
+            mongo_id_configuration_id = attr["id"]
+            break
+
+        temp_dict[entity_type] = mongo_id_configuration_id
+
+        return mongo_id_configuration_id
+

 def register(session, plugins_presets):
     '''Register plugin. Called when used as a plugin.'''
@@ -2,7 +2,7 @@ from . import avalon_sync
 from . import credentials
 from .ftrack_base_handler import BaseHandler
 from .ftrack_event_handler import BaseEvent
-from .ftrack_action_handler import BaseAction, statics_icon
+from .ftrack_action_handler import BaseAction, ServerAction, statics_icon
 from .ftrack_app_handler import AppAction

 __all__ = (

@@ -11,6 +11,7 @@ __all__ = (
     "BaseHandler",
     "BaseEvent",
     "BaseAction",
+    "ServerAction",
     "statics_icon",
     "AppAction"
 )
@@ -24,9 +24,9 @@ log = Logger().get_logger(__name__)

 # Current schemas for avalon types
 EntitySchemas = {
-    "project": "avalon-core:project-2.1",
-    "asset": "avalon-core:asset-3.0",
-    "config": "avalon-core:config-1.1"
+    "project": "pype:project-2.1",
+    "asset": "pype:asset-3.0",
+    "config": "pype:config-1.1"
 }

 # Group name of custom attributes
@@ -103,15 +103,40 @@ def get_pype_attr(session, split_hierarchical=True):
     return custom_attributes


-def from_dict_to_set(data):
+def from_dict_to_set(data, is_project):
     """
     Converts 'data' into the $set part of a MongoDB update command.
     Sets new or modified keys.
+    Tasks are updated as a whole, not per task (a change in any of the
+    tasks results in a full update of "tasks" from Ftrack).
     Args:
-        data: (dictionary) - up-to-date data from Ftrack
+        data (dictionary): up-to-date data from Ftrack
+        is_project (boolean): true for a project entity

     Returns:
         (dictionary) - { "$set" : "{..}"}
     """
+    not_set = object()
+    task_changes = not_set
+    if (
+        is_project
+        and "config" in data
+        and "tasks" in data["config"]
+    ):
+        task_changes = data["config"].pop("tasks")
+        task_changes_key = "config.tasks"
+        if not data["config"]:
+            data.pop("config")
+    elif (
+        not is_project
+        and "data" in data
+        and "tasks" in data["data"]
+    ):
+        task_changes = data["data"].pop("tasks")
+        task_changes_key = "data.tasks"
+        if not data["data"]:
+            data.pop("data")
+
     result = {"$set": {}}
     dict_queue = queue.Queue()
     dict_queue.put((None, data))
@@ -128,6 +153,9 @@ def from_dict_to_set(data):
             result["$set"][new_key] = value
             continue
         dict_queue.put((new_key, value))

+    if task_changes is not not_set and task_changes_key:
+        result["$set"][task_changes_key] = task_changes
     return result
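A worked example of what from_dict_to_set now produces for a non-project entity, assuming the queue-based walk flattens nested dictionaries into dotted keys; the input values are invented:

changes = {
    "name": "sh010",
    "data": {
        "frameStart": 1001,
        "tasks": {"animation": {"type": "Animation"}},
    },
}
result = from_dict_to_set(changes, is_project=False)
# Expected, roughly:
# {"$set": {
#     "name": "sh010",
#     "data.frameStart": 1001,
#     "data.tasks": {"animation": {"type": "Animation"}},
# }}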
@@ -659,7 +687,7 @@ class SyncEntitiesFactory:
             # Tasks must be checked too
             for task in entity_dict["tasks"].items():
                 task_name, task = task
-                passed = task_name
+                passed = task_names.get(task_name)
                 if passed is None:
                     passed = check_regex(
                         task_name, "task", schema_patterns=_schema_patterns
@@ -731,7 +759,7 @@ class SyncEntitiesFactory:
             for id in ids:
                 if id not in self.entities_dict:
                     continue
-                self.entities_dict[id]["tasks"].remove(name)
+                self.entities_dict[id]["tasks"].pop(name)
                 ent_path = self.get_ent_path(id)
                 self.log.warning(failed_regex_msg.format(
                     "/".join([ent_path, name])
@@ -1680,6 +1708,18 @@ class SyncEntitiesFactory:
                 self.updates[avalon_id]
             )

+        # double check changes in tasks, some task could be renamed or
+        # deleted in Ftrack - not captured otherwise
+        final_entity = self.entities_dict[ftrack_id]["final_entity"]
+        if final_entity["data"].get("tasks", {}) != \
+                avalon_entity["data"].get("tasks", {}):
+            if "data" not in self.updates[avalon_id]:
+                self.updates[avalon_id]["data"] = {}
+
+            self.updates[avalon_id]["data"]["tasks"] = (
+                final_entity["data"]["tasks"]
+            )
+
     def synchronize(self):
         self.log.debug("* Synchronization begins")
         avalon_project_id = self.ftrack_avalon_mapper.get(self.ft_project_id)
@@ -2027,15 +2067,20 @@ class SyncEntitiesFactory:
             self._changeability_by_mongo_id[mongo_id] = is_changeable

     def update_entities(self):
+        """
+        Runs changes converted to "$set" queries in bulk.
+        """
         mongo_changes_bulk = []
         for mongo_id, changes in self.updates.items():
-            filter = {"_id": ObjectId(mongo_id)}
-            change_data = from_dict_to_set(changes)
+            mongo_id = ObjectId(mongo_id)
+            is_project = mongo_id == self.avalon_project_id
+            change_data = from_dict_to_set(changes, is_project)
+
+            filter = {"_id": mongo_id}
             mongo_changes_bulk.append(UpdateOne(filter, change_data))
         if not mongo_changes_bulk:
             # TODO LOG
             return
+        log.debug("mongo_changes_bulk:: {}".format(mongo_changes_bulk))
         self.dbcon.bulk_write(mongo_changes_bulk)

     def reload_parents(self, hierarchy_changing_ids):
@@ -2107,6 +2152,18 @@ class SyncEntitiesFactory:
             )

     def compare_dict(self, dict_new, dict_old, _ignore_keys=[]):
+        """
+        Recursively compares and lists changes between the dictionaries
+        'dict_new' and 'dict_old'.
+        Keys in '_ignore_keys' are skipped and not compared.
+        Args:
+            dict_new (dictionary):
+            dict_old (dictionary):
+            _ignore_keys (list):
+
+        Returns:
+            (dictionary) of new or updated keys and their values
+        """
         # _ignore_keys may be used for nested keys like "data.visualParent"
         changes = {}
         ignore_keys = []
@@ -2148,6 +2205,18 @@ class SyncEntitiesFactory:
         return changes

     def merge_dicts(self, dict_new, dict_old):
+        """
+        Recursively applies all new or updated keys from 'dict_new'
+        to 'dict_old'.
+        Does not detect keys that no longer exist in 'dict_new'.
+        Args:
+            dict_new (dictionary): from Ftrack most likely
+            dict_old (dictionary): current in DB
+
+        Returns:
+            (dictionary) of applied changes to the original dictionary
+        """
         for key, value in dict_new.items():
             if key not in dict_old:
                 dict_old[key] = value
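The two helpers complement each other: compare_dict extracts only the differing branch, and merge_dicts folds it back into the stored document. An invented-data illustration (method receivers omitted for brevity):

dict_old = {"data": {"fps": 24, "frameStart": 1001}}
dict_new = {"data": {"fps": 25, "frameStart": 1001}}

# compare_dict(dict_new, dict_old) would report only the changed branch:
# {"data": {"fps": 25}}
# merge_dicts(that_result, dict_old) then applies it in place, leaving
# keys absent from the changes untouched.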
@@ -195,3 +195,82 @@ class BaseAction(BaseHandler):
         ).format(str(type(result))))

         return result


+class ServerAction(BaseAction):
+    """Action class meant to be used on an event server.
+
+    Unlike `BaseAction`, roles are not checked on register but on discover.
+    For the same reason, register is modified to not filter topics by
+    username.
+    """
+
+    def __init__(self, *args, **kwargs):
+        if not self.role_list:
+            self.role_list = set()
+        else:
+            self.role_list = set(
+                role_name.lower()
+                for role_name in self.role_list
+            )
+        super(ServerAction, self).__init__(*args, **kwargs)
+
+    def _register_role_check(self):
+        # Skip the register role check.
+        return
+
+    def _discover(self, event):
+        """Check discover availability for the user."""
+        if not self._check_user_discover(event):
+            return
+        return super(ServerAction, self)._discover(event)
+
+    def _check_user_discover(self, event):
+        """Should the action be discovered for the user requesting it."""
+        if not self.role_list:
+            return True
+
+        user_entity = self._get_user_entity(event)
+        if not user_entity:
+            return False
+
+        for role in user_entity["user_security_roles"]:
+            lowered_role = role["security_role"]["name"].lower()
+            if lowered_role in self.role_list:
+                return True
+        return False
+
+    def _get_user_entity(self, event):
+        """Query the user entity from the event."""
+        not_set = object()
+
+        # Check if the user is already stored in event data
+        user_entity = event["data"].get("user_entity", not_set)
+        if user_entity is not_set:
+            # Query the user entity from the event
+            user_info = event.get("source", {}).get("user", {})
+            user_id = user_info.get("id")
+            username = user_info.get("username")
+            if user_id:
+                user_entity = self.session.query(
+                    "User where id is {}".format(user_id)
+                ).first()
+            if not user_entity and username:
+                user_entity = self.session.query(
+                    "User where username is {}".format(username)
+                ).first()
+            event["data"]["user_entity"] = user_entity
+
+        return user_entity
+
+    def register(self):
+        """Register subscriptions to the Ftrack event hub."""
+        self.session.event_hub.subscribe(
+            "topic=ftrack.action.discover",
+            self._discover,
+            priority=self.priority
+        )
+
+        launch_subscription = (
+            "topic=ftrack.action.launch and data.actionIdentifier={0}"
+        ).format(self.identifier)
+        self.session.event_hub.subscribe(launch_subscription, self._launch)
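With subscription and role filtering hoisted into ServerAction, a server-side action reduces to metadata plus its own discover/launch logic, as the two refactored actions above show. A hypothetical minimal subclass (identifier, label, and behavior invented):

from pype.modules.ftrack.lib import ServerAction


class ReportAction(ServerAction):
    """Hypothetical server-side action restricted by role."""

    identifier = "report.action"
    label = "Generate Report"
    role_list = {"Administrator", "Project Manager"}

    def discover(self, session, entities, event):
        # Role checks already ran in ServerAction._discover; only
        # selection-specific validation remains here.
        return bool(entities)

    def launch(self, session, entities, event):
        return {"success": True, "message": "Report queued."}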
@@ -35,6 +35,7 @@ class BaseHandler(object):
     type = 'No-type'
     ignore_me = False
     preactions = []
+    role_list = []

     def __init__(self, session, plugins_presets=None):
         '''Expects a ftrack_api.Session instance'''
@@ -148,20 +149,27 @@ class BaseHandler(object):
     def reset_session(self):
         self.session.reset()

+    def _register_role_check(self):
+        if not self.role_list or not isinstance(self.role_list, (list, tuple)):
+            return
+
+        user_entity = self.session.query(
+            "User where username is \"{}\"".format(self.session.api_user)
+        ).one()
+        available = False
+        lowercase_rolelist = [
+            role_name.lower()
+            for role_name in self.role_list
+        ]
+        for role in user_entity["user_security_roles"]:
+            if role["security_role"]["name"].lower() in lowercase_rolelist:
+                available = True
+                break
+        if available is False:
+            raise MissingPermision
+
     def _preregister(self):
-        if hasattr(self, "role_list") and len(self.role_list) > 0:
-            username = self.session.api_user
-            user = self.session.query(
-                'User where username is "{}"'.format(username)
-            ).one()
-            available = False
-            lowercase_rolelist = [x.lower() for x in self.role_list]
-            for role in user['user_security_roles']:
-                if role['security_role']['name'].lower() in lowercase_rolelist:
-                    available = True
-                    break
-            if available is False:
-                raise MissingPermision
+        self._register_role_check()

         # Custom validations
         result = self.preregister()
@@ -172,12 +180,11 @@ class BaseHandler(object):
             ).format(self.__class__.__name__))
             return

-        if result is True:
-            return
-        msg = None
-        if isinstance(result, str):
-            msg = result
-        raise PreregisterException(msg)
+        if result is not True:
+            msg = None
+            if isinstance(result, str):
+                msg = result
+            raise PreregisterException(msg)

     def preregister(self):
         '''
@@ -154,6 +154,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
         # Create copy with ftrack.unmanaged location if thumb or prev
         if comp.get('thumbnail') or comp.get('preview') \
                 or ("preview" in comp.get('tags', [])) \
+                or ("review" in comp.get('tags', [])) \
                 or ("thumbnail" in comp.get('tags', [])):
             unmanaged_loc = self.get_ftrack_location(
                 'ftrack.unmanaged', ft_session
@@ -142,11 +142,10 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             existing_tasks.append(child['name'].lower())
             # existing_tasks.append(child['type']['name'])

-        for task in tasks:
-            task_name = next(iter(task))
-            task_type = task[task_name]["type"]
+        for task_name in tasks:
+            task_type = tasks[task_name]["type"]
             if task_name.lower() in existing_tasks:
-                print("Task {} already exists".format(task))
+                print("Task {} already exists".format(task_name))
                 continue
             tasks_to_create.append((task_name, task_type))
@@ -102,11 +102,10 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
             new_tasks = data.pop("tasks", {})
             if "tasks" not in cur_entity_data and not new_tasks:
                 continue
-            for task in new_tasks:
-                task_name = next(iter(task))
+            for task_name in new_tasks:
                 if task_name in cur_entity_data["tasks"].keys():
                     continue
-                cur_entity_data["tasks"][task_name] = task[task_name]
+                cur_entity_data["tasks"][task_name] = new_tasks[task_name]
             cur_entity_data.update(data)
             data = cur_entity_data
         else:
53  pype/plugins/harmony/publish/collect_scene.py  Normal file
@@ -0,0 +1,53 @@
# -*- coding: utf-8 -*-
"""Collect scene data."""
import os

import pyblish.api
from avalon import harmony


class CollectScene(pyblish.api.ContextPlugin):
    """Collect basic scene information."""

    label = "Scene Data"
    order = pyblish.api.CollectorOrder
    hosts = ["harmony"]

    def process(self, context):

        sig = harmony.signature()
        func = """function %s()
        {
            return [
                about.getApplicationPath(),
                scene.currentProjectPath(),
                scene.currentScene(),
                scene.getFrameRate(),
                scene.getStartFrame(),
                scene.getStopFrame(),
                sound.getSoundtrackAll().path(),
                scene.defaultResolutionX(),
                scene.defaultResolutionY()
            ]
        }
        %s
        """ % (sig, sig)
        result = harmony.send(
            {"function": func, "args": []}
        )["result"]

        context.data["applicationPath"] = result[0]
        context.data["scenePath"] = os.path.join(
            result[1], result[2] + ".xstage")
        context.data["frameRate"] = result[3]
        context.data["frameStart"] = result[4]
        context.data["frameEnd"] = result[5]
        context.data["audioPath"] = result[6]
        context.data["resolutionWidth"] = result[7]
        context.data["resolutionHeight"] = result[8]

        all_nodes = harmony.send(
            {"function": "node.subNodes", "args": ["Top"]}
        )["result"]

        context.data["allNodes"] = all_nodes
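collect_scene.py relies on the signature pattern used throughout the Harmony integration: harmony.signature() supplies a collision-free function name that is spliced in twice, once to declare the QtScript function and once as the expression that send() evaluates and calls with the given args. A stripped-down sketch of the round-trip (the script body is invented; this runs only against a live Harmony session):

from avalon import harmony

sig = harmony.signature()  # unique function name for this call
func = """function %s(message)
{
    return message + " from Harmony";
}
%s
""" % (sig, sig)

# send() executes the declared function with the given args and
# returns its value under the "result" key.
reply = harmony.send({"function": func, "args": ["hello"]})["result"]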
@@ -21,30 +21,17 @@ class ExtractRender(pyblish.api.InstancePlugin):

     def process(self, instance):
-        # Collect scene data.
-        sig = harmony.signature()
-        func = """function %s(write_node)
-        {
-            return [
-                about.getApplicationPath(),
-                scene.currentProjectPath(),
-                scene.currentScene(),
-                scene.getFrameRate(),
-                scene.getStartFrame(),
-                scene.getStopFrame(),
-                sound.getSoundtrackAll().path()
-            ]
-        }
-        %s
-        """ % (sig, sig)
-        result = harmony.send(
-            {"function": func, "args": [instance[0]]}
-        )["result"]
-        application_path = result[0]
-        scene_path = os.path.join(result[1], result[2] + ".xstage")
-        frame_rate = result[3]
-        frame_start = result[4]
-        frame_end = result[5]
-        audio_path = result[6]
-
+        application_path = instance.context.data.get("applicationPath")
+        scene_path = instance.context.data.get("scenePath")
+        frame_rate = instance.context.data.get("frameRate")
+        frame_start = instance.context.data.get("frameStart")
+        frame_end = instance.context.data.get("frameEnd")
+        audio_path = instance.context.data.get("audioPath")

         if audio_path and os.path.exists(audio_path):
             self.log.info(f"Using audio from {audio_path}")
             instance.data["audio"] = [{"filename": audio_path}]

         instance.data["fps"] = frame_rate
@@ -57,7 +44,7 @@ class ExtractRender(pyblish.api.InstancePlugin):
         }
         %s
         """ % (sig, sig)
-        result = harmony.send(
+        harmony.send(
             {
                 "function": func,
                 "args": [instance[0], path + "/" + instance.data["name"]]
@@ -67,6 +54,7 @@ class ExtractRender(pyblish.api.InstancePlugin):

         # Execute rendering. Ignoring the error code because Harmony
         # always returns one.
+        self.log.info(f"running {application_path} -batch {scene_path}")
         proc = subprocess.Popen(
             [application_path, "-batch", scene_path],
             stdout=subprocess.PIPE,
@@ -74,12 +62,16 @@ class ExtractRender(pyblish.api.InstancePlugin):
             stdin=subprocess.PIPE
         )
         output, error = proc.communicate()
+        self.log.info("Click on the line below to see more details.")
         self.log.info(output.decode("utf-8"))

         # Collect rendered files.
-        self.log.debug(path)
+        self.log.debug(f"collecting from: {path}")
         files = os.listdir(path)
-        self.log.debug(files)
+        assert files, (
+            "No rendered files found, render failed."
+        )
+        self.log.debug(f"files there: {files}")
         collections, remainder = clique.assemble(files, minimum_items=1)
         assert not remainder, (
             "There should not be a remainder for {0}: {1}".format(
@@ -15,9 +15,9 @@ class ExtractTemplate(pype.api.Extractor):

     def process(self, instance):
         staging_dir = self.staging_dir(instance)
-        filepath = os.path.join(staging_dir, "{}.tpl".format(instance.name))
+        filepath = os.path.join(staging_dir, f"{instance.name}.tpl")

-        self.log.info("Outputting template to {}".format(staging_dir))
+        self.log.info(f"Outputting template to {staging_dir}")

         dependencies = []
         self.get_dependencies(instance[0], dependencies)
@@ -30,9 +30,7 @@ class ExtractTemplate(pype.api.Extractor):
         unique_backdrops = [backdrops[x] for x in set(backdrops.keys())]

         # Get non-connected nodes within backdrops.
-        all_nodes = harmony.send(
-            {"function": "node.subNodes", "args": ["Top"]}
-        )["result"]
+        all_nodes = instance.context.data.get("allNodes")
         for node in [x for x in all_nodes if x not in dependencies]:
             within_unique_backdrops = bool(
                 [x for x in self.get_backdrops(node) if x in unique_backdrops]
@@ -52,15 +50,15 @@ class ExtractTemplate(pype.api.Extractor):
         # Prep representation.
         os.chdir(staging_dir)
         shutil.make_archive(
-            "{}".format(instance.name),
+            f"{instance.name}",
             "zip",
-            os.path.join(staging_dir, "{}.tpl".format(instance.name))
+            os.path.join(staging_dir, f"{instance.name}.tpl")
         )

         representation = {
             "name": "tpl",
             "ext": "zip",
-            "files": "{}.zip".format(instance.name),
+            "files": f"{instance.name}.zip",
             "stagingDir": staging_dir
         }
         instance.data["representations"] = [representation]
@@ -1,5 +1,8 @@
+# -*- coding: utf-8 -*-
+"""Extract work file."""
 import os
 import shutil
+from zipfile import ZipFile

 import pype.api
 from avalon import harmony
@@ -14,13 +17,12 @@ class ExtractWorkfile(pype.api.Extractor):
     families = ["workfile"]

     def process(self, instance):
+        """Plugin entry point."""
         # Export template.
         backdrops = harmony.send(
             {"function": "Backdrop.backdrops", "args": ["Top"]}
         )["result"]
-        nodes = harmony.send(
-            {"function": "node.subNodes", "args": ["Top"]}
-        )["result"]
+        nodes = instance.context.data.get("allNodes")
         staging_dir = self.staging_dir(instance)
         filepath = os.path.join(staging_dir, "{}.tpl".format(instance.name))
@@ -29,15 +31,19 @@ class ExtractWorkfile(pype.api.Extractor):
         # Prep representation.
         os.chdir(staging_dir)
         shutil.make_archive(
-            "{}".format(instance.name),
+            f"{instance.name}",
             "zip",
-            os.path.join(staging_dir, "{}.tpl".format(instance.name))
+            os.path.join(staging_dir, f"{instance.name}.tpl")
         )
+        # Check that the archive is ok
+        with ZipFile(os.path.basename(f"{instance.name}.zip")) as zr:
+            if zr.testzip() is not None:
+                raise Exception("File archive is corrupted.")

         representation = {
             "name": "tpl",
             "ext": "zip",
-            "files": "{}.zip".format(instance.name),
+            "files": f"{instance.name}.zip",
             "stagingDir": staging_dir
         }
         instance.data["representations"] = [representation]
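ZipFile.testzip() reads every archive member and returns the name of the first corrupt one, or None when all CRC checks pass, which is why the extractor above treats any non-None result as a failure. The same guard as a standalone sketch (the path is invented):

from zipfile import ZipFile


def assert_zip_ok(path):
    """Raise if any member of the archive at `path` fails its CRC check."""
    with ZipFile(path) as archive:
        bad_member = archive.testzip()  # None means the archive is intact
        if bad_member is not None:
            raise Exception(f"File archive is corrupted: {bad_member}")


assert_zip_ok("workfile.zip")  # invented path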
@@ -10,7 +10,6 @@ class ValidateAudio(pyblish.api.InstancePlugin):

     If you are sure that you want to send the render without audio, you can
     disable this validator before clicking on "publish"
-
     """

     order = pyblish.api.ValidatorOrder
@@ -1,3 +1,6 @@
+# -*- coding: utf-8 -*-
+"""Validate scene settings."""
+import os
 import json

 import pyblish.api
@@ -14,9 +17,17 @@ class ValidateSceneSettingsRepair(pyblish.api.Action):
     on = "failed"

     def process(self, context, plugin):
+        """Repair action entry point."""
         pype.hosts.harmony.set_scene_settings(
             pype.hosts.harmony.get_asset_settings()
         )
+        if not os.path.exists(context.data["scenePath"]):
+            self.log.info("correcting scene name")
+            scene_dir = os.path.dirname(context.data["currentFile"])
+            scene_path = os.path.join(
+                scene_dir, os.path.basename(scene_dir) + ".xstage"
+            )
+            harmony.save_scene_as(scene_path)


 class ValidateSceneSettings(pyblish.api.InstancePlugin):
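The repair added above leans on the Harmony convention, implied by the code itself, that a scene is a directory whose main stage file carries the directory's name plus ".xstage". A tiny illustration with a made-up path:

    import os

    # Hypothetical scene location; not a path from this commit.
    scene_dir = "C:/projects/show/work/sc010"

    expected = os.path.join(
        scene_dir, os.path.basename(scene_dir) + ".xstage"
    )
    print(expected)  # C:/projects/show/work/sc010/sc010.xstage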
@@ -31,6 +42,7 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
     frame_check_filter = ["_ch_", "_pr_", "_intd_", "_extd_"]

     def process(self, instance):
+        """Plugin entry point."""
         expected_settings = pype.hosts.harmony.get_asset_settings()
         self.log.info(expected_settings)
@@ -46,20 +58,20 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
                for string in self.frame_check_filter):
             expected_settings.pop("frameEnd")

-        sig = harmony.signature()
-        func = """function %s()
-        {
-            return {
-                "fps": scene.getFrameRate(),
-                "frameStart": scene.getStartFrame(),
-                "frameEnd": scene.getStopFrame(),
-                "resolutionWidth": scene.defaultResolutionX(),
-                "resolutionHeight": scene.defaultResolutionY()
-            };
-        }
-        %s
-        """ % (sig, sig)
-        current_settings = harmony.send({"function": func})["result"]
+        # handle case where ftrack uses only two decimal places
+        # 23.976023976023978 vs. 23.98
+        fps = instance.context.data.get("frameRate")
+        if isinstance(instance.context.data.get("frameRate"), float):
+            fps = float(
+                "{:.2f}".format(instance.context.data.get("frameRate")))
+
+        current_settings = {
+            "fps": fps,
+            "frameStart": instance.context.data.get("frameStart"),
+            "frameEnd": instance.context.data.get("frameEnd"),
+            "resolutionWidth": instance.context.data.get("resolutionWidth"),
+            "resolutionHeight": instance.context.data.get("resolutionHeight"),
+        }

         invalid_settings = []
         for key, value in expected_settings.items():
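The fps handling introduced above exists because, per the hunk's own comment, ftrack stores rates with only two decimal places while the collected rate carries the full expansion; normalizing before comparison keeps the validator from flagging a false mismatch. A standalone sketch of that normalization:

    # NTSC film rate as collected vs. as ftrack stores it.
    collected_fps = 23.976023976023978
    expected_fps = 23.98

    # Same rounding the validator applies to float rates.
    if isinstance(collected_fps, float):
        collected_fps = float("{:.2f}".format(collected_fps))

    assert collected_fps == expected_fps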
@@ -74,3 +86,6 @@ class ValidateSceneSettings(pyblish.api.InstancePlugin):
             json.dumps(invalid_settings, sort_keys=True, indent=4)
         )
         assert not invalid_settings, msg
+        assert os.path.exists(instance.context.data.get("scenePath")), (
+            "Scene file not found (saved under wrong name)"
+        )
@@ -144,8 +144,9 @@ class CollectClips(api.ContextPlugin):
                 "family": "clip",
                 "families": [],
                 "handleStart": projectdata.get("handleStart", 0),
-                "handleEnd": projectdata.get("handleEnd", 0)})
+                "handleEnd": projectdata.get("handleEnd", 0),
+                "fps": context.data["fps"]
+            })
             instance = context.create_instance(**data)

             self.log.info("Created instance: {}".format(instance))
@@ -4,13 +4,14 @@ from pyblish import api
 class CollectFramerate(api.ContextPlugin):
     """Collect framerate from selected sequence."""

-    order = api.CollectorOrder + 0.01
+    order = api.CollectorOrder + 0.001
     label = "Collect Framerate"
     hosts = ["hiero"]

     def process(self, context):
         sequence = context.data["activeSequence"]
         context.data["fps"] = self.get_rate(sequence)
+        self.log.info("Framerate is collected: {}".format(context.data["fps"]))

     def get_rate(self, sequence):
         num, den = sequence.framerate().toRational()
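Two details are worth noting here: the order drop (0.01 to 0.001) presumably moves this collector ahead of CollectClips, which now reads context.data["fps"] (pyblish runs lower order values first), and toRational() hands back a numerator/denominator pair whose quotient is the float rate. Illustrative division (example values, not code from the commit):

    def rate_from_rational(num, den):
        # framerate().toRational() yields e.g. 24000/1001 for
        # the NTSC film rate; the float rate is the quotient.
        return num / den

    print(rate_from_rational(24000, 1001))  # 23.976023976023978
    print(rate_from_rational(25, 1))        # 25.0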
@@ -192,11 +192,12 @@ class CollectPlatesData(api.InstancePlugin):
             instance.data["representations"].append(
                 plates_mov_representation)

-        thumb_frame = instance.data["clipInH"] + (
-            (instance.data["clipOutH"] - instance.data["clipInH"]) / 2)
+        thumb_frame = instance.data["sourceInH"] + (
+            (instance.data["sourceOutH"] - instance.data["sourceInH"]) / 2)
         thumb_file = "{}_{}{}".format(head, thumb_frame, ".png")
         thumb_path = os.path.join(staging_dir, thumb_file)

+        self.log.debug("__ thumb_path: `{}`, frame: `{}`".format(
+            thumb_path, thumb_frame))
         thumbnail = item.thumbnail(thumb_frame).save(
             thumb_path,
             format='png'
@@ -142,8 +142,15 @@ class CollectReviews(api.InstancePlugin):
         staging_dir = os.path.dirname(
             source_path)

-        thumb_frame = instance.data["clipInH"] + (
-            (instance.data["clipOutH"] - instance.data["clipInH"]) / 2)
+        media_duration = instance.data.get("mediaDuration")
+        clip_duration_h = instance.data.get("clipDurationH")
+
+        if media_duration > clip_duration_h:
+            thumb_frame = instance.data["clipInH"] + (
+                (instance.data["clipOutH"] - instance.data["clipInH"]) / 2)
+        elif media_duration <= clip_duration_h:
+            thumb_frame = instance.data["sourceIn"] + (
+                (instance.data["sourceOut"] - instance.data["sourceIn"]) / 2)
         thumb_file = "{}_{}{}".format(head, thumb_frame, ".png")
         thumb_path = os.path.join(staging_dir, thumb_file)
         self.log.debug("__ thumb_path: {}".format(thumb_path))
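Both thumbnail computations above pick the middle frame of a range; the new branch merely chooses which range to bisect, depending on whether the media is longer than the clip with handles. The midpoint itself reduces to (illustrative values):

    def midpoint_frame(start, end):
        # Middle of a frame range, as used for both the
        # clipInH/clipOutH and sourceIn/sourceOut cases above.
        return start + (end - start) / 2

    print(midpoint_frame(1001, 1049))  # 1025.0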
@@ -43,7 +43,7 @@ class CollectShots(api.InstancePlugin):
             "{} - {} - tasks:{} - assetbuilds:{} - comments:{}".format(
                 data["asset"],
                 data["subset"],
-                [task.keys()[0] for task in data["tasks"]],
+                [task for task in data["tasks"]],
                 [x["name"] for x in data.get("assetbuilds", [])],
                 len(data.get("comments", []))
             )
@@ -13,7 +13,7 @@ class CollectClipTagTasks(api.InstancePlugin):
         # gets tags
         tags = instance.data["tags"]

-        tasks = list()
+        tasks = dict()
         for t in tags:
             t_metadata = dict(t["metadata"])
             t_family = t_metadata.get("tag.family", "")
@@ -22,7 +22,7 @@ class CollectClipTagTasks(api.InstancePlugin):
             if "task" in t_family:
                 t_task_name = t_metadata.get("tag.label", "")
                 t_task_type = t_metadata.get("tag.type", "")
-                tasks.append({t_task_name: {"type": t_task_type}})
+                tasks[t_task_name] = {"type": t_task_type}

         instance.data["tasks"] = tasks
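These two hunks, together with the CollectShots change above, switch the tasks container from a list of single-key dicts to one dict keyed by task name, so names fall out of plain iteration; the old form also needed task.keys()[0], which breaks on Python 3 where dict views are not subscriptable. A before/after sketch with made-up tag data:

    # Before: a list of single-key dicts.
    tasks_old = [{"compositing": {"type": "Compositing"}}]
    names_old = [list(task.keys())[0] for task in tasks_old]

    # After: one dict keyed by task name.
    tasks_new = {"compositing": {"type": "Compositing"}}
    names_new = [task for task in tasks_new]

    print(names_old == names_new)  # True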
@@ -73,19 +73,24 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
             # Add all nodes in group instances.
             if node.Class() == "Group":
-                # only alter families for render family
-                if "write" == families_ak:
-                    if node["render"].value():
-                        self.log.info("flagged for render")
-                        add_family = "{}.local".format("render")
-                        # dealing with local/farm rendering
-                        if node["render_farm"].value():
-                            self.log.info("adding render farm family")
-                            add_family = "{}.farm".format("render")
-                            instance.data["transfer"] = False
-                        families.append(add_family)
-                        if "render" in families:
-                            families.remove("render")
-                        family = "write"
+                if "write" in families_ak:
+                    target = node["render"].value()
+                    if target == "Use existing frames":
+                        # Local rendering
+                        self.log.info("flagged for no render")
+                        families.append("render")
+                    elif target == "Local":
+                        # Local rendering
+                        self.log.info("flagged for local render")
+                        families.append("{}.local".format("render"))
+                    elif target == "On farm":
+                        # Farm rendering
+                        self.log.info("flagged for farm render")
+                        instance.data["transfer"] = False
+                        families.append("{}.farm".format("render"))
+                    if "render" in families:
+                        families.remove("render")
+                    family = "write"

                 node.begin()
                 for i in nuke.allNodes():
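The rewrite above retires the pair of boolean knobs in favor of a single "render" knob whose string value selects the render target. The resulting family mapping can be condensed as follows (a sketch of the branch logic, not code from the commit):

    # Dropdown value -> family appended to the instance.
    RENDER_TARGETS = {
        "Use existing frames": "render",
        "Local": "render.local",
        "On farm": "render.farm",
    }

    def family_for_target(target):
        return RENDER_TARGETS.get(target)

    print(family_for_target("On farm"))  # render.farm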
@@ -1,31 +0,0 @@
-import pyblish.api
-
-
-class CollectWriteLegacy(pyblish.api.InstancePlugin):
-    """Collect legacy write nodes."""
-
-    order = pyblish.api.CollectorOrder + 0.0101
-    label = "Collect Write node Legacy"
-    hosts = ["nuke", "nukeassist"]
-
-    def process(self, instance):
-        self.log.info(instance[:])
-        node = instance[0]
-
-        if node.Class() not in ["Group", "Write"]:
-            return
-
-        family_knobs = ["ak:family", "avalon:family"]
-        test = [k for k in node.knobs().keys() if k in family_knobs]
-        self.log.info(test)
-
-        if len(test) == 1:
-            if "render" in node[test[0]].value():
-                self.log.info("render")
-                return
-
-        if "render" in node.knobs():
-            instance.data.update(
-                {"family": "write.legacy",
-                 "families": []}
-            )
@@ -6,81 +6,99 @@ import nuke
 from avalon import api

-import re
 import pyblish.api
+import pype.api
 from avalon.nuke import get_avalon_knob_data


-class RepairWriteLegacyAction(pyblish.api.Action):
-
-    label = "Repair"
-    icon = "wrench"
-    on = "failed"
-
-    def process(self, context, plugin):
-
-        # Get the errored instances
-        failed = []
-        for result in context.data["results"]:
-            if (result["error"] is not None and result["instance"] is not None
-                    and result["instance"] not in failed):
-                failed.append(result["instance"])
-
-        # Apply pyblish.logic to get the instances for the plug-in
-        instances = pyblish.api.instances_by_plugin(failed, plugin)
-
-        for instance in instances:
-            if "Write" in instance[0].Class():
-                data = toml.loads(instance[0]["avalon"].value())
-            else:
-                data = get_avalon_knob_data(instance[0])
-
-            self.log.info(data)
-
-            data["xpos"] = instance[0].xpos()
-            data["ypos"] = instance[0].ypos()
-            data["input"] = instance[0].input(0)
-            data["publish"] = instance[0]["publish"].value()
-            data["render"] = instance[0]["render"].value()
-            data["render_farm"] = instance[0]["render_farm"].value()
-            data["review"] = instance[0]["review"].value()
-
-            # nuke.delete(instance[0])
-
-            task = os.environ["AVALON_TASK"]
-            sanitized_task = re.sub('[^0-9a-zA-Z]+', '', task)
-            subset_name = "render{}Main".format(
-                sanitized_task.capitalize())
-
-            Create_name = "CreateWriteRender"
-
-            creator_plugin = None
-            for Creator in api.discover(api.Creator):
-                if Creator.__name__ != Create_name:
-                    continue
-
-                creator_plugin = Creator
-
-            # return api.create()
-            creator_plugin(data["subset"], data["asset"]).process()
-
-            node = nuke.toNode(data["subset"])
-            node.setXYpos(data["xpos"], data["ypos"])
-            node.setInput(0, data["input"])
-            node["publish"].setValue(data["publish"])
-            node["render"].setValue(data["render"])
-            node["render_farm"].setValue(data["render_farm"])
-            node["review"].setValue(data["review"])
-
-
 class ValidateWriteLegacy(pyblish.api.InstancePlugin):
     """Validate legacy write nodes."""

     order = pyblish.api.ValidatorOrder
     optional = True
-    families = ["write.legacy"]
-    label = "Write Legacy"
+    families = ["write"]
+    label = "Validate Write Legacy"
     hosts = ["nuke"]
-    actions = [RepairWriteLegacyAction]
+    actions = [pype.api.RepairAction]

     def process(self, instance):
+        node = instance[0]
         msg = "Clean up legacy write node \"{}\"".format(instance)
-        assert False, msg
+
+        if node.Class() not in ["Group", "Write"]:
+            return
+
+        # test avalon knobs
+        family_knobs = ["ak:family", "avalon:family"]
+        family_test = [k for k in node.knobs().keys() if k in family_knobs]
+        self.log.debug("_ family_test: {}".format(family_test))
+
+        # test if render in family test knob
+        # and only one item should be available
+        assert len(family_test) != 1, msg
+        assert "render" in node[family_test[0]].value(), msg
+
+        # test if `file` knob in node, this way old
+        # non-group-node write could be detected
+        assert "file" in node.knobs(), msg
+
+        # check if write node is having old render targeting
+        assert "render_farm" in node.knobs(), msg
+
+    @classmethod
+    def repair(cls, instance):
+        node = instance[0]
+
+        if "Write" in node.Class():
+            data = toml.loads(node["avalon"].value())
+        else:
+            data = get_avalon_knob_data(node)
+
+        # collect reusable data
+        data["XYpos"] = (node.xpos(), node.ypos())
+        data["input"] = node.input(0)
+        data["publish"] = node["publish"].value()
+        data["render"] = node["render"].value()
+        data["render_farm"] = node["render_farm"].value()
+        data["review"] = node["review"].value()
+        data["use_limit"] = node["use_limit"].value()
+        data["first"] = node["first"].value()
+        data["last"] = node["last"].value()
+
+        family = data["family"]
+        cls.log.debug("_ orig node family: {}".format(family))
+
+        # define what family of write node should be recreated
+        if family == "render":
+            Create_name = "CreateWriteRender"
+        elif family == "prerender":
+            Create_name = "CreateWritePrerender"
+
+        # get appropriate plugin class
+        creator_plugin = None
+        for Creator in api.discover(api.Creator):
+            if Creator.__name__ != Create_name:
+                continue
+
+            creator_plugin = Creator
+
+        # delete the legacy write node
+        nuke.delete(node)
+
+        # create write node with creator
+        new_node_name = data["subset"]
+        creator_plugin(new_node_name, data["asset"]).process()
+
+        node = nuke.toNode(new_node_name)
+        node.setXYpos(*data["XYpos"])
+        node.setInput(0, data["input"])
+        node["publish"].setValue(data["publish"])
+        node["review"].setValue(data["review"])
+        node["use_limit"].setValue(data["use_limit"])
+        node["first"].setValue(data["first"])
+        node["last"].setValue(data["last"])
+
+        # recreate render targets
+        if data["render"]:
+            node["render"].setValue("Local")
+        if data["render_farm"]:
+            node["render"].setValue("On farm")
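At the tail of repair(), the legacy boolean pair is folded into the new target knob; because the "On farm" assignment runs last, farm wins when both flags were set. Condensed (a sketch, not the commit's code):

    def target_from_legacy(render, render_farm):
        # Mirror of the two setValue() calls above: farm takes
        # precedence because it is assigned last.
        target = None
        if render:
            target = "Local"
        if render_farm:
            target = "On farm"
        return target

    print(target_from_legacy(True, True))  # On farm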
BIN
pype/resources/app_icons/tvpaint.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 131 KiB