Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit c7197c8d57: Merge branch 'develop' into feature/fix_assembly_loading
214 changed files with 18660 additions and 8128 deletions
pype/blender/__init__.py  (new file, 34 lines)
@@ -0,0 +1,34 @@

import logging
from pathlib import Path
import os

import bpy

from avalon import api as avalon
from pyblish import api as pyblish

from .plugin import AssetLoader

logger = logging.getLogger("pype.blender")

PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

PUBLISH_PATH = os.path.join(PLUGINS_DIR, "blender", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "blender", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "blender", "create")


def install():
    """Install Blender configuration for Avalon."""
    pyblish.register_plugin_path(str(PUBLISH_PATH))
    avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH))
    avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH))


def uninstall():
    """Uninstall Blender configuration for Avalon."""
    pyblish.deregister_plugin_path(str(PUBLISH_PATH))
    avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH))
    avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH))
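
A minimal usage sketch of the module above, assuming the usual Avalon bootstrap
convention in a Blender startup script; the snippet itself and the direct call to
install() are assumptions for illustration, not part of this commit:

    # assumed startup snippet, not from this commit
    from avalon import api as avalon
    from pype import blender as config

    # install() registers the publish/load/create plugin paths defined above,
    # so Pyblish and the Avalon Loader/Creator tools can discover the plugins.
    config.install()

    # Loaders (AssetLoader sub-classes placed in LOAD_PATH) are then picked up
    # by avalon's plugin discovery, e.g. avalon.discover(avalon.Loader).
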
pype/blender/action.py  (new file, 47 lines)
@@ -0,0 +1,47 @@

import bpy

import pyblish.api

from ..action import get_errored_instances_from_context


class SelectInvalidAction(pyblish.api.Action):
    """Select invalid objects in Blender when a publish plug-in failed."""
    label = "Select Invalid"
    on = "failed"
    icon = "search"

    def process(self, context, plugin):
        errored_instances = get_errored_instances_from_context(context)
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid nodes...")
        invalid = list()
        for instance in instances:
            invalid_nodes = plugin.get_invalid(instance)
            if invalid_nodes:
                if isinstance(invalid_nodes, (list, tuple)):
                    invalid.extend(invalid_nodes)
                else:
                    self.log.warning(
                        "Failed plug-in doesn't have any selectable objects."
                    )

        bpy.ops.object.select_all(action='DESELECT')

        # Make sure every node is only processed once
        invalid = list(set(invalid))
        if not invalid:
            self.log.info("No invalid nodes found.")
            return

        invalid_names = [obj.name for obj in invalid]
        self.log.info(
            "Selecting invalid objects: %s", ", ".join(invalid_names)
        )
        # Select the objects and also make the last one the active object.
        for obj in invalid:
            obj.select_set(True)

        bpy.context.view_layer.objects.active = invalid[-1]
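
The action above is attached to failing validator plug-ins and relies on each
plug-in exposing a get_invalid() classmethod. A short sketch of how a validator
might wire it up, following the usual Pyblish pattern; the validator class and
its naming rule are hypothetical, not code from this commit:

    import pyblish.api
    from pype.blender.action import SelectInvalidAction

    class ValidateObjectNamesLowercase(pyblish.api.InstancePlugin):
        """Hypothetical validator showing how SelectInvalidAction is attached."""
        order = pyblish.api.ValidatorOrder
        hosts = ["blender"]
        families = ["model"]
        label = "Validate Object Names Are Lowercase"
        actions = [SelectInvalidAction]

        @classmethod
        def get_invalid(cls, instance):
            # The action calls this to know which objects to select in the viewport.
            return [obj for obj in instance if obj.name != obj.name.lower()]

        def process(self, instance):
            invalid = self.get_invalid(instance)
            if invalid:
                raise RuntimeError(
                    "Objects with uppercase names: {}".format(
                        ", ".join(obj.name for obj in invalid)
                    )
                )
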
pype/blender/plugin.py  (new file, 135 lines)
@@ -0,0 +1,135 @@

"""Shared functionality for pipeline plugins for Blender."""

from pathlib import Path
from typing import Dict, List, Optional

import bpy

from avalon import api

VALID_EXTENSIONS = [".blend"]


def model_name(asset: str, subset: str, namespace: Optional[str] = None) -> str:
    """Return a consistent name for a model asset."""
    name = f"{asset}_{subset}"
    if namespace:
        name = f"{namespace}:{name}"
    return name


class AssetLoader(api.Loader):
    """A basic AssetLoader for Blender.

    This will implement the basic logic for linking/appending assets
    into another Blender scene.

    The `update` method should be implemented by a sub-class, because
    it's different for different types (e.g. model, rig, animation,
    etc.).
    """

    @staticmethod
    def _get_instance_empty(instance_name: str, nodes: List) -> Optional[bpy.types.Object]:
        """Get the 'instance empty' that holds the collection instance."""
        for node in nodes:
            if not isinstance(node, bpy.types.Object):
                continue
            if (node.type == 'EMPTY' and node.instance_type == 'COLLECTION'
                    and node.instance_collection and node.name == instance_name):
                return node
        return None

    @staticmethod
    def _get_instance_collection(instance_name: str, nodes: List) -> Optional[bpy.types.Collection]:
        """Get the 'instance collection' (container) for this asset."""
        for node in nodes:
            if not isinstance(node, bpy.types.Collection):
                continue
            if node.name == instance_name:
                return node
        return None

    @staticmethod
    def _get_library_from_container(container: bpy.types.Collection) -> bpy.types.Library:
        """Find the library file from the container.

        It traverses the objects from this collection, checks that they all
        come from a single library and returns that library.

        Warning:
            No nested collections are supported at the moment!
        """
        assert not container.children, "Nested collections are not supported."
        assert container.objects, "The collection doesn't contain any objects."
        libraries = set()
        for obj in container.objects:
            assert obj.library, f"'{obj.name}' is not linked."
            libraries.add(obj.library)

        assert len(libraries) == 1, f"'{container.name}' contains objects from more than 1 library."

        return list(libraries)[0]

    def process_asset(self,
                      context: dict,
                      name: str,
                      namespace: Optional[str] = None,
                      options: Optional[Dict] = None):
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")

    def load(self,
             context: dict,
             name: Optional[str] = None,
             namespace: Optional[str] = None,
             options: Optional[Dict] = None) -> Optional[bpy.types.Collection]:
        """Load asset via database

        Arguments:
            context: Full parenthood of representation to load
            name: Use pre-defined name
            namespace: Use pre-defined namespace
            options: Additional settings dictionary
        """
        # TODO (jasper): make it possible to add the asset several times by
        # just re-using the collection
        assert Path(self.fname).exists(), f"{self.fname} doesn't exist."

        self.process_asset(
            context=context,
            name=name,
            namespace=namespace,
            options=options,
        )

        # Only containerise if anything was loaded by the Loader.
        nodes = self[:]
        if not nodes:
            return None

        # Only containerise if it's not already a collection from a .blend file.
        representation = context["representation"]["name"]
        if representation != "blend":
            from avalon.blender.pipeline import containerise
            return containerise(
                name=name,
                namespace=namespace,
                nodes=nodes,
                context=context,
                loader=self.__class__.__name__,
            )

        asset = context["asset"]["name"]
        subset = context["subset"]["name"]
        instance_name = model_name(asset, subset, namespace)

        return self._get_instance_collection(instance_name, nodes)

    def update(self, container: Dict, representation: Dict):
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")

    def remove(self, container: Dict) -> bool:
        """Must be implemented by a sub-class"""
        raise NotImplementedError("Must be implemented by a sub-class")
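
A sketch of what a concrete sub-class of AssetLoader might look like: it fills
in process_asset() by linking collections from the published .blend library and
leaves load() to the base class. The BlendModelLoader name, its
families/representations values and the linking details are assumptions for
illustration, not code from this commit:

    from pathlib import Path
    from typing import Dict, Optional

    import bpy

    from pype.blender.plugin import AssetLoader, model_name


    class BlendModelLoader(AssetLoader):
        """Hypothetical loader linking a published model collection."""

        families = ["model"]
        representations = ["blend"]

        def process_asset(self, context: dict, name: str,
                          namespace: Optional[str] = None,
                          options: Optional[Dict] = None):
            libpath = Path(self.fname)
            asset = context["asset"]["name"]
            subset = context["subset"]["name"]
            container_name = model_name(asset, subset, namespace)

            # Link collections from the library file into the current file.
            with bpy.data.libraries.load(
                str(libpath), link=True, relative=False
            ) as (data_from, data_to):
                data_to.collections = [
                    coll_name for coll_name in data_from.collections
                    if coll_name == container_name
                ]

            nodes = []
            for collection in data_to.collections:
                if collection is None:
                    continue
                # Make the linked collection visible in the scene.
                bpy.context.scene.collection.children.link(collection)
                nodes.append(collection)

            # The base class reads loaded nodes back via `self[:]` in load().
            self[:] = nodes
            return nodes
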
@@ -5,7 +5,8 @@ import json
import arrow
import logging
import ftrack_api
from pype.ftrack import BaseAction, get_ca_mongoid
from pype.ftrack import BaseAction
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
from pypeapp import config
from ftrack_api.exception import NoResultFoundError


@@ -171,7 +172,6 @@ class CustomAttributes(BaseAction):

    def avalon_mongo_id_attributes(self, session):
        # Attribute Name and Label
        cust_attr_name = get_ca_mongoid()
        cust_attr_label = 'Avalon/Mongo Id'

        # Types that don't need object_type_id

@@ -207,7 +207,7 @@ class CustomAttributes(BaseAction):
        group = self.get_group('avalon')

        data = {}
        data['key'] = cust_attr_name
        data['key'] = CustAttrIdKey
        data['label'] = cust_attr_label
        data['type'] = custom_attribute_type
        data['default'] = ''

@@ -142,6 +142,13 @@ class CreateProjectFolders(BaseAction):
        else:
            data['project_id'] = parent['project']['id']

        existing_entity = self.session.query((
            "TypedContext where name is \"{}\" and "
            "parent_id is \"{}\" and project_id is \"{}\""
        ).format(name, data['parent_id'], data['project_id'])).first()
        if existing_entity:
            return existing_entity

        new_ent = self.session.create(ent_type, data)
        self.session.commit()
        return new_ent

@ -1,354 +1,606 @@
|
|||
import os
|
||||
import sys
|
||||
import logging
|
||||
import collections
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from queue import Queue
|
||||
|
||||
from bson.objectid import ObjectId
|
||||
import argparse
|
||||
import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class DeleteAsset(BaseAction):
|
||||
class DeleteAssetSubset(BaseAction):
|
||||
'''Edit meta data action.'''
|
||||
|
||||
#: Action identifier.
|
||||
identifier = 'delete.asset'
|
||||
identifier = "delete.asset.subset"
|
||||
#: Action label.
|
||||
label = 'Delete Asset/Subsets'
|
||||
label = "Delete Asset/Subsets"
|
||||
#: Action description.
|
||||
description = 'Removes from Avalon with all childs and asset from Ftrack'
|
||||
icon = '{}/ftrack/action_icons/DeleteAsset.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
description = "Removes from Avalon with all childs and asset from Ftrack"
|
||||
icon = "{}/ftrack/action_icons/DeleteAsset.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
#: Db
|
||||
db = DbConnector()
|
||||
role_list = ["Pypeclub", "Administrator", "Project Manager"]
|
||||
#: Db connection
|
||||
dbcon = DbConnector()
|
||||
|
||||
value = None
|
||||
splitter = {"type": "label", "value": "---"}
|
||||
action_data_by_id = {}
|
||||
asset_prefix = "asset:"
|
||||
subset_prefix = "subset:"
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
""" Validation """
|
||||
task_ids = []
|
||||
for ent_info in event["data"]["selection"]:
|
||||
entType = ent_info.get("entityType", "")
|
||||
if entType == "task":
|
||||
task_ids.append(ent_info["entityId"])
|
||||
|
||||
valid = ["task"]
|
||||
entityType = event["data"]["selection"][0].get("entityType", "")
|
||||
if entityType.lower() not in valid:
|
||||
return False
|
||||
|
||||
return True
|
||||
for entity in entities:
|
||||
ftrack_id = entity["id"]
|
||||
if ftrack_id not in task_ids:
|
||||
continue
|
||||
if entity.entity_type.lower() != "task":
|
||||
return True
|
||||
return False
|
||||
|
||||
def _launch(self, event):
|
||||
self.reset_session()
|
||||
try:
|
||||
self.db.install()
|
||||
args = self._translate_event(
|
||||
self.session, event
|
||||
)
|
||||
if "values" not in event["data"]:
|
||||
self.dbcon.install()
|
||||
return self._interface(self.session, *args)
|
||||
|
||||
interface = self._interface(
|
||||
self.session, *args
|
||||
)
|
||||
|
||||
confirmation = self.confirm_delete(
|
||||
True, *args
|
||||
)
|
||||
|
||||
if interface:
|
||||
return interface
|
||||
|
||||
confirmation = self.confirm_delete(*args)
|
||||
if confirmation:
|
||||
return confirmation
|
||||
|
||||
self.dbcon.install()
|
||||
response = self.launch(
|
||||
self.session, *args
|
||||
)
|
||||
finally:
|
||||
self.db.uninstall()
|
||||
self.dbcon.uninstall()
|
||||
|
||||
return self._handle_result(
|
||||
self.session, response, *args
|
||||
)
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
if not event['data'].get('values', {}):
|
||||
self.attempt = 1
|
||||
items = []
|
||||
entity = entities[0]
|
||||
title = 'Choose items to delete from "{}"'.format(entity['name'])
|
||||
project = entity['project']
|
||||
self.show_message(event, "Preparing data...", True)
|
||||
items = []
|
||||
title = "Choose items to delete"
|
||||
|
||||
self.db.Session['AVALON_PROJECT'] = project["full_name"]
|
||||
# Filter selection and get ftrack ids
|
||||
selection = event["data"].get("selection") or []
|
||||
ftrack_ids = []
|
||||
project_in_selection = False
|
||||
for entity in selection:
|
||||
entity_type = (entity.get("entityType") or "").lower()
|
||||
if entity_type != "task":
|
||||
if entity_type == "show":
|
||||
project_in_selection = True
|
||||
continue
|
||||
|
||||
av_entity = self.db.find_one({
|
||||
'type': 'asset',
|
||||
'name': entity['name']
|
||||
ftrack_id = entity.get("entityId")
|
||||
if not ftrack_id:
|
||||
continue
|
||||
|
||||
ftrack_ids.append(ftrack_id)
|
||||
|
||||
if project_in_selection:
|
||||
msg = "It is not possible to use this action on project entity."
|
||||
self.show_message(event, msg, True)
|
||||
|
||||
# Filter event even more (skip task entities)
|
||||
# - task entities are not relevant for avalon
|
||||
for entity in entities:
|
||||
ftrack_id = entity["id"]
|
||||
if ftrack_id not in ftrack_ids:
|
||||
continue
|
||||
|
||||
if entity.entity_type.lower() == "task":
|
||||
ftrack_ids.remove(ftrack_id)
|
||||
|
||||
if not ftrack_ids:
|
||||
# It is bug if this happens!
|
||||
return {
|
||||
"success": False,
|
||||
"message": "Invalid selection for this action (Bug)"
|
||||
}
|
||||
|
||||
if entities[0].entity_type.lower() == "project":
|
||||
project = entities[0]
|
||||
else:
|
||||
project = entities[0]["project"]
|
||||
|
||||
project_name = project["full_name"]
|
||||
self.dbcon.Session["AVALON_PROJECT"] = project_name
|
||||
|
||||
selected_av_entities = self.dbcon.find({
|
||||
"type": "asset",
|
||||
"data.ftrackId": {"$in": ftrack_ids}
|
||||
})
|
||||
selected_av_entities = [ent for ent in selected_av_entities]
|
||||
if not selected_av_entities:
|
||||
return {
|
||||
"success": False,
|
||||
"message": "Didn't found entities in avalon"
|
||||
}
|
||||
|
||||
# Remove cached action older than 2 minutes
|
||||
old_action_ids = []
|
||||
for id, data in self.action_data_by_id.items():
|
||||
created_at = data.get("created_at")
|
||||
if not created_at:
|
||||
old_action_ids.append(id)
|
||||
continue
|
||||
cur_time = datetime.now()
|
||||
existing_in_sec = (created_at - cur_time).total_seconds()
|
||||
if existing_in_sec > 60 * 2:
|
||||
old_action_ids.append(id)
|
||||
|
||||
for id in old_action_ids:
|
||||
self.action_data_by_id.pop(id, None)
|
||||
|
||||
# Store data for action id
|
||||
action_id = str(uuid.uuid1())
|
||||
self.action_data_by_id[action_id] = {
|
||||
"attempt": 1,
|
||||
"created_at": datetime.now(),
|
||||
"project_name": project_name,
|
||||
"subset_ids_by_name": {},
|
||||
"subset_ids_by_parent": {}
|
||||
}
|
||||
|
||||
id_item = {
|
||||
"type": "hidden",
|
||||
"name": "action_id",
|
||||
"value": action_id
|
||||
}
|
||||
|
||||
items.append(id_item)
|
||||
asset_ids = [ent["_id"] for ent in selected_av_entities]
|
||||
subsets_for_selection = self.dbcon.find({
|
||||
"type": "subset",
|
||||
"parent": {"$in": asset_ids}
|
||||
})
|
||||
|
||||
asset_ending = ""
|
||||
if len(selected_av_entities) > 1:
|
||||
asset_ending = "s"
|
||||
|
||||
asset_title = {
|
||||
"type": "label",
|
||||
"value": "# Delete asset{}:".format(asset_ending)
|
||||
}
|
||||
asset_note = {
|
||||
"type": "label",
|
||||
"value": (
|
||||
"<p><i>NOTE: Action will delete checked entities"
|
||||
" in Ftrack and Avalon with all children entities and"
|
||||
" published content.</i></p>"
|
||||
)
|
||||
}
|
||||
|
||||
items.append(asset_title)
|
||||
items.append(asset_note)
|
||||
|
||||
asset_items = collections.defaultdict(list)
|
||||
for asset in selected_av_entities:
|
||||
ent_path_items = [project_name]
|
||||
ent_path_items.extend(asset.get("data", {}).get("parents") or [])
|
||||
ent_path_to_parent = "/".join(ent_path_items) + "/"
|
||||
asset_items[ent_path_to_parent].append(asset)
|
||||
|
||||
for asset_parent_path, assets in sorted(asset_items.items()):
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "## <b>- {}</b>".format(asset_parent_path)
|
||||
})
|
||||
|
||||
if av_entity is None:
|
||||
return {
|
||||
'success': False,
|
||||
'message': 'Didn\'t found assets in avalon'
|
||||
}
|
||||
|
||||
asset_label = {
|
||||
'type': 'label',
|
||||
'value': '## Delete whole asset: ##'
|
||||
}
|
||||
asset_item = {
|
||||
'label': av_entity['name'],
|
||||
'name': 'whole_asset',
|
||||
'type': 'boolean',
|
||||
'value': False
|
||||
}
|
||||
splitter = {
|
||||
'type': 'label',
|
||||
'value': '{}'.format(200*"-")
|
||||
}
|
||||
subset_label = {
|
||||
'type': 'label',
|
||||
'value': '## Subsets: ##'
|
||||
}
|
||||
if av_entity is not None:
|
||||
items.append(asset_label)
|
||||
items.append(asset_item)
|
||||
items.append(splitter)
|
||||
|
||||
all_subsets = self.db.find({
|
||||
'type': 'subset',
|
||||
'parent': av_entity['_id']
|
||||
for asset in assets:
|
||||
items.append({
|
||||
"label": asset["name"],
|
||||
"name": "{}{}".format(
|
||||
self.asset_prefix, str(asset["_id"])
|
||||
),
|
||||
"type": 'boolean',
|
||||
"value": False
|
||||
})
|
||||
|
||||
subset_items = []
|
||||
for subset in all_subsets:
|
||||
item = {
|
||||
'label': subset['name'],
|
||||
'name': str(subset['_id']),
|
||||
'type': 'boolean',
|
||||
'value': False
|
||||
}
|
||||
subset_items.append(item)
|
||||
if len(subset_items) > 0:
|
||||
items.append(subset_label)
|
||||
items.extend(subset_items)
|
||||
else:
|
||||
return {
|
||||
'success': False,
|
||||
'message': 'Didn\'t found assets in avalon'
|
||||
}
|
||||
subset_ids_by_name = collections.defaultdict(list)
|
||||
subset_ids_by_parent = collections.defaultdict(list)
|
||||
for subset in subsets_for_selection:
|
||||
subset_id = subset["_id"]
|
||||
name = subset["name"]
|
||||
parent_id = subset["parent"]
|
||||
subset_ids_by_name[name].append(subset_id)
|
||||
subset_ids_by_parent[parent_id].append(subset_id)
|
||||
|
||||
if not subset_ids_by_name:
|
||||
return {
|
||||
'items': items,
|
||||
'title': title
|
||||
"items": items,
|
||||
"title": title
|
||||
}
|
||||
|
||||
def confirm_delete(self, first_attempt, entities, event):
|
||||
if first_attempt is True:
|
||||
if 'values' not in event['data']:
|
||||
return
|
||||
subset_ending = ""
|
||||
if len(subset_ids_by_name.keys()) > 1:
|
||||
subset_ending = "s"
|
||||
|
||||
values = event['data']['values']
|
||||
subset_title = {
|
||||
"type": "label",
|
||||
"value": "# Subset{} to delete:".format(subset_ending)
|
||||
}
|
||||
subset_note = {
|
||||
"type": "label",
|
||||
"value": (
|
||||
"<p><i>WARNING: Subset{} will be removed"
|
||||
" for all <b>selected</b> entities.</i></p>"
|
||||
).format(subset_ending)
|
||||
}
|
||||
|
||||
if len(values) <= 0:
|
||||
return
|
||||
if 'whole_asset' not in values:
|
||||
return
|
||||
else:
|
||||
values = self.values
|
||||
items.append(self.splitter)
|
||||
items.append(subset_title)
|
||||
items.append(subset_note)
|
||||
|
||||
title = 'Confirmation of deleting {}'
|
||||
if values['whole_asset'] is True:
|
||||
title = title.format(
|
||||
'whole asset {}'.format(
|
||||
entities[0]['name']
|
||||
)
|
||||
)
|
||||
else:
|
||||
subsets = []
|
||||
for key, value in values.items():
|
||||
if value is True:
|
||||
subsets.append(key)
|
||||
len_subsets = len(subsets)
|
||||
if len_subsets == 0:
|
||||
for name in subset_ids_by_name:
|
||||
items.append({
|
||||
"label": "<b>{}</b>".format(name),
|
||||
"name": "{}{}".format(self.subset_prefix, name),
|
||||
"type": "boolean",
|
||||
"value": False
|
||||
})
|
||||
|
||||
self.action_data_by_id[action_id]["subset_ids_by_parent"] = (
|
||||
subset_ids_by_parent
|
||||
)
|
||||
self.action_data_by_id[action_id]["subset_ids_by_name"] = (
|
||||
subset_ids_by_name
|
||||
)
|
||||
|
||||
return {
|
||||
"items": items,
|
||||
"title": title
|
||||
}
|
||||
|
||||
def confirm_delete(self, entities, event):
|
||||
values = event["data"]["values"]
|
||||
action_id = values.get("action_id")
|
||||
spec_data = self.action_data_by_id.get(action_id)
|
||||
if not spec_data:
|
||||
# it is a bug if this happens!
|
||||
return {
|
||||
"success": False,
|
||||
"message": "Something bad has happened. Please try again."
|
||||
}
|
||||
|
||||
# Process Delete confirmation
|
||||
delete_key = values.get("delete_key")
|
||||
if delete_key:
|
||||
delete_key = delete_key.lower().strip()
|
||||
# Go to launch part if user entered `delete`
|
||||
if delete_key == "delete":
|
||||
return
|
||||
# Skip whole process if user didn't enter any text
|
||||
elif delete_key == "":
|
||||
self.action_data_by_id.pop(action_id, None)
|
||||
return {
|
||||
'success': True,
|
||||
'message': 'Nothing was selected to delete'
|
||||
"success": True,
|
||||
"message": "Deleting cancelled (delete entry was empty)"
|
||||
}
|
||||
elif len_subsets == 1:
|
||||
title = title.format(
|
||||
'{} subset'.format(len_subsets)
|
||||
)
|
||||
else:
|
||||
title = title.format(
|
||||
'{} subsets'.format(len_subsets)
|
||||
)
|
||||
# Get data to show again
|
||||
to_delete = spec_data["to_delete"]
|
||||
|
||||
else:
|
||||
to_delete = collections.defaultdict(list)
|
||||
for key, value in values.items():
|
||||
if not value:
|
||||
continue
|
||||
if key.startswith(self.asset_prefix):
|
||||
_key = key.replace(self.asset_prefix, "")
|
||||
to_delete["assets"].append(_key)
|
||||
|
||||
elif key.startswith(self.subset_prefix):
|
||||
_key = key.replace(self.subset_prefix, "")
|
||||
to_delete["subsets"].append(_key)
|
||||
|
||||
self.action_data_by_id[action_id]["to_delete"] = to_delete
|
||||
|
||||
asset_to_delete = len(to_delete.get("assets") or []) > 0
|
||||
subset_to_delete = len(to_delete.get("subsets") or []) > 0
|
||||
|
||||
if not asset_to_delete and not subset_to_delete:
|
||||
self.action_data_by_id.pop(action_id, None)
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Nothing was selected to delete"
|
||||
}
|
||||
|
||||
attempt = spec_data["attempt"]
|
||||
if attempt > 3:
|
||||
self.action_data_by_id.pop(action_id, None)
|
||||
return {
|
||||
"success": False,
|
||||
"message": "You didn't enter \"DELETE\" properly 3 times!"
|
||||
}
|
||||
|
||||
self.action_data_by_id[action_id]["attempt"] += 1
|
||||
|
||||
title = "Confirmation of deleting"
|
||||
|
||||
if asset_to_delete:
|
||||
asset_len = len(to_delete["assets"])
|
||||
asset_ending = ""
|
||||
if asset_len > 1:
|
||||
asset_ending = "s"
|
||||
title += " {} Asset{}".format(asset_len, asset_ending)
|
||||
if subset_to_delete:
|
||||
title += " and"
|
||||
|
||||
if subset_to_delete:
|
||||
sub_len = len(to_delete["subsets"])
|
||||
type_ending = ""
|
||||
sub_ending = ""
|
||||
if sub_len == 1:
|
||||
subset_ids_by_name = spec_data["subset_ids_by_name"]
|
||||
if len(subset_ids_by_name[to_delete["subsets"][0]]) > 1:
|
||||
sub_ending = "s"
|
||||
|
||||
elif sub_len > 1:
|
||||
type_ending = "s"
|
||||
sub_ending = "s"
|
||||
|
||||
title += " {} type{} of subset{}".format(
|
||||
sub_len, type_ending, sub_ending
|
||||
)
|
||||
|
||||
self.values = values
|
||||
items = []
|
||||
|
||||
id_item = {"type": "hidden", "name": "action_id", "value": action_id}
|
||||
delete_label = {
|
||||
'type': 'label',
|
||||
'value': '# Please enter "DELETE" to confirm #'
|
||||
}
|
||||
|
||||
delete_item = {
|
||||
'name': 'delete_key',
|
||||
'type': 'text',
|
||||
'value': '',
|
||||
'empty_text': 'Type Delete here...'
|
||||
"name": "delete_key",
|
||||
"type": "text",
|
||||
"value": "",
|
||||
"empty_text": "Type Delete here..."
|
||||
}
|
||||
|
||||
items.append(id_item)
|
||||
items.append(delete_label)
|
||||
items.append(delete_item)
|
||||
|
||||
return {
|
||||
'items': items,
|
||||
'title': title
|
||||
"items": items,
|
||||
"title": title
|
||||
}
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
if 'values' not in event['data']:
|
||||
return
|
||||
|
||||
values = event['data']['values']
|
||||
if len(values) <= 0:
|
||||
return
|
||||
if 'delete_key' not in values:
|
||||
return
|
||||
|
||||
if values['delete_key'].lower() != 'delete':
|
||||
if values['delete_key'].lower() == '':
|
||||
return {
|
||||
'success': False,
|
||||
'message': 'Deleting cancelled'
|
||||
}
|
||||
if self.attempt < 3:
|
||||
self.attempt += 1
|
||||
return_dict = self.confirm_delete(False, entities, event)
|
||||
return_dict['title'] = '{} ({} attempt)'.format(
|
||||
return_dict['title'], self.attempt
|
||||
)
|
||||
return return_dict
|
||||
self.show_message(event, "Processing...", True)
|
||||
values = event["data"]["values"]
|
||||
action_id = values.get("action_id")
|
||||
spec_data = self.action_data_by_id.get(action_id)
|
||||
if not spec_data:
|
||||
# it is a bug if this happens!
|
||||
return {
|
||||
'success': False,
|
||||
'message': 'You didn\'t enter "DELETE" properly 3 times!'
|
||||
"success": False,
|
||||
"message": "Something bad has happened. Please try again."
|
||||
}
|
||||
|
||||
entity = entities[0]
|
||||
project = entity['project']
|
||||
report_messages = collections.defaultdict(list)
|
||||
|
||||
self.db.Session['AVALON_PROJECT'] = project["full_name"]
|
||||
project_name = spec_data["project_name"]
|
||||
to_delete = spec_data["to_delete"]
|
||||
self.dbcon.Session["AVALON_PROJECT"] = project_name
|
||||
|
||||
all_ids = []
|
||||
if self.values.get('whole_asset', False) is True:
|
||||
av_entity = self.db.find_one({
|
||||
'type': 'asset',
|
||||
'name': entity['name']
|
||||
assets_to_delete = to_delete.get("assets") or []
|
||||
subsets_to_delete = to_delete.get("subsets") or []
|
||||
|
||||
# Convert asset ids to ObjectId obj
|
||||
assets_to_delete = [ObjectId(id) for id in assets_to_delete if id]
|
||||
|
||||
subset_ids_by_parent = spec_data["subset_ids_by_parent"]
|
||||
subset_ids_by_name = spec_data["subset_ids_by_name"]
|
||||
|
||||
subset_ids_to_archive = []
|
||||
asset_ids_to_archive = []
|
||||
ftrack_ids_to_delete = []
|
||||
if len(assets_to_delete) > 0:
|
||||
# Prepare data when deleting whole avalon asset
|
||||
avalon_assets = self.dbcon.find({"type": "asset"})
|
||||
avalon_assets_by_parent = collections.defaultdict(list)
|
||||
for asset in avalon_assets:
|
||||
parent_id = asset["data"]["visualParent"]
|
||||
avalon_assets_by_parent[parent_id].append(asset)
|
||||
if asset["_id"] in assets_to_delete:
|
||||
ftrack_id = asset["data"]["ftrackId"]
|
||||
ftrack_ids_to_delete.append(ftrack_id)
|
||||
|
||||
children_queue = Queue()
|
||||
for mongo_id in assets_to_delete:
|
||||
children_queue.put(mongo_id)
|
||||
|
||||
while not children_queue.empty():
|
||||
mongo_id = children_queue.get()
|
||||
if mongo_id in asset_ids_to_archive:
|
||||
continue
|
||||
|
||||
asset_ids_to_archive.append(mongo_id)
|
||||
for subset_id in subset_ids_by_parent.get(mongo_id, []):
|
||||
if subset_id not in subset_ids_to_archive:
|
||||
subset_ids_to_archive.append(subset_id)
|
||||
|
||||
children = avalon_assets_by_parent.get(mongo_id)
|
||||
if not children:
|
||||
continue
|
||||
|
||||
for child in children:
|
||||
child_id = child["_id"]
|
||||
if child_id not in asset_ids_to_archive:
|
||||
children_queue.put(child_id)
|
||||
|
||||
# Prepare names of assets in ftrack and ids of subsets in mongo
|
||||
asset_names_to_delete = []
|
||||
if len(subsets_to_delete) > 0:
|
||||
for name in subsets_to_delete:
|
||||
asset_names_to_delete.append(name)
|
||||
for subset_id in subset_ids_by_name[name]:
|
||||
if subset_id in subset_ids_to_archive:
|
||||
continue
|
||||
subset_ids_to_archive.append(subset_id)
|
||||
|
||||
# Get ftrack ids of entities where will be delete only asset
|
||||
not_deleted_entities_id = []
|
||||
ftrack_id_name_map = {}
|
||||
if asset_names_to_delete:
|
||||
for entity in entities:
|
||||
ftrack_id = entity["id"]
|
||||
ftrack_id_name_map[ftrack_id] = entity["name"]
|
||||
if ftrack_id in ftrack_ids_to_delete:
|
||||
continue
|
||||
not_deleted_entities_id.append(ftrack_id)
|
||||
|
||||
mongo_proc_txt = "MongoProcessing: "
|
||||
ftrack_proc_txt = "Ftrack processing: "
|
||||
if asset_ids_to_archive:
|
||||
self.log.debug("{}Archivation of assets <{}>".format(
|
||||
mongo_proc_txt,
|
||||
", ".join([str(id) for id in asset_ids_to_archive])
|
||||
))
|
||||
self.dbcon.update_many(
|
||||
{
|
||||
"_id": {"$in": asset_ids_to_archive},
|
||||
"type": "asset"
|
||||
},
|
||||
{"$set": {"type": "archived_asset"}}
|
||||
)
|
||||
|
||||
if subset_ids_to_archive:
|
||||
self.log.debug("{}Archivation of subsets <{}>".format(
|
||||
mongo_proc_txt,
|
||||
", ".join([str(id) for id in subset_ids_to_archive])
|
||||
))
|
||||
self.dbcon.update_many(
|
||||
{
|
||||
"_id": {"$in": subset_ids_to_archive},
|
||||
"type": "subset"
|
||||
},
|
||||
{"$set": {"type": "archived_subset"}}
|
||||
)
|
||||
|
||||
if ftrack_ids_to_delete:
|
||||
self.log.debug("{}Deleting Ftrack Entities <{}>".format(
|
||||
ftrack_proc_txt, ", ".join(ftrack_ids_to_delete)
|
||||
))
|
||||
|
||||
joined_ids_to_delete = ", ".join(
|
||||
["\"{}\"".format(id) for id in ftrack_ids_to_delete]
|
||||
)
|
||||
ftrack_ents_to_delete = self.session.query(
|
||||
"select id, link from TypedContext where id in ({})".format(
|
||||
joined_ids_to_delete
|
||||
)
|
||||
).all()
|
||||
for entity in ftrack_ents_to_delete:
|
||||
self.session.delete(entity)
|
||||
try:
|
||||
self.session.commit()
|
||||
except Exception:
|
||||
ent_path = "/".join(
|
||||
[ent["name"] for ent in entity["link"]]
|
||||
)
|
||||
msg = "Failed to delete entity"
|
||||
report_messages[msg].append(ent_path)
|
||||
self.session.rollback()
|
||||
self.log.warning(
|
||||
"{} <{}>".format(msg, ent_path),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
if not_deleted_entities_id:
|
||||
joined_not_deleted = ", ".join([
|
||||
"\"{}\"".format(ftrack_id)
|
||||
for ftrack_id in not_deleted_entities_id
|
||||
])
|
||||
joined_asset_names = ", ".join([
|
||||
"\"{}\"".format(name)
|
||||
for name in asset_names_to_delete
|
||||
])
|
||||
# Find assets of selected entities with names of checked subsets
|
||||
assets = self.session.query((
|
||||
"select id from Asset where"
|
||||
" context_id in ({}) and name in ({})"
|
||||
).format(joined_not_deleted, joined_asset_names)).all()
|
||||
|
||||
self.log.debug("{}Deleting Ftrack Assets <{}>".format(
|
||||
ftrack_proc_txt,
|
||||
", ".join([asset["id"] for asset in assets])
|
||||
))
|
||||
for asset in assets:
|
||||
self.session.delete(asset)
|
||||
try:
|
||||
self.session.commit()
|
||||
except Exception:
|
||||
self.session.rollback()
|
||||
msg = "Failed to delete asset"
|
||||
report_messages[msg].append(asset["id"])
|
||||
self.log.warning(
"{} <{}>".format(msg, asset["id"]),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
return self.report_handle(report_messages, project_name, event)
|
||||
|
||||
def report_handle(self, report_messages, project_name, event):
|
||||
if not report_messages:
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Deletion was successful!"
|
||||
}
|
||||
|
||||
title = "Delete report ({}):".format(project_name)
|
||||
items = []
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "# Deleting was not completely successful"
|
||||
})
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "<p><i>Check logs for more information</i></p>"
|
||||
})
|
||||
for msg, _items in report_messages.items():
|
||||
if not _items or not msg:
|
||||
continue
|
||||
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "# {}".format(msg)
|
||||
})
|
||||
|
||||
if av_entity is not None:
|
||||
all_ids.append(av_entity['_id'])
|
||||
all_ids.extend(self.find_child(av_entity))
|
||||
if isinstance(_items, str):
|
||||
_items = [_items]
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": '<p>{}</p>'.format("<br>".join(_items))
|
||||
})
|
||||
items.append(self.splitter)
|
||||
|
||||
session.delete(entity)
|
||||
session.commit()
|
||||
else:
|
||||
subset_names = []
|
||||
for key, value in self.values.items():
|
||||
if key == 'delete_key' or value is False:
|
||||
continue
|
||||
|
||||
entity_id = ObjectId(key)
|
||||
av_entity = self.db.find_one({'_id': entity_id})
|
||||
subset_names.append(av_entity['name'])
|
||||
if av_entity is None:
|
||||
continue
|
||||
all_ids.append(entity_id)
|
||||
all_ids.extend(self.find_child(av_entity))
|
||||
|
||||
for ft_asset in entity['assets']:
|
||||
if ft_asset['name'] in subset_names:
|
||||
session.delete(ft_asset)
|
||||
session.commit()
|
||||
|
||||
if len(all_ids) == 0:
|
||||
return {
|
||||
'success': True,
|
||||
'message': 'No entities to delete in avalon'
|
||||
}
|
||||
|
||||
delete_query = {'_id': {'$in': all_ids}}
|
||||
self.db.delete_many(delete_query)
|
||||
self.show_interface(items, title, event)
|
||||
|
||||
return {
|
||||
'success': True,
|
||||
'message': 'All assets were deleted!'
|
||||
"success": False,
|
||||
"message": "Deleting finished. Read report messages."
|
||||
}
|
||||
|
||||
def find_child(self, entity):
|
||||
output = []
|
||||
id = entity['_id']
|
||||
visuals = [x for x in self.db.find({'data.visualParent': id})]
|
||||
assert len(visuals) == 0, 'This asset has another asset as child'
|
||||
childs = self.db.find({'parent': id})
|
||||
for child in childs:
|
||||
output.append(child['_id'])
|
||||
output.extend(self.find_child(child))
|
||||
return output
|
||||
|
||||
def find_assets(self, asset_names):
|
||||
assets = []
|
||||
for name in asset_names:
|
||||
entity = self.db.find_one({
|
||||
'type': 'asset',
|
||||
'name': name
|
||||
})
|
||||
if entity is not None and entity not in assets:
|
||||
assets.append(entity)
|
||||
return assets
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
DeleteAsset(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
DeleteAssetSubset(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@@ -1,175 +0,0 @@  (deleted file)

import os
import sys
import logging
import argparse
import ftrack_api
from pype.ftrack import BaseAction
from pype.ftrack.lib.io_nonsingleton import DbConnector


class AssetsRemover(BaseAction):
    '''Edit meta data action.'''

    #: Action identifier.
    identifier = 'remove.assets'
    #: Action label.
    label = "Pype Admin"
    variant = '- Delete Assets by Name'
    #: Action description.
    description = 'Removes assets from Ftrack and Avalon db with all childs'
    #: roles that are allowed to register this action
    role_list = ['Pypeclub', 'Administrator']
    icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )
    #: Db
    db = DbConnector()

    def discover(self, session, entities, event):
        ''' Validation '''
        if len(entities) != 1:
            return False

        valid = ["show", "task"]
        entityType = event["data"]["selection"][0].get("entityType", "")
        if entityType.lower() not in valid:
            return False

        return True

    def interface(self, session, entities, event):
        if not event['data'].get('values', {}):
            title = 'Enter Asset names to delete'

            items = []
            for i in range(15):

                item = {
                    'label': 'Asset {}'.format(i+1),
                    'name': 'asset_{}'.format(i+1),
                    'type': 'text',
                    'value': ''
                }
                items.append(item)

            return {
                'items': items,
                'title': title
            }

    def launch(self, session, entities, event):
        entity = entities[0]
        if entity.entity_type.lower() != 'Project':
            project = entity['project']
        else:
            project = entity

        if 'values' not in event['data']:
            return

        values = event['data']['values']
        if len(values) <= 0:
            return {
                'success': True,
                'message': 'No Assets to delete!'
            }

        asset_names = []

        for k, v in values.items():
            if v.replace(' ', '') != '':
                asset_names.append(v)

        self.db.install()
        self.db.Session['AVALON_PROJECT'] = project["full_name"]

        assets = self.find_assets(asset_names)

        all_ids = []
        for asset in assets:
            all_ids.append(asset['_id'])
            all_ids.extend(self.find_child(asset))

        if len(all_ids) == 0:
            self.db.uninstall()
            return {
                'success': True,
                'message': 'None of assets'
            }

        delete_query = {'_id': {'$in': all_ids}}
        self.db.delete_many(delete_query)

        self.db.uninstall()
        return {
            'success': True,
            'message': 'All assets were deleted!'
        }

    def find_child(self, entity):
        output = []
        id = entity['_id']
        visuals = [x for x in self.db.find({'data.visualParent': id})]
        assert len(visuals) == 0, 'This asset has another asset as child'
        childs = self.db.find({'parent': id})
        for child in childs:
            output.append(child['_id'])
            output.extend(self.find_child(child))
        return output

    def find_assets(self, asset_names):
        assets = []
        for name in asset_names:
            entity = self.db.find_one({
                'type': 'asset',
                'name': name
            })
            if entity is not None and entity not in assets:
                assets.append(entity)
        return assets


def register(session, plugins_presets={}):
    '''Register plugin. Called when used as a plugin.'''

    AssetsRemover(session, plugins_presets).register()


def main(arguments=None):
    '''Set up logging and register action.'''
    if arguments is None:
        arguments = []

    parser = argparse.ArgumentParser()
    # Allow setting of logging level from arguments.
    loggingLevels = {}
    for level in (
        logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
        logging.ERROR, logging.CRITICAL
    ):
        loggingLevels[logging.getLevelName(level).lower()] = level

    parser.add_argument(
        '-v', '--verbosity',
        help='Set the logging output verbosity.',
        choices=loggingLevels.keys(),
        default='info'
    )
    namespace = parser.parse_args(arguments)

    # Set up basic logging
    logging.basicConfig(level=loggingLevels[namespace.verbosity])

    session = ftrack_api.Session()

    register(session)

    # Wait for events
    logging.info(
        'Registered actions and listening for events. Use Ctrl-C to abort.'
    )
    session.event_hub.wait()


if __name__ == '__main__':
    raise SystemExit(main(sys.argv[1:]))
pype/ftrack/actions/action_delivery.py  (new file, 528 lines)
@@ -0,0 +1,528 @@
|
|||
import os
|
||||
import copy
|
||||
import shutil
|
||||
import collections
|
||||
import string
|
||||
|
||||
import clique
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
from avalon import pipeline
|
||||
from avalon.vendor import filelink
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
|
||||
from pypeapp import Anatomy
|
||||
from pype.ftrack import BaseAction
|
||||
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
|
||||
|
||||
|
||||
class Delivery(BaseAction):
|
||||
'''Deliver data to client.'''
|
||||
|
||||
#: Action identifier.
|
||||
identifier = "delivery.action"
|
||||
#: Action label.
|
||||
label = "Delivery"
|
||||
#: Action description.
|
||||
description = "Deliver data to client"
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator", "Project manager"]
|
||||
icon = '{}/ftrack/action_icons/Delivery.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
|
||||
db_con = DbConnector()
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
for entity in entities:
|
||||
if entity.entity_type.lower() == "assetversion":
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
if event["data"].get("values", {}):
|
||||
return
|
||||
|
||||
title = "Delivery data to Client"
|
||||
|
||||
items = []
|
||||
item_splitter = {"type": "label", "value": "---"}
|
||||
|
||||
# Prepare component names for processing
|
||||
components = None
|
||||
project = None
|
||||
for entity in entities:
|
||||
if project is None:
|
||||
project_id = None
|
||||
for ent_info in entity["link"]:
|
||||
if ent_info["type"].lower() == "project":
|
||||
project_id = ent_info["id"]
|
||||
break
|
||||
|
||||
if project_id is None:
|
||||
project = entity["asset"]["parent"]["project"]
|
||||
else:
|
||||
project = session.query((
|
||||
"select id, full_name from Project where id is \"{}\""
|
||||
).format(project_id)).one()
|
||||
|
||||
_components = set(
|
||||
[component["name"] for component in entity["components"]]
|
||||
)
|
||||
if components is None:
|
||||
components = _components
|
||||
continue
|
||||
|
||||
components = components.intersection(_components)
|
||||
if not components:
|
||||
break
|
||||
|
||||
project_name = project["full_name"]
|
||||
items.append({
|
||||
"type": "hidden",
|
||||
"name": "__project_name__",
|
||||
"value": project_name
|
||||
})
|
||||
|
||||
# Prepare anatomy data
|
||||
anatomy = Anatomy(project_name)
|
||||
new_anatomies = []
|
||||
first = None
|
||||
for key in (anatomy.templates.get("delivery") or {}):
|
||||
new_anatomies.append({
|
||||
"label": key,
|
||||
"value": key
|
||||
})
|
||||
if first is None:
|
||||
first = key
|
||||
|
||||
skipped = False
|
||||
# Add message if there are any common components
|
||||
if not components or not new_anatomies:
|
||||
skipped = True
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "<h1>Something went wrong:</h1>"
|
||||
})
|
||||
|
||||
items.append({
|
||||
"type": "hidden",
|
||||
"name": "__skipped__",
|
||||
"value": skipped
|
||||
})
|
||||
|
||||
if not components:
|
||||
if len(entities) == 1:
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"- Selected entity doesn't have components to deliver."
|
||||
)
|
||||
})
|
||||
else:
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"- Selected entities don't have common components."
|
||||
)
|
||||
})
|
||||
|
||||
# Add message if delivery anatomies are not set
|
||||
if not new_anatomies:
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"- `\"delivery\"` anatomy key is not set in config."
|
||||
)
|
||||
})
|
||||
|
||||
# Skip if there are any data shortcomings
|
||||
if skipped:
|
||||
return {
|
||||
"items": items,
|
||||
"title": title
|
||||
}
|
||||
|
||||
items.append({
|
||||
"value": "<h1>Choose Components to deliver</h1>",
|
||||
"type": "label"
|
||||
})
|
||||
|
||||
for component in components:
|
||||
items.append({
|
||||
"type": "boolean",
|
||||
"value": False,
|
||||
"label": component,
|
||||
"name": component
|
||||
})
|
||||
|
||||
items.append(item_splitter)
|
||||
|
||||
items.append({
|
||||
"value": "<h2>Location for delivery</h2>",
|
||||
"type": "label"
|
||||
})
|
||||
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"<i>NOTE: It is possible to replace `root` key in anatomy.</i>"
|
||||
)
|
||||
})
|
||||
|
||||
items.append({
|
||||
"type": "text",
|
||||
"name": "__location_path__",
|
||||
"empty_text": "Type location path here...(Optional)"
|
||||
})
|
||||
|
||||
items.append(item_splitter)
|
||||
|
||||
items.append({
|
||||
"value": "<h2>Anatomy of delivery files</h2>",
|
||||
"type": "label"
|
||||
})
|
||||
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"<p><i>NOTE: These can be set in Anatomy.yaml"
|
||||
" within `delivery` key.</i></p>"
|
||||
)
|
||||
})
|
||||
|
||||
items.append({
|
||||
"type": "enumerator",
|
||||
"name": "__new_anatomies__",
|
||||
"data": new_anatomies,
|
||||
"value": first
|
||||
})
|
||||
|
||||
return {
|
||||
"items": items,
|
||||
"title": title
|
||||
}
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
if "values" not in event["data"]:
|
||||
return
|
||||
|
||||
self.report_items = collections.defaultdict(list)
|
||||
|
||||
values = event["data"]["values"]
|
||||
skipped = values.pop("__skipped__")
|
||||
if skipped:
|
||||
return None
|
||||
|
||||
component_names = []
|
||||
location_path = values.pop("__location_path__")
|
||||
anatomy_name = values.pop("__new_anatomies__")
|
||||
project_name = values.pop("__project_name__")
|
||||
|
||||
for key, value in values.items():
|
||||
if value is True:
|
||||
component_names.append(key)
|
||||
|
||||
if not component_names:
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Not selected components to deliver."
|
||||
}
|
||||
|
||||
location_path = location_path.strip()
|
||||
if location_path:
|
||||
location_path = os.path.normpath(location_path)
|
||||
if not os.path.exists(location_path):
|
||||
return {
|
||||
"success": False,
|
||||
"message": (
"Entered location path does not exist: \"{}\""
|
||||
).format(location_path)
|
||||
}
|
||||
|
||||
self.db_con.install()
|
||||
self.db_con.Session["AVALON_PROJECT"] = project_name
|
||||
|
||||
repres_to_deliver = []
|
||||
for entity in entities:
|
||||
asset = entity["asset"]
|
||||
subset_name = asset["name"]
|
||||
version = entity["version"]
|
||||
|
||||
parent = asset["parent"]
|
||||
parent_mongo_id = parent["custom_attributes"].get(CustAttrIdKey)
|
||||
if parent_mongo_id:
|
||||
parent_mongo_id = ObjectId(parent_mongo_id)
|
||||
else:
|
||||
asset_ent = self.db_con.find_one({
|
||||
"type": "asset",
|
||||
"data.ftrackId": parent["id"]
|
||||
})
|
||||
if not asset_ent:
|
||||
ent_path = "/".join(
|
||||
[ent["name"] for ent in parent["link"]]
|
||||
)
|
||||
msg = "Not synchronized entities to avalon"
|
||||
self.report_items[msg].append(ent_path)
|
||||
self.log.warning("{} <{}>".format(msg, ent_path))
|
||||
continue
|
||||
|
||||
parent_mongo_id = asset_ent["_id"]
|
||||
|
||||
subset_ent = self.db_con.find_one({
|
||||
"type": "subset",
|
||||
"parent": parent_mongo_id,
|
||||
"name": subset_name
|
||||
})
|
||||
|
||||
version_ent = self.db_con.find_one({
|
||||
"type": "version",
|
||||
"name": version,
|
||||
"parent": subset_ent["_id"]
|
||||
})
|
||||
|
||||
repre_ents = self.db_con.find({
|
||||
"type": "representation",
|
||||
"parent": version_ent["_id"]
|
||||
})
|
||||
|
||||
repres_by_name = {}
|
||||
for repre in repre_ents:
|
||||
repre_name = repre["name"]
|
||||
repres_by_name[repre_name] = repre
|
||||
|
||||
for component in entity["components"]:
|
||||
comp_name = component["name"]
|
||||
if comp_name not in component_names:
|
||||
continue
|
||||
|
||||
repre = repres_by_name.get(comp_name)
|
||||
repres_to_deliver.append(repre)
|
||||
|
||||
if not location_path:
|
||||
location_path = os.environ.get("AVALON_PROJECTS") or ""
|
||||
|
||||
print(location_path)
|
||||
|
||||
anatomy = Anatomy(project_name)
|
||||
for repre in repres_to_deliver:
|
||||
# Get destination repre path
|
||||
anatomy_data = copy.deepcopy(repre["context"])
|
||||
anatomy_data["root"] = location_path
|
||||
|
||||
anatomy_filled = anatomy.format_all(anatomy_data)
|
||||
test_path = anatomy_filled["delivery"][anatomy_name]
|
||||
|
||||
if not test_path.solved:
|
||||
msg = (
|
||||
"Missing keys in Representation's context"
|
||||
" for anatomy template \"{}\"."
|
||||
).format(anatomy_name)
|
||||
|
||||
if test_path.missing_keys:
|
||||
keys = ", ".join(test_path.missing_keys)
|
||||
sub_msg = (
|
||||
"Representation: {}<br>- Missing keys: \"{}\"<br>"
|
||||
).format(str(repre["_id"]), keys)
|
||||
|
||||
if test_path.invalid_types:
|
||||
items = []
|
||||
for key, value in test_path.invalid_types.items():
|
||||
items.append("\"{}\" {}".format(key, str(value)))
|
||||
|
||||
keys = ", ".join(items)
|
||||
sub_msg = (
|
||||
"Representation: {}<br>"
|
||||
"- Invalid value DataType: \"{}\"<br>"
|
||||
).format(str(repre["_id"]), keys)
|
||||
|
||||
self.report_items[msg].append(sub_msg)
|
||||
self.log.warning(
|
||||
"{} Representation: \"{}\" Filled: <{}>".format(
|
||||
msg, str(repre["_id"]), str(test_path)
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
# Get source repre path
|
||||
frame = repre['context'].get('frame')
|
||||
|
||||
if frame:
|
||||
repre["context"]["frame"] = len(str(frame)) * "#"
|
||||
|
||||
repre_path = self.path_from_represenation(repre)
|
||||
# TODO add backup solution where root of path from component
|
||||
# is replaced with AVALON_PROJECTS root
|
||||
if not frame:
|
||||
self.process_single_file(
|
||||
repre_path, anatomy, anatomy_name, anatomy_data
|
||||
)
|
||||
|
||||
else:
|
||||
self.process_sequence(
|
||||
repre_path, anatomy, anatomy_name, anatomy_data
|
||||
)
|
||||
|
||||
self.db_con.uninstall()
|
||||
|
||||
return self.report()
|
||||
|
||||
def process_single_file(
|
||||
self, repre_path, anatomy, anatomy_name, anatomy_data
|
||||
):
|
||||
anatomy_filled = anatomy.format(anatomy_data)
|
||||
delivery_path = anatomy_filled["delivery"][anatomy_name]
|
||||
delivery_folder = os.path.dirname(delivery_path)
|
||||
if not os.path.exists(delivery_folder):
|
||||
os.makedirs(delivery_folder)
|
||||
|
||||
self.copy_file(repre_path, delivery_path)
|
||||
|
||||
def process_sequence(
|
||||
self, repre_path, anatomy, anatomy_name, anatomy_data
|
||||
):
|
||||
dir_path, file_name = os.path.split(str(repre_path))
|
||||
|
||||
base_name, ext = os.path.splitext(file_name)
|
||||
file_name_items = None
|
||||
if "#" in base_name:
|
||||
file_name_items = [part for part in base_name.split("#") if part]
|
||||
|
||||
elif "%" in base_name:
|
||||
file_name_items = base_name.split("%")
|
||||
|
||||
if not file_name_items:
|
||||
msg = "Source file was not found"
|
||||
self.report_items[msg].append(repre_path)
|
||||
self.log.warning("{} <{}>".format(msg, repre_path))
|
||||
return
|
||||
|
||||
src_collections, remainder = clique.assemble(os.listdir(dir_path))
|
||||
src_collection = None
|
||||
for col in src_collections:
|
||||
if col.tail != ext:
|
||||
continue
|
||||
|
||||
# skip if collection doesn't have the same basename
|
||||
if not col.head.startswith(file_name_items[0]):
|
||||
continue
|
||||
|
||||
src_collection = col
|
||||
break
|
||||
|
||||
if src_collection is None:
|
||||
# TODO log error!
|
||||
msg = "Source collection of files was not found"
|
||||
self.report_items[msg].append(repre_path)
|
||||
self.log.warning("{} <{}>".format(msg, repre_path))
|
||||
return
|
||||
|
||||
frame_indicator = "@####@"
|
||||
|
||||
anatomy_data["frame"] = frame_indicator
|
||||
anatomy_filled = anatomy.format(anatomy_data)
|
||||
|
||||
delivery_path = anatomy_filled["delivery"][anatomy_name]
|
||||
print(delivery_path)
|
||||
delivery_folder = os.path.dirname(delivery_path)
|
||||
dst_head, dst_tail = delivery_path.split(frame_indicator)
|
||||
dst_padding = src_collection.padding
|
||||
dst_collection = clique.Collection(
|
||||
head=dst_head,
|
||||
tail=dst_tail,
|
||||
padding=dst_padding
|
||||
)
|
||||
|
||||
if not os.path.exists(delivery_folder):
|
||||
os.makedirs(delivery_folder)
|
||||
|
||||
src_head = src_collection.head
|
||||
src_tail = src_collection.tail
|
||||
for index in src_collection.indexes:
|
||||
src_padding = src_collection.format("{padding}") % index
|
||||
src_file_name = "{}{}{}".format(src_head, src_padding, src_tail)
|
||||
src = os.path.normpath(
|
||||
os.path.join(dir_path, src_file_name)
|
||||
)
|
||||
|
||||
dst_padding = dst_collection.format("{padding}") % index
|
||||
dst = "{}{}{}".format(dst_head, dst_padding, dst_tail)
|
||||
|
||||
self.copy_file(src, dst)
|
||||
|
||||
def path_from_represenation(self, representation):
|
||||
try:
|
||||
template = representation["data"]["template"]
|
||||
|
||||
except KeyError:
|
||||
return None
|
||||
|
||||
try:
|
||||
context = representation["context"]
|
||||
context["root"] = os.environ.get("AVALON_PROJECTS") or ""
|
||||
path = pipeline.format_template_with_optional_keys(
|
||||
context, template
|
||||
)
|
||||
|
||||
except KeyError:
|
||||
# Template references unavailable data
|
||||
return None
|
||||
|
||||
return os.path.normpath(path)
|
||||
|
||||
def copy_file(self, src_path, dst_path):
|
||||
if os.path.exists(dst_path):
|
||||
return
|
||||
try:
|
||||
filelink.create(
|
||||
src_path,
|
||||
dst_path,
|
||||
filelink.HARDLINK
|
||||
)
|
||||
except OSError:
|
||||
shutil.copyfile(src_path, dst_path)
|
||||
|
||||
def report(self):
|
||||
items = []
|
||||
title = "Delivery report"
|
||||
for msg, _items in self.report_items.items():
|
||||
if not _items:
|
||||
continue
|
||||
|
||||
if items:
|
||||
items.append({"type": "label", "value": "---"})
|
||||
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": "# {}".format(msg)
|
||||
})
|
||||
if not isinstance(_items, (list, tuple)):
|
||||
_items = [_items]
|
||||
__items = []
|
||||
for item in _items:
|
||||
__items.append(str(item))
|
||||
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": '<p>{}</p>'.format("<br>".join(__items))
|
||||
})
|
||||
|
||||
if not items:
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Delivery Finished"
|
||||
}
|
||||
|
||||
return {
|
||||
"items": items,
|
||||
"title": title,
|
||||
"success": False,
|
||||
"message": "Delivery Finished"
|
||||
}
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as a plugin.'''
|
||||
|
||||
Delivery(session, plugins_presets).register()
|
||||
|
|
@ -2,12 +2,9 @@ import os
|
|||
import json
|
||||
|
||||
from ruamel import yaml
|
||||
import ftrack_api
|
||||
from pype.ftrack import BaseAction
|
||||
from pypeapp import config
|
||||
from pype.ftrack.lib import get_avalon_attr
|
||||
|
||||
from ftrack_api import session as fa_session
|
||||
from pype.ftrack.lib.avalon_sync import get_avalon_attr
|
||||
|
||||
|
||||
class PrepareProject(BaseAction):
|
||||
|
|
@@ -55,6 +52,8 @@ class PrepareProject(BaseAction):
        attributes_to_set = {}
        for attr in hier_cust_attrs:
            key = attr["key"]
            if key.startswith("avalon_"):
                continue
            attributes_to_set[key] = {
                "label": attr["label"],
                "object": attr,
@@ -65,6 +64,8 @@ class PrepareProject(BaseAction):
            if attr["entity_type"].lower() != "show":
                continue
            key = attr["key"]
            if key.startswith("avalon_"):
                continue
            attributes_to_set[key] = {
                "label": attr["label"],
                "object": attr,
@@ -9,7 +9,7 @@ class SeedDebugProject(BaseAction):
    #: Action identifier.
    identifier = "seed.debug.project"
    #: Action label.
    label = "SeedDebugProject"
    label = "Seed Debug Project"
    #: Action description.
    description = "Description"
    #: priority
@@ -265,6 +265,15 @@ class SeedDebugProject(BaseAction):
    def create_assets(self, project, asset_count):
        self.log.debug("*** Creating assets:")

        try:
            asset_count = int(asset_count)
        except ValueError:
            asset_count = 0

        if asset_count <= 0:
            self.log.debug("No assets to create")
            return

        main_entity = self.session.create("Folder", {
            "name": "Assets",
            "parent": project
@@ -305,6 +314,31 @@ class SeedDebugProject(BaseAction):

    def create_shots(self, project, seq_count, shots_count):
        self.log.debug("*** Creating shots:")

        # Convert counts to integers
        try:
            seq_count = int(seq_count)
        except ValueError:
            seq_count = 0

        try:
            shots_count = int(shots_count)
        except ValueError:
            shots_count = 0

        # Check if both are higher than 0
        missing = []
        if seq_count <= 0:
            missing.append("sequences")

        if shots_count <= 0:
            missing.append("shots")

        if missing:
            self.log.debug("No {} to create".format(" and ".join(missing)))
            return

        # Create Folder "Shots"
        main_entity = self.session.create("Folder", {
            "name": "Shots",
            "parent": project
File diff suppressed because it is too large
@@ -1,14 +1,6 @@
import os
import sys
import argparse
import logging
import collections
import json
import re

import ftrack_api
from pype.ftrack import BaseAction
from avalon import io, inventory, schema
from pype.ftrack.lib.io_nonsingleton import DbConnector
@@ -134,7 +126,6 @@ class PypeUpdateFromV2_2_0(BaseAction):
            "title": title
        }

    def launch(self, session, entities, event):
        if 'values' not in event['data']:
            return
@@ -182,7 +173,7 @@ class PypeUpdateFromV2_2_0(BaseAction):
            {"type": "asset"},
            {"$unset": {"silo": ""}}
        )

        self.log.debug("- setting schema of assets to v.3")
        self.db_con.update_many(
            {"type": "asset"},
@@ -191,10 +182,8 @@ class PypeUpdateFromV2_2_0(BaseAction):

        return True


def register(session, plugins_presets={}):
    """Register plugin. Called when used as a plugin."""

    if not isinstance(session, ftrack_api.session.Session):
        return

    PypeUpdateFromV2_2_0(session, plugins_presets).register()
@@ -1,7 +1,5 @@
import os
import ftrack_api
from pype.ftrack import BaseAction
from ftrack_api import session as fa_session


class ActionAskWhereIRun(BaseAction):
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
import ftrack_api
from pype.ftrack import BaseEvent, get_ca_mongoid
from pype.ftrack.events.event_sync_to_avalon import SyncToAvalon
from pype.ftrack.lib import BaseEvent
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
from pype.ftrack.events.event_sync_to_avalon import SyncToAvalonEvent


class DelAvalonIdFromNew(BaseEvent):
@@ -11,7 +11,8 @@ class DelAvalonIdFromNew(BaseEvent):

    Priority of this event must be less than SyncToAvalon event
    '''
    priority = SyncToAvalon.priority - 1
    priority = SyncToAvalonEvent.priority - 1
    ignore_me = True

    def launch(self, session, event):
        created = []
@@ -28,7 +29,7 @@ class DelAvalonIdFromNew(BaseEvent):

            elif (
                entity.get('action', None) == 'update' and
                get_ca_mongoid() in entity['keys'] and
                CustAttrIdKey in entity['keys'] and
                entity_id in created
            ):
                ftrack_entity = session.get(
@@ -37,13 +38,11 @@ class DelAvalonIdFromNew(BaseEvent):
                )

                cust_attr = ftrack_entity['custom_attributes'][
                    get_ca_mongoid()
                    CustAttrIdKey
                ]

                if cust_attr != '':
                    ftrack_entity['custom_attributes'][
                        get_ca_mongoid()
                    ] = ''
                    ftrack_entity['custom_attributes'][CustAttrIdKey] = ''
                    session.commit()

        except Exception:
@@ -53,5 +52,4 @@ class DelAvalonIdFromNew(BaseEvent):

def register(session, plugins_presets):
    '''Register plugin. Called when used as a plugin.'''

    DelAvalonIdFromNew(session, plugins_presets).register()
@ -1,213 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
import ftrack_api
|
||||
from pype.ftrack import BaseEvent, lib
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
|
||||
class SyncHierarchicalAttrs(BaseEvent):
|
||||
# After sync to avalon event!
|
||||
priority = 101
|
||||
db_con = DbConnector()
|
||||
ca_mongoid = lib.get_ca_mongoid()
|
||||
|
||||
def launch(self, session, event):
|
||||
# Filter entities and changed values if it makes sence to run script
|
||||
processable = []
|
||||
processable_ent = {}
|
||||
for ent in event['data']['entities']:
|
||||
# Ignore entities that are not tasks or projects
|
||||
if ent['entityType'].lower() not in ['task', 'show']:
|
||||
continue
|
||||
|
||||
action = ent.get("action")
|
||||
# skip if remove (Entity does not exist in Ftrack)
|
||||
if action == "remove":
|
||||
continue
|
||||
|
||||
# When entity was add we don't care about keys
|
||||
if action != "add":
|
||||
keys = ent.get('keys')
|
||||
if not keys:
|
||||
continue
|
||||
|
||||
entity = session.get(self._get_entity_type(ent), ent['entityId'])
|
||||
processable.append(ent)
|
||||
|
||||
processable_ent[ent['entityId']] = {
|
||||
"entity": entity,
|
||||
"action": action,
|
||||
"link": entity["link"]
|
||||
}
|
||||
|
||||
if not processable:
|
||||
return True
|
||||
|
||||
# Find project of entities
|
||||
ft_project = None
|
||||
for entity_dict in processable_ent.values():
|
||||
try:
|
||||
base_proj = entity_dict['link'][0]
|
||||
except Exception:
|
||||
continue
|
||||
ft_project = session.get(base_proj['type'], base_proj['id'])
|
||||
break
|
||||
|
||||
# check if project is set to auto-sync
|
||||
if (
|
||||
ft_project is None or
|
||||
'avalon_auto_sync' not in ft_project['custom_attributes'] or
|
||||
ft_project['custom_attributes']['avalon_auto_sync'] is False
|
||||
):
|
||||
return True
|
||||
|
||||
# Get hierarchical custom attributes from "avalon" group
|
||||
custom_attributes = {}
|
||||
query = 'CustomAttributeGroup where name is "avalon"'
|
||||
all_avalon_attr = session.query(query).one()
|
||||
for cust_attr in all_avalon_attr['custom_attribute_configurations']:
|
||||
if 'avalon_' in cust_attr['key']:
|
||||
continue
|
||||
if not cust_attr['is_hierarchical']:
|
||||
continue
|
||||
custom_attributes[cust_attr['key']] = cust_attr
|
||||
|
||||
if not custom_attributes:
|
||||
return True
|
||||
|
||||
self.db_con.install()
|
||||
self.db_con.Session['AVALON_PROJECT'] = ft_project['full_name']
|
||||
|
||||
for ent in processable:
|
||||
entity_dict = processable_ent[ent['entityId']]
|
||||
|
||||
entity = entity_dict["entity"]
|
||||
ent_path = "/".join([ent["name"] for ent in entity_dict['link']])
|
||||
action = entity_dict["action"]
|
||||
|
||||
keys_to_process = {}
|
||||
if action == "add":
|
||||
# Store all custom attributes when entity was added
|
||||
for key in custom_attributes:
|
||||
keys_to_process[key] = entity['custom_attributes'][key]
|
||||
else:
|
||||
# Update only updated keys
|
||||
for key in ent['keys']:
|
||||
if key in custom_attributes:
|
||||
keys_to_process[key] = entity['custom_attributes'][key]
|
||||
|
||||
processed_keys = self.get_hierarchical_values(
|
||||
keys_to_process, entity
|
||||
)
|
||||
# Do the processing of values
|
||||
self.update_hierarchical_attribute(entity, processed_keys, ent_path)
|
||||
|
||||
self.db_con.uninstall()
|
||||
|
||||
return True
|
||||
|
||||
def get_hierarchical_values(self, keys_dict, entity):
|
||||
# check already set values
|
||||
_set_keys = []
|
||||
for key, value in keys_dict.items():
|
||||
if value is not None:
|
||||
_set_keys.append(key)
|
||||
|
||||
# pop set values from keys_dict
|
||||
set_keys = {}
|
||||
for key in _set_keys:
|
||||
set_keys[key] = keys_dict.pop(key)
|
||||
|
||||
# find if entity has set values and pop them out
|
||||
keys_to_pop = []
|
||||
for key in keys_dict.keys():
|
||||
_val = entity["custom_attributes"][key]
|
||||
if _val:
|
||||
keys_to_pop.append(key)
|
||||
set_keys[key] = _val
|
||||
|
||||
for key in keys_to_pop:
|
||||
keys_dict.pop(key)
|
||||
|
||||
# if there are not keys to find value return found
|
||||
if not keys_dict:
|
||||
return set_keys
|
||||
|
||||
# end recursion if entity is project
|
||||
if entity.entity_type.lower() == "project":
|
||||
for key, value in keys_dict.items():
|
||||
set_keys[key] = value
|
||||
|
||||
else:
|
||||
result = self.get_hierarchical_values(keys_dict, entity["parent"])
|
||||
for key, value in result.items():
|
||||
set_keys[key] = value
|
||||
|
||||
return set_keys
|
||||
|
||||
def update_hierarchical_attribute(self, entity, keys_dict, ent_path):
|
||||
# TODO store all keys at once for entity
|
||||
custom_attributes = entity.get('custom_attributes')
|
||||
if not custom_attributes:
|
||||
return
|
||||
|
||||
mongoid = custom_attributes.get(self.ca_mongoid)
|
||||
if not mongoid:
|
||||
return
|
||||
|
||||
try:
|
||||
mongoid = ObjectId(mongoid)
|
||||
except Exception:
|
||||
return
|
||||
|
||||
mongo_entity = self.db_con.find_one({'_id': mongoid})
|
||||
if not mongo_entity:
|
||||
return
|
||||
|
||||
changed_keys = {}
|
||||
data = mongo_entity.get('data') or {}
|
||||
for key, value in keys_dict.items():
|
||||
cur_value = data.get(key)
|
||||
if cur_value:
|
||||
if cur_value == value:
|
||||
continue
|
||||
changed_keys[key] = value
|
||||
data[key] = value
|
||||
|
||||
if not changed_keys:
|
||||
return
|
||||
|
||||
self.log.debug(
|
||||
"{} - updated hierarchical attributes: {}".format(
|
||||
ent_path, str(changed_keys)
|
||||
)
|
||||
)
|
||||
|
||||
self.db_con.update_many(
|
||||
{'_id': mongoid},
|
||||
{'$set': {'data': data}}
|
||||
)
|
||||
|
||||
for child in entity.get('children', []):
|
||||
_keys_dict = {}
|
||||
for key, value in keys_dict.items():
|
||||
if key not in child.get('custom_attributes', {}):
|
||||
continue
|
||||
child_value = child['custom_attributes'][key]
|
||||
if child_value is not None:
|
||||
continue
|
||||
_keys_dict[key] = value
|
||||
|
||||
if not _keys_dict:
|
||||
continue
|
||||
child_path = "/".join([ent["name"] for ent in child['link']])
|
||||
self.update_hierarchical_attribute(child, _keys_dict, child_path)
|
||||
|
||||
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
SyncHierarchicalAttrs(session, plugins_presets).register()
|
||||
File diff suppressed because it is too large
@ -1,4 +1,3 @@
|
|||
import ftrack_api
|
||||
from pype.ftrack import BaseEvent
|
||||
|
||||
|
||||
|
|
@ -26,28 +25,34 @@ class ThumbnailEvents(BaseEvent):
|
|||
# Update task thumbnail from published version
|
||||
# if (entity['entityType'] == 'assetversion' and
|
||||
# entity['action'] == 'encoded'):
|
||||
if (
|
||||
entity['entityType'] == 'assetversion'
|
||||
and 'thumbid' in (entity.get('keys') or [])
|
||||
elif (
|
||||
entity['entityType'] == 'assetversion' and
|
||||
entity['action'] != 'remove' and
|
||||
'thumbid' in (entity.get('keys') or [])
|
||||
):
|
||||
|
||||
version = session.get('AssetVersion', entity['entityId'])
|
||||
if not version:
|
||||
continue
|
||||
|
||||
thumbnail = version.get('thumbnail')
|
||||
if thumbnail:
|
||||
parent = version['asset']['parent']
|
||||
task = version['task']
|
||||
parent['thumbnail_id'] = version['thumbnail_id']
|
||||
if parent.entity_type.lower() == "project":
|
||||
name = parent["full_name"]
|
||||
else:
|
||||
name = parent["name"]
|
||||
msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)
|
||||
if not thumbnail:
|
||||
continue
|
||||
|
||||
if task:
|
||||
task['thumbnail_id'] = version['thumbnail_id']
|
||||
msg += " and task [ {} ]".format(task["name"])
|
||||
parent = version['asset']['parent']
|
||||
task = version['task']
|
||||
parent['thumbnail_id'] = version['thumbnail_id']
|
||||
if parent.entity_type.lower() == "project":
|
||||
name = parent["full_name"]
|
||||
else:
|
||||
name = parent["name"]
|
||||
msg = '>>> Updating thumbnail for shot [ {} ]'.format(name)
|
||||
|
||||
self.log.info(msg)
|
||||
if task:
|
||||
task['thumbnail_id'] = version['thumbnail_id']
|
||||
msg += " and task [ {} ]".format(task["name"])
|
||||
|
||||
self.log.info(msg)
|
||||
|
||||
try:
|
||||
session.commit()
|
||||
|
|
@ -57,5 +62,4 @@ class ThumbnailEvents(BaseEvent):
|
|||
|
||||
def register(session, plugins_presets):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
ThumbnailEvents(session, plugins_presets).register()
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
import ftrack_api
|
||||
from pype.ftrack import BaseEvent, lib
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from bson.objectid import ObjectId
|
||||
from pypeapp import config
|
||||
from pypeapp import Anatomy
|
||||
import subprocess
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
from pype.ftrack import BaseEvent
|
||||
from pype.ftrack.lib.avalon_sync import CustAttrIdKey
|
||||
from pype.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
from pypeapp import config
|
||||
from pypeapp import Anatomy
|
||||
|
||||
|
||||
class UserAssigmentEvent(BaseEvent):
|
||||
|
|
@ -36,7 +39,6 @@ class UserAssigmentEvent(BaseEvent):
|
|||
"""
|
||||
|
||||
db_con = DbConnector()
|
||||
ca_mongoid = lib.get_ca_mongoid()
|
||||
|
||||
def error(self, *err):
|
||||
for e in err:
|
||||
|
|
@ -105,7 +107,7 @@ class UserAssigmentEvent(BaseEvent):
|
|||
self.db_con.Session['AVALON_PROJECT'] = task['project']['full_name']
|
||||
|
||||
avalon_entity = None
|
||||
parent_id = parent['custom_attributes'].get(self.ca_mongoid)
|
||||
parent_id = parent['custom_attributes'].get(CustAttrIdKey)
|
||||
if parent_id:
|
||||
parent_id = ObjectId(parent_id)
|
||||
avalon_entity = self.db_con.find_one({
|
||||
|
|
@ -205,7 +207,9 @@ class UserAssigmentEvent(BaseEvent):
|
|||
# formatting work dir is easiest part as we can use whole path
|
||||
work_dir = anatomy.format(data)['avalon']['work']
|
||||
# we also need publish but not whole
|
||||
publish = anatomy.format_all(data)['partial']['avalon']['publish']
|
||||
filled_all = anatomy.format_all(data)
|
||||
publish = filled_all['avalon']['publish']
|
||||
|
||||
# now find path to {asset}
|
||||
m = re.search("(^.+?{})".format(data['asset']),
|
||||
publish)
|
||||
|
|
|
|||
|
|
@ -1,73 +1,134 @@
|
|||
import ftrack_api
|
||||
from pype.ftrack import BaseEvent
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
class VersionToTaskStatus(BaseEvent):
|
||||
|
||||
# Presets usage
|
||||
default_status_mapping = {}
|
||||
|
||||
def launch(self, session, event):
|
||||
'''Propagates status from version to task when changed'''
|
||||
|
||||
# start of event procedure ----------------------------------
|
||||
for entity in event['data'].get('entities', []):
|
||||
# Filter non-assetversions
|
||||
if (
|
||||
entity['entityType'] == 'assetversion' and
|
||||
'statusid' in (entity.get('keys') or [])
|
||||
):
|
||||
# Filter AssetVersions
|
||||
if entity["entityType"] != "assetversion":
|
||||
continue
|
||||
|
||||
version = session.get('AssetVersion', entity['entityId'])
|
||||
try:
|
||||
version_status = session.get(
|
||||
'Status', entity['changes']['statusid']['new']
|
||||
)
|
||||
except Exception:
|
||||
# Skip if statusid not in keys (in changes)
|
||||
keys = entity.get("keys")
|
||||
if not keys or "statusid" not in keys:
|
||||
continue
|
||||
|
||||
# Get new version task name
|
||||
version_status_id = (
|
||||
entity
|
||||
.get("changes", {})
|
||||
.get("statusid", {})
|
||||
.get("new", {})
|
||||
)
|
||||
|
||||
# Just check that `new` is set to any value
|
||||
if not version_status_id:
|
||||
continue
|
||||
|
||||
try:
|
||||
version_status = session.get("Status", version_status_id)
|
||||
except Exception:
|
||||
self.log.warning(
|
||||
"Troubles with query status id [ {} ]".format(
|
||||
version_status_id
|
||||
),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
if not version_status:
|
||||
continue
|
||||
|
||||
version_status_orig = version_status["name"]
|
||||
|
||||
# Load status mapping from presets
|
||||
status_mapping = (
|
||||
config.get_presets()
|
||||
.get("ftrack", {})
|
||||
.get("ftrack_config", {})
|
||||
.get("status_version_to_task")
|
||||
) or self.default_status_mapping
|
||||
|
||||
# Skip if mapping is empty
|
||||
if not status_mapping:
|
||||
continue
|
||||
|
||||
# Lower version status name and check if has mapping
|
||||
version_status = version_status_orig.lower()
|
||||
new_status_names = []
|
||||
mapped = status_mapping.get(version_status)
|
||||
if mapped:
|
||||
new_status_names.extend(list(mapped))
|
||||
|
||||
new_status_names.append(version_status)
|
||||
|
||||
self.log.debug(
|
||||
"Processing AssetVersion status change: [ {} ]".format(
|
||||
version_status_orig
|
||||
)
|
||||
)
|
||||
|
||||
# Lower all names from presets
|
||||
new_status_names = [name.lower() for name in new_status_names]
|
||||
|
||||
# Get entities necessary for processing
|
||||
version = session.get("AssetVersion", entity["entityId"])
|
||||
task = version.get("task")
|
||||
if not task:
|
||||
continue
|
||||
|
||||
project_schema = task["project"]["project_schema"]
|
||||
# Get all available statuses for Task
|
||||
statuses = project_schema.get_statuses("Task", task["type_id"])
|
||||
# map lowered status name with it's object
|
||||
stat_names_low = {
|
||||
status["name"].lower(): status for status in statuses
|
||||
}
|
||||
|
||||
new_status = None
|
||||
for status_name in new_status_names:
|
||||
if status_name not in stat_names_low:
|
||||
continue
|
||||
task_status = version_status
|
||||
task = version['task']
|
||||
self.log.info('>>> version status: [ {} ]'.format(
|
||||
version_status['name']))
|
||||
|
||||
status_to_set = None
|
||||
# Filter to versions with status change to "render complete"
|
||||
if version_status['name'].lower() == 'reviewed':
|
||||
status_to_set = 'Change requested'
|
||||
# store object of found status
|
||||
new_status = stat_names_low[status_name]
|
||||
self.log.debug("Status to set: [ {} ]".format(
|
||||
new_status["name"]
|
||||
))
|
||||
break
|
||||
|
||||
if version_status['name'].lower() == 'approved':
|
||||
status_to_set = 'Complete'
|
||||
# Skip if status names were not found for paticulat entity
|
||||
if not new_status:
|
||||
self.log.warning(
|
||||
"Any of statuses from presets can be set: {}".format(
|
||||
str(new_status_names)
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
self.log.info(
|
||||
'>>> status to set: [ {} ]'.format(status_to_set))
|
||||
# Get full path to task for logging
|
||||
ent_path = "/".join([ent["name"] for ent in task["link"]])
|
||||
|
||||
if status_to_set is not None:
|
||||
query = 'Status where name is "{}"'.format(status_to_set)
|
||||
try:
|
||||
task_status = session.query(query).one()
|
||||
except Exception:
|
||||
self.log.info(
|
||||
'!!! status was not found in Ftrack [ {} ]'.format(
|
||||
status_to_set
|
||||
))
|
||||
continue
|
||||
|
||||
# Proceed if the task status was set
|
||||
if task_status is not None:
|
||||
# Get path to task
|
||||
path = task['name']
|
||||
for p in task['ancestors']:
|
||||
path = p['name'] + '/' + path
|
||||
|
||||
# Setting task status
|
||||
try:
|
||||
task['status'] = task_status
|
||||
session.commit()
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
self.log.warning('!!! [ {} ] status couldnt be set:\
|
||||
[ {} ]'.format(path, e))
|
||||
session.rollback()
|
||||
else:
|
||||
self.log.info('>>> [ {} ] updated to [ {} ]'.format(
|
||||
path, task_status['name']))
|
||||
# Setting task status
|
||||
try:
|
||||
task["status"] = new_status
|
||||
session.commit()
|
||||
self.log.debug("[ {} ] Status updated to [ {} ]".format(
|
||||
ent_path, new_status['name']
|
||||
))
|
||||
except Exception:
|
||||
session.rollback()
|
||||
self.log.warning(
|
||||
"[ {} ]Status couldn't be set".format(ent_path),
|
||||
exc_info=True
|
||||
)
|
||||
|
||||
|
||||
def register(session, plugins_presets):
|
||||
|
|
|
|||
|
|
@ -7,11 +7,9 @@ import socket
|
|||
import argparse
|
||||
import atexit
|
||||
import time
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import ftrack_api
|
||||
from pype.ftrack.lib import credentials
|
||||
from pype.ftrack.ftrack_server import FtrackServer
|
||||
from pype.ftrack.ftrack_server.lib import (
|
||||
ftrack_events_mongo_settings, check_ftrack_url
|
||||
)
|
||||
|
|
@ -67,9 +65,8 @@ def validate_credentials(url, user, api):
|
|||
except Exception as e:
|
||||
print(
|
||||
'ERROR: Can\'t log into Ftrack with used credentials:'
|
||||
' Ftrack server: "{}" // Username: {} // API key: {}'.format(
|
||||
url, user, api
|
||||
))
|
||||
' Ftrack server: "{}" // Username: {} // API key: {}'
|
||||
).format(url, user, api)
|
||||
return False
|
||||
|
||||
print('DEBUG: Credentials Username: "{}", API key: "{}" are valid.'.format(
|
||||
|
|
@ -147,9 +144,9 @@ def legacy_server(ftrack_url):
|
|||
).format(str(max_fail_count), str(wait_time_after_max_fail)))
|
||||
subproc_failed_count += 1
|
||||
elif ((
|
||||
datetime.datetime.now() - subproc_last_failed
|
||||
).seconds > wait_time_after_max_fail):
|
||||
subproc_failed_count = 0
|
||||
datetime.datetime.now() - subproc_last_failed
|
||||
).seconds > wait_time_after_max_fail):
|
||||
subproc_failed_count = 0
|
||||
|
||||
# If thread failed test Ftrack and Mongo connection
|
||||
elif subproc.poll() is not None:
|
||||
|
|
@ -277,9 +274,9 @@ def main_loop(ftrack_url):
|
|||
).format(str(max_fail_count), str(wait_time_after_max_fail)))
|
||||
storer_failed_count += 1
|
||||
elif ((
|
||||
datetime.datetime.now() - storer_last_failed
|
||||
).seconds > wait_time_after_max_fail):
|
||||
storer_failed_count = 0
|
||||
datetime.datetime.now() - storer_last_failed
|
||||
).seconds > wait_time_after_max_fail):
|
||||
storer_failed_count = 0
|
||||
|
||||
# If thread failed test Ftrack and Mongo connection
|
||||
elif not storer_thread.isAlive():
|
||||
|
|
@ -313,13 +310,13 @@ def main_loop(ftrack_url):
|
|||
processor_failed_count += 1
|
||||
|
||||
elif ((
|
||||
datetime.datetime.now() - processor_last_failed
|
||||
).seconds > wait_time_after_max_fail):
|
||||
processor_failed_count = 0
|
||||
datetime.datetime.now() - processor_last_failed
|
||||
).seconds > wait_time_after_max_fail):
|
||||
processor_failed_count = 0
|
||||
|
||||
# If thread failed test Ftrack and Mongo connection
|
||||
elif not processor_thread.isAlive():
|
||||
if storer_thread.mongo_error:
|
||||
if processor_thread.mongo_error:
|
||||
raise Exception(
|
||||
"Exiting because have issue with acces to MongoDB"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,10 +1,32 @@
|
|||
import os
|
||||
import sys
|
||||
import logging
|
||||
import getpass
|
||||
import atexit
|
||||
import tempfile
|
||||
import threading
|
||||
import datetime
|
||||
import time
|
||||
import queue
|
||||
import pymongo
|
||||
|
||||
import requests
|
||||
import ftrack_api
|
||||
import ftrack_api.session
|
||||
import ftrack_api.cache
|
||||
import ftrack_api.operation
|
||||
import ftrack_api._centralized_storage_scenario
|
||||
import ftrack_api.event
|
||||
from ftrack_api.logging import LazyLogMessage as L
|
||||
try:
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
except ImportError:
|
||||
from urlparse import urlparse, parse_qs
|
||||
|
||||
from pypeapp import Logger
|
||||
|
||||
from pype.ftrack.lib.custom_db_connector import DbConnector
|
||||
|
||||
|
||||
def ftrack_events_mongo_settings():
|
||||
host = None
|
||||
|
|
@ -49,7 +71,9 @@ def ftrack_events_mongo_settings():
|
|||
|
||||
|
||||
def get_ftrack_event_mongo_info():
|
||||
host, port, database, username, password, collection, auth_db = ftrack_events_mongo_settings()
|
||||
host, port, database, username, password, collection, auth_db = (
|
||||
ftrack_events_mongo_settings()
|
||||
)
|
||||
user_pass = ""
|
||||
if username and password:
|
||||
user_pass = "{}:{}@".format(username, password)
|
||||
|
|
@ -97,3 +121,334 @@ def check_ftrack_url(url, log_errors=True):
|
|||
print('DEBUG: Ftrack server {} is accessible.'.format(url))
|
||||
|
||||
return url
|
||||
|
||||
|
||||
class StorerEventHub(ftrack_api.event.hub.EventHub):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.sock = kwargs.pop("sock")
|
||||
super(StorerEventHub, self).__init__(*args, **kwargs)
|
||||
|
||||
def _handle_packet(self, code, packet_identifier, path, data):
|
||||
"""Override `_handle_packet` which extend heartbeat"""
|
||||
code_name = self._code_name_mapping[code]
|
||||
if code_name == "heartbeat":
|
||||
# Reply with heartbeat.
|
||||
self.sock.sendall(b"storer")
|
||||
return self._send_packet(self._code_name_mapping['heartbeat'])
|
||||
|
||||
elif code_name == "connect":
|
||||
event = ftrack_api.event.base.Event(
|
||||
topic="pype.storer.started",
|
||||
data={},
|
||||
source={
|
||||
"id": self.id,
|
||||
"user": {"username": self._api_user}
|
||||
}
|
||||
)
|
||||
self._event_queue.put(event)
|
||||
|
||||
return super(StorerEventHub, self)._handle_packet(
|
||||
code, packet_identifier, path, data
|
||||
)
|
||||
|
||||
|
||||
class ProcessEventHub(ftrack_api.event.hub.EventHub):
|
||||
url, database, table_name = get_ftrack_event_mongo_info()
|
||||
|
||||
is_table_created = False
|
||||
pypelog = Logger().get_logger("Session Processor")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.dbcon = DbConnector(
|
||||
mongo_url=self.url,
|
||||
database_name=self.database,
|
||||
table_name=self.table_name
|
||||
)
|
||||
self.sock = kwargs.pop("sock")
|
||||
super(ProcessEventHub, self).__init__(*args, **kwargs)
|
||||
|
||||
def prepare_dbcon(self):
|
||||
try:
|
||||
self.dbcon.install()
|
||||
self.dbcon._database.list_collection_names()
|
||||
except pymongo.errors.AutoReconnect:
|
||||
self.pypelog.error(
|
||||
"Mongo server \"{}\" is not responding, exiting.".format(
|
||||
os.environ["AVALON_MONGO"]
|
||||
)
|
||||
)
|
||||
sys.exit(0)
|
||||
|
||||
except pymongo.errors.OperationFailure:
|
||||
self.pypelog.error((
|
||||
"Error with Mongo access, probably permissions."
|
||||
"Check if exist database with name \"{}\""
|
||||
" and collection \"{}\" inside."
|
||||
).format(self.database, self.table_name))
|
||||
self.sock.sendall(b"MongoError")
|
||||
sys.exit(0)
|
||||
|
||||
def wait(self, duration=None):
|
||||
"""Overriden wait
|
||||
|
||||
Event are loaded from Mongo DB when queue is empty. Handled event is
|
||||
set as processed in Mongo DB.
|
||||
"""
|
||||
started = time.time()
|
||||
self.prepare_dbcon()
|
||||
while True:
|
||||
try:
|
||||
event = self._event_queue.get(timeout=0.1)
|
||||
except queue.Empty:
|
||||
if not self.load_events():
|
||||
time.sleep(0.5)
|
||||
else:
|
||||
try:
|
||||
self._handle(event)
|
||||
self.dbcon.update_one(
|
||||
{"id": event["id"]},
|
||||
{"$set": {"pype_data.is_processed": True}}
|
||||
)
|
||||
except pymongo.errors.AutoReconnect:
|
||||
self.pypelog.error((
|
||||
"Mongo server \"{}\" is not responding, exiting."
|
||||
).format(os.environ["AVALON_MONGO"]))
|
||||
sys.exit(0)
|
||||
# Additional special processing of events.
|
||||
if event['topic'] == 'ftrack.meta.disconnected':
|
||||
break
|
||||
|
||||
if duration is not None:
|
||||
if (time.time() - started) > duration:
|
||||
break
|
||||
|
||||
def load_events(self):
|
||||
"""Load not processed events sorted by stored date"""
|
||||
ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
|
||||
result = self.dbcon.delete_many({
|
||||
"pype_data.stored": {"$lte": ago_date},
|
||||
"pype_data.is_processed": True
|
||||
})
|
||||
|
||||
not_processed_events = self.dbcon.find(
|
||||
{"pype_data.is_processed": False}
|
||||
).sort(
|
||||
[("pype_data.stored", pymongo.ASCENDING)]
|
||||
)
|
||||
|
||||
found = False
|
||||
for event_data in not_processed_events:
|
||||
new_event_data = {
|
||||
k: v for k, v in event_data.items()
|
||||
if k not in ["_id", "pype_data"]
|
||||
}
|
||||
try:
|
||||
event = ftrack_api.event.base.Event(**new_event_data)
|
||||
except Exception:
|
||||
self.logger.exception(L(
|
||||
'Failed to convert payload into event: {0}',
|
||||
event_data
|
||||
))
|
||||
continue
|
||||
found = True
|
||||
self._event_queue.put(event)
|
||||
|
||||
return found
|
||||
|
||||
def _handle_packet(self, code, packet_identifier, path, data):
|
||||
"""Override `_handle_packet` which skip events and extend heartbeat"""
|
||||
code_name = self._code_name_mapping[code]
|
||||
if code_name == "event":
|
||||
return
|
||||
if code_name == "heartbeat":
|
||||
self.sock.sendall(b"processor")
|
||||
return self._send_packet(self._code_name_mapping["heartbeat"])
|
||||
|
||||
return super()._handle_packet(code, packet_identifier, path, data)
|
||||
|
||||
|
||||
class UserEventHub(ftrack_api.event.hub.EventHub):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.sock = kwargs.pop("sock")
|
||||
super(UserEventHub, self).__init__(*args, **kwargs)
|
||||
|
||||
def _handle_packet(self, code, packet_identifier, path, data):
|
||||
"""Override `_handle_packet` which extend heartbeat"""
|
||||
code_name = self._code_name_mapping[code]
|
||||
if code_name == "heartbeat":
|
||||
# Reply with heartbeat.
|
||||
self.sock.sendall(b"hearbeat")
|
||||
return self._send_packet(self._code_name_mapping['heartbeat'])
|
||||
|
||||
elif code_name == "connect":
|
||||
event = ftrack_api.event.base.Event(
|
||||
topic="pype.storer.started",
|
||||
data={},
|
||||
source={
|
||||
"id": self.id,
|
||||
"user": {"username": self._api_user}
|
||||
}
|
||||
)
|
||||
self._event_queue.put(event)
|
||||
|
||||
return super(UserEventHub, self)._handle_packet(
|
||||
code, packet_identifier, path, data
|
||||
)
|
||||
|
||||
|
||||
class SocketSession(ftrack_api.session.Session):
|
||||
'''An isolated session for interaction with an ftrack server.'''
|
||||
def __init__(
|
||||
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
|
||||
plugin_paths=None, cache=None, cache_key_maker=None,
|
||||
auto_connect_event_hub=None, schema_cache_path=None,
|
||||
plugin_arguments=None, sock=None, Eventhub=None
|
||||
):
|
||||
super(ftrack_api.session.Session, self).__init__()
|
||||
self.logger = logging.getLogger(
|
||||
__name__ + '.' + self.__class__.__name__
|
||||
)
|
||||
self._closed = False
|
||||
|
||||
if server_url is None:
|
||||
server_url = os.environ.get('FTRACK_SERVER')
|
||||
|
||||
if not server_url:
|
||||
raise TypeError(
|
||||
'Required "server_url" not specified. Pass as argument or set '
|
||||
'in environment variable FTRACK_SERVER.'
|
||||
)
|
||||
|
||||
self._server_url = server_url
|
||||
|
||||
if api_key is None:
|
||||
api_key = os.environ.get(
|
||||
'FTRACK_API_KEY',
|
||||
# Backwards compatibility
|
||||
os.environ.get('FTRACK_APIKEY')
|
||||
)
|
||||
|
||||
if not api_key:
|
||||
raise TypeError(
|
||||
'Required "api_key" not specified. Pass as argument or set in '
|
||||
'environment variable FTRACK_API_KEY.'
|
||||
)
|
||||
|
||||
self._api_key = api_key
|
||||
|
||||
if api_user is None:
|
||||
api_user = os.environ.get('FTRACK_API_USER')
|
||||
if not api_user:
|
||||
try:
|
||||
api_user = getpass.getuser()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not api_user:
|
||||
raise TypeError(
|
||||
'Required "api_user" not specified. Pass as argument, set in '
|
||||
'environment variable FTRACK_API_USER or one of the standard '
|
||||
'environment variables used by Python\'s getpass module.'
|
||||
)
|
||||
|
||||
self._api_user = api_user
|
||||
|
||||
# Currently pending operations.
|
||||
self.recorded_operations = ftrack_api.operation.Operations()
|
||||
self.record_operations = True
|
||||
|
||||
self.cache_key_maker = cache_key_maker
|
||||
if self.cache_key_maker is None:
|
||||
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
|
||||
|
||||
# Enforce always having a memory cache at top level so that the same
|
||||
# in-memory instance is returned from session.
|
||||
self.cache = ftrack_api.cache.LayeredCache([
|
||||
ftrack_api.cache.MemoryCache()
|
||||
])
|
||||
|
||||
if cache is not None:
|
||||
if callable(cache):
|
||||
cache = cache(self)
|
||||
|
||||
if cache is not None:
|
||||
self.cache.caches.append(cache)
|
||||
|
||||
self._managed_request = None
|
||||
self._request = requests.Session()
|
||||
self._request.auth = ftrack_api.session.SessionAuthentication(
|
||||
self._api_key, self._api_user
|
||||
)
|
||||
|
||||
self.auto_populate = auto_populate
|
||||
|
||||
# Fetch server information and in doing so also check credentials.
|
||||
self._server_information = self._fetch_server_information()
|
||||
|
||||
# Now check compatibility of server based on retrieved information.
|
||||
self.check_server_compatibility()
|
||||
|
||||
# Construct event hub and load plugins.
|
||||
if Eventhub is None:
|
||||
Eventhub = ftrack_api.event.hub.EventHub
|
||||
self._event_hub = Eventhub(
|
||||
self._server_url,
|
||||
self._api_user,
|
||||
self._api_key,
|
||||
sock=sock
|
||||
)
|
||||
|
||||
self._auto_connect_event_hub_thread = None
|
||||
if auto_connect_event_hub in (None, True):
|
||||
# Connect to event hub in background thread so as not to block main
|
||||
# session usage waiting for event hub connection.
|
||||
self._auto_connect_event_hub_thread = threading.Thread(
|
||||
target=self._event_hub.connect
|
||||
)
|
||||
self._auto_connect_event_hub_thread.daemon = True
|
||||
self._auto_connect_event_hub_thread.start()
|
||||
|
||||
# To help with migration from auto_connect_event_hub default changing
|
||||
# from True to False.
|
||||
self._event_hub._deprecation_warning_auto_connect = (
|
||||
auto_connect_event_hub is None
|
||||
)
|
||||
|
||||
# Register to auto-close session on exit.
|
||||
atexit.register(self.close)
|
||||
|
||||
self._plugin_paths = plugin_paths
|
||||
if self._plugin_paths is None:
|
||||
self._plugin_paths = os.environ.get(
|
||||
'FTRACK_EVENT_PLUGIN_PATH', ''
|
||||
).split(os.pathsep)
|
||||
|
||||
self._discover_plugins(plugin_arguments=plugin_arguments)
|
||||
|
||||
# TODO: Make schemas read-only and non-mutable (or at least without
|
||||
# rebuilding types)?
|
||||
if schema_cache_path is not False:
|
||||
if schema_cache_path is None:
|
||||
schema_cache_path = os.environ.get(
|
||||
'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
|
||||
)
|
||||
|
||||
schema_cache_path = os.path.join(
|
||||
schema_cache_path, 'ftrack_api_schema_cache.json'
|
||||
)
|
||||
|
||||
self.schemas = self._load_schemas(schema_cache_path)
|
||||
self.types = self._build_entity_type_classes(self.schemas)
|
||||
|
||||
ftrack_api._centralized_storage_scenario.register(self)
|
||||
|
||||
self._configure_locations()
|
||||
self.event_hub.publish(
|
||||
ftrack_api.event.base.Event(
|
||||
topic='ftrack.api.session.ready',
|
||||
data=dict(
|
||||
session=self
|
||||
)
|
||||
),
|
||||
synchronous=True
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,292 +0,0 @@
|
|||
import logging
|
||||
import os
|
||||
import atexit
|
||||
import datetime
|
||||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
import requests
|
||||
import queue
|
||||
import pymongo
|
||||
|
||||
import ftrack_api
|
||||
import ftrack_api.session
|
||||
import ftrack_api.cache
|
||||
import ftrack_api.operation
|
||||
import ftrack_api._centralized_storage_scenario
|
||||
import ftrack_api.event
|
||||
from ftrack_api.logging import LazyLogMessage as L
|
||||
|
||||
from pype.ftrack.lib.custom_db_connector import DbConnector
|
||||
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
|
||||
from pypeapp import Logger
|
||||
|
||||
log = Logger().get_logger("Session processor")
|
||||
|
||||
|
||||
class ProcessEventHub(ftrack_api.event.hub.EventHub):
|
||||
url, database, table_name = get_ftrack_event_mongo_info()
|
||||
|
||||
is_table_created = False
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.dbcon = DbConnector(
|
||||
mongo_url=self.url,
|
||||
database_name=self.database,
|
||||
table_name=self.table_name
|
||||
)
|
||||
self.sock = kwargs.pop("sock")
|
||||
super(ProcessEventHub, self).__init__(*args, **kwargs)
|
||||
|
||||
def prepare_dbcon(self):
|
||||
try:
|
||||
self.dbcon.install()
|
||||
self.dbcon._database.collection_names()
|
||||
except pymongo.errors.AutoReconnect:
|
||||
log.error("Mongo server \"{}\" is not responding, exiting.".format(
|
||||
os.environ["AVALON_MONGO"]
|
||||
))
|
||||
sys.exit(0)
|
||||
|
||||
except pymongo.errors.OperationFailure:
|
||||
log.error((
|
||||
"Error with Mongo access, probably permissions."
|
||||
"Check if exist database with name \"{}\""
|
||||
" and collection \"{}\" inside."
|
||||
).format(self.database, self.table_name))
|
||||
self.sock.sendall(b"MongoError")
|
||||
sys.exit(0)
|
||||
|
||||
def wait(self, duration=None):
|
||||
"""Overriden wait
|
||||
|
||||
Event are loaded from Mongo DB when queue is empty. Handled event is
|
||||
set as processed in Mongo DB.
|
||||
"""
|
||||
started = time.time()
|
||||
self.prepare_dbcon()
|
||||
while True:
|
||||
try:
|
||||
event = self._event_queue.get(timeout=0.1)
|
||||
except queue.Empty:
|
||||
if not self.load_events():
|
||||
time.sleep(0.5)
|
||||
else:
|
||||
try:
|
||||
self._handle(event)
|
||||
self.dbcon.update_one(
|
||||
{"id": event["id"]},
|
||||
{"$set": {"pype_data.is_processed": True}}
|
||||
)
|
||||
except pymongo.errors.AutoReconnect:
|
||||
log.error((
|
||||
"Mongo server \"{}\" is not responding, exiting."
|
||||
).format(os.environ["AVALON_MONGO"]))
|
||||
sys.exit(0)
|
||||
# Additional special processing of events.
|
||||
if event['topic'] == 'ftrack.meta.disconnected':
|
||||
break
|
||||
|
||||
if duration is not None:
|
||||
if (time.time() - started) > duration:
|
||||
break
|
||||
|
||||
def load_events(self):
|
||||
"""Load not processed events sorted by stored date"""
|
||||
ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
|
||||
result = self.dbcon.delete_many({
|
||||
"pype_data.stored": {"$lte": ago_date},
|
||||
"pype_data.is_processed": True
|
||||
})
|
||||
|
||||
not_processed_events = self.dbcon.find(
|
||||
{"pype_data.is_processed": False}
|
||||
).sort(
|
||||
[("pype_data.stored", pymongo.ASCENDING)]
|
||||
)
|
||||
|
||||
found = False
|
||||
for event_data in not_processed_events:
|
||||
new_event_data = {
|
||||
k: v for k, v in event_data.items()
|
||||
if k not in ["_id", "pype_data"]
|
||||
}
|
||||
try:
|
||||
event = ftrack_api.event.base.Event(**new_event_data)
|
||||
except Exception:
|
||||
self.logger.exception(L(
|
||||
'Failed to convert payload into event: {0}',
|
||||
event_data
|
||||
))
|
||||
continue
|
||||
found = True
|
||||
self._event_queue.put(event)
|
||||
|
||||
return found
|
||||
|
||||
def _handle_packet(self, code, packet_identifier, path, data):
|
||||
"""Override `_handle_packet` which skip events and extend heartbeat"""
|
||||
code_name = self._code_name_mapping[code]
|
||||
if code_name == "event":
|
||||
return
|
||||
if code_name == "heartbeat":
|
||||
self.sock.sendall(b"processor")
|
||||
return self._send_packet(self._code_name_mapping["heartbeat"])
|
||||
|
||||
return super()._handle_packet(code, packet_identifier, path, data)
|
||||
|
||||
|
||||
class ProcessSession(ftrack_api.session.Session):
|
||||
'''An isolated session for interaction with an ftrack server.'''
|
||||
def __init__(
|
||||
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
|
||||
plugin_paths=None, cache=None, cache_key_maker=None,
|
||||
auto_connect_event_hub=None, schema_cache_path=None,
|
||||
plugin_arguments=None, sock=None
|
||||
):
|
||||
super(ftrack_api.session.Session, self).__init__()
|
||||
self.logger = logging.getLogger(
|
||||
__name__ + '.' + self.__class__.__name__
|
||||
)
|
||||
self._closed = False
|
||||
|
||||
if server_url is None:
|
||||
server_url = os.environ.get('FTRACK_SERVER')
|
||||
|
||||
if not server_url:
|
||||
raise TypeError(
|
||||
'Required "server_url" not specified. Pass as argument or set '
|
||||
'in environment variable FTRACK_SERVER.'
|
||||
)
|
||||
|
||||
self._server_url = server_url
|
||||
|
||||
if api_key is None:
|
||||
api_key = os.environ.get(
|
||||
'FTRACK_API_KEY',
|
||||
# Backwards compatibility
|
||||
os.environ.get('FTRACK_APIKEY')
|
||||
)
|
||||
|
||||
if not api_key:
|
||||
raise TypeError(
|
||||
'Required "api_key" not specified. Pass as argument or set in '
|
||||
'environment variable FTRACK_API_KEY.'
|
||||
)
|
||||
|
||||
self._api_key = api_key
|
||||
|
||||
if api_user is None:
|
||||
api_user = os.environ.get('FTRACK_API_USER')
|
||||
if not api_user:
|
||||
try:
|
||||
api_user = getpass.getuser()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not api_user:
|
||||
raise TypeError(
|
||||
'Required "api_user" not specified. Pass as argument, set in '
|
||||
'environment variable FTRACK_API_USER or one of the standard '
|
||||
'environment variables used by Python\'s getpass module.'
|
||||
)
|
||||
|
||||
self._api_user = api_user
|
||||
|
||||
# Currently pending operations.
|
||||
self.recorded_operations = ftrack_api.operation.Operations()
|
||||
self.record_operations = True
|
||||
|
||||
self.cache_key_maker = cache_key_maker
|
||||
if self.cache_key_maker is None:
|
||||
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
|
||||
|
||||
# Enforce always having a memory cache at top level so that the same
|
||||
# in-memory instance is returned from session.
|
||||
self.cache = ftrack_api.cache.LayeredCache([
|
||||
ftrack_api.cache.MemoryCache()
|
||||
])
|
||||
|
||||
if cache is not None:
|
||||
if callable(cache):
|
||||
cache = cache(self)
|
||||
|
||||
if cache is not None:
|
||||
self.cache.caches.append(cache)
|
||||
|
||||
self._managed_request = None
|
||||
self._request = requests.Session()
|
||||
self._request.auth = ftrack_api.session.SessionAuthentication(
|
||||
self._api_key, self._api_user
|
||||
)
|
||||
|
||||
self.auto_populate = auto_populate
|
||||
|
||||
# Fetch server information and in doing so also check credentials.
|
||||
self._server_information = self._fetch_server_information()
|
||||
|
||||
# Now check compatibility of server based on retrieved information.
|
||||
self.check_server_compatibility()
|
||||
|
||||
# Construct event hub and load plugins.
|
||||
self._event_hub = ProcessEventHub(
|
||||
self._server_url,
|
||||
self._api_user,
|
||||
self._api_key,
|
||||
sock=sock
|
||||
)
|
||||
|
||||
self._auto_connect_event_hub_thread = None
|
||||
if auto_connect_event_hub in (None, True):
|
||||
# Connect to event hub in background thread so as not to block main
|
||||
# session usage waiting for event hub connection.
|
||||
self._auto_connect_event_hub_thread = threading.Thread(
|
||||
target=self._event_hub.connect
|
||||
)
|
||||
self._auto_connect_event_hub_thread.daemon = True
|
||||
self._auto_connect_event_hub_thread.start()
|
||||
|
||||
# To help with migration from auto_connect_event_hub default changing
|
||||
# from True to False.
|
||||
self._event_hub._deprecation_warning_auto_connect = (
|
||||
auto_connect_event_hub is None
|
||||
)
|
||||
|
||||
# Register to auto-close session on exit.
|
||||
atexit.register(self.close)
|
||||
|
||||
self._plugin_paths = plugin_paths
|
||||
if self._plugin_paths is None:
|
||||
self._plugin_paths = os.environ.get(
|
||||
'FTRACK_EVENT_PLUGIN_PATH', ''
|
||||
).split(os.pathsep)
|
||||
|
||||
self._discover_plugins(plugin_arguments=plugin_arguments)
|
||||
|
||||
# TODO: Make schemas read-only and non-mutable (or at least without
|
||||
# rebuilding types)?
|
||||
if schema_cache_path is not False:
|
||||
if schema_cache_path is None:
|
||||
schema_cache_path = os.environ.get(
|
||||
'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
|
||||
)
|
||||
|
||||
schema_cache_path = os.path.join(
|
||||
schema_cache_path, 'ftrack_api_schema_cache.json'
|
||||
)
|
||||
|
||||
self.schemas = self._load_schemas(schema_cache_path)
|
||||
self.types = self._build_entity_type_classes(self.schemas)
|
||||
|
||||
ftrack_api._centralized_storage_scenario.register(self)
|
||||
|
||||
self._configure_locations()
|
||||
self.event_hub.publish(
|
||||
ftrack_api.event.base.Event(
|
||||
topic='ftrack.api.session.ready',
|
||||
data=dict(
|
||||
session=self
|
||||
)
|
||||
),
|
||||
synchronous=True
|
||||
)
|
||||
|
|
@ -1,257 +0,0 @@
|
|||
import logging
|
||||
import os
|
||||
import atexit
|
||||
import tempfile
|
||||
import threading
|
||||
import requests
|
||||
|
||||
import ftrack_api
|
||||
import ftrack_api.session
|
||||
import ftrack_api.cache
|
||||
import ftrack_api.operation
|
||||
import ftrack_api._centralized_storage_scenario
|
||||
import ftrack_api.event
|
||||
from ftrack_api.logging import LazyLogMessage as L
|
||||
|
||||
|
||||
class StorerEventHub(ftrack_api.event.hub.EventHub):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.sock = kwargs.pop("sock")
|
||||
super(StorerEventHub, self).__init__(*args, **kwargs)
|
||||
|
||||
def _handle_packet(self, code, packet_identifier, path, data):
|
||||
"""Override `_handle_packet` which extend heartbeat"""
|
||||
if self._code_name_mapping[code] == "heartbeat":
|
||||
# Reply with heartbeat.
|
||||
self.sock.sendall(b"storer")
|
||||
return self._send_packet(self._code_name_mapping['heartbeat'])
|
||||
|
||||
return super(StorerEventHub, self)._handle_packet(
|
||||
code, packet_identifier, path, data
|
||||
)
|
||||
|
||||
|
||||
class StorerSession(ftrack_api.session.Session):
|
||||
'''An isolated session for interaction with an ftrack server.'''
|
||||
def __init__(
|
||||
self, server_url=None, api_key=None, api_user=None, auto_populate=True,
|
||||
plugin_paths=None, cache=None, cache_key_maker=None,
|
||||
auto_connect_event_hub=None, schema_cache_path=None,
|
||||
plugin_arguments=None, sock=None
|
||||
):
|
||||
'''Initialise session.
|
||||
|
||||
*server_url* should be the URL of the ftrack server to connect to
|
||||
including any port number. If not specified attempt to look up from
|
||||
:envvar:`FTRACK_SERVER`.
|
||||
|
||||
*api_key* should be the API key to use for authentication whilst
|
||||
*api_user* should be the username of the user in ftrack to record
|
||||
operations against. If not specified, *api_key* should be retrieved
|
||||
from :envvar:`FTRACK_API_KEY` and *api_user* from
|
||||
:envvar:`FTRACK_API_USER`.
|
||||
|
||||
If *auto_populate* is True (the default), then accessing entity
|
||||
attributes will cause them to be automatically fetched from the server
|
||||
if they are not already. This flag can be changed on the session
|
||||
directly at any time.
|
||||
|
||||
*plugin_paths* should be a list of paths to search for plugins. If not
|
||||
specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`.
|
||||
|
||||
*cache* should be an instance of a cache that fulfils the
|
||||
:class:`ftrack_api.cache.Cache` interface and will be used as the cache
|
||||
for the session. It can also be a callable that will be called with the
|
||||
session instance as sole argument. The callable should return ``None``
|
||||
if a suitable cache could not be configured, but session instantiation
|
||||
can continue safely.
|
||||
|
||||
.. note::
|
||||
|
||||
The session will add the specified cache to a pre-configured layered
|
||||
cache that specifies the top level cache as a
|
||||
:class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary
|
||||
to construct a separate memory cache for typical behaviour. Working
|
||||
around this behaviour or removing the memory cache can lead to
|
||||
unexpected behaviour.
|
||||
|
||||
*cache_key_maker* should be an instance of a key maker that fulfils the
|
||||
:class:`ftrack_api.cache.KeyMaker` interface and will be used to
|
||||
generate keys for objects being stored in the *cache*. If not specified,
|
||||
a :class:`~ftrack_api.cache.StringKeyMaker` will be used.
|
||||
|
||||
If *auto_connect_event_hub* is True then embedded event hub will be
|
||||
automatically connected to the event server and allow for publishing and
|
||||
subscribing to **non-local** events. If False, then only publishing and
|
||||
subscribing to **local** events will be possible until the hub is
|
||||
manually connected using :meth:`EventHub.connect
|
||||
<ftrack_api.event.hub.EventHub.connect>`.
|
||||
|
||||
.. note::
|
||||
|
||||
The event hub connection is performed in a background thread to
|
||||
improve session startup time. If a registered plugin requires a
|
||||
connected event hub then it should check the event hub connection
|
||||
status explicitly. Subscribing to events does *not* require a
|
||||
connected event hub.
|
||||
|
||||
Enable schema caching by setting *schema_cache_path* to a folder path.
|
||||
If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to
|
||||
determine the path to store cache in. If the environment variable is
|
||||
also not specified then a temporary directory will be used. Set to
|
||||
`False` to disable schema caching entirely.
|
||||
|
||||
*plugin_arguments* should be an optional mapping (dict) of keyword
|
||||
arguments to pass to plugin register functions upon discovery. If a
|
||||
discovered plugin has a signature that is incompatible with the passed
|
||||
arguments, the discovery mechanism will attempt to reduce the passed
|
||||
arguments to only those that the plugin accepts. Note that a warning
|
||||
will be logged in this case.
|
||||
|
||||
'''
|
||||
super(ftrack_api.session.Session, self).__init__()
|
||||
self.logger = logging.getLogger(
|
||||
__name__ + '.' + self.__class__.__name__
|
||||
)
|
||||
self._closed = False
|
||||
|
||||
if server_url is None:
|
||||
server_url = os.environ.get('FTRACK_SERVER')
|
||||
|
||||
if not server_url:
|
||||
raise TypeError(
|
||||
'Required "server_url" not specified. Pass as argument or set '
|
||||
'in environment variable FTRACK_SERVER.'
|
||||
)
|
||||
|
||||
self._server_url = server_url
|
||||
|
||||
if api_key is None:
|
||||
api_key = os.environ.get(
|
||||
'FTRACK_API_KEY',
|
||||
# Backwards compatibility
|
||||
os.environ.get('FTRACK_APIKEY')
|
||||
)
|
||||
|
||||
if not api_key:
|
||||
raise TypeError(
|
||||
'Required "api_key" not specified. Pass as argument or set in '
|
||||
'environment variable FTRACK_API_KEY.'
|
||||
)
|
||||
|
||||
self._api_key = api_key
|
||||
|
||||
if api_user is None:
|
||||
api_user = os.environ.get('FTRACK_API_USER')
|
||||
if not api_user:
|
||||
try:
|
||||
api_user = getpass.getuser()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if not api_user:
|
||||
raise TypeError(
|
||||
'Required "api_user" not specified. Pass as argument, set in '
|
||||
'environment variable FTRACK_API_USER or one of the standard '
|
||||
'environment variables used by Python\'s getpass module.'
|
||||
)
|
||||
|
||||
self._api_user = api_user
|
||||
|
||||
# Currently pending operations.
|
||||
self.recorded_operations = ftrack_api.operation.Operations()
|
||||
self.record_operations = True
|
||||
|
||||
self.cache_key_maker = cache_key_maker
|
||||
if self.cache_key_maker is None:
|
||||
self.cache_key_maker = ftrack_api.cache.StringKeyMaker()
|
||||
|
||||
# Enforce always having a memory cache at top level so that the same
|
||||
# in-memory instance is returned from session.
|
||||
self.cache = ftrack_api.cache.LayeredCache([
|
||||
ftrack_api.cache.MemoryCache()
|
||||
])
|
||||
|
||||
if cache is not None:
|
||||
if callable(cache):
|
||||
cache = cache(self)
|
||||
|
||||
if cache is not None:
|
||||
self.cache.caches.append(cache)
|
||||
|
||||
self._managed_request = None
|
||||
self._request = requests.Session()
|
||||
self._request.auth = ftrack_api.session.SessionAuthentication(
|
||||
self._api_key, self._api_user
|
||||
)
|
||||
|
||||
self.auto_populate = auto_populate
|
||||
|
||||
# Fetch server information and in doing so also check credentials.
|
||||
self._server_information = self._fetch_server_information()
|
||||
|
||||
# Now check compatibility of server based on retrieved information.
|
||||
self.check_server_compatibility()
|
||||
|
||||
# Construct event hub and load plugins.
|
||||
self._event_hub = StorerEventHub(
|
||||
self._server_url,
|
||||
self._api_user,
|
||||
self._api_key,
|
||||
sock=sock
|
||||
)
|
||||
|
||||
self._auto_connect_event_hub_thread = None
|
||||
if auto_connect_event_hub in (None, True):
|
||||
# Connect to event hub in background thread so as not to block main
|
||||
# session usage waiting for event hub connection.
|
||||
self._auto_connect_event_hub_thread = threading.Thread(
|
||||
target=self._event_hub.connect
|
||||
)
|
||||
self._auto_connect_event_hub_thread.daemon = True
|
||||
self._auto_connect_event_hub_thread.start()
|
||||
|
||||
# To help with migration from auto_connect_event_hub default changing
|
||||
# from True to False.
|
||||
self._event_hub._deprecation_warning_auto_connect = (
|
||||
auto_connect_event_hub is None
|
||||
)
|
||||
|
||||
# Register to auto-close session on exit.
|
||||
atexit.register(self.close)
|
||||
|
||||
self._plugin_paths = plugin_paths
|
||||
if self._plugin_paths is None:
|
||||
self._plugin_paths = os.environ.get(
|
||||
'FTRACK_EVENT_PLUGIN_PATH', ''
|
||||
).split(os.pathsep)
|
||||
|
||||
self._discover_plugins(plugin_arguments=plugin_arguments)
|
||||
|
||||
# TODO: Make schemas read-only and non-mutable (or at least without
|
||||
# rebuilding types)?
|
||||
if schema_cache_path is not False:
|
||||
if schema_cache_path is None:
|
||||
schema_cache_path = os.environ.get(
|
||||
'FTRACK_API_SCHEMA_CACHE_PATH', tempfile.gettempdir()
|
||||
)
|
||||
|
||||
schema_cache_path = os.path.join(
|
||||
schema_cache_path, 'ftrack_api_schema_cache.json'
|
||||
)
|
||||
|
||||
self.schemas = self._load_schemas(schema_cache_path)
|
||||
self.types = self._build_entity_type_classes(self.schemas)
|
||||
|
||||
ftrack_api._centralized_storage_scenario.register(self)
|
||||
|
||||
self._configure_locations()
|
||||
self.event_hub.publish(
|
||||
ftrack_api.event.base.Event(
|
||||
topic='ftrack.api.session.ready',
|
||||
data=dict(
|
||||
session=self
|
||||
)
|
||||
),
|
||||
synchronous=True
|
||||
)
|
||||
|
|
@@ -1,7 +1,6 @@
import os
import sys
import time
import signal
import socket
import threading
import subprocess

@@ -10,7 +9,9 @@ from pypeapp import Logger

class SocketThread(threading.Thread):
    """Thread that checks suprocess of storer of processor of events"""

    MAX_TIMEOUT = 35

    def __init__(self, name, port, filepath):
        super(SocketThread, self).__init__()
        self.log = Logger().get_logger("SocketThread", "Event Thread")
@@ -26,6 +27,8 @@ class SocketThread(threading.Thread):
        self.mongo_error = False

        self._temp_data = {}

    def stop(self):
        self._is_running = False

@@ -50,8 +53,7 @@ class SocketThread(threading.Thread):
        )

        self.subproc = subprocess.Popen(
            ["python", self.filepath, "-port", str(self.port)],
            stdout=subprocess.PIPE
            [sys.executable, self.filepath, "-port", str(self.port)]
        )

        # Listen for incoming connections

@@ -81,8 +83,9 @@ class SocketThread(threading.Thread):
            try:
                if not self._is_running:
                    break
                data = None
                try:
                    data = connection.recv(16)
                    data = self.get_data_from_con(connection)
                    time_con = time.time()

                except socket.timeout:

@@ -99,10 +102,7 @@ class SocketThread(threading.Thread):
                    self._is_running = False
                    break

                if data:
                    if data == b"MongoError":
                        self.mongo_error = True
                    connection.sendall(data)
                self._handle_data(connection, data)

            except Exception as exc:
                self.log.error(

@@ -115,9 +115,15 @@ class SocketThread(threading.Thread):
        if self.subproc.poll() is None:
            self.subproc.terminate()

        lines = self.subproc.stdout.readlines()
        if lines:
            print("*** Socked Thread stdout ***")
            for line in lines:
                os.write(1, line)
        self.finished = True

    def get_data_from_con(self, connection):
        return connection.recv(16)

    def _handle_data(self, connection, data):
        if not data:
            return

        if data == b"MongoError":
            self.mongo_error = True
        connection.sendall(data)
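The class above is easiest to read next to a small supervision sketch. The following is not part of the commit; the thread name, port and script path are placeholders, and it assumes SocketThread keeps the constructor and attributes shown in the diff.

# Hypothetical supervision loop for SocketThread (illustrative values only).
import time

from pype.ftrack.ftrack_server import socket_thread

thread = socket_thread.SocketThread(
    "StorerThread", 10001, "/path/to/sub_event_storer.py"
)
thread.start()
try:
    while not thread.finished:
        if thread.mongo_error:
            break  # subprocess reported a MongoDB problem over the socket
        time.sleep(1)
finally:
    thread.stop()
    thread.join()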
@@ -1,12 +1,9 @@
import os
import sys
import datetime
import signal
import socket
import pymongo

from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.session_processor import ProcessSession
from pype.ftrack.ftrack_server.lib import SocketSession, ProcessEventHub
from pypeapp import Logger

log = Logger().get_logger("Event processor")

@@ -24,12 +21,14 @@ def main(args):
    sock.sendall(b"CreatedProcess")
    try:
        session = ProcessSession(auto_connect_event_hub=True, sock=sock)
        server = FtrackServer('event')
        session = SocketSession(
            auto_connect_event_hub=True, sock=sock, Eventhub=ProcessEventHub
        )
        server = FtrackServer("event")
        log.debug("Launched Ftrack Event processor")
        server.run_server(session)

    except Exception as exc:
    except Exception:
        log.error("Event server crashed. See traceback below", exc_info=True)

    finally:
@@ -5,16 +5,24 @@ import signal
import socket
import pymongo

import ftrack_api
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import get_ftrack_event_mongo_info
from pype.ftrack.ftrack_server.lib import (
    get_ftrack_event_mongo_info,
    SocketSession,
    StorerEventHub
)
from pype.ftrack.lib.custom_db_connector import DbConnector
from session_storer import StorerSession
from pypeapp import Logger

log = Logger().get_logger("Event storer")

url, database, table_name = get_ftrack_event_mongo_info()

class SessionFactory:
    session = None


url, database, table_name = get_ftrack_event_mongo_info()
dbcon = DbConnector(
    mongo_url=url,
    database_name=database,
@@ -24,10 +32,11 @@ dbcon = DbConnector(
# ignore_topics = ["ftrack.meta.connected"]
ignore_topics = []


def install_db():
    try:
        dbcon.install()
        dbcon._database.collection_names()
        dbcon._database.list_collection_names()
    except pymongo.errors.AutoReconnect:
        log.error("Mongo server \"{}\" is not responding, exiting.".format(
            os.environ["AVALON_MONGO"]
@@ -49,7 +58,7 @@ def launch(event):

    try:
        # dbcon.insert_one(event_data)
        dbcon.update({"id": event_id}, event_data, upsert=True)
        dbcon.replace_one({"id": event_id}, event_data, upsert=True)
        log.debug("Event: {} stored".format(event_id))

    except pymongo.errors.AutoReconnect:
@@ -65,10 +74,75 @@ def launch(event):
    )


def trigger_sync(event):
    session = SessionFactory.session
    source_id = event.get("source", {}).get("id")
    if not source_id or source_id != session.event_hub.id:
        return

    if session is None:
        log.warning("Session is not set. Can't trigger Sync to avalon action.")
        return True

    projects = session.query("Project").all()
    if not projects:
        return True

    query = {
        "pype_data.is_processed": False,
        "topic": "ftrack.action.launch",
        "data.actionIdentifier": "sync.to.avalon.server"
    }
    set_dict = {
        "$set": {"pype_data.is_processed": True}
    }
    dbcon.update_many(query, set_dict)

    selections = []
    for project in projects:
        if project["status"] != "active":
            continue

        auto_sync = project["custom_attributes"].get("avalon_auto_sync")
        if not auto_sync:
            continue

        selections.append({
            "entityId": project["id"],
            "entityType": "show"
        })

    if not selections:
        return

    user = session.query(
        "User where username is \"{}\"".format(session.api_user)
    ).one()
    user_data = {
        "username": user["username"],
        "id": user["id"]
    }

    for selection in selections:
        event_data = {
            "actionIdentifier": "sync.to.avalon.server",
            "selection": [selection]
        }
        session.event_hub.publish(
            ftrack_api.event.base.Event(
                topic="ftrack.action.launch",
                data=event_data,
                source=dict(user=user_data)
            ),
            on_error="ignore"
        )


def register(session):
    '''Registers the event, subscribing the discover and launch topics.'''
    install_db()
    session.event_hub.subscribe("topic=*", launch)
    session.event_hub.subscribe("topic=pype.storer.started", trigger_sync)


def main(args):
@@ -84,7 +158,10 @@ def main(args):
    sock.sendall(b"CreatedStore")

    try:
        session = StorerSession(auto_connect_event_hub=True, sock=sock)
        session = SocketSession(
            auto_connect_event_hub=True, sock=sock, Eventhub=StorerEventHub
        )
        SessionFactory.session = session
        register(session)
        server = FtrackServer("event")
        log.debug("Launched Ftrack Event storer")
@@ -1,4 +1,3 @@
import os
import sys
import time
import datetime
@@ -7,7 +6,6 @@ import threading

from ftrack_server import FtrackServer
import ftrack_api
from ftrack_api.event.hub import EventHub
from pypeapp import Logger

log = Logger().get_logger("Event Server Legacy")

@@ -37,7 +35,10 @@ class TimerChecker(threading.Thread):

            if not self.session.event_hub.connected:
            if not connected:
                if (datetime.datetime.now() - start).seconds > self.max_time_out:
                if (
                    (datetime.datetime.now() - start).seconds >
                    self.max_time_out
                ):
                    log.error((
                        "Exiting event server. Session was not connected"
                        " to ftrack server in {} seconds."

@@ -61,7 +62,7 @@ class TimerChecker(threading.Thread):
def main(args):
    check_thread = None
    try:
        server = FtrackServer('event')
        server = FtrackServer("event")
        session = ftrack_api.Session(auto_connect_event_hub=True)

        check_thread = TimerChecker(server, session)
56
pype/ftrack/ftrack_server/sub_user_server.py
Normal file

@@ -0,0 +1,56 @@
import sys
import signal
import socket

import traceback

from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import SocketSession, UserEventHub

from pypeapp import Logger

log = Logger().get_logger("FtrackUserServer")


def main(args):
    port = int(args[-1])

    # Create a TCP/IP socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    # Connect the socket to the port where the server is listening
    server_address = ("localhost", port)
    log.debug(
        "User Ftrack Server connected to {} port {}".format(*server_address)
    )
    sock.connect(server_address)
    sock.sendall(b"CreatedUser")

    try:
        session = SocketSession(
            auto_connect_event_hub=True, sock=sock, Eventhub=UserEventHub
        )
        server = FtrackServer("action")
        log.debug("Launched User Ftrack Server")
        server.run_server(session=session)
    except Exception:
        traceback.print_exception(*sys.exc_info())

    finally:
        log.debug("Closing socket")
        sock.close()
        return 1


if __name__ == "__main__":
    # Register interupt signal
    def signal_handler(sig, frame):
        log.info(
            "Process was forced to stop. Process ended."
        )
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    sys.exit(main(sys.argv))
@@ -1,4 +1,4 @@
from .avalon_sync import *
from . import avalon_sync
from .credentials import *
from .ftrack_app_handler import *
from .ftrack_event_handler import *
File diff suppressed because it is too large
@@ -2,8 +2,7 @@ import functools
import time
from pypeapp import Logger
import ftrack_api
from ftrack_api import session as fa_session
from pype.ftrack.ftrack_server import session_processor
from pype.ftrack.ftrack_server.lib import SocketSession


class MissingPermision(Exception):

@@ -42,7 +41,7 @@ class BaseHandler(object):
        self.log = Logger().get_logger(self.__class__.__name__)
        if not(
            isinstance(session, ftrack_api.session.Session) or
            isinstance(session, session_processor.ProcessSession)
            isinstance(session, SocketSession)
        ):
            raise Exception((
                "Session object entered with args is instance of \"{}\""

@@ -243,7 +242,7 @@ class BaseHandler(object):
            _entities is None or
            _entities[0].get(
                'link', None
            ) == fa_session.ftrack_api.symbol.NOT_SET
            ) == ftrack_api.symbol.NOT_SET
        ):
            _entities = self._get_entities(event)


@@ -447,7 +446,7 @@ class BaseHandler(object):
            'applicationId=ftrack.client.web and user.id="{0}"'
        ).format(user_id)
        self.session.event_hub.publish(
            fa_session.ftrack_api.event.base.Event(
            ftrack_api.event.base.Event(
                topic='ftrack.action.trigger-user-interface',
                data=dict(
                    type='message',

@@ -495,8 +494,8 @@ class BaseHandler(object):

        if not user:
            raise TypeError((
                'Ftrack user with {} "{}" was not found!'.format(key, value)
            ))
                'Ftrack user with {} "{}" was not found!'
            ).format(key, value))

        user_id = user['id']


@@ -505,7 +504,7 @@ class BaseHandler(object):
        ).format(user_id)

        self.session.event_hub.publish(
            fa_session.ftrack_api.event.base.Event(
            ftrack_api.event.base.Event(
                topic='ftrack.action.trigger-user-interface',
                data=dict(
                    type='widget',

@@ -533,7 +532,7 @@ class BaseHandler(object):
        else:
            first = False

        subtitle = {'type': 'label', 'value':'<h3>{}</h3>'.format(key)}
        subtitle = {'type': 'label', 'value': '<h3>{}</h3>'.format(key)}
        items.append(subtitle)
        if isinstance(value, list):
            for item in value:

@@ -593,7 +592,7 @@ class BaseHandler(object):

        # Create and trigger event
        session.event_hub.publish(
            fa_session.ftrack_api.event.base.Event(
            ftrack_api.event.base.Event(
                topic=topic,
                data=_event_data,
                source=dict(user=_user_data)

@@ -614,7 +613,7 @@ class BaseHandler(object):
        if not source and event:
            source = event.get("source")
        # Create and trigger event
        event = fa_session.ftrack_api.event.base.Event(
        event = ftrack_api.event.base.Event(
            topic=topic,
            data=event_data,
            source=source
@ -1,26 +1,27 @@
|
|||
import os
|
||||
import json
|
||||
import threading
|
||||
import time
|
||||
from Qt import QtCore, QtGui, QtWidgets
|
||||
import datetime
|
||||
import threading
|
||||
from Qt import QtCore, QtWidgets
|
||||
|
||||
import ftrack_api
|
||||
from pypeapp import style
|
||||
from pype.ftrack import FtrackServer, check_ftrack_url, credentials
|
||||
from ..ftrack_server.lib import check_ftrack_url
|
||||
from ..ftrack_server import socket_thread
|
||||
from ..lib import credentials
|
||||
from . import login_dialog
|
||||
|
||||
from pype import api as pype
|
||||
from pypeapp import Logger
|
||||
|
||||
|
||||
log = pype.Logger().get_logger("FtrackModule", "ftrack")
|
||||
log = Logger().get_logger("FtrackModule", "ftrack")
|
||||
|
||||
|
||||
class FtrackModule:
|
||||
def __init__(self, main_parent=None, parent=None):
|
||||
self.parent = parent
|
||||
self.widget_login = login_dialog.Login_Dialog_ui(self)
|
||||
self.action_server = FtrackServer('action')
|
||||
self.thread_action_server = None
|
||||
self.thread_socket_server = None
|
||||
self.thread_timer = None
|
||||
|
||||
self.bool_logged = False
|
||||
|
|
@ -75,14 +76,6 @@ class FtrackModule:
|
|||
|
||||
# Actions part
|
||||
def start_action_server(self):
|
||||
self.bool_action_thread_running = True
|
||||
self.set_menu_visibility()
|
||||
if (
|
||||
self.thread_action_server is not None and
|
||||
self.bool_action_thread_running is False
|
||||
):
|
||||
self.stop_action_server()
|
||||
|
||||
if self.thread_action_server is None:
|
||||
self.thread_action_server = threading.Thread(
|
||||
target=self.set_action_server
|
||||
|
|
@ -90,35 +83,114 @@ class FtrackModule:
|
|||
self.thread_action_server.start()
|
||||
|
||||
def set_action_server(self):
|
||||
first_check = True
|
||||
while self.bool_action_thread_running is True:
|
||||
if not check_ftrack_url(os.environ['FTRACK_SERVER']):
|
||||
if first_check:
|
||||
log.warning(
|
||||
"Could not connect to Ftrack server"
|
||||
)
|
||||
first_check = False
|
||||
if self.bool_action_server_running:
|
||||
return
|
||||
|
||||
self.bool_action_server_running = True
|
||||
self.bool_action_thread_running = False
|
||||
|
||||
ftrack_url = os.environ['FTRACK_SERVER']
|
||||
|
||||
parent_file_path = os.path.dirname(
|
||||
os.path.dirname(os.path.realpath(__file__))
|
||||
)
|
||||
|
||||
min_fail_seconds = 5
|
||||
max_fail_count = 3
|
||||
wait_time_after_max_fail = 10
|
||||
|
||||
# Threads data
|
||||
thread_name = "ActionServerThread"
|
||||
thread_port = 10021
|
||||
subprocess_path = (
|
||||
"{}/ftrack_server/sub_user_server.py".format(parent_file_path)
|
||||
)
|
||||
if self.thread_socket_server is not None:
|
||||
self.thread_socket_server.stop()
|
||||
self.thread_socket_server.join()
|
||||
self.thread_socket_server = None
|
||||
|
||||
last_failed = datetime.datetime.now()
|
||||
failed_count = 0
|
||||
|
||||
ftrack_accessible = False
|
||||
printed_ftrack_error = False
|
||||
|
||||
# Main loop
|
||||
while True:
|
||||
if not self.bool_action_server_running:
|
||||
log.debug("Action server was pushed to stop.")
|
||||
break
|
||||
|
||||
# Check if accessible Ftrack and Mongo url
|
||||
if not ftrack_accessible:
|
||||
ftrack_accessible = check_ftrack_url(ftrack_url)
|
||||
|
||||
# Run threads only if Ftrack is accessible
|
||||
if not ftrack_accessible:
|
||||
if not printed_ftrack_error:
|
||||
log.warning("Can't access Ftrack {}".format(ftrack_url))
|
||||
|
||||
if self.thread_socket_server is not None:
|
||||
self.thread_socket_server.stop()
|
||||
self.thread_socket_server.join()
|
||||
self.thread_socket_server = None
|
||||
self.bool_action_thread_running = False
|
||||
self.set_menu_visibility()
|
||||
|
||||
printed_ftrack_error = True
|
||||
|
||||
time.sleep(1)
|
||||
continue
|
||||
log.info(
|
||||
"Connected to Ftrack server. Running actions session"
|
||||
)
|
||||
try:
|
||||
self.bool_action_server_running = True
|
||||
|
||||
printed_ftrack_error = False
|
||||
|
||||
# Run backup thread which does not requeire mongo to work
|
||||
if self.thread_socket_server is None:
|
||||
if failed_count < max_fail_count:
|
||||
self.thread_socket_server = socket_thread.SocketThread(
|
||||
thread_name, thread_port, subprocess_path
|
||||
)
|
||||
self.thread_socket_server.start()
|
||||
self.bool_action_thread_running = True
|
||||
self.set_menu_visibility()
|
||||
|
||||
elif failed_count == max_fail_count:
|
||||
log.warning((
|
||||
"Action server failed {} times."
|
||||
" I'll try to run again {}s later"
|
||||
).format(
|
||||
str(max_fail_count), str(wait_time_after_max_fail))
|
||||
)
|
||||
failed_count += 1
|
||||
|
||||
elif ((
|
||||
datetime.datetime.now() - last_failed
|
||||
).seconds > wait_time_after_max_fail):
|
||||
failed_count = 0
|
||||
|
||||
# If thread failed test Ftrack and Mongo connection
|
||||
elif not self.thread_socket_server.isAlive():
|
||||
self.thread_socket_server.join()
|
||||
self.thread_socket_server = None
|
||||
ftrack_accessible = False
|
||||
|
||||
self.bool_action_thread_running = False
|
||||
self.set_menu_visibility()
|
||||
self.action_server.run_server()
|
||||
if self.bool_action_thread_running:
|
||||
log.debug("Ftrack action server has stopped")
|
||||
except Exception:
|
||||
log.warning(
|
||||
"Ftrack Action server crashed. Trying to connect again",
|
||||
exc_info=True
|
||||
)
|
||||
self.bool_action_server_running = False
|
||||
self.set_menu_visibility()
|
||||
first_check = True
|
||||
|
||||
_last_failed = datetime.datetime.now()
|
||||
delta_time = (_last_failed - last_failed).seconds
|
||||
if delta_time < min_fail_seconds:
|
||||
failed_count += 1
|
||||
else:
|
||||
failed_count = 0
|
||||
last_failed = _last_failed
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
self.bool_action_thread_running = False
|
||||
self.bool_action_server_running = False
|
||||
self.set_menu_visibility()
|
||||
|
||||
def reset_action_server(self):
|
||||
self.stop_action_server()
|
||||
|
|
@ -126,16 +198,18 @@ class FtrackModule:
|
|||
|
||||
def stop_action_server(self):
|
||||
try:
|
||||
self.bool_action_thread_running = False
|
||||
self.action_server.stop_session()
|
||||
self.bool_action_server_running = False
|
||||
if self.thread_socket_server is not None:
|
||||
self.thread_socket_server.stop()
|
||||
self.thread_socket_server.join()
|
||||
self.thread_socket_server = None
|
||||
|
||||
if self.thread_action_server is not None:
|
||||
self.thread_action_server.join()
|
||||
self.thread_action_server = None
|
||||
|
||||
log.info("Ftrack action server was forced to stop")
|
||||
|
||||
self.bool_action_server_running = False
|
||||
self.set_menu_visibility()
|
||||
except Exception:
|
||||
log.warning(
|
||||
"Error has happened during Killing action server",
|
||||
|
|
@ -201,9 +275,9 @@ class FtrackModule:
|
|||
self.stop_timer_thread()
|
||||
return
|
||||
|
||||
self.aRunActionS.setVisible(not self.bool_action_thread_running)
|
||||
self.aRunActionS.setVisible(not self.bool_action_server_running)
|
||||
self.aResetActionS.setVisible(self.bool_action_thread_running)
|
||||
self.aStopActionS.setVisible(self.bool_action_thread_running)
|
||||
self.aStopActionS.setVisible(self.bool_action_server_running)
|
||||
|
||||
if self.bool_timer_event is False:
|
||||
self.start_timer_thread()
|
||||
|
|
|
|||
108
pype/lib.py

@@ -1,14 +1,12 @@
import os
import re
import logging
import importlib
import itertools
import contextlib
import subprocess
import inspect


import avalon.io as io
from avalon import io
import avalon.api
import avalon


@@ -16,21 +14,38 @@ log = logging.getLogger(__name__)


# Special naming case for subprocess since its a built-in method.
def _subprocess(args):
def _subprocess(*args, **kwargs):
    """Convenience method for getting output errors for subprocess."""

    proc = subprocess.Popen(
        args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        stdin=subprocess.PIPE,
        env=os.environ
    )
    # make sure environment contains only strings
    if not kwargs.get("env"):
        filtered_env = {k: str(v) for k, v in os.environ.items()}
    else:
        filtered_env = {k: str(v) for k, v in kwargs.get("env").items()}

    output = proc.communicate()[0]
    # set overrides
    kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE)
    kwargs['stderr'] = kwargs.get('stderr', subprocess.STDOUT)
    kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE)
    kwargs['env'] = filtered_env

    proc = subprocess.Popen(*args, **kwargs)

    output, error = proc.communicate()

    if output:
        output = output.decode("utf-8")
        output += "\n"
        for line in output.strip().split("\n"):
            log.info(line)

    if error:
        error = error.decode("utf-8")
        error += "\n"
        for line in error.strip().split("\n"):
            log.error(line)

    if proc.returncode != 0:
        log.error(output)
        raise ValueError("\"{}\" was not successful: {}".format(args, output))
    return output
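A minimal call sketch for the reworked wrapper above, assuming the module stays importable as pype.lib; the command is a placeholder, not from this commit. Positional and keyword arguments are handed straight to subprocess.Popen, with stdout/stderr/stdin pipes and a string-only environment filled in by default.

# Illustrative use of the _subprocess wrapper; not taken from this commit.
from pype.lib import _subprocess

# Captured output is decoded, logged line by line and returned;
# a non-zero return code raises ValueError with that output.
version_text = _subprocess(["ffmpeg", "-version"])
print(version_text.splitlines()[0])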
@@ -181,9 +196,13 @@ def any_outdated():
        if representation in checked:
            continue

        representation_doc = io.find_one({"_id": io.ObjectId(representation),
                                          "type": "representation"},
                                         projection={"parent": True})
        representation_doc = io.find_one(
            {
                "_id": io.ObjectId(representation),
                "type": "representation"
            },
            projection={"parent": True}
        )
        if representation_doc and not is_latest(representation_doc):
            return True
        elif not representation_doc:
@ -293,27 +312,38 @@ def switch_item(container,
|
|||
representation_name = representation["name"]
|
||||
|
||||
# Find the new one
|
||||
asset = io.find_one({"name": asset_name, "type": "asset"})
|
||||
asset = io.find_one({
|
||||
"name": asset_name,
|
||||
"type": "asset"
|
||||
})
|
||||
assert asset, ("Could not find asset in the database with the name "
|
||||
"'%s'" % asset_name)
|
||||
|
||||
subset = io.find_one({"name": subset_name,
|
||||
"type": "subset",
|
||||
"parent": asset["_id"]})
|
||||
subset = io.find_one({
|
||||
"name": subset_name,
|
||||
"type": "subset",
|
||||
"parent": asset["_id"]
|
||||
})
|
||||
assert subset, ("Could not find subset in the database with the name "
|
||||
"'%s'" % subset_name)
|
||||
|
||||
version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
sort=[('name', -1)])
|
||||
version = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"]
|
||||
},
|
||||
sort=[('name', -1)]
|
||||
)
|
||||
|
||||
assert version, "Could not find a version for {}.{}".format(
|
||||
asset_name, subset_name
|
||||
)
|
||||
|
||||
representation = io.find_one({"name": representation_name,
|
||||
"type": "representation",
|
||||
"parent": version["_id"]})
|
||||
representation = io.find_one({
|
||||
"name": representation_name,
|
||||
"type": "representation",
|
||||
"parent": version["_id"]}
|
||||
)
|
||||
|
||||
assert representation, ("Could not find representation in the database with"
|
||||
" the name '%s'" % representation_name)
|
||||
|
|
@ -351,7 +381,10 @@ def get_asset(asset_name=None):
|
|||
if not asset_name:
|
||||
asset_name = avalon.api.Session["AVALON_ASSET"]
|
||||
|
||||
asset_document = io.find_one({"name": asset_name, "type": "asset"})
|
||||
asset_document = io.find_one({
|
||||
"name": asset_name,
|
||||
"type": "asset"
|
||||
})
|
||||
if not asset_document:
|
||||
raise TypeError("Entity \"{}\" was not found in DB".format(asset_name))
|
||||
|
||||
|
|
@ -523,8 +556,7 @@ def get_subsets(asset_name,
|
|||
from avalon import io
|
||||
|
||||
# query asset from db
|
||||
asset_io = io.find_one({"type": "asset",
|
||||
"name": asset_name})
|
||||
asset_io = io.find_one({"type": "asset", "name": asset_name})
|
||||
|
||||
# check if anything returned
|
||||
assert asset_io, "Asset not existing. \
|
||||
|
|
@ -548,14 +580,20 @@ def get_subsets(asset_name,
|
|||
# Process subsets
|
||||
for subset in subsets:
|
||||
if not version:
|
||||
version_sel = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
sort=[("name", -1)])
|
||||
version_sel = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"]
|
||||
},
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
else:
|
||||
assert isinstance(version, int), "version needs to be `int` type"
|
||||
version_sel = io.find_one({"type": "version",
|
||||
"parent": subset["_id"],
|
||||
"name": int(version)})
|
||||
version_sel = io.find_one({
|
||||
"type": "version",
|
||||
"parent": subset["_id"],
|
||||
"name": int(version)
|
||||
})
|
||||
|
||||
find_dict = {"type": "representation",
|
||||
"parent": version_sel["_id"]}
|
||||
|
|
|
|||
|
|
@@ -33,5 +33,7 @@ class LogsWindow(QtWidgets.QWidget):

    def on_selection_changed(self):
        index = self.logs_widget.selected_log()
        if not index or not index.isValid():
            return
        node = index.data(self.logs_widget.model.NodeRole)
        self.log_detail.set_detail(node)
@@ -1,11 +1,7 @@
import datetime
import inspect
import getpass
from Qt import QtCore, QtWidgets, QtGui
from PyQt5.QtCore import QVariant
from .models import LogModel

from .lib import preserve_states


class SearchComboBox(QtWidgets.QComboBox):
    """Searchable ComboBox with empty placeholder value as first value"""
@ -53,6 +49,7 @@ class SearchComboBox(QtWidgets.QComboBox):
|
|||
|
||||
return text
|
||||
|
||||
|
||||
class CheckableComboBox2(QtWidgets.QComboBox):
|
||||
def __init__(self, parent=None):
|
||||
super(CheckableComboBox, self).__init__(parent)
|
||||
|
|
@ -96,9 +93,11 @@ class SelectableMenu(QtWidgets.QMenu):
|
|||
else:
|
||||
super(SelectableMenu, self).mouseReleaseEvent(event)
|
||||
|
||||
|
||||
class CustomCombo(QtWidgets.QWidget):
|
||||
|
||||
selection_changed = QtCore.Signal()
|
||||
checked_changed = QtCore.Signal(bool)
|
||||
|
||||
def __init__(self, title, parent=None):
|
||||
super(CustomCombo, self).__init__(parent)
|
||||
|
|
@ -127,12 +126,27 @@ class CustomCombo(QtWidgets.QWidget):
|
|||
self.toolmenu.clear()
|
||||
self.addItems(items)
|
||||
|
||||
def select_items(self, items, ignore_input=False):
|
||||
if not isinstance(items, list):
|
||||
items = [items]
|
||||
|
||||
for action in self.toolmenu.actions():
|
||||
check = True
|
||||
if (
|
||||
action.text() in items and ignore_input or
|
||||
action.text() not in items and not ignore_input
|
||||
):
|
||||
check = False
|
||||
|
||||
action.setChecked(check)
|
||||
|
||||
def addItems(self, items):
|
||||
for item in items:
|
||||
action = self.toolmenu.addAction(item)
|
||||
action.setCheckable(True)
|
||||
action.setChecked(True)
|
||||
self.toolmenu.addAction(action)
|
||||
action.setChecked(True)
|
||||
action.triggered.connect(self.checked_changed)
|
||||
|
||||
def items(self):
|
||||
for action in self.toolmenu.actions():
|
||||
|
|
@ -186,15 +200,42 @@ class CheckableComboBox(QtWidgets.QComboBox):
|
|||
for text, checked in items:
|
||||
text_item = QtGui.QStandardItem(text)
|
||||
checked_item = QtGui.QStandardItem()
|
||||
checked_item.setData(QVariant(checked), QtCore.Qt.CheckStateRole)
|
||||
checked_item.setData(
|
||||
QtCore.QVariant(checked), QtCore.Qt.CheckStateRole
|
||||
)
|
||||
self.model.appendRow([text_item, checked_item])
|
||||
|
||||
|
||||
class FilterLogModel(QtCore.QSortFilterProxyModel):
|
||||
sub_dict = ["$gt", "$lt", "$not"]
|
||||
def __init__(self, key_values, parent=None):
|
||||
super(FilterLogModel, self).__init__(parent)
|
||||
self.allowed_key_values = key_values
|
||||
|
||||
def filterAcceptsRow(self, row, parent):
|
||||
"""
|
||||
Reimplemented from base class.
|
||||
"""
|
||||
model = self.sourceModel()
|
||||
for key, values in self.allowed_key_values.items():
|
||||
col_indx = model.COLUMNS.index(key)
|
||||
value = model.index(row, col_indx, parent).data(
|
||||
QtCore.Qt.DisplayRole
|
||||
)
|
||||
if value not in values:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class LogsWidget(QtWidgets.QWidget):
|
||||
"""A widget that lists the published subsets for an asset"""
|
||||
|
||||
active_changed = QtCore.Signal()
|
||||
|
||||
_level_order = [
|
||||
"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"
|
||||
]
|
||||
|
||||
def __init__(self, parent=None):
|
||||
super(LogsWidget, self).__init__(parent=parent)
|
||||
|
||||
|
|
@ -202,47 +243,45 @@ class LogsWidget(QtWidgets.QWidget):
|
|||
|
||||
filter_layout = QtWidgets.QHBoxLayout()
|
||||
|
||||
# user_filter = SearchComboBox(self, "Users")
|
||||
user_filter = CustomCombo("Users", self)
|
||||
users = model.dbcon.distinct("user")
|
||||
user_filter.populate(users)
|
||||
user_filter.selection_changed.connect(self.user_changed)
|
||||
user_filter.checked_changed.connect(self.user_changed)
|
||||
user_filter.select_items(getpass.getuser())
|
||||
|
||||
level_filter = CustomCombo("Levels", self)
|
||||
# levels = [(level, True) for level in model.dbcon.distinct("level")]
|
||||
levels = model.dbcon.distinct("level")
|
||||
level_filter.addItems(levels)
|
||||
_levels = []
|
||||
for level in self._level_order:
|
||||
if level in levels:
|
||||
_levels.append(level)
|
||||
level_filter.populate(_levels)
|
||||
level_filter.checked_changed.connect(self.level_changed)
|
||||
|
||||
date_from_label = QtWidgets.QLabel("From:")
|
||||
date_filter_from = QtWidgets.QDateTimeEdit()
|
||||
|
||||
date_from_layout = QtWidgets.QVBoxLayout()
|
||||
date_from_layout.addWidget(date_from_label)
|
||||
date_from_layout.addWidget(date_filter_from)
|
||||
|
||||
# now = datetime.datetime.now()
|
||||
# QtCore.QDateTime(now.year, now.month, now.day, now.hour, now.minute, second = 0, msec = 0, timeSpec = 0)
|
||||
date_to_label = QtWidgets.QLabel("To:")
|
||||
date_filter_to = QtWidgets.QDateTimeEdit()
|
||||
|
||||
date_to_layout = QtWidgets.QVBoxLayout()
|
||||
date_to_layout.addWidget(date_to_label)
|
||||
date_to_layout.addWidget(date_filter_to)
|
||||
# date_from_label = QtWidgets.QLabel("From:")
|
||||
# date_filter_from = QtWidgets.QDateTimeEdit()
|
||||
#
|
||||
# date_from_layout = QtWidgets.QVBoxLayout()
|
||||
# date_from_layout.addWidget(date_from_label)
|
||||
# date_from_layout.addWidget(date_filter_from)
|
||||
#
|
||||
# date_to_label = QtWidgets.QLabel("To:")
|
||||
# date_filter_to = QtWidgets.QDateTimeEdit()
|
||||
#
|
||||
# date_to_layout = QtWidgets.QVBoxLayout()
|
||||
# date_to_layout.addWidget(date_to_label)
|
||||
# date_to_layout.addWidget(date_filter_to)
|
||||
|
||||
filter_layout.addWidget(user_filter)
|
||||
filter_layout.addWidget(level_filter)
|
||||
filter_layout.setAlignment(QtCore.Qt.AlignLeft)
|
||||
|
||||
filter_layout.addLayout(date_from_layout)
|
||||
filter_layout.addLayout(date_to_layout)
|
||||
# filter_layout.addLayout(date_from_layout)
|
||||
# filter_layout.addLayout(date_to_layout)
|
||||
|
||||
view = QtWidgets.QTreeView(self)
|
||||
view.setAllColumnsShowFocus(True)
|
||||
|
||||
# # Set view delegates
|
||||
# time_delegate = PrettyTimeDelegate()
|
||||
# column = model.COLUMNS.index("time")
|
||||
# view.setItemDelegateForColumn(column, time_delegate)
|
||||
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
layout.setContentsMargins(0, 0, 0, 0)
|
||||
layout.addLayout(filter_layout)
|
||||
|
|
@ -255,34 +294,54 @@ class LogsWidget(QtWidgets.QWidget):
|
|||
QtCore.Qt.AscendingOrder
|
||||
)
|
||||
|
||||
view.setModel(model)
|
||||
key_val = {
|
||||
"user": users,
|
||||
"level": levels
|
||||
}
|
||||
proxy_model = FilterLogModel(key_val, view)
|
||||
proxy_model.setSourceModel(model)
|
||||
view.setModel(proxy_model)
|
||||
|
||||
view.customContextMenuRequested.connect(self.on_context_menu)
|
||||
view.selectionModel().selectionChanged.connect(self.active_changed)
|
||||
# user_filter.connect()
|
||||
|
||||
# TODO remove if nothing will affect...
|
||||
# header = self.view.header()
|
||||
# WARNING this is cool but slows down widget a lot
|
||||
# header = view.header()
|
||||
# # Enforce the columns to fit the data (purely cosmetic)
|
||||
# if Qt.__binding__ in ("PySide2", "PyQt5"):
|
||||
# header.setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents)
|
||||
# else:
|
||||
# header.setResizeMode(QtWidgets.QHeaderView.ResizeToContents)
|
||||
|
||||
# Set signals
|
||||
|
||||
# prepare
|
||||
model.refresh()
|
||||
|
||||
# Store to memory
|
||||
self.model = model
|
||||
self.proxy_model = proxy_model
|
||||
self.view = view
|
||||
|
||||
self.user_filter = user_filter
|
||||
self.level_filter = level_filter
|
||||
|
||||
def user_changed(self):
|
||||
valid_actions = []
|
||||
for action in self.user_filter.items():
|
||||
print(action)
|
||||
if action.isChecked():
|
||||
valid_actions.append(action.text())
|
||||
|
||||
self.proxy_model.allowed_key_values["user"] = valid_actions
|
||||
self.proxy_model.invalidate()
|
||||
|
||||
def level_changed(self):
|
||||
valid_actions = []
|
||||
for action in self.level_filter.items():
|
||||
if action.isChecked():
|
||||
valid_actions.append(action.text())
|
||||
|
||||
self.proxy_model.allowed_key_values["level"] = valid_actions
|
||||
self.proxy_model.invalidate()
|
||||
|
||||
|
||||
def on_context_menu(self, point):
|
||||
# TODO will be any actions? it's ready
|
||||
|
|
|
|||
|
|
@@ -162,6 +162,7 @@ def on_open(_):
    # Validate FPS after update_task_from_path to
    # ensure it is using correct FPS for the asset
    lib.validate_fps()
    lib.fix_incompatible_containers()

    if any_outdated():
        log.warning("Scene has outdated content.")
|
|||
|
|
@@ -2318,6 +2318,25 @@ def get_attr_in_layer(attr, layer):
    return cmds.getAttr(attr)


def fix_incompatible_containers():
    """Return whether the current scene has any outdated content"""

    host = avalon.api.registered_host()
    for container in host.ls():
        loader = container['loader']

        print(container['loader'])

        if loader in ["MayaAsciiLoader",
                      "AbcLoader",
                      "ModelLoader",
                      "CameraLoader",
                      "RigLoader",
                      "FBXLoader"]:
            cmds.setAttr(container["objectName"] + ".loader",
                         "ReferenceLoader", type="string")


def _null(*args):
    pass
|
|
|||
|
|
@ -15,12 +15,13 @@ log = logging.getLogger(__name__)
|
|||
def _get_menu():
|
||||
"""Return the menu instance if it currently exists in Maya"""
|
||||
|
||||
app = QtWidgets.QApplication.instance()
|
||||
widgets = dict((w.objectName(), w) for w in app.allWidgets())
|
||||
widgets = dict((
|
||||
w.objectName(), w) for w in QtWidgets.QApplication.allWidgets())
|
||||
menu = widgets.get(self._menu)
|
||||
return menu
|
||||
|
||||
|
||||
|
||||
def deferred():
|
||||
|
||||
log.info("Attempting to install scripts menu..")
|
||||
|
|
|
|||
|
|
@ -33,40 +33,41 @@ if os.getenv("PYBLISH_GUI", None):
|
|||
pyblish.register_gui(os.getenv("PYBLISH_GUI", None))
|
||||
|
||||
|
||||
class NukeHandler(logging.Handler):
|
||||
'''
|
||||
Nuke Handler - emits logs into nuke's script editor.
|
||||
warning will emit nuke.warning()
|
||||
critical and fatal would popup msg dialog to alert of the error.
|
||||
'''
|
||||
# class NukeHandler(logging.Handler):
|
||||
# '''
|
||||
# Nuke Handler - emits logs into nuke's script editor.
|
||||
# warning will emit nuke.warning()
|
||||
# critical and fatal would popup msg dialog to alert of the error.
|
||||
# '''
|
||||
#
|
||||
# def __init__(self):
|
||||
# logging.Handler.__init__(self)
|
||||
# self.set_name("Pype_Nuke_Handler")
|
||||
#
|
||||
# def emit(self, record):
|
||||
# # Formated message:
|
||||
# msg = self.format(record)
|
||||
#
|
||||
# if record.levelname.lower() in [
|
||||
# # "warning",
|
||||
# "critical",
|
||||
# "fatal",
|
||||
# "error"
|
||||
# ]:
|
||||
# msg = self.format(record)
|
||||
# nuke.message(msg)
|
||||
#
|
||||
#
|
||||
# '''Adding Nuke Logging Handler'''
|
||||
# log.info([handler.get_name() for handler in logging.root.handlers[:]])
|
||||
# nuke_handler = NukeHandler()
|
||||
# if nuke_handler.get_name() \
|
||||
# not in [handler.get_name()
|
||||
# for handler in logging.root.handlers[:]]:
|
||||
# logging.getLogger().addHandler(nuke_handler)
|
||||
# logging.getLogger().setLevel(logging.INFO)
|
||||
# log.info([handler.get_name() for handler in logging.root.handlers[:]])
|
||||
|
||||
def __init__(self):
|
||||
logging.Handler.__init__(self)
|
||||
self.set_name("Pype_Nuke_Handler")
|
||||
|
||||
def emit(self, record):
|
||||
# Formated message:
|
||||
msg = self.format(record)
|
||||
|
||||
if record.levelname.lower() in [
|
||||
# "warning",
|
||||
"critical",
|
||||
"fatal",
|
||||
"error"
|
||||
]:
|
||||
msg = self.format(record)
|
||||
nuke.message(msg)
|
||||
|
||||
|
||||
'''Adding Nuke Logging Handler'''
|
||||
log.info([handler.get_name() for handler in logging.root.handlers[:]])
|
||||
nuke_handler = NukeHandler()
|
||||
if nuke_handler.get_name() \
|
||||
not in [handler.get_name()
|
||||
for handler in logging.root.handlers[:]]:
|
||||
logging.getLogger().addHandler(nuke_handler)
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
log.info([handler.get_name() for handler in logging.root.handlers[:]])
|
||||
|
||||
def reload_config():
|
||||
"""Attempt to reload pipeline at run-time.
|
||||
|
|
@ -112,7 +113,9 @@ def install():
|
|||
# Disable all families except for the ones we explicitly want to see
|
||||
family_states = [
|
||||
"write",
|
||||
"review"
|
||||
"review",
|
||||
"nukenodes"
|
||||
"gizmo"
|
||||
]
|
||||
|
||||
avalon.data["familiesStateDefault"] = False
|
||||
|
|
|
|||
539
pype/nuke/lib.py
|
|
@ -6,6 +6,7 @@ from collections import OrderedDict
|
|||
|
||||
from avalon import api, io, lib
|
||||
import avalon.nuke
|
||||
from avalon.nuke import lib as anlib
|
||||
import pype.api as pype
|
||||
|
||||
import nuke
|
||||
|
|
@ -20,7 +21,6 @@ from .presets import (
|
|||
from .presets import (
|
||||
get_anatomy
|
||||
)
|
||||
# TODO: remove get_anatomy and import directly Anatomy() here
|
||||
|
||||
from pypeapp import Logger
|
||||
log = Logger().get_logger(__name__, "nuke")
|
||||
|
|
@ -49,8 +49,6 @@ def checkInventoryVersions():
|
|||
and check if the node is having actual version. If not then it will color
|
||||
it to red.
|
||||
"""
|
||||
# TODO: make it for all nodes not just Read (Loader
|
||||
|
||||
# get all Loader nodes by avalon attribute metadata
|
||||
for each in nuke.allNodes():
|
||||
if each.Class() == 'Read':
|
||||
|
|
@ -92,7 +90,6 @@ def checkInventoryVersions():
|
|||
def writes_version_sync():
|
||||
''' Callback synchronizing version of publishable write nodes
|
||||
'''
|
||||
# TODO: make it work with new write node group
|
||||
try:
|
||||
rootVersion = pype.get_version_from_path(nuke.root().name())
|
||||
padding = len(rootVersion)
|
||||
|
|
@ -105,6 +102,10 @@ def writes_version_sync():
|
|||
|
||||
for each in nuke.allNodes():
|
||||
if each.Class() == 'Write':
|
||||
# check if the node is avalon tracked
|
||||
if "AvalonTab" not in each.knobs():
|
||||
continue
|
||||
|
||||
avalon_knob_data = avalon.nuke.get_avalon_knob_data(
|
||||
each, ['avalon:', 'ak:'])
|
||||
|
||||
|
|
@ -125,7 +126,8 @@ def writes_version_sync():
|
|||
os.makedirs(os.path.dirname(node_new_file), 0o766)
|
||||
except Exception as e:
|
||||
log.warning(
|
||||
"Write node: `{}` has no version in path: {}".format(each.name(), e))
|
||||
"Write node: `{}` has no version in path: {}".format(
|
||||
each.name(), e))
|
||||
|
||||
|
||||
def version_up_script():
|
||||
|
|
@ -178,9 +180,12 @@ def format_anatomy(data):
|
|||
try:
|
||||
padding = int(anatomy.templates['render']['padding'])
|
||||
except KeyError as e:
|
||||
log.error("`padding` key is not in `render` "
|
||||
"Anatomy template. Please, add it there and restart "
|
||||
"the pipeline (padding: \"4\"): `{}`".format(e))
|
||||
msg = ("`padding` key is not in `render` "
|
||||
"Anatomy template. Please, add it there and restart "
|
||||
"the pipeline (padding: \"4\"): `{}`").format(e)
|
||||
|
||||
log.error(msg)
|
||||
nuke.message(msg)
|
||||
|
||||
version = data.get("version", None)
|
||||
if not version:
|
||||
|
|
@ -260,7 +265,9 @@ def create_write_node(name, data, input=None, prenodes=None):
|
|||
anatomy_filled = format_anatomy(data)
|
||||
|
||||
except Exception as e:
|
||||
log.error("problem with resolving anatomy tepmlate: {}".format(e))
|
||||
msg = "problem with resolving anatomy tepmlate: {}".format(e)
|
||||
log.error(msg)
|
||||
nuke.message(msg)
|
||||
|
||||
# build file path to workfiles
|
||||
fpath = str(anatomy_filled["work"]["folder"]).replace("\\", "/")
|
||||
|
|
@ -538,8 +545,11 @@ class WorkfileSettings(object):
|
|||
viewer_dict (dict): adjustments from presets
|
||||
|
||||
'''
|
||||
assert isinstance(viewer_dict, dict), log.error(
|
||||
"set_viewers_colorspace(): argument should be dictionary")
|
||||
if not isinstance(viewer_dict, dict):
|
||||
msg = "set_viewers_colorspace(): argument should be dictionary"
|
||||
log.error(msg)
|
||||
nuke.message(msg)
|
||||
return
|
||||
|
||||
filter_knobs = [
|
||||
"viewerProcess",
|
||||
|
|
@ -587,8 +597,10 @@ class WorkfileSettings(object):
|
|||
root_dict (dict): adjustmensts from presets
|
||||
|
||||
'''
|
||||
assert isinstance(root_dict, dict), log.error(
|
||||
"set_root_colorspace(): argument should be dictionary")
|
||||
if not isinstance(root_dict, dict):
|
||||
msg = "set_root_colorspace(): argument should be dictionary"
|
||||
log.error(msg)
|
||||
nuke.message(msg)
|
||||
|
||||
log.debug(">> root_dict: {}".format(root_dict))
|
||||
|
||||
|
|
@ -635,8 +647,11 @@ class WorkfileSettings(object):
|
|||
'''
|
||||
# TODO: complete this function so any write node in
|
||||
# scene will have fixed colorspace following presets for the project
|
||||
assert isinstance(write_dict, dict), log.error(
|
||||
"set_root_colorspace(): argument should be dictionary")
|
||||
if not isinstance(write_dict, dict):
|
||||
msg = "set_root_colorspace(): argument should be dictionary"
|
||||
nuke.message(msg)
|
||||
log.error(msg)
|
||||
return
|
||||
|
||||
log.debug("__ set_writes_colorspace(): {}".format(write_dict))
|
||||
|
||||
|
|
@ -648,25 +663,28 @@ class WorkfileSettings(object):
|
|||
try:
|
||||
self.set_root_colorspace(nuke_colorspace["root"])
|
||||
except AttributeError:
|
||||
log.error(
|
||||
"set_colorspace(): missing `root` settings in template")
|
||||
msg = "set_colorspace(): missing `root` settings in template"
|
||||
|
||||
try:
|
||||
self.set_viewers_colorspace(nuke_colorspace["viewer"])
|
||||
except AttributeError:
|
||||
log.error(
|
||||
"set_colorspace(): missing `viewer` settings in template")
|
||||
msg = "set_colorspace(): missing `viewer` settings in template"
|
||||
nuke.message(msg)
|
||||
log.error(msg)
|
||||
try:
|
||||
self.set_writes_colorspace(nuke_colorspace["write"])
|
||||
except AttributeError:
|
||||
log.error(
|
||||
"set_colorspace(): missing `write` settings in template")
|
||||
msg = "set_colorspace(): missing `write` settings in template"
|
||||
nuke.message(msg)
|
||||
log.error(msg)
|
||||
|
||||
try:
|
||||
for key in nuke_colorspace:
|
||||
log.debug("Preset's colorspace key: {}".format(key))
|
||||
except TypeError:
|
||||
log.error("Nuke is not in templates! \n\n\n"
|
||||
"contact your supervisor!")
|
||||
msg = "Nuke is not in templates! Contact your supervisor!"
|
||||
nuke.message(msg)
|
||||
log.error(msg)
|
||||
|
||||
def reset_frame_range_handles(self):
|
||||
"""Set frame range to current asset"""
|
||||
|
|
@ -702,9 +720,11 @@ class WorkfileSettings(object):
|
|||
frame_start = int(data["frameStart"]) - handle_start
|
||||
frame_end = int(data["frameEnd"]) + handle_end
|
||||
|
||||
self._root_node["lock_range"].setValue(False)
|
||||
self._root_node["fps"].setValue(fps)
|
||||
self._root_node["first_frame"].setValue(frame_start)
|
||||
self._root_node["last_frame"].setValue(frame_end)
|
||||
self._root_node["lock_range"].setValue(True)
|
||||
|
||||
# setting active viewers
|
||||
try:
|
||||
|
|
@ -751,13 +771,13 @@ class WorkfileSettings(object):
|
|||
}
|
||||
|
||||
if any(x for x in data.values() if x is None):
|
||||
log.error(
|
||||
"Missing set shot attributes in DB."
|
||||
"\nContact your supervisor!."
|
||||
"\n\nWidth: `{width}`"
|
||||
"\nHeight: `{height}`"
|
||||
"\nPixel Asspect: `{pixel_aspect}`".format(**data)
|
||||
)
|
||||
msg = ("Missing set shot attributes in DB."
|
||||
"\nContact your supervisor!."
|
||||
"\n\nWidth: `{width}`"
|
||||
"\nHeight: `{height}`"
|
||||
"\nPixel Asspect: `{pixel_aspect}`").format(**data)
|
||||
log.error(msg)
|
||||
nuke.message(msg)
|
||||
|
||||
bbox = self._asset_entity.get('data', {}).get('crop')
|
||||
|
||||
|
|
@ -774,10 +794,10 @@ class WorkfileSettings(object):
|
|||
)
|
||||
except Exception as e:
|
||||
bbox = None
|
||||
log.error(
|
||||
"{}: {} \nFormat:Crop need to be set with dots, example: "
|
||||
"0.0.1920.1080, /nSetting to default".format(__name__, e)
|
||||
)
|
||||
msg = ("{}:{} \nFormat:Crop need to be set with dots, example: "
|
||||
"0.0.1920.1080, /nSetting to default").format(__name__, e)
|
||||
log.error(msg)
|
||||
nuke.message(msg)
|
||||
|
||||
existing_format = None
|
||||
for format in nuke.formats():
|
||||
|
|
@ -1190,3 +1210,454 @@ class BuildWorkfile(WorkfileSettings):
|
|||
|
||||
def position_up(self, multiply=1):
|
||||
self.ypos -= (self.ypos_size * multiply) + self.ypos_gap
|
||||
|
||||
|
||||
class ExporterReview:
|
||||
"""
|
||||
Base class object for generating review data from Nuke
|
||||
|
||||
Args:
|
||||
klass (pyblish.plugin): pyblish plugin parent
|
||||
instance (pyblish.instance): instance of pyblish context
|
||||
|
||||
"""
|
||||
_temp_nodes = []
|
||||
data = dict({
|
||||
"representations": list()
|
||||
})
|
||||
|
||||
def __init__(self,
|
||||
klass,
|
||||
instance
|
||||
):
|
||||
|
||||
self.log = klass.log
|
||||
self.instance = instance
|
||||
self.path_in = self.instance.data.get("path", None)
|
||||
self.staging_dir = self.instance.data["stagingDir"]
|
||||
self.collection = self.instance.data.get("collection", None)
|
||||
|
||||
def get_file_info(self):
|
||||
if self.collection:
|
||||
self.log.debug("Collection: `{}`".format(self.collection))
|
||||
# get path
|
||||
self.fname = os.path.basename(self.collection.format(
|
||||
"{head}{padding}{tail}"))
|
||||
self.fhead = self.collection.format("{head}")
|
||||
|
||||
# get first and last frame
|
||||
self.first_frame = min(self.collection.indexes)
|
||||
self.last_frame = max(self.collection.indexes)
|
||||
if "slate" in self.instance.data["families"]:
|
||||
self.first_frame += 1
|
||||
else:
|
||||
self.fname = os.path.basename(self.path_in)
|
||||
self.fhead = os.path.splitext(self.fname)[0] + "."
|
||||
self.first_frame = self.instance.data.get("frameStart", None)
|
||||
self.last_frame = self.instance.data.get("frameEnd", None)
|
||||
|
||||
if "#" in self.fhead:
|
||||
self.fhead = self.fhead.replace("#", "")[:-1]
|
||||
|
||||
def get_representation_data(self, tags=None, range=False):
|
||||
add_tags = []
|
||||
if tags:
|
||||
add_tags = tags
|
||||
|
||||
repre = {
|
||||
'name': self.name,
|
||||
'ext': self.ext,
|
||||
'files': self.file,
|
||||
"stagingDir": self.staging_dir,
|
||||
"anatomy_template": "publish",
|
||||
"tags": [self.name.replace("_", "-")] + add_tags
|
||||
}
|
||||
|
||||
if range:
|
||||
repre.update({
|
||||
"frameStart": self.first_frame,
|
||||
"frameEnd": self.last_frame,
|
||||
})
|
||||
|
||||
self.data["representations"].append(repre)
|
||||
|
||||
def get_view_process_node(self):
|
||||
"""
|
||||
Will get any active view process.
|
||||
|
||||
Arguments:
|
||||
self (class): in object definition
|
||||
|
||||
Returns:
|
||||
nuke.Node: copy node of Input Process node
|
||||
"""
|
||||
anlib.reset_selection()
|
||||
ipn_orig = None
|
||||
for v in [n for n in nuke.allNodes()
|
||||
if "Viewer" in n.Class()]:
|
||||
ip = v['input_process'].getValue()
|
||||
ipn = v['input_process_node'].getValue()
|
||||
if "VIEWER_INPUT" not in ipn and ip:
|
||||
ipn_orig = nuke.toNode(ipn)
|
||||
ipn_orig.setSelected(True)
|
||||
|
||||
if ipn_orig:
|
||||
# copy selected to clipboard
|
||||
nuke.nodeCopy('%clipboard%')
|
||||
# reset selection
|
||||
anlib.reset_selection()
|
||||
# paste node and selection is on it only
|
||||
nuke.nodePaste('%clipboard%')
|
||||
# assign to variable
|
||||
ipn = nuke.selectedNode()
|
||||
|
||||
return ipn
|
||||
|
||||
def clean_nodes(self):
|
||||
for node in self._temp_nodes:
|
||||
nuke.delete(node)
|
||||
self.log.info("Deleted nodes...")
|
||||
|
||||
|
||||
class ExporterReviewLut(ExporterReview):
|
||||
"""
|
||||
Generator object for review lut from Nuke
|
||||
|
||||
Args:
|
||||
klass (pyblish.plugin): pyblish plugin parent
|
||||
instance (pyblish.instance): instance of pyblish context
|
||||
|
||||
|
||||
"""
|
||||
def __init__(self,
|
||||
klass,
|
||||
instance,
|
||||
name=None,
|
||||
ext=None,
|
||||
cube_size=None,
|
||||
lut_size=None,
|
||||
lut_style=None):
|
||||
# initialize parent class
|
||||
ExporterReview.__init__(self, klass, instance)
|
||||
|
||||
# deal with now lut defined in viewer lut
|
||||
if hasattr(klass, "viewer_lut_raw"):
|
||||
self.viewer_lut_raw = klass.viewer_lut_raw
|
||||
else:
|
||||
self.viewer_lut_raw = False
|
||||
|
||||
self.name = name or "baked_lut"
|
||||
self.ext = ext or "cube"
|
||||
self.cube_size = cube_size or 32
|
||||
self.lut_size = lut_size or 1024
|
||||
self.lut_style = lut_style or "linear"
|
||||
|
||||
# set frame start / end and file name to self
|
||||
self.get_file_info()
|
||||
|
||||
self.log.info("File info was set...")
|
||||
|
||||
self.file = self.fhead + self.name + ".{}".format(self.ext)
|
||||
self.path = os.path.join(
|
||||
self.staging_dir, self.file).replace("\\", "/")
|
||||
|
||||
def generate_lut(self):
|
||||
# ---------- start nodes creation
|
||||
|
||||
# CMSTestPattern
|
||||
cms_node = nuke.createNode("CMSTestPattern")
|
||||
cms_node["cube_size"].setValue(self.cube_size)
|
||||
# connect
|
||||
self._temp_nodes.append(cms_node)
|
||||
self.previous_node = cms_node
|
||||
self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes))
|
||||
|
||||
# Node View Process
|
||||
ipn = self.get_view_process_node()
|
||||
if ipn is not None:
|
||||
# connect
|
||||
ipn.setInput(0, self.previous_node)
|
||||
self._temp_nodes.append(ipn)
|
||||
self.previous_node = ipn
|
||||
self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))
|
||||
|
||||
if not self.viewer_lut_raw:
|
||||
# OCIODisplay
|
||||
dag_node = nuke.createNode("OCIODisplay")
|
||||
# connect
|
||||
dag_node.setInput(0, self.previous_node)
|
||||
self._temp_nodes.append(dag_node)
|
||||
self.previous_node = dag_node
|
||||
self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes))
|
||||
|
||||
# GenerateLUT
|
||||
gen_lut_node = nuke.createNode("GenerateLUT")
|
||||
gen_lut_node["file"].setValue(self.path)
|
||||
gen_lut_node["file_type"].setValue(".{}".format(self.ext))
|
||||
gen_lut_node["lut1d"].setValue(self.lut_size)
|
||||
gen_lut_node["style1d"].setValue(self.lut_style)
|
||||
# connect
|
||||
gen_lut_node.setInput(0, self.previous_node)
|
||||
self._temp_nodes.append(gen_lut_node)
|
||||
self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes))
|
||||
|
||||
# ---------- end nodes creation
|
||||
|
||||
# Export lut file
|
||||
nuke.execute(
|
||||
gen_lut_node.name(),
|
||||
int(self.first_frame),
|
||||
int(self.first_frame))
|
||||
|
||||
self.log.info("Exported...")
|
||||
|
||||
# ---------- generate representation data
|
||||
self.get_representation_data()
|
||||
|
||||
self.log.debug("Representation... `{}`".format(self.data))
|
||||
|
||||
# ---------- Clean up
|
||||
self.clean_nodes()
|
||||
|
||||
return self.data
|
||||
|
||||
|
||||
class ExporterReviewMov(ExporterReview):
|
||||
"""
|
||||
Metaclass for generating review mov files
|
||||
|
||||
Args:
|
||||
klass (pyblish.plugin): pyblish plugin parent
|
||||
instance (pyblish.instance): instance of pyblish context
|
||||
|
||||
"""
|
||||
def __init__(self,
|
||||
klass,
|
||||
instance,
|
||||
name=None,
|
||||
ext=None,
|
||||
):
|
||||
# initialize parent class
|
||||
ExporterReview.__init__(self, klass, instance)
|
||||
|
||||
# passing presets for nodes to self
|
||||
if hasattr(klass, "nodes"):
|
||||
self.nodes = klass.nodes
|
||||
else:
|
||||
self.nodes = {}
|
||||
|
||||
# deal with now lut defined in viewer lut
|
||||
if hasattr(klass, "viewer_lut_raw"):
|
||||
self.viewer_lut_raw = klass.viewer_lut_raw
|
||||
else:
|
||||
self.viewer_lut_raw = False
|
||||
|
||||
self.name = name or "baked"
|
||||
self.ext = ext or "mov"
|
||||
|
||||
# set frame start / end and file name to self
|
||||
self.get_file_info()
|
||||
|
||||
self.log.info("File info was set...")
|
||||
|
||||
self.file = self.fhead + self.name + ".{}".format(self.ext)
|
||||
self.path = os.path.join(
|
||||
self.staging_dir, self.file).replace("\\", "/")
|
||||
|
||||
def render(self, render_node_name):
|
||||
self.log.info("Rendering... ")
|
||||
# Render Write node
|
||||
nuke.execute(
|
||||
render_node_name,
|
||||
int(self.first_frame),
|
||||
int(self.last_frame))
|
||||
|
||||
self.log.info("Rendered...")
|
||||
|
||||
def save_file(self):
|
||||
import shutil
|
||||
with anlib.maintained_selection():
|
||||
self.log.info("Saving nodes as file... ")
|
||||
# create nk path
|
||||
path = os.path.splitext(self.path)[0] + ".nk"
|
||||
# save file to the path
|
||||
shutil.copyfile(self.instance.context.data["currentFile"], path)
|
||||
|
||||
self.log.info("Nodes exported...")
|
||||
return path
|
||||
|
||||
def generate_mov(self, farm=False):
|
||||
# ---------- start nodes creation
|
||||
|
||||
# Read node
|
||||
r_node = nuke.createNode("Read")
|
||||
r_node["file"].setValue(self.path_in)
|
||||
r_node["first"].setValue(self.first_frame)
|
||||
r_node["origfirst"].setValue(self.first_frame)
|
||||
r_node["last"].setValue(self.last_frame)
|
||||
r_node["origlast"].setValue(self.last_frame)
|
||||
# connect
|
||||
self._temp_nodes.append(r_node)
|
||||
self.previous_node = r_node
|
||||
self.log.debug("Read... `{}`".format(self._temp_nodes))
|
||||
|
||||
# View Process node
|
||||
ipn = self.get_view_process_node()
|
||||
if ipn is not None:
|
||||
# connect
|
||||
ipn.setInput(0, self.previous_node)
|
||||
self._temp_nodes.append(ipn)
|
||||
self.previous_node = ipn
|
||||
self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))
|
||||
|
||||
if not self.viewer_lut_raw:
|
||||
# OCIODisplay node
|
||||
dag_node = nuke.createNode("OCIODisplay")
|
||||
# connect
|
||||
dag_node.setInput(0, self.previous_node)
|
||||
self._temp_nodes.append(dag_node)
|
||||
self.previous_node = dag_node
|
||||
self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes))
|
||||
|
||||
# Write node
|
||||
write_node = nuke.createNode("Write")
|
||||
self.log.debug("Path: {}".format(self.path))
|
||||
write_node["file"].setValue(self.path)
|
||||
write_node["file_type"].setValue(self.ext)
|
||||
write_node["meta_codec"].setValue("ap4h")
|
||||
write_node["mov64_codec"].setValue("ap4h")
|
||||
write_node["mov64_write_timecode"].setValue(1)
|
||||
write_node["raw"].setValue(1)
|
||||
# connect
|
||||
write_node.setInput(0, self.previous_node)
|
||||
self._temp_nodes.append(write_node)
|
||||
self.log.debug("Write... `{}`".format(self._temp_nodes))
|
||||
# ---------- end nodes creation
|
||||
|
||||
# ---------- render or save to nk
|
||||
if farm:
|
||||
nuke.scriptSave()
|
||||
path_nk = self.save_file()
|
||||
self.data.update({
|
||||
"bakeScriptPath": path_nk,
|
||||
"bakeWriteNodeName": write_node.name(),
|
||||
"bakeRenderPath": self.path
|
||||
})
|
||||
else:
|
||||
self.render(write_node.name())
|
||||
# ---------- generate representation data
|
||||
self.get_representation_data(
|
||||
tags=["review", "delete"],
|
||||
range=True
|
||||
)
|
||||
|
||||
self.log.debug("Representation... `{}`".format(self.data))
|
||||
|
||||
# ---------- Clean up
|
||||
self.clean_nodes()
|
||||
nuke.scriptSave()
|
||||
return self.data
|
||||
|
||||
|
||||
def get_dependent_nodes(nodes):
|
||||
"""Get all dependent nodes connected to the list of nodes.
|
||||
|
||||
Looks for connections that go outside of the nodes passed in.
|
||||
|
||||
Arguments:
|
||||
nodes (list): list of nuke.Node objects
|
||||
|
||||
Returns:
|
||||
connections_in (dict): nodes mapped to their external input connections
connections_out (dict): nodes mapped to their single external dependent node
|
||||
"""
|
||||
|
||||
connections_in = dict()
|
||||
connections_out = dict()
|
||||
node_names = [n.name() for n in nodes]
|
||||
for node in nodes:
|
||||
inputs = node.dependencies()
|
||||
outputs = node.dependent()
|
||||
# collect all inputs outside
|
||||
test_in = [(i, n) for i, n in enumerate(inputs)
|
||||
if n.name() not in node_names]
|
||||
if test_in:
|
||||
connections_in.update({
|
||||
node: test_in
|
||||
})
|
||||
# collect all outputs outside
|
||||
test_out = [i for i in outputs if i.name() not in node_names]
|
||||
if test_out:
|
||||
# only one dependent node is allowed
|
||||
connections_out.update({
|
||||
node: test_out[-1]
|
||||
})
|
||||
|
||||
return connections_in, connections_out
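# Minimal usage sketch (illustrative only, not part of this module): assumes
# an interactive Nuke session where some nodes are selected.
selected = nuke.selectedNodes()
connections_in, connections_out = get_dependent_nodes(selected)
for node, input_links in connections_in.items():
    for input_index, external_node in input_links:
        print(node.name(), "input", input_index, "<-", external_node.name())
for node, external_node in connections_out.items():
    print(node.name(), "->", external_node.name())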
|
||||
|
||||
|
||||
def find_free_space_to_paste_nodes(
|
||||
nodes,
|
||||
group=nuke.root(),
|
||||
direction="right",
|
||||
offset=300):
|
||||
"""
|
||||
Get coordinates in the DAG (node graph) for placing new nodes.
|
||||
|
||||
Arguments:
|
||||
nodes (list): list of nuke.Node objects
|
||||
group (nuke.Node) [optional]: object in which context it is
|
||||
direction (str) [optional]: where we want it to be placed
|
||||
[left, right, top, bottom]
|
||||
offset (int) [optional]: what offset it is from rest of nodes
|
||||
|
||||
Returns:
|
||||
xpos (int): x coordinate in DAG
ypos (int): y coordinate in DAG
|
||||
"""
|
||||
if len(nodes) == 0:
|
||||
return 0, 0
|
||||
|
||||
group_xpos = list()
|
||||
group_ypos = list()
|
||||
|
||||
# get local coordinates of all nodes
|
||||
nodes_xpos = [n.xpos() for n in nodes] + \
|
||||
[n.xpos() + n.screenWidth() for n in nodes]
|
||||
|
||||
nodes_ypos = [n.ypos() for n in nodes] + \
|
||||
[n.ypos() + n.screenHeight() for n in nodes]
|
||||
|
||||
# get complete screen size of all nodes to be placed in
|
||||
nodes_screen_width = max(nodes_xpos) - min(nodes_xpos)
|
||||
nodes_screen_height = max(nodes_ypos) - min(nodes_ypos)
|
||||
|
||||
# get screen size (r,l,t,b) of all nodes in `group`
|
||||
with group:
|
||||
group_xpos = [n.xpos() for n in nuke.allNodes() if n not in nodes] + \
|
||||
[n.xpos() + n.screenWidth() for n in nuke.allNodes()
|
||||
if n not in nodes]
|
||||
group_ypos = [n.ypos() for n in nuke.allNodes() if n not in nodes] + \
|
||||
[n.ypos() + n.screenHeight() for n in nuke.allNodes()
|
||||
if n not in nodes]
|
||||
|
||||
# calc output left
|
||||
if direction in "left":
|
||||
xpos = min(group_xpos) - abs(nodes_screen_width) - abs(offset)
|
||||
ypos = min(group_ypos)
|
||||
return xpos, ypos
|
||||
# calc output right
|
||||
if direction in "right":
|
||||
xpos = max(group_xpos) + abs(offset)
|
||||
ypos = min(group_ypos)
|
||||
return xpos, ypos
|
||||
# calc output top
|
||||
if direction in "top":
|
||||
xpos = min(group_xpos)
|
||||
ypos = min(group_ypos) - abs(nodes_screen_height) - abs(offset)
|
||||
return xpos, ypos
|
||||
# calc output bottom
|
||||
if direction in "bottom":
|
||||
xpos = min(group_xpos)
|
||||
ypos = max(group_ypos) + abs(offset)
|
||||
return xpos, ypos
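# Minimal usage sketch (illustrative only): assumes freshly pasted nodes are
# selected; the offset of 200 is an arbitrary example value.
pasted = nuke.selectedNodes()
if pasted:
    xpos, ypos = find_free_space_to_paste_nodes(pasted, direction="right", offset=200)
    min_x = min(n.xpos() for n in pasted)
    min_y = min(n.ypos() for n in pasted)
    for node in pasted:
        # preserve the relative layout while moving the nodes into free space
        node.setXYpos(xpos + node.xpos() - min_x, ypos + node.ypos() - min_y)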
|
||||
@ -1,6 +1,6 @@
|
|||
from pype import api as pype
|
||||
from pypeapp import Anatomy, config
|
||||
|
||||
import nuke
|
||||
|
||||
log = pype.Logger().get_logger(__name__, "nuke")
|
||||
|
||||
|
|
@ -28,7 +28,7 @@ def get_node_dataflow_preset(**kwarg):
|
|||
families = kwarg.get("families", [])
|
||||
preset = kwarg.get("preset", None) # omit < 2.0.0v
|
||||
|
||||
assert any([host, cls]), log.error(
|
||||
assert any([host, cls]), nuke.message(
|
||||
"`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__))
|
||||
|
||||
nuke_dataflow = get_dataflow_preset().get(str(host), None)
|
||||
|
|
@ -56,8 +56,10 @@ def get_node_colorspace_preset(**kwarg):
|
|||
families = kwarg.get("families", [])
|
||||
preset = kwarg.get("preset", None) # omit < 2.0.0v
|
||||
|
||||
assert any([host, cls]), log.error(
|
||||
"`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__))
|
||||
if not any([host, cls]):
|
||||
msg = "`{}`: Missing mandatory kwargs `host`, `cls`".format(__file__)
|
||||
log.error(msg)
|
||||
nuke.message(msg)
|
||||
|
||||
nuke_colorspace = get_colorspace_preset().get(str(host), None)
|
||||
nuke_colorspace_node = nuke_colorspace.get(str(cls), None)
|
||||
|
|
|
|||
64 pype/nuke/utils.py Normal file
|
|
@ -0,0 +1,64 @@
|
|||
import os
|
||||
import nuke
|
||||
from avalon.nuke import lib as anlib
|
||||
|
||||
|
||||
def get_node_outputs(node):
|
||||
'''
|
||||
Return a dictionary of the nodes and pipes that are connected to node
|
||||
'''
|
||||
dep_dict = {}
|
||||
dependencies = node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS)
|
||||
for d in dependencies:
|
||||
dep_dict[d] = []
|
||||
for i in range(d.inputs()):
|
||||
if d.input(i) == node:
|
||||
dep_dict[d].append(i)
|
||||
return dep_dict
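# Illustrative sketch of rewiring downstream connections to a replacement
# node; `old_node` and `new_node` are hypothetical existing nuke.Node objects.
outputs = get_node_outputs(old_node)
for downstream_node, input_indices in outputs.items():
    for index in input_indices:
        downstream_node.setInput(index, new_node)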
|
||||
|
||||
|
||||
def is_node_gizmo(node):
|
||||
'''
|
||||
return True if node is gizmo
|
||||
'''
|
||||
return 'gizmo_file' in node.knobs()
|
||||
|
||||
|
||||
def gizmo_is_nuke_default(gizmo):
|
||||
'''Check if gizmo is in default install path'''
|
||||
plug_dir = os.path.join(os.path.dirname(
|
||||
nuke.env['ExecutablePath']), 'plugins')
|
||||
return gizmo.filename().startswith(plug_dir)
|
||||
|
||||
|
||||
def bake_gizmos_recursively(in_group=nuke.Root()):
|
||||
"""Converting a gizmo to group
|
||||
|
||||
Arguments:
in_group (nuke.Node) [optional]: group node or all nodes
|
||||
"""
|
||||
# preserve selection after all is done
|
||||
with anlib.maintained_selection():
|
||||
# jump to the group
|
||||
with in_group:
|
||||
for node in nuke.allNodes():
|
||||
if is_node_gizmo(node) and not gizmo_is_nuke_default(node):
|
||||
with node:
|
||||
outputs = get_node_outputs(node)
|
||||
group = node.makeGroup()
|
||||
# Reconnect inputs and outputs if any
|
||||
if outputs:
|
||||
for n, pipes in outputs.items():
|
||||
for i in pipes:
|
||||
n.setInput(i, group)
|
||||
for i in range(node.inputs()):
|
||||
group.setInput(i, node.input(i))
|
||||
# set node position and name
|
||||
group.setXYpos(node.xpos(), node.ypos())
|
||||
name = node.name()
|
||||
nuke.delete(node)
|
||||
group.setName(name)
|
||||
node = group
|
||||
|
||||
if node.Class() == "Group":
|
||||
bake_gizmos_recursively(node)
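# Minimal usage sketch from caller code (e.g. before publishing), assuming
# this module is importable as pype.nuke.utils:
# from pype.nuke.utils import bake_gizmos_recursively
bake_gizmos_recursively()  # walks nuke.Root() and any nested Group nodes
nuke.scriptSave()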
|
||||
|
|
@ -22,19 +22,16 @@ def has_unsaved_changes():
|
|||
|
||||
|
||||
def save_file(filepath):
|
||||
file = os.path.basename(filepath)
|
||||
project = hiero.core.projects()[-1]
|
||||
|
||||
# close `Untitled` project
|
||||
if "Untitled" not in project.name():
|
||||
log.info("Saving project: `{}`".format(project.name()))
|
||||
if project:
|
||||
log.info("Saving project: `{}` as '{}'".format(project.name(), file))
|
||||
project.saveAs(filepath)
|
||||
elif not project:
|
||||
else:
|
||||
log.info("Creating new project...")
|
||||
project = hiero.core.newProject()
|
||||
project.saveAs(filepath)
|
||||
else:
|
||||
log.info("Dropping `Untitled` project...")
|
||||
return
|
||||
|
||||
|
||||
def open_file(filepath):
|
||||
|
|
|
|||
|
|
@ -1,9 +1,6 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
from avalon import (
|
||||
io,
|
||||
api as avalon
|
||||
)
|
||||
from avalon import api as avalon
|
||||
from pype import api as pype
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
|
|
|||
32 pype/plugins/blender/create/create_model.py Normal file
|
|
@ -0,0 +1,32 @@
|
|||
"""Create a model asset."""
|
||||
|
||||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import Creator, lib
|
||||
|
||||
|
||||
class CreateModel(Creator):
|
||||
"""Polygonal static geometry"""
|
||||
|
||||
name = "modelMain"
|
||||
label = "Model"
|
||||
family = "model"
|
||||
icon = "cube"
|
||||
|
||||
def process(self):
|
||||
import pype.blender
|
||||
|
||||
asset = self.data["asset"]
|
||||
subset = self.data["subset"]
|
||||
name = pype.blender.plugin.model_name(asset, subset)
|
||||
collection = bpy.data.collections.new(name=name)
|
||||
bpy.context.scene.collection.children.link(collection)
|
||||
self.data['task'] = api.Session.get('AVALON_TASK')
|
||||
lib.imprint(collection, self.data)
|
||||
|
||||
if (self.options or {}).get("useSelection"):
|
||||
for obj in lib.get_selection():
|
||||
collection.objects.link(obj)
|
||||
|
||||
return collection
|
||||
315 pype/plugins/blender/load/load_model.py Normal file
|
|
@ -0,0 +1,315 @@
|
|||
"""Load a model asset in Blender."""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import avalon.blender.pipeline
|
||||
import bpy
|
||||
import pype.blender
|
||||
from avalon import api
|
||||
|
||||
logger = logging.getLogger("pype").getChild("blender").getChild("load_model")
|
||||
|
||||
|
||||
class BlendModelLoader(pype.blender.AssetLoader):
|
||||
"""Load models from a .blend file.
|
||||
|
||||
Because they come from a .blend file we can simply link the collection that
|
||||
contains the model. There is no further need to 'containerise' it.
|
||||
|
||||
Warning:
|
||||
Loading the same asset more than once is not properly supported at the
|
||||
moment.
|
||||
"""
|
||||
|
||||
families = ["model"]
|
||||
representations = ["blend"]
|
||||
|
||||
label = "Link Model"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
@staticmethod
|
||||
def _get_lib_collection(name: str, libpath: Path) -> Optional[bpy.types.Collection]:
|
||||
"""Find the collection(s) with name, loaded from libpath.
|
||||
|
||||
Note:
|
||||
It is assumed that only 1 matching collection is found.
|
||||
"""
|
||||
for collection in bpy.data.collections:
|
||||
if collection.name != name:
|
||||
continue
|
||||
if collection.library is None:
|
||||
continue
|
||||
if not collection.library.filepath:
|
||||
continue
|
||||
collection_lib_path = str(Path(bpy.path.abspath(collection.library.filepath)).resolve())
|
||||
normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
if collection_lib_path == normalized_libpath:
|
||||
return collection
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _collection_contains_object(
|
||||
collection: bpy.types.Collection, object: bpy.types.Object
|
||||
) -> bool:
|
||||
"""Check if the collection contains the object."""
|
||||
for obj in collection.objects:
|
||||
if obj == object:
|
||||
return True
|
||||
return False
|
||||
|
||||
def process_asset(
|
||||
self, context: dict, name: str, namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None
|
||||
) -> Optional[List]:
|
||||
"""
|
||||
Arguments:
|
||||
name: Use pre-defined name
|
||||
namespace: Use pre-defined namespace
|
||||
context: Full parenthood of representation to load
|
||||
options: Additional settings dictionary
|
||||
"""
|
||||
|
||||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
lib_container = pype.blender.plugin.model_name(asset, subset)
|
||||
container_name = pype.blender.plugin.model_name(
|
||||
asset, subset, namespace
|
||||
)
|
||||
relative = bpy.context.preferences.filepaths.use_relative_paths
|
||||
|
||||
with bpy.data.libraries.load(
|
||||
libpath, link=True, relative=relative
|
||||
) as (_, data_to):
|
||||
data_to.collections = [lib_container]
|
||||
|
||||
scene = bpy.context.scene
|
||||
instance_empty = bpy.data.objects.new(
|
||||
container_name, None
|
||||
)
|
||||
if not instance_empty.get("avalon"):
|
||||
instance_empty["avalon"] = dict()
|
||||
avalon_info = instance_empty["avalon"]
|
||||
avalon_info.update({"container_name": container_name})
|
||||
scene.collection.objects.link(instance_empty)
|
||||
instance_empty.instance_type = 'COLLECTION'
|
||||
container = bpy.data.collections[lib_container]
|
||||
container.name = container_name
|
||||
instance_empty.instance_collection = container
|
||||
container.make_local()
|
||||
avalon.blender.pipeline.containerise_existing(
|
||||
container,
|
||||
name,
|
||||
namespace,
|
||||
context,
|
||||
self.__class__.__name__,
|
||||
)
|
||||
|
||||
nodes = list(container.objects)
|
||||
nodes.append(container)
|
||||
nodes.append(instance_empty)
|
||||
self[:] = nodes
|
||||
return nodes
|
||||
|
||||
def update(self, container: Dict, representation: Dict):
|
||||
"""Update the loaded asset.
|
||||
|
||||
This will remove all objects of the current collection, load the new
|
||||
ones and add them to the collection.
|
||||
If the objects of the collection are used in another collection they
|
||||
will not be removed, only unlinked. Normally this should not be the
|
||||
case though.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
collection = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
libpath = Path(api.get_representation_path(representation))
|
||||
extension = libpath.suffix.lower()
|
||||
|
||||
logger.debug(
|
||||
"Container: %s\nRepresentation: %s",
|
||||
pformat(container, indent=2),
|
||||
pformat(representation, indent=2),
|
||||
)
|
||||
|
||||
assert collection, (
|
||||
f"The asset is not loaded: {container['objectName']}"
|
||||
)
|
||||
assert not (collection.children), (
|
||||
"Nested collections are not supported."
|
||||
)
|
||||
assert libpath, (
|
||||
"No existing library file found for {container['objectName']}"
|
||||
)
|
||||
assert libpath.is_file(), (
|
||||
f"The file doesn't exist: {libpath}"
|
||||
)
|
||||
assert extension in pype.blender.plugin.VALID_EXTENSIONS, (
|
||||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
collection_libpath = (
|
||||
self._get_library_from_container(collection).filepath
|
||||
)
|
||||
normalized_collection_libpath = (
|
||||
str(Path(bpy.path.abspath(collection_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
logger.debug(
|
||||
"normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_collection_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_collection_libpath == normalized_libpath:
|
||||
logger.info("Library already loaded, not updating...")
|
||||
return
|
||||
# Let Blender's garbage collection take care of removing the library
|
||||
# itself after removing the objects.
|
||||
objects_to_remove = set()
|
||||
collection_objects = list()
|
||||
collection_objects[:] = collection.objects
|
||||
for obj in collection_objects:
|
||||
# Unlink every object
|
||||
collection.objects.unlink(obj)
|
||||
remove_obj = True
|
||||
for coll in [
|
||||
coll for coll in bpy.data.collections
|
||||
if coll != collection
|
||||
]:
|
||||
if (
|
||||
coll.objects and
|
||||
self._collection_contains_object(coll, obj)
|
||||
):
|
||||
remove_obj = False
|
||||
if remove_obj:
|
||||
objects_to_remove.add(obj)
|
||||
|
||||
for obj in objects_to_remove:
|
||||
# Only delete objects that are not used elsewhere
|
||||
bpy.data.objects.remove(obj)
|
||||
|
||||
instance_empties = [
|
||||
obj for obj in collection.users_dupli_group
|
||||
if obj.name in collection.name
|
||||
]
|
||||
if instance_empties:
|
||||
instance_empty = instance_empties[0]
|
||||
container_name = instance_empty["avalon"]["container_name"]
|
||||
|
||||
relative = bpy.context.preferences.filepaths.use_relative_paths
|
||||
with bpy.data.libraries.load(
|
||||
str(libpath), link=True, relative=relative
|
||||
) as (_, data_to):
|
||||
data_to.collections = [container_name]
|
||||
|
||||
new_collection = self._get_lib_collection(container_name, libpath)
|
||||
if new_collection is None:
|
||||
raise ValueError(
|
||||
"A matching collection '{container_name}' "
|
||||
"should have been found in: {libpath}"
|
||||
)
|
||||
|
||||
for obj in new_collection.objects:
|
||||
collection.objects.link(obj)
|
||||
bpy.data.collections.remove(new_collection)
|
||||
# Update the representation on the collection
|
||||
avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY]
|
||||
avalon_prop["representation"] = str(representation["_id"])
|
||||
|
||||
def remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
||||
Arguments:
|
||||
container (avalon-core:container-1.0): Container to remove,
|
||||
from `host.ls()`.
|
||||
|
||||
Returns:
|
||||
bool: Whether the container was deleted.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
collection = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
if not collection:
|
||||
return False
|
||||
assert not (collection.children), (
|
||||
"Nested collections are not supported."
|
||||
)
|
||||
instance_parents = list(collection.users_dupli_group)
|
||||
instance_objects = list(collection.objects)
|
||||
for obj in instance_objects + instance_parents:
|
||||
bpy.data.objects.remove(obj)
|
||||
bpy.data.collections.remove(collection)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class CacheModelLoader(pype.blender.AssetLoader):
|
||||
"""Load cache models.
|
||||
|
||||
Stores the imported asset in a collection named after the asset.
|
||||
|
||||
Note:
|
||||
At least for now it only supports Alembic files.
|
||||
"""
|
||||
|
||||
families = ["model"]
|
||||
representations = ["abc"]
|
||||
|
||||
label = "Link Model"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def process_asset(
|
||||
self, context: dict, name: str, namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None
|
||||
) -> Optional[List]:
|
||||
"""
|
||||
Arguments:
|
||||
name: Use pre-defined name
|
||||
namespace: Use pre-defined namespace
|
||||
context: Full parenthood of representation to load
|
||||
options: Additional settings dictionary
|
||||
"""
|
||||
raise NotImplementedError("Loading of Alembic files is not yet implemented.")
|
||||
# TODO (jasper): implement Alembic import.
|
||||
|
||||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
# TODO (jasper): evaluate use of namespace which is 'alien' to Blender.
|
||||
lib_container = container_name = (
|
||||
pype.blender.plugin.model_name(asset, subset, namespace)
|
||||
)
|
||||
relative = bpy.context.preferences.filepaths.use_relative_paths
|
||||
|
||||
with bpy.data.libraries.load(
|
||||
libpath, link=True, relative=relative
|
||||
) as (data_from, data_to):
|
||||
data_to.collections = [lib_container]
|
||||
|
||||
scene = bpy.context.scene
|
||||
instance_empty = bpy.data.objects.new(
|
||||
container_name, None
|
||||
)
|
||||
scene.collection.objects.link(instance_empty)
|
||||
instance_empty.instance_type = 'COLLECTION'
|
||||
collection = bpy.data.collections[lib_container]
|
||||
collection.name = container_name
|
||||
instance_empty.instance_collection = collection
|
||||
|
||||
nodes = list(collection.objects)
|
||||
nodes.append(collection)
|
||||
nodes.append(instance_empty)
|
||||
self[:] = nodes
|
||||
return nodes
|
||||
16 pype/plugins/blender/publish/collect_current_file.py Normal file
|
|
@ -0,0 +1,16 @@
|
|||
import bpy
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
|
||||
"""Inject the current working file into context"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.5
|
||||
label = "Blender Current File"
|
||||
hosts = ['blender']
|
||||
|
||||
def process(self, context):
|
||||
"""Inject the current working file"""
|
||||
current_file = bpy.data.filepath
|
||||
context.data['currentFile'] = current_file
|
||||
53 pype/plugins/blender/publish/collect_model.py Normal file
|
|
@ -0,0 +1,53 @@
|
|||
import typing
|
||||
from typing import Generator
|
||||
|
||||
import bpy
|
||||
|
||||
import avalon.api
|
||||
import pyblish.api
|
||||
from avalon.blender.pipeline import AVALON_PROPERTY
|
||||
|
||||
|
||||
class CollectModel(pyblish.api.ContextPlugin):
|
||||
"""Collect the data of a model."""
|
||||
|
||||
hosts = ["blender"]
|
||||
label = "Collect Model"
|
||||
order = pyblish.api.CollectorOrder
|
||||
|
||||
@staticmethod
|
||||
def get_model_collections() -> Generator:
|
||||
"""Return all 'model' collections.
|
||||
|
||||
Check if the family is 'model' and if it doesn't have the
|
||||
representation set. If the representation is set, it is a loaded model
|
||||
and we don't want to publish it.
|
||||
"""
|
||||
for collection in bpy.data.collections:
|
||||
avalon_prop = collection.get(AVALON_PROPERTY) or dict()
|
||||
if (avalon_prop.get('family') == 'model'
|
||||
and not avalon_prop.get('representation')):
|
||||
yield collection
|
||||
|
||||
def process(self, context):
|
||||
"""Collect the models from the current Blender scene."""
|
||||
collections = self.get_model_collections()
|
||||
for collection in collections:
|
||||
avalon_prop = collection[AVALON_PROPERTY]
|
||||
asset = avalon_prop['asset']
|
||||
family = avalon_prop['family']
|
||||
subset = avalon_prop['subset']
|
||||
task = avalon_prop['task']
|
||||
name = f"{asset}_{subset}"
|
||||
instance = context.create_instance(
|
||||
name=name,
|
||||
family=family,
|
||||
families=[family],
|
||||
subset=subset,
|
||||
asset=asset,
|
||||
task=task,
|
||||
)
|
||||
members = list(collection.objects)
|
||||
members.append(collection)
|
||||
instance[:] = members
|
||||
self.log.debug(instance.data)
|
||||
47 pype/plugins/blender/publish/extract_model.py Normal file
|
|
@ -0,0 +1,47 @@
|
|||
import os
|
||||
import avalon.blender.workio
|
||||
|
||||
import pype.api
|
||||
|
||||
|
||||
class ExtractModel(pype.api.Extractor):
|
||||
"""Extract as model."""
|
||||
|
||||
label = "Model"
|
||||
hosts = ["blender"]
|
||||
families = ["model"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
# Define extract output file path
|
||||
|
||||
stagingdir = self.staging_dir(instance)
|
||||
filename = f"{instance.name}.blend"
|
||||
filepath = os.path.join(stagingdir, filename)
|
||||
|
||||
# Perform extraction
|
||||
self.log.info("Performing extraction..")
|
||||
|
||||
# Just save the file to a temporary location. At least for now it's no
|
||||
# problem to have (possibly) extra stuff in the file.
|
||||
avalon.blender.workio.save_file(filepath, copy=True)
|
||||
#
|
||||
# # Store reference for integration
|
||||
# if "files" not in instance.data:
|
||||
# instance.data["files"] = list()
|
||||
#
|
||||
# # instance.data["files"].append(filename)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': 'blend',
|
||||
'ext': 'blend',
|
||||
'files': filename,
|
||||
"stagingDir": stagingdir,
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
|
||||
self.log.info("Extracted instance '%s' to: %s", instance.name, representation)
|
||||
49 pype/plugins/blender/publish/validate_mesh_has_uv.py Normal file
|
|
@ -0,0 +1,49 @@
|
|||
from typing import List
|
||||
|
||||
import bpy
|
||||
|
||||
import pyblish.api
|
||||
import pype.blender.action
|
||||
|
||||
|
||||
class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
|
||||
"""Validate that the current mesh has UV's."""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
hosts = ["blender"]
|
||||
families = ["model"]
|
||||
category = "geometry"
|
||||
label = "Mesh Has UV's"
|
||||
actions = [pype.blender.action.SelectInvalidAction]
|
||||
optional = True
|
||||
|
||||
@staticmethod
|
||||
def has_uvs(obj: bpy.types.Object) -> bool:
|
||||
"""Check if an object has uv's."""
|
||||
if not obj.data.uv_layers:
|
||||
return False
|
||||
for uv_layer in obj.data.uv_layers:
|
||||
for polygon in obj.data.polygons:
|
||||
for loop_index in polygon.loop_indices:
|
||||
if not uv_layer.data[loop_index].uv:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance) -> List:
|
||||
invalid = []
|
||||
# TODO (jasper): only check objects in the collection that will be published?
|
||||
for obj in [
|
||||
obj for obj in bpy.data.objects if obj.type == 'MESH'
|
||||
]:
|
||||
# Make sure we are in object mode.
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
if not cls.has_uvs(obj):
|
||||
invalid.append(obj)
|
||||
return invalid
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}")
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
from typing import List
|
||||
|
||||
import bpy
|
||||
|
||||
import pyblish.api
|
||||
import pype.blender.action
|
||||
|
||||
|
||||
class ValidateMeshNoNegativeScale(pyblish.api.Validator):
|
||||
"""Ensure that meshes don't have a negative scale."""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
hosts = ["blender"]
|
||||
families = ["model"]
|
||||
label = "Mesh No Negative Scale"
|
||||
actions = [pype.blender.action.SelectInvalidAction]
|
||||
|
||||
@staticmethod
|
||||
def get_invalid(instance) -> List:
|
||||
invalid = []
|
||||
# TODO (jasper): only check objects in the collection that will be published?
|
||||
for obj in [
|
||||
obj for obj in bpy.data.objects if obj.type == 'MESH'
|
||||
]:
|
||||
if any(v < 0 for v in obj.scale):
|
||||
invalid.append(obj)
|
||||
|
||||
return invalid
|
||||
|
||||
def process(self, instance):
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise RuntimeError(
|
||||
f"Meshes found in instance with negative scale: {invalid}"
|
||||
)
|
||||
|
|
@ -7,8 +7,9 @@ class IntegrateFtrackComments(pyblish.api.InstancePlugin):
|
|||
"""Create comments in Ftrack."""
|
||||
|
||||
order = pyblish.api.IntegratorOrder
|
||||
label = "Integrate Comments to Ftrack."
|
||||
label = "Integrate Comments to Ftrack"
|
||||
families = ["shot"]
|
||||
enabled = False
|
||||
|
||||
def process(self, instance):
|
||||
session = instance.context.data["ftrackSession"]
|
||||
|
|
@ -23,25 +23,43 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
|
|||
|
||||
# Collect session
|
||||
session = ftrack_api.Session()
|
||||
self.log.debug("Ftrack user: \"{0}\"".format(session.api_user))
|
||||
context.data["ftrackSession"] = session
|
||||
|
||||
# Collect task
|
||||
|
||||
project = os.environ.get('AVALON_PROJECT', '')
|
||||
asset = os.environ.get('AVALON_ASSET', '')
|
||||
task = os.environ.get('AVALON_TASK', None)
|
||||
self.log.debug(task)
|
||||
project_name = os.environ.get('AVALON_PROJECT', '')
|
||||
asset_name = os.environ.get('AVALON_ASSET', '')
|
||||
task_name = os.environ.get('AVALON_TASK', None)
|
||||
|
||||
# Find project entity
|
||||
project_query = 'Project where full_name is "{0}"'.format(project_name)
|
||||
self.log.debug("Project query: < {0} >".format(project_query))
|
||||
project_entity = session.query(project_query).one()
|
||||
self.log.debug("Project found: {0}".format(project_entity))
|
||||
|
||||
# Find asset entity
|
||||
entity_query = (
|
||||
'TypedContext where project_id is "{0}"'
|
||||
' and name is "{1}"'
|
||||
).format(project_entity["id"], asset_name)
|
||||
self.log.debug("Asset entity query: < {0} >".format(entity_query))
|
||||
asset_entity = session.query(entity_query).one()
|
||||
self.log.debug("Asset found: {0}".format(asset_entity))
|
||||
|
||||
# Find task entity if task is set
|
||||
if task_name:
|
||||
task_query = (
|
||||
'Task where name is "{0}" and parent_id is "{1}"'
|
||||
).format(task_name, asset_entity["id"])
|
||||
self.log.debug("Task entity query: < {0} >".format(task_query))
|
||||
task_entity = session.query(task_query).one()
|
||||
self.log.debug("Task entity found: {0}".format(task_entity))
|
||||
|
||||
if task:
|
||||
result = session.query('Task where\
|
||||
project.full_name is "{0}" and\
|
||||
name is "{1}" and\
|
||||
parent.name is "{2}"'.format(project, task, asset)).one()
|
||||
context.data["ftrackTask"] = result
|
||||
else:
|
||||
result = session.query('TypedContext where\
|
||||
project.full_name is "{0}" and\
|
||||
name is "{1}"'.format(project, asset)).one()
|
||||
context.data["ftrackEntity"] = result
|
||||
task_entity = None
|
||||
self.log.warning("Task name is not set.")
|
||||
|
||||
self.log.info(result)
|
||||
context.data["ftrackProject"] = asset_entity
|
||||
context.data["ftrackEntity"] = asset_entity
|
||||
context.data["ftrackTask"] = task_entity
|
||||
|
|
|
|||
|
|
@ -77,6 +77,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
|
|||
info_msg = "Created new {entity_type} with data: {data}"
|
||||
info_msg += ", metadata: {metadata}."
|
||||
|
||||
used_asset_versions = []
|
||||
# Iterate over components and publish
|
||||
for data in instance.data.get("ftrackComponentsList", []):
|
||||
|
||||
|
|
@ -144,8 +145,14 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
|
|||
"version": 0,
|
||||
"asset": asset_entity,
|
||||
}
|
||||
|
||||
assetversion_data.update(data.get("assetversion_data", {}))
|
||||
_assetversion_data = data.get("assetversion_data", {})
|
||||
assetversion_cust_attrs = _assetversion_data.pop(
|
||||
"custom_attributes", {}
|
||||
)
|
||||
asset_version_comment = _assetversion_data.pop(
|
||||
"comment", None
|
||||
)
|
||||
assetversion_data.update(_assetversion_data)
|
||||
|
||||
assetversion_entity = session.query(
|
||||
self.query("AssetVersion", assetversion_data)
|
||||
|
|
@ -182,6 +189,36 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
|
|||
existing_assetversion_metadata.update(assetversion_metadata)
|
||||
assetversion_entity["metadata"] = existing_assetversion_metadata
|
||||
|
||||
# Add comment
|
||||
if asset_version_comment:
|
||||
assetversion_entity["comment"] = asset_version_comment
|
||||
try:
|
||||
session.commit()
|
||||
except Exception:
|
||||
session.rollback()
|
||||
self.log.warning((
|
||||
"Comment was not possible to set for AssetVersion"
|
||||
"\"{0}\". Can't set it's value to: \"{1}\""
|
||||
).format(
|
||||
assetversion_entity["id"], str(asset_version_comment)
|
||||
))
|
||||
|
||||
# Adding Custom Attributes
|
||||
for attr, val in assetversion_cust_attrs.items():
|
||||
if attr in assetversion_entity["custom_attributes"]:
|
||||
try:
|
||||
assetversion_entity["custom_attributes"][attr] = val
|
||||
session.commit()
|
||||
continue
|
||||
except Exception:
|
||||
session.rollback()
|
||||
|
||||
self.log.warning((
|
||||
"Custom Attrubute \"{0}\""
|
||||
" is not available for AssetVersion <{1}>."
|
||||
" Can't set it's value to: \"{2}\""
|
||||
).format(attr, assetversion_entity["id"], str(val)))
|
||||
|
||||
# Have to commit the version and asset, because location can't
|
||||
# determine the final location without.
|
||||
try:
|
||||
|
|
@ -350,3 +387,14 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
|
|||
tp, value, tb = sys.exc_info()
|
||||
session.rollback()
|
||||
six.reraise(tp, value, tb)
|
||||
|
||||
if assetversion_entity not in used_asset_versions:
|
||||
used_asset_versions.append(assetversion_entity)
|
||||
|
||||
asset_versions_key = "ftrackIntegratedAssetVersions"
|
||||
if asset_versions_key not in instance.data:
|
||||
instance.data[asset_versions_key] = []
|
||||
|
||||
for asset_version in used_asset_versions:
|
||||
if asset_version not in instance.data[asset_versions_key]:
|
||||
instance.data[asset_versions_key].append(asset_version)
|
||||
|
|
|
|||
|
|
@ -28,7 +28,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
|
|||
'plate': 'img',
|
||||
'audio': 'audio',
|
||||
'workfile': 'scene',
|
||||
'animation': 'cache'
|
||||
'animation': 'cache',
|
||||
'image': 'img'
|
||||
}
|
||||
|
||||
def process(self, instance):
|
||||
|
|
@ -115,6 +116,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
|
|||
},
|
||||
"assetversion_data": {
|
||||
"version": version_number,
|
||||
"comment": instance.context.data.get("comment", "")
|
||||
},
|
||||
"component_data": component_data,
|
||||
"component_path": comp['published_path'],
|
||||
|
|
@ -123,6 +125,16 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
|
|||
"thumbnail": comp['thumbnail']
|
||||
}
|
||||
|
||||
# Add custom attributes for AssetVersion
|
||||
assetversion_cust_attrs = {}
|
||||
intent_val = instance.context.data.get("intent")
|
||||
if intent_val:
|
||||
assetversion_cust_attrs["intent"] = intent_val
|
||||
|
||||
component_item["assetversion_data"]["custom_attributes"] = (
|
||||
assetversion_cust_attrs
|
||||
)
|
||||
|
||||
componentList.append(component_item)
|
||||
# Create copy with ftrack.unmanaged location if thumb or prev
|
||||
if comp.get('thumbnail') or comp.get('preview') \
|
||||
|
|
|
|||
51 pype/plugins/ftrack/publish/integrate_ftrack_note.py Normal file
|
|
@ -0,0 +1,51 @@
|
|||
import sys
|
||||
import pyblish.api
|
||||
import six
|
||||
|
||||
|
||||
class IntegrateFtrackNote(pyblish.api.InstancePlugin):
|
||||
"""Create comments in Ftrack."""
|
||||
|
||||
# Must be after integrate asset new
|
||||
order = pyblish.api.IntegratorOrder + 0.4999
|
||||
label = "Integrate Ftrack note"
|
||||
families = ["ftrack"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
comment = (instance.context.data.get("comment") or "").strip()
|
||||
if not comment:
|
||||
self.log.info("Comment is not set.")
|
||||
return
|
||||
|
||||
self.log.debug("Comment is set to {}".format(comment))
|
||||
|
||||
asset_versions_key = "ftrackIntegratedAssetVersions"
|
||||
asset_versions = instance.data.get(asset_versions_key)
|
||||
if not asset_versions:
|
||||
self.log.info("There are any integrated AssetVersions")
|
||||
return
|
||||
|
||||
session = instance.context.data["ftrackSession"]
|
||||
user = session.query(
|
||||
"User where username is \"{}\"".format(session.api_user)
|
||||
).first()
|
||||
if not user:
|
||||
self.log.warning(
|
||||
"Was not able to query current User {}".format(
|
||||
session.api_user
|
||||
)
|
||||
)
|
||||
|
||||
for asset_version in asset_versions:
|
||||
asset_version.create_note(comment, author=user)
|
||||
|
||||
try:
|
||||
session.commit()
|
||||
self.log.debug("Note added to AssetVersion \"{}\"".format(
|
||||
str(asset_version)
|
||||
))
|
||||
except Exception:
|
||||
tp, value, tb = sys.exc_info()
|
||||
session.rollback()
|
||||
six.reraise(tp, value, tb)
|
||||
|
|
@ -11,13 +11,13 @@ class IntegrateCleanComponentData(pyblish.api.InstancePlugin):
|
|||
label = 'Clean component data'
|
||||
families = ["ftrack"]
|
||||
optional = True
|
||||
active = True
|
||||
active = False
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
for comp in instance.data['representations']:
|
||||
self.log.debug('component {}'.format(comp))
|
||||
|
||||
|
||||
if "%" in comp['published_path'] or "#" in comp['published_path']:
|
||||
continue
|
||||
|
||||
|
|
|
|||
|
|
@ -15,4 +15,5 @@ class CollectComment(pyblish.api.ContextPlugin):
|
|||
order = pyblish.api.CollectorOrder
|
||||
|
||||
def process(self, context):
|
||||
context.data["comment"] = ""
|
||||
comment = (context.data.get("comment") or "").strip()
|
||||
context.data["comment"] = comment
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ import os
|
|||
import re
|
||||
import copy
|
||||
import json
|
||||
from pprint import pformat
|
||||
|
||||
import pyblish.api
|
||||
from avalon import api
|
||||
|
|
@ -54,10 +53,6 @@ def collect(root,
|
|||
patterns=[pattern],
|
||||
minimum_items=1)
|
||||
|
||||
# Ignore any remainders
|
||||
if remainder:
|
||||
print("Skipping remainder {}".format(remainder))
|
||||
|
||||
# Exclude any frames outside start and end frame.
|
||||
for collection in collections:
|
||||
for index in list(collection.indexes):
|
||||
|
|
@ -71,7 +66,7 @@ def collect(root,
|
|||
# Keep only collections that have at least a single frame
|
||||
collections = [c for c in collections if c.indexes]
|
||||
|
||||
return collections
|
||||
return collections, remainder
|
||||
|
||||
|
||||
class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
||||
|
|
@ -95,11 +90,22 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
|
||||
"""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
order = pyblish.api.CollectorOrder - 0.0001
|
||||
targets = ["filesequence"]
|
||||
label = "RenderedFrames"
|
||||
|
||||
def process(self, context):
|
||||
pixel_aspect = 1
|
||||
resolution_width = 1920
|
||||
resolution_height = 1080
|
||||
lut_path = None
|
||||
slate_frame = None
|
||||
families_data = None
|
||||
baked_mov_path = None
|
||||
subset = None
|
||||
version = None
|
||||
frame_start = 0
|
||||
frame_end = 0
|
||||
if os.environ.get("PYPE_PUBLISH_PATHS"):
|
||||
paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
|
||||
self.log.info("Collecting paths: {}".format(paths))
|
||||
|
|
@ -117,12 +123,18 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
try:
|
||||
data = json.load(f)
|
||||
except Exception as exc:
|
||||
self.log.error("Error loading json: "
|
||||
"{} - Exception: {}".format(path, exc))
|
||||
self.log.error(
|
||||
"Error loading json: "
|
||||
"{} - Exception: {}".format(path, exc)
|
||||
)
|
||||
raise
|
||||
|
||||
cwd = os.path.dirname(path)
|
||||
root_override = data.get("root")
|
||||
frame_start = int(data.get("frameStart"))
|
||||
frame_end = int(data.get("frameEnd"))
|
||||
subset = data.get("subset")
|
||||
|
||||
if root_override:
|
||||
if os.path.isabs(root_override):
|
||||
root = root_override
|
||||
|
|
@ -144,6 +156,18 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
self.log.info("setting session using metadata")
|
||||
api.Session.update(session)
|
||||
os.environ.update(session)
|
||||
instance = metadata.get("instance")
|
||||
if instance:
|
||||
instance_family = instance.get("family")
|
||||
pixel_aspect = instance.get("pixelAspect", 1)
|
||||
resolution_width = instance.get("resolutionWidth", 1920)
|
||||
resolution_height = instance.get("resolutionHeight", 1080)
|
||||
lut_path = instance.get("lutPath", None)
|
||||
baked_mov_path = instance.get("bakeRenderPath")
|
||||
families_data = instance.get("families")
|
||||
slate_frame = instance.get("slateFrame")
|
||||
version = instance.get("version")
|
||||
|
||||
|
||||
else:
|
||||
# Search in directory
|
||||
|
|
@ -151,88 +175,279 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
|
|||
root = path
|
||||
|
||||
self.log.info("Collecting: {}".format(root))
|
||||
|
||||
regex = data.get("regex")
|
||||
if baked_mov_path:
|
||||
regex = "^{}.*$".format(subset)
|
||||
|
||||
if regex:
|
||||
self.log.info("Using regex: {}".format(regex))
|
||||
|
||||
collections = collect(root=root,
|
||||
regex=regex,
|
||||
exclude_regex=data.get("exclude_regex"),
|
||||
frame_start=data.get("frameStart"),
|
||||
frame_end=data.get("frameEnd"))
|
||||
if "slate" in families_data:
|
||||
frame_start -= 1
|
||||
|
||||
collections, remainder = collect(
|
||||
root=root,
|
||||
regex=regex,
|
||||
exclude_regex=data.get("exclude_regex"),
|
||||
frame_start=frame_start,
|
||||
frame_end=frame_end,
|
||||
)
|
||||
|
||||
self.log.info("Found collections: {}".format(collections))
|
||||
|
||||
if data.get("subset"):
|
||||
# If subset is provided for this json then it must be a single
|
||||
# collection.
|
||||
if len(collections) > 1:
|
||||
self.log.error("Forced subset can only work with a single "
|
||||
"found sequence")
|
||||
raise RuntimeError("Invalid sequence")
|
||||
self.log.info("Found remainder: {}".format(remainder))
|
||||
|
||||
fps = data.get("fps", 25)
|
||||
|
||||
# adding publish comment and intent to context
|
||||
context.data["comment"] = data.get("comment", "")
|
||||
context.data["intent"] = data.get("intent", "")
|
||||
|
||||
if data.get("user"):
|
||||
context.data["user"] = data["user"]
|
||||
|
||||
if data.get("version"):
|
||||
version = data.get("version")
|
||||
|
||||
# Get family from the data
|
||||
families = data.get("families", ["render"])
|
||||
if "render" not in families:
|
||||
families.append("render")
|
||||
if "ftrack" not in families:
|
||||
families.append("ftrack")
|
||||
if "review" not in families:
|
||||
families.append("review")
|
||||
if "write" in instance_family:
|
||||
families.append("write")
|
||||
if families_data and "slate" in families_data:
|
||||
families.append("slate")
|
||||
|
||||
for collection in collections:
|
||||
instance = context.create_instance(str(collection))
|
||||
self.log.info("Collection: %s" % list(collection))
|
||||
if data.get("attachTo"):
|
||||
# we need to attach found collections to existing
|
||||
# subset version as review representation.
|
||||
|
||||
# Ensure each instance gets a unique reference to the data
|
||||
for attach in data.get("attachTo"):
|
||||
self.log.info(
|
||||
"Attaching render {}:v{}".format(
|
||||
attach["subset"], attach["version"]))
|
||||
instance = context.create_instance(
|
||||
attach["subset"])
|
||||
instance.data.update(
|
||||
{
|
||||
"name": attach["subset"],
|
||||
"version": attach["version"],
|
||||
"family": 'review',
|
||||
"families": ['review', 'ftrack'],
|
||||
"asset": data.get(
|
||||
"asset", api.Session["AVALON_ASSET"]),
|
||||
"stagingDir": root,
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
"fps": fps,
|
||||
"source": data.get("source", ""),
|
||||
"pixelAspect": pixel_aspect,
|
||||
"resolutionWidth": resolution_width,
|
||||
"resolutionHeight": resolution_height
|
||||
})
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
for collection in collections:
|
||||
self.log.info(
|
||||
" - adding representation: {}".format(
|
||||
str(collection))
|
||||
)
|
||||
ext = collection.tail.lstrip(".")
|
||||
|
||||
representation = {
|
||||
"name": ext,
|
||||
"ext": "{}".format(ext),
|
||||
"files": list(collection),
|
||||
"stagingDir": root,
|
||||
"anatomy_template": "render",
|
||||
"fps": fps,
|
||||
"tags": ["review"],
|
||||
}
|
||||
instance.data["representations"].append(
|
||||
representation)
|
||||
|
||||
elif subset:
|
||||
# if we have subset - add all collections and known
|
||||
# remainder as representations
|
||||
|
||||
# take out review family if mov path
|
||||
# this will make imagesequence none review
|
||||
|
||||
if baked_mov_path:
|
||||
self.log.info(
|
||||
"Baked mov is available {}".format(
|
||||
baked_mov_path))
|
||||
families.append("review")
|
||||
|
||||
if session['AVALON_APP'] == "maya":
|
||||
families.append("review")
|
||||
|
||||
self.log.info(
|
||||
"Adding representations to subset {}".format(
|
||||
subset))
|
||||
|
||||
instance = context.create_instance(subset)
|
||||
data = copy.deepcopy(data)
|
||||
|
||||
# If no subset provided, get it from collection's head
|
||||
subset = data.get("subset", collection.head.rstrip("_. "))
|
||||
|
||||
# If no start or end frame provided, get it from collection
|
||||
indices = list(collection.indexes)
|
||||
start = data.get("frameStart", indices[0])
|
||||
end = data.get("frameEnd", indices[-1])
|
||||
|
||||
# root = os.path.normpath(root)
|
||||
# self.log.info("Source: {}}".format(data.get("source", "")))
|
||||
|
||||
ext = list(collection)[0].split('.')[-1]
|
||||
|
||||
instance.data.update({
|
||||
"name": str(collection),
|
||||
"family": families[0], # backwards compatibility / pyblish
|
||||
"families": list(families),
|
||||
"subset": subset,
|
||||
"asset": data.get("asset", api.Session["AVALON_ASSET"]),
|
||||
"stagingDir": root,
|
||||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
"fps": fps,
|
||||
"source": data.get('source', '')
|
||||
})
|
||||
instance.append(collection)
|
||||
instance.context.data['fps'] = fps
|
||||
instance.data.update(
|
||||
{
|
||||
"name": subset,
|
||||
"family": families[0],
|
||||
"families": list(families),
|
||||
"subset": subset,
|
||||
"asset": data.get(
|
||||
"asset", api.Session["AVALON_ASSET"]),
|
||||
"stagingDir": root,
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
"fps": fps,
|
||||
"source": data.get("source", ""),
|
||||
"pixelAspect": pixel_aspect,
|
||||
"resolutionWidth": resolution_width,
|
||||
"resolutionHeight": resolution_height,
|
||||
"slateFrame": slate_frame,
|
||||
"version": version
|
||||
}
|
||||
)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': ext,
|
||||
'ext': '{}'.format(ext),
|
||||
'files': list(collection),
|
||||
"stagingDir": root,
|
||||
"anatomy_template": "render",
|
||||
"fps": fps,
|
||||
"tags": ['review']
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
for collection in collections:
|
||||
self.log.info(" - {}".format(str(collection)))
|
||||
|
||||
if data.get('user'):
|
||||
context.data["user"] = data['user']
|
||||
ext = collection.tail.lstrip(".")
|
||||
|
||||
self.log.debug("Collected instance:\n"
|
||||
"{}".format(pformat(instance.data)))
|
||||
if "slate" in instance.data["families"]:
|
||||
frame_start += 1
|
||||
|
||||
representation = {
|
||||
"name": ext,
|
||||
"ext": "{}".format(ext),
|
||||
"files": list(collection),
|
||||
"frameStart": frame_start,
|
||||
"frameEnd": frame_end,
|
||||
"stagingDir": root,
|
||||
"anatomy_template": "render",
|
||||
"fps": fps,
|
||||
"tags": ["review"] if not baked_mov_path else [],
|
||||
}
|
||||
instance.data["representations"].append(
|
||||
representation)
|
||||
|
||||
# filter out only relevant mov in case baked available
|
||||
self.log.debug("__ remainder {}".format(remainder))
|
||||
if baked_mov_path:
|
||||
remainder = [r for r in remainder
|
||||
if r in baked_mov_path]
|
||||
self.log.debug("__ remainder {}".format(remainder))
|
||||
|
||||
# process remainders
|
||||
for rem in remainder:
|
||||
# add only known types to representation
|
||||
if rem.split(".")[-1] in ['mov', 'jpg', 'mp4']:
|
||||
self.log.info(" . {}".format(rem))
|
||||
|
||||
if "slate" in instance.data["families"]:
|
||||
frame_start += 1
|
||||
|
||||
tags = ["review"]
|
||||
|
||||
if baked_mov_path:
|
||||
tags.append("delete")
|
||||
|
||||
representation = {
|
||||
"name": rem.split(".")[-1],
|
||||
"ext": "{}".format(rem.split(".")[-1]),
|
||||
"files": rem,
|
||||
"stagingDir": root,
|
||||
"frameStart": frame_start,
|
||||
"anatomy_template": "render",
|
||||
"fps": fps,
|
||||
"tags": tags
|
||||
}
|
||||
instance.data["representations"].append(
|
||||
representation)
|
||||
|
||||
else:
|
||||
# we have no subset so we take every collection and create one
|
||||
# from it
|
||||
for collection in collections:
|
||||
instance = context.create_instance(str(collection))
|
||||
self.log.info("Creating subset from: %s" % str(collection))
|
||||
|
||||
# Ensure each instance gets a unique reference to the data
|
||||
data = copy.deepcopy(data)
|
||||
|
||||
# If no subset provided, get it from collection's head
|
||||
subset = data.get("subset", collection.head.rstrip("_. "))
|
||||
|
||||
# If no start or end frame provided, get it from collection
|
||||
indices = list(collection.indexes)
|
||||
start = data.get("frameStart", indices[0])
|
||||
end = data.get("frameEnd", indices[-1])
|
||||
|
||||
ext = list(collection)[0].split(".")[-1]
|
||||
|
||||
if "review" not in families:
|
||||
families.append("review")
|
||||
|
||||
instance.data.update(
|
||||
{
|
||||
"name": str(collection),
|
||||
"family": families[0], # backwards compatibility
|
||||
"families": list(families),
|
||||
"subset": subset,
|
||||
"asset": data.get(
|
||||
"asset", api.Session["AVALON_ASSET"]),
|
||||
"stagingDir": root,
|
||||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
"fps": fps,
|
||||
"source": data.get("source", ""),
|
||||
"pixelAspect": pixel_aspect,
|
||||
"resolutionWidth": resolution_width,
|
||||
"resolutionHeight": resolution_height,
|
||||
"version": version
|
||||
}
|
||||
)
|
||||
if lut_path:
|
||||
instance.data.update({"lutPath": lut_path})
|
||||
|
||||
instance.append(collection)
|
||||
instance.context.data["fps"] = fps
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
"name": ext,
|
||||
"ext": "{}".format(ext),
|
||||
"files": list(collection),
|
||||
"frameStart": start,
|
||||
"frameEnd": end,
|
||||
"stagingDir": root,
|
||||
"anatomy_template": "render",
|
||||
"fps": fps,
|
||||
"tags": ["review"],
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
# temporary ... allow only beauty on ftrack
|
||||
if session['AVALON_APP'] == "maya":
|
||||
AOV_filter = ['beauty']
|
||||
for aov in AOV_filter:
|
||||
if aov not in instance.data['subset']:
|
||||
instance.data['families'].remove('review')
|
||||
instance.data['families'].remove('ftrack')
|
||||
representation["tags"].remove('review')
|
||||
|
||||
self.log.debug(
|
||||
"__ representations {}".format(
|
||||
instance.data["representations"]))
|
||||
self.log.debug(
|
||||
"__ instance.data {}".format(instance.data))
|
||||
|
|
|
|||
|
|
@ -31,32 +31,44 @@ class CollectTemplates(pyblish.api.InstancePlugin):
|
|||
asset_name = instance.data["asset"]
|
||||
project_name = api.Session["AVALON_PROJECT"]
|
||||
|
||||
project = io.find_one({"type": "project",
|
||||
"name": project_name},
|
||||
projection={"config": True, "data": True})
|
||||
project = io.find_one(
|
||||
{
|
||||
"type": "project",
|
||||
"name": project_name
|
||||
},
|
||||
projection={"config": True, "data": True}
|
||||
)
|
||||
|
||||
template = project["config"]["template"]["publish"]
|
||||
anatomy = instance.context.data['anatomy']
|
||||
|
||||
asset = io.find_one({"type": "asset",
|
||||
"name": asset_name,
|
||||
"parent": project["_id"]})
|
||||
asset = io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name,
|
||||
"parent": project["_id"]
|
||||
})
|
||||
|
||||
assert asset, ("No asset found by the name '{}' "
|
||||
"in project '{}'".format(asset_name, project_name))
|
||||
silo = asset.get('silo')
|
||||
|
||||
subset = io.find_one({"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]})
|
||||
subset = io.find_one({
|
||||
"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]
|
||||
})
|
||||
|
||||
# assume there is no version yet, we start at `1`
|
||||
version = None
|
||||
version_number = 1
|
||||
if subset is not None:
|
||||
version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
sort=[("name", -1)])
|
||||
version = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"]
|
||||
},
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
|
||||
# if there is a subset there ought to be version
|
||||
if version is not None:
|
||||
|
|
@ -75,8 +87,19 @@ class CollectTemplates(pyblish.api.InstancePlugin):
|
|||
"asset": asset_name,
|
||||
"subset": subset_name,
|
||||
"version": version_number,
|
||||
"hierarchy": hierarchy,
|
||||
"representation": "TEMP"}
|
||||
"hierarchy": hierarchy.replace("\\", "/"),
|
||||
"representation": "TEMP")}
|
||||
|
||||
resolution_width = instance.data.get("resolutionWidth")
|
||||
resolution_height = instance.data.get("resolutionHeight")
|
||||
fps = instance.data.get("fps")
|
||||
|
||||
if resolution_width:
|
||||
template_data["resolution_width"] = resolution_width
|
||||
if resolution_height:
|
||||
template_data["resolution_height"] = resolution_height
|
||||
if fps:
|
||||
template_data["fps"] = fps
|
||||
|
||||
instance.data["template"] = template
|
||||
instance.data["assumedTemplateData"] = template_data
|
||||
|
|
@ -85,3 +108,6 @@ class CollectTemplates(pyblish.api.InstancePlugin):
|
|||
instance.data["assumedDestination"] = os.path.dirname(
|
||||
(anatomy.format(template_data))["publish"]["path"]
|
||||
)
|
||||
self.log.info("Assumed Destination has been created...")
|
||||
self.log.debug("__ assumedTemplateData: `{}`".format(instance.data["assumedTemplateData"]))
|
||||
self.log.debug("__ template: `{}`".format(instance.data["template"]))
|
||||
|
|
|
|||
|
|
@ -1,8 +1,10 @@
|
|||
import os
|
||||
import json
|
||||
import copy
|
||||
|
||||
import pype.api
|
||||
import pyblish
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
class ExtractBurnin(pype.api.Extractor):
|
||||
|
|
@ -24,18 +26,50 @@ class ExtractBurnin(pype.api.Extractor):
|
|||
if "representations" not in instance.data:
|
||||
raise RuntimeError("Burnin needs already created mov to work on.")
|
||||
|
||||
# TODO: expand burnin data list to include all useful keys
|
||||
version = ''
|
||||
if instance.context.data.get('version'):
|
||||
version = "v" + str(instance.context.data['version'])
|
||||
version = instance.context.data.get(
|
||||
'version', instance.data.get('version'))
|
||||
frame_start = int(instance.data.get("frameStart") or 0)
|
||||
frame_end = int(instance.data.get("frameEnd") or 1)
|
||||
duration = frame_end - frame_start + 1
|
||||
|
||||
prep_data = {
|
||||
"username": instance.context.data['user'],
|
||||
"asset": os.environ['AVALON_ASSET'],
|
||||
"task": os.environ['AVALON_TASK'],
|
||||
"start_frame": int(instance.data["frameStart"]),
|
||||
"version": version
|
||||
"frame_start": frame_start,
|
||||
"frame_end": frame_end,
|
||||
"duration": duration,
|
||||
"version": int(version),
|
||||
"comment": instance.context.data.get("comment", ""),
|
||||
"intent": instance.context.data.get("intent", "")
|
||||
}
|
||||
|
||||
# Add datetime data to preparation data
|
||||
prep_data.update(config.get_datetime_data())
|
||||
|
||||
slate_frame_start = frame_start
|
||||
slate_frame_end = frame_end
|
||||
slate_duration = duration
|
||||
|
||||
# exception for slate workflow
|
||||
if "slate" in instance.data["families"]:
|
||||
slate_frame_start = frame_start - 1
|
||||
slate_frame_end = frame_end
|
||||
slate_duration = slate_frame_end - slate_frame_start + 1
|
||||
|
||||
prep_data.update({
|
||||
"slate_frame_start": slate_frame_start,
|
||||
"slate_frame_end": slate_frame_end,
|
||||
"slate_duration": slate_duration
|
||||
})
|
||||
|
||||
# Update data with template data
|
||||
template_data = instance.data.get("assumedTemplateData") or {}
|
||||
prep_data.update(template_data)
|
||||
|
||||
# get anatomy project
|
||||
anatomy = instance.context.data['anatomy']
|
||||
|
||||
self.log.debug("__ prep_data: {}".format(prep_data))
|
||||
for i, repre in enumerate(instance.data["representations"]):
|
||||
self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
|
||||
|
|
@ -47,16 +81,28 @@ class ExtractBurnin(pype.api.Extractor):
|
|||
filename = "{0}".format(repre["files"])
|
||||
|
||||
name = "_burnin"
|
||||
movieFileBurnin = filename.replace(".mov", "") + name + ".mov"
|
||||
ext = os.path.splitext(filename)[1]
|
||||
movieFileBurnin = filename.replace(ext, "") + name + ext
|
||||
|
||||
full_movie_path = os.path.join(os.path.normpath(stagingdir), repre["files"])
|
||||
full_burnin_path = os.path.join(os.path.normpath(stagingdir), movieFileBurnin)
|
||||
full_movie_path = os.path.join(
|
||||
os.path.normpath(stagingdir), repre["files"]
|
||||
)
|
||||
full_burnin_path = os.path.join(
|
||||
os.path.normpath(stagingdir), movieFileBurnin
|
||||
)
|
||||
self.log.debug("__ full_burnin_path: {}".format(full_burnin_path))
|
||||
|
||||
# create copy of prep_data for anatomy formatting
|
||||
_prep_data = copy.deepcopy(prep_data)
|
||||
_prep_data["representation"] = repre["name"]
|
||||
filled_anatomy = anatomy.format_all(_prep_data)
|
||||
_prep_data["anatomy"] = filled_anatomy.get_solved()
|
||||
|
||||
burnin_data = {
|
||||
"input": full_movie_path.replace("\\", "/"),
|
||||
"codec": repre.get("codec", []),
|
||||
"output": full_burnin_path.replace("\\", "/"),
|
||||
"burnin_data": prep_data
|
||||
"burnin_data": _prep_data
|
||||
}
|
||||
|
||||
self.log.debug("__ burnin_data2: {}".format(burnin_data))
@ -6,7 +6,7 @@ import pype.api
|
|||
|
||||
|
||||
class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
||||
"""Resolve any dependency issies
|
||||
"""Resolve any dependency issues
|
||||
|
||||
This plug-in resolves any paths which, if not updated might break
|
||||
the published file.
@ -20,6 +20,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
|||
hosts = ["shell"]
|
||||
order = pyblish.api.ExtractorOrder
|
||||
families = ["imagesequence", "render", "write", "source"]
|
||||
enabled = False
|
||||
|
||||
def process(self, instance):
|
||||
start = instance.data.get("frameStart")
@ -28,51 +29,74 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
|||
collected_frames = os.listdir(stagingdir)
|
||||
collections, remainder = clique.assemble(collected_frames)
|
||||
|
||||
input_file = (
|
||||
collections[0].format('{head}{padding}{tail}') % start
|
||||
)
|
||||
full_input_path = os.path.join(stagingdir, input_file)
|
||||
self.log.info("input {}".format(full_input_path))
|
||||
self.log.info("subset {}".format(instance.data['subset']))
|
||||
if 'crypto' in instance.data['subset']:
|
||||
return
|
||||
|
||||
filename = collections[0].format('{head}')
|
||||
if not filename.endswith('.'):
|
||||
filename += "."
|
||||
jpegFile = filename + "jpg"
|
||||
full_output_path = os.path.join(stagingdir, jpegFile)
|
||||
# get representation and loop them
|
||||
representations = instance.data["representations"]
|
||||
|
||||
self.log.info("output {}".format(full_output_path))
|
||||
# filter out mov and img sequences
|
||||
representations_new = representations[:]
|
||||
|
||||
config_data = instance.context.data['output_repre_config']
|
||||
for repre in representations:
|
||||
self.log.debug(repre)
|
||||
if 'review' not in repre['tags']:
|
||||
return
|
||||
|
||||
proj_name = os.environ.get('AVALON_PROJECT', '__default__')
|
||||
profile = config_data.get(proj_name, config_data['__default__'])
|
||||
input_file = repre['files'][0]
|
||||
|
||||
jpeg_items = []
|
||||
jpeg_items.append(
|
||||
os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
|
||||
# override file if already exists
|
||||
jpeg_items.append("-y")
|
||||
# use same input args like with mov
|
||||
jpeg_items.extend(profile.get('input', []))
|
||||
# input file
|
||||
jpeg_items.append("-i {}".format(full_input_path))
|
||||
# output file
|
||||
jpeg_items.append(full_output_path)
|
||||
# input_file = (
|
||||
# collections[0].format('{head}{padding}{tail}') % start
|
||||
# )
|
||||
full_input_path = os.path.join(stagingdir, input_file)
|
||||
self.log.info("input {}".format(full_input_path))
|
||||
|
||||
subprocess_jpeg = " ".join(jpeg_items)
|
||||
filename = os.path.splitext(input_file)[0]
|
||||
if not filename.endswith('.'):
|
||||
filename += "."
|
||||
jpeg_file = filename + "jpg"
|
||||
full_output_path = os.path.join(stagingdir, jpeg_file)
|
||||
|
||||
# run subprocess
|
||||
self.log.debug("{}".format(subprocess_jpeg))
|
||||
pype.api.subprocess(subprocess_jpeg)
|
||||
self.log.info("output {}".format(full_output_path))
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
config_data = instance.context.data['output_repre_config']
|
||||
|
||||
representation = {
|
||||
'name': 'jpg',
|
||||
'ext': 'jpg',
|
||||
'files': jpegFile,
|
||||
"stagingDir": stagingdir,
|
||||
"thumbnail": True
|
||||
}
|
||||
instance.data["representations"].append(representation)
|
||||
proj_name = os.environ.get('AVALON_PROJECT', '__default__')
|
||||
profile = config_data.get(proj_name, config_data['__default__'])
|
||||
|
||||
jpeg_items = []
|
||||
jpeg_items.append(
|
||||
os.path.join(os.environ.get("FFMPEG_PATH"), "ffmpeg"))
|
||||
# override file if already exists
|
||||
jpeg_items.append("-y")
|
||||
# use same input args like with mov
|
||||
jpeg_items.extend(profile.get('input', []))
|
||||
# input file
|
||||
jpeg_items.append("-i {}".format(full_input_path))
|
||||
# output file
|
||||
jpeg_items.append(full_output_path)
|
||||
|
||||
subprocess_jpeg = " ".join(jpeg_items)
|
||||
|
||||
# run subprocess
|
||||
self.log.debug("{}".format(subprocess_jpeg))
|
||||
pype.api.subprocess(subprocess_jpeg)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
||||
representation = {
|
||||
'name': 'thumbnail',
|
||||
'ext': 'jpg',
|
||||
'files': jpeg_file,
|
||||
"stagingDir": stagingdir,
|
||||
"thumbnail": True,
|
||||
"tags": ['thumbnail']
|
||||
}
|
||||
|
||||
# adding representation
|
||||
self.log.debug("Adding: {}".format(representation))
|
||||
representations_new.append(representation)
|
||||
|
||||
instance.data["representations"] = representations_new
@ -1,9 +1,7 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import clique
|
||||
import pype.api
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
class ExtractReview(pyblish.api.InstancePlugin):
@ -22,27 +20,35 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
families = ["review"]
|
||||
hosts = ["nuke", "maya", "shell"]
|
||||
|
||||
outputs = {}
|
||||
ext_filter = []
|
||||
|
||||
def process(self, instance):
|
||||
# adding plugin attributes from presets
|
||||
publish_presets = config.get_presets()["plugins"]["global"]["publish"]
|
||||
plugin_attrs = publish_presets[self.__class__.__name__]
|
||||
output_profiles = plugin_attrs.get("outputs", {})
|
||||
to_width = 1920
|
||||
to_height = 1080
|
||||
|
||||
output_profiles = self.outputs or {}
|
||||
|
||||
inst_data = instance.data
|
||||
fps = inst_data.get("fps")
|
||||
start_frame = inst_data.get("frameStart")
|
||||
|
||||
self.log.debug("Families In: `{}`".format(instance.data["families"]))
|
||||
resolution_width = inst_data.get("resolutionWidth", to_width)
|
||||
resolution_height = inst_data.get("resolutionHeight", to_height)
|
||||
pixel_aspect = inst_data.get("pixelAspect", 1)
|
||||
self.log.debug("Families In: `{}`".format(inst_data["families"]))
|
||||
|
||||
# get representation and loop them
|
||||
representations = instance.data["representations"]
|
||||
representations = inst_data["representations"]
|
||||
|
||||
# filter out mov and img sequences
|
||||
representations_new = representations[:]
|
||||
for repre in representations:
|
||||
if repre['ext'] in plugin_attrs["ext_filter"]:
|
||||
if repre['ext'] in self.ext_filter:
|
||||
tags = repre.get("tags", [])
|
||||
|
||||
if "thumbnail" in tags:
|
||||
continue
|
||||
|
||||
self.log.info("Try repre: {}".format(repre))
|
||||
|
||||
if "review" in tags:
@ -54,10 +60,14 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
if not ext:
|
||||
ext = "mov"
|
||||
self.log.warning(
|
||||
"`ext` attribute not in output profile. Setting to default ext: `mov`")
|
||||
str("`ext` attribute not in output "
|
||||
"profile. Setting to default ext: `mov`"))
|
||||
|
||||
self.log.debug("instance.families: {}".format(instance.data['families']))
|
||||
self.log.debug("profile.families: {}".format(profile['families']))
|
||||
self.log.debug(
|
||||
"instance.families: {}".format(
|
||||
instance.data['families']))
|
||||
self.log.debug(
|
||||
"profile.families: {}".format(profile['families']))
|
||||
|
||||
if any(item in instance.data['families'] for item in profile['families']):
|
||||
if isinstance(repre["files"], list):
@ -92,8 +102,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
self.log.info("p_tags: `{}`".format(p_tags))
|
||||
# add families
|
||||
[instance.data["families"].append(t)
|
||||
for t in p_tags
|
||||
if t not in instance.data["families"]]
|
||||
for t in p_tags
|
||||
if t not in instance.data["families"]]
|
||||
|
||||
# add to
|
||||
[new_tags.append(t) for t in p_tags
|
||||
if t not in new_tags]
@ -111,8 +122,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
# necessary input data
|
||||
# adds start arg only if image sequence
|
||||
if isinstance(repre["files"], list):
|
||||
input_args.append("-start_number {0} -framerate {1}".format(
|
||||
start_frame, fps))
|
||||
input_args.append(
|
||||
"-start_number {0} -framerate {1}".format(
|
||||
start_frame, fps))
|
||||
|
||||
input_args.append("-i {}".format(full_input_path))
@ -147,21 +159,135 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
)
|
||||
|
||||
output_args = []
|
||||
codec_args = profile.get('codec', [])
|
||||
output_args.extend(codec_args)
|
||||
# preset's output data
|
||||
output_args.extend(profile.get('output', []))
|
||||
|
||||
# defining image ratios
|
||||
resolution_ratio = float(resolution_width / (
|
||||
resolution_height * pixel_aspect))
|
||||
delivery_ratio = float(to_width) / float(to_height)
|
||||
self.log.debug(resolution_ratio)
|
||||
self.log.debug(delivery_ratio)
|
||||
|
||||
# get scale factor
|
||||
scale_factor = to_height / (
|
||||
resolution_height * pixel_aspect)
|
||||
self.log.debug(scale_factor)
|
||||
|
||||
# letter_box
|
||||
# TODO: add to documentation
|
||||
lb = profile.get('letter_box', None)
|
||||
if lb:
|
||||
output_args.append(
|
||||
"-filter:v drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
|
||||
lb = profile.get('letter_box', 0)
|
||||
if lb != 0:
|
||||
ffmpet_width = to_width
|
||||
ffmpet_height = to_height
|
||||
if "reformat" not in p_tags:
|
||||
lb /= pixel_aspect
|
||||
if resolution_ratio != delivery_ratio:
|
||||
ffmpet_width = resolution_width
|
||||
ffmpet_height = int(
|
||||
resolution_height * pixel_aspect)
|
||||
else:
|
||||
if resolution_ratio != delivery_ratio:
|
||||
lb /= scale_factor
|
||||
else:
|
||||
lb /= pixel_aspect
|
||||
|
||||
output_args.append(str(
|
||||
"-filter:v scale={0}x{1}:flags=lanczos,"
|
||||
"setsar=1,drawbox=0:0:iw:"
|
||||
"round((ih-(iw*(1/{2})))/2):t=fill:"
|
||||
"c=black,drawbox=0:ih-round((ih-(iw*("
|
||||
"1/{2})))/2):iw:round((ih-(iw*(1/{2})))"
|
||||
"/2):t=fill:c=black").format(
|
||||
ffmpet_width, ffmpet_height, lb))
|
||||
|
||||
# In case audio is longer than video.
|
||||
output_args.append("-shortest")
|
||||
|
||||
# output filename
|
||||
output_args.append(full_output_path)
|
||||
|
||||
self.log.debug(
|
||||
"__ pixel_aspect: `{}`".format(pixel_aspect))
|
||||
self.log.debug(
|
||||
"__ resolution_width: `{}`".format(
|
||||
resolution_width))
|
||||
self.log.debug(
|
||||
"__ resolution_height: `{}`".format(
|
||||
resolution_height))
|
||||
|
||||
# scaling none square pixels and 1920 width
|
||||
if "reformat" in p_tags:
|
||||
if resolution_ratio < delivery_ratio:
|
||||
self.log.debug("lower then delivery")
|
||||
width_scale = int(to_width * scale_factor)
|
||||
width_half_pad = int((
|
||||
to_width - width_scale)/2)
|
||||
height_scale = to_height
|
||||
height_half_pad = 0
|
||||
else:
|
||||
self.log.debug("heigher then delivery")
|
||||
width_scale = to_width
|
||||
width_half_pad = 0
|
||||
scale_factor = float(to_width) / float(
|
||||
resolution_width)
|
||||
self.log.debug(scale_factor)
|
||||
height_scale = int(
|
||||
resolution_height * scale_factor)
|
||||
height_half_pad = int(
|
||||
(to_height - height_scale)/2)
|
||||
|
||||
self.log.debug(
|
||||
"__ width_scale: `{}`".format(width_scale))
|
||||
self.log.debug(
|
||||
"__ width_half_pad: `{}`".format(
|
||||
width_half_pad))
|
||||
self.log.debug(
|
||||
"__ height_scale: `{}`".format(
|
||||
height_scale))
|
||||
self.log.debug(
|
||||
"__ height_half_pad: `{}`".format(
|
||||
height_half_pad))
|
||||
|
||||
scaling_arg = str(
|
||||
"scale={0}x{1}:flags=lanczos,"
|
||||
"pad={2}:{3}:{4}:{5}:black,setsar=1"
|
||||
).format(width_scale, height_scale,
|
||||
to_width, to_height,
|
||||
width_half_pad,
|
||||
height_half_pad
|
||||
)
|
||||
|
||||
vf_back = self.add_video_filter_args(
|
||||
output_args, scaling_arg)
|
||||
# add it to output_args
|
||||
output_args.insert(0, vf_back)
|
||||
|
||||
# baking lut file application
|
||||
lut_path = instance.data.get("lutPath")
|
||||
if lut_path and ("bake-lut" in p_tags):
|
||||
# removing Gama info as it is all baked in lut
|
||||
gamma = next((g for g in input_args
|
||||
if "-gamma" in g), None)
|
||||
if gamma:
|
||||
input_args.remove(gamma)
|
||||
|
||||
# create lut argument
|
||||
lut_arg = "lut3d=file='{}'".format(
|
||||
lut_path.replace(
|
||||
"\\", "/").replace(":/", "\\:/")
|
||||
)
|
||||
lut_arg += ",colormatrix=bt601:bt709"
|
||||
|
||||
vf_back = self.add_video_filter_args(
|
||||
output_args, lut_arg)
|
||||
# add it to output_args
|
||||
output_args.insert(0, vf_back)
|
||||
self.log.info("Added Lut to ffmpeg command")
|
||||
self.log.debug(
|
||||
"_ output_args: `{}`".format(output_args))
|
||||
|
||||
mov_args = [
|
||||
os.path.join(
|
||||
os.environ.get(
@ -183,7 +309,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
'ext': ext,
|
||||
'files': repr_file,
|
||||
"tags": new_tags,
|
||||
"outputName": name
|
||||
"outputName": name,
|
||||
"codec": codec_args,
|
||||
"_profile": profile,
|
||||
"resolutionHeight": resolution_height,
|
||||
"resolutionWidth": resolution_width,
|
||||
})
|
||||
if repre_new.get('preview'):
|
||||
repre_new.pop("preview")
@ -207,3 +337,40 @@ class ExtractReview(pyblish.api.InstancePlugin):
|
|||
instance.data["representations"] = representations_new
|
||||
|
||||
self.log.debug("Families Out: `{}`".format(instance.data["families"]))
|
||||
|
||||
def add_video_filter_args(self, args, inserting_arg):
|
||||
"""
|
||||
Fixing video filter arguments to be one long string
|
||||
|
||||
Args:
|
||||
args (list): list of string arguments
|
||||
inserting_arg (str): string argument we want to add
|
||||
(without flag `-vf`)
|
||||
|
||||
Returns:
|
||||
str: long joined argument to be added back to list of arguments
|
||||
|
||||
"""
|
||||
# find all video format settings
|
||||
vf_settings = [p for p in args
|
||||
for v in ["-filter:v", "-vf"]
|
||||
if v in p]
|
||||
self.log.debug("_ vf_settings: `{}`".format(vf_settings))
|
||||
|
||||
# remove them from output args list
|
||||
for p in vf_settings:
|
||||
self.log.debug("_ remove p: `{}`".format(p))
|
||||
args.remove(p)
|
||||
self.log.debug("_ args: `{}`".format(args))
|
||||
|
||||
# strip them from all flags
|
||||
vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "")
|
||||
for p in vf_settings]
|
||||
|
||||
self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
|
||||
vf_fixed.insert(0, inserting_arg)
|
||||
self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
|
||||
# create new video filter setting
|
||||
vf_back = "-vf " + ",".join(vf_fixed)
|
||||
|
||||
return vf_back
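Editorial aside (not part of this commit): a minimal, self-contained sketch of what add_video_filter_args does to an ffmpeg argument list. The argument values (codec, scale, LUT path) are hypothetical.

output_args = ["-codec:v libx264", "-vf scale=1920x1080", "-crf 18"]

# Pull out any existing "-vf"/"-filter:v" entries, prepend the new filter,
# and join everything back into a single "-vf" argument.
vf_settings = [a for a in output_args for flag in ("-filter:v", "-vf") if flag in a]
for a in vf_settings:
    output_args.remove(a)
vf_fixed = [a.replace("-vf ", "").replace("-filter:v ", "") for a in vf_settings]
vf_fixed.insert(0, "lut3d=file='show.cube'")
output_args.insert(0, "-vf " + ",".join(vf_fixed))

print(output_args)
# ["-vf lut3d=file='show.cube',scale=1920x1080", "-codec:v libx264", "-crf 18"]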
243
pype/plugins/global/publish/extract_review_slate.py
Normal file
@ -0,0 +1,243 @@
|
|||
import os
|
||||
import pype.api
|
||||
import pyblish
|
||||
|
||||
|
||||
class ExtractReviewSlate(pype.api.Extractor):
|
||||
"""
|
||||
Will add slate frame at the start of the video files
|
||||
"""
|
||||
|
||||
label = "Review with Slate frame"
|
||||
order = pyblish.api.ExtractorOrder + 0.031
|
||||
families = ["slate"]
|
||||
hosts = ["nuke", "maya", "shell"]
|
||||
optional = True
|
||||
|
||||
def process(self, instance):
|
||||
inst_data = instance.data
|
||||
if "representations" not in inst_data:
|
||||
raise RuntimeError("Burnin needs already created mov to work on.")
|
||||
|
||||
suffix = "_slate"
|
||||
slate_path = inst_data.get("slateFrame")
|
||||
ffmpeg_path = os.path.join(os.environ.get("FFMPEG_PATH", ""), "ffmpeg")
|
||||
|
||||
to_width = 1920
|
||||
to_height = 1080
|
||||
resolution_width = inst_data.get("resolutionWidth", to_width)
|
||||
resolution_height = inst_data.get("resolutionHeight", to_height)
|
||||
pixel_aspect = inst_data.get("pixelAspect", 1)
|
||||
fps = inst_data.get("fps")
|
||||
|
||||
# defining image ratios
|
||||
resolution_ratio = float(resolution_width / (
|
||||
resolution_height * pixel_aspect))
|
||||
delivery_ratio = float(to_width) / float(to_height)
|
||||
self.log.debug(resolution_ratio)
|
||||
self.log.debug(delivery_ratio)
|
||||
|
||||
# get scale factor
|
||||
scale_factor = to_height / (
|
||||
resolution_height * pixel_aspect)
|
||||
self.log.debug(scale_factor)
|
||||
|
||||
for i, repre in enumerate(inst_data["representations"]):
|
||||
_remove_at_end = []
|
||||
self.log.debug("__ i: `{}`, repre: `{}`".format(i, repre))
|
||||
|
||||
p_tags = repre.get("tags", [])
|
||||
|
||||
if "slate-frame" not in p_tags:
|
||||
continue
|
||||
|
||||
stagingdir = repre["stagingDir"]
|
||||
input_file = "{0}".format(repre["files"])
|
||||
|
||||
ext = os.path.splitext(input_file)[1]
|
||||
output_file = input_file.replace(ext, "") + suffix + ext
|
||||
|
||||
input_path = os.path.join(
|
||||
os.path.normpath(stagingdir), repre["files"])
|
||||
self.log.debug("__ input_path: {}".format(input_path))
|
||||
_remove_at_end.append(input_path)
|
||||
|
||||
output_path = os.path.join(
|
||||
os.path.normpath(stagingdir), output_file)
|
||||
self.log.debug("__ output_path: {}".format(output_path))
|
||||
|
||||
input_args = []
|
||||
output_args = []
|
||||
# overrides output file
|
||||
input_args.append("-y")
|
||||
# preset's input data
|
||||
input_args.extend(repre["_profile"].get('input', []))
|
||||
input_args.append("-loop 1 -i {}".format(slate_path))
|
||||
input_args.extend([
|
||||
"-r {}".format(fps),
|
||||
"-t 0.04"]
|
||||
)
|
||||
|
||||
# output args
|
||||
codec_args = repre["_profile"].get('codec', [])
|
||||
output_args.extend(codec_args)
|
||||
# preset's output data
|
||||
output_args.extend(repre["_profile"].get('output', []))
|
||||
|
||||
# make sure colors are correct
|
||||
output_args.extend([
|
||||
"-vf scale=out_color_matrix=bt709",
|
||||
"-color_primaries bt709",
|
||||
"-color_trc bt709",
|
||||
"-colorspace bt709"
|
||||
])
|
||||
|
||||
# scaling none square pixels and 1920 width
|
||||
if "reformat" in p_tags:
|
||||
if resolution_ratio < delivery_ratio:
|
||||
self.log.debug("lower then delivery")
|
||||
width_scale = int(to_width * scale_factor)
|
||||
width_half_pad = int((
|
||||
to_width - width_scale)/2)
|
||||
height_scale = to_height
|
||||
height_half_pad = 0
|
||||
else:
|
||||
self.log.debug("heigher then delivery")
|
||||
width_scale = to_width
|
||||
width_half_pad = 0
|
||||
scale_factor = float(to_width) / float(resolution_width)
|
||||
self.log.debug(scale_factor)
|
||||
height_scale = int(
|
||||
resolution_height * scale_factor)
|
||||
height_half_pad = int(
|
||||
(to_height - height_scale)/2)
|
||||
|
||||
self.log.debug(
|
||||
"__ width_scale: `{}`".format(width_scale))
|
||||
self.log.debug(
|
||||
"__ width_half_pad: `{}`".format(width_half_pad))
|
||||
self.log.debug(
|
||||
"__ height_scale: `{}`".format(height_scale))
|
||||
self.log.debug(
|
||||
"__ height_half_pad: `{}`".format(height_half_pad))
|
||||
|
||||
scaling_arg = "scale={0}x{1}:flags=lanczos,pad={2}:{3}:{4}:{5}:black,setsar=1".format(
|
||||
width_scale, height_scale, to_width, to_height, width_half_pad, height_half_pad
|
||||
)
|
||||
|
||||
vf_back = self.add_video_filter_args(
|
||||
output_args, scaling_arg)
|
||||
# add it to output_args
|
||||
output_args.insert(0, vf_back)
|
||||
|
||||
slate_v_path = slate_path.replace(".png", ext)
|
||||
output_args.append(slate_v_path)
|
||||
_remove_at_end.append(slate_v_path)
|
||||
|
||||
slate_args = [
|
||||
ffmpeg_path,
|
||||
" ".join(input_args),
|
||||
" ".join(output_args)
|
||||
]
|
||||
slate_subprcs_cmd = " ".join(slate_args)
|
||||
|
||||
# run slate generation subprocess
|
||||
self.log.debug("Slate Executing: {}".format(slate_subprcs_cmd))
|
||||
slate_output = pype.api.subprocess(slate_subprcs_cmd)
|
||||
self.log.debug("Slate Output: {}".format(slate_output))
|
||||
|
||||
# create ffmpeg concat text file path
|
||||
conc_text_file = input_file.replace(ext, "") + "_concat" + ".txt"
|
||||
conc_text_path = os.path.join(
|
||||
os.path.normpath(stagingdir), conc_text_file)
|
||||
_remove_at_end.append(conc_text_path)
|
||||
self.log.debug("__ conc_text_path: {}".format(conc_text_path))
|
||||
|
||||
new_line = "\n"
|
||||
with open(conc_text_path, "w") as conc_text_f:
|
||||
conc_text_f.writelines([
|
||||
"file {}".format(
|
||||
slate_v_path.replace("\\", "/")),
|
||||
new_line,
|
||||
"file {}".format(input_path.replace("\\", "/"))
|
||||
])
|
||||
|
||||
# concat slate and videos together
|
||||
conc_input_args = ["-y", "-f concat", "-safe 0"]
|
||||
conc_input_args.append("-i {}".format(conc_text_path))
|
||||
|
||||
conc_output_args = ["-c copy"]
|
||||
conc_output_args.append(output_path)
|
||||
|
||||
concat_args = [
|
||||
ffmpeg_path,
|
||||
" ".join(conc_input_args),
|
||||
" ".join(conc_output_args)
|
||||
]
|
||||
concat_subprcs_cmd = " ".join(concat_args)
|
||||
|
||||
# ffmpeg concat subprocess
|
||||
self.log.debug("Executing concat: {}".format(concat_subprcs_cmd))
|
||||
concat_output = pype.api.subprocess(concat_subprcs_cmd)
|
||||
self.log.debug("Output concat: {}".format(concat_output))
|
||||
|
||||
self.log.debug("__ repre[tags]: {}".format(repre["tags"]))
|
||||
repre_update = {
|
||||
"files": output_file,
|
||||
"name": repre["name"],
|
||||
"tags": [x for x in repre["tags"] if x != "delete"]
|
||||
}
|
||||
inst_data["representations"][i].update(repre_update)
|
||||
self.log.debug(
|
||||
"_ representation {}: `{}`".format(
|
||||
i, inst_data["representations"][i]))
|
||||
|
||||
# removing temp files
|
||||
for f in _remove_at_end:
|
||||
os.remove(f)
|
||||
self.log.debug("Removed: `{}`".format(f))
|
||||
|
||||
# Remove any representations tagged for deletion.
|
||||
for repre in inst_data.get("representations", []):
|
||||
if "delete" in repre.get("tags", []):
|
||||
self.log.debug("Removing representation: {}".format(repre))
|
||||
inst_data["representations"].remove(repre)
|
||||
|
||||
self.log.debug(inst_data["representations"])
|
||||
|
||||
def add_video_filter_args(self, args, inserting_arg):
|
||||
"""
|
||||
Fixing video filter arguments to be one long string
|
||||
|
||||
Args:
|
||||
args (list): list of string arguments
|
||||
inserting_arg (str): string argument we want to add
|
||||
(without flag `-vf`)
|
||||
|
||||
Returns:
|
||||
str: long joined argument to be added back to list of arguments
|
||||
|
||||
"""
|
||||
# find all video format settings
|
||||
vf_settings = [p for p in args
|
||||
for v in ["-filter:v", "-vf"]
|
||||
if v in p]
|
||||
self.log.debug("_ vf_settings: `{}`".format(vf_settings))
|
||||
|
||||
# remove them from output args list
|
||||
for p in vf_settings:
|
||||
self.log.debug("_ remove p: `{}`".format(p))
|
||||
args.remove(p)
|
||||
self.log.debug("_ args: `{}`".format(args))
|
||||
|
||||
# strip them from all flags
|
||||
vf_fixed = [p.replace("-vf ", "").replace("-filter:v ", "")
|
||||
for p in vf_settings]
|
||||
|
||||
self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
|
||||
vf_fixed.insert(0, inserting_arg)
|
||||
self.log.debug("_ vf_fixed: `{}`".format(vf_fixed))
|
||||
# create new video filter setting
|
||||
vf_back = "-vf " + ",".join(vf_fixed)
|
||||
|
||||
return vf_back
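Editorial aside (not part of this commit): a minimal sketch of the concat-list approach this plugin uses to join the one-frame slate video with the review movie. Paths are written under a temporary directory and the final ffmpeg command (concat demuxer plus stream copy) is only printed, not executed.

import os
import tempfile

staging = tempfile.mkdtemp()
slate_v_path = os.path.join(staging, "shot010_review_slate_temp.mov")  # hypothetical
input_path = os.path.join(staging, "shot010_review.mov")               # hypothetical
output_path = os.path.join(staging, "shot010_review_slate.mov")        # hypothetical
conc_text_path = os.path.join(staging, "shot010_review_concat.txt")

# The concat demuxer reads a plain text file listing the clips in order.
with open(conc_text_path, "w") as f:
    f.write("file {}\nfile {}".format(
        slate_v_path.replace("\\", "/"),
        input_path.replace("\\", "/"),
    ))

# "-f concat -safe 0" reads the list file; "-c copy" joins without re-encoding.
cmd = "ffmpeg -y -f concat -safe 0 -i {} -c copy {}".format(conc_text_path, output_path)
print(cmd)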
@ -84,9 +84,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
|
||||
project = io.find_one({"type": "project"})
|
||||
|
||||
asset = io.find_one({"type": "asset",
|
||||
"name": ASSET,
|
||||
"parent": project["_id"]})
|
||||
asset = io.find_one({
|
||||
"type": "asset",
|
||||
"name": ASSET,
|
||||
"parent": project["_id"]
|
||||
})
|
||||
|
||||
assert all([project, asset]), ("Could not find current project or "
|
||||
"asset '%s'" % ASSET)
@ -94,10 +96,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
subset = self.get_subset(asset, instance)
|
||||
|
||||
# get next version
|
||||
latest_version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
{"name": True},
|
||||
sort=[("name", -1)])
|
||||
latest_version = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"]
|
||||
},
|
||||
{"name": True},
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
|
||||
next_version = 1
|
||||
if latest_version is not None:
@ -318,9 +324,11 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
|
|||
|
||||
def get_subset(self, asset, instance):
|
||||
|
||||
subset = io.find_one({"type": "subset",
|
||||
"parent": asset["_id"],
|
||||
"name": instance.data["subset"]})
|
||||
subset = io.find_one({
|
||||
"type": "subset",
|
||||
"parent": asset["_id"],
|
||||
"name": instance.data["subset"]
|
||||
})
|
||||
|
||||
if subset is None:
|
||||
subset_name = instance.data["subset"]
@ -82,31 +82,40 @@ class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
|
|||
project_name = api.Session["AVALON_PROJECT"]
|
||||
a_template = anatomy.templates
|
||||
|
||||
project = io.find_one({"type": "project",
|
||||
"name": project_name},
|
||||
projection={"config": True, "data": True})
|
||||
project = io.find_one(
|
||||
{"type": "project", "name": project_name},
|
||||
projection={"config": True, "data": True}
|
||||
)
|
||||
|
||||
template = a_template['publish']['path']
|
||||
# anatomy = instance.context.data['anatomy']
|
||||
|
||||
asset = io.find_one({"type": "asset",
|
||||
"name": asset_name,
|
||||
"parent": project["_id"]})
|
||||
asset = io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name,
|
||||
"parent": project["_id"]
|
||||
})
|
||||
|
||||
assert asset, ("No asset found by the name '{}' "
|
||||
"in project '{}'".format(asset_name, project_name))
|
||||
|
||||
subset = io.find_one({"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]})
|
||||
subset = io.find_one({
|
||||
"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]
|
||||
})
|
||||
|
||||
# assume there is no version yet, we start at `1`
|
||||
version = None
|
||||
version_number = 1
|
||||
if subset is not None:
|
||||
version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
sort=[("name", -1)])
|
||||
version = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"]
|
||||
},
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
|
||||
# if there is a subset there ought to be version
|
||||
if version is not None:
@ -7,6 +7,7 @@ import errno
|
|||
import pyblish.api
|
||||
from avalon import api, io
|
||||
from avalon.vendor import filelink
|
||||
|
||||
# this is needed until speedcopy for linux is fixed
|
||||
if sys.platform == "win32":
|
||||
from speedcopy import copyfile
@ -70,6 +71,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
"audio",
|
||||
"yetiRig",
|
||||
"yeticache",
|
||||
"nukenodes",
|
||||
"gizmo",
|
||||
"source",
|
||||
"matchmove",
|
||||
"image"
|
||||
"source",
|
||||
"assembly"
|
||||
]
@ -149,9 +155,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
io.install()
|
||||
project = io.find_one({"type": "project"})
|
||||
|
||||
asset = io.find_one({"type": "asset",
|
||||
"name": ASSET,
|
||||
"parent": project["_id"]})
|
||||
asset = io.find_one({
|
||||
"type": "asset",
|
||||
"name": ASSET,
|
||||
"parent": project["_id"]
|
||||
})
|
||||
|
||||
assert all([project, asset]), ("Could not find current project or "
|
||||
"asset '%s'" % ASSET)
@ -159,10 +167,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
subset = self.get_subset(asset, instance)
|
||||
|
||||
# get next version
|
||||
latest_version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
{"name": True},
|
||||
sort=[("name", -1)])
|
||||
latest_version = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"]
|
||||
},
|
||||
{"name": True},
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
|
||||
next_version = 1
|
||||
if latest_version is not None:
@ -171,16 +183,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
if instance.data.get('version'):
|
||||
next_version = int(instance.data.get('version'))
|
||||
|
||||
# self.log.info("Verifying version from assumed destination")
|
||||
|
||||
# assumed_data = instance.data["assumedTemplateData"]
|
||||
# assumed_version = assumed_data["version"]
|
||||
# if assumed_version != next_version:
|
||||
# raise AttributeError("Assumed version 'v{0:03d}' does not match"
|
||||
# "next version in database "
|
||||
# "('v{1:03d}')".format(assumed_version,
|
||||
# next_version))
|
||||
|
||||
self.log.debug("Next version: v{0:03d}".format(next_version))
|
||||
|
||||
version_data = self.create_version_data(context, instance)
@ -266,6 +268,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
"version": int(version["name"]),
|
||||
"hierarchy": hierarchy}
|
||||
|
||||
resolution_width = repre.get("resolutionWidth")
|
||||
resolution_height = repre.get("resolutionHeight")
|
||||
fps = instance.data.get("fps")
|
||||
|
||||
if resolution_width:
|
||||
template_data["resolution_width"] = resolution_width
|
||||
if resolution_width:
|
||||
template_data["resolution_height"] = resolution_height
|
||||
if resolution_width:
|
||||
template_data["fps"] = fps
|
||||
|
||||
files = repre['files']
|
||||
if repre.get('stagingDir'):
|
||||
stagingdir = repre['stagingDir']
@ -315,10 +328,13 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
index_frame_start = None
|
||||
|
||||
if repre.get("frameStart"):
|
||||
frame_start_padding = len(str(
|
||||
repre.get("frameEnd")))
|
||||
frame_start_padding = anatomy.templates["render"]["padding"]
|
||||
index_frame_start = int(repre.get("frameStart"))
|
||||
|
||||
# exception for slate workflow
|
||||
if "slate" in instance.data["families"]:
|
||||
index_frame_start -= 1
|
||||
|
||||
dst_padding_exp = src_padding_exp
|
||||
dst_start_frame = None
|
||||
for i in src_collection.indexes:
@ -353,7 +369,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
dst_head,
|
||||
dst_start_frame,
|
||||
dst_tail).replace("..", ".")
|
||||
repre['published_path'] = dst
|
||||
repre['published_path'] = self.unc_convert(dst)
|
||||
|
||||
else:
|
||||
# Single file
@ -382,10 +398,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
|
||||
instance.data["transfers"].append([src, dst])
|
||||
|
||||
repre['published_path'] = dst
|
||||
repre['published_path'] = self.unc_convert(dst)
|
||||
self.log.debug("__ dst: {}".format(dst))
|
||||
|
||||
representation = {
|
||||
"_id": io.ObjectId(),
|
||||
"schema": "pype:representation-2.0",
|
||||
"type": "representation",
|
||||
"parent": version_id,
@ -410,8 +427,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
}
|
||||
}
|
||||
|
||||
if repre.get("outputName"):
|
||||
representation["context"]["output"] = repre['outputName']
|
||||
|
||||
if sequence_repre and repre.get("frameStart"):
|
||||
representation['context']['frame'] = repre.get("frameStart")
|
||||
representation['context']['frame'] = src_padding_exp % int(repre.get("frameStart"))
|
||||
|
||||
self.log.debug("__ representation: {}".format(representation))
|
||||
destination_list.append(dst)
@ -425,6 +445,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
self.log.debug("__ represNAME: {}".format(rep['name']))
|
||||
self.log.debug("__ represPATH: {}".format(rep['published_path']))
|
||||
io.insert_many(representations)
|
||||
instance.data["published_representations"] = representations
|
||||
# self.log.debug("Representation: {}".format(representations))
|
||||
self.log.info("Registered {} items".format(len(representations)))
@ -456,6 +477,23 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
self.log.debug("Hardlinking file .. {} -> {}".format(src, dest))
|
||||
self.hardlink_file(src, dest)
|
||||
|
||||
def unc_convert(self, path):
|
||||
self.log.debug("> __ path: `{}`".format(path))
|
||||
drive, _path = os.path.splitdrive(path)
|
||||
self.log.debug("> __ drive, _path: `{}`, `{}`".format(drive, _path))
|
||||
|
||||
if not os.path.exists(drive + "/"):
|
||||
self.log.info("Converting to unc from environments ..")
|
||||
|
||||
path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH")
|
||||
path_mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT")
|
||||
|
||||
if "/" in path_mount:
|
||||
path = path.replace(path_mount[0:-1], path_replace)
|
||||
else:
|
||||
path = path.replace(path_mount, path_replace)
|
||||
return path
|
||||
|
||||
def copy_file(self, src, dst):
|
||||
""" Copy given source to destination
@ -465,8 +503,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
Returns:
|
||||
None
|
||||
"""
|
||||
src = os.path.normpath(src)
|
||||
dst = os.path.normpath(dst)
|
||||
src = self.unc_convert(src)
|
||||
dst = self.unc_convert(dst)
|
||||
|
||||
self.log.debug("Copying file .. {} -> {}".format(src, dst))
|
||||
dirname = os.path.dirname(dst)
@ -487,6 +525,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
|
||||
def hardlink_file(self, src, dst):
|
||||
dirname = os.path.dirname(dst)
|
||||
|
||||
src = self.unc_convert(src)
|
||||
dst = self.unc_convert(dst)
|
||||
|
||||
try:
|
||||
os.makedirs(dirname)
|
||||
except OSError as e:
@ -499,9 +541,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
filelink.create(src, dst, filelink.HARDLINK)
|
||||
|
||||
def get_subset(self, asset, instance):
|
||||
subset = io.find_one({"type": "subset",
|
||||
"parent": asset["_id"],
|
||||
"name": instance.data["subset"]})
|
||||
subset = io.find_one({
|
||||
"type": "subset",
|
||||
"parent": asset["_id"],
|
||||
"name": instance.data["subset"]
|
||||
})
|
||||
|
||||
if subset is None:
|
||||
subset_name = instance.data["subset"]
@ -524,13 +568,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
|
||||
# add group if available
|
||||
if instance.data.get("subsetGroup"):
|
||||
subset["data"].update(
|
||||
{"subsetGroup": instance.data.get("subsetGroup")}
|
||||
)
|
||||
io.update_many({
|
||||
'type': 'subset',
|
||||
'_id': io.ObjectId(subset["_id"])
|
||||
}, {'$set': subset["data"]}
|
||||
}, {'$set': {'data.subsetGroup':
|
||||
instance.data.get('subsetGroup')}}
|
||||
)
|
||||
|
||||
return subset
@ -594,7 +636,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
"source": source,
|
||||
"comment": context.data.get("comment"),
|
||||
"machine": context.data.get("machine"),
|
||||
"fps": context.data.get("fps")}
|
||||
"fps": context.data.get(
|
||||
"fps", instance.data.get("fps"))}
|
||||
|
||||
# Include optional data if present in
|
||||
optionals = [
@ -88,9 +88,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
|
|||
|
||||
project = io.find_one({"type": "project"})
|
||||
|
||||
asset = io.find_one({"type": "asset",
|
||||
"name": ASSET,
|
||||
"parent": project["_id"]})
|
||||
asset = io.find_one({
|
||||
"type": "asset",
|
||||
"name": ASSET,
|
||||
"parent": project["_id"]
|
||||
})
|
||||
|
||||
assert all([project, asset]), ("Could not find current project or "
|
||||
"asset '%s'" % ASSET)
@ -98,10 +100,14 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
|
|||
subset = self.get_subset(asset, instance)
|
||||
|
||||
# get next version
|
||||
latest_version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
{"name": True},
|
||||
sort=[("name", -1)])
|
||||
latest_version = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"]
|
||||
},
|
||||
{"name": True},
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
|
||||
next_version = 1
|
||||
if latest_version is not None:
@ -251,9 +257,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
|
|||
|
||||
self.log.debug("path_to_save: {}".format(path_to_save))
|
||||
|
||||
|
||||
|
||||
|
||||
representation = {
|
||||
"schema": "pype:representation-2.0",
|
||||
"type": "representation",
@ -332,9 +335,11 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
|
|||
|
||||
def get_subset(self, asset, instance):
|
||||
|
||||
subset = io.find_one({"type": "subset",
|
||||
"parent": asset["_id"],
|
||||
"name": instance.data["subset"]})
|
||||
subset = io.find_one({
|
||||
"type": "subset",
|
||||
"parent": asset["_id"],
|
||||
"name": instance.data["subset"]
|
||||
})
|
||||
|
||||
if subset is None:
|
||||
subset_name = instance.data["subset"]
139
pype/plugins/global/publish/integrate_thumbnail.py
Normal file
@ -0,0 +1,139 @@
|
|||
import os
|
||||
import sys
|
||||
import errno
|
||||
import shutil
|
||||
import copy
|
||||
|
||||
import six
|
||||
import pyblish.api
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
from avalon import api, io
|
||||
|
||||
|
||||
class IntegrateThumbnails(pyblish.api.InstancePlugin):
|
||||
"""Integrate Thumbnails."""
|
||||
|
||||
label = "Integrate Thumbnails"
|
||||
order = pyblish.api.IntegratorOrder + 0.01
|
||||
families = ["review"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
if not os.environ.get("AVALON_THUMBNAIL_ROOT"):
|
||||
self.log.info("AVALON_THUMBNAIL_ROOT is not set."
|
||||
" Skipping thumbnail integration.")
|
||||
return
|
||||
|
||||
published_repres = instance.data.get("published_representations")
|
||||
if not published_repres:
|
||||
self.log.debug(
|
||||
"There are not published representation ids on the instance."
|
||||
)
|
||||
return
|
||||
|
||||
project_name = api.Session["AVALON_PROJECT"]
|
||||
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
if "publish" not in anatomy.templates:
|
||||
raise AssertionError("Anatomy does not have set publish key!")
|
||||
|
||||
if "thumbnail" not in anatomy.templates["publish"]:
|
||||
raise AssertionError((
|
||||
"There is not set \"thumbnail\" template for project \"{}\""
|
||||
).format(project_name))
|
||||
|
||||
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
|
||||
|
||||
io.install()
|
||||
|
||||
thumb_repre = None
|
||||
for repre in published_repres:
|
||||
if repre["name"].lower() == "thumbnail":
|
||||
thumb_repre = repre
|
||||
break
|
||||
|
||||
if not thumb_repre:
|
||||
self.log.debug(
|
||||
"There is not representation with name \"thumbnail\""
|
||||
)
|
||||
return
|
||||
|
||||
version = io.find_one({"_id": thumb_repre["parent"]})
|
||||
if not version:
|
||||
raise AssertionError(
|
||||
"There does not exist version with id {}".format(
|
||||
str(thumb_repre["parent"])
|
||||
)
|
||||
)
|
||||
|
||||
# Get full path to thumbnail file from representation
|
||||
src_full_path = os.path.normpath(thumb_repre["data"]["path"])
|
||||
if not os.path.exists(src_full_path):
|
||||
self.log.warning("Thumbnail file was not found. Path: {}".format(
|
||||
src_full_path
|
||||
))
|
||||
return
|
||||
|
||||
filename, file_extension = os.path.splitext(src_full_path)
|
||||
# Create id for mongo entity now to fill anatomy template
|
||||
thumbnail_id = ObjectId()
|
||||
|
||||
# Prepare anatomy template fill data
|
||||
template_data = copy.deepcopy(thumb_repre["context"])
|
||||
template_data.update({
|
||||
"_id": str(thumbnail_id),
|
||||
"thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"),
|
||||
"ext": file_extension,
|
||||
"thumbnail_type": "thumbnail"
|
||||
})
|
||||
|
||||
anatomy_filled = anatomy.format(template_data)
|
||||
final_path = anatomy_filled.get("publish", {}).get("thumbnail")
|
||||
if not final_path:
|
||||
raise AssertionError((
|
||||
"Anatomy template was not filled with entered data"
|
||||
"\nTemplate: {} "
|
||||
"\nData: {}"
|
||||
).format(thumbnail_template, str(template_data)))
|
||||
|
||||
dst_full_path = os.path.normpath(final_path)
|
||||
self.log.debug(
|
||||
"Copying file .. {} -> {}".format(src_full_path, dst_full_path)
|
||||
)
|
||||
dirname = os.path.dirname(dst_full_path)
|
||||
try:
|
||||
os.makedirs(dirname)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
tp, value, tb = sys.exc_info()
|
||||
six.reraise(tp, value, tb)
|
||||
|
||||
shutil.copy(src_full_path, dst_full_path)
|
||||
|
||||
# Clean template data from keys that are dynamic
|
||||
template_data.pop("_id")
|
||||
template_data.pop("thumbnail_root")
|
||||
|
||||
thumbnail_entity = {
|
||||
"_id": thumbnail_id,
|
||||
"type": "thumbnail",
|
||||
"schema": "pype:thumbnail-1.0",
|
||||
"data": {
|
||||
"template": thumbnail_template,
|
||||
"template_data": template_data
|
||||
}
|
||||
}
|
||||
# Create thumbnail entity
|
||||
io.insert_one(thumbnail_entity)
|
||||
self.log.debug(
|
||||
"Creating entity in database {}".format(str(thumbnail_entity))
|
||||
)
|
||||
# Set thumbnail id for version
|
||||
io.update_many(
|
||||
{"_id": version["_id"]},
|
||||
{"$set": {"data.thumbnail_id": thumbnail_id}}
|
||||
)
|
||||
self.log.debug("Setting thumbnail for version \"{}\" <{}>".format(
|
||||
version["name"], str(version["_id"])
|
||||
))
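Editorial aside (not part of this commit): a minimal sketch of the kind of template fill the thumbnail integration relies on, shown with plain str.format instead of the real Anatomy helper. The template string and data are hypothetical examples only.

# Hypothetical publish/thumbnail template and fill data.
thumbnail_template = (
    "{thumbnail_root}/{project[name]}/{asset}/{subset}/"
    "v{version:0>3}/{thumbnail_type}_{_id}{ext}"
)
template_data = {
    "thumbnail_root": "/studio/thumbnails",
    "project": {"name": "demo"},
    "asset": "shot010",
    "subset": "reviewMain",
    "version": 1,
    "thumbnail_type": "thumbnail",
    "_id": "5d2c3b0000000000000000ab",  # placeholder ObjectId string
    "ext": ".jpg",
}

# Resolve the destination path for the copied thumbnail file.
print(thumbnail_template.format(**template_data))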
@ -21,20 +21,34 @@ def _get_script():
|
|||
if module_path.endswith(".pyc"):
|
||||
module_path = module_path[:-len(".pyc")] + ".py"
|
||||
|
||||
module_path = os.path.normpath(module_path)
|
||||
mount_root = os.path.normpath(os.environ['PYPE_STUDIO_CORE_MOUNT'])
|
||||
network_root = os.path.normpath(os.environ['PYPE_STUDIO_CORE_PATH'])
|
||||
|
||||
module_path = module_path.replace(mount_root, network_root)
|
||||
|
||||
return module_path
|
||||
|
||||
|
||||
# Logic to retrieve latest files concerning extendFrames
|
||||
def get_latest_version(asset_name, subset_name, family):
|
||||
# Get asset
|
||||
asset_name = io.find_one({"type": "asset",
|
||||
"name": asset_name},
|
||||
projection={"name": True})
|
||||
asset_name = io.find_one(
|
||||
{
|
||||
"type": "asset",
|
||||
"name": asset_name
|
||||
},
|
||||
projection={"name": True}
|
||||
)
|
||||
|
||||
subset = io.find_one({"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset_name["_id"]},
|
||||
projection={"_id": True, "name": True})
|
||||
subset = io.find_one(
|
||||
{
|
||||
"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset_name["_id"]
|
||||
},
|
||||
projection={"_id": True, "name": True}
|
||||
)
|
||||
|
||||
# Check if subsets actually exists (pre-run check)
|
||||
assert subset, "No subsets found, please publish with `extendFrames` off"
@ -45,11 +59,15 @@ def get_latest_version(asset_name, subset_name, family):
|
|||
"data.endFrame": True,
|
||||
"parent": True}
|
||||
|
||||
version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"],
|
||||
"data.families": family},
|
||||
projection=version_projection,
|
||||
sort=[("name", -1)])
|
||||
version = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"],
|
||||
"data.families": family
|
||||
},
|
||||
projection=version_projection,
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
|
||||
assert version, "No version found, this is a bug"
@ -143,7 +161,10 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"FTRACK_API_USER",
|
||||
"FTRACK_API_KEY",
|
||||
"FTRACK_SERVER",
|
||||
"PYPE_ROOT"
|
||||
"PYPE_ROOT",
|
||||
"PYPE_METADATA_FILE",
|
||||
"PYPE_STUDIO_PROJECTS_PATH",
|
||||
"PYPE_STUDIO_PROJECTS_MOUNT"
|
||||
]
|
||||
|
||||
def _submit_deadline_post_job(self, instance, job):
@ -154,7 +175,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"""
|
||||
data = instance.data.copy()
|
||||
subset = data["subset"]
|
||||
state = data.get("publishJobState", "Suspended")
|
||||
job_name = "{batch} - {subset} [publish image sequence]".format(
|
||||
batch=job["Props"]["Name"],
|
||||
subset=subset
@ -164,6 +184,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
output_dir = instance.data["outputDir"]
|
||||
metadata_path = os.path.join(output_dir, metadata_filename)
|
||||
|
||||
metadata_path = os.path.normpath(metadata_path)
|
||||
mount_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_MOUNT'])
|
||||
network_root = os.path.normpath(
|
||||
os.environ['PYPE_STUDIO_PROJECTS_PATH'])
|
||||
|
||||
metadata_path = metadata_path.replace(mount_root, network_root)
|
||||
|
||||
# Generate the payload for Deadline submission
|
||||
payload = {
|
||||
"JobInfo": {
@ -174,13 +201,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"JobDependency0": job["_id"],
|
||||
"UserName": job["Props"]["User"],
|
||||
"Comment": instance.context.data.get("comment", ""),
|
||||
"InitialStatus": state,
|
||||
"Priority": job["Props"]["Pri"]
|
||||
},
|
||||
"PluginInfo": {
|
||||
"Version": "3.6",
|
||||
"ScriptFile": _get_script(),
|
||||
"Arguments": '--paths "{}"'.format(metadata_path),
|
||||
"Arguments": "",
|
||||
"SingleFrameOnly": "True"
|
||||
},
@ -192,6 +218,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
# job so they use the same environment
|
||||
|
||||
environment = job["Props"].get("Env", {})
|
||||
environment["PYPE_METADATA_FILE"] = metadata_path
|
||||
i = 0
|
||||
for index, key in enumerate(environment):
|
||||
self.log.info("KEY: {}".format(key))
@ -282,6 +309,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
relative_path = os.path.relpath(source, api.registered_root())
|
||||
source = os.path.join("{root}", relative_path).replace("\\", "/")
|
||||
|
||||
# find subsets and version to attach render to
|
||||
attach_to = instance.data.get("attachTo")
|
||||
attach_subset_versions = []
|
||||
if attach_to:
|
||||
for subset in attach_to:
|
||||
for instance in context:
|
||||
if instance.data["subset"] != subset["subset"]:
|
||||
continue
|
||||
attach_subset_versions.append(
|
||||
{"version": instance.data["version"],
|
||||
"subset": subset["subset"],
|
||||
"family": subset["family"]})
|
||||
|
||||
# Write metadata for publish job
|
||||
metadata = {
|
||||
"asset": asset,
@ -293,6 +333,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
"source": source,
|
||||
"user": context.data["user"],
|
||||
"version": context.data["version"],
|
||||
"intent": context.data.get("intent"),
|
||||
"comment": context.data.get("comment"),
|
||||
# Optional metadata (for debugging)
|
||||
"metadata": {
|
||||
"instance": data,
@ -301,6 +343,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
|
|||
}
|
||||
}
|
||||
|
||||
if api.Session["AVALON_APP"] == "nuke":
|
||||
metadata['subset'] = subset
|
||||
|
||||
if submission_type == "muster":
|
||||
ftrack = {
|
||||
"FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
@ -27,6 +27,8 @@ class ValidateFfmpegInstallef(pyblish.api.Validator):
|
|||
return True
|
||||
|
||||
def process(self, instance):
|
||||
self.log.info("ffmpeg path: `{}`".format(
|
||||
os.environ.get("FFMPEG_PATH", "")))
|
||||
if self.is_tool(
|
||||
os.path.join(
|
||||
os.environ.get("FFMPEG_PATH", ""), "ffmpeg")) is False:
@ -1,7 +1,4 @@
|
|||
import os
|
||||
import sys
|
||||
from avalon import io
|
||||
from pprint import pprint
|
||||
import acre
|
||||
|
||||
from avalon import api, lib
@ -1,10 +1,9 @@
|
|||
import os
|
||||
import sys
|
||||
from pprint import pprint
|
||||
import acre
|
||||
|
||||
from avalon import api, lib, io
|
||||
import pype.api as pype
|
||||
from pypeapp import Anatomy
|
||||
|
||||
|
||||
class PremierePro(api.Action):
@ -1,6 +1,7 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
import avalon.maya
|
||||
from pype.maya import lib
|
||||
|
||||
from maya import cmds
@ -14,10 +15,21 @@ class CreateAss(avalon.maya.Creator):
|
|||
icon = "cube"
|
||||
defaults = ['Main']
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateAss, self).__init__(*args, **kwargs)
|
||||
|
||||
# Add animation data
|
||||
self.data.update(lib.collect_animation_data())
|
||||
|
||||
# Vertex colors with the geometry
|
||||
self.data["exportSequence"] = False
|
||||
|
||||
def process(self):
|
||||
instance = super(CreateAss, self).process()
|
||||
|
||||
data = OrderedDict(**self.data)
|
||||
# data = OrderedDict(**self.data)
|
||||
|
||||
|
||||
|
||||
nodes = list()
@ -30,4 +42,6 @@ class CreateAss(avalon.maya.Creator):
|
|||
assProxy = cmds.sets(name="proxy_SET", empty=True)
|
||||
cmds.sets([assContent, assProxy], forceElement=instance)
|
||||
|
||||
self.data = data
|
||||
# self.log.info(data)
|
||||
#
|
||||
# self.data = data
@ -140,9 +140,9 @@ class ImportMayaLoader(api.Loader):
|
|||
|
||||
message = "Are you sure you want import this"
|
||||
state = QtWidgets.QMessageBox.warning(None,
|
||||
"Are you sure?",
|
||||
message,
|
||||
buttons=buttons,
|
||||
defaultButton=accept)
|
||||
"Are you sure?",
|
||||
message,
|
||||
buttons=buttons,
|
||||
defaultButton=accept)
|
||||
|
||||
return state == accept
@ -2,6 +2,7 @@ from avalon import api
|
|||
import pype.maya.plugin
|
||||
import os
|
||||
from pypeapp import config
|
||||
import clique
|
||||
|
||||
|
||||
class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
@ -21,6 +22,13 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
|
|||
from avalon import maya
|
||||
import pymel.core as pm
|
||||
|
||||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
|
||||
self.log.info("version_data: {}\n".format(version_data))
|
||||
|
||||
frameStart = version_data.get("frameStart", None)
|
||||
|
||||
try:
|
||||
family = context["representation"]["context"]["family"]
|
||||
except ValueError:
@ -30,7 +38,24 @@ class AssProxyLoader(pype.maya.plugin.ReferenceLoader):
|
|||
|
||||
groupName = "{}:{}".format(namespace, name)
|
||||
path = self.fname
|
||||
proxyPath = os.path.splitext(path)[0] + ".ma"
|
||||
proxyPath_base = os.path.splitext(path)[0]
|
||||
|
||||
if frameStart is not None:
|
||||
proxyPath_base = os.path.splitext(proxyPath_base)[0]
|
||||
|
||||
publish_folder = os.path.split(path)[0]
|
||||
files_in_folder = os.listdir(publish_folder)
|
||||
collections, remainder = clique.assemble(files_in_folder)
|
||||
|
||||
if collections:
|
||||
hashes = collections[0].padding * '#'
|
||||
coll = collections[0].format('{head}[index]{tail}')
|
||||
filename = coll.replace('[index]', hashes)
|
||||
|
||||
path = os.path.join(publish_folder, filename)
|
||||
|
||||
proxyPath = proxyPath_base + ".ma"
|
||||
self.log.info
|
||||
|
||||
nodes = cmds.file(proxyPath,
|
||||
namespace=namespace,
@ -147,6 +172,13 @@ class AssStandinLoader(api.Loader):
|
|||
import mtoa.ui.arnoldmenu
|
||||
import pymel.core as pm
|
||||
|
||||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
|
||||
self.log.info("version_data: {}\n".format(version_data))
|
||||
|
||||
frameStart = version_data.get("frameStart", None)
|
||||
|
||||
asset = context['asset']['name']
|
||||
namespace = namespace or lib.unique_namespace(
|
||||
asset + "_",
@ -182,6 +214,8 @@ class AssStandinLoader(api.Loader):
|
|||
|
||||
# Set the standin filepath
|
||||
standinShape.dso.set(self.fname)
|
||||
if frameStart is not None:
|
||||
standinShape.useFrameExtension.set(1)
|
||||
|
||||
nodes = [root, standin]
|
||||
self[:] = nodes
@ -199,14 +233,23 @@ class AssStandinLoader(api.Loader):
|
|||
|
||||
path = api.get_representation_path(representation)
|
||||
|
||||
# Update the standin
|
||||
members = pm.sets(container['objectName'], query=True)
|
||||
standins = pm.ls(members, type="AiStandIn", long=True)
|
||||
files_in_path = os.listdir(os.path.split(path)[0])
|
||||
sequence = 0
|
||||
collections, remainder = clique.assemble(files_in_path)
|
||||
if collections:
|
||||
sequence = 1
|
||||
|
||||
assert len(caches) == 1, "This is a bug"
|
||||
# Update the standin
|
||||
standins = list()
|
||||
members = pm.sets(container['objectName'], query=True)
|
||||
for member in members:
|
||||
shape = member.getShape()
|
||||
if (shape and shape.type() == "aiStandIn"):
|
||||
standins.append(shape)
|
||||
|
||||
for standin in standins:
|
||||
standin.cacheFileName.set(path)
|
||||
standin.dso.set(path)
|
||||
standin.useFrameExtension.set(sequence)
|
||||
|
||||
container = pm.PyNode(container["objectName"])
|
||||
container.representation.set(str(representation["_id"]))
@ -1,62 +0,0 @@
|
|||
import pype.maya.plugin
|
||||
import os
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
class CameraLoader(pype.maya.plugin.ReferenceLoader):
|
||||
"""Specific loader of Alembic for the pype.camera family"""
|
||||
|
||||
families = ["camera"]
|
||||
label = "Reference camera"
|
||||
representations = ["abc", "ma"]
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def process_reference(self, context, name, namespace, data):
|
||||
|
||||
import maya.cmds as cmds
|
||||
# Get family type from the context
|
||||
|
||||
try:
|
||||
family = context["representation"]["context"]["family"]
|
||||
except ValueError:
|
||||
family = "camera"
|
||||
|
||||
cmds.loadPlugin("AbcImport.mll", quiet=True)
|
||||
groupName = "{}:{}".format(namespace, name)
|
||||
nodes = cmds.file(self.fname,
|
||||
namespace=namespace,
|
||||
sharedReferenceFile=False,
|
||||
groupReference=True,
|
||||
groupName="{}:{}".format(namespace, name),
|
||||
reference=True,
|
||||
returnNewNodes=True)
|
||||
|
||||
cameras = cmds.ls(nodes, type="camera")
|
||||
|
||||
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
|
||||
colors = presets['plugins']['maya']['load']['colors']
|
||||
|
||||
c = colors.get(family)
|
||||
if c is not None:
|
||||
cmds.setAttr(groupName + ".useOutlinerColor", 1)
|
||||
cmds.setAttr(groupName + ".outlinerColor",
|
||||
c[0], c[1], c[2])
|
||||
|
||||
# Check the Maya version, lockTransform has been introduced since
|
||||
# Maya 2016.5 Ext 2
|
||||
version = int(cmds.about(version=True))
|
||||
if version >= 2016:
|
||||
for camera in cameras:
|
||||
cmds.camera(camera, edit=True, lockTransform=True)
|
||||
else:
|
||||
self.log.warning("This version of Maya does not support locking of"
|
||||
" transforms of cameras.")
|
||||
|
||||
self[:] = nodes
|
||||
|
||||
return nodes
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -1,54 +0,0 @@
|
|||
import pype.maya.plugin
|
||||
import os
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
class FBXLoader(pype.maya.plugin.ReferenceLoader):
|
||||
"""Load the FBX"""
|
||||
|
||||
families = ["fbx"]
|
||||
representations = ["fbx"]
|
||||
|
||||
label = "Reference FBX"
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def process_reference(self, context, name, namespace, data):
|
||||
|
||||
import maya.cmds as cmds
|
||||
from avalon import maya
|
||||
|
||||
try:
|
||||
family = context["representation"]["context"]["family"]
|
||||
except ValueError:
|
||||
family = "fbx"
|
||||
|
||||
# Ensure FBX plug-in is loaded
|
||||
cmds.loadPlugin("fbxmaya", quiet=True)
|
||||
|
||||
with maya.maintained_selection():
|
||||
nodes = cmds.file(self.fname,
|
||||
namespace=namespace,
|
||||
reference=True,
|
||||
returnNewNodes=True,
|
||||
groupReference=True,
|
||||
groupName="{}:{}".format(namespace, name))
|
||||
|
||||
groupName = "{}:{}".format(namespace, name)
|
||||
|
||||
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
|
||||
colors = presets['plugins']['maya']['load']['colors']
|
||||
|
||||
c = colors.get(family)
|
||||
if c is not None:
|
||||
cmds.setAttr(groupName + ".useOutlinerColor", 1)
|
||||
cmds.setAttr(groupName + ".outlinerColor",
|
||||
c[0], c[1], c[2])
|
||||
|
||||
self[:] = nodes
|
||||
|
||||
return nodes
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
@ -116,9 +116,11 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):
|
|||
shapes=True))
|
||||
nodes = set(nodes_list)
|
||||
|
||||
json_representation = io.find_one({"type": "representation",
|
||||
"parent": representation['parent'],
|
||||
"name": "json"})
|
||||
json_representation = io.find_one({
|
||||
"type": "representation",
|
||||
"parent": representation['parent'],
|
||||
"name": "json"
|
||||
})
|
||||
|
||||
# Load relationships
|
||||
shader_relation = api.get_representation_path(json_representation)
|
||||
|
|
|
|||
pype/plugins/maya/load/load_matchmove.py (new file, 30 lines)
|
|
@ -0,0 +1,30 @@
|
|||
from avalon import api
from maya import mel


class MatchmoveLoader(api.Loader):
    """
    This will run matchmove script to create track in scene.

    Supported script types are .py and .mel
    """

    families = ["matchmove"]
    representations = ["py", "mel"]
    defaults = ["Camera", "Object", "Mocap"]

    label = "Run matchmove script"
    icon = "empire"
    color = "orange"

    def load(self, context, name, namespace, data):
        if self.fname.lower().endswith(".py"):
            exec(open(self.fname).read())

        elif self.fname.lower().endswith(".mel"):
            mel.eval('source "{}"'.format(self.fname))

        else:
            self.log.error("Unsupported script type")

        return True
|
||||
|
|
@ -1,68 +0,0 @@
|
|||
import pype.maya.plugin
|
||||
from pypeapp import config
|
||||
import os
|
||||
|
||||
|
||||
class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
|
||||
"""Load the model"""
|
||||
|
||||
families = ["mayaAscii",
|
||||
"setdress",
|
||||
"layout"]
|
||||
representations = ["ma"]
|
||||
|
||||
label = "Reference Maya Ascii"
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def process_reference(self, context, name, namespace, data):
|
||||
|
||||
import maya.cmds as cmds
|
||||
from avalon import maya
|
||||
|
||||
try:
|
||||
family = context["representation"]["context"]["family"]
|
||||
except ValueError:
|
||||
family = "model"
|
||||
|
||||
with maya.maintained_selection():
|
||||
nodes = cmds.file(self.fname,
|
||||
namespace=namespace,
|
||||
reference=True,
|
||||
returnNewNodes=True,
|
||||
groupReference=True,
|
||||
groupName="{}:{}".format(namespace, name))
|
||||
|
||||
self[:] = nodes
|
||||
groupName = "{}:{}".format(namespace, name)
|
||||
|
||||
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
|
||||
colors = presets['plugins']['maya']['load']['colors']
|
||||
|
||||
c = colors.get(family)
|
||||
if c is not None:
|
||||
cmds.setAttr(groupName + ".useOutlinerColor", 1)
|
||||
cmds.setAttr(groupName + ".outlinerColor",
|
||||
c[0], c[1], c[2])
|
||||
cmds.setAttr(groupName + ".displayHandle", 1)
|
||||
# get bounding box
|
||||
bbox = cmds.exactWorldBoundingBox(groupName)
|
||||
# get pivot position on world space
|
||||
pivot = cmds.xform(groupName, q=True, sp=True, ws=True)
|
||||
# center of bounding box
|
||||
cx = (bbox[0] + bbox[3]) / 2
|
||||
cy = (bbox[1] + bbox[4]) / 2
|
||||
cz = (bbox[2] + bbox[5]) / 2
|
||||
# add pivot position to calculate offset
|
||||
cx = cx + pivot[0]
|
||||
cy = cy + pivot[1]
|
||||
cz = cz + pivot[2]
|
||||
# set selection handle offset to center of bounding box
|
||||
cmds.setAttr(groupName + ".selectHandleX", cx)
|
||||
cmds.setAttr(groupName + ".selectHandleY", cy)
|
||||
cmds.setAttr(groupName + ".selectHandleZ", cz)
|
||||
return nodes
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
|
@ -1,15 +1,22 @@
|
|||
import pype.maya.plugin
|
||||
from avalon import api, maya
|
||||
from maya import cmds
|
||||
import os
|
||||
from pypeapp import config
|
||||
reload(config)
|
||||
import pype.maya.plugin
|
||||
reload(pype.maya.plugin)
|
||||
|
||||
|
||||
class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
|
||||
"""Load the model"""
|
||||
|
||||
families = ["model", "pointcache", "animation"]
|
||||
representations = ["ma", "abc"]
|
||||
families = ["model",
|
||||
"pointcache",
|
||||
"animation",
|
||||
"mayaAscii",
|
||||
"setdress",
|
||||
"layout",
|
||||
"camera",
|
||||
"rig"]
|
||||
representations = ["ma", "abc", "fbx"]
|
||||
tool_names = ["loader"]
|
||||
|
||||
label = "Reference"
|
||||
|
|
@ -22,7 +29,6 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
|
|||
from avalon import maya
|
||||
import pymel.core as pm
|
||||
|
||||
|
||||
try:
|
||||
family = context["representation"]["context"]["family"]
|
||||
except ValueError:
|
||||
|
|
@ -40,7 +46,7 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
|
|||
reference=True,
|
||||
returnNewNodes=True)
|
||||
|
||||
namespace = cmds.referenceQuery(nodes[0], namespace=True)
|
||||
# namespace = cmds.referenceQuery(nodes[0], namespace=True)
|
||||
|
||||
shapes = cmds.ls(nodes, shapes=True, long=True)
|
||||
|
||||
|
|
@ -57,12 +63,12 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
|
|||
for node in newNodes:
|
||||
try:
|
||||
roots.add(pm.PyNode(node).getAllParents()[-2])
|
||||
except:
|
||||
except: # noqa: E722
|
||||
pass
|
||||
for root in roots:
|
||||
root.setParent(world=True)
|
||||
|
||||
groupNode.root().zeroTransformPivots()
|
||||
groupNode.zeroTransformPivots()
|
||||
for root in roots:
|
||||
root.setParent(groupNode)
|
||||
|
||||
|
|
@ -95,23 +101,39 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
|
|||
cmds.setAttr(groupName + ".selectHandleY", cy)
|
||||
cmds.setAttr(groupName + ".selectHandleZ", cz)
|
||||
|
||||
if data.get("post_process", True):
|
||||
if family == "rig":
|
||||
self._post_process_rig(name, namespace, context, data)
|
||||
|
||||
return newNodes
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
||||
def _post_process_rig(self, name, namespace, context, data):
|
||||
|
||||
# for backwards compatibility
|
||||
class AbcLoader(ReferenceLoader):
|
||||
label = "Deprecated loader (don't use)"
|
||||
families = ["pointcache", "animation"]
|
||||
representations = ["abc"]
|
||||
tool_names = []
|
||||
output = next((node for node in self if
|
||||
node.endswith("out_SET")), None)
|
||||
controls = next((node for node in self if
|
||||
node.endswith("controls_SET")), None)
|
||||
|
||||
assert output, "No out_SET in rig, this is a bug."
|
||||
assert controls, "No controls_SET in rig, this is a bug."
|
||||
|
||||
# for backwards compatibility
|
||||
class ModelLoader(ReferenceLoader):
|
||||
label = "Deprecated loader (don't use)"
|
||||
families = ["model", "pointcache"]
|
||||
representations = ["abc"]
|
||||
tool_names = []
|
||||
# Find the roots amongst the loaded nodes
|
||||
roots = cmds.ls(self[:], assemblies=True, long=True)
|
||||
assert roots, "No root nodes in rig, this is a bug."
|
||||
|
||||
asset = api.Session["AVALON_ASSET"]
|
||||
dependency = str(context["representation"]["_id"])
|
||||
|
||||
self.log.info("Creating subset: {}".format(namespace))
|
||||
|
||||
# Create the animation instance
|
||||
with maya.maintained_selection():
|
||||
cmds.select([output, controls] + roots, noExpand=True)
|
||||
api.create(name=namespace,
|
||||
asset=asset,
|
||||
family="animation",
|
||||
options={"useSelection": True},
|
||||
data={"dependencies": dependency})
|
||||
|
|
|
|||
|
|
@ -1,95 +0,0 @@
|
|||
from maya import cmds
|
||||
|
||||
import pype.maya.plugin
|
||||
from avalon import api, maya
|
||||
import os
|
||||
from pypeapp import config
|
||||
|
||||
|
||||
class RigLoader(pype.maya.plugin.ReferenceLoader):
|
||||
"""Specific loader for rigs
|
||||
|
||||
This automatically creates an instance for animators upon load.
|
||||
|
||||
"""
|
||||
|
||||
families = ["rig"]
|
||||
representations = ["ma"]
|
||||
|
||||
label = "Reference rig"
|
||||
order = -10
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def process_reference(self, context, name, namespace, data):
|
||||
|
||||
try:
|
||||
family = context["representation"]["context"]["family"]
|
||||
except ValueError:
|
||||
family = "rig"
|
||||
|
||||
groupName = "{}:{}".format(namespace, name)
|
||||
nodes = cmds.file(self.fname,
|
||||
namespace=namespace,
|
||||
reference=True,
|
||||
returnNewNodes=True,
|
||||
groupReference=True,
|
||||
groupName=groupName)
|
||||
|
||||
cmds.xform(groupName, pivots=(0, 0, 0))
|
||||
|
||||
presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
|
||||
colors = presets['plugins']['maya']['load']['colors']
|
||||
|
||||
c = colors.get(family)
|
||||
if c is not None:
|
||||
cmds.setAttr(groupName + ".useOutlinerColor", 1)
|
||||
cmds.setAttr(groupName + ".outlinerColor",
|
||||
c[0], c[1], c[2])
|
||||
|
||||
shapes = cmds.ls(nodes, shapes=True, long=True)
|
||||
print(shapes)
|
||||
|
||||
newNodes = (list(set(nodes) - set(shapes)))
|
||||
print(newNodes)
|
||||
|
||||
# Store for post-process
|
||||
self[:] = newNodes
|
||||
if data.get("post_process", True):
|
||||
self._post_process(name, namespace, context, data)
|
||||
|
||||
return newNodes
|
||||
|
||||
def _post_process(self, name, namespace, context, data):
|
||||
|
||||
# TODO(marcus): We are hardcoding the name "out_SET" here.
|
||||
# Better register this keyword, so that it can be used
|
||||
# elsewhere, such as in the Integrator plug-in,
|
||||
# without duplication.
|
||||
|
||||
output = next((node for node in self if
|
||||
node.endswith("out_SET")), None)
|
||||
controls = next((node for node in self if
|
||||
node.endswith("controls_SET")), None)
|
||||
|
||||
assert output, "No out_SET in rig, this is a bug."
|
||||
assert controls, "No controls_SET in rig, this is a bug."
|
||||
|
||||
# Find the roots amongst the loaded nodes
|
||||
roots = cmds.ls(self[:], assemblies=True, long=True)
|
||||
assert roots, "No root nodes in rig, this is a bug."
|
||||
|
||||
asset = api.Session["AVALON_ASSET"]
|
||||
dependency = str(context["representation"]["_id"])
|
||||
|
||||
# Create the animation instance
|
||||
with maya.maintained_selection():
|
||||
cmds.select([output, controls] + roots, noExpand=True)
|
||||
api.create(name=namespace,
|
||||
asset=asset,
|
||||
family="animation",
|
||||
options={"useSelection": True},
|
||||
data={"dependencies": dependency})
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
|
|
@ -117,7 +117,7 @@ class VRayProxyLoader(api.Loader):
|
|||
vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
|
||||
mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
|
||||
vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
|
||||
name="{}_VRMM".format(name))
|
||||
name="{}_VRMM".format(name))
|
||||
vray_mat_sg = cmds.sets(name="{}_VRSG".format(name),
|
||||
empty=True,
|
||||
renderable=True,
|
||||
|
|
|
|||
|
|
@ -21,15 +21,17 @@ class CollectAssData(pyblish.api.InstancePlugin):
|
|||
objsets = instance.data['setMembers']
|
||||
|
||||
for objset in objsets:
|
||||
objset = str(objset)
|
||||
members = cmds.sets(objset, query=True)
|
||||
if members is None:
|
||||
self.log.warning("Skipped empty instance: \"%s\" " % objset)
|
||||
continue
|
||||
if objset == "content_SET":
|
||||
if "content_SET" in objset:
|
||||
instance.data['setMembers'] = members
|
||||
elif objset == "proxy_SET":
|
||||
self.log.debug('content members: {}'.format(members))
|
||||
elif objset.startswith("proxy_SET"):
|
||||
assert len(members) == 1, "You have multiple proxy meshes, please only use one"
|
||||
instance.data['proxy'] = members
|
||||
|
||||
self.log.debug('proxy members: {}'.format(members))
|
||||
|
||||
self.log.debug("data: {}".format(instance.data))
|
||||
|
|
|
|||
|
|
@ -219,10 +219,6 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
with lib.renderlayer(instance.data["renderlayer"]):
|
||||
self.collect(instance)
|
||||
|
||||
# make ftrack publishable
|
||||
self.maketx = instance.data.get('maketx', True)
|
||||
instance.data['maketx'] = self.maketx
|
||||
self.log.info('maketx: {}'.format(self.maketx))
|
||||
|
||||
def collect(self, instance):
|
||||
|
||||
|
|
@ -297,9 +293,11 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
|
||||
self.log.info("Collected file nodes:\n{}".format(files))
|
||||
# Collect textures if any file nodes are found
|
||||
instance.data["resources"] = [self.collect_resource(n)
|
||||
for n in files]
|
||||
self.log.info("Collected resources:\n{}".format(instance.data["resources"]))
|
||||
instance.data["resources"] = []
|
||||
for n in files:
|
||||
instance.data["resources"].append(self.collect_resource(n))
|
||||
|
||||
self.log.info("Collected resources: {}".format(instance.data["resources"]))
|
||||
|
||||
# Log a warning when no relevant sets were retrieved for the look.
|
||||
if not instance.data["lookData"]["relationships"]:
|
||||
|
|
@ -423,7 +421,7 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
|
||||
self.log.debug("processing: {}".format(node))
|
||||
if cmds.nodeType(node) == 'file':
|
||||
self.log.debug("file node")
|
||||
self.log.debug(" - file node")
|
||||
attribute = "{}.fileTextureName".format(node)
|
||||
computed_attribute = "{}.computedFileTextureNamePattern".format(node)
|
||||
elif cmds.nodeType(node) == 'aiImage':
|
||||
|
|
@ -431,7 +429,7 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
attribute = "{}.filename".format(node)
|
||||
computed_attribute = attribute
|
||||
source = cmds.getAttr(attribute)
|
||||
|
||||
self.log.info(" - file source: {}".format(source))
|
||||
color_space_attr = "{}.colorSpace".format(node)
|
||||
color_space = cmds.getAttr(color_space_attr)
|
||||
# Compare with the computed file path, e.g. the one with the <UDIM>
|
||||
|
|
@ -455,6 +453,13 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
if len(files) == 0:
|
||||
self.log.error("No valid files found from node `%s`" % node)
|
||||
|
||||
self.log.info("collection of resource done:")
|
||||
self.log.info(" - node: {}".format(node))
|
||||
self.log.info(" - attribute: {}".format(attribute))
|
||||
self.log.info(" - source: {}".format(source))
|
||||
self.log.info(" - file: {}".format(files))
|
||||
self.log.info(" - color space: {}".format(color_space))
|
||||
|
||||
# Define the resource
|
||||
return {"node": node,
|
||||
"attribute": attribute,
|
||||
|
|
|
|||
|
|
@ -119,11 +119,15 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
texture_filenames = []
|
||||
if image_search_paths:
|
||||
|
||||
|
||||
# TODO: Somehow this uses OS environment path separator, `:` vs `;`
|
||||
# Later on check whether this is pipeline OS cross-compatible.
|
||||
image_search_paths = [p for p in
|
||||
image_search_paths.split(os.path.pathsep) if p]
|
||||
|
||||
# find all ${TOKEN} tokens and replace them with $TOKEN env. variable
|
||||
image_search_paths = self._replace_tokens(image_search_paths)
|
||||
|
||||
# List all related textures
|
||||
texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
|
||||
self.log.info("Found %i texture(s)" % len(texture_filenames))
|
||||
|
|
@ -140,6 +144,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
"atttribute'" % node)
|
||||
|
||||
# Collect all texture files
|
||||
# find all ${TOKEN} tokens and replace them with $TOKEN env. variable
|
||||
texture_filenames = self._replace_tokens(texture_filenames)
|
||||
for texture in texture_filenames:
|
||||
|
||||
files = []
|
||||
|
|
@ -283,3 +289,20 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
|
|||
collection, remainder = clique.assemble(files, patterns=pattern)
|
||||
|
||||
return collection
|
||||
|
||||
def _replace_tokens(self, strings):
|
||||
env_re = re.compile(r"\$\{(\w+)\}")
|
||||
|
||||
replaced = []
|
||||
for s in strings:
|
||||
matches = re.finditer(env_re, s)
|
||||
for m in matches:
|
||||
try:
|
||||
s = s.replace(m.group(), os.environ[m.group(1)])
|
||||
except KeyError:
|
||||
msg = "Cannot find requested {} in environment".format(
|
||||
m.group(1))
|
||||
self.log.error(msg)
|
||||
raise RuntimeError(msg)
|
||||
replaced.append(s)
|
||||
return replaced
|
||||
|
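The comments above describe how ${TOKEN} occurrences are expanded from the environment. As a minimal standalone sketch (not part of this commit; the variable name and value below are made up for illustration), the same substitution can be exercised like this:

    import os
    import re

    def replace_tokens(strings):
        # Expand ${TOKEN} occurrences using the current environment,
        # failing loudly when a token has no matching variable.
        env_re = re.compile(r"\$\{(\w+)\}")
        replaced = []
        for s in strings:
            for m in env_re.finditer(s):
                if m.group(1) not in os.environ:
                    raise RuntimeError(
                        "Cannot find requested {} in environment".format(
                            m.group(1)))
                s = s.replace(m.group(), os.environ[m.group(1)])
            replaced.append(s)
        return replaced

    os.environ["JOB_ROOT"] = "/projects/demo"  # made-up example value
    print(replace_tokens(["${JOB_ROOT}/textures"]))  # -> ['/projects/demo/textures']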
|
|
|||
|
|
@ -17,11 +17,15 @@ class ExtractAssStandin(pype.api.Extractor):
|
|||
label = "Ass Standin (.ass)"
|
||||
hosts = ["maya"]
|
||||
families = ["ass"]
|
||||
asciiAss = False
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
sequence = instance.data.get("exportSequence", False)
|
||||
|
||||
staging_dir = self.staging_dir(instance)
|
||||
filename = "{}.ass".format(instance.name)
|
||||
filenames = list()
|
||||
file_path = os.path.join(staging_dir, filename)
|
||||
|
||||
# Write out .ass file
|
||||
|
|
@ -29,13 +33,49 @@ class ExtractAssStandin(pype.api.Extractor):
|
|||
with avalon.maya.maintained_selection():
|
||||
self.log.info("Writing: {}".format(instance.data["setMembers"]))
|
||||
cmds.select(instance.data["setMembers"], noExpand=True)
|
||||
cmds.arnoldExportAss( filename=file_path,
|
||||
selected=True,
|
||||
asciiAss=True,
|
||||
shadowLinks=True,
|
||||
lightLinks=True,
|
||||
boundingBox=True
|
||||
)
|
||||
|
||||
if sequence:
|
||||
self.log.info("Extracting ass sequence")
|
||||
|
||||
# Collect the start and end including handles
|
||||
start = instance.data.get("frameStart", 1)
|
||||
end = instance.data.get("frameEnd", 1)
|
||||
handles = instance.data.get("handles", 0)
|
||||
step = instance.data.get("step", 0)
|
||||
if handles:
|
||||
start -= handles
|
||||
end += handles
|
||||
|
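# Illustrative example (values not from this commit): with frameStart=1001,
# frameEnd=1010 and handles=5, the range written out below becomes 996-1015.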
||||
exported_files = cmds.arnoldExportAss(filename=file_path,
|
||||
selected=True,
|
||||
asciiAss=self.asciiAss,
|
||||
shadowLinks=True,
|
||||
lightLinks=True,
|
||||
boundingBox=True,
|
||||
startFrame=start,
|
||||
endFrame=end,
|
||||
frameStep=step
|
||||
)
|
||||
for file in exported_files:
|
||||
filenames.append(os.path.split(file)[1])
|
||||
self.log.info("Exported: {}".format(filenames))
|
||||
else:
|
||||
self.log.info("Extracting ass")
|
||||
cmds.arnoldExportAss(filename=file_path,
|
||||
selected=True,
|
||||
asciiAss=False,
|
||||
shadowLinks=True,
|
||||
lightLinks=True,
|
||||
boundingBox=True
|
||||
)
|
||||
self.log.info("Extracted {}".format(filename))
|
||||
filenames = filename
|
||||
optionals = [
|
||||
"frameStart", "frameEnd", "step", "handles",
|
||||
"handleEnd", "handleStart"
|
||||
]
|
||||
for key in optionals:
|
||||
instance.data.pop(key, None)
|
||||
|
||||
if "representations" not in instance.data:
|
||||
instance.data["representations"] = []
|
||||
|
|
@ -43,9 +83,13 @@ class ExtractAssStandin(pype.api.Extractor):
|
|||
representation = {
|
||||
'name': 'ass',
|
||||
'ext': 'ass',
|
||||
'files': filename,
|
||||
'files': filenames,
|
||||
"stagingDir": staging_dir
|
||||
}
|
||||
|
||||
if sequence:
|
||||
representation['frameStart'] = start
|
||||
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
self.log.info("Extracted instance '%s' to: %s"
|
||||
|
|
|
|||
|
|
@ -43,8 +43,13 @@ class ExtractAssProxy(pype.api.Extractor):
|
|||
|
||||
# Get only the shape contents we need in such a way that we avoid
|
||||
# taking along intermediateObjects
|
||||
members = instance.data['proxy']
|
||||
members = cmds.ls(members,
|
||||
proxy = instance.data.get('proxy', None)
|
||||
|
||||
if not proxy:
|
||||
self.log.info("no proxy mesh")
|
||||
return
|
||||
|
||||
members = cmds.ls(proxy,
|
||||
dag=True,
|
||||
transforms=True,
|
||||
noIntermediate=True)
|
||||
|
|
|
|||
|
|
@ -38,11 +38,7 @@ def source_hash(filepath, *args):
|
|||
file_name = os.path.basename(filepath)
|
||||
time = str(os.path.getmtime(filepath))
|
||||
size = str(os.path.getsize(filepath))
|
||||
return "|".join([
|
||||
file_name,
|
||||
time,
|
||||
size
|
||||
] + list(args)).replace(".", ",")
|
||||
return "|".join([file_name, time, size] + list(args)).replace(".", ",")
|
||||
|
||||
|
||||
def find_paths_by_hash(texture_hash):
|
||||
|
|
@ -64,36 +60,33 @@ def maketx(source, destination, *args):
|
|||
"""
|
||||
|
||||
cmd = [
|
||||
"maketx",
|
||||
"-v", # verbose
|
||||
"-u", # update mode
|
||||
# unpremultiply before conversion (recommended when alpha present)
|
||||
"--unpremult",
|
||||
"--checknan",
|
||||
# use oiio-optimized settings for tile-size, planarconfig, metadata
|
||||
"--oiio",
|
||||
"--filter lanczos3"
|
||||
]
|
||||
"maketx",
|
||||
"-v", # verbose
|
||||
"-u", # update mode
|
||||
# unpremultiply before conversion (recommended when alpha present)
|
||||
"--unpremult",
|
||||
"--checknan",
|
||||
# use oiio-optimized settings for tile-size, planarconfig, metadata
|
||||
"--oiio",
|
||||
"--filter lanczos3",
|
||||
]
|
||||
|
||||
cmd.extend(args)
|
||||
cmd.extend([
|
||||
"-o", destination,
|
||||
source
|
||||
])
|
||||
cmd.extend(["-o", destination, source])
|
||||
|
||||
cmd = " ".join(cmd)
|
||||
|
||||
CREATE_NO_WINDOW = 0x08000000
|
||||
kwargs = dict(
|
||||
args=cmd,
|
||||
stderr=subprocess.STDOUT
|
||||
)
|
||||
kwargs = dict(args=cmd, stderr=subprocess.STDOUT)
|
||||
|
||||
if sys.platform == "win32":
|
||||
kwargs["creationflags"] = CREATE_NO_WIDOW
|
||||
kwargs["creationflags"] = CREATE_NO_WINDOW
|
||||
try:
|
||||
out = subprocess.check_output(**kwargs)
|
||||
except subprocess.CalledProcessError as exc:
|
||||
print(exc)
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
raise
|
||||
|
||||
|
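For orientation, a call like maketx("texture.exr", "texture.tx") through the helper above would assemble roughly this command line (the file names are illustrative only, not from this commit):

    maketx -v -u --unpremult --checknan --oiio --filter lanczos3 -o texture.tx texture.exr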
|
@ -180,41 +173,51 @@ class ExtractLook(pype.api.Extractor):
|
|||
# Preserve color space values (force value after filepath change)
|
||||
# This will also trigger in the same order at end of context to
|
||||
# ensure after context it's still the original value.
|
||||
color_space = resource.get('color_space')
|
||||
color_space = resource.get("color_space")
|
||||
|
||||
for f in resource["files"]:
|
||||
|
||||
files_metadata[os.path.normpath(f)] = {'color_space': color_space}
|
||||
files_metadata[os.path.normpath(f)] = {
|
||||
"color_space": color_space}
|
||||
# files.update(os.path.normpath(f))
|
||||
|
||||
# Process the resource files
|
||||
transfers = list()
|
||||
hardlinks = list()
|
||||
hashes = dict()
|
||||
forceCopy = instance.data.get("forceCopy", False)
|
||||
|
||||
self.log.info(files)
|
||||
for filepath in files_metadata:
|
||||
|
||||
cspace = files_metadata[filepath]['color_space']
|
||||
cspace = files_metadata[filepath]["color_space"]
|
||||
linearise = False
|
||||
if cspace == 'sRGB':
|
||||
if cspace == "sRGB":
|
||||
linearise = True
|
||||
# set its file node to 'raw' as tx will be linearized
|
||||
files_metadata[filepath]["color_space"] = "raw"
|
||||
|
||||
source, mode, hash = self._process_texture(
|
||||
filepath, do_maketx, staging=dir_path, linearise=linearise
|
||||
)
|
||||
destination = self.resource_destination(
|
||||
instance, source, do_maketx
|
||||
filepath,
|
||||
do_maketx,
|
||||
staging=dir_path,
|
||||
linearise=linearise,
|
||||
force=forceCopy
|
||||
)
|
||||
destination = self.resource_destination(instance,
|
||||
source,
|
||||
do_maketx)
|
||||
|
||||
# Force copy is specified.
|
||||
if instance.data.get("forceCopy", False):
|
||||
if forceCopy:
|
||||
mode = COPY
|
||||
|
||||
if mode == COPY:
|
||||
transfers.append((source, destination))
|
||||
self.log.info('copying')
|
||||
elif mode == HARDLINK:
|
||||
hardlinks.append((source, destination))
|
||||
self.log.info('hardlinking')
|
||||
|
||||
# Store the hashes from hash to destination to include in the
|
||||
# database
|
||||
|
|
@ -235,13 +238,14 @@ class ExtractLook(pype.api.Extractor):
|
|||
# Preserve color space values (force value after filepath change)
|
||||
# This will also trigger in the same order at end of context to
|
||||
# ensure after context it's still the original value.
|
||||
color_space_attr = resource['node'] + ".colorSpace"
|
||||
color_space_attr = resource["node"] + ".colorSpace"
|
||||
color_space = cmds.getAttr(color_space_attr)
|
||||
|
||||
if files_metadata[source]["color_space"] == "raw":
|
||||
# set colorpsace to raw if we linearized it
|
||||
color_space = "Raw"
|
||||
# Remap file node filename to destination
|
||||
attr = resource['attribute']
|
||||
attr = resource["attribute"]
|
||||
remap[attr] = destinations[source]
|
||||
|
||||
remap[color_space_attr] = color_space
|
||||
|
||||
self.log.info("Finished remapping destinations ...")
|
||||
|
|
@ -268,13 +272,15 @@ class ExtractLook(pype.api.Extractor):
|
|||
channels=True,
|
||||
constraints=True,
|
||||
expressions=True,
|
||||
constructionHistory=True
|
||||
constructionHistory=True,
|
||||
)
|
||||
|
||||
# Write the JSON data
|
||||
self.log.info("Extract json..")
|
||||
data = {"attributes": lookdata["attributes"],
|
||||
"relationships": relationships}
|
||||
data = {
|
||||
"attributes": lookdata["attributes"],
|
||||
"relationships": relationships
|
||||
}
|
||||
|
||||
with open(json_path, "w") as f:
|
||||
json.dump(data, f)
|
||||
|
|
@ -293,7 +299,7 @@ class ExtractLook(pype.api.Extractor):
|
|||
instance.data["representations"].append(
|
||||
{
|
||||
"name": "ma",
|
||||
"ext": 'ma',
|
||||
"ext": "ma",
|
||||
"files": os.path.basename(maya_fname),
|
||||
"stagingDir": os.path.dirname(maya_fname),
|
||||
}
|
||||
|
|
@ -301,7 +307,7 @@ class ExtractLook(pype.api.Extractor):
|
|||
instance.data["representations"].append(
|
||||
{
|
||||
"name": "json",
|
||||
"ext": 'json',
|
||||
"ext": "json",
|
||||
"files": os.path.basename(json_fname),
|
||||
"stagingDir": os.path.dirname(json_fname),
|
||||
}
|
||||
|
|
@ -314,13 +320,18 @@ class ExtractLook(pype.api.Extractor):
|
|||
# Source hash for the textures
|
||||
instance.data["sourceHashes"] = hashes
|
||||
|
||||
self.log.info("Extracted instance '%s' to: %s" % (
|
||||
instance.name, maya_path)
|
||||
)
|
||||
"""
|
||||
self.log.info("Returning colorspaces to their original values ...")
|
||||
for attr, value in remap.items():
|
||||
self.log.info(" - {}: {}".format(attr, value))
|
||||
cmds.setAttr(attr, value, type="string")
|
||||
"""
|
||||
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
|
||||
maya_path))
|
||||
|
||||
def resource_destination(self, instance, filepath, do_maketx):
|
||||
|
||||
anatomy = instance.context.data['anatomy']
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
|
||||
self.create_destination_template(instance, anatomy)
|
||||
|
||||
|
|
@ -332,12 +343,10 @@ class ExtractLook(pype.api.Extractor):
|
|||
ext = ".tx"
|
||||
|
||||
return os.path.join(
|
||||
instance.data["assumedDestination"],
|
||||
"resources",
|
||||
basename + ext
|
||||
instance.data["assumedDestination"], "resources", basename + ext
|
||||
)
|
||||
|
||||
def _process_texture(self, filepath, do_maketx, staging, linearise):
|
||||
def _process_texture(self, filepath, do_maketx, staging, linearise, force):
|
||||
"""Process a single texture file on disk for publishing.
|
||||
This will:
|
||||
1. Check whether it's already published, if so it will do hardlink
|
||||
|
|
@ -359,24 +368,20 @@ class ExtractLook(pype.api.Extractor):
|
|||
# If source has been published before with the same settings,
|
||||
# then don't reprocess but hardlink from the original
|
||||
existing = find_paths_by_hash(texture_hash)
|
||||
if existing:
|
||||
if existing and not force:
|
||||
self.log.info("Found hash in database, preparing hardlink..")
|
||||
source = next((p for p in existing if os.path.exists(p)), None)
|
||||
if source:
|
||||
return source, HARDLINK, texture_hash
|
||||
else:
|
||||
self.log.warning(
|
||||
"Paths not found on disk, "
|
||||
"skipping hardlink: %s" % (existing,)
|
||||
("Paths not found on disk, "
|
||||
"skipping hardlink: %s") % (existing,)
|
||||
)
|
||||
|
||||
if do_maketx and ext != ".tx":
|
||||
# Produce .tx file in staging if source file is not .tx
|
||||
converted = os.path.join(
|
||||
staging,
|
||||
"resources",
|
||||
fname + ".tx"
|
||||
)
|
||||
converted = os.path.join(staging, "resources", fname + ".tx")
|
||||
|
||||
if linearise:
|
||||
self.log.info("tx: converting sRGB -> linear")
|
||||
|
|
@ -389,9 +394,15 @@ class ExtractLook(pype.api.Extractor):
|
|||
os.makedirs(os.path.dirname(converted))
|
||||
|
||||
self.log.info("Generating .tx file for %s .." % filepath)
|
||||
maketx(filepath, converted,
|
||||
# Include `source-hash` as string metadata
|
||||
"-sattrib", "sourceHash", texture_hash, colorconvert)
|
||||
maketx(
|
||||
filepath,
|
||||
converted,
|
||||
# Include `source-hash` as string metadata
|
||||
"-sattrib",
|
||||
"sourceHash",
|
||||
texture_hash,
|
||||
colorconvert,
|
||||
)
|
||||
|
||||
return converted, COPY, texture_hash
|
||||
|
||||
|
|
@ -417,58 +428,71 @@ class ExtractLook(pype.api.Extractor):
|
|||
project_name = api.Session["AVALON_PROJECT"]
|
||||
a_template = anatomy.templates
|
||||
|
||||
project = io.find_one({"type": "project",
|
||||
"name": project_name},
|
||||
projection={"config": True, "data": True})
|
||||
project = io.find_one(
|
||||
{
|
||||
"type": "project",
|
||||
"name": project_name
|
||||
},
|
||||
projection={"config": True, "data": True}
|
||||
)
|
||||
|
||||
template = a_template['publish']['path']
|
||||
template = a_template["publish"]["path"]
|
||||
# anatomy = instance.context.data['anatomy']
|
||||
|
||||
asset = io.find_one({"type": "asset",
|
||||
"name": asset_name,
|
||||
"parent": project["_id"]})
|
||||
asset = io.find_one({
|
||||
"type": "asset",
|
||||
"name": asset_name,
|
||||
"parent": project["_id"]
|
||||
})
|
||||
|
||||
assert asset, ("No asset found by the name '{}' "
|
||||
"in project '{}'".format(asset_name, project_name))
|
||||
silo = asset.get('silo')
|
||||
"in project '{}'").format(asset_name, project_name)
|
||||
silo = asset.get("silo")
|
||||
|
||||
subset = io.find_one({"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]})
|
||||
subset = io.find_one({
|
||||
"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]
|
||||
})
|
||||
|
||||
# assume there is no version yet, we start at `1`
|
||||
version = None
|
||||
version_number = 1
|
||||
if subset is not None:
|
||||
version = io.find_one({"type": "version",
|
||||
"parent": subset["_id"]},
|
||||
sort=[("name", -1)])
|
||||
version = io.find_one(
|
||||
{
|
||||
"type": "version",
|
||||
"parent": subset["_id"]
|
||||
},
|
||||
sort=[("name", -1)]
|
||||
)
|
||||
|
||||
# if there is a subset there ought to be version
|
||||
if version is not None:
|
||||
version_number += version["name"]
|
||||
|
||||
if instance.data.get('version'):
|
||||
version_number = int(instance.data.get('version'))
|
||||
if instance.data.get("version"):
|
||||
version_number = int(instance.data.get("version"))
|
||||
|
||||
padding = int(a_template['render']['padding'])
|
||||
padding = int(a_template["render"]["padding"])
|
||||
|
||||
hierarchy = asset['data']['parents']
|
||||
hierarchy = asset["data"]["parents"]
|
||||
if hierarchy:
|
||||
# hierarchy = os.path.sep.join(hierarchy)
|
||||
hierarchy = "/".join(hierarchy)
|
||||
|
||||
template_data = {"root": api.Session["AVALON_PROJECTS"],
|
||||
"project": {"name": project_name,
|
||||
"code": project['data']['code']},
|
||||
"silo": silo,
|
||||
"family": instance.data['family'],
|
||||
"asset": asset_name,
|
||||
"subset": subset_name,
|
||||
"frame": ('#' * padding),
|
||||
"version": version_number,
|
||||
"hierarchy": hierarchy,
|
||||
"representation": "TEMP"}
|
||||
template_data = {
|
||||
"root": api.Session["AVALON_PROJECTS"],
|
||||
"project": {"name": project_name, "code": project["data"]["code"]},
|
||||
"silo": silo,
|
||||
"family": instance.data["family"],
|
||||
"asset": asset_name,
|
||||
"subset": subset_name,
|
||||
"frame": ("#" * padding),
|
||||
"version": version_number,
|
||||
"hierarchy": hierarchy,
|
||||
"representation": "TEMP",
|
||||
}
|
||||
|
||||
instance.data["assumedTemplateData"] = template_data
|
||||
self.log.info(template_data)
|
||||
|
|
|
|||
|
|
@ -1,16 +1,14 @@
|
|||
import os
|
||||
import glob
|
||||
import contextlib
|
||||
import capture_gui
|
||||
import clique
|
||||
import capture
|
||||
#
|
||||
import pype.maya.lib as lib
|
||||
import pype.api
|
||||
#
|
||||
from maya import cmds, mel
|
||||
import pymel.core as pm
|
||||
# import ffmpeg
|
||||
# # from pype.scripts import otio_burnin
|
||||
# reload(ffmpeg)
|
||||
|
||||
|
||||
# TODO: move codec settings to presets
|
||||
|
|
@ -93,7 +91,18 @@ class ExtractQuicktime(pype.api.Extractor):
|
|||
pm.currentTime(refreshFrameInt, edit=True)
|
||||
|
||||
with maintained_time():
|
||||
playblast = capture_gui.lib.capture_scene(preset)
|
||||
filename = preset.get("filename", "%TEMP%")
|
||||
|
||||
# Force viewer to False in call to capture because we have our own
|
||||
# viewer opening call to allow a signal to trigger between playblast
|
||||
# and viewer
|
||||
preset['viewer'] = False
|
||||
|
||||
# Remove panel key since it's internal value to capture_gui
|
||||
preset.pop("panel", None)
|
||||
|
||||
path = capture.capture(**preset)
|
||||
playblast = self._fix_playblast_output_path(path)
|
||||
|
||||
self.log.info("file list {}".format(playblast))
|
||||
|
||||
|
|
@ -119,6 +128,46 @@ class ExtractQuicktime(pype.api.Extractor):
|
|||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
def _fix_playblast_output_path(self, filepath):
|
||||
"""Workaround a bug in maya.cmds.playblast to return correct filepath.
|
||||
|
||||
When the `viewer` argument is set to False and maya.cmds.playblast
|
||||
does not automatically open the playblasted file the returned
|
||||
filepath does not have the file's extension added correctly.
|
||||
|
||||
To workaround this we just glob.glob() for any file extensions and
|
||||
assume the latest modified file is the correct file and return it.
|
||||
|
||||
"""
|
||||
# Catch cancelled playblast
|
||||
if filepath is None:
|
||||
self.log.warning("Playblast did not result in output path. "
|
||||
"Playblast is probably interrupted.")
|
||||
return None
|
||||
|
||||
# Fix: playblast not returning correct filename (with extension)
|
||||
# Lets assume the most recently modified file is the correct one.
|
||||
if not os.path.exists(filepath):
|
||||
directory = os.path.dirname(filepath)
|
||||
filename = os.path.basename(filepath)
|
||||
# check if the filepath is has frame based filename
|
||||
# example : capture.####.png
|
||||
parts = filename.split(".")
|
||||
if len(parts) == 3:
|
||||
query = os.path.join(directory, "{}.*.{}".format(parts[0],
|
||||
parts[-1]))
|
||||
files = glob.glob(query)
|
||||
else:
|
||||
files = glob.glob("{}.*".format(filepath))
|
||||
|
||||
if not files:
|
||||
raise RuntimeError("Couldn't find playblast from: "
|
||||
"{0}".format(filepath))
|
||||
filepath = max(files, key=os.path.getmtime)
|
||||
|
||||
return filepath
|
||||
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def maintained_time():
|
||||
|
|
|
|||
|
|
@ -1,31 +1,14 @@
|
|||
import os
|
||||
import contextlib
|
||||
import time
|
||||
import sys
|
||||
import glob
|
||||
|
||||
import capture_gui
|
||||
import clique
|
||||
import capture
|
||||
|
||||
import pype.maya.lib as lib
|
||||
import pype.api
|
||||
|
||||
from maya import cmds
|
||||
import pymel.core as pm
|
||||
# import ffmpeg
|
||||
# reload(ffmpeg)
|
||||
|
||||
import avalon.maya
|
||||
|
||||
# import maya_utils as mu
|
||||
|
||||
# from tweakHUD import master
|
||||
# from tweakHUD import draft_hud as dHUD
|
||||
# from tweakHUD import ftrackStrings as fStrings
|
||||
|
||||
#
|
||||
# def soundOffsetFunc(oSF, SF, H):
|
||||
# tmOff = (oSF - H) - SF
|
||||
# return tmOff
|
||||
|
||||
|
||||
class ExtractThumbnail(pype.api.Extractor):
|
||||
|
|
@ -47,39 +30,8 @@ class ExtractThumbnail(pype.api.Extractor):
|
|||
end = cmds.currentTime(query=True)
|
||||
self.log.info("start: {}, end: {}".format(start, end))
|
||||
|
||||
members = instance.data['setMembers']
|
||||
camera = instance.data['review_camera']
|
||||
|
||||
# project_code = ftrack_data['Project']['code']
|
||||
# task_type = ftrack_data['Task']['type']
|
||||
#
|
||||
# # load Preset
|
||||
# studio_repos = os.path.abspath(os.environ.get('studio_repos'))
|
||||
# shot_preset_path = os.path.join(studio_repos, 'maya',
|
||||
# 'capture_gui_presets',
|
||||
# (project_code + '_' + task_type + '_' + asset + '.json'))
|
||||
#
|
||||
# task_preset_path = os.path.join(studio_repos, 'maya',
|
||||
# 'capture_gui_presets',
|
||||
# (project_code + '_' + task_type + '.json'))
|
||||
#
|
||||
# project_preset_path = os.path.join(studio_repos, 'maya',
|
||||
# 'capture_gui_presets',
|
||||
# (project_code + '.json'))
|
||||
#
|
||||
# default_preset_path = os.path.join(studio_repos, 'maya',
|
||||
# 'capture_gui_presets',
|
||||
# 'default.json')
|
||||
#
|
||||
# if os.path.isfile(shot_preset_path):
|
||||
# preset_to_use = shot_preset_path
|
||||
# elif os.path.isfile(task_preset_path):
|
||||
# preset_to_use = task_preset_path
|
||||
# elif os.path.isfile(project_preset_path):
|
||||
# preset_to_use = project_preset_path
|
||||
# else:
|
||||
# preset_to_use = default_preset_path
|
||||
|
||||
capture_preset = ""
|
||||
capture_preset = instance.context.data['presets']['maya']['capture']
|
||||
try:
|
||||
|
|
@ -126,7 +78,18 @@ class ExtractThumbnail(pype.api.Extractor):
|
|||
pm.currentTime(refreshFrameInt, edit=True)
|
||||
|
||||
with maintained_time():
|
||||
playblast = capture_gui.lib.capture_scene(preset)
|
||||
filename = preset.get("filename", "%TEMP%")
|
||||
|
||||
# Force viewer to False in call to capture because we have our own
|
||||
# viewer opening call to allow a signal to trigger between
|
||||
# playblast and viewer
|
||||
preset['viewer'] = False
|
||||
|
||||
# Remove panel key since it's internal value to capture_gui
|
||||
preset.pop("panel", None)
|
||||
|
||||
path = capture.capture(**preset)
|
||||
playblast = self._fix_playblast_output_path(path)
|
||||
|
||||
_, thumbnail = os.path.split(playblast)
|
||||
|
||||
|
|
@ -144,6 +107,45 @@ class ExtractThumbnail(pype.api.Extractor):
|
|||
}
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
def _fix_playblast_output_path(self, filepath):
|
||||
"""Workaround a bug in maya.cmds.playblast to return correct filepath.
|
||||
|
||||
When the `viewer` argument is set to False and maya.cmds.playblast
|
||||
does not automatically open the playblasted file the returned
|
||||
filepath does not have the file's extension added correctly.
|
||||
|
||||
To workaround this we just glob.glob() for any file extensions and
|
||||
assume the latest modified file is the correct file and return it.
|
||||
|
||||
"""
|
||||
# Catch cancelled playblast
|
||||
if filepath is None:
|
||||
self.log.warning("Playblast did not result in output path. "
|
||||
"Playblast is probably interrupted.")
|
||||
return None
|
||||
|
||||
# Fix: playblast not returning correct filename (with extension)
|
||||
# Lets assume the most recently modified file is the correct one.
|
||||
if not os.path.exists(filepath):
|
||||
directory = os.path.dirname(filepath)
|
||||
filename = os.path.basename(filepath)
|
||||
# check if the filepath is has frame based filename
|
||||
# example : capture.####.png
|
||||
parts = filename.split(".")
|
||||
if len(parts) == 3:
|
||||
query = os.path.join(directory, "{}.*.{}".format(parts[0],
|
||||
parts[-1]))
|
||||
files = glob.glob(query)
|
||||
else:
|
||||
files = glob.glob("{}.*".format(filepath))
|
||||
|
||||
if not files:
|
||||
raise RuntimeError("Couldn't find playblast from: "
|
||||
"{0}".format(filepath))
|
||||
filepath = max(files, key=os.path.getmtime)
|
||||
|
||||
return filepath
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def maintained_time():
|
||||
|
|
|
|||
|
|
@ -228,80 +228,19 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
"AuxFiles": []
|
||||
}
|
||||
|
||||
# Include critical environment variables with submission
|
||||
# We need those to pass them to pype for it to set correct context
|
||||
keys = [
|
||||
# This will trigger `userSetup.py` on the slave
|
||||
# such that proper initialisation happens the same
|
||||
# way as it does on a local machine.
|
||||
# TODO(marcus): This won't work if the slaves don't
|
||||
# have access to these paths, such as if slaves are
|
||||
# running Linux and the submitter is on Windows.
|
||||
"PYTHONPATH",
|
||||
"PATH",
|
||||
|
||||
"MTOA_EXTENSIONS_PATH",
|
||||
"MTOA_EXTENSIONS",
|
||||
"DYLD_LIBRARY_PATH",
|
||||
"MAYA_RENDER_DESC_PATH",
|
||||
"MAYA_MODULE_PATH",
|
||||
"ARNOLD_PLUGIN_PATH",
|
||||
"AVALON_SCHEMA",
|
||||
"FTRACK_API_KEY",
|
||||
"FTRACK_API_USER",
|
||||
"FTRACK_SERVER",
|
||||
"PYBLISHPLUGINPATH",
|
||||
|
||||
# todo: This is a temporary fix for yeti variables
"PEREGRINEL_LICENSE",
"SOLIDANGLE_LICENSE",
"ARNOLD_LICENSE",
"MAYA_MODULE_PATH",
"TOOL_ENV",
|
||||
"AVALON_PROJECT",
|
||||
"AVALON_ASSET",
|
||||
"AVALON_TASK",
|
||||
"PYPE_USERNAME"
|
||||
]
|
||||
|
||||
environment = dict({key: os.environ[key] for key in keys
|
||||
if key in os.environ}, **api.Session)
|
||||
# self.log.debug("enviro: {}".format(pprint(environment)))
|
||||
for path in os.environ:
|
||||
if path.lower().startswith('pype_'):
|
||||
environment[path] = os.environ[path]
|
||||
|
||||
environment["PATH"] = os.environ["PATH"]
|
||||
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
|
||||
clean_environment = {}
|
||||
for key in environment:
|
||||
clean_path = ""
|
||||
self.log.debug("key: {}".format(key))
|
||||
self.log.debug("value: {}".format(environment[key]))
|
||||
to_process = str(environment[key])
|
||||
if key == "PYPE_STUDIO_CORE_MOUNT":
|
||||
clean_path = to_process
|
||||
elif "://" in to_process:
|
||||
clean_path = to_process
|
||||
elif os.pathsep not in str(to_process):
|
||||
try:
|
||||
path = to_process
|
||||
path.decode('UTF-8', 'strict')
|
||||
clean_path = os.path.normpath(path)
|
||||
except UnicodeDecodeError:
|
||||
print('path contains non UTF characters')
|
||||
else:
|
||||
for path in to_process.split(os.pathsep):
|
||||
try:
|
||||
path.decode('UTF-8', 'strict')
|
||||
clean_path += os.path.normpath(path) + os.pathsep
|
||||
except UnicodeDecodeError:
|
||||
print('path contains non UTF characters')
|
||||
|
||||
if key == "PYTHONPATH":
|
||||
clean_path = clean_path.replace('python2', 'python3')
|
||||
clean_path = clean_path.replace(
|
||||
os.path.normpath(
|
||||
environment['PYPE_STUDIO_CORE_MOUNT']), # noqa
|
||||
os.path.normpath(
|
||||
environment['PYPE_STUDIO_CORE_PATH'])) # noqa
|
||||
clean_environment[key] = clean_path
|
||||
|
||||
environment = clean_environment
|
||||
|
||||
payload["JobInfo"].update({
|
||||
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
|
||||
|
|
@ -319,7 +258,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
|
||||
self.preflight_check(instance)
|
||||
|
||||
self.log.info("Submitting..")
|
||||
self.log.info("Submitting ...")
|
||||
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
|
||||
|
||||
# E.g. http://192.168.0.1:8082/api/jobs
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import pyblish.api
|
||||
|
||||
import avalon.io as io
|
||||
from avalon import io
|
||||
|
||||
import pype.api
|
||||
import pype.maya.action
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import pyblish.api
|
||||
import pype.api
|
||||
|
||||
import avalon.io as io
|
||||
from avalon import io
|
||||
import pype.maya.action
|
||||
|
||||
from pype.maya import lib
|
||||
|
|
@ -38,9 +38,13 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin):
|
|||
invalid = list()
|
||||
|
||||
asset = instance.data['asset']
|
||||
asset_data = io.find_one({"name": asset,
|
||||
"type": "asset"},
|
||||
projection={"_id": True})
|
||||
asset_data = io.find_one(
|
||||
{
|
||||
"name": asset,
|
||||
"type": "asset"
|
||||
},
|
||||
projection={"_id": True}
|
||||
)
|
||||
asset_id = str(asset_data['_id'])
|
||||
|
||||
# We do want to check the referenced nodes as we it might be
|
||||
|
|
|
|||
|
|
@ -49,9 +49,10 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
|
|||
"""Check if subset is registered in the database under the asset"""
|
||||
|
||||
asset = io.find_one({"type": "asset", "name": asset_name})
|
||||
is_valid = io.find_one({"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]})
|
||||
is_valid = io.find_one({
|
||||
"type": "subset",
|
||||
"name": subset_name,
|
||||
"parent": asset["_id"]
|
||||
})
|
||||
|
||||
return is_valid
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import nuke
|
||||
import os
|
||||
import pyblish.api
|
||||
import avalon.io as io
|
||||
from avalon import io
|
||||
# TODO: add repair function
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,16 +1,52 @@
|
|||
from avalon.nuke.pipeline import Creator
|
||||
|
||||
from avalon.nuke import lib as anlib
|
||||
import nuke
|
||||
|
||||
class CreateBackdrop(Creator):
|
||||
"""Add Publishable Backdrop"""
|
||||
|
||||
name = "backdrop"
|
||||
label = "Backdrop"
|
||||
family = "group"
|
||||
icon = "cube"
|
||||
name = "nukenodes"
|
||||
label = "Create Backdrop"
|
||||
family = "nukenodes"
|
||||
icon = "file-archive-o"
|
||||
defaults = ["Main"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateBackdrop, self).__init__(*args, **kwargs)
|
||||
|
||||
self.nodes = nuke.selectedNodes()
|
||||
self.node_color = "0xdfea5dff"
|
||||
return
|
||||
|
||||
def process(self):
|
||||
from nukescripts import autoBackdrop
|
||||
nodes = list()
|
||||
if (self.options or {}).get("useSelection"):
|
||||
nodes = self.nodes
|
||||
|
||||
if len(nodes) >= 1:
|
||||
anlib.select_nodes(nodes)
|
||||
bckd_node = autoBackdrop()
|
||||
bckd_node["name"].setValue("{}_BDN".format(self.name))
|
||||
bckd_node["tile_color"].setValue(int(self.node_color, 16))
|
||||
bckd_node["note_font_size"].setValue(24)
|
||||
bckd_node["label"].setValue("[{}]".format(self.name))
|
||||
# add avalon knobs
|
||||
instance = anlib.imprint(bckd_node, self.data)
|
||||
|
||||
return instance
|
||||
else:
|
||||
msg = "Please select nodes you "
|
||||
"wish to add to a container"
|
||||
self.log.error(msg)
|
||||
nuke.message(msg)
|
||||
return
|
||||
else:
|
||||
bckd_node = autoBackdrop()
|
||||
bckd_node["name"].setValue("{}_BDN".format(self.name))
|
||||
bckd_node["tile_color"].setValue(int(self.node_color, 16))
|
||||
bckd_node["note_font_size"].setValue(24)
|
||||
bckd_node["label"].setValue("[{}]".format(self.name))
|
||||
# add avalon knobs
|
||||
instance = anlib.imprint(bckd_node, self.data)
|
||||
|
||||
return instance
|
||||
|
|
|
|||
pype/plugins/nuke/create/create_gizmo.py (new file, 83 lines)
|
|
@ -0,0 +1,83 @@
|
|||
from avalon.nuke.pipeline import Creator
|
||||
from avalon.nuke import lib as anlib
|
||||
import nuke
|
||||
import nukescripts
|
||||
|
||||
class CreateGizmo(Creator):
|
||||
"""Add Publishable "gizmo" group
|
||||
|
||||
The name "gizmo" is used because it is familiar to Nuke users:
a group of nodes that is reused downstream in the workflow.
|
||||
"""
|
||||
|
||||
name = "gizmo"
|
||||
label = "Gizmo"
|
||||
family = "gizmo"
|
||||
icon = "file-archive-o"
|
||||
defaults = ["ViewerInput", "Lut", "Effect"]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateGizmo, self).__init__(*args, **kwargs)
|
||||
self.nodes = nuke.selectedNodes()
|
||||
self.node_color = "0x7533c1ff"
|
||||
return
|
||||
|
||||
def process(self):
|
||||
if (self.options or {}).get("useSelection"):
|
||||
nodes = self.nodes
|
||||
self.log.info(len(nodes))
|
||||
if len(nodes) == 1:
|
||||
anlib.select_nodes(nodes)
|
||||
node = nodes[-1]
|
||||
# check if Group node
|
||||
if node.Class() in "Group":
|
||||
node["name"].setValue("{}_GZM".format(self.name))
|
||||
node["tile_color"].setValue(int(self.node_color, 16))
|
||||
return anlib.imprint(node, self.data)
|
||||
else:
|
||||
msg = ("Please select a group node "
|
||||
"you wish to publish as the gizmo")
|
||||
self.log.error(msg)
|
||||
nuke.message(msg)
|
||||
|
||||
if len(nodes) >= 2:
|
||||
anlib.select_nodes(nodes)
|
||||
nuke.makeGroup()
|
||||
gizmo_node = nuke.selectedNode()
|
||||
gizmo_node["name"].setValue("{}_GZM".format(self.name))
|
||||
gizmo_node["tile_color"].setValue(int(self.node_color, 16))
|
||||
|
||||
# add sticky node with guide
|
||||
with gizmo_node:
|
||||
sticky = nuke.createNode("StickyNote")
|
||||
sticky["label"].setValue(
|
||||
"Add following:\n- set Input"
|
||||
" nodes\n- set one Output1\n"
|
||||
"- create User knobs on the group")
|
||||
|
||||
# add avalon knobs
|
||||
return anlib.imprint(gizmo_node, self.data)
|
||||
|
||||
else:
|
||||
msg = ("Please select nodes you "
|
||||
"wish to add to the gizmo")
|
||||
self.log.error(msg)
|
||||
nuke.message(msg)
|
||||
return
|
||||
else:
|
||||
with anlib.maintained_selection():
|
||||
gizmo_node = nuke.createNode("Group")
|
||||
gizmo_node["name"].setValue("{}_GZM".format(self.name))
|
||||
gizmo_node["tile_color"].setValue(int(self.node_color, 16))
|
||||
|
||||
# add sticky node wit guide
|
||||
with gizmo_node:
|
||||
sticky = nuke.createNode("StickyNote")
|
||||
sticky["label"].setValue(
|
||||
"Add following:\n- add Input"
|
||||
" nodes\n- add one Output1\n"
|
||||
"- create User knobs on the group")
|
||||
|
||||
# add avalon knobs
|
||||
return anlib.imprint(gizmo_node, self.data)
|
||||
Some files were not shown because too many files have changed in this diff.