Merge branch 'develop' into feature/PYPE-762_multi_root

iLLiCiTiT 2020-05-11 16:34:09 +02:00
commit 8438de5816
131 changed files with 32461 additions and 11610 deletions

View file

@@ -0,0 +1,92 @@
import os
import pyblish.api
from avalon import (
io,
api as avalon
)
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
"""
Collecting temp json data sent from a host context
and path for returning json data back to the host itself.
Setting avalon session into correct context.
Args:
context (obj): pyblish context session
"""
label = "AdobeCommunicator Collect Context"
order = pyblish.api.CollectorOrder - 0.49
def process(self, context):
self.log.info(
"registred_hosts: `{}`".format(pyblish.api.registered_hosts()))
io.install()
# get json paths from data
input_json_path = os.environ.get("AC_PUBLISH_INPATH")
output_json_path = os.environ.get("AC_PUBLISH_OUTPATH")
rqst_json_data_path = Path(input_json_path)
post_json_data_path = Path(output_json_path)
context.data['post_json_data_path'] = str(post_json_data_path)
# get avalon session data and convert \ to /
_S = avalon.session
projects = Path(_S["AVALON_PROJECTS"]).resolve()
asset = _S["AVALON_ASSET"]
workdir = Path(_S["AVALON_WORKDIR"]).resolve()
_S["AVALON_PROJECTS"] = str(projects)
_S["AVALON_WORKDIR"] = str(workdir)
context.data["avalonSession"] = _S
self.log.info(f"__ avalonSession: `{_S}`")
# get staging directory from received path to json
context.data["stagingDir"] = post_json_data_path.parent
# get data from received json file
with rqst_json_data_path.open(mode='r') as f:
context.data["jsonData"] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
host = json_data.get("host", None)
host_version = json_data.get("hostVersion", None)
assert host, "No `host` data in json file"
assert host_version, "No `hostVersion` data in json file"
context.data["host"] = _S["AVALON_APP"] = host
context.data["hostVersion"] = \
_S["AVALON_APP_VERSION"] = host_version
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = str(Path(current_file).resolve())
# get project data from avalon
project_data = io.find_one({'type': 'project'})
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = io.find_one({
"type": 'asset',
"name": asset
})["data"]
assert asset_data, "No `asset_data` data in avalon db"
context.data["assetData"] = asset_data
self.log.debug("asset_data: {}".format(asset_data))
self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
self.log.info("post_json_data_path is: {}".format(post_json_data_path))
# self.log.info("avalon.session is: {}".format(avalon.session))

View file

@@ -1,12 +1,5 @@
import os
import json
import pyblish.api
from avalon import (
io,
api as avalon
)
from pype import api as pype
class CollectInstancesFromJson(pyblish.api.ContextPlugin):
@@ -26,7 +19,11 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
def process(self, context):
a_session = context.data.get("avalonSession")
_S = context.data["avalonSession"]
asset = _S["AVALON_ASSET"]
task = _S["AVALON_TASK"]
host = _S["AVALON_APP"]
json_data = context.data.get("jsonData", None)
assert json_data, "No `json_data` data in json file"
@@ -36,96 +33,91 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
staging_dir = json_data.get("stagingDir", None)
assert staging_dir, "No `stagingDir` path in json file"
presets = context.data["presets"]
rules_tasks = presets["rules_tasks"]
ftrack_types = rules_tasks["ftrackTypes"]
assert ftrack_types, "No `ftrack_types` data in `/templates/presets/[host]/rules_tasks.json` file"
host = context.data["host"]
presets = context.data["presets"][host]
context.data["ftrackTypes"] = ftrack_types
rules_tasks = presets["rules_tasks"]
asset_default = presets["asset_default"]
assert asset_default, "No `asset_default` data in `/templates/presets/[host]/asset_default.json` file"
asset_name = a_session["AVALON_ASSET"]
entity = io.find_one({"name": asset_name,
"type": "asset"})
assert asset_default, ("No `asset_default` data in "
"`/presets/[host]/asset_default.json` file")
# get frame start > first try from asset data
frame_start = context.data["assetData"].get("fstart", None)
frame_start = context.data["assetData"].get("frameStart", None)
if not frame_start:
self.log.debug("frame_start not on assetData")
# get frame start > second try from parent data
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
if not frame_start:
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["fstart"]
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["frameStart"]
assert frame_start, ("No `frame_start` data found, "
"please set `frameStart` on asset")
self.log.debug("frame_start: `{}`".format(frame_start))
# get handles > first try from asset data
handles = context.data["assetData"].get("handles", None)
if not handles:
handle_start = context.data["assetData"].get("handleStart", None)
handle_end = context.data["assetData"].get("handleEnd", None)
if (handle_start is None) or (handle_end is None):
# get frame start > second try from parent data
handles = pype.get_data_hierarchical_attr(entity, "handles")
if not handles:
# get frame start > third try from parent data
handles = asset_default["handles"]
handle_start = asset_default.get("handleStart", None)
handle_end = asset_default.get("handleEnd", None)
assert handles, ("No `handles` data found, "
"please set `handles` on asset")
self.log.debug("handles: `{}`".format(handles))
assert (
(handle_start is not None) or (
handle_end is not None)), (
"No `handle_start, handle_end` data found")
instances = []
task = a_session["AVALON_TASK"]
current_file = os.path.basename(context.data.get("currentFile"))
name, ext = os.path.splitext(current_file)
# get current file host
host = a_session["AVALON_APP"]
family = "projectfile"
families = "filesave"
family = "workfile"
subset_name = "{0}{1}".format(task, 'Default')
instance_name = "{0}_{1}_{2}".format(name,
family,
subset_name)
# Set label
label = "{0} - {1} > {2}".format(name, task, families)
label = "{0} - {1}".format(name, task)
# get project file instance Data
pf_instance = [inst for inst in instances_data
if inst.get("family", None) in 'projectfile']
self.log.debug('pf_instance: {}'.format(pf_instance))
# get working file into instance for publishing
instance = context.create_instance(instance_name)
if pf_instance:
instance.data.update(pf_instance[0])
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representation": ext[1:],
"host": host,
"asset": asset_name,
"label": label,
"name": name,
# "hierarchy": hierarchy,
# "parents": parents,
"family": family,
"families": [families, 'ftrack'],
"publish": True,
# "files": files_list
})
instances.append(instance)
wf_instance = next((inst for inst in instances_data
if inst.get("family", None) in 'workfile'), None)
if wf_instance:
self.log.debug('wf_instance: {}'.format(wf_instance))
version = int(wf_instance.get("version", None))
# get working file into instance for publishing
instance = context.create_instance(instance_name)
instance.data.update(wf_instance)
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representations": [{
"files": current_file,
'stagingDir': staging_dir,
'name': "projectfile",
'ext': ext[1:]
}],
"host": host,
"asset": asset,
"label": label,
"name": name,
"family": family,
"families": ["ftrack"],
"publish": True,
"version": version
})
instances.append(instance)
for inst in instances_data:
# for key, value in inst.items():
# self.log.debug('instance[key]: {}'.format(key))
#
version = inst.get("version", None)
version = int(inst.get("version", None))
assert version, "No `version` string in json file"
name = asset = inst.get("name", None)
@@ -135,14 +127,14 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
assert family, "No `family` key in json_data.instance: {}".format(
inst)
if family in 'projectfile':
if family in 'workfile':
continue
files_list = inst.get("files", None)
assert files_list, "`files` are empty in json file"
hierarchy = inst.get("hierarchy", None)
assert hierarchy, "No `hierarchy` data in json file"
assert hierarchy, f"No `hierarchy` data in json file for {name}"
parents = inst.get("parents", None)
assert parents, "No `parents` data in json file"
@@ -161,17 +153,12 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# create list of tasks for creation
if not inst.get('tasks', None):
inst['tasks'] = list()
if not inst.get('tasksTypes', None):
inst['tasksTypes'] = {}
# append task into list for later hierarchy creation
ftrack_task_type = ftrack_types[task]
if task not in inst['tasks']:
inst['tasks'].append(task)
inst['tasksTypes'][task] = ftrack_task_type
host = rules_tasks["taskHost"][task]
subsets = rules_tasks["taskSubsets"][task]
subsets = rules_tasks["taskToSubsets"][task]
for sub in subsets:
self.log.debug(sub)
try:
@@ -184,8 +171,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
subset_lst.extend([s for s in subsets if s not in subset_lst])
for subset in subset_lst:
if inst["representations"].get(subset, None):
repr = inst["representations"][subset]
if inst["subsetToRepresentations"].get(subset, None):
repr = inst["subsetToRepresentations"][subset]
ext = repr['representation']
else:
continue
@@ -197,7 +184,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
family = subset
subset_name = "{0}{1}".format(subset, "Main")
elif "reference" in subset:
family ="render"
family = "review"
subset_name = "{0}{1}".format(family, "Reference")
else:
subset_name = "{0}{1}".format(subset, 'Default')
@@ -209,17 +196,15 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
instance = context.create_instance(name)
files = [f for f in files_list
if subset in f or "thumbnail" in f
]
if subset in f or "thumbnail" in f]
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"tasks": subset_dict[subset],
"taskTypes": inst['tasksTypes'],
"fstart": frame_start,
"handles": handles,
"host": host,
"frameStart": frame_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
@@ -230,6 +215,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
"family": family,
"families": [subset, inst["family"], 'ftrack'],
"jsonData": inst,
"jsonReprSubset": subset,
"jsonReprExt": ext,
"publish": True,
"version": version})
self.log.info(
@@ -238,9 +225,6 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
context.data["instances"] = instances
# Sort/grouped by family (preserving local index)
# context[:] = sorted(context, key=self.sort_by_task)
self.log.debug("context: {}".format(context))
def sort_by_task(self, instance):
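After this commit the frame-start lookup in the plugin above reduces to a two-step fallback (asset document first, then the host preset default); a minimal sketch with plain dicts, helper name hypothetical:

def resolve_frame_start(asset_data, asset_default):
    # the asset document wins; otherwise fall back to the preset default
    frame_start = asset_data.get("frameStart")
    if not frame_start:
        frame_start = asset_default["frameStart"]
    return frame_start

# resolve_frame_start({}, {"frameStart": 1001}) -> 1001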

View file

@@ -2,7 +2,7 @@
import json
import clique
import pyblish.api
from pypeapp import Anatomy
class ExtractJSON(pyblish.api.ContextPlugin):
""" Extract all instances to a serialized json file. """
@@ -14,28 +14,27 @@ class ExtractJSON(pyblish.api.ContextPlugin):
json_path = context.data['post_json_data_path']
data = dict(self.serialize(context.data()))
# self.log.info(data)
instances_data = []
for instance in context:
iData = {}
for key, value in instance.data.items():
if isinstance(value, clique.Collection):
value = value.format()
try:
json.dumps(value)
iData[key] = value
except KeyError:
msg = "\"{0}\"".format(value)
msg += " in instance.data[\"{0}\"]".format(key)
msg += " could not be serialized."
self.log.debug(msg)
instances_data.append(iData)
data["instances"] = instances_data
# instances_data = []
# for instance in context:
#
# iData = {}
# for key, value in instance.data.items():
# if isinstance(value, clique.Collection):
# value = value.format()
#
# try:
# json.dumps(value)
# iData[key] = value
# except KeyError:
# msg = "\"{0}\"".format(value)
# msg += " in instance.data[\"{0}\"]".format(key)
# msg += " could not be serialized."
# self.log.debug(msg)
#
# instances_data.append(iData)
#
# data["instances"] = instances_data
with open(json_path, "w") as outfile:
outfile.write(json.dumps(data, indent=4, sort_keys=True))
@@ -60,6 +59,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
# self.log.info("1: {}".format(data))
if isinstance(data, Anatomy):
return
if not isinstance(data, dict):
# self.log.info("2: {}".format(data))
return data
@@ -88,6 +90,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
# loops if dictionary
data[key] = self.serialize(value)
if isinstance(value, Anatomy):
continue
if isinstance(value, (list or tuple)):
# loops if list or tuple
for i, item in enumerate(value):
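The commented-out filtering above kept only values that json.dumps accepts; a minimal standalone sketch of that idea (function name hypothetical; note json.dumps signals unserializable values with TypeError, not the KeyError the original caught):

import json

def json_safe(data, log=print):
    # keep only the values json.dumps can actually handle
    out = {}
    for key, value in data.items():
        try:
            json.dumps(value)
            out[key] = value
        except (TypeError, ValueError):
            log('"{}" in data["{}"] could not be serialized.'.format(value, key))
    return out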

View file

@@ -1,104 +0,0 @@
import os
import pyblish.api
from avalon import api as avalon
from pype import api as pype
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
"""
Collecting temp json data sent from a host context
and path for returning json data back to the host itself.
Setting avalon session into correct context.
Args:
context (obj): pyblish context session
"""
label = "Collect Aport Context"
order = pyblish.api.CollectorOrder - 0.49
def process(self, context):
# get json paths from data
rqst_json_data_path = Path(context.data['rqst_json_data_path'])
post_json_data_path = Path(context.data['post_json_data_path'])
# get avalon session data and convert \ to /
session = avalon.session
self.log.info(os.environ['AVALON_PROJECTS'])
projects = Path(session['AVALON_PROJECTS']).resolve()
wd = Path(session['AVALON_WORKDIR']).resolve()
session['AVALON_PROJECTS'] = str(projects)
session['AVALON_WORKDIR'] = str(wd)
context.data["avalonSession"] = session
self.log.debug("avalonSession: {}".format(session))
# get staging directory from received path to json
context.data["stagingDir"] = staging_dir = post_json_data_path.parent
# get data from received json file
with rqst_json_data_path.open(mode='r') as f:
context.data['jsonData'] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
host = json_data.get("host", None)
host_version = json_data.get("hostVersion", None)
assert host, "No `host` data in json file"
assert host_version, "No `hostVersion` data in json file"
context.data["host"] = session["AVALON_APP"] = host
context.data["hostVersion"] = \
session["AVALON_APP_VERSION"] = host_version
# register pyblish for filtering of hosts in plugins
pyblish.api.deregister_all_hosts()
pyblish.api.register_host(host)
# get path to studio templates
templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"
# get presets for host
presets_dir = os.path.join(templates_dir, "presets", host)
assert os.path.exists(
presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
# load all available preset json files
preset_data = dict()
for file in os.listdir(presets_dir):
name, ext = os.path.splitext(file)
with open(os.path.join(presets_dir, file)) as prst:
preset_data[name] = json.load(prst)
context.data['presets'] = preset_data
assert preset_data, "No `presets` data in json file"
self.log.debug("preset_data: {}".format(preset_data))
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = Path(current_file).resolve()
# get project data from avalon
project_data = pype.get_project_data()
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = pype.get_asset_data()
assert asset_data, "No `asset_data` data in avalon db"
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
if isinstance(v, str)}
context.data["assetData"] = asset_data
self.log.debug("asset_data: {}".format(asset_data))
self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
self.log.info("post_json_data_path is: {}".format(post_json_data_path))
# self.log.info("avalon.session is: {}".format(avalon.session))

View file

@@ -89,7 +89,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context[:] if i.data['asset'] in entity['name']
i for i in self.context if i.data['asset'] in entity['name']
]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (

View file

@@ -51,10 +51,26 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
context.data["frameStart"] = data.get("frameStart")
context.data["frameEnd"] = data.get("frameEnd")
handles = int(data.get("handles") or 0)
context.data["handles"] = handles
context.data["handleStart"] = int(data.get("handleStart", handles))
context.data["handleEnd"] = int(data.get("handleEnd", handles))
handles = data.get("handles") or 0
handle_start = data.get("handleStart")
if handle_start is None:
handle_start = handles
self.log.info((
"Key \"handleStart\" is not set."
" Using value from \"handles\" key {}."
).format(handle_start))
handle_end = data.get("handleEnd")
if handle_end is None:
handle_end = handles
self.log.info((
"Key \"handleEnd\" is not set."
" Using value from \"handles\" key {}."
).format(handle_end))
context.data["handles"] = int(handles)
context.data["handleStart"] = int(handle_start)
context.data["handleEnd"] = int(handle_end)
frame_start_h = data.get("frameStart") - context.data["handleStart"]
frame_end_h = data.get("frameEnd") + context.data["handleEnd"]
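The handle resolution above boils down to: an explicit handleStart/handleEnd wins, otherwise both inherit the legacy "handles" value. A minimal sketch (function name hypothetical):

def resolve_handles(data):
    # explicit per-side handles win; both sides fall back to "handles"
    handles = data.get("handles") or 0
    handle_start = data.get("handleStart")
    if handle_start is None:
        handle_start = handles
    handle_end = data.get("handleEnd")
    if handle_end is None:
        handle_end = handles
    return int(handles), int(handle_start), int(handle_end)

# resolve_handles({"handles": 2}) -> (2, 2, 2)
# resolve_handles({"handles": 2, "handleStart": 0}) -> (2, 0, 2)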

View file

@@ -18,7 +18,7 @@ class ExtractBurnin(pype.api.Extractor):
label = "Extract burnins"
order = pyblish.api.ExtractorOrder + 0.03
families = ["review", "burnin"]
hosts = ["nuke", "maya", "shell"]
hosts = ["nuke", "maya", "shell", "premiere"]
optional = True
def process(self, instance):
@@ -193,6 +193,7 @@ class ExtractBurnin(pype.api.Extractor):
self.log.debug("Output: {}".format(output))
repre_update = {
"anatomy_template": "render",
"files": movieFileBurnin,
"name": repre["name"],
"tags": [x for x in repre["tags"] if x != "delete"]

View file

@@ -20,7 +20,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
label = "Extract Review"
order = pyblish.api.ExtractorOrder + 0.02
families = ["review"]
hosts = ["nuke", "maya", "shell"]
hosts = ["nuke", "maya", "shell", "premiere"]
outputs = {}
ext_filter = []

View file

@@ -5,6 +5,7 @@ import sys
import copy
import clique
import errno
import six
from pymongo import DeleteOne, InsertOne
import pyblish.api
@@ -551,7 +552,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# copy file with speedcopy and check if sizes of files are identical
while True:
copyfile(src, dst)
try:
copyfile(src, dst)
except OSError as e:
self.log.critical("Cannot copy {} to {}".format(src, dst))
self.log.critical(e)
six.reraise(*sys.exc_info())
if str(getsize(src)) in str(getsize(dst)):
break
@@ -589,7 +595,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"name": subset_name,
"data": {
"families": instance.data.get('families')
},
},
"parent": asset["_id"]
}).inserted_id
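The copy loop above retries until source and destination sizes agree and now re-raises copy failures with the original traceback. A self-contained sketch of the same idea, with shutil.copyfile standing in for speedcopy, a plain raise standing in for six.reraise, and an exact size comparison instead of the substring check:

import shutil
from os.path import getsize

def copy_verified(src, dst, log=print):
    # retry until the destination size matches the source size;
    # surface the original error if the copy itself fails
    while True:
        try:
            shutil.copyfile(src, dst)
        except OSError as exc:
            log("Cannot copy {} to {}".format(src, dst))
            log(str(exc))
            raise
        if getsize(src) == getsize(dst):
            break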

View file

@@ -237,6 +237,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
environment["PYPE_METADATA_FILE"] = metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["PYPE_LOG_NO_COLORS"] = "1"
try:
environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
except KeyError:
# PYPE_PYTHON_EXE not set
pass
i = 0
for index, key in enumerate(environment):
if key.upper() in self.enviro_filter:

View file

@@ -11,6 +11,7 @@ class CreateYetiCache(avalon.maya.Creator):
label = "Yeti Cache"
family = "yeticache"
icon = "pagelines"
defaults = ["Main"]
def __init__(self, *args, **kwargs):
super(CreateYetiCache, self).__init__(*args, **kwargs)

View file

@@ -10,6 +10,7 @@ class CreateYetiRig(avalon.maya.Creator):
label = "Yeti Rig"
family = "yetiRig"
icon = "usb"
defaults = ["Main"]
def process(self):

View file

@@ -18,17 +18,17 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):
def process_reference(self, context, name, namespace, options):
"""
Load and try to assign Lookdev to nodes based on relationship data
Load and try to assign Lookdev to nodes based on relationship data.
Args:
name:
namespace:
context:
data:
options:
Returns:
"""
import maya.cmds as cmds
from avalon import maya

View file

@@ -65,8 +65,10 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
roots.add(pm.PyNode(node).getAllParents()[-2])
except: # noqa: E722
pass
for root in roots:
root.setParent(world=True)
if family not in ["layout", "setdress", "mayaAscii"]:
for root in roots:
root.setParent(world=True)
groupNode.zeroTransformPivots()
for root in roots:

View file

@@ -21,7 +21,8 @@ class YetiRigLoader(pype.maya.plugin.ReferenceLoader):
icon = "code-fork"
color = "orange"
def process_reference(self, context, name=None, namespace=None, data=None):
def process_reference(
self, context, name=None, namespace=None, options=None):
import maya.cmds as cmds
from avalon import maya

View file

@@ -250,7 +250,8 @@ class CollectLook(pyblish.api.InstancePlugin):
# Remove sets that didn't have any members assigned in the end
# Thus the data will be limited to only what we need.
if not sets[objset]["members"]:
self.log.info("objset {}".format(sets[objset]))
if not sets[objset]["members"] or (not objset.endswith("SG")):
self.log.info("Removing redundant set information: "
"%s" % objset)
sets.pop(objset, None)

View file

@@ -157,6 +157,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
attachTo = []
if sets:
for s in sets:
if "family" not in cmds.listAttr(s):
continue
attachTo.append(
{
"version": None, # we need integrator for that

View file

@@ -56,7 +56,8 @@ class ExtractAnimation(pype.api.Extractor):
"writeCreases": True,
"uvWrite": True,
"selection": True,
"worldSpace": instance.data.get("worldSpace", True)
"worldSpace": instance.data.get("worldSpace", True),
"writeColorSets": instance.data.get("writeColorSets", False)
}
if not instance.data.get("includeParentHierarchy", True):

View file

@@ -147,7 +147,7 @@ class ExtractYetiRig(pype.api.Extractor):
nodes = instance.data["setMembers"]
resources = instance.data.get("resources", {})
with disconnect_plugs(settings, members):
with yetigraph_attribute_values(destination_folder, resources):
with yetigraph_attribute_values(resources_dir, resources):
with maya.attribute_values(attr_value):
cmds.select(nodes, noExpand=True)
cmds.file(maya_path,

View file

@@ -48,6 +48,14 @@ class CreateWritePrerender(plugin.PypeCreator):
self.log.error(msg)
nuke.message(msg)
if len(nodes) == 0:
msg = (
"No nodes selected. Please select a single node to connect"
" to or tick off `Use selection`"
)
self.log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()

View file

@@ -49,6 +49,14 @@ class CreateWriteRender(plugin.PypeCreator):
self.log.error(msg)
nuke.message(msg)
if len(nodes) == 0:
msg = (
"No nodes selected. Please select a single node to connect"
" to or tick off `Use selection`"
)
self.log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()

View file

@@ -125,6 +125,7 @@ class ExtractThumbnail(pype.api.Extractor):
repre = {
'name': name,
'ext': "jpeg",
"outputName": "thumb",
'files': file,
"stagingDir": staging_dir,
"frameStart": first_frame,

View file

@@ -13,5 +13,5 @@ class CollectAudioVersion(pyblish.api.InstancePlugin):
def process(self, instance):
self.log.info('Audio: {}'.format(instance.data['name']))
instance.data['version'] = '001'
instance.data['version'] = 1
self.log.info('Audio version to: {}'.format(instance.data['version']))

View file

@@ -1,12 +0,0 @@
import pyblish.api
class CollectContextDataPremiera(pyblish.api.ContextPlugin):
"""Collecting data from temp json sent from premiera context"""
label = "Collect Premiera Context"
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
data_path = context.data['rqst_json_data_path']
self.log.info("Context is: {}".format(data_path))

View file

@@ -19,16 +19,18 @@ class CollectFrameranges(pyblish.api.InstancePlugin):
metadata = instance.data.get('jsonData').get('metadata')
# getting important metadata for time calculation
fps = metadata['ppro.timeline.fps']
fps = float(metadata['ppro.timeline.fps'])
sec_start = metadata['ppro.clip.start']
sec_end = metadata['ppro.clip.end']
fstart = instance.data.get('fstart')
fstart = instance.data.get('frameStart')
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
self.log.debug("instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\nfstart: {}\nfend: {}\n".format(
instance.data['name'],
fps, sec_start, sec_end, fstart, fend))
instance.data['startFrame'] = fstart
instance.data['endFrame'] = fend
instance.data['frameStart'] = fstart
instance.data['frameEnd'] = fend
instance.data['handleStart'] = instance.context.data['handleStart']
instance.data['handleEnd'] = instance.context.data['handleEnd']
instance.data['fps'] = metadata['ppro.timeline.fps']
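A worked instance of the range math above, with illustrative values:

# fps = 25.0, ppro.clip.start = 2.0 s, ppro.clip.end = 4.0 s, frameStart = 1001
# frameEnd = 1001 + (4.0 * 25.0) - (2.0 * 25.0) - 1 = 1050
# i.e. the two-second clip spans frames 1001-1050 inclusive (50 frames).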

View file

@@ -26,7 +26,7 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
json_data = context.data.get("jsonData", None)
temp_context = {}
for instance in json_data['instances']:
if instance['family'] in 'projectfile':
if instance['family'] in 'workfile':
continue
in_info = {}
@@ -35,10 +35,13 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
in_info['entity_type'] = 'Shot'
instance_pyblish = [
i for i in context.data["instances"] if i.data['asset'] in name][0]
i for i in context.data["instances"]
if i.data['asset'] in name][0]
in_info['custom_attributes'] = {
'fend': instance_pyblish.data['endFrame'],
'fstart': instance_pyblish.data['startFrame'],
'frameStart': instance_pyblish.data['frameStart'],
'frameEnd': instance_pyblish.data['frameEnd'],
'handleStart': instance_pyblish.data['handleStart'],
'handleEnd': instance_pyblish.data['handleEnd'],
'fps': instance_pyblish.data['fps']
}

View file

@@ -0,0 +1,83 @@
import os
import pyblish.api
class CollectClipRepresentations(pyblish.api.InstancePlugin):
"""
Collecting representations for clip instances
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Representations"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# add to representations
if not instance.data.get("representations"):
instance.data["representations"] = list()
ins_d = instance.data
staging_dir = ins_d["stagingDir"]
frame_start = ins_d["frameStart"]
frame_end = ins_d["frameEnd"]
handle_start = ins_d["handleStart"]
handle_end = ins_d["handleEnd"]
fps = ins_d["fps"]
files_list = ins_d.get("files")
if not files_list:
return
json_repr_ext = ins_d["jsonReprExt"]
json_repr_subset = ins_d["jsonReprSubset"]
if files_list:
file = next((f for f in files_list
if json_repr_subset in f), None)
else:
return
if json_repr_ext in ["mov", "mp4"]:
representation = {
"files": file,
"stagingDir": staging_dir,
"frameStart": frame_start,
"frameEnd": frame_end,
"frameStartFtrack": frame_start - handle_start,
"frameEndFtrack": frame_end - handle_end,
"step": 1,
"fps": fps,
"name": json_repr_subset,
"ext": json_repr_ext,
"tags": ["review", "delete"]
}
else:
representation = {
"files": file,
"stagingDir": staging_dir,
"step": 1,
"fps": fps,
"name": json_repr_subset,
"ext": json_repr_ext,
"tags": ["review"]
}
self.log.debug("representation: {}".format(representation))
instance.data["representations"].append(representation)
thumb = next((f for f in files_list
if "thumbnail" in f), None)
if thumb:
thumb_representation = {
'files': thumb,
'stagingDir': staging_dir,
'name': "thumbnail",
'thumbnail': True,
'ext': os.path.splitext(thumb)[-1].replace(".", "")
}
self.log.debug("representation: {}".format(thumb_representation))
instance.data["representations"].append(
thumb_representation)
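With illustrative inputs, the plugin above yields one reviewable movie representation (tagged for deletion after review) plus a thumbnail; all names below are made up:

# files = ["sh010_reference.mov", "sh010_thumbnail.jpg"]
# jsonReprSubset = "reference", jsonReprExt = "mov"
# -> {"name": "reference", "ext": "mov", "files": "sh010_reference.mov",
#     "tags": ["review", "delete"], ...frame range and fps keys...}
# -> {"name": "thumbnail", "ext": "jpg", "files": "sh010_thumbnail.jpg",
#     "thumbnail": True}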

View file

@@ -0,0 +1,31 @@
import pyblish.api
class CollectResolution(pyblish.api.InstancePlugin):
"""
Collecting resolution and pixel aspect from clip metadata
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Resolution"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# getting metadata from jsonData key
metadata = instance.data.get('jsonData').get('metadata')
# getting important metadata for time calculation
pixel_aspect = float(metadata['ppro.format.pixelaspect'])
res_width = metadata['ppro.format.width']
res_height = metadata['ppro.format.height']
instance.data['pixelAspect'] = pixel_aspect
instance.data['resolutionWidth'] = res_width
instance.data['resolutionHeight'] = res_height
self.log.info(f"Resolution was set to: `{res_width}x{res_height}`,"
f" and pixel aspect ration to: `{pixel_aspect}`")

View file

@@ -1,144 +0,0 @@
import pyblish.api
import os
from avalon import io, api
class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Integrate Assumed Destination"
order = pyblish.api.IntegratorOrder - 0.05
families = ["clip", "projectfile"]
def process(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
# template = instance.data["template"]
anatomy = instance.context.data['anatomy']
# template = anatomy.publish.path
anatomy_filled = anatomy.format(template_data)
mock_template = anatomy_filled.publish.path
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
# to work correctly with backslashes in specific scenarios
# (e.g. escape characters in PLN-151 V-Ray UDIM)
destination = destination.replace("\\", "/")
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
self.log.info(subset_name)
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one(
{
"type": "project",
"name": project_name
},
projection={"config": True, "data": True}
)
template = project["config"]["template"]["publish"]
# anatomy = instance.context.data['anatomy']
asset = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project["_id"]
})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset.get('silo')
subset = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
sort=[("name", -1)]
)
# if there is a subset there ought to be version
if version is not None:
version_number += version["name"]
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
self.log.info(template_data)
instance.data["template"] = template

View file

@@ -1,140 +0,0 @@
import pyblish.api
from avalon import io
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
"""
Create entities in avalon based on collected data from premiere
"""
order = pyblish.api.IntegratorOrder - 0.1
label = 'Integrate Hierarchy To Avalon'
families = ['clip']
def process(self, context):
if "hierarchyContext" not in context.data:
return
self.db = io
if not self.db.Session:
self.db.install()
input_data = context.data["hierarchyContext"]
self.import_to_avalon(input_data)
def import_to_avalon(self, input_data, parent=None):
for name in input_data:
self.log.info('input_data[name]: {}'.format(input_data[name]))
entity_data = input_data[name]
entity_type = entity_data['entity_type']
data = {}
# Process project
if entity_type.lower() == 'project':
entity = self.db.find_one({'type': 'project'})
# TODO: should be in validator?
assert (entity is not None), "Didn't find project in DB"
# get data from already existing project
for key, value in entity.get('data', {}).items():
data[key] = value
self.av_project = entity
# Raise error if project or parent are not set
elif self.av_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# Else process asset
else:
entity = self.db.find_one({'type': 'asset', 'name': name})
# Create entity if doesn't exist
if entity is None:
if self.av_project['_id'] == parent['_id']:
silo = None
elif parent['silo'] is None:
silo = parent['name']
else:
silo = parent['silo']
entity = self.create_avalon_asset(name, silo)
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____1____')
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks is not None or len(tasks) > 0:
data['tasks'] = tasks
parents = []
visualParent = None
data = input_data[name]
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
entity = self.db.find_one({'type': 'asset', 'name': name})
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____2____')
# Else get data from already existing
else:
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('________')
for key, value in entity.get('data', {}).items():
data[key] = value
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks is not None or len(tasks) > 0:
data['tasks'] = tasks
parents = []
visualParent = None
# do not store project's id as visualParent (silo asset)
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
# CUSTOM ATTRIBUTES
for k, val in entity_data.get('custom_attributes', {}).items():
data[k] = val
# Update entity data with input data
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
if 'childs' in entity_data:
self.import_to_avalon(entity_data['childs'], entity)
def create_avalon_asset(self, name, silo):
item = {
'schema': 'avalon-core:asset-2.0',
'name': name,
'silo': silo,
'parent': self.av_project['_id'],
'type': 'asset',
'data': {}
}
entity_id = self.db.insert_one(item).inserted_id
return self.db.find_one({'_id': entity_id})

View file

@@ -1,171 +0,0 @@
import sys
import pyblish.api
import six
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from premiere
Example of entry data:
{
"ProjectXS": {
"entity_type": "Project",
"custom_attributes": {
"fps": 24,...
},
"tasks": [
"Compositing",
"Lighting",... *task must exist as task type in project schema*
],
"childs": {
"sq01": {
"entity_type": "Sequence",
...
}
}
}
}
"""
order = pyblish.api.IntegratorOrder
label = 'Integrate Hierarchy To Ftrack'
families = ["clip"]
optional = False
def process(self, context):
self.context = context
if "hierarchyContext" not in context.data:
return
self.ft_project = None
self.session = context.data["ftrackSession"]
input_data = context.data["hierarchyContext"]
# adding ftrack types from presets
ftrack_types = context.data['ftrackTypes']
self.import_to_ftrack(input_data, ftrack_types)
def import_to_ftrack(self, input_data, ftrack_types, parent=None):
for entity_name in input_data:
entity_data = input_data[entity_name]
entity_type = entity_data['entity_type'].capitalize()
if entity_type.lower() == 'project':
query = 'Project where full_name is "{}"'.format(entity_name)
entity = self.session.query(query).one()
self.ft_project = entity
self.task_types = self.get_all_task_types(entity)
elif self.ft_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# try to find if entity already exists
else:
query = '{} where name is "{}" and parent_id is "{}"'.format(
entity_type, entity_name, parent['id']
)
try:
entity = self.session.query(query).one()
except Exception:
entity = None
# Create entity if not exists
if entity is None:
entity = self.create_entity(
name=entity_name,
type=entity_type,
parent=parent
)
# self.log.info('entity: {}'.format(dict(entity)))
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context.data["instances"] if i.data['asset'] in entity['name']]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (
'Missing custom attribute')
entity['custom_attributes'][key] = custom_attributes[key]
for instance in instances:
instance.data['ftrackShotId'] = entity['id']
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
# TASKS
tasks = entity_data.get('tasks', [])
existing_tasks = []
tasks_to_create = []
for child in entity['children']:
if child.entity_type.lower() == 'task':
existing_tasks.append(child['name'])
# existing_tasks.append(child['type']['name'])
for task in tasks:
if task in existing_tasks:
print("Task {} already exists".format(task))
continue
tasks_to_create.append(task)
for task in tasks_to_create:
self.create_task(
name=task,
task_type=ftrack_types[task],
parent=entity
)
if 'childs' in entity_data:
self.import_to_ftrack(
entity_data['childs'], ftrack_types, entity)
def get_all_task_types(self, project):
tasks = {}
proj_template = project['project_schema']
temp_task_types = proj_template['_task_type_schema']['types']
for type in temp_task_types:
if type['name'] not in tasks:
tasks[type['name']] = type
return tasks
def create_task(self, name, task_type, parent):
task = self.session.create('Task', {
'name': name,
'parent': parent
})
# TODO not secured!!! - check if task_type exists
self.log.info(task_type)
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
return task
def create_entity(self, name, type, parent):
entity = self.session.create(type, {
'name': name,
'parent': parent
})
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
return entity
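The removed plugin repeats the same commit/rollback/reraise dance in three places; a sketch of factoring it into one helper, using the same six.reraise idiom (helper name hypothetical):

import sys
import six

def safe_commit(session):
    # commit the ftrack session; on failure roll back and re-raise
    # with the original traceback, as the plugin above does inline
    try:
        session.commit()
    except Exception:
        tp, value, tb = sys.exc_info()
        session.rollback()
        six.reraise(tp, value, tb)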