Merge branch 'develop' into feature/93-flexible_template_assignment

This commit is contained in:
Milan Kolar 2020-05-12 22:45:02 +02:00
commit f8652e9401
156 changed files with 33857 additions and 13060 deletions

View file

@ -0,0 +1,92 @@
import os
import pyblish.api
from avalon import (
io,
api as avalon
)
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
"""
Collecting temp json data sent from a host context
and the path for returning json data back to the host itself.
Sets the avalon session into the correct context.
Args:
context (obj): pyblish context session
"""
label = "AdobeCommunicator Collect Context"
order = pyblish.api.CollectorOrder - 0.49
def process(self, context):
self.log.info(
"registred_hosts: `{}`".format(pyblish.api.registered_hosts()))
io.install()
# get json paths from data
input_json_path = os.environ.get("AC_PUBLISH_INPATH")
output_json_path = os.environ.get("AC_PUBLISH_OUTPATH")
rqst_json_data_path = Path(input_json_path)
post_json_data_path = Path(output_json_path)
context.data['post_json_data_path'] = str(post_json_data_path)
# get avalon session data and convert \ to /
_S = avalon.session
projects = Path(_S["AVALON_PROJECTS"]).resolve()
asset = _S["AVALON_ASSET"]
workdir = Path(_S["AVALON_WORKDIR"]).resolve()
_S["AVALON_PROJECTS"] = str(projects)
_S["AVALON_WORKDIR"] = str(workdir)
context.data["avalonSession"] = _S
self.log.info(f"__ avalonSession: `{_S}`")
# get staging directory from received path to json
context.data["stagingDir"] = post_json_data_path.parent
# get data from received json file
with rqst_json_data_path.open(mode='r') as f:
context.data["jsonData"] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
host = json_data.get("host", None)
host_version = json_data.get("hostVersion", None)
assert host, "No `host` data in json file"
assert host_version, "No `hostVersion` data in json file"
context.data["host"] = _S["AVALON_APP"] = host
context.data["hostVersion"] = \
_S["AVALON_APP_VERSION"] = host_version
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = str(Path(current_file).resolve())
# get project data from avalon
project_data = io.find_one({'type': 'project'})
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = io.find_one({
"type": 'asset',
"name": asset
})["data"]
assert asset_data, "No `asset_data` data in avalon db"
context.data["assetData"] = asset_data
self.log.debug("asset_data: {}".format(asset_data))
self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
self.log.info("post_json_data_path is: {}".format(post_json_data_path))
# self.log.info("avalon.session is: {}".format(avalon.session))

View file

@ -1,12 +1,5 @@
import os
import json
import pyblish.api
from avalon import (
io,
api as avalon
)
from pype import api as pype
class CollectInstancesFromJson(pyblish.api.ContextPlugin):
@ -26,7 +19,11 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
def process(self, context):
a_session = context.data.get("avalonSession")
_S = context.data["avalonSession"]
asset = _S["AVALON_ASSET"]
task = _S["AVALON_TASK"]
host = _S["AVALON_APP"]
json_data = context.data.get("jsonData", None)
assert json_data, "No `json_data` data in json file"
@ -36,96 +33,91 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
staging_dir = json_data.get("stagingDir", None)
assert staging_dir, "No `stagingDir` path in json file"
presets = context.data["presets"]
rules_tasks = presets["rules_tasks"]
ftrack_types = rules_tasks["ftrackTypes"]
assert ftrack_types, "No `ftrack_types` data in `/templates/presets/[host]/rules_tasks.json` file"
host = context.data["host"]
presets = context.data["presets"][host]
context.data["ftrackTypes"] = ftrack_types
rules_tasks = presets["rules_tasks"]
asset_default = presets["asset_default"]
assert asset_default, "No `asset_default` data in `/templates/presets/[host]/asset_default.json` file"
asset_name = a_session["AVALON_ASSET"]
entity = io.find_one({"name": asset_name,
"type": "asset"})
assert asset_default, ("No `asset_default` data in"
"`/presets/[host]/asset_default.json` file")
# get frame start > first try from asset data
frame_start = context.data["assetData"].get("fstart", None)
frame_start = context.data["assetData"].get("frameStart", None)
if not frame_start:
self.log.debug("frame_start not on assetData")
# get frame start > second try from parent data
frame_start = pype.get_data_hierarchical_attr(entity, "fstart")
if not frame_start:
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["fstart"]
self.log.debug("frame_start not on any parent entity")
# get frame start > third try from parent data
frame_start = asset_default["frameStart"]
assert frame_start, (
    "No `frame_start` data found, please set `frameStart` on asset")
self.log.debug("frame_start: `{}`".format(frame_start))
# get handles > first try from asset data
handles = context.data["assetData"].get("handles", None)
if not handles:
handle_start = context.data["assetData"].get("handleStart", None)
handle_end = context.data["assetData"].get("handleEnd", None)
if (handle_start is None) or (handle_end is None):
# get frame start > second try from parent data
handles = pype.get_data_hierarchical_attr(entity, "handles")
if not handles:
# get frame start > third try from parent data
handles = asset_default["handles"]
handle_start = asset_default.get("handleStart", None)
handle_end = asset_default.get("handleEnd", None)
assert handles, "No `handles` data found, "
"please set `fstart` on asset"
self.log.debug("handles: `{}`".format(handles))
assert (
(handle_start is not None) or (
handle_end is not None)), (
"No `handle_start, handle_end` data found")
instances = []
task = a_session["AVALON_TASK"]
current_file = os.path.basename(context.data.get("currentFile"))
name, ext = os.path.splitext(current_file)
# get current file host
host = a_session["AVALON_APP"]
family = "projectfile"
families = "filesave"
family = "workfile"
subset_name = "{0}{1}".format(task, 'Default')
instance_name = "{0}_{1}_{2}".format(name,
family,
subset_name)
# Set label
label = "{0} - {1} > {2}".format(name, task, families)
label = "{0} - {1}".format(name, task)
# get project file instance Data
pf_instance = [inst for inst in instances_data
if inst.get("family", None) in 'projectfile']
self.log.debug('pf_instance: {}'.format(pf_instance))
# get working file into instance for publishing
instance = context.create_instance(instance_name)
if pf_instance:
instance.data.update(pf_instance[0])
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representation": ext[1:],
"host": host,
"asset": asset_name,
"label": label,
"name": name,
# "hierarchy": hierarchy,
# "parents": parents,
"family": family,
"families": [families, 'ftrack'],
"publish": True,
# "files": files_list
})
instances.append(instance)
wf_instance = next((inst for inst in instances_data
if inst.get("family", None) in 'workfile'), None)
if wf_instance:
self.log.debug('wf_instance: {}'.format(wf_instance))
version = int(wf_instance.get("version", None))
# get working file into instance for publishing
instance = context.create_instance(instance_name)
instance.data.update(wf_instance)
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"task": task,
"representations": [{
"files": current_file,
'stagingDir': staging_dir,
'name': "projectfile",
'ext': ext[1:]
}],
"host": host,
"asset": asset,
"label": label,
"name": name,
"family": family,
"families": ["ftrack"],
"publish": True,
"version": version
})
instances.append(instance)
for inst in instances_data:
# for key, value in inst.items():
# self.log.debug('instance[key]: {}'.format(key))
#
version = inst.get("version", None)
version = int(inst.get("version", None))
assert version, "No `version` string in json file"
name = asset = inst.get("name", None)
@ -135,14 +127,14 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
assert family, "No `family` key in json_data.instance: {}".format(
inst)
if family in 'projectfile':
if family in 'workfile':
continue
files_list = inst.get("files", None)
assert files_list, "`files` are empty in json file"
hierarchy = inst.get("hierarchy", None)
assert hierarchy, "No `hierarchy` data in json file"
assert hierarchy, f"No `hierarchy` data in json file for {name}"
parents = inst.get("parents", None)
assert parents, "No `parents` data in json file"
@ -161,17 +153,12 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
# create list of tasks for creation
if not inst.get('tasks', None):
inst['tasks'] = list()
if not inst.get('tasksTypes', None):
inst['tasksTypes'] = {}
# append task into list for later hierarchy creation
ftrack_task_type = ftrack_types[task]
if task not in inst['tasks']:
inst['tasks'].append(task)
inst['tasksTypes'][task] = ftrack_task_type
host = rules_tasks["taskHost"][task]
subsets = rules_tasks["taskSubsets"][task]
subsets = rules_tasks["taskToSubsets"][task]
for sub in subsets:
self.log.debug(sub)
try:
@ -184,8 +171,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
subset_lst.extend([s for s in subsets if s not in subset_lst])
for subset in subset_lst:
if inst["representations"].get(subset, None):
repr = inst["representations"][subset]
if inst["subsetToRepresentations"].get(subset, None):
repr = inst["subsetToRepresentations"][subset]
ext = repr['representation']
else:
continue
@ -197,7 +184,7 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
family = subset
subset_name = "{0}{1}".format(subset, "Main")
elif "reference" in subset:
family ="render"
family = "review"
subset_name = "{0}{1}".format(family, "Reference")
else:
subset_name = "{0}{1}".format(subset, 'Default')
@ -209,17 +196,15 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
instance = context.create_instance(name)
files = [f for f in files_list
if subset in f or "thumbnail" in f
]
if subset in f or "thumbnail" in f]
instance.data.update({
"subset": subset_name,
"stagingDir": staging_dir,
"tasks": subset_dict[subset],
"taskTypes": inst['tasksTypes'],
"fstart": frame_start,
"handles": handles,
"host": host,
"frameStart": frame_start,
"handleStart": handle_start,
"handleEnd": handle_end,
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
@ -230,6 +215,8 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
"family": family,
"families": [subset, inst["family"], 'ftrack'],
"jsonData": inst,
"jsonReprSubset": subset,
"jsonReprExt": ext,
"publish": True,
"version": version})
self.log.info(
@ -238,9 +225,6 @@ class CollectInstancesFromJson(pyblish.api.ContextPlugin):
context.data["instances"] = instances
# Sort/grouped by family (preserving local index)
# context[:] = sorted(context, key=self.sort_by_task)
self.log.debug("context: {}".format(context))
def sort_by_task(self, instance):
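The collector above leans on per-host preset files; a hypothetical `rules_tasks.json` / `asset_default.json` pair, consistent with the keys it reads (`ftrackTypes`, `taskHost`, `taskToSubsets`, `frameStart`, `handleStart`, `handleEnd`), could look like this sketch:

# Hypothetical preset data matching the keys read by the collector.
rules_tasks = {
    "ftrackTypes": {"compositing": "Compositing", "edit": "Editing"},
    "taskHost": {"compositing": "nuke", "edit": "premiere"},
    "taskToSubsets": {
        "compositing": ["plate", "reference"],
        "edit": ["audio", "review"],
    },
}
asset_default = {
    "frameStart": 1001,
    "handleStart": 0,
    "handleEnd": 0,
}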

View file

@ -2,7 +2,7 @@
import json
import clique
import pyblish.api
from pypeapp import Anatomy
class ExtractJSON(pyblish.api.ContextPlugin):
""" Extract all instances to a serialized json file. """
@ -14,28 +14,27 @@ class ExtractJSON(pyblish.api.ContextPlugin):
json_path = context.data['post_json_data_path']
data = dict(self.serialize(context.data()))
# self.log.info(data)
instances_data = []
for instance in context:
iData = {}
for key, value in instance.data.items():
if isinstance(value, clique.Collection):
value = value.format()
try:
json.dumps(value)
iData[key] = value
except KeyError:
msg = "\"{0}\"".format(value)
msg += " in instance.data[\"{0}\"]".format(key)
msg += " could not be serialized."
self.log.debug(msg)
instances_data.append(iData)
data["instances"] = instances_data
# instances_data = []
# for instance in context:
#
# iData = {}
# for key, value in instance.data.items():
# if isinstance(value, clique.Collection):
# value = value.format()
#
# try:
# json.dumps(value)
# iData[key] = value
# except KeyError:
# msg = "\"{0}\"".format(value)
# msg += " in instance.data[\"{0}\"]".format(key)
# msg += " could not be serialized."
# self.log.debug(msg)
#
# instances_data.append(iData)
#
# data["instances"] = instances_data
with open(json_path, "w") as outfile:
outfile.write(json.dumps(data, indent=4, sort_keys=True))
@ -60,6 +59,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
# self.log.info("1: {}".format(data))
if isinstance(data, Anatomy):
return
if not isinstance(data, dict):
# self.log.info("2: {}".format(data))
return data
@ -88,6 +90,9 @@ class ExtractJSON(pyblish.api.ContextPlugin):
# loops if dictionary
data[key] = self.serialize(value)
if isinstance(value, Anatomy):
continue
if isinstance(value, (list or tuple)):
# loops if list or tuple
for i, item in enumerate(value):
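The heart of the serializer is the `json.dumps` probe: keep a value only if it round-trips, stringifying clique collections first. A standalone sketch of that filtering; note that `json.dumps` signals failure with `TypeError`/`ValueError`, not the `KeyError` the plugin catches:

import json
import clique

def serializable_items(data):
    # Keep only values that survive a json round-trip; clique
    # collections are stringified with .format() first.
    for key, value in data.items():
        if isinstance(value, clique.Collection):
            value = value.format()
        try:
            json.dumps(value)
        except (TypeError, ValueError):
            continue
        yield key, value

frames = clique.assemble(["render.0001.exr", "render.0002.exr"])[0][0]
print(dict(serializable_items(
    {"frames": frames, "fps": 25.0, "log": object()})))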

View file

@ -1,104 +0,0 @@
import os
import pyblish.api
from avalon import api as avalon
from pype import api as pype
import json
from pathlib import Path
class CollectContextDataFromAport(pyblish.api.ContextPlugin):
"""
Collecting temp json data sent from a host context
and the path for returning json data back to the host itself.
Sets the avalon session into the correct context.
Args:
context (obj): pyblish context session
"""
label = "Collect Aport Context"
order = pyblish.api.CollectorOrder - 0.49
def process(self, context):
# get json paths from data
rqst_json_data_path = Path(context.data['rqst_json_data_path'])
post_json_data_path = Path(context.data['post_json_data_path'])
# get avalon session data and convert \ to /
session = avalon.session
self.log.info(os.environ['AVALON_PROJECTS'])
projects = Path(session['AVALON_PROJECTS']).resolve()
wd = Path(session['AVALON_WORKDIR']).resolve()
session['AVALON_PROJECTS'] = str(projects)
session['AVALON_WORKDIR'] = str(wd)
context.data["avalonSession"] = session
self.log.debug("avalonSession: {}".format(session))
# get staging directory from received path to json
context.data["stagingDir"] = staging_dir = post_json_data_path.parent
# get data from received json file
with rqst_json_data_path.open(mode='r') as f:
context.data['jsonData'] = json_data = json.load(f)
assert json_data, "No `data` in json file"
# get and check host type
host = json_data.get("host", None)
host_version = json_data.get("hostVersion", None)
assert host, "No `host` data in json file"
assert host_version, "No `hostVersion` data in json file"
context.data["host"] = session["AVALON_APP"] = host
context.data["hostVersion"] = \
session["AVALON_APP_VERSION"] = host_version
# register pyblish for filtering of hosts in plugins
pyblish.api.deregister_all_hosts()
pyblish.api.register_host(host)
# get path to studio templates
templates_dir = os.getenv("PYPE_STUDIO_TEMPLATES", None)
assert templates_dir, "Missing `PYPE_STUDIO_TEMPLATES` in os.environ"
# get presets for host
presets_dir = os.path.join(templates_dir, "presets", host)
assert os.path.exists(
presets_dir), "Required path `{}` doesn't exist".format(presets_dir)
# load all available preset json files
preset_data = dict()
for file in os.listdir(presets_dir):
name, ext = os.path.splitext(file)
with open(os.path.join(presets_dir, file)) as prst:
preset_data[name] = json.load(prst)
context.data['presets'] = preset_data
assert preset_data, "No `presets` data in json file"
self.log.debug("preset_data: {}".format(preset_data))
# get current file
current_file = json_data.get("currentFile", None)
assert current_file, "No `currentFile` data in json file"
context.data["currentFile"] = Path(current_file).resolve()
# get project data from avalon
project_data = pype.get_project_data()
assert project_data, "No `project_data` data in avalon db"
context.data["projectData"] = project_data
self.log.debug("project_data: {}".format(project_data))
# get asset data from avalon and fix all paths
asset_data = pype.get_asset_data()
assert asset_data, "No `asset_data` data in avalon db"
asset_data = {k: v.replace("\\", "/") for k, v in asset_data.items()
if isinstance(v, str)}
context.data["assetData"] = asset_data
self.log.debug("asset_data: {}".format(asset_data))
self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
self.log.info("post_json_data_path is: {}".format(post_json_data_path))
# self.log.info("avalon.session is: {}".format(avalon.session))

View file

@ -89,7 +89,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context[:] if i.data['asset'] in entity['name']
i for i in self.context if i.data['asset'] in entity['name']
]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (

View file

@ -68,6 +68,9 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": filename,
# Asset dependency to wait for at least the scene file to sync.
"AssetDependency0": filepath,
# Job name, as seen in Monitor
"Name": filename,

View file

@ -1,13 +1,14 @@
"""Collect Anatomy and global anatomy data.
"""Collect global context Anatomy data.
Requires:
context -> anatomy
context -> projectEntity
context -> assetEntity
context -> username
context -> datetimeData
session -> AVALON_TASK
projectEntity, assetEntity -> collect_avalon_entities *(pyblish.api.CollectorOrder)
username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001)
datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder)
Provides:
context -> anatomy (pypeapp.Anatomy)
context -> anatomyData
"""
@ -15,45 +16,51 @@ import os
import json
from avalon import api, lib
from pypeapp import Anatomy
import pyblish.api
class CollectAnatomy(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
class CollectAnatomyContextData(pyblish.api.ContextPlugin):
"""Collect Anatomy Context data.
Example:
context.data["anatomyData"] = {
"project": {
"name": "MyProject",
"code": "myproj"
},
"asset": "AssetName",
"hierarchy": "path/to/asset",
"task": "Working",
"username": "MeDespicable",
*** OPTIONAL ***
"app": "maya" # Current application base name
+ multiple keys from `datetimeData` # see its collector
}
"""
order = pyblish.api.CollectorOrder + 0.002
label = "Collect Anatomy"
label = "Collect Anatomy Context Data"
def process(self, context):
root_path = api.registered_root()
task_name = api.Session["AVALON_TASK"]
project_entity = context.data["projectEntity"]
asset_entity = context.data["assetEntity"]
project_name = project_entity["name"]
context.data["anatomy"] = Anatomy(project_name)
self.log.info(
"Anatomy object collected for project \"{}\".".format(project_name)
)
hierarchy_items = asset_entity["data"]["parents"]
hierarchy = ""
if hierarchy_items:
hierarchy = os.path.join(*hierarchy_items)
context_data = {
"root": root_path,
"project": {
"name": project_name,
"name": project_entity["name"],
"code": project_entity["data"].get("code")
},
"asset": asset_entity["name"],
"hierarchy": hierarchy.replace("\\", "/"),
"task": task_name,
"username": context.data["user"]
}

View file

@ -28,11 +28,11 @@ from avalon import io
import pyblish.api
class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
"""Fill templates with data needed for publish"""
class CollectAnatomyInstanceData(pyblish.api.InstancePlugin):
"""Collect Instance specific Anatomy data."""
order = pyblish.api.CollectorOrder + 0.49
label = "Collect instance anatomy data"
label = "Collect Anatomy Instance data"
def process(self, instance):
# get all the stuff from the database

View file

@ -0,0 +1,32 @@
"""Collect Anatomy object.
Requires:
os.environ -> AVALON_PROJECT
Provides:
context -> anatomy (pypeapp.Anatomy)
"""
import os
from pypeapp import Anatomy
import pyblish.api
class CollectAnatomyObject(pyblish.api.ContextPlugin):
"""Collect Anatomy object into Context"""
order = pyblish.api.CollectorOrder - 0.4
label = "Collect Anatomy Object"
def process(self, context):
project_name = os.environ.get("AVALON_PROJECT")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` is not set."
"Could not initialize project's Anatomy."
)
context.data["anatomy"] = Anatomy(project_name)
self.log.info(
"Anatomy object collected for project \"{}\".".format(project_name)
)

View file

@ -15,7 +15,7 @@ import pyblish.api
class CollectAvalonEntities(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
order = pyblish.api.CollectorOrder - 0.02
order = pyblish.api.CollectorOrder - 0.1
label = "Collect Avalon Entities"
def process(self, context):
@ -51,10 +51,26 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
context.data["frameStart"] = data.get("frameStart")
context.data["frameEnd"] = data.get("frameEnd")
handles = int(data.get("handles") or 0)
context.data["handles"] = handles
context.data["handleStart"] = int(data.get("handleStart", handles))
context.data["handleEnd"] = int(data.get("handleEnd", handles))
handles = data.get("handles") or 0
handle_start = data.get("handleStart")
if handle_start is None:
handle_start = handles
self.log.info((
"Key \"handleStart\" is not set."
" Using value from \"handles\" key {}."
).format(handle_start))
handle_end = data.get("handleEnd")
if handle_end is None:
handle_end = handles
self.log.info((
"Key \"handleEnd\" is not set."
" Using value from \"handles\" key {}."
).format(handle_end))
context.data["handles"] = int(handles)
context.data["handleStart"] = int(handle_start)
context.data["handleEnd"] = int(handle_end)
frame_start_h = data.get("frameStart") - context.data["handleStart"]
frame_end_h = data.get("frameEnd") + context.data["handleEnd"]
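The fallback chain above is small enough to test in isolation; a minimal sketch of the same precedence, where explicit `handleStart`/`handleEnd` win and the legacy `handles` key only fills the gaps:

def resolve_handles(data):
    # Explicit handleStart/handleEnd win; legacy "handles" is the fallback.
    handles = data.get("handles") or 0
    handle_start = data.get("handleStart")
    handle_end = data.get("handleEnd")
    if handle_start is None:
        handle_start = handles
    if handle_end is None:
        handle_end = handles
    return int(handle_start), int(handle_end)

assert resolve_handles({"handles": 5}) == (5, 5)
assert resolve_handles({"handles": 5, "handleStart": 2}) == (2, 5)
assert resolve_handles({"handleStart": 1, "handleEnd": 3}) == (1, 3)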

View file

@ -1,11 +1,18 @@
"""Loads publishing context from json and continues in publish process.
Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
Provides:
context, instances -> All data from previous publishing process.
"""
import os
import json
import pyblish.api
from avalon import api
from pypeapp import PypeLauncher
class CollectRenderedFiles(pyblish.api.ContextPlugin):
"""
@ -13,14 +20,17 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
`PYPE_PUBLISH_DATA`. Those files _MUST_ share the same context.
"""
order = pyblish.api.CollectorOrder - 0.1
order = pyblish.api.CollectorOrder - 0.2
targets = ["filesequence"]
label = "Collect rendered frames"
_context = None
def _load_json(self, path):
assert os.path.isfile(path), ("path to json file doesn't exist")
path = path.strip('\"')
assert os.path.isfile(path), (
"Path to json file doesn't exist. \"{}\"".format(path)
)
data = None
with open(path, "r") as json_file:
try:
@ -32,7 +42,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
)
return data
def _process_path(self, data):
def _fill_staging_dir(self, data_object, anatomy):
staging_dir = data_object.get("stagingDir")
if staging_dir:
data_object["stagingDir"] = anatomy.fill_root(staging_dir)
def _process_path(self, data, anatomy):
# validate basic necessary data
data_err = "invalid json file - missing data"
required = ["asset", "user", "comment",
@ -66,14 +81,23 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
os.environ["FTRACK_SERVER"] = ftrack["FTRACK_SERVER"]
# now we can just add instances from json file and we are done
for instance in data.get("instances"):
for instance_data in data.get("instances"):
self.log.info(" - processing instance for {}".format(
instance.get("subset")))
i = self._context.create_instance(instance.get("subset"))
self.log.info("remapping paths ...")
i.data["representations"] = [PypeLauncher().path_remapper(
data=r) for r in instance.get("representations")]
i.data.update(instance)
instance_data.get("subset")))
instance = self._context.create_instance(
instance_data.get("subset")
)
self.log.info("Filling stagignDir...")
self._fill_staging_dir(instance_data, anatomy)
instance.data.update(instance_data)
representations = []
for repre_data in instance_data.get("representations") or []:
self._fill_staging_dir(repre_data, anatomy)
representations.append(repre_data)
instance.data["representations"] = representations
def process(self, context):
self._context = context
@ -82,13 +106,39 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
"Missing `PYPE_PUBLISH_DATA`")
paths = os.environ["PYPE_PUBLISH_DATA"].split(os.pathsep)
session_set = False
for path in paths:
data = self._load_json(path)
if not session_set:
self.log.info("Setting session using data from file")
api.Session.update(data.get("session"))
os.environ.update(data.get("session"))
session_set = True
assert data, "failed to load json file"
self._process_path(data)
project_name = os.environ.get("AVALON_PROJECT")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` was not found."
"Could not set project `root` which may cause issues."
)
# TODO root filling should happen after collect Anatomy
self.log.info("Getting root setting for project \"{}\"".format(
project_name
))
anatomy = context.data["anatomy"]
self.log.info("anatomy: {}".format(anatomy.roots))
try:
session_is_set = False
for path in paths:
path = anatomy.fill_root(path)
data = self._load_json(path)
assert data, "failed to load json file"
if not session_is_set:
session_data = data["session"]
remapped = anatomy.roots_obj.path_remapper(
session_data["AVALON_WORKDIR"]
)
if remapped:
session_data["AVALON_WORKDIR"] = remapped
self.log.info("Setting session using data from file")
api.Session.update(session_data)
os.environ.update(session_data)
session_is_set = True
self._process_path(data, anatomy)
except Exception as e:
self.log.error(e, exc_info=True)
raise Exception("Error") from e

View file

@ -18,7 +18,7 @@ class ExtractBurnin(pype.api.Extractor):
label = "Extract burnins"
order = pyblish.api.ExtractorOrder + 0.03
families = ["review", "burnin"]
hosts = ["nuke", "maya", "shell"]
hosts = ["nuke", "maya", "shell", "premiere"]
optional = True
def process(self, instance):

View file

@ -20,7 +20,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
label = "Extract Review"
order = pyblish.api.ExtractorOrder + 0.02
families = ["review"]
hosts = ["nuke", "maya", "shell"]
hosts = ["nuke", "maya", "shell", "premiere"]
outputs = {}
ext_filter = []

View file

@ -481,9 +481,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
def copy_file(self, src_path, dst_path):
# TODO check drives if are the same to check if cas hardlink
dst_path = self.path_root_check(dst_path)
src_path = self.path_root_check(src_path)
dirname = os.path.dirname(dst_path)
try:
@ -513,75 +510,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
shutil.copy(src_path, dst_path)
def path_root_check(self, path):
normalized_path = os.path.normpath(path)
forward_slash_path = normalized_path.replace("\\", "/")
drive, _path = os.path.splitdrive(normalized_path)
if os.path.exists(drive + "/"):
key = "drive_check{}".format(drive)
if key not in self.path_checks:
self.log.debug(
"Drive \"{}\" exist. Nothing to change.".format(drive)
)
self.path_checks.append(key)
return normalized_path
path_env_key = "PYPE_STUDIO_PROJECTS_PATH"
mount_env_key = "PYPE_STUDIO_PROJECTS_MOUNT"
missing_envs = []
if path_env_key not in os.environ:
missing_envs.append(path_env_key)
if mount_env_key not in os.environ:
missing_envs.append(mount_env_key)
if missing_envs:
key = "missing_envs"
if key not in self.path_checks:
self.path_checks.append(key)
_add_s = ""
if len(missing_envs) > 1:
_add_s = "s"
self.log.warning((
"Can't replace MOUNT drive path to UNC path due to missing"
" environment variable{}: `{}`. This may cause issues"
" during publishing process."
).format(_add_s, ", ".join(missing_envs)))
return normalized_path
unc_root = os.environ[path_env_key].replace("\\", "/")
mount_root = os.environ[mount_env_key].replace("\\", "/")
# --- Remove slashes at the end of mount and unc roots ---
while unc_root.endswith("/"):
unc_root = unc_root[:-1]
while mount_root.endswith("/"):
mount_root = mount_root[:-1]
# ---
if forward_slash_path.startswith(unc_root):
self.log.debug((
"Path already starts with UNC root: \"{}\""
).format(unc_root))
return normalized_path
if not forward_slash_path.startswith(mount_root):
self.log.warning((
"Path do not start with MOUNT root \"{}\" "
"set in environment variable \"{}\""
).format(unc_root, mount_env_key))
return normalized_path
# Replace Mount root with Unc root
path = unc_root + forward_slash_path[len(mount_root):]
return os.path.normpath(path)
def version_from_representations(self, repres):
for repre in repres:
version = io.find_one({"_id": repre["parent"]})

View file

@ -5,6 +5,7 @@ import sys
import copy
import clique
import errno
import six
from pymongo import DeleteOne, InsertOne
import pyblish.api
@ -327,6 +328,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
test_dest_files.append(
os.path.normpath(template_filled)
)
template_data["frame"] = repre_context["frame"]
self.log.debug(
"test_dest_files: {}".format(str(test_dest_files)))
@ -390,7 +392,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst_start_frame,
dst_tail
).replace("..", ".")
repre['published_path'] = self.unc_convert(dst)
repre['published_path'] = dst
else:
# Single file
@ -418,7 +420,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
instance.data["transfers"].append([src, dst])
published_files.append(dst)
repre['published_path'] = self.unc_convert(dst)
repre['published_path'] = dst
self.log.debug("__ dst: {}".format(dst))
repre["publishedFiles"] = published_files
@ -522,23 +524,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("Hardlinking file .. {} -> {}".format(src, dest))
self.hardlink_file(src, dest)
def unc_convert(self, path):
self.log.debug("> __ path: `{}`".format(path))
drive, _path = os.path.splitdrive(path)
self.log.debug("> __ drive, _path: `{}`, `{}`".format(drive, _path))
if not os.path.exists(drive + "/"):
self.log.info("Converting to unc from environments ..")
path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH")
path_mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT")
if "/" in path_mount:
path = path.replace(path_mount[0:-1], path_replace)
else:
path = path.replace(path_mount, path_replace)
return path
def copy_file(self, src, dst):
""" Copy given source to destination
@ -548,8 +533,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
Returns:
None
"""
src = self.unc_convert(src)
dst = self.unc_convert(dst)
src = os.path.normpath(src)
dst = os.path.normpath(dst)
self.log.debug("Copying file .. {} -> {}".format(src, dst))
@ -565,16 +548,18 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
# copy file with speedcopy and check that file sizes match
while True:
copyfile(src, dst)
try:
copyfile(src, dst)
except OSError as e:
self.log.critical("Cannot copy {} to {}".format(src, dst))
self.log.critical(e)
six.reraise(*sys.exc_info())
if getsize(src) == getsize(dst):
break
def hardlink_file(self, src, dst):
dirname = os.path.dirname(dst)
src = self.unc_convert(src)
dst = self.unc_convert(dst)
try:
os.makedirs(dirname)
except OSError as e:
@ -606,7 +591,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"name": subset_name,
"data": {
"families": instance.data.get('families')
},
},
"parent": asset["_id"]
}).inserted_id
@ -659,26 +644,35 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
families.append(instance_family)
families += current_families
self.log.debug("Registered root: {}".format(api.registered_root()))
# create relative source path for DB
try:
source = instance.data['source']
except KeyError:
if "source" in instance.data:
source = instance.data["source"]
else:
source = context.data["currentFile"]
source = source.replace(os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"),
api.registered_root())
relative_path = os.path.relpath(source, api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
anatomy = instance.context.data["anatomy"]
success, rootless_path = (
anatomy.roots_obj.find_root_template_from_path(source)
)
if success:
source = rootless_path
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(source))
self.log.debug("Source: {}".format(source))
version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment"),
"machine": context.data.get("machine"),
"fps": context.data.get(
"fps", instance.data.get("fps"))}
version_data = {
"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment"),
"machine": context.data.get("machine"),
"fps": context.data.get(
"fps", instance.data.get("fps")
)
}
intent_value = instance.context.data.get("intent")
if intent_value and isinstance(intent_value, dict):
@ -720,7 +714,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
matching_profiles = None
highest_value = -1
for name, filters in self.template_name_profiles:
self.log.info(self.template_name_profiles)
for name, filters in self.template_name_profiles.items():
value = 0
families = filters.get("families")
if families:
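The rootless-source handling above is the pattern this commit standardizes on: ask Anatomy which root a path lives under and store the templated form in the database. A sketch under the same assumptions, with hypothetical roots and paths:

from pypeapp import Anatomy

anatomy = Anatomy("demo_project")  # hypothetical project
source = "P:/demo_project/shots/sh010/work/sh010_comp_v003.nk"

success, rootless_path = (
    anatomy.roots_obj.find_root_template_from_path(source)
)
if success:
    # e.g. "{root[work]}/demo_project/shots/sh010/work/sh010_comp_v003.nk"
    source = rootless_path
# On failure the absolute path is kept and only a warning is logged,
# so publishing does not hard-fail on unmapped drives.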

View file

@ -1,3 +1,6 @@
# -*- coding: utf-8 -*-
"""Submit publishing job to farm."""
import os
import json
import re
@ -10,7 +13,7 @@ import pyblish.api
def _get_script():
"""Get path to the image sequence script"""
"""Get path to the image sequence script."""
try:
from pype.scripts import publish_filesequence
except Exception:
@ -20,17 +23,11 @@ def _get_script():
if module_path.endswith(".pyc"):
module_path = module_path[: -len(".pyc")] + ".py"
module_path = os.path.normpath(module_path)
mount_root = os.path.normpath(os.environ["PYPE_STUDIO_CORE_MOUNT"])
network_root = os.path.normpath(os.environ["PYPE_STUDIO_CORE_PATH"])
module_path = module_path.replace(mount_root, network_root)
return module_path
return os.path.normpath(module_path)
# Logic to retrieve latest files concerning extendFrames
def get_latest_version(asset_name, subset_name, family):
"""Retrieve latest files concerning extendFrame feature."""
# Get asset
asset_name = io.find_one(
{"type": "asset", "name": asset_name}, projection={"name": True}
@ -64,9 +61,7 @@ def get_latest_version(asset_name, subset_name, family):
def get_resources(version, extension=None):
"""
Get the files from the specific version
"""
"""Get the files from the specific version."""
query = {"type": "representation", "parent": version["_id"]}
if extension:
query["name"] = extension
@ -86,14 +81,25 @@ def get_resources(version, extension=None):
return resources
def get_resource_files(resources, frame_range, override=True):
def get_resource_files(resources, frame_range=None):
"""Get resource files at given path.
If `frame_range` is specified, frames outside of it are removed.
Arguments:
resources (list): List of resources
frame_range (list): frame range to filter by
Returns:
list of str: list of collected resources
"""
res_collections, _ = clique.assemble(resources)
assert len(res_collections) == 1, "Multiple collections found"
res_collection = res_collections[0]
# Remove any frames
if override:
if frame_range is not None:
for frame in frame_range:
if frame not in res_collection.indexes:
continue
@ -146,16 +152,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
aov_filter = {"maya": ["beauty"]}
enviro_filter = [
"PATH",
"PYTHONPATH",
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER",
"PYPE_ROOT",
"PYPE_METADATA_FILE",
"PYPE_STUDIO_PROJECTS_PATH",
"PYPE_STUDIO_PROJECTS_MOUNT",
"AVALON_PROJECT"
"AVALON_PROJECT",
"PYPE_LOG_NO_COLORS"
]
# pool used to do the publishing job
@ -177,10 +179,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
families_transfer = ["render3d", "render2d", "ftrack", "slate"]
def _submit_deadline_post_job(self, instance, job):
"""
"""Submit publish job to Deadline.
Deadline-specific code separated from :meth:`process` for the sake of
more universal code. The Muster post job is sent directly by the Muster
submitter, so this type of code isn't necessary for it.
"""
data = instance.data.copy()
subset = data["subset"]
@ -188,14 +192,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
batch=job["Props"]["Name"], subset=subset
)
metadata_filename = "{}_metadata.json".format(subset)
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)
metadata_path = os.path.normpath(metadata_path)
mount_root = os.path.normpath(os.environ["PYPE_STUDIO_PROJECTS_MOUNT"])
network_root = os.environ["PYPE_STUDIO_PROJECTS_PATH"]
metadata_path = metadata_path.replace(mount_root, network_root)
# Convert output dir to `{root}/rest/of/path/...` with Anatomy
success, rootless_path = (
self.anatomy.roots_obj.find_root_template_from_path(output_dir)
)
if not success:
# `rootless_path` is not set to `output_dir` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(output_dir))
rootless_path = output_dir
# Generate the payload for Deadline submission
payload = {
@ -222,9 +230,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Transfer the environment from the original job to this dependent
# job so they use the same environment
metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(rootless_path, metadata_filename)
environment = job["Props"].get("Env", {})
environment["PYPE_METADATA_FILE"] = metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
environment["PYPE_LOG_NO_COLORS"] = "1"
try:
environment["PYPE_PYTHON_EXE"] = os.environ["PYPE_PYTHON_EXE"]
except KeyError:
# PYPE_PYTHON_EXE not set
pass
i = 0
for index, key in enumerate(environment):
if key.upper() in self.enviro_filter:
@ -250,14 +267,17 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
raise Exception(response.text)
def _copy_extend_frames(self, instance, representation):
"""
"""Copy existing frames from latest version.
This will copy all existing frames from subset's latest version back
to render directory and rename them to what renderer is expecting.
:param instance: instance to get required data from
:type instance: pyblish.plugin.Instance
"""
Arguments:
instance (pyblish.plugin.Instance): instance to get required
data from
representation (dict): representation to operate on
"""
import speedcopy
self.log.info("Preparing to copy ...")
@ -297,9 +317,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# type
assert fn is not None, "padding string wasn't found"
# list of tuples (source, destination)
staging = representation.get("stagingDir")
staging = self.anatomy.fill_roots(staging)
resource_files.append(
(frame,
os.path.join(representation.get("stagingDir"),
os.path.join(staging,
"{}{}{}".format(pre,
fn.group("frame"),
post)))
@ -319,19 +341,20 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"Finished copying %i files" % len(resource_files))
def _create_instances_for_aov(self, instance_data, exp_files):
"""
"""Create instance for each AOV found.
This will create a new instance for every AOV it can detect in the
expected files list.
:param instance_data: skeleton data for instance (those needed) later
by collector
:type instance_data: pyblish.plugin.Instance
:param exp_files: list of expected files divided by aovs
:type exp_files: list
:returns: list of instances
:rtype: list(publish.plugin.Instance)
"""
Arguments:
instance_data (pyblish.plugin.Instance): skeleton data for instance
(those needed) later by collector
exp_files (list): list of expected files divided by aovs
Returns:
list of instances
"""
task = os.environ["AVALON_TASK"]
subset = instance_data["subset"]
instances = []
@ -355,6 +378,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
subset_name = '{}_{}'.format(group_name, aov)
staging = os.path.dirname(list(cols[0])[0])
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))
self.log.info("Creating data for: {}".format(subset_name))
@ -397,26 +430,28 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
return instances
def _get_representations(self, instance, exp_files):
"""
"""Create representations for file sequences.
This will return representations of expected files if they are not
in a hierarchy of AOVs. There should be only one sequence of files in
most cases, but if not, we create a representation for each of them.
:param instance: instance for which we are setting representations
:type instance: pyblish.plugin.Instance
:param exp_files: list of expected files
:type exp_files: list
:returns: list of representations
:rtype: list(dict)
"""
Arguments:
instance (pyblish.plugin.Instance): instance for which we are
setting representations
exp_files (list): list of expected files
Returns:
list of representations
"""
representations = []
cols, rem = clique.assemble(exp_files)
collections, remainders = clique.assemble(exp_files)
bake_render_path = instance.get("bakeRenderPath")
# create representation for every collected sequence
for c in cols:
ext = c.tail.lstrip(".")
for collection in collections:
ext = collection.tail.lstrip(".")
preview = False
# if filtered aov name is found in filename, toggle it for
# preview video rendering
@ -425,7 +460,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
for aov in self.aov_filter[app]:
if re.match(
r".+(?:\.|_)({})(?:\.|_).*".format(aov),
list(c)[0]
list(collection)[0]
):
preview = True
break
@ -434,14 +469,26 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if bake_render_path:
preview = False
staging = os.path.dirname(list(collection)[0])
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))
rep = {
"name": ext,
"ext": ext,
"files": [os.path.basename(f) for f in list(c)],
"files": [os.path.basename(f) for f in list(collection)],
"frameStart": int(instance.get("frameStartHandle")),
"frameEnd": int(instance.get("frameEndHandle")),
# If expectedFiles are absolute, we need only filenames
"stagingDir": os.path.dirname(list(c)[0]),
"stagingDir": staging,
"fps": instance.get("fps"),
"tags": ["review", "preview"] if preview else [],
}
@ -454,15 +501,28 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
self._solve_families(instance, preview)
# add remainders as representations
for r in rem:
ext = r.split(".")[-1]
for remainder in remainders:
ext = remainder.split(".")[-1]
staging = os.path.dirname(remainder)
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))
rep = {
"name": ext,
"ext": ext,
"files": os.path.basename(r),
"stagingDir": os.path.dirname(r)
"files": os.path.basename(remainder),
"stagingDir": os.path.dirname(remainder),
}
if r in bake_render_path:
if remainder in bake_render_path:
rep.update({
"fps": instance.get("fps"),
"tags": ["review", "delete"]
@ -486,7 +546,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
instance["families"] = families
def process(self, instance):
"""
"""Process plugin.
Detect type of renderfarm submission and, in case of Deadline, create
and post a dependent job. It creates a json file with metadata needed
for publishing in the render directory.
@ -497,6 +558,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
data = instance.data.copy()
context = instance.context
self.context = context
self.anatomy = instance.context.data["anatomy"]
if hasattr(instance, "_log"):
data['_log'] = instance._log
@ -556,11 +618,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
except KeyError:
source = context.data["currentFile"]
source = source.replace(
os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"), api.registered_root()
success, rootless_path = (
self.anatomy.roots_obj.find_root_template_from_path(source)
)
relative_path = os.path.relpath(source, api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
if success:
source = rootless_path
else:
# `rootless_path` is not set to `source` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues."
).format(source))
families = ["render"]
@ -611,13 +680,29 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# look into instance data if representations are not having any
# which are having tag `publish_on_farm` and include them
for r in instance.data.get("representations", []):
if "publish_on_farm" in r.get("tags"):
for repre in instance.data.get("representations", []):
staging_dir = repre.get("stagingDir")
if staging_dir:
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(
staging_dir
)
)
if success:
repre["stagingDir"] = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging_dir))
repre["stagingDir"] = staging_dir
if "publish_on_farm" in repre.get("tags"):
# create representations attribute if not there
if "representations" not in instance_skeleton_data.keys():
instance_skeleton_data["representations"] = []
instance_skeleton_data["representations"].append(r)
instance_skeleton_data["representations"].append(repre)
instances = None
assert data.get("expectedFiles"), ("Submission from old Pype version"
@ -754,12 +839,21 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
with open(metadata_path, "w") as f:
json.dump(publish_job, f, indent=4, sort_keys=True)
def _extend_frames(self, asset, subset, start, end, override):
"""
This will get latest version of asset and update frame range based
on minimum and maximum values
"""
def _extend_frames(self, asset, subset, start, end):
"""Get latest version of asset nad update frame range.
Based on minimum and maximuma values.
Arguments:
asset (str): asset name
subset (str): subset name
start (int): start frame
end (int): end frame
Returns:
(int, int): updated frame start/end
"""
# Frame comparison
prev_start = None
prev_end = None
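clique does the heavy lifting when the plugin splits expected files into sequences and single files; a minimal sketch of the `assemble` call the representation builders above rely on:

import clique

files = ["render.0001.exr", "render.0002.exr", "render.0003.exr",
         "render.mov"]
collections, remainders = clique.assemble(files)

seq = collections[0]
print(seq.format("{head}{padding}{tail}"))  # render.%04d.exr
print(sorted(seq.indexes))                  # [1, 2, 3]
print(remainders)                           # ['render.mov']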

View file

@ -122,6 +122,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
workspace = context.data["workspaceDir"]
self._rs = renderSetup.instance()
current_layer = self._rs.getVisibleRenderLayer()
maya_render_layers = {l.name(): l for l in self._rs.getRenderLayers()}
self.maya_layers = maya_render_layers
@ -157,6 +158,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
attachTo = []
if sets:
for s in sets:
if "family" not in cmds.listAttr(s):
continue
attachTo.append(
{
"version": None, # we need integrator for that
@ -303,6 +307,10 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
instance.data.update(data)
self.log.debug("data: {}".format(json.dumps(data, indent=4)))
# Restore current layer.
self.log.info("Restoring to {}".format(current_layer.name()))
self._rs.switchToLayer(current_layer)
def parse_options(self, render_globals):
"""Get all overrides with a value, skip those without
@ -397,6 +405,8 @@ class ExpectedFiles:
multipart = False
def get(self, renderer, layer):
renderSetup.instance().switchToLayerUsingLegacyName(layer)
if renderer.lower() == "arnold":
return self._get_files(ExpectedFilesArnold(layer))
elif renderer.lower() == "vray":

View file

@ -1,6 +1,17 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline.
This module takes care of submitting jobs from Maya to Deadline. It
creates the job and sets the correct environment. Its behavior is
controlled by the `DEADLINE_REST_URL` environment variable, pointing to
the Deadline Web Service, and by the `MayaSubmitDeadline.use_published`
(bool) property telling Deadline whether to use the published scene
workfile or not.
"""
import os
import json
import getpass
import re
import clique
from maya import cmds
@ -14,7 +25,7 @@ import pype.maya.lib as lib
def get_renderer_variables(renderlayer=None):
"""Retrieve the extension which has been set in the VRay settings
"""Retrieve the extension which has been set in the VRay settings.
Will return None if the current renderer is not VRay
For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which
@ -25,16 +36,21 @@ def get_renderer_variables(renderlayer=None):
Returns:
dict
"""
"""
renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])
padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
render_attrs["padding"]))
filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]
filename_0 = cmds.renderSettings(
fullPath=True,
gin="#" * int(padding),
lut=True,
layer=renderlayer or lib.get_current_renderlayer())[0]
filename_0 = filename_0.replace('_<RenderPass>', '_beauty')
prefix_attr = "defaultRenderGlobals.imageFilePrefix"
if renderer == "vray":
# Maya's renderSettings function does not return V-Ray file extension
# so we get the extension from vraySettings
@ -46,62 +62,33 @@ def get_renderer_variables(renderlayer=None):
if extension is None:
extension = "png"
filename_prefix = "<Scene>/<Scene>_<Layer>/<Layer>"
if extension == "exr (multichannel)" or extension == "exr (deep)":
extension = "exr"
prefix_attr = "vraySettings.fileNamePrefix"
elif renderer == "renderman":
prefix_attr = "rmanGlobals.imageFileFormat"
elif renderer == "redshift":
# mapping redshift extension dropdown values to strings
ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"]
extension = ext_mapping[
cmds.getAttr("redshiftOptions.imageFormat")
]
else:
# Get the extension, getAttr defaultRenderGlobals.imageFormat
# returns an index number.
filename_base = os.path.basename(filename_0)
extension = os.path.splitext(filename_base)[-1].strip(".")
filename_prefix = cmds.getAttr("defaultRenderGlobals.imageFilePrefix")
filename_prefix = cmds.getAttr(prefix_attr)
return {"ext": extension,
"filename_prefix": filename_prefix,
"padding": padding,
"filename_0": filename_0}
def preview_fname(folder, scene, layer, padding, ext):
"""Return output file path with #### for padding.
Deadline requires the path to be formatted with # in place of numbers.
For example `/path/to/render.####.png`
Args:
folder (str): The root output folder (image path)
scene (str): The scene name
layer (str): The layer name to be rendered
padding (int): The padding length
ext(str): The output file extension
Returns:
str
"""
fileprefix = cmds.getAttr("defaultRenderGlobals.imageFilePrefix")
output = fileprefix + ".{number}.{ext}"
# RenderPass is currently hardcoded to "beauty" because it's not important
# for the deadline submission, but we will need something to replace
# "<RenderPass>".
mapping = {
"<Scene>": "{scene}",
"<RenderLayer>": "{layer}",
"RenderPass": "beauty"
}
for key, value in mapping.items():
output = output.replace(key, value)
output = output.format(
scene=scene,
layer=layer,
number="#" * padding,
ext=ext
)
return os.path.join(folder, output)
class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit available render layers to Deadline
"""Submit available render layers to Deadline.
Renders are submitted to a Deadline Web Service as
supplied via the environment variable DEADLINE_REST_URL
@ -194,22 +181,22 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
scene = os.path.splitext(filename)[0]
dirname = os.path.join(workspace, "renders")
renderlayer = instance.data['setMembers'] # rs_beauty
renderlayer_name = instance.data['subset'] # beauty
# renderlayer_globals = instance.data["renderGlobals"]
# legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
deadline_user = context.data.get("deadlineUser", getpass.getuser())
jobname = "%s - %s" % (filename, instance.name)
# Get the variables depending on the renderer
render_variables = get_renderer_variables(renderlayer)
output_filename_0 = preview_fname(folder=dirname,
scene=scene,
layer=renderlayer_name,
padding=render_variables["padding"],
ext=render_variables["ext"])
filename_0 = render_variables["filename_0"]
if self.use_published:
new_scene = os.path.splitext(filename)[0]
orig_scene = os.path.splitext(
os.path.basename(context.data["currentFile"]))[0]
filename_0 = render_variables["filename_0"].replace(
orig_scene, new_scene)
output_filename_0 = filename_0
try:
# Ensure render folder exists
@ -226,6 +213,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": filename,
# Asset dependency to wait for at least the scene file to sync.
"AssetDependency0": filepath,
# Job name, as seen in Monitor
"Name": jobname,
@ -284,7 +274,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
for aov, files in exp[0].items():
col = clique.assemble(files)[0][0]
outputFile = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile # noqa: E501
OutputFilenames[expIndex] = outputFile
expIndex += 1
else:
@ -293,7 +283,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
# OutputFilenames[expIndex] = outputFile
# We need those to pass them to pype for it to set correct context
keys = [
"FTRACK_API_KEY",
@ -334,7 +323,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
raise Exception(response.text)
# Store output dir for unified publisher (filesequence)
instance.data["outputDir"] = os.path.dirname(output_filename_0)
instance.data["outputDir"] = os.path.dirname(filename_0)
instance.data["deadlineSubmissionJob"] = response.json()
def preflight_check(self, instance):
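For context, the Deadline round trip performed by the submitters in this commit boils down to a JSON POST against the Web Service; a hedged sketch using the `/api/jobs` endpoint of the Deadline Web Service REST API, with made-up job values:

import os
import requests

# Illustrative JobInfo shaped like the payloads above; values are made up.
payload = {
    "JobInfo": {
        "Plugin": "MayaBatch",
        "BatchName": "sh010_lighting_v001.ma",
        "Name": "sh010_lighting_v001.ma - renderLightingMain",
        # Asset dependency: wait for the scene file to sync.
        "AssetDependency0": "P:/demo/sh010/work/sh010_lighting_v001.ma",
    },
    "PluginInfo": {"Version": "2019"},
    "AuxFiles": [],
}

url = "{}/api/jobs".format(os.environ["DEADLINE_REST_URL"])
response = requests.post(url, json=payload)
if not response.ok:
    raise Exception(response.text)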

View file

@ -309,14 +309,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)
# replace path for UNC / network share paths, so PYPE is found
# over network. It assumes PYPE is located somewhere in
# PYPE_STUDIO_CORE_PATH
pype_root = os.environ["PYPE_ROOT"].replace(
os.path.normpath(
os.environ['PYPE_STUDIO_CORE_MOUNT']), # noqa
os.path.normpath(
os.environ['PYPE_STUDIO_CORE_PATH'])) # noqa
pype_root = os.environ["PYPE_SETUP_PATH"]
# we must provide either full path to executable or use musters own
# python named MPython.exe, residing directly in muster bin
@ -517,33 +510,25 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
for key in environment:
for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
to_process = environment[key]
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = environment[key]
elif "://" in environment[key]:
clean_path = environment[key]
elif os.pathsep not in to_process:
try:
path = environment[key]
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
if "://" in value:
clean_path = value
else:
for path in environment[key].split(os.pathsep):
valid_paths = []
for path in value.split(os.pathsep):
if not path:
continue
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep
valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')
# this should replace paths so they are pointing to network share
clean_path = clean_path.replace(
os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
os.path.normpath(environment['PYPE_STUDIO_CORE_PATH']))
if valid_paths:
clean_path = os.pathsep.join(valid_paths)
clean_environment[key] = clean_path
return clean_environment

View file

@ -48,6 +48,14 @@ class CreateWritePrerender(plugin.PypeCreator):
self.log.error(msg)
nuke.message(msg)
if len(nodes) == 0:
msg = (
"No nodes selected. Please select a single node to connect"
" to or tick off `Use selection`"
)
self.log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()

View file

@ -49,6 +49,14 @@ class CreateWriteRender(plugin.PypeCreator):
self.log.error(msg)
nuke.message(msg)
if len(nodes) == 0:
msg = (
"No nodes selected. Please select a single node to connect"
" to or tick off `Use selection`"
)
self.log.error(msg)
nuke.message(msg)
selected_node = nodes[0]
inputs = [selected_node]
outputs = selected_node.dependent()

View file

@ -237,7 +237,7 @@ class LoadSequence(api.Loader):
repr_cont = representation["context"]
file = self.fname
file = api.get_representation_path(representation)
if not file:
repr_id = representation["_id"]


@ -128,6 +128,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": script_name,
# Asset dependency: wait at least for the scene file to sync.
"AssetDependency0": script_path,
# Job name, as seen in Monitor
"Name": jobname,
@ -201,40 +204,32 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]
environment["PATH"] = os.environ["PATH"]
# environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
for key in environment:
for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
to_process = environment[key]
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = environment[key]
elif "://" in environment[key]:
clean_path = environment[key]
elif os.pathsep not in to_process:
try:
path = environment[key]
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
if "://" in value:
clean_path = value
else:
for path in environment[key].split(os.pathsep):
valid_paths = []
for path in value.split(os.pathsep):
if not path:
continue
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep
valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')
if valid_paths:
clean_path = os.pathsep.join(valid_paths)
if key == "PYTHONPATH":
clean_path = clean_path.replace('python2', 'python3')
clean_path = clean_path.replace(
os.path.normpath(
environment['PYPE_STUDIO_CORE_MOUNT']), # noqa
os.path.normpath(
environment['PYPE_STUDIO_CORE_PATH'])) # noqa
self.log.debug("clean path: {}".format(clean_path))
clean_environment[key] = clean_path
environment = clean_environment
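
Beyond the shared path cleaning, this hunk also rewrites PYTHONPATH so farm workers pick up the Python 3 variants of studio modules. The substitution is a plain substring swap; a small illustration with invented paths:

# Hypothetical PYTHONPATH rewrite, mirroring the "python2" -> "python3" swap above.
pythonpath = "/studio/pype/python2/site-packages:/studio/tools/python2/lib"
print(pythonpath.replace("python2", "python3"))
# -> /studio/pype/python3/site-packages:/studio/tools/python3/lib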


@ -159,7 +159,7 @@ class CollectReviews(api.InstancePlugin):
version_data.update({k: instance.data[k] for k in transfer_data})
if 'version' in instance.data:
version_data["version"] = instance.data[version]
version_data["version"] = instance.data["version"]
# add to data of representation
version_data.update({


@ -13,5 +13,5 @@ class CollectAudioVersion(pyblish.api.InstancePlugin):
def process(self, instance):
self.log.info('Audio: {}'.format(instance.data['name']))
instance.data['version'] = '001'
instance.data['version'] = 1
self.log.info('Audio version set to: {}'.format(instance.data['version']))


@ -1,12 +0,0 @@
import pyblish.api
class CollectContextDataPremiera(pyblish.api.ContextPlugin):
"""Collecting data from temp json sent from premiera context"""
label = "Collect Premiera Context"
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
data_path = context.data['rqst_json_data_path']
self.log.info("Context is: {}".format(data_path))


@ -19,16 +19,18 @@ class CollectFrameranges(pyblish.api.InstancePlugin):
metadata = instance.data.get('jsonData').get('metadata')
# getting important metadata for time calculation
fps = metadata['ppro.timeline.fps']
fps = float(metadata['ppro.timeline.fps'])
sec_start = metadata['ppro.clip.start']
sec_end = metadata['ppro.clip.end']
fstart = instance.data.get('fstart')
fstart = instance.data.get('frameStart')
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
self.log.debug("instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\nfstart: {}\nfend: {}\n".format(
instance.data['name'],
fps, sec_start, sec_end, fstart, fend))
instance.data['startFrame'] = fstart
instance.data['endFrame'] = fend
instance.data['frameStart'] = fstart
instance.data['frameEnd'] = fend
instance.data['handleStart'] = instance.context.data['handleStart']
instance.data['handleEnd'] = instance.context.data['handleEnd']
instance.data['fps'] = metadata['ppro.timeline.fps']
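
To make the frame-range arithmetic concrete: the clip boundaries arrive in seconds, and the inclusive end frame is the start frame plus the clip duration in frames, minus one. A worked example with invented values:

# Invented example values, not taken from any real timeline.
fps = 25.0
sec_start, sec_end = 10.0, 14.0   # the clip covers 4 seconds
frame_start = 1001
frame_end = frame_start + (sec_end * fps) - (sec_start * fps) - 1
print(frame_end)  # 1100.0 -> 100 frames inclusive (4 s * 25 fps)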


@ -26,7 +26,7 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
json_data = context.data.get("jsonData", None)
temp_context = {}
for instance in json_data['instances']:
if instance['family'] in 'projectfile':
if instance['family'] in 'workfile':
continue
in_info = {}
@ -35,10 +35,13 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
in_info['entity_type'] = 'Shot'
instance_pyblish = [
i for i in context.data["instances"] if i.data['asset'] in name][0]
i for i in context.data["instances"]
if i.data['asset'] in name][0]
in_info['custom_attributes'] = {
'fend': instance_pyblish.data['endFrame'],
'fstart': instance_pyblish.data['startFrame'],
'frameStart': instance_pyblish.data['frameStart'],
'frameEnd': instance_pyblish.data['frameEnd'],
'handleStart': instance_pyblish.data['handleStart'],
'handleEnd': instance_pyblish.data['handleEnd'],
'fps': instance_pyblish.data['fps']
}


@ -0,0 +1,83 @@
import os
import pyblish.api
class CollectClipRepresentations(pyblish.api.InstancePlugin):
"""
Collecting clip representations and thumbnails for the integrator
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Representations"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# add to representations
if not instance.data.get("representations"):
instance.data["representations"] = list()
ins_d = instance.data
staging_dir = ins_d["stagingDir"]
frame_start = ins_d["frameStart"]
frame_end = ins_d["frameEnd"]
handle_start = ins_d["handleStart"]
handle_end = ins_d["handleEnd"]
fps = ins_d["fps"]
files_list = ins_d.get("files")
if not files_list:
return
json_repr_ext = ins_d["jsonReprExt"]
json_repr_subset = ins_d["jsonReprSubset"]
if files_list:
file = next((f for f in files_list
if json_repr_subset in f), None)
else:
return
if json_repr_ext in ["mov", "mp4"]:
representation = {
"files": file,
"stagingDir": staging_dir,
"frameStart": frame_start,
"frameEnd": frame_end,
"frameStartFtrack": frame_start - handle_start,
"frameEndFtrack": frame_end - handle_end,
"step": 1,
"fps": fps,
"name": json_repr_subset,
"ext": json_repr_ext,
"tags": ["review", "delete"]
}
else:
representation = {
"files": file,
"stagingDir": staging_dir,
"step": 1,
"fps": fps,
"name": json_repr_subset,
"ext": json_repr_ext,
"tags": ["review"]
}
self.log.debug("representation: {}".format(representation))
instance.data["representations"].append(representation)
thumb = next((f for f in files_list
if "thumbnail" in f), None)
if thumb:
thumb_representation = {
'files': thumb,
'stagingDir': staging_dir,
'name': "thumbnail",
'thumbnail': True,
'ext': os.path.splitext(thumb)[-1].replace(".", "")
}
self.log.debug("representation: {}".format(thumb_representation))
instance.data["representations"].append(
thumb_representation)
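
As a rough illustration of what this collector appends for a movie clip, the resulting review representation could look like the dict below (all values invented, handles of 10 frames assumed):

# Invented example of a review representation produced by the logic above.
representation = {
    "files": "sh010_reference.mov",
    "stagingDir": "C:/temp/publish",
    "frameStart": 1001,
    "frameEnd": 1100,
    "frameStartFtrack": 991,   # frame_start - handle_start
    "frameEndFtrack": 1090,    # frame_end - handle_end, as computed above
    "step": 1,
    "fps": 25.0,
    "name": "reference",
    "ext": "mov",
    "tags": ["review", "delete"],
}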


@ -0,0 +1,31 @@
import pyblish.api
class CollectResolution(pyblish.api.InstancePlugin):
"""
Collecting clip resolution and pixel aspect from Premiere metadata
Args:
instance (obj): pyblish instance
"""
label = "Collect Clip Resolution"
order = pyblish.api.CollectorOrder
families = ['clip']
def process(self, instance):
# getting metadata from jsonData key
metadata = instance.data.get('jsonData').get('metadata')
# getting resolution and pixel aspect metadata
pixel_aspect = float(metadata['ppro.format.pixelaspect'])
res_width = metadata['ppro.format.width']
res_height = metadata['ppro.format.height']
instance.data['pixelAspect'] = pixel_aspect
instance.data['resolutionWidth'] = res_width
instance.data['resolutionHeight'] = res_height
self.log.info(f"Resolution was set to: `{res_width}x{res_height}`,"
f" and pixel aspect ration to: `{pixel_aspect}`")


@ -1,144 +0,0 @@
import pyblish.api
import os
from avalon import io, api
class IntegrateAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Integrate Assumed Destination"
order = pyblish.api.IntegratorOrder - 0.05
families = ["clip", "projectfile"]
def process(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
# template = instance.data["template"]
anatomy = instance.context.data['anatomy']
# template = anatomy.publish.path
anatomy_filled = anatomy.format(template_data)
mock_template = anatomy_filled.publish.path
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
# Force forward slashes to fix issue with software unable
# to work correctly with backslashes in specific scenarios
# (e.g. escape characters in PLN-151 V-Ray UDIM)
destination = destination.replace("\\", "/")
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
Returns:
file path (str)
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
self.log.info(subset_name)
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one(
{
"type": "project",
"name": project_name
},
projection={"config": True, "data": True}
)
template = project["config"]["template"]["publish"]
# anatomy = instance.context.data['anatomy']
asset = io.find_one({
"type": "asset",
"name": asset_name,
"parent": project["_id"]
})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset.get('silo')
subset = io.find_one({
"type": "subset",
"name": subset_name,
"parent": asset["_id"]
})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one(
{
"type": "version",
"parent": subset["_id"]
},
sort=[("name", -1)]
)
# if there is a subset there ought to be a version
if version is not None:
version_number += version["name"]
if instance.data.get('version'):
version_number = int(instance.data.get('version'))
hierarchy = asset['data']['parents']
if hierarchy:
# hierarchy = os.path.sep.join(hierarchy)
hierarchy = os.path.join(*hierarchy)
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": {"name": project_name,
"code": project['data']['code']},
"silo": silo,
"family": instance.data['family'],
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
self.log.info(template_data)
instance.data["template"] = template


@ -1,140 +0,0 @@
import pyblish.api
from avalon import io
class IntegrateHierarchyToAvalon(pyblish.api.ContextPlugin):
"""
Create entities in Avalon based on collected data from Premiere
"""
order = pyblish.api.IntegratorOrder - 0.1
label = 'Integrate Hierarchy To Avalon'
families = ['clip']
def process(self, context):
if "hierarchyContext" not in context.data:
return
self.db = io
if not self.db.Session:
self.db.install()
input_data = context.data["hierarchyContext"]
self.import_to_avalon(input_data)
def import_to_avalon(self, input_data, parent=None):
for name in input_data:
self.log.info('input_data[name]: {}'.format(input_data[name]))
entity_data = input_data[name]
entity_type = entity_data['entity_type']
data = {}
# Process project
if entity_type.lower() == 'project':
entity = self.db.find_one({'type': 'project'})
# TODO: should be in validator?
assert (entity is not None), "Didn't find project in DB"
# get data from already existing project
for key, value in entity.get('data', {}).items():
data[key] = value
self.av_project = entity
# Raise error if project or parent is not set
elif self.av_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# Else process asset
else:
entity = self.db.find_one({'type': 'asset', 'name': name})
# Create entity if doesn't exist
if entity is None:
if self.av_project['_id'] == parent['_id']:
silo = None
elif parent['silo'] is None:
silo = parent['name']
else:
silo = parent['silo']
entity = self.create_avalon_asset(name, silo)
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____1____')
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks is not None or len(tasks) > 0:
data['tasks'] = tasks
parents = []
visualParent = None
data = input_data[name]
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
entity = self.db.find_one({'type': 'asset', 'name': name})
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('____2____')
# Else get data from already existing
else:
self.log.info('entity: {}'.format(entity))
self.log.info('data: {}'.format(entity.get('data', {})))
self.log.info('________')
for key, value in entity.get('data', {}).items():
data[key] = value
data['entityType'] = entity_type
# TASKS
tasks = entity_data.get('tasks', [])
if tasks is not None or len(tasks) > 0:
data['tasks'] = tasks
parents = []
visualParent = None
# do not store project's id as visualParent (silo asset)
if self.av_project['_id'] != parent['_id']:
visualParent = parent['_id']
parents.extend(parent.get('data', {}).get('parents', []))
parents.append(parent['name'])
data['visualParent'] = visualParent
data['parents'] = parents
# CUSTOM ATTRIBUTES
for k, val in entity_data.get('custom_attributes', {}).items():
data[k] = val
# Update entity data with input data
self.db.update_many(
{'_id': entity['_id']},
{'$set': {
'data': data,
}})
if 'childs' in entity_data:
self.import_to_avalon(entity_data['childs'], entity)
def create_avalon_asset(self, name, silo):
item = {
'schema': 'avalon-core:asset-2.0',
'name': name,
'silo': silo,
'parent': self.av_project['_id'],
'type': 'asset',
'data': {}
}
entity_id = self.db.insert_one(item).inserted_id
return self.db.find_one({'_id': entity_id})


@ -1,171 +0,0 @@
import sys
import pyblish.api
import six
class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
Create entities in ftrack based on collected data from Premiere
Example of entry data:
{
"ProjectXS": {
"entity_type": "Project",
"custom_attributes": {
"fps": 24,...
},
"tasks": [
"Compositing",
"Lighting",... *task must exist as task type in project schema*
],
"childs": {
"sq01": {
"entity_type": "Sequence",
...
}
}
}
}
"""
order = pyblish.api.IntegratorOrder
label = 'Integrate Hierarchy To Ftrack'
families = ["clip"]
optional = False
def process(self, context):
self.context = context
if "hierarchyContext" not in context.data:
return
self.ft_project = None
self.session = context.data["ftrackSession"]
input_data = context.data["hierarchyContext"]
# adding ftrack types from presets
ftrack_types = context.data['ftrackTypes']
self.import_to_ftrack(input_data, ftrack_types)
def import_to_ftrack(self, input_data, ftrack_types, parent=None):
for entity_name in input_data:
entity_data = input_data[entity_name]
entity_type = entity_data['entity_type'].capitalize()
if entity_type.lower() == 'project':
query = 'Project where full_name is "{}"'.format(entity_name)
entity = self.session.query(query).one()
self.ft_project = entity
self.task_types = self.get_all_task_types(entity)
elif self.ft_project is None or parent is None:
raise AssertionError(
"Collected items are not in right order!"
)
# try to find if entity already exists
else:
query = '{} where name is "{}" and parent_id is "{}"'.format(
entity_type, entity_name, parent['id']
)
try:
entity = self.session.query(query).one()
except Exception:
entity = None
# Create entity if not exists
if entity is None:
entity = self.create_entity(
name=entity_name,
type=entity_type,
parent=parent
)
# self.log.info('entity: {}'.format(dict(entity)))
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', [])
instances = [
i for i in self.context.data["instances"] if i.data['asset'] in entity['name']]
for key in custom_attributes:
assert (key in entity['custom_attributes']), (
'Missing custom attribute')
entity['custom_attributes'][key] = custom_attributes[key]
for instance in instances:
instance.data['ftrackShotId'] = entity['id']
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
# TASKS
tasks = entity_data.get('tasks', [])
existing_tasks = []
tasks_to_create = []
for child in entity['children']:
if child.entity_type.lower() == 'task':
existing_tasks.append(child['name'])
# existing_tasks.append(child['type']['name'])
for task in tasks:
if task in existing_tasks:
print("Task {} already exists".format(task))
continue
tasks_to_create.append(task)
for task in tasks_to_create:
self.create_task(
name=task,
task_type=ftrack_types[task],
parent=entity
)
if 'childs' in entity_data:
self.import_to_ftrack(
entity_data['childs'], ftrack_types, entity)
def get_all_task_types(self, project):
tasks = {}
proj_template = project['project_schema']
temp_task_types = proj_template['_task_type_schema']['types']
for type in temp_task_types:
if type['name'] not in tasks:
tasks[type['name']] = type
return tasks
def create_task(self, name, task_type, parent):
task = self.session.create('Task', {
'name': name,
'parent': parent
})
# TODO not secured!!! - check if task_type exists
self.log.info(task_type)
self.log.info(self.task_types)
task['type'] = self.task_types[task_type]
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
return task
def create_entity(self, name, type, parent):
entity = self.session.create(type, {
'name': name,
'parent': parent
})
try:
self.session.commit()
except Exception:
tp, value, tb = sys.exc_info()
self.session.rollback()
six.reraise(tp, value, tb)
return entity