Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00
Merge pull request #1027 from pypeclub/feature/plugins_cleanup

3.0 plugins and hooks refactor

commit a16418a2bf: 93 changed files with 9 additions and 841 deletions
@@ -18,8 +18,8 @@ PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
 PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

 # Global plugin paths
-PUBLISH_PATH = os.path.join(PLUGINS_DIR, "global", "publish")
-LOAD_PATH = os.path.join(PLUGINS_DIR, "global", "load")
+PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
+LOAD_PATH = os.path.join(PLUGINS_DIR, "load")


 def import_wrapper(func):
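For orientation, a minimal sketch of how module-level constants like these are typically consumed, assuming pyblish-base's standard registration API (this diff does not show where pype actually registers them):

import os

import pyblish.api

# Post-refactor layout: plugin folders sit directly under <package>/plugins,
# with no intermediate "global" directory.
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")

# register_plugin_path() is pyblish-base public API: every plugin module
# in the folder becomes discoverable through pyblish.api.discover().
pyblish.api.register_plugin_path(PUBLISH_PATH)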
pype/hosts/standalonepublisher/__init__.py (new normal file, 0 lines)
@@ -564,7 +564,7 @@ class ApplicationLaunchContext:
         # --- END: Backwards compatibility ---

         subfolders_list = (
-            ("hooks", "global"),
+            ["hooks"],
             ("hosts", self.host_name, "hooks")
         )
         for subfolders in subfolders_list:
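The entries of subfolders_list are joined onto the package root to locate hook directories. A sketch of that resolution under the assumption that each entry is expanded with os.path.join; the root path and host name are hypothetical:

import os

pype_root = "/opt/pype/pype"  # hypothetical stand-in for the package dir
host_name = "maya"            # hypothetical host

subfolders_list = (
    ["hooks"],                      # global hooks, "global" level dropped
    ("hosts", host_name, "hooks"),  # per-host hooks
)
for subfolders in subfolders_list:
    print(os.path.join(pype_root, *subfolders))
# /opt/pype/pype/hooks
# /opt/pype/pype/hosts/maya/hooks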
@@ -41,13 +41,12 @@ def filter_pyblish_plugins(plugins):
         file = os.path.normpath(inspect.getsourcefile(plugin))
         file = os.path.normpath(file)

         # host determined from path
         host_from_file = file.split(os.path.sep)[-4:-3][0]
         plugin_kind = file.split(os.path.sep)[-2:-1][0]

-        # TODO: change after all plugins are moved one level up
-        if host_from_file == "plugins":
+        if host_from_file == "pype":
             host_from_file = "global"

         try:
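The slicing above derives the host from the plugin's file path. A standalone demonstration with a hypothetical path, showing why the check switches from "plugins" to "pype" once plugins move one level up:

import os

# Hypothetical post-refactor plugin path.
path = os.path.normpath("/repo/pype/plugins/publish/collect_foo.py")
parts = path.split(os.path.sep)

host_from_file = parts[-4:-3][0]  # "pype"   (was "plugins" pre-refactor)
plugin_kind = parts[-2:-1][0]     # "publish"

if host_from_file == "pype":
    host_from_file = "global"

print(host_from_file, plugin_kind)  # global publish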
@@ -14,7 +14,11 @@ class StandAlonePublishAction(PypeModule, ITrayAction):
         self.enabled = modules_settings[self.name]["enabled"]
         self.publish_paths = [
             os.path.join(
-                pype.PLUGINS_DIR, "standalonepublisher", "publish"
+                pype.PACKAGE_DIR,
+                "hosts",
+                "standalonepublisher",
+                "plugins",
+                "publish"
             )
         ]
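For reference, the path the new list resolves to, with a hypothetical root standing in for pype.PACKAGE_DIR:

import os

PACKAGE_DIR = "/opt/pype/pype"  # hypothetical stand-in for pype.PACKAGE_DIR

print(os.path.join(
    PACKAGE_DIR, "hosts", "standalonepublisher", "plugins", "publish"
))
# /opt/pype/pype/hosts/standalonepublisher/plugins/publish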
@@ -1,90 +0,0 @@
import os
import pyblish.api
from avalon import (
    io,
    api as avalon
)
import json
from pathlib import Path


class CollectContextDataFromAport(pyblish.api.ContextPlugin):
    """
    Collecting temp json data sent from a host context
    and path for returning json data back to the host.

    Setting avalon session into correct context

    Args:
        context (obj): pyblish context session

    """

    label = "AdobeCommunicator Collect Context"
    order = pyblish.api.CollectorOrder - 0.49

    def process(self, context):
        self.log.info(
            "registered_hosts: `{}`".format(pyblish.api.registered_hosts()))
        io.install()
        # get json paths from data
        input_json_path = os.environ.get("AC_PUBLISH_INPATH")
        output_json_path = os.environ.get("AC_PUBLISH_OUTPATH")

        rqst_json_data_path = Path(input_json_path)
        post_json_data_path = Path(output_json_path)

        context.data['post_json_data_path'] = str(post_json_data_path)

        # get avalon session data and convert \ to /
        _S = avalon.session

        asset = _S["AVALON_ASSET"]
        workdir = Path(_S["AVALON_WORKDIR"]).resolve()
        _S["AVALON_WORKDIR"] = str(workdir)

        context.data["avalonSession"] = _S
        self.log.info(f"__ avalonSession: `{_S}`")

        # get staging directory from received path to json
        context.data["stagingDir"] = post_json_data_path.parent

        # get data from received json file
        with rqst_json_data_path.open(mode='r') as f:
            context.data["jsonData"] = json_data = json.load(f)
        assert json_data, "No `data` in json file"

        # get and check host type
        host = json_data.get("host", None)
        host_version = json_data.get("hostVersion", None)
        assert host, "No `host` data in json file"
        assert host_version, "No `hostVersion` data in json file"
        context.data["host"] = _S["AVALON_APP"] = host
        context.data["hostVersion"] = \
            _S["AVALON_APP_VERSION"] = host_version

        # get current file
        current_file = json_data.get("currentFile", None)
        assert current_file, "No `currentFile` data in json file"
        context.data["currentFile"] = str(Path(current_file).resolve())

        # get project data from avalon
        project_data = io.find_one({'type': 'project'})
        assert project_data, "No `project_data` data in avalon db"
        context.data["projectData"] = project_data
        self.log.debug("project_data: {}".format(project_data))

        # get asset data from avalon and fix all paths
        asset_data = io.find_one({
            "type": 'asset',
            "name": asset
        })["data"]
        assert asset_data, "No `asset_data` data in avalon db"

        context.data["assetData"] = asset_data

        self.log.debug("asset_data: {}".format(asset_data))
        self.log.info("rqst_json_data_path is: {}".format(rqst_json_data_path))
        self.log.info("post_json_data_path is: {}".format(post_json_data_path))

        # self.log.info("avalon.session is: {}".format(avalon.session))
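The deleted collector above communicates with the host through two JSON files addressed by the AC_PUBLISH_INPATH and AC_PUBLISH_OUTPATH environment variables. A self-contained sketch of that handshake; the paths and payload values are hypothetical, shaped only by the keys the plugin asserts on:

import json
import os
from pathlib import Path
from tempfile import gettempdir

# Hypothetical request/response paths; the real ones come from the host.
in_path = Path(gettempdir()) / "ac_publish_in.json"
out_path = Path(gettempdir()) / "ac_publish_out.json"

# Host side: write the request payload the collector expects.
in_path.write_text(json.dumps({
    "host": "premiere",                              # hypothetical
    "hostVersion": "2020",                           # hypothetical
    "currentFile": "/projects/demo/edit_v001.pproj"  # hypothetical
}))
os.environ["AC_PUBLISH_INPATH"] = str(in_path)
os.environ["AC_PUBLISH_OUTPATH"] = str(out_path)

# Publisher side: read it back, as the deleted plugin did.
with Path(os.environ["AC_PUBLISH_INPATH"]).open(mode='r') as f:
    json_data = json.load(f)
assert json_data.get("host"), "No `host` data in json file"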
@@ -1,232 +0,0 @@
import os
import pyblish.api


class CollectInstancesFromJson(pyblish.api.ContextPlugin):
    """
    Collecting temp json data sent from a host context
    and path for returning json data back to the host.

    Setting avalon session into correct context

    Args:
        context (obj): pyblish context session

    """

    label = "Collect instances from JSON"
    order = pyblish.api.CollectorOrder - 0.48

    def process(self, context):

        _S = context.data["avalonSession"]
        asset = _S["AVALON_ASSET"]
        task = _S["AVALON_TASK"]
        host = _S["AVALON_APP"]

        json_data = context.data.get("jsonData", None)
        assert json_data, "No `json_data` data in json file"

        instances_data = json_data.get("instances", None)
        assert instances_data, "No `instance` data in json file"

        staging_dir = json_data.get("stagingDir", None)
        assert staging_dir, "No `stagingDir` path in json file"

        host = context.data["host"]
        presets = context.data["presets"][host]

        rules_tasks = presets["rules_tasks"]

        asset_default = presets["asset_default"]
        assert asset_default, ("No `asset_default` data in"
                               " `/presets/[host]/asset_default.json` file")

        # get frame start > first try from asset data
        frame_start = context.data["assetData"].get("frameStart", None)
        if not frame_start:
            self.log.debug("frame_start not on any parent entity")
            # get frame start > second try from parent data
            frame_start = asset_default["frameStart"]

        assert frame_start, (
            "No `frame_start` data found, please set `fstart` on asset")
        self.log.debug("frame_start: `{}`".format(frame_start))

        # get handles > first try from asset data
        handle_start = context.data["assetData"].get("handleStart", None)
        handle_end = context.data["assetData"].get("handleEnd", None)
        if (handle_start is None) or (handle_end is None):
            # get handles > second try from parent data
            handle_start = asset_default.get("handleStart", None)
            handle_end = asset_default.get("handleEnd", None)

        assert (
            (handle_start is not None) or (
                handle_end is not None)), (
            "No `handle_start, handle_end` data found")

        instances = []

        current_file = os.path.basename(context.data.get("currentFile"))
        name, ext = os.path.splitext(current_file)

        # get current file host
        family = "workfile"
        subset_name = "{0}{1}".format(task, 'Default')
        instance_name = "{0}_{1}_{2}".format(name,
                                             family,
                                             subset_name)
        # Set label
        label = "{0} - {1}".format(name, task)

        # get project file instance Data
        wf_instance = next((inst for inst in instances_data
                            if inst.get("family", None) in 'workfile'), None)

        if wf_instance:
            self.log.debug('wf_instance: {}'.format(wf_instance))

            version = int(wf_instance.get("version", None))
            # get working file into instance for publishing
            instance = context.create_instance(instance_name)
            instance.data.update(wf_instance)

            instance.data.update({
                "subset": subset_name,
                "stagingDir": staging_dir,
                "task": task,
                "representations": [{
                    "files": current_file,
                    'stagingDir': staging_dir,
                    'name': "projectfile",
                    'ext': ext[1:]
                }],
                "host": host,
                "asset": asset,
                "label": label,
                "name": name,
                "family": family,
                "families": ["ftrack"],
                "publish": True,
                "version": version
            })
            instances.append(instance)

        for inst in instances_data:
            # for key, value in inst.items():
            #     self.log.debug('instance[key]: {}'.format(key))
            #
            version = int(inst.get("version", None))
            assert version, "No `version` string in json file"

            name = asset = inst.get("name", None)
            assert name, "No `name` key in json_data.instance: {}".format(inst)

            family = inst.get("family", None)
            assert family, "No `family` key in json_data.instance: {}".format(
                inst)

            if family in 'workfile':
                continue

            files_list = inst.get("files", None)
            assert files_list, "`files` are empty in json file"

            hierarchy = inst.get("hierarchy", None)
            assert hierarchy, f"No `hierarchy` data in json file for {name}"

            parents = inst.get("parents", None)
            assert parents, "No `parents` data in json file"

            tags = inst.get("tags", None)
            if tags:
                tasks = [t["task"] for t in tags
                         if t.get("task")]
            else:
                tasks = rules_tasks["defaultTasks"]
            self.log.debug("tasks: `{}`".format(tasks))

            subset_lst = []
            subset_dict = {}
            for task in tasks:
                # create list of tasks for creation
                if not inst.get('tasks', None):
                    inst['tasks'] = list()

                # append task into list for later hierarchy creation
                if task not in inst['tasks']:
                    inst['tasks'].append(task)

                subsets = rules_tasks["taskToSubsets"][task]
                for sub in subsets:
                    self.log.debug(sub)
                    try:
                        isinstance(subset_dict[sub], list)
                    except Exception:
                        subset_dict[sub] = list()

                    subset_dict[sub].append(task)

                subset_lst.extend([s for s in subsets if s not in subset_lst])

            for subset in subset_lst:
                if inst["subsetToRepresentations"].get(subset, None):
                    repr = inst["subsetToRepresentations"][subset]
                    ext = repr['representation']
                else:
                    continue
                family = inst["family"]
                # skip if thumbnail is in the name of the subset
                if "thumbnail" in subset:
                    continue
                elif "audio" in subset:
                    family = subset
                    subset_name = "{0}{1}".format(subset, "Main")
                elif "reference" in subset:
                    family = "review"
                    subset_name = "{0}{1}".format(family, "Reference")
                else:
                    subset_name = "{0}{1}".format(subset, 'Default')

                # create unique subset's name
                name = "{0}_{1}_{2}".format(asset,
                                            inst["family"],
                                            subset_name)

                instance = context.create_instance(name)
                files = [f for f in files_list
                         if subset in f or "thumbnail" in f]

                instance.data.update({
                    "subset": subset_name,
                    "stagingDir": staging_dir,
                    "tasks": subset_dict[subset],
                    "frameStart": frame_start,
                    "handleStart": handle_start,
                    "handleEnd": handle_end,
                    "asset": asset,
                    "hierarchy": hierarchy,
                    "parents": parents,
                    "files": files,
                    "label": "{0} - {1}".format(
                        asset, subset_name),
                    "name": name,
                    "family": family,
                    "families": [subset, inst["family"], 'ftrack'],
                    "jsonData": inst,
                    "jsonReprSubset": subset,
                    "jsonReprExt": ext,
                    "publish": True,
                    "version": version})
                self.log.info(
                    "collected instance: {}".format(instance.data))
                instances.append(instance)

        context.data["instances"] = instances

        self.log.debug("context: {}".format(context))

    def sort_by_task(self, instance):
        """Sort by task"""
        return instance.data.get("task", instance.data.get("task"))
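The densest part of the deleted collector is the fan-out from tasks to subsets through the rules_tasks preset. A worked sketch of that mapping with hypothetical preset data (setdefault replaces the original's try/except bookkeeping; the result is the same):

rules_tasks = {  # hypothetical, shaped like presets[host]["rules_tasks"]
    "defaultTasks": ["edit"],
    "taskToSubsets": {
        "compositing": ["plate", "reference"],
        "edit": ["reference"],
    },
}

tasks = ["compositing", "edit"]
subset_lst = []
subset_dict = {}
for task in tasks:
    subsets = rules_tasks["taskToSubsets"][task]
    for sub in subsets:
        # remember which tasks requested each subset
        subset_dict.setdefault(sub, []).append(task)
    subset_lst.extend([s for s in subsets if s not in subset_lst])

print(subset_lst)   # ['plate', 'reference']
print(subset_dict)  # {'plate': ['compositing'], 'reference': ['compositing', 'edit']}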
@@ -1,104 +0,0 @@

import json
import clique
import pyblish.api
from pype.api import Anatomy


class ExtractJSON(pyblish.api.ContextPlugin):
    """ Extract all instances to a serialized json file. """

    order = pyblish.api.IntegratorOrder
    label = "Extract to JSON"

    def process(self, context):
        json_path = context.data['post_json_data_path']

        data = dict(self.serialize(context.data()))

        # instances_data = []
        # for instance in context:
        #
        #     iData = {}
        #     for key, value in instance.data.items():
        #         if isinstance(value, clique.Collection):
        #             value = value.format()
        #
        #         try:
        #             json.dumps(value)
        #             iData[key] = value
        #         except KeyError:
        #             msg = "\"{0}\"".format(value)
        #             msg += " in instance.data[\"{0}\"]".format(key)
        #             msg += " could not be serialized."
        #             self.log.debug(msg)
        #
        #     instances_data.append(iData)
        #
        # data["instances"] = instances_data

        with open(json_path, "w") as outfile:
            outfile.write(json.dumps(data, indent=4, sort_keys=True))

    def serialize(self, data):
        """
        Convert all nested content to serialized objects

        Args:
            data (dict): nested data

        Returns:
            dict: nested data
        """

        def encoding_obj(value):
            try:
                value = getattr(value, '__dict__', value)
            except Exception:
                pass
            return value

        # self.log.info("1: {}".format(data))

        if isinstance(data, Anatomy):
            return

        if not isinstance(data, dict):
            # self.log.info("2: {}".format(data))
            return data

        for key, value in data.items():
            if key in ["records", "instances", "results"]:
                # escape all record objects
                data[key] = None
                continue

            if hasattr(value, '__module__'):
                # only deals with module objects
                if "plugins" in value.__module__:
                    # only dealing with plugin objects
                    data[key] = str(value.__module__)
                else:
                    if ".lib." in value.__module__:
                        # will allow only anatomy dict
                        data[key] = self.serialize(value)
                    else:
                        data[key] = None
                    continue
                continue

            if isinstance(value, dict):
                # loops if dictionary
                data[key] = self.serialize(value)

            if isinstance(value, Anatomy):
                continue

            if isinstance(value, (list, tuple)):
                # loops if list or tuple
                for i, item in enumerate(value):
                    value[i] = self.serialize(item)
                data[key] = value

            data[key] = encoding_obj(value)

        return data
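For contrast with the hand-rolled recursive serializer above: json.dumps accepts a default hook that fires only for values the encoder cannot handle, which covers much of the same ground. A sketch, not what the deleted plugin did:

import json

def fallback(value):
    # Called only for objects json cannot encode natively; fall back to
    # the instance __dict__ when present, else a string representation.
    return getattr(value, "__dict__", str(value))

data = {"label": "shot010", "plugin": object()}  # hypothetical payload
print(json.dumps(data, indent=4, sort_keys=True, default=fallback))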
@@ -1,45 +0,0 @@
from avalon import api, lib

from pype.api import Logger

log = Logger().get_logger(__name__)


class AssetCreator(api.Action):

    name = "asset_creator"
    label = "Asset Creator"
    icon = "plus-square"
    order = 250

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        compatible = True

        # Check required modules.
        module_names = [
            "ftrack_api", "ftrack_api_old", "pype.tools.assetcreator"
        ]
        for name in module_names:
            try:
                __import__(name)
            except ImportError:
                compatible = False

        # Check session environment.
        if "AVALON_PROJECT" not in session:
            compatible = False

        return compatible

    def process(self, session, **kwargs):
        asset = ''
        if 'AVALON_ASSET' in session:
            asset = session['AVALON_ASSET']
        return lib.launch(
            executable="python",
            args=[
                "-u", "-m", "pype.tools.assetcreator",
                session['AVALON_PROJECT'], asset
            ]
        )
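Actions like this are filtered by the avalon launcher through is_compatible. A minimal sketch of that selection, assuming a plain session dict (this is not the launcher's actual code):

# Hypothetical session as the launcher would pass it.
session = {"AVALON_PROJECT": "demo", "AVALON_ASSET": "sh010"}

actions = [AssetCreator()]
visible = [action for action in actions if action.is_compatible(session)]
print([action.label for action in visible])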
@@ -1,38 +0,0 @@
import os
from avalon import api, lib


class FusionRenderNode(api.Action):

    name = "fusionrendernode9"
    label = "F9 Render Node"
    icon = "object-group"
    order = 997

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        if "AVALON_PROJECT" in session:
            return False
        return True

    def process(self, session, **kwargs):
        """Implement the behavior for when the action is triggered

        Args:
            session (dict): environment dictionary

        Returns:
            Popen instance of newly spawned process

        """

        # Update environment with session
        env = os.environ.copy()
        env.update(session)

        # Get executable by name
        app = lib.get_application(self.name)
        env.update(app["environment"])
        executable = lib.which(app["executable"])

        return lib.launch(executable=executable, args=[], environment=env)
@@ -1,44 +0,0 @@
import os

from avalon import api, lib


class VrayRenderSlave(api.Action):

    name = "vrayrenderslave"
    label = "V-Ray Slave"
    icon = "object-group"
    order = 996

    def is_compatible(self, session):
        """Return whether the action is compatible with the session"""
        if "AVALON_PROJECT" in session:
            return False
        return True

    def process(self, session, **kwargs):
        """Implement the behavior for when the action is triggered

        Args:
            session (dict): environment dictionary

        Returns:
            Popen instance of newly spawned process

        """

        # Update environment with session
        env = os.environ.copy()
        env.update(session)

        # Get executable by name
        app = lib.get_application(self.name)
        env.update(app["environment"])
        executable = lib.which(app["executable"])

        # Run as server
        arguments = ["-server", "-portNumber=20207"]

        return lib.launch(executable=executable,
                          args=arguments,
                          environment=env)
@@ -1,17 +0,0 @@
import pyblish.api


class CollectAudioVersion(pyblish.api.InstancePlugin):
    """Hard-set the version of audio instances to 1."""

    label = "Collect Audio Version"
    order = pyblish.api.CollectorOrder
    families = ['audio']

    def process(self, instance):
        self.log.info('Audio: {}'.format(instance.data['name']))
        instance.data['version'] = 1
        self.log.info('Audio version set to: {}'.format(instance.data['version']))
@@ -1,36 +0,0 @@
import pyblish.api


class CollectFrameranges(pyblish.api.InstancePlugin):
    """
    Collecting frameranges needed for ftrack integration

    Args:
        context (obj): pyblish context session

    """

    label = "Collect Clip Frameranges"
    order = pyblish.api.CollectorOrder - 0.01
    families = ['clip']

    def process(self, instance):
        # getting metadata from jsonData key
        metadata = instance.data.get('jsonData').get('metadata')

        # getting important metadata for time calculation
        fps = float(metadata['ppro.timeline.fps'])
        sec_start = metadata['ppro.clip.start']
        sec_end = metadata['ppro.clip.end']
        fstart = instance.data.get('frameStart')
        fend = fstart + (sec_end * fps) - (sec_start * fps) - 1

        self.log.debug(
            "instance: {}, fps: {}\nsec_start: {}\nsec_end: {}\n"
            "fstart: {}\nfend: {}\n".format(
                instance.data['name'],
                fps, sec_start, sec_end, fstart, fend))

        instance.data['frameStart'] = fstart
        instance.data['frameEnd'] = fend
        instance.data['handleStart'] = instance.context.data['handleStart']
        instance.data['handleEnd'] = instance.context.data['handleEnd']
        instance.data['fps'] = metadata['ppro.timeline.fps']
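The fend arithmetic converts the clip's in/out points from seconds to an inclusive frame range. A worked example with hypothetical round numbers:

fps = 25.0
sec_start = 10.0   # clip starts 10 s into the timeline source
sec_end = 14.0     # and ends at 14 s
fstart = 1001      # frameStart collected earlier

# 4 seconds at 25 fps is 100 frames; inclusive ranges need the -1.
fend = fstart + (sec_end * fps) - (sec_start * fps) - 1
print(fend)  # 1100.0 -> frames 1001..1100, i.e. 100 frames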
@@ -1,75 +0,0 @@
import pyblish.api
from avalon import api


class CollectHierarchyContext(pyblish.api.ContextPlugin):
    """Collecting hierarchy context from `parents` and `hierarchy` data
    present in `clip` family instances coming from the request json data file.

    It will add `hierarchyContext` into the context for integrate
    plugins to be able to create needed parents for the context if they
    don't exist yet.
    """

    label = "Collect Hierarchy Context"
    order = pyblish.api.CollectorOrder + 0.1

    def update_dict(self, ex_dict, new_dict):
        for key in ex_dict:
            if key in new_dict and isinstance(ex_dict[key], dict):
                new_dict[key] = self.update_dict(ex_dict[key], new_dict[key])
            else:
                new_dict[key] = ex_dict[key]
        return new_dict

    def process(self, context):
        json_data = context.data.get("jsonData", None)
        temp_context = {}
        for instance in json_data['instances']:
            if instance['family'] in 'workfile':
                continue

            in_info = {}
            name = instance['name']
            # suppose that all instances are Shots
            in_info['entity_type'] = 'Shot'

            instance_pyblish = [
                i for i in context.data["instances"]
                if i.data['asset'] in name][0]
            in_info['custom_attributes'] = {
                'frameStart': instance_pyblish.data['frameStart'],
                'frameEnd': instance_pyblish.data['frameEnd'],
                'handleStart': instance_pyblish.data['handleStart'],
                'handleEnd': instance_pyblish.data['handleEnd'],
                'fps': instance_pyblish.data['fps']
            }

            in_info['tasks'] = instance['tasks']

            parents = instance.get('parents', [])

            actual = {name: in_info}

            for parent in reversed(parents):
                next_dict = {}
                parent_name = parent["entityName"]
                next_dict[parent_name] = {}
                next_dict[parent_name]["entity_type"] = parent["entityType"]
                next_dict[parent_name]["childs"] = actual
                actual = next_dict

            temp_context = self.update_dict(temp_context, actual)
            self.log.debug(temp_context)

        # TODO: 100% sure way of getting the project! Will it be Name or Code?
        project_name = api.Session["AVALON_PROJECT"]
        final_context = {}
        final_context[project_name] = {}
        final_context[project_name]['entity_type'] = 'Project'
        final_context[project_name]['childs'] = temp_context

        # adding hierarchy context to the context
        context.data["hierarchyContext"] = final_context
        self.log.debug("context.data[hierarchyContext] is: {}".format(
            context.data["hierarchyContext"]))
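The parent folding above wraps the shot dictionary one level per parent, innermost first. A worked sketch with hypothetical entities showing the shape that update_dict later merges:

name = "sh010"
in_info = {"entity_type": "Shot", "tasks": ["compositing"]}
parents = [
    {"entityName": "ep01", "entityType": "Episode"},
    {"entityName": "sq01", "entityType": "Sequence"},
]

actual = {name: in_info}
for parent in reversed(parents):
    actual = {
        parent["entityName"]: {
            "entity_type": parent["entityType"],
            "childs": actual,  # key name kept from the plugin above
        }
    }

# {'ep01': {'entity_type': 'Episode', 'childs':
#     {'sq01': {'entity_type': 'Sequence', 'childs':
#         {'sh010': {'entity_type': 'Shot', 'tasks': ['compositing']}}}}}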
@@ -1,83 +0,0 @@
import os
import pyblish.api


class CollectClipRepresentations(pyblish.api.InstancePlugin):
    """
    Collecting clip representations needed for ftrack integration

    Args:
        context (obj): pyblish context session

    """

    label = "Collect Clip Representations"
    order = pyblish.api.CollectorOrder + 0.1
    families = ['clip']

    def process(self, instance):
        # add to representations
        if not instance.data.get("representations"):
            instance.data["representations"] = list()

        ins_d = instance.data
        staging_dir = ins_d["stagingDir"]
        frame_start = ins_d["frameStart"]
        frame_end = ins_d["frameEnd"]
        handle_start = ins_d["handleStart"]
        handle_end = ins_d["handleEnd"]
        fps = ins_d["fps"]
        files_list = ins_d.get("files")

        if not files_list:
            return

        json_repr_ext = ins_d["jsonReprExt"]
        json_repr_subset = ins_d["jsonReprSubset"]

        file = next((f for f in files_list
                     if json_repr_subset in f), None)

        if json_repr_ext in ["mov", "mp4"]:
            representation = {
                "files": file,
                "stagingDir": staging_dir,
                "frameStart": frame_start,
                "frameEnd": frame_end,
                "frameStartFtrack": frame_start - handle_start,
                "frameEndFtrack": frame_end - handle_end,
                "step": 1,
                "fps": fps,
                "name": json_repr_subset,
                "ext": json_repr_ext,
                "tags": ["review", "passing", "ftrackreview"]
            }
        else:
            representation = {
                "files": file,
                "stagingDir": staging_dir,
                "step": 1,
                "fps": fps,
                "name": json_repr_subset,
                "ext": json_repr_ext,
                "tags": ["review"]
            }
        self.log.debug("representation: {}".format(representation))
        instance.data["representations"].append(representation)

        thumb = next((f for f in files_list
                      if "thumbnail" in f), None)
        if thumb:
            thumb_representation = {
                'files': thumb,
                'stagingDir': staging_dir,
                'name': "thumbnail",
                'thumbnail': True,
                'ext': os.path.splitext(thumb)[-1].replace(".", "")
            }
            self.log.debug("representation: {}".format(thumb_representation))
            instance.data["representations"].append(
                thumb_representation)
@@ -1,31 +0,0 @@
import pyblish.api


class CollectResolution(pyblish.api.InstancePlugin):
    """
    Collecting resolution and pixel aspect needed for ftrack integration

    Args:
        context (obj): pyblish context session

    """

    label = "Collect Clip Resolution"
    order = pyblish.api.CollectorOrder
    families = ['clip']

    def process(self, instance):
        # getting metadata from jsonData key
        metadata = instance.data.get('jsonData').get('metadata')

        # getting important format metadata
        pixel_aspect = float(metadata['ppro.format.pixelaspect'])
        res_width = metadata['ppro.format.width']
        res_height = metadata['ppro.format.height']

        instance.data['pixelAspect'] = pixel_aspect
        instance.data['resolutionWidth'] = res_width
        instance.data['resolutionHeight'] = res_height

        self.log.info(f"Resolution was set to: `{res_width}x{res_height}`,"
                      f" and pixel aspect ratio to: `{pixel_aspect}`")
@@ -1,21 +0,0 @@
import pyblish.api
import os


class IntegrateCleanThumbs(pyblish.api.InstancePlugin):
    """
    Cleaning up thumbnail files after they have been integrated
    """

    order = pyblish.api.IntegratorOrder + 9
    label = 'Clean thumbnail files'
    families = ["clip"]
    optional = True
    active = True

    def process(self, instance):
        remove_file = [tt for t in instance.data['transfers']
                       for tt in t if 'jpg' in tt if 'temp' not in tt.lower()]
        if len(remove_file) == 1:
            os.remove(remove_file[0])
            self.log.info('Thumbnail image was erased')
@@ -1,19 +0,0 @@
import pyblish.api


class IntegrateWorkfileVersion(pyblish.api.InstancePlugin):
    """
    Desynchronizes instance versioning from the actual version of the
    work file

    """

    order = pyblish.api.IntegratorOrder - 0.15
    label = 'Do not synchronize workfile version'
    families = ["clip"]
    optional = True
    active = False

    def process(self, instance):
        if instance.data['version']:
            del instance.data['version']
            self.log.info('Instance version was removed')