Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 12:54:40 +01:00)

Merge branch 'develop' into feature/PYPE-95-nks-load-subset-to-timeline

commit b4e1d5cdd8
51 changed files with 526 additions and 1162 deletions
@@ -9,7 +9,7 @@ from pypeapp import config
 import logging
 log = logging.getLogger(__name__)
 
-__version__ = "2.5.0"
+__version__ = "2.6.0"
 
 PROJECT_PLUGINS_PATH = os.environ.get("PYPE_PROJECT_PLUGINS")
 PACKAGE_DIR = os.path.dirname(__file__)
@@ -167,8 +167,11 @@ class DeleteOldVersions(BaseAction):
         asset_versions_by_parent_id = collections.defaultdict(list)
         subset_names_by_asset_name = collections.defaultdict(list)
 
+        ftrack_assets_by_name = {}
         for entity in entities:
-            parent_ent = entity["asset"]["parent"]
+            ftrack_asset = entity["asset"]
+
+            parent_ent = ftrack_asset["parent"]
             parent_ftrack_id = parent_ent["id"]
             parent_name = parent_ent["name"]

@@ -183,9 +186,12 @@ class DeleteOldVersions(BaseAction):
             project = parent_ent["project"]
 
-            subset_name = entity["asset"]["name"]
+            # Collect subset names per asset
+            subset_name = ftrack_asset["name"]
             subset_names_by_asset_name[parent_name].append(subset_name)
 
+            if subset_name not in ftrack_assets_by_name:
+                ftrack_assets_by_name[subset_name] = ftrack_asset
+
         # Set Mongo collection
         project_name = project["full_name"]
         self.dbcon.Session["AVALON_PROJECT"] = project_name

@@ -236,7 +242,6 @@ class DeleteOldVersions(BaseAction):
         def sort_func(ent):
             return int(ent["name"])
 
         last_versions_by_parent = collections.defaultdict(list)
         all_last_versions = []
         for parent_id, _versions in versions_by_parent.items():
             for idx, version in enumerate(

@@ -244,7 +249,6 @@ class DeleteOldVersions(BaseAction):
             ):
                 if idx >= versions_count:
                     break
                 last_versions_by_parent[parent_id].append(version)
                 all_last_versions.append(version)
 
         self.log.debug("Collected versions ({})".format(len(versions)))

@@ -253,6 +257,11 @@ class DeleteOldVersions(BaseAction):
         for version in all_last_versions:
             versions.remove(version)
 
+        # Update versions_by_parent without filtered versions
+        versions_by_parent = collections.defaultdict(list)
+        for ent in versions:
+            versions_by_parent[ent["parent"]].append(ent)
+
         # Filter already deleted versions
         versions_to_pop = []
         for version in versions:

@@ -361,6 +370,47 @@ class DeleteOldVersions(BaseAction):
 
         self.dbcon.uninstall()
 
+        # Set attribute `is_published` to `False` on ftrack AssetVersions
+        for subset_id, _versions in versions_by_parent.items():
+            subset_name = None
+            for subset in subsets:
+                if subset["_id"] == subset_id:
+                    subset_name = subset["name"]
+                    break
+
+            if subset_name is None:
+                self.log.warning(
+                    "Subset with ID `{}` was not found.".format(str(subset_id))
+                )
+                continue
+
+            ftrack_asset = ftrack_assets_by_name.get(subset_name)
+            if not ftrack_asset:
+                self.log.warning((
+                    "Could not find Ftrack asset with name `{}`"
+                ).format(subset_name))
+                continue
+
+            version_numbers = [int(ver["name"]) for ver in _versions]
+            for version in ftrack_asset["versions"]:
+                if int(version["version"]) in version_numbers:
+                    version["is_published"] = False
+
+        try:
+            session.commit()
+
+        except Exception:
+            msg = (
+                "Could not set `is_published` attribute to `False`"
+                " for selected AssetVersions."
+            )
+            self.log.warning(msg, exc_info=True)
+
+            return {
+                "success": False,
+                "message": msg
+            }
+
         return True
 
     def delete_whole_dir_paths(self, dir_paths):
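Taken together, the hunks above make DeleteOldVersions remember each entity's ftrack asset in ftrack_assets_by_name, rebuild versions_by_parent after the newest versions are filtered out, and finally flag the deleted ftrack AssetVersions as unpublished. A minimal, runnable sketch of the keep-N-latest grouping idea only — the dict shapes and the keep count here are illustrative, not the action's real API:

import collections

def split_latest(versions, keep=2):
    # Group version docs by parent, then split off the `keep` newest per group.
    by_parent = collections.defaultdict(list)
    for version in versions:
        by_parent[version["parent"]].append(version)

    kept, deletable = [], []
    for parent_id, group in by_parent.items():
        group.sort(key=lambda ent: int(ent["name"]), reverse=True)
        kept.extend(group[:keep])
        deletable.extend(group[keep:])
    return kept, deletable

# Example: versions 1-3 of one subset; keep the two newest, delete version 1.
versions = [
    {"parent": "subsetA", "name": "1"},
    {"parent": "subsetA", "name": "2"},
    {"parent": "subsetA", "name": "3"},
]
kept, deletable = split_latest(versions, keep=2)
print([v["name"] for v in kept])       # ['3', '2']
print([v["name"] for v in deletable])  # ['1']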
@@ -26,11 +26,7 @@ class SyncToAvalonEvent(BaseEvent):
 
     dbcon = DbConnector()
 
-    ignore_entTypes = [
-        "socialfeed", "socialnotification", "note",
-        "assetversion", "job", "user", "reviewsessionobject", "timer",
-        "timelog", "auth_userrole", "appointment", "notelabellink"
-    ]
+    interest_entTypes = ["show", "task"]
     ignore_ent_types = ["Milestone"]
     ignore_keys = ["statusid", "thumbid"]

@@ -137,9 +133,10 @@ class SyncToAvalonEvent(BaseEvent):
         if self._avalon_ents_by_id is None:
             self._avalon_ents_by_id = {}
             proj, ents = self.avalon_entities
-            self._avalon_ents_by_id[proj["_id"]] = proj
-            for ent in ents:
-                self._avalon_ents_by_id[ent["_id"]] = ent
+            if proj:
+                self._avalon_ents_by_id[proj["_id"]] = proj
+                for ent in ents:
+                    self._avalon_ents_by_id[ent["_id"]] = ent
         return self._avalon_ents_by_id
 
     @property

@@ -159,13 +156,14 @@ class SyncToAvalonEvent(BaseEvent):
         if self._avalon_ents_by_ftrack_id is None:
             self._avalon_ents_by_ftrack_id = {}
             proj, ents = self.avalon_entities
-            ftrack_id = proj["data"]["ftrackId"]
-            self._avalon_ents_by_ftrack_id[ftrack_id] = proj
-            for ent in ents:
-                ftrack_id = ent["data"].get("ftrackId")
-                if ftrack_id is None:
-                    continue
-                self._avalon_ents_by_ftrack_id[ftrack_id] = ent
+            if proj:
+                ftrack_id = proj["data"]["ftrackId"]
+                self._avalon_ents_by_ftrack_id[ftrack_id] = proj
+                for ent in ents:
+                    ftrack_id = ent["data"].get("ftrackId")
+                    if ftrack_id is None:
+                        continue
+                    self._avalon_ents_by_ftrack_id[ftrack_id] = ent
         return self._avalon_ents_by_ftrack_id
 
     @property

@@ -508,7 +506,7 @@ class SyncToAvalonEvent(BaseEvent):
         found_actions = set()
         for ent_info in entities_info:
            entityType = ent_info["entityType"]
-            if entityType in self.ignore_entTypes:
+            if entityType not in self.interest_entTypes:
                 continue
 
             entity_type = ent_info.get("entity_type")
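Two changes run through this file: the event filter flips from a deny-list (ignore_entTypes) to an allow-list (interest_entTypes), and both lazily built caches gain an `if proj:` guard so a missing Avalon project no longer raises a TypeError on first access. A distilled, runnable version of the guard pattern (plain function instead of the event handler's property):

def build_ents_by_id(proj, ents):
    # Index project + entities by _id, tolerating a missing project.
    ents_by_id = {}
    if proj:  # guard added by the hunks above: proj may be None before first sync
        ents_by_id[proj["_id"]] = proj
        for ent in ents:
            ents_by_id[ent["_id"]] = ent
    return ents_by_id

print(build_ents_by_id(None, []))                        # {} instead of a TypeError
print(build_ents_by_id({"_id": "p1"}, [{"_id": "e1"}]))  # {'p1': {...}, 'e1': {...}}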
@@ -369,13 +369,6 @@ def main(args):
     # store socket connection object
     ObjectFactory.sock = sock
 
-    statuse_names = {
-        "main": "Main process",
-        "storer": "Event Storer",
-        "processor": "Event Processor"
-    }
-
-    ObjectFactory.status_factory = StatusFactory(statuse_names)
     ObjectFactory.status_factory["main"].update(server_info)
     _returncode = 0
     try:

@@ -429,6 +422,13 @@ if __name__ == "__main__":
     signal.signal(signal.SIGINT, signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)
 
+    statuse_names = {
+        "main": "Main process",
+        "storer": "Event Storer",
+        "processor": "Event Processor"
+    }
+    ObjectFactory.status_factory = StatusFactory(statuse_names)
+
     checker_thread = OutputChecker()
     ObjectFactory.checker_thread = checker_thread
     checker_thread.start()
@@ -93,11 +93,11 @@ def install():
 
     # Set context settings.
     nuke.addOnCreate(workfile_settings.set_context_settings, nodeClass="Root")
+    nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root")
 
     menu.install()
-
 
 
 def launch_workfiles_app():
    '''Function letting start workfiles after start of host
    '''
@@ -15,13 +15,12 @@ import nuke
 from .presets import (
     get_colorspace_preset,
     get_node_dataflow_preset,
-    get_node_colorspace_preset
+    get_node_colorspace_preset,
+    get_anatomy
 )
 
+from .utils import set_context_favorites
+
 from pypeapp import Logger
 log = Logger().get_logger(__name__, "nuke")

@@ -432,7 +431,7 @@ def add_deadline_tab(node):
     node.addKnob(nuke.Tab_Knob("Deadline"))
 
     knob = nuke.Int_Knob("deadlineChunkSize", "Chunk Size")
-    knob.setValue(1)
+    knob.setValue(0)
     node.addKnob(knob)
 
     knob = nuke.Int_Knob("deadlinePriority", "Priority")

@@ -944,6 +943,26 @@ class WorkfileSettings(object):
         # add colorspace menu item
         self.set_colorspace()
 
+    def set_favorites(self):
+        projects_root = os.getenv("AVALON_PROJECTS")
+        work_dir = os.getenv("AVALON_WORKDIR")
+        asset = os.getenv("AVALON_ASSET")
+        project = os.getenv("AVALON_PROJECT")
+        hierarchy = os.getenv("AVALON_HIERARCHY")
+        favorite_items = OrderedDict()
+
+        # project
+        favorite_items.update({"Project dir": os.path.join(
+            projects_root, project).replace("\\", "/")})
+        # shot
+        favorite_items.update({"Shot dir": os.path.join(
+            projects_root, project,
+            hierarchy, asset).replace("\\", "/")})
+        # workdir
+        favorite_items.update({"Work dir": work_dir})
+
+        set_context_favorites(favorite_items)
+
 
 def get_hierarchical_attr(entity, attr, default=None):
     attr_parts = attr.split('.')
@@ -3,6 +3,23 @@ import nuke
 from avalon.nuke import lib as anlib
 
 
+def set_context_favorites(favorites={}):
+    """ Adding favorite folders to nuke's browser
+
+    Arguments:
+        favorites (dict): couples of {name: path}
+    """
+    dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+    icon_path = os.path.join(dir, 'res', 'icons', 'folder-favorite3.png')
+
+    for name, path in favorites.items():
+        nuke.addFavoriteDir(
+            name,
+            path,
+            nuke.IMAGE | nuke.SCRIPT | nuke.GEO,
+            icon=icon_path)
+
+
 def get_node_outputs(node):
     '''
     Return a dictionary of the nodes and pipes that are connected to node
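set_favorites simply assembles a name-to-path mapping from the Avalon environment and hands it to set_context_favorites, which registers each entry with nuke.addFavoriteDir. A runnable sketch of the path-building half, with stubbed environment values standing in for a real launch context (paths are made up):

import os
from collections import OrderedDict

# Stubbed environment, as Avalon would set it when launching Nuke.
env = {
    "AVALON_PROJECTS": "/mnt/projects",
    "AVALON_PROJECT": "demo",
    "AVALON_HIERARCHY": "shots/sq01",
    "AVALON_ASSET": "sh010",
    "AVALON_WORKDIR": "/mnt/projects/demo/shots/sq01/sh010/work",
}

favorite_items = OrderedDict()
favorite_items["Project dir"] = os.path.join(
    env["AVALON_PROJECTS"], env["AVALON_PROJECT"]).replace("\\", "/")
favorite_items["Shot dir"] = os.path.join(
    env["AVALON_PROJECTS"], env["AVALON_PROJECT"],
    env["AVALON_HIERARCHY"], env["AVALON_ASSET"]).replace("\\", "/")
favorite_items["Work dir"] = env["AVALON_WORKDIR"]

for name, path in favorite_items.items():
    print(name, "->", path)  # inside Nuke these would go to nuke.addFavoriteDir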
@@ -1,60 +0,0 @@
-import os
-import subprocess
-
-import pyblish.api
-
-CREATE_NO_WINDOW = 0x08000000
-
-
-def deadline_command(cmd):
-    # Find Deadline
-    path = os.environ.get("DEADLINE_PATH", None)
-    assert path is not None, "Variable 'DEADLINE_PATH' must be set"
-
-    executable = os.path.join(path, "deadlinecommand")
-    if os.name == "nt":
-        executable += ".exe"
-    assert os.path.exists(
-        executable), "Deadline executable not found at %s" % executable
-    assert cmd, "Must have a command"
-
-    query = (executable, cmd)
-
-    process = subprocess.Popen(query, stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE,
-                               universal_newlines=True,
-                               creationflags=CREATE_NO_WINDOW)
-    out, err = process.communicate()
-
-    return out
-
-
-class CollectDeadlineUser(pyblish.api.ContextPlugin):
-    """Retrieve the local active Deadline user"""
-
-    order = pyblish.api.CollectorOrder + 0.499
-    label = "Deadline User"
-
-    hosts = ['maya', 'fusion', 'nuke']
-    families = [
-        "renderlayer",
-        "saver.deadline",
-        "imagesequence"
-    ]
-
-    def process(self, context):
-        """Inject the current working file"""
-        user = None
-        try:
-            user = deadline_command("GetCurrentUserName").strip()
-        except:
-            self.log.warning("Deadline command seems not to be working")
-
-        if not user:
-            self.log.warning("No Deadline user found. "
-                             "Do you have Deadline installed?")
-            return
-
-        self.log.info("Found Deadline user: {}".format(user))
-        context.data['deadlineUser'] = user
@@ -1,127 +0,0 @@
-import os
-import json
-import re
-
-import pyblish.api
-import clique
-
-
-class CollectJSON(pyblish.api.ContextPlugin):
-    """ Collecting the json files in current directory. """
-
-    label = "JSON"
-    order = pyblish.api.CollectorOrder
-    hosts = ['maya']
-
-    def version_get(self, string, prefix):
-        """ Extract version information from filenames. Code from Foundry"s
-        nukescripts.version_get()
-        """
-
-        regex = r"[/_.]{}\d+".format(prefix)
-        matches = re.findall(regex, string, re.IGNORECASE)
-
-        if not len(matches):
-            msg = "No '_{}#' found in '{}'".format(prefix, string)
-            raise ValueError(msg)
-        return matches[-1:][0][1], re.search(r"\d+", matches[-1:][0]).group()
-
-    def process(self, context):
-        current_file = context.data.get("currentFile", '')
-        # Skip if current file is not a directory
-        if not os.path.isdir(current_file):
-            return
-
-        # Traverse directory and collect collections from json files.
-        instances = []
-        for root, dirs, files in os.walk(current_file):
-            for f in files:
-                if f.endswith(".json"):
-                    with open(os.path.join(root, f)) as json_data:
-                        for data in json.load(json_data):
-                            instances.append(data)
-
-        # Validate instance based on supported families.
-        valid_families = ["img", "cache", "scene", "mov"]
-        valid_data = []
-        for data in instances:
-            families = data.get("families", []) + [data["family"]]
-            family_type = list(set(families) & set(valid_families))
-            if family_type:
-                valid_data.append(data)
-
-        # Create existing output instance.
-        scanned_dirs = []
-        files = []
-        collections = []
-        for data in valid_data:
-            if "collection" not in data.keys():
-                continue
-            if data["collection"] is None:
-                continue
-
-            instance_collection = clique.parse(data["collection"])
-
-            try:
-                version = self.version_get(
-                    os.path.basename(instance_collection.format()), "v"
-                )[1]
-            except KeyError:
-                # Ignore any output that is not versioned
-                continue
-
-            # Getting collections of all previous versions and current version
-            for count in range(1, int(version) + 1):
-
-                # Generate collection
-                version_string = "v" + str(count).zfill(len(version))
-                head = instance_collection.head.replace(
-                    "v" + version, version_string
-                )
-                collection = clique.Collection(
-                    head=head.replace("\\", "/"),
-                    padding=instance_collection.padding,
-                    tail=instance_collection.tail
-                )
-                collection.version = count
-
-                # Scan collection directory
-                scan_dir = os.path.dirname(collection.head)
-                if scan_dir not in scanned_dirs and os.path.exists(scan_dir):
-                    for f in os.listdir(scan_dir):
-                        file_path = os.path.join(scan_dir, f)
-                        files.append(file_path.replace("\\", "/"))
-                    scanned_dirs.append(scan_dir)
-
-                # Match files to collection and add
-                for f in files:
-                    if collection.match(f):
-                        collection.add(f)
-
-                # Skip if no files were found in the collection
-                if not list(collection):
-                    continue
-
-                # Skip existing collections
-                if collection in collections:
-                    continue
-
-                instance = context.create_instance(name=data["name"])
-                version = self.version_get(
-                    os.path.basename(collection.format()), "v"
-                )[1]
-
-                basename = os.path.basename(collection.format())
-                instance.data["label"] = "{0} - {1}".format(
-                    data["name"], basename
-                )
-
-                families = data["families"] + [data["family"]]
-                family = list(set(valid_families) & set(families))[0]
-                instance.data["family"] = family
-                instance.data["families"] = ["output"]
-                instance.data["collection"] = collection
-                instance.data["version"] = int(version)
-                instance.data["publish"] = False
-
-                collections.append(collection)
@@ -1,88 +0,0 @@
-import os
-import re
-import copy
-from avalon import io
-from pprint import pprint
-
-import pyblish.api
-from avalon import api
-
-
-texture_extensions = ['.tif', '.tiff', '.jpg', '.jpeg', '.tx', '.png', '.tga',
-                      '.psd', '.dpx', '.hdr', '.hdri', '.exr', '.sxr', '.psb']
-
-
-class CollectTextures(pyblish.api.ContextPlugin):
-    """
-    Gather all texture files in working directory, traversing whole structure.
-    """
-
-    order = pyblish.api.CollectorOrder
-    targets = ["texture"]
-    label = "Textures"
-    hosts = ["shell"]
-
-    def process(self, context):
-
-        if os.environ.get("PYPE_PUBLISH_PATHS"):
-            paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
-        else:
-            cwd = context.get("workspaceDir", os.getcwd())
-            paths = [cwd]
-
-        textures = []
-        for path in paths:
-            for dir, subdir, files in os.walk(path):
-                textures.extend(
-                    os.path.join(dir, x) for x in files
-                    if os.path.splitext(x)[1].lower() in texture_extensions)
-
-        self.log.info("Got {} texture files.".format(len(textures)))
-        if len(textures) < 1:
-            raise RuntimeError("no textures found.")
-
-        asset_name = os.environ.get("AVALON_ASSET")
-        family = 'texture'
-        subset = 'Main'
-
-        project = io.find_one({'type': 'project'})
-        asset = io.find_one({
-            'type': 'asset',
-            'name': asset_name
-        })
-
-        context.data['project'] = project
-        context.data['asset'] = asset
-
-        for tex in textures:
-            self.log.info("Processing: {}".format(tex))
-            name, ext = os.path.splitext(tex)
-            simple_name = os.path.splitext(os.path.basename(tex))[0]
-            instance = context.create_instance(simple_name)
-
-            instance.data.update({
-                "subset": subset,
-                "asset": asset_name,
-                "label": simple_name,
-                "name": simple_name,
-                "family": family,
-                "families": [family, 'ftrack'],
-            })
-            instance.data['destination_list'] = list()
-            instance.data['representations'] = list()
-            instance.data['source'] = 'pype command'
-
-            texture_data = {}
-            texture_data['anatomy_template'] = 'texture'
-            texture_data["ext"] = ext
-            texture_data["label"] = simple_name
-            texture_data["name"] = "texture"
-            texture_data["stagingDir"] = os.path.dirname(tex)
-            texture_data["files"] = os.path.basename(tex)
-            texture_data["thumbnail"] = False
-            texture_data["preview"] = False
-
-            instance.data["representations"].append(texture_data)
-            self.log.info("collected instance: {}".format(instance.data))
-
-        self.log.info("All collected.")
@@ -1,51 +0,0 @@
-import os
-import json
-import datetime
-import time
-
-import pyblish.api
-import clique
-
-
-class ExtractJSON(pyblish.api.ContextPlugin):
-    """ Extract all instances to a serialized json file. """
-
-    order = pyblish.api.IntegratorOrder
-    label = "JSON"
-    hosts = ['maya']
-
-    def process(self, context):
-
-        workspace = os.path.join(
-            os.path.dirname(context.data["currentFile"]), "workspace",
-            "instances")
-
-        if not os.path.exists(workspace):
-            os.makedirs(workspace)
-
-        output_data = []
-        for instance in context:
-            self.log.debug(instance['data'])
-
-            data = {}
-            for key, value in instance.data.iteritems():
-                if isinstance(value, clique.Collection):
-                    value = value.format()
-
-                try:
-                    json.dumps(value)
-                    data[key] = value
-                except KeyError:
-                    msg = "\"{0}\"".format(value)
-                    msg += " in instance.data[\"{0}\"]".format(key)
-                    msg += " could not be serialized."
-                    self.log.debug(msg)
-
-            output_data.append(data)
-
-        timestamp = datetime.datetime.fromtimestamp(
-            time.time()).strftime("%Y%m%d-%H%M%S")
-        filename = timestamp + "_instances.json"
-
-        with open(os.path.join(workspace, filename), "w") as outfile:
-            outfile.write(json.dumps(output_data, indent=4, sort_keys=True))
@@ -1,86 +0,0 @@
-import os
-import pyblish.api
-import subprocess
-import clique
-
-
-class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
-    """Resolve any dependency issies
-
-    This plug-in resolves any paths which, if not updated might break
-    the published file.
-
-    The order of families is important, when working with lookdev you want to
-    first publish the texture, update the texture paths in the nodes and then
-    publish the shading network. Same goes for file dependent assets.
-    """
-
-    label = "Extract Quicktime"
-    order = pyblish.api.ExtractorOrder
-    families = ["imagesequence", "render", "write", "source"]
-    hosts = ["shell"]
-
-    def process(self, instance):
-        # fps = instance.data.get("fps")
-        # start = instance.data.get("startFrame")
-        # stagingdir = os.path.normpath(instance.data.get("stagingDir"))
-        #
-        # collected_frames = os.listdir(stagingdir)
-        # collections, remainder = clique.assemble(collected_frames)
-        #
-        # full_input_path = os.path.join(
-        #     stagingdir, collections[0].format('{head}{padding}{tail}')
-        # )
-        # self.log.info("input {}".format(full_input_path))
-        #
-        # filename = collections[0].format('{head}')
-        # if not filename.endswith('.'):
-        #     filename += "."
-        # movFile = filename + "mov"
-        # full_output_path = os.path.join(stagingdir, movFile)
-        #
-        # self.log.info("output {}".format(full_output_path))
-        #
-        # config_data = instance.context.data['output_repre_config']
-        #
-        # proj_name = os.environ.get('AVALON_PROJECT', '__default__')
-        # profile = config_data.get(proj_name, config_data['__default__'])
-        #
-        # input_args = []
-        # # overrides output file
-        # input_args.append("-y")
-        # # preset's input data
-        # input_args.extend(profile.get('input', []))
-        # # necessary input data
-        # input_args.append("-start_number {}".format(start))
-        # input_args.append("-i {}".format(full_input_path))
-        # input_args.append("-framerate {}".format(fps))
-        #
-        # output_args = []
-        # # preset's output data
-        # output_args.extend(profile.get('output', []))
-        # # output filename
-        # output_args.append(full_output_path)
-        # mov_args = [
-        #     "ffmpeg",
-        #     " ".join(input_args),
-        #     " ".join(output_args)
-        # ]
-        # subprocess_mov = " ".join(mov_args)
-        # sub_proc = subprocess.Popen(subprocess_mov)
-        # sub_proc.wait()
-        #
-        # if not os.path.isfile(full_output_path):
-        #     raise("Quicktime wasn't created succesfully")
-        #
-        # if "representations" not in instance.data:
-        #     instance.data["representations"] = []
-        #
-        # representation = {
-        #     'name': 'mov',
-        #     'ext': 'mov',
-        #     'files': movFile,
-        #     "stagingDir": stagingdir,
-        #     "preview": True
-        # }
-        # instance.data["representations"].append(representation)
@@ -1,153 +0,0 @@
-import os
-import subprocess
-
-import pyblish.api
-import filelink
-
-
-class ExtractTranscode(pyblish.api.InstancePlugin):
-    """Extracts review movie from image sequence.
-
-    Offset to get images to transcode from.
-    """
-
-    order = pyblish.api.ExtractorOrder + 0.1
-    label = "Transcode"
-    optional = True
-    families = ["review"]
-
-    def find_previous_index(self, index, indexes):
-        """Finds the closest previous value in a list from a value."""
-
-        data = []
-        for i in indexes:
-            if i >= index:
-                continue
-            data.append(index - i)
-
-        return indexes[data.index(min(data))]
-
-    def process(self, instance):
-
-        if "collection" in instance.data.keys():
-            self.process_image(instance)
-
-        if "output_path" in instance.data.keys():
-            self.process_movie(instance)
-
-    def process_image(self, instance):
-
-        collection = instance.data.get("collection", [])
-
-        if not list(collection):
-            msg = "Skipping \"{0}\" because no frames was found."
-            self.log.warning(msg.format(instance.data["name"]))
-            return
-
-        # Temporary fill the missing frames.
-        missing = collection.holes()
-        if not collection.is_contiguous():
-            pattern = collection.format("{head}{padding}{tail}")
-            for index in missing.indexes:
-                dst = pattern % index
-                src_index = self.find_previous_index(
-                    index, list(collection.indexes)
-                )
-                src = pattern % src_index
-
-                filelink.create(src, dst)
-
-        # Generate args.
-        # Has to be yuv420p for compatibility with older players and smooth
-        # playback. This does come with a sacrifice of more visible banding
-        # issues.
-        # -crf 18 is visually lossless.
-        args = [
-            "ffmpeg", "-y",
-            "-start_number", str(min(collection.indexes)),
-            "-framerate", str(instance.context.data["framerate"]),
-            "-i", collection.format("{head}{padding}{tail}"),
-            "-pix_fmt", "yuv420p",
-            "-crf", "18",
-            "-timecode", "00:00:00:01",
-            "-vframes",
-            str(max(collection.indexes) - min(collection.indexes) + 1),
-            "-vf",
-            "scale=trunc(iw/2)*2:trunc(ih/2)*2",
-        ]
-
-        if instance.data.get("baked_colorspace_movie"):
-            args = [
-                "ffmpeg", "-y",
-                "-i", instance.data["baked_colorspace_movie"],
-                "-pix_fmt", "yuv420p",
-                "-crf", "18",
-                "-timecode", "00:00:00:01",
-            ]
-
-        args.append(collection.format("{head}.mov"))
-
-        self.log.debug("Executing args: {0}".format(args))
-
-        # Can't use subprocess.check_output, cause Houdini doesn't like that.
-        p = subprocess.Popen(
-            args,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            stdin=subprocess.PIPE,
-            cwd=os.path.dirname(args[-1])
-        )
-
-        output = p.communicate()[0]
-
-        # Remove temporary frame fillers
-        for f in missing:
-            os.remove(f)
-
-        if p.returncode != 0:
-            raise ValueError(output)
-
-        self.log.debug(output)
-
-    def process_movie(self, instance):
-        # Generate args.
-        # Has to be yuv420p for compatibility with older players and smooth
-        # playback. This does come with a sacrifice of more visible banding
-        # issues.
-        args = [
-            "ffmpeg", "-y",
-            "-i", instance.data["output_path"],
-            "-pix_fmt", "yuv420p",
-            "-crf", "18",
-            "-timecode", "00:00:00:01",
-        ]
-
-        if instance.data.get("baked_colorspace_movie"):
-            args = [
-                "ffmpeg", "-y",
-                "-i", instance.data["baked_colorspace_movie"],
-                "-pix_fmt", "yuv420p",
-                "-crf", "18",
-                "-timecode", "00:00:00:01",
-            ]
-
-        split = os.path.splitext(instance.data["output_path"])
-        args.append(split[0] + "_review.mov")
-
-        self.log.debug("Executing args: {0}".format(args))
-
-        # Can't use subprocess.check_output, cause Houdini doesn't like that.
-        p = subprocess.Popen(
-            args,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            stdin=subprocess.PIPE,
-            cwd=os.path.dirname(args[-1])
-        )
-
-        output = p.communicate()[0]
-
-        if p.returncode != 0:
-            raise ValueError(output)
-
-        self.log.debug(output)
@@ -30,7 +30,7 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
         assert project_entity, (
             "Project '{0}' was not found."
         ).format(project_name)
-        self.log.debug("Collected Project entity \"{}\"".format(project_entity))
+        self.log.debug("Collected Project \"{}\"".format(project_entity))
 
         asset_entity = io.find_one({
             "type": "asset",

@@ -41,7 +41,12 @@ class CollectAvalonEntities(pyblish.api.ContextPlugin):
             "No asset found by the name '{0}' in project '{1}'"
         ).format(asset_name, project_name)
 
-        self.log.debug("Collected Asset entity \"{}\"".format(asset_entity))
+        self.log.debug("Collected Asset \"{}\"".format(asset_entity))
 
         context.data["projectEntity"] = project_entity
         context.data["assetEntity"] = asset_entity
+
+        data = asset_entity['data']
+        context.data['handles'] = int(data.get("handles", 0))
+        context.data["handleStart"] = int(data.get("handleStart", 0))
+        context.data["handleEnd"] = int(data.get("handleEnd", 0))
@@ -108,9 +108,15 @@ class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
         if resolution_height:
             anatomy_data["resolution_height"] = resolution_height
 
+        pixel_aspect = instance.data.get("pixelAspect")
+        if pixel_aspect:
+            anatomy_data["pixel_aspect"] = float("{:0.2f}".format(
+                float(pixel_aspect)))
+
         fps = instance.data.get("fps")
-        if resolution_height:
-            anatomy_data["fps"] = fps
+        if fps:
+            anatomy_data["fps"] = float("{:0.2f}".format(
+                float(fps)))
 
         instance.data["projectEntity"] = project_entity
         instance.data["assetEntity"] = asset_entity
@@ -21,7 +21,7 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
         if '<shell>' in filename:
             return
 
-        rootVersion = pype.get_version_from_path(filename)
+        rootVersion = int(pype.get_version_from_path(filename))
         context.data['version'] = rootVersion
 
         self.log.info("{}".format(type(rootVersion)))
         self.log.info('Scene Version: %s' % context.data.get('version'))
@@ -4,7 +4,6 @@ import copy
 
 import pype.api
 import pyblish
-from pypeapp import config
 
 
 class ExtractBurnin(pype.api.Extractor):

@@ -26,13 +25,24 @@ class ExtractBurnin(pype.api.Extractor):
         if "representations" not in instance.data:
             raise RuntimeError("Burnin needs already created mov to work on.")
 
+        context_data = instance.context.data
+
         version = instance.data.get(
             'version', instance.context.data.get('version'))
         frame_start = int(instance.data.get("frameStart") or 0)
         frame_end = int(instance.data.get("frameEnd") or 1)
+        handle_start = instance.data.get("handleStart",
+                                         context_data.get("handleStart"))
+        handle_end = instance.data.get("handleEnd",
+                                       context_data.get("handleEnd"))
         duration = frame_end - frame_start + 1
 
         prep_data = copy.deepcopy(instance.data["anatomyData"])
 
+        if "slate.farm" in instance.data["families"]:
+            frame_start += 1
+            duration -= 1
+
         prep_data.update({
             "frame_start": frame_start,
             "frame_end": frame_end,

@@ -42,22 +52,6 @@ class ExtractBurnin(pype.api.Extractor):
             "intent": instance.context.data.get("intent", "")
         })
 
-        slate_frame_start = frame_start
-        slate_frame_end = frame_end
-        slate_duration = duration
-
-        # exception for slate workflow
-        if "slate" in instance.data["families"]:
-            slate_frame_start = frame_start - 1
-            slate_frame_end = frame_end
-            slate_duration = slate_frame_end - slate_frame_start + 1
-
-        prep_data.update({
-            "slate_frame_start": slate_frame_start,
-            "slate_frame_end": slate_frame_end,
-            "slate_duration": slate_duration
-        })
-
         # get anatomy project
         anatomy = instance.context.data['anatomy']

@@ -70,6 +64,9 @@ class ExtractBurnin(pype.api.Extractor):
 
             is_sequence = "sequence" in repre.get("tags", [])
 
+            # no handles switch from profile tags
+            no_handles = "no-handles" in repre.get("tags", [])
+
             stagingdir = repre["stagingDir"]
             filename = "{0}".format(repre["files"])

@@ -101,17 +98,32 @@ class ExtractBurnin(pype.api.Extractor):
             filled_anatomy = anatomy.format_all(_prep_data)
             _prep_data["anatomy"] = filled_anatomy.get_solved()
 
+            # copy frame range variables
+            frame_start_cp = frame_start
+            frame_end_cp = frame_end
+            duration_cp = duration
+
+            if no_handles:
+                frame_start_cp = frame_start + handle_start
+                frame_end_cp = frame_end - handle_end
+                duration_cp = frame_end_cp - frame_start_cp + 1
+                _prep_data.update({
+                    "frame_start": frame_start_cp,
+                    "frame_end": frame_end_cp,
+                    "duration": duration_cp,
+                })
+
             # dealing with slates
-            slate_frame_start = frame_start
-            slate_frame_end = frame_end
-            slate_duration = duration
+            slate_frame_start = frame_start_cp
+            slate_frame_end = frame_end_cp
+            slate_duration = duration_cp
 
             # exception for slate workflow
-            if "slate" in instance.data["families"]:
-                slate_frame_start = frame_start - 1
-                slate_frame_end = frame_end
-                slate_duration = duration + 1
+            if ("slate" in instance.data["families"]):
+                if "slate-frame" in repre.get("tags", []):
+                    slate_frame_start = frame_start_cp - 1
+                    slate_frame_end = frame_end_cp
+                    slate_duration = duration_cp + 1
 
             self.log.debug("__1 slate_frame_start: {}".format(slate_frame_start))
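The recurring arithmetic in the ExtractBurnin hunks is trimming handles off an inclusive frame range. A tiny, runnable illustration of the convention (inclusive ranges, so duration = end - start + 1); the numbers are made up:

frame_start, frame_end = 1001, 1100   # inclusive range, 100 frames
handle_start, handle_end = 10, 10

duration = frame_end - frame_start + 1
assert duration == 100

# "no-handles" variant: cut the handles off both ends.
frame_start_cp = frame_start + handle_start   # 1011
frame_end_cp = frame_end - handle_end         # 1090
duration_cp = frame_end_cp - frame_start_cp + 1
assert duration_cp == 80

# Slate variant: one extra frame prepended before the first frame.
slate_frame_start = frame_start_cp - 1        # 1010
slate_duration = duration_cp + 1              # 81
print(frame_start_cp, frame_end_cp, duration_cp, slate_frame_start, slate_duration)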
@@ -27,8 +27,9 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
         representations_new = representations[:]
 
         for repre in representations:
+            tags = repre.get("tags", [])
             self.log.debug(repre)
-            valid = 'review' in repre['tags'] or "thumb-nuke" in repre['tags']
+            valid = 'review' in tags or "thumb-nuke" in tags
             if not valid:
                 continue
@@ -12,7 +12,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
     otherwise the representation is ignored.
 
     All new representations are created and encoded by ffmpeg following
-    presets found in `pype-config/presets/plugins/global/publish.json:ExtractReview:outputs`. To change the file extension
+    presets found in `pype-config/presets/plugins/global/
+    publish.json:ExtractReview:outputs`. To change the file extension
     filter values use preset's attributes `ext_filter`
     """

@@ -23,20 +24,30 @@ class ExtractReview(pyblish.api.InstancePlugin):
 
     outputs = {}
     ext_filter = []
+    to_width = 1920
+    to_height = 1080
 
     def process(self, instance):
-        to_width = 1920
-        to_height = 1080
-
         output_profiles = self.outputs or {}
 
         inst_data = instance.data
-        fps = inst_data.get("fps")
-        start_frame = inst_data.get("frameStart")
-        resolution_width = inst_data.get("resolutionWidth", to_width)
-        resolution_height = inst_data.get("resolutionHeight", to_height)
+        context_data = instance.context.data
+        fps = float(inst_data.get("fps"))
+        frame_start = inst_data.get("frameStart")
+        frame_end = inst_data.get("frameEnd")
+        handle_start = inst_data.get("handleStart",
+                                     context_data.get("handleStart"))
+        handle_end = inst_data.get("handleEnd",
+                                   context_data.get("handleEnd"))
+        pixel_aspect = inst_data.get("pixelAspect", 1)
+        resolution_width = inst_data.get("resolutionWidth", self.to_width)
+        resolution_height = inst_data.get("resolutionHeight", self.to_height)
         self.log.debug("Families In: `{}`".format(inst_data["families"]))
+        self.log.debug("__ frame_start: {}".format(frame_start))
+        self.log.debug("__ frame_end: {}".format(frame_end))
+        self.log.debug("__ handle_start: {}".format(handle_start))
+        self.log.debug("__ handle_end: {}".format(handle_end))
 
         # get representation and loop them
         representations = inst_data["representations"]
@@ -73,6 +84,9 @@ class ExtractReview(pyblish.api.InstancePlugin):
             is_sequence = ("sequence" in p_tags) and (ext in (
                 "png", "jpg", "jpeg"))
 
+            # no handles switch from profile tags
+            no_handles = "no-handles" in p_tags
+
             self.log.debug("Profile name: {}".format(name))
 
             if not ext:

@@ -142,6 +156,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
             self.log.info("new_tags: `{}`".format(new_tags))
 
             input_args = []
+            output_args = []
 
             # overrides output file
             input_args.append("-y")
@@ -152,12 +167,23 @@ class ExtractReview(pyblish.api.InstancePlugin):
             # necessary input data
             # adds start arg only if image sequence
             if isinstance(repre["files"], list):
-                if start_frame != repre.get("detectedStart", start_frame):
-                    start_frame = repre.get("detectedStart")
+                if frame_start != repre.get("detectedStart", frame_start):
+                    frame_start = repre.get("detectedStart")
+
+                # exclude handle if no handles defined
+                if no_handles:
+                    frame_start_no_handles = frame_start + handle_start
+                    frame_end_no_handles = frame_end - handle_end
+
                 input_args.append(
                     "-start_number {0} -framerate {1}".format(
-                        start_frame, fps))
+                        frame_start, fps))
+            else:
+                if no_handles:
+                    start_sec = float(handle_start) / fps
+                    input_args.append("-ss {:0.2f}".format(start_sec))
+                    frame_start_no_handles = frame_start + handle_start
+                    frame_end_no_handles = frame_end - handle_end
 
             input_args.append("-i {}".format(full_input_path))
@@ -191,37 +217,48 @@ class ExtractReview(pyblish.api.InstancePlugin):
                 ]
             )
 
-            output_args = []
             codec_args = profile.get('codec', [])
             output_args.extend(codec_args)
             # preset's output data
             output_args.extend(profile.get('output', []))
 
             # defining image ratios
-            resolution_ratio = float(resolution_width / (
-                resolution_height * pixel_aspect))
-            delivery_ratio = float(to_width) / float(to_height)
-            self.log.debug(resolution_ratio)
-            self.log.debug(delivery_ratio)
+            resolution_ratio = (
+                float(resolution_width) * pixel_aspect) / resolution_height
+            delivery_ratio = float(self.to_width) / float(self.to_height)
+            self.log.debug(
+                "__ resolution_ratio: `{}`".format(resolution_ratio))
+            self.log.debug(
+                "__ delivery_ratio: `{}`".format(delivery_ratio))
 
             # get scale factor
-            scale_factor = to_height / (
+            scale_factor = float(self.to_height) / (
                 resolution_height * pixel_aspect)
-            self.log.debug(scale_factor)
+
+            # shorten two decimals long float number for testing conditions
+            resolution_ratio_test = float(
+                "{:0.2f}".format(resolution_ratio))
+            delivery_ratio_test = float(
+                "{:0.2f}".format(delivery_ratio))
+
+            if resolution_ratio_test < delivery_ratio_test:
+                scale_factor = float(self.to_width) / (
+                    resolution_width * pixel_aspect)
+
+            self.log.debug("__ scale_factor: `{}`".format(scale_factor))
 
             # letter_box
             lb = profile.get('letter_box', 0)
             if lb != 0:
-                ffmpet_width = to_width
-                ffmpet_height = to_height
+                ffmpeg_width = self.to_width
+                ffmpeg_height = self.to_height
                 if "reformat" not in p_tags:
                     lb /= pixel_aspect
-                    if resolution_ratio != delivery_ratio:
-                        ffmpet_width = resolution_width
-                        ffmpet_height = int(
+                    if resolution_ratio_test != delivery_ratio_test:
+                        ffmpeg_width = resolution_width
+                        ffmpeg_height = int(
                             resolution_height * pixel_aspect)
                 else:
-                    if resolution_ratio != delivery_ratio:
+                    if resolution_ratio_test != delivery_ratio_test:
                         lb /= scale_factor
                     else:
                         lb /= pixel_aspect
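The ratio fix is the substance of this hunk: the old code computed resolution_width / (resolution_height * pixel_aspect), i.e. it divided by the pixel aspect, while the display aspect of anamorphic media is (width * pixel_aspect) / height. A worked, runnable example with made-up anamorphic numbers:

# 1920x1080 square-pixel delivery vs. 1440x1080 anamorphic source (PAR 4/3).
to_width, to_height = 1920, 1080
delivery_ratio = to_width / to_height                                # 1.778 (16:9)

resolution_width, resolution_height = 1440, 1080
pixel_aspect = 4.0 / 3.0

old_ratio = resolution_width / (resolution_height * pixel_aspect)   # 1.0 (wrong)
new_ratio = (resolution_width * pixel_aspect) / resolution_height   # 1.778 (right)

print(round(old_ratio, 3), round(new_ratio, 3), round(delivery_ratio, 3))
# With the corrected ratio the source matches the 16:9 delivery format,
# so the width-based scale-factor branch and letterbox padding stay off.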
@@ -233,11 +270,18 @@ class ExtractReview(pyblish.api.InstancePlugin):
                         "c=black,drawbox=0:ih-round((ih-(iw*("
                         "1/{2})))/2):iw:round((ih-(iw*(1/{2})))"
                         "/2):t=fill:c=black").format(
-                            ffmpet_width, ffmpet_height, lb))
+                            ffmpeg_width, ffmpeg_height, lb))
 
             # In case audio is longer than video.
             output_args.append("-shortest")
 
+            if no_handles:
+                duration_sec = float(
+                    (frame_end - (
+                        frame_start + handle_start
+                    ) + 1) - handle_end) / fps
+                output_args.append("-t {:0.2f}".format(duration_sec))
+
             # output filename
             output_args.append(full_output_path)
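When handles are stripped from a movie input, the plugin seeks with -ss and limits the output length with -t, both in seconds. A quick, runnable check of that arithmetic with illustrative numbers:

fps = 25.0
frame_start, frame_end = 1001, 1100
handle_start, handle_end = 10, 10

start_sec = handle_start / fps                 # skip the head handle: 0.40 s
frames_kept = (frame_end - (frame_start + handle_start) + 1) - handle_end
duration_sec = frames_kept / fps               # 80 frames -> 3.20 s

print("-ss {:0.2f} -t {:0.2f}".format(start_sec, duration_sec))
# -ss 0.40 -t 3.20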
@@ -252,24 +296,26 @@ class ExtractReview(pyblish.api.InstancePlugin):
 
             # scaling none square pixels and 1920 width
             if "reformat" in p_tags:
-                if resolution_ratio < delivery_ratio:
+                if resolution_ratio_test < delivery_ratio_test:
                     self.log.debug("lower then delivery")
-                    width_scale = int(to_width * scale_factor)
+                    width_scale = int(self.to_width * scale_factor)
                     width_half_pad = int((
-                        to_width - width_scale)/2)
-                    height_scale = to_height
+                        self.to_width - width_scale)/2)
+                    height_scale = self.to_height
                     height_half_pad = 0
                 else:
                     self.log.debug("heigher then delivery")
-                    width_scale = to_width
+                    width_scale = self.to_width
                     width_half_pad = 0
-                    scale_factor = float(to_width) / float(
-                        resolution_width)
-                    self.log.debug(scale_factor)
+                    scale_factor = float(self.to_width) / (float(
+                        resolution_width) * pixel_aspect)
+                    self.log.debug(
+                        "__ scale_factor: `{}`".format(
+                            scale_factor))
                     height_scale = int(
                         resolution_height * scale_factor)
                     height_half_pad = int(
-                        (to_height - height_scale)/2)
+                        (self.to_height - height_scale)/2)
 
             self.log.debug(
                 "__ width_scale: `{}`".format(width_scale))

@@ -287,7 +333,7 @@ class ExtractReview(pyblish.api.InstancePlugin):
                     "scale={0}x{1}:flags=lanczos,"
                     "pad={2}:{3}:{4}:{5}:black,setsar=1"
                 ).format(width_scale, height_scale,
-                         to_width, to_height,
+                         self.to_width, self.to_height,
                          width_half_pad,
                          height_half_pad
                          )
@@ -351,14 +397,19 @@ class ExtractReview(pyblish.api.InstancePlugin):
                 "codec": codec_args,
                 "_profile": profile,
                 "resolutionHeight": resolution_height,
-                "resolutionWidth": resolution_width,
+                "resolutionWidth": resolution_width
             })
             if is_sequence:
                 repre_new.update({
                     "stagingDir": stg_dir,
                     "files": os.listdir(stg_dir)
                 })
 
+            if no_handles:
+                repre_new.update({
+                    "outputName": name + "_noHandles",
+                    "startFrameReview": frame_start_no_handles,
+                    "endFrameReview": frame_end_no_handles
+                })
+
             if repre_new.get('preview'):
                 repre_new.pop("preview")
             if repre_new.get('thumbnail'):

@@ -372,6 +423,11 @@ class ExtractReview(pyblish.api.InstancePlugin):
             if "delete" in repre.get("tags", []):
                 representations_new.remove(repre)
 
+        instance.data.update({
+            "reviewToWidth": self.to_width,
+            "reviewToHeight": self.to_height
+        })
+
         self.log.debug(
             "new representations: {}".format(representations_new))
         instance.data["representations"] = representations_new
@@ -24,24 +24,36 @@ class ExtractReviewSlate(pype.api.Extractor):
         slate_path = inst_data.get("slateFrame")
         ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
 
-        to_width = 1920
-        to_height = 1080
+        # values are set in ExtractReview
+        to_width = inst_data["reviewToWidth"]
+        to_height = inst_data["reviewToHeight"]
 
         resolution_width = inst_data.get("resolutionWidth", to_width)
         resolution_height = inst_data.get("resolutionHeight", to_height)
         pixel_aspect = inst_data.get("pixelAspect", 1)
         fps = inst_data.get("fps")
 
         # defining image ratios
-        resolution_ratio = float(resolution_width / (
-            resolution_height * pixel_aspect))
+        resolution_ratio = (
+            float(resolution_width) * pixel_aspect) / resolution_height
         delivery_ratio = float(to_width) / float(to_height)
-        self.log.debug(resolution_ratio)
-        self.log.debug(delivery_ratio)
+        self.log.debug("__ resolution_ratio: `{}`".format(resolution_ratio))
+        self.log.debug("__ delivery_ratio: `{}`".format(delivery_ratio))
 
         # get scale factor
-        scale_factor = to_height / (
+        scale_factor = float(to_height) / (
             resolution_height * pixel_aspect)
-        self.log.debug(scale_factor)
+
+        # shorten two decimals long float number for testing conditions
+        resolution_ratio_test = float(
+            "{:0.2f}".format(resolution_ratio))
+        delivery_ratio_test = float(
+            "{:0.2f}".format(delivery_ratio))
+
+        if resolution_ratio_test < delivery_ratio_test:
+            scale_factor = float(to_width) / (
+                resolution_width * pixel_aspect)
+
+        self.log.debug("__ scale_factor: `{}`".format(scale_factor))
 
         for i, repre in enumerate(inst_data["representations"]):
             _remove_at_end = []

@@ -95,7 +107,7 @@ class ExtractReviewSlate(pype.api.Extractor):
 
             # scaling none square pixels and 1920 width
             if "reformat" in p_tags:
-                if resolution_ratio < delivery_ratio:
+                if resolution_ratio_test < delivery_ratio_test:
                     self.log.debug("lower then delivery")
                     width_scale = int(to_width * scale_factor)
                     width_half_pad = int((

@@ -106,7 +118,8 @@ class ExtractReviewSlate(pype.api.Extractor):
                     self.log.debug("heigher then delivery")
                     width_scale = to_width
                     width_half_pad = 0
-                    scale_factor = float(to_width) / float(resolution_width)
+                    scale_factor = float(to_width) / (float(
+                        resolution_width) * pixel_aspect)
                     self.log.debug(scale_factor)
                     height_scale = int(
                         resolution_height * scale_factor)
@@ -80,7 +80,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         "matchmove",
         "image",
         "source",
-        "assembly"
+        "assembly",
+        "textures"
     ]
     exclude_families = ["clip"]
     db_representation_context_keys = [

@@ -278,6 +279,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         stagingdir = repre['stagingDir']
         if repre.get('anatomy_template'):
             template_name = repre['anatomy_template']
+        if repre.get("outputName"):
+            template_data["output"] = repre['outputName']
 
         template = os.path.normpath(
             anatomy.templates[template_name]["path"])

@@ -389,9 +392,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
         template_data["representation"] = repre['ext']
 
-        if repre.get("outputName"):
-            template_data["output"] = repre['outputName']
-
         src = os.path.join(stagingdir, fname)
         anatomy_filled = anatomy.format(template_data)
         template_filled = anatomy_filled[template_name]["path"]

@@ -441,7 +441,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
 
         if sequence_repre and repre.get("frameStart"):
             representation['context']['frame'] = (
-                src_padding_exp % int(repre.get("frameStart"))
+                dst_padding_exp % int(repre.get("frameStart"))
            )
 
         self.log.debug("__ representation: {}".format(representation))
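src_padding_exp and dst_padding_exp are printf-style padding patterns (e.g. %04d); the last hunk makes the stored frame context use the destination padding rather than the source padding. The expressions below are hypothetical, since the diff does not show how they are built:

# Hypothetical padding expressions: source rendered with 5 digits,
# destination template expecting 4 digits.
src_padding_exp = "%05d"
dst_padding_exp = "%04d"
frame_start = 42

print(src_padding_exp % frame_start)  # 00042 - wrong for a %04d template
print(dst_padding_exp % frame_start)  # 0042  - matches the published file names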
@@ -131,6 +131,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
     - publishJobState (str, Optional): "Active" or "Suspended"
         This defaults to "Suspended"
 
+    - expectedFiles (list or dict): explained below
+
     """
 
     label = "Submit image sequence jobs to Deadline or Muster"

@@ -166,7 +168,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         instance_transfer = {
             "slate": ["slateFrame"],
             "review": ["lutPath"],
-            "render.farm": ["bakeScriptPath", "bakeRenderPath", "bakeWriteNodeName"]
+            "render.farm": ["bakeScriptPath", "bakeRenderPath",
+                            "bakeWriteNodeName", "version"]
         }
 
         # list of family names to transfer to new family if present

@@ -190,11 +193,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
 
         metadata_path = os.path.normpath(metadata_path)
         mount_root = os.path.normpath(os.environ["PYPE_STUDIO_PROJECTS_MOUNT"])
-        network_root = os.path.normpath(
-            os.environ["PYPE_STUDIO_PROJECTS_PATH"]
-        )
-
+        network_root = os.environ["PYPE_STUDIO_PROJECTS_PATH"]
         metadata_path = metadata_path.replace(mount_root, network_root)
+        metadata_path = os.path.normpath(metadata_path)
 
         # Generate the payload for Deadline submission
         payload = {

@@ -206,7 +207,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "UserName": job["Props"]["User"],
             "Comment": instance.context.data.get("comment", ""),
             "Priority": job["Props"]["Pri"],
-            "Pool": self.deadline_pool
+            "Pool": self.deadline_pool,
+            "OutputDirectory0": output_dir
         },
         "PluginInfo": {
             "Version": "3.6",

@@ -384,13 +386,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                 "tags": ["review"] if preview else []
             }
 
-            # add tags
-            if preview:
-                if "ftrack" not in new_instance["families"]:
-                    if os.environ.get("FTRACK_SERVER"):
-                        new_instance["families"].append("ftrack")
-                if "review" not in new_instance["families"]:
-                    new_instance["families"].append("review")
+            self._solve_families(new_instance, preview)
 
             new_instance["representations"] = [rep]
 
@@ -399,6 +395,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             if new_instance.get("extendFrames", False):
                 self._copy_extend_frames(new_instance, rep)
             instances.append(new_instance)
+
         return instances
 
     def _get_representations(self, instance, exp_files):

@@ -419,6 +416,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         start = int(instance.get("frameStart"))
         end = int(instance.get("frameEnd"))
         cols, rem = clique.assemble(exp_files)
+        bake_render_path = instance.get("bakeRenderPath")
+
         # create representation for every collected sequence
         for c in cols:
             ext = c.tail.lstrip(".")

@@ -435,8 +434,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                     preview = True
                     break
-                break
+
+            if bake_render_path:
+                preview = False
 
             rep = {
-                "name": str(c),
+                "name": ext,
                 "ext": ext,
                 "files": [os.path.basename(f) for f in list(c)],
                 "frameStart": start,
@@ -450,32 +453,42 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
 
             representations.append(rep)
 
-        families = instance.get("families")
-        # if we have one representation with preview tag
-        # flag whole instance for review and for ftrack
-        if preview:
-            if "ftrack" not in families:
-                if os.environ.get("FTRACK_SERVER"):
-                    families.append("ftrack")
-            if "review" not in families:
-                families.append("review")
-        instance["families"] = families
+            self._solve_families(instance, preview)
 
         # add reminders as representations
         for r in rem:
            ext = r.split(".")[-1]
            rep = {
-                "name": r,
+                "name": ext,
                "ext": ext,
                "files": os.path.basename(r),
                "stagingDir": os.path.dirname(r),
                "anatomy_template": "publish",
            }
+
+            if r in bake_render_path:
+                rep.update({
+                    "fps": instance.get("fps"),
+                    "anatomy_template": "render",
+                    "tags": ["review", "delete"]
+                })
+                # solve families with `preview` attributes
+                self._solve_families(instance, True)
            representations.append(rep)
 
        return representations
 
+    def _solve_families(self, instance, preview=False):
+        families = instance.get("families")
+        # if we have one representation with preview tag
+        # flag whole instance for review and for ftrack
+        if preview:
+            if "ftrack" not in families:
+                if os.environ.get("FTRACK_SERVER"):
+                    families.append("ftrack")
+            if "review" not in families:
+                families.append("review")
+        instance["families"] = families
+
     def process(self, instance):
         """
         Detect type of renderfarm submission and create and post dependend job

@@ -485,7 +498,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         :param instance: Instance data
         :type instance: dict
         """
-
         data = instance.data.copy()
         context = instance.context
         self.context = context
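Extracting _solve_families removes two copy-pasted blocks that tag an instance for review/ftrack. A standalone, runnable rendition of the helper's logic (plain dict instead of a pyblish instance, environment check stubbed):

import os

def solve_families(instance, preview=False):
    # Append 'ftrack' and 'review' families once, only when previewable.
    families = instance.get("families", [])
    if preview:
        if "ftrack" not in families and os.environ.get("FTRACK_SERVER"):
            families.append("ftrack")
        if "review" not in families:
            families.append("review")
    instance["families"] = families

inst = {"families": ["render.farm"]}
os.environ["FTRACK_SERVER"] = "https://example.ftrackapp.com"  # stubbed value
solve_families(inst, preview=True)
print(inst["families"])  # ['render.farm', 'ftrack', 'review']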
@@ -518,10 +530,23 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         start = instance.data.get("frameStart")
         if start is None:
             start = context.data["frameStart"]
+
         end = instance.data.get("frameEnd")
         if end is None:
             end = context.data["frameEnd"]
 
+        handle_start = instance.data.get("handleStart")
+        if handle_start is None:
+            handle_start = context.data["handleStart"]
+
+        handle_end = instance.data.get("handleEnd")
+        if handle_end is None:
+            handle_end = context.data["handleEnd"]
+
+        fps = instance.data.get("fps")
+        if fps is None:
+            fps = context.data["fps"]
+
         if data.get("extendFrames", False):
             start, end = self._extend_frames(
                 asset,

@@ -550,7 +575,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "asset": asset,
             "frameStart": start,
             "frameEnd": end,
-            "fps": data.get("fps", 25),
+            "handleStart": handle_start,
+            "handleEnd": handle_end,
+            "fps": fps,
             "source": source,
             "extendFrames": data.get("extendFrames"),
             "overrideExistingFrame": data.get("overrideExistingFrame"),
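The repeated instance-then-context lookups above form a simple fallback chain: per-instance overrides win, otherwise the publish context supplies the value (and the hardcoded fps default of 25 goes away). A compact helper expressing the same pattern — hypothetical, since the plugin spells it out inline:

def fallback(instance_data, context_data, key):
    # Prefer the instance value; fall back to the context value.
    value = instance_data.get(key)
    if value is None:
        value = context_data[key]
    return value

instance_data = {"frameStart": 1001, "fps": None}
context_data = {"frameStart": 990, "frameEnd": 1100, "fps": 25.0}

print(fallback(instance_data, context_data, "frameStart"))  # 1001 (instance wins)
print(fallback(instance_data, context_data, "fps"))         # 25.0 (context fallback)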
@@ -571,6 +598,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         for v in values:
             instance_skeleton_data[v] = instance.data.get(v)
 
+        # look into instance data if representations are not having any
+        # which are having tag `publish_on_farm` and include them
+        for r in instance.data.get("representations", []):
+            if "publish_on_farm" in r.get("tags"):
+                # create representations attribute if not there
+                if "representations" not in instance_skeleton_data.keys():
+                    instance_skeleton_data["representations"] = []
+
+                instance_skeleton_data["representations"].append(r)
+
         instances = None
         assert data.get("expectedFiles"), ("Submission from old Pype version"
                                            " - missing expectedFiles")

@@ -644,7 +681,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             data.get("expectedFiles")
         )
 
-        if "representations" not in instance_skeleton_data:
+        if "representations" not in instance_skeleton_data.keys():
             instance_skeleton_data["representations"] = []
 
         # add representation
@@ -220,6 +220,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
                     layer=layer_name)),
                 "renderer": self.get_render_attribute("currentRenderer",
                                                       layer=layer_name),
+                "handleStart": context.data["assetEntity"]['data']['handleStart'],
+                "handleEnd": context.data["assetEntity"]['data']['handleEnd'],
 
                 # instance subset
                 "family": "renderlayer",
@ -1,6 +1,7 @@
|
|||
import os
|
||||
import json
|
||||
import getpass
|
||||
import clique
|
||||
|
||||
from maya import cmds
|
||||
|
||||
|
|
@ -242,7 +243,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
|
||||
# Optional, enable double-click to preview rendered
|
||||
# frames from Deadline Monitor
|
||||
"OutputFilename0": output_filename_0.replace("\\", "/"),
|
||||
"OutputDirectory0": os.path.dirname(output_filename_0),
|
||||
"OutputFilename0": output_filename_0.replace("\\", "/")
|
||||
},
|
||||
"PluginInfo": {
|
||||
# Input
|
||||
|
|
@ -272,6 +274,26 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"AuxFiles": []
}

exp = instance.data.get("expectedFiles")

OutputFilenames = {}
expIndex = 0

if isinstance(exp[0], dict):
# we have AOVs and need to iterate over them
for aov, files in exp[0].items():
col = clique.assemble(files)[0][0]
outputFile = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
OutputFilenames[expIndex] = outputFile
expIndex += 1
else:
col = clique.assemble(files)[0][0]
outputFile = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
# OutputFilenames[expIndex] = outputFile


# We need those to pass them to pype for it to set correct context
keys = [
"FTRACK_API_KEY",
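The hunk above relies on clique to collapse each expectedFiles list into the single padded pattern Deadline wants for its `OutputFilename` entries. A self-contained sketch of that step; the file names are made up:

import clique

# Three frames of a hypothetical AOV; the leading zero in 0999 makes the
# padding unambiguous for clique.
files = [
    "/renders/shot010/beauty.0999.exr",
    "/renders/shot010/beauty.1000.exr",
    "/renders/shot010/beauty.1001.exr",
]

# clique.assemble() returns (collections, remainder); take the first
# collection and format it as head + printf padding + tail.
collections, remainder = clique.assemble(files)
col = collections[0]
print(col.format('{head}{padding}{tail}'))
# -> /renders/shot010/beauty.%04d.exr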
@ -1 +0,0 @@

@ -1 +0,0 @@
# usually used for mattepainting
@ -1,46 +0,0 @@
import pyblish.api


@pyblish.api.log
class CollectRenderTarget(pyblish.api.InstancePlugin):
"""Collect families for all instances"""

order = pyblish.api.CollectorOrder + 0.2
label = "Collect Render Target"
hosts = ["nuke", "nukeassist"]
families = ['write']

def process(self, instance):

node = instance[0]

self.log.info('processing {}'.format(node))

families = []
if instance.data.get('families'):
families += instance.data['families']

# set for ftrack to accept
# instance.data["families"] = ["ftrack"]

if node["render"].value():
# dealing with local/farm rendering
if node["render_farm"].value():
families.append("render.farm")
else:
families.append("render.local")
else:
families.append("render.frames")
# to ignore staging dir op in integrate
instance.data['transfer'] = False

families.append('ftrack')

instance.data["families"] = families

# Sort/grouped by family (preserving local index)
instance.context[:] = sorted(instance.context, key=self.sort_by_family)

def sort_by_family(self, instance):
"""Sort by family"""
return instance.data.get("families", instance.data.get("family"))
@ -1,147 +0,0 @@
import os
import json
import getpass

from avalon import api
from avalon.vendor import requests

import pyblish.api


class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# TODO: rewrite docstring to nuke
"""Submit current Comp to Deadline

Renders are submitted to a Deadline Web Service as
supplied via the environment variable DEADLINE_REST_URL

"""

label = "Submit to Deadline"
order = pyblish.api.IntegratorOrder
hosts = ["nuke"]
families = ["write", "render.deadline"]

def process(self, instance):

context = instance.context

key = "__hasRun{}".format(self.__class__.__name__)
if context.data.get(key, False):
return
else:
context.data[key] = True

DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
"http://localhost:8082")
assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

# Collect all saver instances in context that are to be rendered
write_instances = []
for instance in context[:]:
if not self.families[0] in instance.data.get("families"):
# Allow only saver family instances
continue

if not instance.data.get("publish", True):
# Skip inactive instances
continue
self.log.debug(instance.data["name"])
write_instances.append(instance)

if not write_instances:
raise RuntimeError("No instances found for Deadline submission")

hostVersion = int(context.data["hostVersion"])
filepath = context.data["currentFile"]
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
deadline_user = context.data.get("deadlineUser", getpass.getuser())

# Documentation for keys available at:
# https://docs.thinkboxsoftware.com
# /products/deadline/8.0/1_User%20Manual/manual
# /manual-submission.html#job-info-file-options
payload = {
"JobInfo": {
# Top-level group name
"BatchName": filename,

# Job name, as seen in Monitor
"Name": filename,

# User, as seen in Monitor
"UserName": deadline_user,

# Use a default submission pool for Nuke
"Pool": "nuke",

"Plugin": "Nuke",
"Frames": "{start}-{end}".format(
start=int(instance.data["frameStart"]),
end=int(instance.data["frameEnd"])
),

"Comment": comment,
},
"PluginInfo": {
# Input
"FlowFile": filepath,

# Mandatory for Deadline
"Version": str(hostVersion),

# Render in high quality
"HighQuality": True,

# Whether saver output should be checked after rendering
# is complete
"CheckOutput": True,

# Proxy: higher numbers smaller images for faster test renders
# 1 = no proxy quality
"Proxy": 1,
},

# Mandatory for Deadline, may be empty
"AuxFiles": []
}

# Enable going to rendered frames from Deadline Monitor
for index, instance in enumerate(write_instances):
path = instance.data["path"]
folder, filename = os.path.split(path)
payload["JobInfo"]["OutputDirectory%d" % index] = folder
payload["JobInfo"]["OutputFilename%d" % index] = filename

# Include critical variables with submission
keys = [
# TODO: This won't work if the slaves don't have access to
# these paths, such as if slaves are running Linux and the
# submitter is on Windows.
"PYTHONPATH",
"NUKE_PATH"
# "OFX_PLUGIN_PATH",
]
environment = dict({key: os.environ[key] for key in keys
if key in os.environ}, **api.Session)

payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
) for index, key in enumerate(environment)
})

self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))

# E.g. http://192.168.0.1:8082/api/jobs
url = "{}/api/jobs".format(DEADLINE_REST_URL)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)

# Store the response for dependent job submission plug-ins
for instance in write_instances:
instance.data["deadlineSubmissionJob"] = response.json()
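For reference, the core of what this deleted plugin did is a single REST call: POST a JobInfo/PluginInfo payload as JSON to the Deadline Web Service's `/api/jobs` endpoint. A condensed sketch; the URL and payload values are illustrative, not the exact keys a given Deadline plugin expects:

import requests

DEADLINE_REST_URL = "http://localhost:8082"  # example value

payload = {
    "JobInfo": {"Name": "example_script.nk", "Plugin": "Nuke",
                "Frames": "1001-1050"},
    "PluginInfo": {"SceneFile": "/path/to/example_script.nk",
                   "Version": "11"},
    "AuxFiles": [],
}

response = requests.post("{}/api/jobs".format(DEADLINE_REST_URL),
                         json=payload)
if not response.ok:
    raise Exception(response.text)
job = response.json()  # contains the job id used by dependent submissions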
@ -1,24 +0,0 @@
import pyblish.api


class IncrementTestPlugin(pyblish.api.ContextPlugin):
"""Increment current script version."""

order = pyblish.api.CollectorOrder + 0.5
label = "Test Plugin"
hosts = ['nuke']

def process(self, context):
instances = context[:]

prerender_check = list()
families_check = list()
for instance in instances:
if ("prerender" in str(instance)):
prerender_check.append(instance)
if instance.data.get("families", None):
families_check.append(True)

if len(prerender_check) != len(families_check):
self.log.info(prerender_check)
self.log.info(families_check)
@ -1,68 +0,0 @@
import nuke
import os
import pyblish.api
from avalon import io
# TODO: add repair function


@pyblish.api.log
class ValidateSettingsNuke(pyblish.api.Validator):
""" Validates settings """

families = ['scene']
hosts = ['nuke']
optional = True
label = 'Settings'

def process(self, instance):

asset = io.find_one({"name": os.environ['AVALON_ASSET']})
try:
avalon_resolution = asset["data"].get("resolution", '')
avalon_pixel_aspect = asset["data"].get("pixelAspect", '')
avalon_fps = asset["data"].get("fps", '')
avalon_first = asset["data"].get("frameStart", '')
avalon_last = asset["data"].get("frameEnd", '')
avalon_crop = asset["data"].get("crop", '')
except KeyError:
print(
"No resolution information found for \"{0}\".".format(
asset["name"]
)
)
return

# validating first frame
local_first = nuke.root()['first_frame'].value()
msg = 'First frame is incorrect.'
msg += '\n\nLocal first: %s' % local_first
msg += '\n\nOnline first: %s' % avalon_first
assert local_first == avalon_first, msg

# validating last frame
local_last = nuke.root()['last_frame'].value()
msg = 'Last frame is incorrect.'
msg += '\n\nLocal last: %s' % local_last
msg += '\n\nOnline last: %s' % avalon_last
assert local_last == avalon_last, msg

# validating fps
local_fps = nuke.root()['fps'].value()
msg = 'FPS is incorrect.'
msg += '\n\nLocal fps: %s' % local_fps
msg += '\n\nOnline fps: %s' % avalon_fps
assert local_fps == avalon_fps, msg

# validating resolution width
local_width = nuke.root().format().width()
msg = 'Width is incorrect.'
msg += '\n\nLocal width: %s' % local_width
msg += '\n\nOnline width: %s' % avalon_resolution[0]
assert local_width == avalon_resolution[0], msg

# validating resolution height
local_height = nuke.root().format().height()
msg = 'Height is incorrect.'
msg += '\n\nLocal height: %s' % local_height
msg += '\n\nOnline height: %s' % avalon_resolution[1]
assert local_height == avalon_resolution[1], msg
@ -1,33 +0,0 @@
import nuke

import pyblish.api


class RepairNukeProxyModeAction(pyblish.api.Action):

label = "Repair"
icon = "wrench"
on = "failed"

def process(self, context, plugin):

nuke.root()["proxy"].setValue(0)


class ValidateNukeProxyMode(pyblish.api.ContextPlugin):
"""Validates against having proxy mode on."""

order = pyblish.api.ValidatorOrder
optional = True
label = "Proxy Mode"
actions = [RepairNukeProxyModeAction]
hosts = ["nuke", "nukeassist"]
# targets = ["default", "process"]

def process(self, context):

msg = (
"Proxy mode is not supported. Please disable Proxy Mode in the "
"Project settings."
)
assert not nuke.root()["proxy"].getValue(), msg
@ -112,6 +112,7 @@ class LoadMov(api.Loader):
)
version = context['version']
version_data = version.get("data", {})
repr_id = context["representation"]["_id"]

orig_first = version_data.get("frameStart")
orig_last = version_data.get("frameEnd")
@ -120,12 +121,16 @@ class LoadMov(api.Loader):
first = orig_first - diff
last = orig_last - diff

handle_start = version_data.get("handleStart")
handle_end = version_data.get("handleEnd")
handle_start = version_data.get("handleStart", 0)
handle_end = version_data.get("handleEnd", 0)

colorspace = version_data.get("colorspace")
repr_cont = context["representation"]["context"]

self.log.debug(
"Representation id `{}` ".format(repr_id))

context["representation"]["_id"]
# create handles offset (only to last, because of mov)
last += handle_start + handle_end
# offset should include handles so it matches the original frame range
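A worked example of the handle offset applied above: a mov always starts on its first frame, so only `last` is extended by the handles. Frame numbers and handle sizes are illustrative:

orig_first, orig_last = 1001, 1050
handle_start, handle_end = 10, 10
diff = 0  # whatever offset the loader computed earlier

first = orig_first - diff
last = orig_last - diff
# single mov: extend only the tail by both handles
last += handle_start + handle_end
print(first, last)  # -> 1001 1070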
@ -138,7 +143,6 @@ class LoadMov(api.Loader):
file = self.fname

if not file:
repr_id = context["representation"]["_id"]
self.log.warning(
"Representation id `{}` is failing to load".format(repr_id))
return
@ -86,8 +86,11 @@ class LoadSequence(api.Loader):

version = context['version']
version_data = version.get("data", {})

repr_id = context["representation"]["_id"]

self.log.info("version_data: {}\n".format(version_data))
self.log.debug(
"Representation id `{}` ".format(repr_id))

self.first_frame = int(nuke.root()["first_frame"].getValue())
self.handle_start = version_data.get("handleStart", 0)
@ -1,25 +0,0 @@
from avalon import api, io
import pyblish.api


class CollectAssetInfo(pyblish.api.ContextPlugin):
"""Collect framerate."""

order = pyblish.api.CollectorOrder
label = "Collect Asset Info"
hosts = [
"nuke",
"nukeassist"
]

def process(self, context):
asset_data = io.find_one({
"type": "asset",
"name": api.Session["AVALON_ASSET"]
})
self.log.info("asset_data: {}".format(asset_data))

context.data['handles'] = int(asset_data["data"].get("handles", 0))
context.data["handleStart"] = int(asset_data["data"].get(
"handleStart", 0))
context.data["handleEnd"] = int(asset_data["data"].get("handleEnd", 0))
@ -1,22 +0,0 @@
import os
import pype.api as pype
import pyblish.api


class CollectScriptVersion(pyblish.api.ContextPlugin):
"""Collect Script Version."""

order = pyblish.api.CollectorOrder
label = "Collect Script Version"
hosts = [
"nuke",
"nukeassist"
]

def process(self, context):
file_path = context.data["currentFile"]
base_name = os.path.basename(file_path)
# get version string
version = pype.get_version_from_path(base_name)

context.data['version'] = version
@ -46,7 +46,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
)

if node["use_limit"].getValue():
handles = 0
first_frame = int(node["first"].getValue())
last_frame = int(node["last"].getValue())

@ -55,9 +54,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
output_dir = os.path.dirname(path)
self.log.debug('output dir: {}'.format(output_dir))

# # get version to instance for integration
# instance.data['version'] = instance.context.data.get(
# "version", pype.get_version_from_path(nuke.root().name()))
# get version to instance for integration
instance.data['version'] = instance.context.data["version"]

self.log.debug('Write Version: %s' % instance.data('version'))

@ -133,7 +131,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
"outputDir": output_dir,
"ext": ext,
"label": label,
"handles": handles,
"handleStart": handle_start,
"handleEnd": handle_end,
"frameStart": first_frame,
"frameEnd": last_frame,
"outputType": output_type,
@ -116,7 +116,7 @@ class ExtractThumbnail(pype.api.Extractor):
write_node["raw"].setValue(1)
write_node.setInput(0, previous_node)
temporary_nodes.append(write_node)
tags = ["thumbnail"]
tags = ["thumbnail", "publish_on_farm"]

# retime for
first_frame = int(last_frame) / 2
@ -5,7 +5,6 @@ import getpass
from avalon import api
from avalon.vendor import requests
import re

import pyblish.api
@ -23,6 +22,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
families = ["render.farm"]
optional = True

deadline_priority = 50
deadline_pool = ""
deadline_pool_secondary = ""
deadline_chunk_size = 1

def process(self, instance):

node = instance[0]
@ -55,7 +59,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
)
# Store output dir for unified publisher (filesequence)
instance.data["deadlineSubmissionJob"] = response.json()
instance.data["publishJobState"] = "Active"
instance.data["outputDir"] = os.path.dirname(
render_path).replace("\\", "/")
instance.data["publishJobState"] = "Suspended"

if instance.data.get("bakeScriptPath"):
render_path = instance.data.get("bakeRenderPath")
@ -87,6 +93,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
script_name = os.path.basename(script_path)
jobname = "%s - %s" % (script_name, instance.name)

output_filename_0 = self.preview_fname(render_path)

if not responce_data:
responce_data = {}
@ -96,6 +104,15 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
except OSError:
pass

# define chunk and priority
chunk_size = instance.data.get("deadlineChunkSize")
if chunk_size == 0:
chunk_size = self.deadline_chunk_size

priority = instance.data.get("deadlinePriority")
if priority != 50:
priority = self.deadline_priority

payload = {
"JobInfo": {
# Top-level group name
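The chunk-size resolution above treats 0 as "not set on the instance" and falls back to the plugin-level default (note that the priority branch, as committed, replaces any non-50 instance value with the plugin default instead). A small sketch of the sentinel-fallback pattern; the names are hypothetical:

DEFAULT_CHUNK_SIZE = 1  # stand-in for the plugin attribute

def resolve_chunk_size(instance_value, plugin_default=DEFAULT_CHUNK_SIZE):
    # 0 means "not set on the instance", so use the plugin default
    return plugin_default if instance_value == 0 else instance_value

print(resolve_chunk_size(0))   # -> 1 (plugin default)
print(resolve_chunk_size(10))  # -> 10 (instance override)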
@ -107,10 +124,11 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# Arbitrary username, for visualisation in Monitor
"UserName": self._deadline_user,

"Priority": instance.data["deadlinePriority"],
"Priority": priority,
"ChunkSize": chunk_size,

"Pool": "2d",
"SecondaryPool": "2d",
"Pool": self.deadline_pool,
"SecondaryPool": self.deadline_pool_secondary,

"Plugin": "Nuke",
"Frames": "{start}-{end}".format(
@ -119,6 +137,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
),
"Comment": self._comment,

# Optional, enable double-click to preview rendered
# frames from Deadline Monitor
"OutputFilename0": output_filename_0.replace("\\", "/")

},
"PluginInfo": {
# Input
@ -220,6 +242,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))

# adding expected files to instance.data
self.expected_files(instance, render_path)
self.log.debug("__ expectedFiles: `{}`".format(
instance.data["expectedFiles"]))
response = requests.post(self.deadline_url, json=payload)

if not response.ok:
@ -240,3 +266,51 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
"%f=%d was rounded off to nearest integer"
% (value, int(value))
)

def preview_fname(self, path):
"""Return output file path with #### for padding.

Deadline requires the path to be formatted with # in place of numbers.
For example `/path/to/render.####.png`

Args:
path (str): path to rendered images

Returns:
str

"""
self.log.debug("_ path: `{}`".format(path))
if "%" in path:
search_results = re.search(r"(%0)(\d)(d.)", path).groups()
self.log.debug("_ search_results: `{}`".format(search_results))
return int(search_results[1])
if "#" in path:
self.log.debug("_ path: `{}`".format(path))
return path
else:
return path

def expected_files(self,
instance,
path):
""" Create expected files in instance data
"""
if not instance.data.get("expectedFiles"):
instance.data["expectedFiles"] = list()

dir = os.path.dirname(path)
file = os.path.basename(path)

if "#" in file:
pparts = file.split("#")
padding = "%0{}d".format(len(pparts) - 1)
file = pparts[0] + padding + pparts[-1]

if "%" not in file:
instance.data["expectedFiles"].append(path)
return

for i in range(self._frame_start, (self._frame_end + 1)):
instance.data["expectedFiles"].append(
os.path.join(dir, (file % i)).replace("\\", "/"))
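A standalone sketch of the two conversions `expected_files()` relies on: turn `####` hash padding into a printf-style pattern, then expand it over the frame range. Paths and frame numbers are illustrative:

import os

path = "/renders/shot010/beauty.####.exr"
frame_start, frame_end = 1001, 1003

directory = os.path.dirname(path)
name = os.path.basename(path)

# "beauty.####.exr".split("#") -> ['beauty.', '', '', '', '.exr'],
# so len(parts) - 1 hashes become %04d
if "#" in name:
    parts = name.split("#")
    padding = "%0{}d".format(len(parts) - 1)
    name = parts[0] + padding + parts[-1]

expected = [
    os.path.join(directory, name % frame).replace("\\", "/")
    for frame in range(frame_start, frame_end + 1)
]
print(expected)
# -> ['/renders/shot010/beauty.1001.exr', ...,
#     '/renders/shot010/beauty.1003.exr']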
@ -18,7 +18,6 @@ class CollectClips(api.ContextPlugin):
context.data["assetsShared"] = dict()

projectdata = context.data["projectEntity"]["data"]
version = context.data.get("version", "001")
sequence = context.data.get("activeSequence")
selection = context.data.get("selection")
@ -108,8 +107,7 @@ class CollectClips(api.ContextPlugin):
"family": "clip",
"families": [],
"handleStart": projectdata.get("handleStart", 0),
"handleEnd": projectdata.get("handleEnd", 0),
"version": int(version)})
"handleEnd": projectdata.get("handleEnd", 0)})

instance = context.create_instance(**data)
@ -42,6 +42,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
width = int(sequence.format().width())
height = int(sequence.format().height())
pixel_aspect = sequence.format().pixelAspect()
fps = context.data["fps"]

# build data for inner nukestudio project property
data = {
@ -161,9 +162,10 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin):
"asset": asset,
"hierarchy": hierarchy,
"parents": parents,
"width": width,
"height": height,
"resolutionWidth": width,
"resolutionHeight": height,
"pixelAspect": pixel_aspect,
"fps": fps,
"tasks": instance.data["tasks"]
})
@ -223,9 +225,12 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
instance.data["parents"] = s_asset_data["parents"]
instance.data["hierarchy"] = s_asset_data["hierarchy"]
instance.data["tasks"] = s_asset_data["tasks"]
instance.data["width"] = s_asset_data["width"]
instance.data["height"] = s_asset_data["height"]
instance.data["resolutionWidth"] = s_asset_data[
"resolutionWidth"]
instance.data["resolutionHeight"] = s_asset_data[
"resolutionHeight"]
instance.data["pixelAspect"] = s_asset_data["pixelAspect"]
instance.data["fps"] = s_asset_data["fps"]

# adding frame start if any on instance
start_frame = s_asset_data.get("startingFrame")
@ -275,8 +280,8 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
# adding SourceResolution if Tag was present
if instance.data.get("main"):
in_info['custom_attributes'].update({
"resolutionWidth": instance.data["width"],
"resolutionHeight": instance.data["height"],
"resolutionWidth": instance.data["resolutionWidth"],
"resolutionHeight": instance.data["resolutionHeight"],
"pixelAspect": instance.data["pixelAspect"]
})
pype/plugins/nukestudio/publish/collect_instance_version.py (new file, 18 lines)

@ -0,0 +1,18 @@
from pyblish import api


class CollectInstanceVersion(api.InstancePlugin):
""" Collect versions of the Hiero project into instances.

If activated, every subset version is created as the version
of the actual project.
"""

order = api.CollectorOrder + 0.011
label = "Collect Instance Version"

def process(self, instance):
version = instance.context.data.get("version", "001")
instance.data.update({
"version": int(version)
})
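A hypothetical illustration of why the new plugin orders itself at `CollectorOrder + 0.011`: it must run after whichever collector stores the project version on the context (compare CollectScriptVersion above). Class names here are made up:

import pyblish.api

class ExampleContextVersion(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder  # runs first

    def process(self, context):
        context.data["version"] = "012"

class ExampleInstanceVersion(pyblish.api.InstancePlugin):
    order = pyblish.api.CollectorOrder + 0.011  # runs after the above

    def process(self, instance):
        instance.data["version"] = int(
            instance.context.data.get("version", "001"))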
@ -83,7 +83,7 @@ class CollectPlatesData(api.InstancePlugin):
class CollectPlatesData(api.InstancePlugin):
"""Collect plates"""

order = api.CollectorOrder + 0.495
order = api.CollectorOrder + 0.48
label = "Collect Plates Data"
hosts = ["nukestudio"]
families = ["plate"]
@ -126,7 +126,7 @@ class CollectPlatesData(api.InstancePlugin):
transfer_data = [
"handleStart", "handleEnd", "sourceIn", "sourceOut", "frameStart",
"frameEnd", "sourceInH", "sourceOutH", "clipIn", "clipOut",
"clipInH", "clipOutH", "asset", "track", "version", "width", "height", "pixelAspect"
"clipInH", "clipOutH", "asset", "track", "resolutionWidth", "resolutionHeight", "pixelAspect", "fps"
]

# pass data to version
@ -141,6 +141,13 @@ class CollectPlatesData(api.InstancePlugin):
"fps": instance.context.data["fps"]
})

version = instance.data.get("version")
if version:
version_data.update({
"version": version
})


try:
basename, ext = os.path.splitext(source_file)
head, padding = os.path.splitext(basename)
@ -196,7 +196,8 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
"asset": asset_name,
"family": instance.data["family"],
"subset": subset_name,
"version": version_number
"version": version_number,
"hierarchy": instance.data["hierarchy"]
})

resolution_width = instance.data.get("resolutionWidth")
@ -207,9 +208,13 @@ class ExtractVideoTracksLuts(pyblish.api.InstancePlugin):
if resolution_height:
anatomy_data["resolution_height"] = resolution_height

pixel_aspect = instance.data.get("pixelAspect")
if pixel_aspect:
anatomy_data["pixel_aspect"] = float("{:0.2f}".format(pixel_aspect))

fps = instance.data.get("fps")
if resolution_height:
anatomy_data["fps"] = fps
anatomy_data["fps"] = float("{:0.2f}".format(fps))

instance.data["projectEntity"] = project_entity
instance.data["assetEntity"] = asset_entity
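The `float("{:0.2f}".format(...))` idiom introduced above rounds to two decimals while keeping a float type, e.g. for a 23.976 fps clip:

fps = 23.976
print(float("{:0.2f}".format(fps)))  # -> 23.98
# round(fps, 2) gives the same result here; the string round-trip simply
# mirrors what the commit does for both pixel aspect and fps.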
@ -241,7 +241,7 @@ class TasksModel(TreeModel):
self.endResetModel()

def flags(self, index):
return QtCore.Qt.ItemIsEnabled
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable

def headerData(self, section, orientation, role):
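A small sketch of what the `flags()` change does: Qt item flags are a bitmask, so OR-ing `ItemIsSelectable` onto `ItemIsEnabled` lets task rows be highlighted as well as enabled. The import assumes a Qt.py-style binding shim; adjust to whichever binding is available:

from Qt import QtCore  # or e.g.: from PySide2 import QtCore

enabled_only = QtCore.Qt.ItemIsEnabled
selectable = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable

# Membership tests against the mask:
print(bool(selectable & QtCore.Qt.ItemIsSelectable))    # -> True
print(bool(enabled_only & QtCore.Qt.ItemIsSelectable))  # -> False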
res/icons/folder-favorite.png (new binary file, not shown; 6.8 KiB)
res/icons/folder-favorite2.png (new binary file, not shown; 22 KiB)
res/icons/folder-favorite3.png (new binary file, not shown; 7.8 KiB)
@ -1,54 +0,0 @@
'''
Simple socket server using threads
'''

import socket
import sys
import threading
import StringIO
import contextlib

import nuke

HOST = ''
PORT = 8888


@contextlib.contextmanager
def stdoutIO(stdout=None):
old = sys.stdout
if stdout is None:
stdout = StringIO.StringIO()
sys.stdout = stdout
yield stdout
sys.stdout = old


def _exec(data):
with stdoutIO() as s:
exec(data)
return s.getvalue()


def server_start():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((HOST, PORT))
s.listen(5)

while 1:
client, address = s.accept()
try:
data = client.recv(4096)
if data:
result = nuke.executeInMainThreadWithResult(_exec, args=(data))
client.send(str(result))
except SystemExit:
result = self.encode('SERVER: Shutting down...')
client.send(str(result))
raise
finally:
client.close()

t = threading.Thread(None, server_start)
t.setDaemon(True)
t.start()
@ -1,6 +1,5 @@
import os
import sys
import atom_server
import KnobScripter

from pype.nuke.lib import (