Merge branch 'master' into feature/PYPE-638_res_and_fps_in_burnins

Milan Kolar 2020-01-10 16:05:11 +01:00
commit 301b011c2b
14 changed files with 617 additions and 209 deletions

View file

@@ -28,7 +28,7 @@ class SyncToAvalonEvent(BaseEvent):
ignore_entTypes = [
"socialfeed", "socialnotification", "note",
"assetversion", "job", "user", "reviewsessionobject", "timer",
"timelog", "auth_userrole"
"timelog", "auth_userrole", "appointment"
]
ignore_ent_types = ["Milestone"]
ignore_keys = ["statusid"]
@@ -1438,9 +1438,11 @@ class SyncToAvalonEvent(BaseEvent):
if attr["entity_type"] != ent_info["entityType"]:
continue
if ent_info["entityType"] != "show":
if attr["object_type_id"] != ent_info["objectTypeId"]:
continue
if (
ent_info["entityType"] == "task" and
attr["object_type_id"] != ent_info["objectTypeId"]
):
continue
configuration_id = attr["id"]
entity_type_conf_ids[entity_type] = configuration_id
@@ -1712,7 +1714,8 @@ class SyncToAvalonEvent(BaseEvent):
if ca_ent_type == "show":
cust_attrs_by_obj_id[ca_ent_type][key] = cust_attr
else:
elif ca_ent_type == "task":
obj_id = cust_attr["object_type_id"]
cust_attrs_by_obj_id[obj_id][key] = cust_attr

View file

@@ -314,6 +314,9 @@ class SyncEntitiesFactory:
self.log.warning(msg)
return {"success": False, "message": msg}
self.log.debug((
"*** Synchronization initialization started <{}>."
).format(project_full_name))
# Check if `avalon_mongo_id` custom attribute exists or is accessible
if CustAttrIdKey not in ft_project["custom_attributes"]:
items = []
@@ -699,7 +702,7 @@ class SyncEntitiesFactory:
if ca_ent_type == "show":
avalon_attrs[ca_ent_type][key] = cust_attr["default"]
avalon_attrs_ca_id[ca_ent_type][key] = cust_attr["id"]
else:
elif ca_ent_type == "task":
obj_id = cust_attr["object_type_id"]
avalon_attrs[obj_id][key] = cust_attr["default"]
avalon_attrs_ca_id[obj_id][key] = cust_attr["id"]
@@ -708,7 +711,7 @@ class SyncEntitiesFactory:
if ca_ent_type == "show":
attrs_per_entity_type[ca_ent_type][key] = cust_attr["default"]
attrs_per_entity_type_ca_id[ca_ent_type][key] = cust_attr["id"]
else:
elif ca_ent_type == "task":
obj_id = cust_attr["object_type_id"]
attrs_per_entity_type[obj_id][key] = cust_attr["default"]
attrs_per_entity_type_ca_id[obj_id][key] = cust_attr["id"]

View file

@@ -18,13 +18,16 @@ def _subprocess(*args, **kwargs):
"""Convenience method for getting output errors for subprocess."""
# make sure environment contains only strings
filtered_env = {k: str(v) for k, v in os.environ.items()}
if not kwargs.get("env"):
filtered_env = {k: str(v) for k, v in os.environ.items()}
else:
filtered_env = {k: str(v) for k, v in kwargs.get("env").items()}
# set overrides
kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE)
kwargs['stderr'] = kwargs.get('stderr', subprocess.STDOUT)
kwargs['stdin'] = kwargs.get('stdin', subprocess.PIPE)
kwargs['env'] = kwargs.get('env',filtered_env)
kwargs['env'] = filtered_env
proc = subprocess.Popen(*args, **kwargs)
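A minimal sketch of what this fix changes for callers, assuming a hypothetical trimmed environment: previously a caller-supplied env skipped the string coercion, so non-string values could reach Popen.

import os

custom_env = dict(os.environ)
custom_env["PYPE_DEBUG"] = 3  # non-string on purpose; now coerced to "3"
_subprocess(["ffmpeg", "-version"], env=custom_env)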

View file

@@ -707,9 +707,11 @@ class WorkfileSettings(object):
frame_start = int(data["frameStart"]) - handle_start
frame_end = int(data["frameEnd"]) + handle_end
self._root_node["lock_range"].setValue(False)
self._root_node["fps"].setValue(fps)
self._root_node["first_frame"].setValue(frame_start)
self._root_node["last_frame"].setValue(frame_end)
self._root_node["lock_range"].setValue(True)
# setting active viewers
try:
@@ -1197,13 +1199,13 @@ class BuildWorkfile(WorkfileSettings):
self.ypos -= (self.ypos_size * multiply) + self.ypos_gap
class Exporter_review_lut:
class ExporterReview:
"""
Generator object for review lut from Nuke
Base class object for generating review data from Nuke
Args:
klass (pyblish.plugin): pyblish plugin parent
instance (pyblish.instance): instance of pyblish context
"""
_temp_nodes = []
@@ -1213,94 +1215,15 @@ class Exporter_review_lut:
def __init__(self,
klass,
instance,
name=None,
ext=None,
cube_size=None,
lut_size=None,
lut_style=None):
instance
):
self.log = klass.log
self.instance = instance
self.name = name or "baked_lut"
self.ext = ext or "cube"
self.cube_size = cube_size or 32
self.lut_size = lut_size or 1024
self.lut_style = lut_style or "linear"
self.stagingDir = self.instance.data["stagingDir"]
self.path_in = self.instance.data.get("path", None)
self.staging_dir = self.instance.data["stagingDir"]
self.collection = self.instance.data.get("collection", None)
# set frame start / end and file name to self
self.get_file_info()
self.log.info("File info was set...")
self.file = self.fhead + self.name + ".{}".format(self.ext)
self.path = os.path.join(self.stagingDir, self.file).replace("\\", "/")
def generate_lut(self):
# ---------- start nodes creation
# CMSTestPattern
cms_node = nuke.createNode("CMSTestPattern")
cms_node["cube_size"].setValue(self.cube_size)
# connect
self._temp_nodes.append(cms_node)
self.previous_node = cms_node
self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes))
# Node View Process
ipn = self.get_view_process_node()
if ipn is not None:
# connect
ipn.setInput(0, self.previous_node)
self._temp_nodes.append(ipn)
self.previous_node = ipn
self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))
# OCIODisplay
dag_node = nuke.createNode("OCIODisplay")
# connect
dag_node.setInput(0, self.previous_node)
self._temp_nodes.append(dag_node)
self.previous_node = dag_node
self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes))
# GenerateLUT
gen_lut_node = nuke.createNode("GenerateLUT")
gen_lut_node["file"].setValue(self.path)
gen_lut_node["file_type"].setValue(".{}".format(self.ext))
gen_lut_node["lut1d"].setValue(self.lut_size)
gen_lut_node["style1d"].setValue(self.lut_style)
# connect
gen_lut_node.setInput(0, self.previous_node)
self._temp_nodes.append(gen_lut_node)
self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes))
# ---------- end nodes creation
# Export lut file
nuke.execute(
gen_lut_node.name(),
int(self.first_frame),
int(self.first_frame))
self.log.info("Exported...")
# ---------- generate representation data
self.get_representation_data()
self.log.debug("Representation... `{}`".format(self.data))
# ---------- Clean up
for node in self._temp_nodes:
nuke.delete(node)
self.log.info("Deleted nodes...")
return self.data
def get_file_info(self):
if self.collection:
self.log.debug("Collection: `{}`".format(self.collection))
@@ -1313,7 +1236,7 @@ class Exporter_review_lut:
self.first_frame = min(self.collection.indexes)
self.last_frame = max(self.collection.indexes)
else:
self.fname = os.path.basename(self.instance.data.get("path", None))
self.fname = os.path.basename(self.path_in)
self.fhead = os.path.splitext(self.fname)[0] + "."
self.first_frame = self.instance.data.get("frameStart", None)
self.last_frame = self.instance.data.get("frameEnd", None)
@@ -1321,17 +1244,26 @@ class Exporter_review_lut:
if "#" in self.fhead:
self.fhead = self.fhead.replace("#", "")[:-1]
def get_representation_data(self):
def get_representation_data(self, tags=None, range=False):
add_tags = []
if tags:
add_tags = tags
repre = {
'name': self.name,
'ext': self.ext,
'files': self.file,
"stagingDir": self.stagingDir,
"stagingDir": self.staging_dir,
"anatomy_template": "publish",
"tags": [self.name.replace("_", "-")]
"tags": [self.name.replace("_", "-")] + add_tags
}
if range:
repre.update({
"frameStart": self.first_frame,
"frameEnd": self.last_frame,
})
self.data["representations"].append(repre)
def get_view_process_node(self):
@@ -1366,6 +1298,251 @@ class Exporter_review_lut:
return ipn
def clean_nodes(self):
for node in self._temp_nodes:
nuke.delete(node)
self.log.info("Deleted nodes...")
class ExporterReviewLut(ExporterReview):
"""
Generator object for review lut from Nuke
Args:
klass (pyblish.plugin): pyblish plugin parent
instance (pyblish.instance): instance of pyblish context
"""
def __init__(self,
klass,
instance,
name=None,
ext=None,
cube_size=None,
lut_size=None,
lut_style=None):
# initialize parent class
ExporterReview.__init__(self, klass, instance)
# deal with viewer lut being set to raw
if hasattr(klass, "viewer_lut_raw"):
self.viewer_lut_raw = klass.viewer_lut_raw
else:
self.viewer_lut_raw = False
self.name = name or "baked_lut"
self.ext = ext or "cube"
self.cube_size = cube_size or 32
self.lut_size = lut_size or 1024
self.lut_style = lut_style or "linear"
# set frame start / end and file name to self
self.get_file_info()
self.log.info("File info was set...")
self.file = self.fhead + self.name + ".{}".format(self.ext)
self.path = os.path.join(
self.staging_dir, self.file).replace("\\", "/")
def generate_lut(self):
# ---------- start nodes creation
# CMSTestPattern
cms_node = nuke.createNode("CMSTestPattern")
cms_node["cube_size"].setValue(self.cube_size)
# connect
self._temp_nodes.append(cms_node)
self.previous_node = cms_node
self.log.debug("CMSTestPattern... `{}`".format(self._temp_nodes))
# Node View Process
ipn = self.get_view_process_node()
if ipn is not None:
# connect
ipn.setInput(0, self.previous_node)
self._temp_nodes.append(ipn)
self.previous_node = ipn
self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))
if not self.viewer_lut_raw:
# OCIODisplay
dag_node = nuke.createNode("OCIODisplay")
# connect
dag_node.setInput(0, self.previous_node)
self._temp_nodes.append(dag_node)
self.previous_node = dag_node
self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes))
# GenerateLUT
gen_lut_node = nuke.createNode("GenerateLUT")
gen_lut_node["file"].setValue(self.path)
gen_lut_node["file_type"].setValue(".{}".format(self.ext))
gen_lut_node["lut1d"].setValue(self.lut_size)
gen_lut_node["style1d"].setValue(self.lut_style)
# connect
gen_lut_node.setInput(0, self.previous_node)
self._temp_nodes.append(gen_lut_node)
self.log.debug("GenerateLUT... `{}`".format(self._temp_nodes))
# ---------- end nodes creation
# Export lut file
nuke.execute(
gen_lut_node.name(),
int(self.first_frame),
int(self.first_frame))
self.log.info("Exported...")
# ---------- generate representation data
self.get_representation_data()
self.log.debug("Representation... `{}`".format(self.data))
# ---------- Clean up
self.clean_nodes()
return self.data
class ExporterReviewMov(ExporterReview):
"""
Generator object for review mov files from Nuke
Args:
klass (pyblish.plugin): pyblish plugin parent
instance (pyblish.instance): instance of pyblish context
"""
def __init__(self,
klass,
instance,
name=None,
ext=None,
):
# initialize parent class
ExporterReview.__init__(self, klass, instance)
# passing presets for nodes to self
if hasattr(klass, "nodes"):
self.nodes = klass.nodes
else:
self.nodes = {}
# deal with viewer lut being set to raw
if hasattr(klass, "viewer_lut_raw"):
self.viewer_lut_raw = klass.viewer_lut_raw
else:
self.viewer_lut_raw = False
self.name = name or "baked"
self.ext = ext or "mov"
# set frame start / end and file name to self
self.get_file_info()
self.log.info("File info was set...")
self.file = self.fhead + self.name + ".{}".format(self.ext)
self.path = os.path.join(
self.staging_dir, self.file).replace("\\", "/")
def render(self, render_node_name):
self.log.info("Rendering... ")
# Render Write node
nuke.execute(
render_node_name,
int(self.first_frame),
int(self.last_frame))
self.log.info("Rendered...")
def save_file(self):
with anlib.maintained_selection():
self.log.info("Saving nodes as file... ")
# select temp nodes
anlib.select_nodes(self._temp_nodes)
# create nk path
path = os.path.splitext(self.path)[0] + ".nk"
# save file to the path
nuke.nodeCopy(path)
self.log.info("Nodes exported...")
return path
def generate_mov(self, farm=False):
# ---------- start nodes creation
# Read node
r_node = nuke.createNode("Read")
r_node["file"].setValue(self.path_in)
r_node["first"].setValue(self.first_frame)
r_node["origfirst"].setValue(self.first_frame)
r_node["last"].setValue(self.last_frame)
r_node["origlast"].setValue(self.last_frame)
# connect
self._temp_nodes.append(r_node)
self.previous_node = r_node
self.log.debug("Read... `{}`".format(self._temp_nodes))
# View Process node
ipn = self.get_view_process_node()
if ipn is not None:
# connect
ipn.setInput(0, self.previous_node)
self._temp_nodes.append(ipn)
self.previous_node = ipn
self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))
if not self.viewer_lut_raw:
# OCIODisplay node
dag_node = nuke.createNode("OCIODisplay")
# connect
dag_node.setInput(0, self.previous_node)
self._temp_nodes.append(dag_node)
self.previous_node = dag_node
self.log.debug("OCIODisplay... `{}`".format(self._temp_nodes))
# Write node
write_node = nuke.createNode("Write")
self.log.debug("Path: {}".format(self.path))
self.instance.data["baked_colorspace_movie"] = self.path
write_node["file"].setValue(self.path)
write_node["file_type"].setValue(self.ext)
write_node["raw"].setValue(1)
# connect
write_node.setInput(0, self.previous_node)
self._temp_nodes.append(write_node)
self.log.debug("Write... `{}`".format(self._temp_nodes))
# ---------- end nodes creation
# ---------- render or save to nk
if farm:
path_nk = self.save_file()
self.data.update({
"bakeScriptPath": path_nk,
"bakeWriteNodeName": write_node.name(),
"bakeRenderPath": self.path
})
else:
self.render(write_node.name())
# ---------- generate representation data
self.get_representation_data(
tags=["review", "delete"],
range=True
)
self.log.debug("Representation... `{}`".format(self.data))
# ---------- Clean up
self.clean_nodes()
return self.data
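With farm=True the exporter skips the local render and returns bake metadata for a farm job instead; a sketch of the keys a caller gets back (as added above):

data = exporter.generate_mov(farm=True)
# data["bakeScriptPath"]    -> saved .nk containing the baking node graph
# data["bakeWriteNodeName"] -> Write node for the farm to execute
# data["bakeRenderPath"]    -> expected path of the baked mov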
def get_dependent_nodes(nodes):
"""Get all dependent nodes connected to the list of nodes.

View file

@@ -22,19 +22,16 @@ def has_unsaved_changes():
def save_file(filepath):
file = os.path.basename(filepath)
project = hiero.core.projects()[-1]
# close `Untitled` project
if "Untitled" not in project.name():
log.info("Saving project: `{}`".format(project.name()))
if project:
log.info("Saving project: `{}` as '{}'".format(project.name(), file))
project.saveAs(filepath)
elif not project:
else:
log.info("Creating new project...")
project = hiero.core.newProject()
project.saveAs(filepath)
else:
log.info("Dropping `Untitled` project...")
return
def open_file(filepath):

View file

@@ -188,14 +188,18 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
# Adding Custom Attributes
for attr, val in assetversion_cust_attrs.items():
if attr in assetversion_entity["custom_attributes"]:
assetversion_entity["custom_attributes"][attr] = val
continue
try:
assetversion_entity["custom_attributes"][attr] = val
session.commit()
continue
except Exception:
session.rollback()
self.log.warning((
"Custom Attrubute \"{0}\""
" is not available for AssetVersion."
" Can't set it's value to: \"{1}\""
).format(attr, str(val)))
" is not available for AssetVersion <{1}>."
" Can't set it's value to: \"{2}\""
).format(attr, assetversion_entity["id"], str(val)))
# Have to commit the version and asset, because location can't
# determine the final location without it.
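A minimal sketch of the per-attribute commit pattern introduced here, assuming the ftrack session and entity objects are in scope: committing each key separately keeps one unknown attribute from rolling back the rest.

for attr, val in assetversion_cust_attrs.items():
    try:
        assetversion_entity["custom_attributes"][attr] = val
        session.commit()  # commit per attribute
    except Exception:
        session.rollback()  # discard only the failed attribute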

View file

@@ -54,10 +54,6 @@ def collect(root,
patterns=[pattern],
minimum_items=1)
# Ignore any remainders
if remainder:
print("Skipping remainder {}".format(remainder))
# Exclude any frames outside start and end frame.
for collection in collections:
for index in list(collection.indexes):
@@ -71,7 +67,7 @@ def collect(root,
# Keep only collections that have at least a single frame
collections = [c for c in collections if c.indexes]
return collections
return collections, remainder
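Callers now unpack two values; the remainder carries files that did not assemble into any frame sequence (a sketch with assumed arguments):

collections, remainder = collect(
    root="/renders/shot010",  # assumed path
    regex=None,
    exclude_regex=None,
    frame_start=1001,
    frame_end=1100,
)
# collections -> clique collections (frame sequences)
# remainder   -> loose files such as "review.mov"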
class CollectRenderedFrames(pyblish.api.ContextPlugin):
@@ -119,8 +115,10 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
try:
data = json.load(f)
except Exception as exc:
self.log.error("Error loading json: "
"{} - Exception: {}".format(path, exc))
self.log.error(
"Error loading json: "
"{} - Exception: {}".format(path, exc)
)
raise
cwd = os.path.dirname(path)
@@ -148,11 +146,14 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
os.environ.update(session)
instance = metadata.get("instance")
if instance:
# here is the place to add support for noninteractive nuke
# ______________________________________
instance_family = instance.get("family")
pixel_aspect = instance.get("pixelAspect", 1)
resolution_width = instance.get("resolutionWidth", 1920)
resolution_height = instance.get("resolutionHeight", 1080)
lut_path = instance.get("lutPath", None)
else:
# Search in directory
data = dict()
@@ -163,14 +164,17 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
if regex:
self.log.info("Using regex: {}".format(regex))
collections = collect(root=root,
regex=regex,
exclude_regex=data.get("exclude_regex"),
frame_start=data.get("frameStart"),
frame_end=data.get("frameEnd"))
collections, remainder = collect(
root=root,
regex=regex,
exclude_regex=data.get("exclude_regex"),
frame_start=data.get("frameStart"),
frame_end=data.get("frameEnd"),
)
self.log.info("Found collections: {}".format(collections))
"""
if data.get("subset"):
# If subset is provided for this json then it must be a single
# collection.
@@ -178,79 +182,190 @@ class CollectRenderedFrames(pyblish.api.ContextPlugin):
self.log.error("Forced subset can only work with a single "
"found sequence")
raise RuntimeError("Invalid sequence")
"""
fps = data.get("fps", 25)
if data.get("user"):
context.data["user"] = data["user"]
# Get family from the data
families = data.get("families", ["render"])
if "render" not in families:
families.append("render")
if "ftrack" not in families:
families.append("ftrack")
if "review" not in families:
families.append("review")
if "write" in instance_family:
families.append("write")
for collection in collections:
instance = context.create_instance(str(collection))
self.log.info("Collection: %s" % list(collection))
if data.get("attachTo"):
# we need to attach found collections to existing
# subset version as review representation.
# Ensure each instance gets a unique reference to the data
for attach in data.get("attachTo"):
self.log.info(
"Attaching render {}:v{}".format(
attach["subset"], attach["version"]))
instance = context.create_instance(
attach["subset"])
instance.data.update(
{
"name": attach["subset"],
"version": attach["version"],
"family": 'review',
"families": ['review', 'ftrack'],
"asset": data.get(
"asset", api.Session["AVALON_ASSET"]),
"stagingDir": root,
"frameStart": data.get("frameStart"),
"frameEnd": data.get("frameEnd"),
"fps": fps,
"source": data.get("source", ""),
"pixelAspect": pixel_aspect
})
if "representations" not in instance.data:
instance.data["representations"] = []
for collection in collections:
self.log.info(
" - adding representation: {}".format(
str(collection))
)
ext = collection.tail.lstrip(".")
representation = {
"name": ext,
"ext": "{}".format(ext),
"files": list(collection),
"stagingDir": root,
"anatomy_template": "render",
"fps": fps,
"tags": ["review"],
}
instance.data["representations"].append(
representation)
elif data.get("subset"):
# if we have a subset, add all collections and the known
# remainder as representations
self.log.info(
"Adding representations to subset {}".format(
data.get("subset")))
instance = context.create_instance(data.get("subset"))
data = copy.deepcopy(data)
# If no subset provided, get it from collection's head
subset = data.get("subset", collection.head.rstrip("_. "))
# If no start or end frame provided, get it from collection
indices = list(collection.indexes)
start = data.get("frameStart", indices[0])
end = data.get("frameEnd", indices[-1])
self.log.debug("Collected pixel_aspect:\n"
"{}".format(pixel_aspect))
self.log.debug("type pixel_aspect:\n"
"{}".format(type(pixel_aspect)))
# root = os.path.normpath(root)
# self.log.info("Source: {}}".format(data.get("source", "")))
ext = list(collection)[0].split('.')[-1]
instance.data.update({
"name": str(collection),
"family": families[0], # backwards compatibility / pyblish
"families": list(families),
"subset": subset,
"asset": data.get("asset", api.Session["AVALON_ASSET"]),
"stagingDir": root,
"frameStart": start,
"frameEnd": end,
"fps": fps,
"source": data.get('source', ''),
"pixelAspect": pixel_aspect,
})
if lut_path:
instance.data.update({"lutPath": lut_path})
instance.append(collection)
instance.context.data['fps'] = fps
instance.data.update(
{
"name": data.get("subset"),
"family": families[0],
"families": list(families),
"subset": data.get("subset"),
"asset": data.get(
"asset", api.Session["AVALON_ASSET"]),
"stagingDir": root,
"frameStart": data.get("frameStart"),
"frameEnd": data.get("frameEnd"),
"fps": fps,
"source": data.get("source", ""),
"pixelAspect": pixel_aspect,
}
)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': ext,
'ext': '{}'.format(ext),
'files': list(collection),
"stagingDir": root,
"anatomy_template": "render",
"fps": fps,
"tags": ['review']
}
instance.data["representations"].append(representation)
for collection in collections:
self.log.info(" - {}".format(str(collection)))
if data.get('user'):
context.data["user"] = data['user']
ext = collection.tail.lstrip(".")
self.log.debug("Collected instance:\n"
"{}".format(pformat(instance.data)))
representation = {
"name": ext,
"ext": "{}".format(ext),
"files": list(collection),
"stagingDir": root,
"anatomy_template": "render",
"fps": fps,
"tags": ["review"],
}
instance.data["representations"].append(
representation)
# process remainders
for rem in remainder:
# add only known types to representation
if rem.split(".")[-1] in ['mov', 'jpg', 'mp4']:
self.log.info(" . {}".format(rem))
representation = {
"name": rem.split(".")[-1],
"ext": "{}".format(rem.split(".")[-1]),
"files": rem,
"stagingDir": root,
"anatomy_template": "render",
"fps": fps,
"tags": ["review"],
}
instance.data["representations"].append(
representation)
else:
# we have no subset so we take every collection and create one
# from it
for collection in collections:
instance = context.create_instance(str(collection))
self.log.info("Creating subset from: %s" % str(collection))
# Ensure each instance gets a unique reference to the data
data = copy.deepcopy(data)
# If no subset provided, get it from collection's head
subset = data.get("subset", collection.head.rstrip("_. "))
# If no start or end frame provided, get it from collection
indices = list(collection.indexes)
start = data.get("frameStart", indices[0])
end = data.get("frameEnd", indices[-1])
ext = list(collection)[0].split(".")[-1]
if "review" not in families:
families.append("review")
instance.data.update(
{
"name": str(collection),
"family": families[0], # backwards compatibility
"families": list(families),
"subset": subset,
"asset": data.get(
"asset", api.Session["AVALON_ASSET"]),
"stagingDir": root,
"frameStart": start,
"frameEnd": end,
"fps": fps,
"source": data.get("source", ""),
"pixelAspect": pixel_aspect,
}
)
if lut_path:
instance.data.update({"lutPath": lut_path})
instance.append(collection)
instance.context.data["fps"] = fps
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
"name": ext,
"ext": "{}".format(ext),
"files": list(collection),
"stagingDir": root,
"anatomy_template": "render",
"fps": fps,
"tags": ["review"],
}
instance.data["representations"].append(representation)

View file

@@ -1,5 +1,4 @@
import os
import math
import pyblish.api
import clique
import pype.api
@@ -25,14 +24,16 @@ class ExtractReview(pyblish.api.InstancePlugin):
ext_filter = []
def process(self, instance):
to_width = 1920
to_height = 1080
output_profiles = self.outputs or {}
inst_data = instance.data
fps = inst_data.get("fps")
start_frame = inst_data.get("frameStart")
resolution_height = instance.data.get("resolutionHeight", 1080)
resolution_width = instance.data.get("resolutionWidth", 1920)
resolution_width = instance.data.get("resolutionWidth", to_width)
resolution_height = instance.data.get("resolutionHeight", to_height)
pixel_aspect = instance.data.get("pixelAspect", 1)
self.log.debug("Families In: `{}`".format(instance.data["families"]))
@@ -155,13 +156,38 @@ class ExtractReview(pyblish.api.InstancePlugin):
# preset's output data
output_args.extend(profile.get('output', []))
# defining image ratios
resolution_ratio = float(resolution_width / (
resolution_height * pixel_aspect))
delivery_ratio = float(to_width) / float(to_height)
self.log.debug(resolution_ratio)
self.log.debug(delivery_ratio)
# get scale factor
scale_factor = to_height / (
resolution_height * pixel_aspect)
self.log.debug(scale_factor)
# letter_box
lb = profile.get('letter_box', 0)
if lb is not 0:
if lb != 0:
ffmpeg_width = to_width
ffmpeg_height = to_height
if "reformat" not in p_tags:
lb /= pixel_aspect
if resolution_ratio != delivery_ratio:
ffmpeg_width = resolution_width
ffmpeg_height = int(
resolution_height * pixel_aspect)
else:
# TODO: it might still be failing in some cases
if resolution_ratio != delivery_ratio:
lb /= scale_factor
else:
lb /= pixel_aspect
output_args.append(
"-filter:v scale=1920x1080:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{0})))/2):iw:round((ih-(iw*(1/{0})))/2):t=fill:c=black".format(lb))
"-filter:v scale={0}x{1}:flags=lanczos,setsar=1,drawbox=0:0:iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black,drawbox=0:ih-round((ih-(iw*(1/{2})))/2):iw:round((ih-(iw*(1/{2})))/2):t=fill:c=black".format(ffmpet_width, ffmpet_height, lb))
# In case audio is longer than video.
output_args.append("-shortest")
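A worked sketch of the new letterbox compensation with assumed preset values; letter_box is the target aspect used by the drawbox expressions:

to_width, to_height = 1920, 1080
resolution_width, resolution_height = 1920, 1080
pixel_aspect = 2.0  # assumed anamorphic source
lb = 2.35           # assumed preset letter_box

resolution_ratio = resolution_width / (resolution_height * pixel_aspect)  # 0.889
delivery_ratio = float(to_width) / float(to_height)                       # 1.778
scale_factor = to_height / (resolution_height * pixel_aspect)             # 0.5

lb_no_reformat = lb / pixel_aspect  # 1.175, drawn on the unscaled frame
lb_reformat = lb / scale_factor     # 4.7, compensates the later rescale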
@@ -172,22 +198,26 @@ class ExtractReview(pyblish.api.InstancePlugin):
self.log.debug("__ pixel_aspect: `{}`".format(pixel_aspect))
self.log.debug("__ resolution_width: `{}`".format(resolution_width))
self.log.debug("__ resolution_height: `{}`".format(resolution_height))
# scaling non-square pixels and 1920 width
if "reformat" in p_tags:
width_scale = 1920
width_half_pad = 0
res_w = int(float(resolution_width) * pixel_aspect)
height_half_pad = int((
(res_w - 1920) / (
res_w * .01) * (
1080 * .01)) / 2
)
height_scale = 1080 - (height_half_pad * 2)
if height_scale > 1080:
if resolution_ratio < delivery_ratio:
self.log.debug("lower then delivery")
width_scale = int(to_width * scale_factor)
width_half_pad = int((
to_width - width_scale)/2)
height_scale = to_height
height_half_pad = 0
height_scale = 1080
width_half_pad = (1920 - (float(resolution_width) * (1080 / float(resolution_height))) ) / 2
width_scale = int(1920 - (width_half_pad * 2))
else:
self.log.debug("heigher then delivery")
width_scale = to_width
width_half_pad = 0
scale_factor = float(to_width) / float(resolution_width)
self.log.debug(scale_factor)
height_scale = int(
resolution_height * scale_factor)
height_half_pad = int(
(to_height - height_scale)/2)
self.log.debug("__ width_scale: `{}`".format(width_scale))
self.log.debug("__ width_half_pad: `{}`".format(width_half_pad))
@@ -195,8 +225,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
self.log.debug("__ height_half_pad: `{}`".format(height_half_pad))
scaling_arg = "scale={0}x{1}:flags=lanczos,pad=1920:1080:{2}:{3}:black,setsar=1".format(
width_scale, height_scale, width_half_pad, height_half_pad
scaling_arg = "scale={0}x{1}:flags=lanczos,pad={2}:{3}:{4}:{5}:black,setsar=1".format(
width_scale, height_scale, to_width, to_height, width_half_pad, height_half_pad
)
vf_back = self.add_video_filter_args(
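A worked example of the new pad math for a 2.39:1 source with assumed values; this hits the "higher than delivery" branch, which scales to full width and pads the height:

to_width, to_height = 1920, 1080
resolution_width, resolution_height = 2048, 858  # assumed DCI scope
pixel_aspect = 1.0

width_scale = to_width                                  # 1920
scale_factor = float(to_width) / resolution_width       # 0.9375
height_scale = int(resolution_height * scale_factor)    # 804
height_half_pad = int((to_height - height_scale) / 2)   # 138
# -> "scale=1920x804:flags=lanczos,pad=1920:1080:0:138:black,setsar=1"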

View file

@@ -21,6 +21,12 @@ def _get_script():
if module_path.endswith(".pyc"):
module_path = module_path[:-len(".pyc")] + ".py"
module_path = os.path.normpath(module_path)
mount_root = os.path.normpath(os.environ['PYPE_STUDIO_CORE_MOUNT'])
network_root = os.path.normpath(os.environ['PYPE_STUDIO_CORE_PATH'])
module_path = module_path.replace(mount_root, network_root)
return module_path
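A sketch of the mount-to-network remap with made-up roots standing in for PYPE_STUDIO_CORE_MOUNT and PYPE_STUDIO_CORE_PATH; farm workers resolve the UNC path, not a local drive mapping:

import os

mount_root = os.path.normpath("P:/pype")          # assumed ..._CORE_MOUNT
network_root = os.path.normpath("//server/pype")  # assumed ..._CORE_PATH
module_path = os.path.normpath("P:/pype/scripts/publish.py")  # hypothetical

module_path = module_path.replace(mount_root, network_root)
# -> \\server\pype\scripts\publish.py (on Windows)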
@@ -164,6 +170,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)
metadata_path = os.path.normpath(metadata_path)
mount_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_MOUNT'])
network_root = os.path.normpath(os.environ['PYPE_STUDIO_PROJECTS_PATH'])
metadata_path = metadata_path.replace(mount_root, network_root)
# Generate the payload for Deadline submission
payload = {
"JobInfo": {
@@ -282,6 +294,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
relative_path = os.path.relpath(source, api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
# find subsets and version to attach render to
attach_to = instance.data.get("attachTo")
attach_subset_versions = []
if attach_to:
for subset in attach_to:
for instance in context:
if instance.data["subset"] != subset["subset"]:
continue
attach_subset_versions.append(
{"version": instance.data["version"],
"subset": subset["subset"],
"family": subset["family"]})
# Write metadata for publish job
metadata = {
"asset": asset,

View file

@@ -6,9 +6,6 @@ from pype import api as pype
import nuke
log = pype.Logger().get_logger(__name__, "nuke")
class CrateRead(avalon.nuke.Creator):
# change this to template preset
name = "ReadCopy"

View file

@@ -7,10 +7,6 @@ from pypeapp import config
import nuke
log = pype.Logger().get_logger(__name__, "nuke")
class CreateWriteRender(plugin.PypeCreator):
# change this to template preset
name = "WriteRender"

View file

@@ -6,7 +6,7 @@ import pype
reload(pnlib)
class ExtractReviewLutData(pype.api.Extractor):
class ExtractReviewDataLut(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
must be run after extract_render_local.py
@@ -37,8 +37,9 @@ class ExtractReviewLutData(pype.api.Extractor):
self.log.info(
"StagingDir `{0}`...".format(instance.data["stagingDir"]))
# generate data
with anlib.maintained_selection():
exporter = pnlib.Exporter_review_lut(
exporter = pnlib.ExporterReviewLut(
self, instance
)
data = exporter.generate_lut()

View file

@@ -0,0 +1,56 @@
import os
import pyblish.api
from avalon.nuke import lib as anlib
from pype.nuke import lib as pnlib
import pype
reload(pnlib)
class ExtractReviewDataMov(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
must be run after extract_render_local.py
"""
order = pyblish.api.ExtractorOrder + 0.01
label = "Extract Review Data Mov"
families = ["review"]
hosts = ["nuke"]
def process(self, instance):
families = instance.data["families"]
self.log.info("Creating staging dir...")
if "representations" in instance.data:
staging_dir = instance.data[
"representations"][0]["stagingDir"].replace("\\", "/")
instance.data["stagingDir"] = staging_dir
instance.data["representations"][0]["tags"] = []
else:
instance.data["representations"] = []
# get output path
render_path = instance.data['path']
staging_dir = os.path.normpath(os.path.dirname(render_path))
instance.data["stagingDir"] = staging_dir
self.log.info(
"StagingDir `{0}`...".format(instance.data["stagingDir"]))
# generate data
with anlib.maintained_selection():
exporter = pnlib.ExporterReviewMov(
self, instance)
if "render.farm" in families:
instance.data["families"].remove("review")
instance.data["families"].remove("ftrack")
data = exporter.generate_mov(farm=True)
else:
data = exporter.generate_mov()
# assign to representations
instance.data["representations"] += data["representations"]
self.log.debug(
"_ representations: {}".format(instance.data["representations"]))

View file

@@ -159,12 +159,13 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
options['frame_offset'] = start_frame
expr = r'%%{eif\:n+%d\:d}' % options['frame_offset']
_text = str(int(self.end_frame + options['frame_offset']))
if text and isinstance(text, str):
text = r"{}".format(text)
expr = text.replace("{current_frame}", expr)
text = text.replace("{current_frame}", _text)
options['expression'] = expr
text = str(int(self.end_frame + options['frame_offset']))
self._add_burnin(text, align, options, ffmpeg_burnins.DRAWTEXT)
def add_timecode(self, align, options=None, start_frame=None):
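A sketch of the new {current_frame} token handling with assumed frame values: the token expands to a drawtext expression for live frame numbers, while the static variant renders the widest value so the burnin box is sized correctly.

start_frame, end_frame = 1001, 1099  # assumed range
frame_offset = start_frame

expr = r'%%{eif\:n+%d\:d}' % frame_offset   # -> '%{eif\:n+1001\:d}'
_text = str(int(end_frame + frame_offset))  # -> '2100'

text = "frame {current_frame}"  # assumed burnin preset text
expression = text.replace("{current_frame}", expr)
static_text = text.replace("{current_frame}", _text)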