mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-24 21:04:40 +01:00
Merged in bugfix/PYPE-746-nk-compatibility-with-new-rende (pull request #520)

Bugfix/PYPE-746 nk compatibility with new rende

Approved-by: Jakub Ježek <jakub@pype.club>
Approved-by: Milan Kolar <milan@orbi.tools>

Commit d29fe63829

11 changed files with 169 additions and 61 deletions
@@ -21,7 +21,7 @@ class CollectSceneVersion(pyblish.api.ContextPlugin):
         if '<shell>' in filename:
             return

-        rootVersion = pype.get_version_from_path(filename)
+        rootVersion = int(pype.get_version_from_path(filename))
         context.data['version'] = rootVersion
-
+        self.log.info("{}".format(type(rootVersion)))
         self.log.info('Scene Version: %s' % context.data.get('version'))
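Note on the int() cast above: pype.get_version_from_path() evidently returns the version token as a string (hence the added type() log), so the value is normalized to an integer before being stored on the context. A minimal sketch of that kind of parsing, using a hypothetical regex rather than the actual pype implementation:

    import re

    def get_version_from_path(path):
        # Hypothetical stand-in for pype.get_version_from_path: take the last
        # "v###" token from the file name and return its digits as a string.
        matches = re.findall(r"[/._-]v(\d+)", path.replace("\\", "/"))
        return matches[-1] if matches else None

    filename = "shot010_comp_v003.nk"
    version = int(get_version_from_path(filename))  # 3, comparable and incrementable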
@@ -27,8 +27,9 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
         representations_new = representations[:]

         for repre in representations:
+            tags = repre.get("tags", [])
             self.log.debug(repre)
-            valid = 'review' in repre['tags'] or "thumb-nuke" in repre['tags']
+            valid = 'review' in tags or "thumb-nuke" in tags
             if not valid:
                 continue
@@ -131,6 +131,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         - publishJobState (str, Optional): "Active" or "Suspended"
             This defaults to "Suspended"

+        - expectedFiles (list or dict): explained bellow
+
     """

     label = "Submit image sequence jobs to Deadline or Muster"
@@ -166,7 +168,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         instance_transfer = {
             "slate": ["slateFrame"],
             "review": ["lutPath"],
-            "render.farm": ["bakeScriptPath", "bakeRenderPath", "bakeWriteNodeName"]
+            "render.farm": ["bakeScriptPath", "bakeRenderPath",
+                            "bakeWriteNodeName", "version"]
         }

         # list of family names to transfer to new family if present
@@ -384,13 +387,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                 "tags": ["review"] if preview else []
             }

-            # add tags
-            if preview:
-                if "ftrack" not in new_instance["families"]:
-                    if os.environ.get("FTRACK_SERVER"):
-                        new_instance["families"].append("ftrack")
-                if "review" not in new_instance["families"]:
-                    new_instance["families"].append("review")
+            self._solve_families(new_instance, preview)

             new_instance["representations"] = [rep]
@@ -399,6 +396,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             if new_instance.get("extendFrames", False):
                 self._copy_extend_frames(new_instance, rep)
             instances.append(new_instance)

         return instances

     def _get_representations(self, instance, exp_files):
@@ -419,6 +417,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         start = int(instance.get("frameStart"))
         end = int(instance.get("frameEnd"))
         cols, rem = clique.assemble(exp_files)
+        bake_render_path = instance.get("bakeRenderPath")
+
         # create representation for every collected sequence
         for c in cols:
             ext = c.tail.lstrip(".")
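For context, clique.assemble() splits a flat list of rendered file names into frame-numbered collections plus a remainder of files that belong to no sequence, and c.tail is the part after the frame number (typically the extension). A rough sketch with invented paths, not the plugin's real data:

    import os
    import clique

    exp_files = [
        "/renders/shot010/beauty.1001.exr",
        "/renders/shot010/beauty.1002.exr",
        "/renders/shot010/beauty.1003.exr",
        "/renders/shot010/slate.png",        # no frame sequence -> remainder
    ]

    cols, rem = clique.assemble(exp_files)
    for c in cols:
        ext = c.tail.lstrip(".")                        # "exr"
        files = [os.path.basename(f) for f in list(c)]  # ["beauty.1001.exr", ...]
        print(ext, files)
    print(rem)                                          # leftover, non-sequence files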
@@ -435,8 +435,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
                     preview = True
                     break
+            break
+
+        if bake_render_path:
+            preview = False

             rep = {
-                "name": str(c),
+                "name": ext,
                 "ext": ext,
                 "files": [os.path.basename(f) for f in list(c)],
                 "frameStart": start,
@@ -450,32 +454,42 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

             representations.append(rep)

-        families = instance.get("families")
-        # if we have one representation with preview tag
-        # flag whole instance for review and for ftrack
-        if preview:
-            if "ftrack" not in families:
-                if os.environ.get("FTRACK_SERVER"):
-                    families.append("ftrack")
-            if "review" not in families:
-                families.append("review")
-        instance["families"] = families
-
+        self._solve_families(instance, preview)
         # add reminders as representations
         for r in rem:
             ext = r.split(".")[-1]
             rep = {
-                "name": r,
+                "name": ext,
                 "ext": ext,
                 "files": os.path.basename(r),
                 "stagingDir": os.path.dirname(r),
                 "anatomy_template": "publish",
             }

+            if r in bake_render_path:
+                rep.update({
+                    "fps": instance.get("fps"),
+                    "anatomy_template": "render",
+                    "tags": ["review", "delete"]
+                })
+                # solve families with `preview` attributes
+                self._solve_families(instance, True)
             representations.append(rep)

         return representations

+    def _solve_families(self, instance, preview=False):
+        families = instance.get("families")
+        # if we have one representation with preview tag
+        # flag whole instance for review and for ftrack
+        if preview:
+            if "ftrack" not in families:
+                if os.environ.get("FTRACK_SERVER"):
+                    families.append("ftrack")
+            if "review" not in families:
+                families.append("review")
+        instance["families"] = families
+
     def process(self, instance):
         """
         Detect type of renderfarm submission and create and post dependend job
@@ -485,7 +499,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         :param instance: Instance data
         :type instance: dict
         """

         data = instance.data.copy()
         context = instance.context
         self.context = context
@@ -518,10 +531,23 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         start = instance.data.get("frameStart")
         if start is None:
             start = context.data["frameStart"]

         end = instance.data.get("frameEnd")
         if end is None:
             end = context.data["frameEnd"]
+
+        handle_start = instance.data.get("handleStart")
+        if handle_start is None:
+            handle_start = context.data["handleStart"]
+
+        handle_end = instance.data.get("handleEnd")
+        if handle_end is None:
+            handle_end = context.data["handleEnd"]
+
+        fps = instance.data.get("fps")
+        if fps is None:
+            fps = context.data["fps"]
+
         if data.get("extendFrames", False):
             start, end = self._extend_frames(
                 asset,
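The block above repeats one pattern five times: prefer the value stored on the instance and fall back to the publish context only when the instance carries none. A small helper sketch of the equivalent logic (not part of the actual plugin):

    def from_instance_or_context(instance, key):
        # None means "not set on the instance", so fall back to the context.
        value = instance.data.get(key)
        if value is None:
            value = instance.context.data[key]
        return value

    # start = from_instance_or_context(instance, "frameStart")
    # fps = from_instance_or_context(instance, "fps")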
@@ -550,7 +576,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "asset": asset,
             "frameStart": start,
             "frameEnd": end,
-            "fps": data.get("fps", 25),
+            "handleStart": handle_start,
+            "handleEnd": handle_end,
+            "fps": fps,
             "source": source,
             "extendFrames": data.get("extendFrames"),
             "overrideExistingFrame": data.get("overrideExistingFrame"),
@@ -571,6 +599,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         for v in values:
             instance_skeleton_data[v] = instance.data.get(v)

+        # look into instance data if representations are not having any
+        # which are having tag `publish_on_farm` and include them
+        for r in instance.data.get("representations", []):
+            if "publish_on_farm" in r.get("tags"):
+                # create representations attribute of not there
+                if "representations" not in instance_skeleton_data.keys():
+                    instance_skeleton_data["representations"] = []
+
+                instance_skeleton_data["representations"].append(r)
+
         instances = None
         assert data.get("expectedFiles"), ("Submission from old Pype version"
                                            " - missing expectedFiles")
@@ -644,7 +682,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             data.get("expectedFiles")
         )

-        if "representations" not in instance_skeleton_data:
+        if "representations" not in instance_skeleton_data.keys():
             instance_skeleton_data["representations"] = []

         # add representation
@@ -220,6 +220,8 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
                                      layer=layer_name)),
             "renderer": self.get_render_attribute("currentRenderer",
                                                   layer=layer_name),
+            "handleStart": context.data["assetEntity"]['data']['handleStart'],
+            "handleEnd": context.data["assetEntity"]['data']['handleEnd'],

             # instance subset
             "family": "renderlayer",
@@ -1,6 +1,7 @@
 import os
 import json
 import getpass
+import clique

 from maya import cmds
@@ -242,7 +243,8 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):

             # Optional, enable double-click to preview rendered
             # frames from Deadline Monitor
-            "OutputFilename0": output_filename_0.replace("\\", "/"),
+            "OutputDirectory0": os.path.dirname(output_filename_0),
+            "OutputFilename0": output_filename_0.replace("\\", "/")
         },
         "PluginInfo": {
             # Input
@@ -272,6 +274,26 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
             "AuxFiles": []
         }

+        exp = instance.data.get("expectedFiles")
+
+        OutputFilenames = {}
+        expIndex = 0
+
+        if isinstance(exp[0], dict):
+            # we have aovs and we need to iterate over them
+            for aov, files in exp[0].items():
+                col = clique.assemble(files)[0][0]
+                outputFile = col.format('{head}{padding}{tail}')
+                payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
+                OutputFilenames[expIndex] = outputFile
+                expIndex += 1
+        else:
+            col = clique.assemble(files)[0][0]
+            outputFile = col.format('{head}{padding}{tail}')
+            payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
+            # OutputFilenames[expIndex] = outputFile
+
+
         # We need those to pass them to pype for it to set correct context
         keys = [
             "FTRACK_API_KEY",
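When expectedFiles holds a per-AOV dict, each AOV's file list is collapsed back into a single padded path and registered as OutputFilenameN in JobInfo so Deadline Monitor can preview it. A sketch with invented file names, covering only the dict-shaped branch of the loop above:

    import clique

    exp = [{
        "beauty": ["beauty.1001.exr", "beauty.1002.exr"],
        "diffuse": ["diffuse.1001.exr", "diffuse.1002.exr"],
    }]

    job_info = {}
    if isinstance(exp[0], dict):
        for index, (aov, files) in enumerate(exp[0].items()):
            col = clique.assemble(files)[0][0]
            # e.g. "beauty.%04d.exr" -- one output entry per AOV
            job_info["OutputFilename" + str(index)] = col.format("{head}{padding}{tail}")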
@@ -112,6 +112,7 @@ class LoadMov(api.Loader):
         )
         version = context['version']
         version_data = version.get("data", {})
+        repr_id = context["representation"]["_id"]

         orig_first = version_data.get("frameStart")
         orig_last = version_data.get("frameEnd")
@@ -120,12 +121,16 @@ class LoadMov(api.Loader):
         first = orig_first - diff
         last = orig_last - diff

-        handle_start = version_data.get("handleStart")
-        handle_end = version_data.get("handleEnd")
+        handle_start = version_data.get("handleStart", 0)
+        handle_end = version_data.get("handleEnd", 0)

         colorspace = version_data.get("colorspace")
+        repr_cont = context["representation"]["context"]

+        self.log.debug(
+            "Representation id `{}` ".format(repr_id))
+
         context["representation"]["_id"]
         # create handles offset (only to last, because of mov)
         last += handle_start + handle_end
         # offset should be with handles so it match orig frame range
@@ -138,7 +143,6 @@ class LoadMov(api.Loader):
         file = self.fname

         if not file:
-            repr_id = context["representation"]["_id"]
             self.log.warning(
                 "Representation id `{}` is failing to load".format(repr_id))
             return
@@ -86,8 +86,11 @@ class LoadSequence(api.Loader):

         version = context['version']
         version_data = version.get("data", {})

+        repr_id = context["representation"]["_id"]
+
         self.log.info("version_data: {}\n".format(version_data))
+        self.log.debug(
+            "Representation id `{}` ".format(repr_id))

         self.first_frame = int(nuke.root()["first_frame"].getValue())
         self.handle_start = version_data.get("handleStart", 0)
@@ -1,22 +0,0 @@
-import os
-import pype.api as pype
-import pyblish.api
-
-
-class CollectScriptVersion(pyblish.api.ContextPlugin):
-    """Collect Script Version."""
-
-    order = pyblish.api.CollectorOrder
-    label = "Collect Script Version"
-    hosts = [
-        "nuke",
-        "nukeassist"
-    ]
-
-    def process(self, context):
-        file_path = context.data["currentFile"]
-        base_name = os.path.basename(file_path)
-        # get version string
-        version = pype.get_version_from_path(base_name)
-
-        context.data['version'] = version
@@ -52,9 +52,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
         output_dir = os.path.dirname(path)
         self.log.debug('output dir: {}'.format(output_dir))

         # # get version to instance for integration
-        # instance.data['version'] = instance.context.data.get(
-        #     "version", pype.get_version_from_path(nuke.root().name()))
         # get version to instance for integration
+        instance.data['version'] = instance.context.data["version"]

         self.log.debug('Write Version: %s' % instance.data('version'))
@@ -112,7 +111,8 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
             "outputDir": output_dir,
             "ext": ext,
             "label": label,
-            "handles": handles,
+            "handleStart": handle_start,
+            "handleEnd": handle_end,
             "frameStart": first_frame,
             "frameEnd": last_frame,
             "outputType": output_type,
@@ -116,7 +116,7 @@ class ExtractThumbnail(pype.api.Extractor):
         write_node["raw"].setValue(1)
         write_node.setInput(0, previous_node)
         temporary_nodes.append(write_node)
-        tags = ["thumbnail"]
+        tags = ["thumbnail", "publish_on_farm"]

         # retime for
         first_frame = int(last_frame) / 2
@@ -5,7 +5,6 @@ import getpass
 from avalon import api
 from avalon.vendor import requests
 import re

 import pyblish.api
@@ -55,7 +54,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
         )
         # Store output dir for unified publisher (filesequence)
         instance.data["deadlineSubmissionJob"] = response.json()
-        instance.data["publishJobState"] = "Active"
         instance.data["outputDir"] = os.path.dirname(
             render_path).replace("\\", "/")
+        instance.data["publishJobState"] = "Suspended"

+        if instance.data.get("bakeScriptPath"):
+            render_path = instance.data.get("bakeRenderPath")
@@ -87,6 +88,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
         script_name = os.path.basename(script_path)
         jobname = "%s - %s" % (script_name, instance.name)

+        output_filename_0 = self.preview_fname(render_path)
+        output_directory_0 = render_dir.replace("\\", "/")
+
         if not responce_data:
             responce_data = {}
@@ -119,6 +123,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             ),
             "Comment": self._comment,

+            # Optional, enable double-click to preview rendered
+            # frames from Deadline Monitor
+            "OutputFilename0": output_filename_0.replace("\\", "/")
+
         },
         "PluginInfo": {
             # Input
@@ -220,6 +228,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
         self.log.info("Submitting..")
         self.log.info(json.dumps(payload, indent=4, sort_keys=True))

+        # adding expectied files to instance.data
+        self.expected_files(instance, render_path)
+        self.log.debug("__ expectedFiles: `{}`".format(
+            instance.data["expectedFiles"]))
+
         response = requests.post(self.deadline_url, json=payload)
-
         if not response.ok:
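The submission itself is a plain HTTP POST of the JobInfo/PluginInfo payload to the Deadline Web Service, and the returned job document is what the dependent publish job later references. A stripped-down sketch of that round trip; the URL, plugin-info keys and all values here are placeholders, not the plugin's real ones:

    import requests

    DEADLINE_URL = "http://deadline-webservice:8082/api/jobs"  # placeholder address

    payload = {
        "JobInfo": {
            "Plugin": "Nuke",
            "Name": "shot010_comp_v003.nk - renderMain",
            "Frames": "1001-1100",
        },
        "PluginInfo": {
            "SceneFile": "/path/to/shot010_comp_v003.nk",  # assumed key names
            "WriteNode": "Write1",
        },
        "AuxFiles": [],
    }

    response = requests.post(DEADLINE_URL, json=payload)
    if not response.ok:
        raise RuntimeError("Submission failed: {}".format(response.text))
    job = response.json()  # submitted job document, stored for the dependent job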
@@ -240,3 +252,51 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
                 "%f=%d was rounded off to nearest integer"
                 % (value, int(value))
             )
+
+    def preview_fname(self, path):
+        """Return output file path with #### for padding.
+
+        Deadline requires the path to be formatted with # in place of numbers.
+        For example `/path/to/render.####.png`
+
+        Args:
+            path (str): path to rendered images
+
+        Returns:
+            str
+
+        """
+        self.log.debug("_ path: `{}`".format(path))
+        if "%" in path:
+            search_results = re.search(r"(%0)(\d)(d.)", path).groups()
+            self.log.debug("_ search_results: `{}`".format(search_results))
+            return int(search_results[1])
+        if "#" in path:
+            self.log.debug("_ path: `{}`".format(path))
+            return path
+        else:
+            return path
+
+    def expected_files(self,
+                       instance,
+                       path):
+        """ Create expected files in instance data
+        """
+        if not instance.data.get("expectedFiles"):
+            instance.data["expectedFiles"] = list()
+
+        dir = os.path.dirname(path)
+        file = os.path.basename(path)
+
+        if "#" in file:
+            pparts = file.split("#")
+            padding = "%0{}d".format(len(pparts) - 1)
+            file = pparts[0] + padding + pparts[-1]
+
+        if "%" not in file:
+            instance.data["expectedFiles"].append(path)
+            return
+
+        for i in range(self._frame_start, (self._frame_end + 1)):
+            instance.data["expectedFiles"].append(
+                os.path.join(dir, (file % i)).replace("\\", "/"))
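The expected_files() helper added above first rewrites ####-style padding to printf style (%04d) and then expands the path once per frame; a path without any % token is treated as a single file. A standalone sketch of the same conversion with made-up values:

    import os

    def expand_expected_files(path, frame_start, frame_end):
        # "beauty.####.exr" -> "beauty.%04d.exr", then one path per frame.
        directory = os.path.dirname(path)
        name = os.path.basename(path)

        if "#" in name:
            parts = name.split("#")
            padding = "%0{}d".format(len(parts) - 1)
            name = parts[0] + padding + parts[-1]

        if "%" not in name:
            return [path]  # single file, nothing to expand

        return [
            os.path.join(directory, name % frame).replace("\\", "/")
            for frame in range(frame_start, frame_end + 1)
        ]

    print(expand_expected_files("/renders/beauty.####.exr", 1001, 1003))
    # ['/renders/beauty.1001.exr', '/renders/beauty.1002.exr', '/renders/beauty.1003.exr']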