mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
merge hotfix/nuke-fixes with fixed conflict
commit 7ff6f468fa
6 changed files with 129 additions and 104 deletions
@@ -69,7 +69,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         "lut",
         "audio",
         "yetiRig",
-        "yeticache"
+        "yeticache",
+        "source"
     ]
     exclude_families = ["clip"]
 
@@ -24,7 +24,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
 
     label = "Integrate Frames"
     order = pyblish.api.IntegratorOrder
-    families = ["imagesequence", "source"]
+    families = ["imagesequence"]
 
     family_targets = [".frames", ".local", ".review", "imagesequence", "render", "source"]
     exclude_families = ["clip"]

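Note on the hunk above: the plugin's `families` filter is narrowed to plain image sequences, while the broader `family_targets` list (with suffix-style entries such as ".frames" and ".local") keeps matching combined family names. The exact matching helper is not part of this diff; the sketch below is a hypothetical reconstruction of that kind of target/exclude test, not the pype implementation itself.

    # Hypothetical family filtering sketch: an instance is handled when one of
    # its families equals a target or ends with a suffix-style target, and none
    # of its families is excluded.
    FAMILY_TARGETS = [".frames", ".local", ".review",
                      "imagesequence", "render", "source"]
    EXCLUDE_FAMILIES = ["clip"]

    def is_integrated(instance_families,
                      targets=FAMILY_TARGETS,
                      excluded=EXCLUDE_FAMILIES):
        families = list(instance_families)
        if any(f in excluded for f in families):
            return False
        return any(f == t or f.endswith(t) for f in families for t in targets)

    print(is_integrated(["render.local"]))           # True (suffix ".local")
    print(is_integrated(["clip", "imagesequence"]))  # False (excluded family)
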
@@ -17,21 +17,24 @@ class CrateRead(avalon.nuke.Creator):
     family = "source"
     families = family
     icon = "film"
     defaults = ["Effect", "Backplate", "Fire", "Smoke"]
 
     def __init__(self, *args, **kwargs):
         super(CrateRead, self).__init__(*args, **kwargs)
 
+        self.nodes = nuke.selectedNodes()
         data = OrderedDict()
         data['family'] = self.family
         data['families'] = self.families
-        {data.update({k: v}) for k, v in self.data.items()
-         if k not in data.keys()}
 
+        for k, v in self.data.items():
+            if k not in data.keys():
+                data.update({k: v})
 
         self.data = data
 
     def process(self):
+        self.name = self.data["subset"]
 
-        nodes = nuke.selectedNodes()
+        nodes = self.nodes
 
         if not nodes or len(nodes) == 0:
             nuke.message('Please select Read node')

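The `__init__` change above swaps a set comprehension used only for its side effect for an explicit loop; both merge `self.data` into the freshly built `OrderedDict` without overwriting the `family`/`families` keys set just before. A minimal standalone illustration of that merge (the example dict values are illustrative only):

    from collections import OrderedDict

    # Keys already present in `data` win; everything else is copied over,
    # mirroring the explicit loop introduced in the hunk above.
    existing = {"family": "write", "families": "write", "subset": "sourceMain"}

    data = OrderedDict()
    data['family'] = "source"
    data['families'] = "source"

    for k, v in existing.items():
        if k not in data.keys():
            data.update({k: v})

    print(data)
    # OrderedDict([('family', 'source'), ('families', 'source'), ('subset', 'sourceMain')])
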
@@ -40,9 +43,8 @@ class CrateRead(avalon.nuke.Creator):
         for node in nodes:
             if node.Class() != 'Read':
                 continue
-            name = node["name"].value()
             avalon_data = self.data
-            avalon_data['subset'] = "{}_{}".format(self.family, name)
+            avalon_data['subset'] = "{}".format(self.name)
             self.change_read_node(self.data["subset"], node, avalon_data)
             count_reads += 1
 
@@ -52,4 +54,4 @@ class CrateRead(avalon.nuke.Creator):
 
     def change_read_node(self, name, node, data):
         node = avalon.nuke.lib.imprint(node, data)
-        node['tile_color'].setValue(16711935)
+        node['tile_color'].setValue(16744935)

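The two constants above are packed colour values; Nuke's `tile_color` knob is conventionally read as a 32-bit `0xRRGGBBAA` integer, which is my assumption for why they look the way they do. A small helper (not part of this commit) that builds such a value and decodes the ones used here:

    def rgba_to_tile_color(r, g, b, a=255):
        """Pack 8-bit RGBA channels into the single integer form that
        Nuke's 'tile_color' knob expects (0xRRGGBBAA by convention)."""
        return (r << 24) | (g << 16) | (b << 8) | a

    # The old and new constants from the hunk, decoded for reference:
    print(hex(16711935))                  # 0xff00ff, i.e. 0x00FF00FF
    print(hex(16744935))                  # 0xff81e7, i.e. 0x00FF81E7
    print(rgba_to_tile_color(0, 255, 0))  # 16711935, the old value
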
@@ -40,7 +40,6 @@ class CreateWriteRender(plugin.PypeCreator):
 
     def process(self):
         from pype.nuke import lib as pnlib
-        reload(pnlib)
 
         inputs = []
         outputs = []

@@ -101,7 +100,7 @@ class CreateWriteRender(plugin.PypeCreator):
         for output in outputs:
             output.setInput(0, write_node)
 
-        return True
+        return write_node
 
 #
 # class CreateWritePrerender(avalon.nuke.Creator):

@@ -15,16 +15,17 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
     hosts = ["nuke", "nukeassist"]
 
     def process(self, context):
 
         asset_data = io.find_one({"type": "asset",
                                   "name": api.Session["AVALON_ASSET"]})
 
         self.log.debug("asset_data: {}".format(asset_data["data"]))
         instances = []
         # creating instances per write node
 
+        self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
         for node in nuke.allNodes():
 
             try:
                 if node["disable"].value():
                     continue

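Context for the `try:` in the hunk above: the collector walks every node in the script and reads its "disable" knob, and not every Nuke node class exposes that knob, so the lookup itself can raise. A hedged sketch of the same guard, assuming it runs inside Nuke's Python interpreter:

    import nuke

    def is_disabled(node):
        """Treat nodes without a 'disable' knob as enabled."""
        try:
            return bool(node["disable"].value())
        except (NameError, ValueError):
            # the knob does not exist on this node class
            return False

    enabled_nodes = [n for n in nuke.allNodes() if not is_disabled(n)]
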
@@ -60,18 +61,20 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
 
                 family = avalon_knob_data["family"]
                 families = [avalon_knob_data["families"]]
-                if node["render"].value():
-                    self.log.info("flagged for render")
-                    add_family = "render.local"
-                    # dealing with local/farm rendering
-                    if node["render_farm"].value():
-                        self.log.info("adding render farm family")
-                        add_family = "render.farm"
-                        instance.data["transfer"] = False
-                    families.append(add_family)
-                else:
-                    # add family into families
-                    families.insert(0, family)
+
+                if node.Class() not in "Read":
+                    if node["render"].value():
+                        self.log.info("flagged for render")
+                        add_family = "render.local"
+                        # dealing with local/farm rendering
+                        if node["render_farm"].value():
+                            self.log.info("adding render farm family")
+                            add_family = "render.farm"
+                            instance.data["transfer"] = False
+                        families.append(add_family)
+                    else:
+                        # add family into families
+                        families.insert(0, family)
 
                 instance.data.update({
                     "subset": subset,

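The hunk above wraps the render-family handling in `node.Class() not in "Read"` (a substring test that effectively skips Read-backed "source" instances), so only non-Read nodes get "render.local"/"render.farm" appended or the creator family inserted. A simplified, standalone reconstruction of the resulting families list, not the plugin itself:

    def build_families(family, knob_families, is_read, render, render_farm):
        """Simplified reconstruction of the list assembled by the collector."""
        families = list(knob_families)
        if not is_read:
            if render:
                families.append("render.farm" if render_farm else "render.local")
            else:
                # no render flag: keep the creator family in front
                families.insert(0, family)
        return families

    print(build_families("write", ["render"], False, True, False))
    # ['render', 'render.local']
    print(build_families("write", ["render"], False, False, False))
    # ['write', 'render']
    print(build_families("source", ["source"], True, False, False))
    # ['source']
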
@@ -1,112 +1,132 @@
 import os
 import re
 import clique
 import nuke
 import pyblish.api
 import logging
 from avalon import io, api
 
 log = logging.get_logger(__name__)
 
 
 @pyblish.api.log
-class CollectNukeReads(pyblish.api.ContextPlugin):
+class CollectNukeReads(pyblish.api.InstancePlugin):
     """Collect all read nodes."""
 
-    order = pyblish.api.CollectorOrder + 0.1
+    order = pyblish.api.CollectorOrder + 0.04
     label = "Collect Reads"
-    hosts = ["nuke"]
+    hosts = ["nuke", "nukeassist"]
+    families = ["source"]
 
-    def process(self, context):
+    def process(self, instance):
-        asset_data = io.find_one({"type": "asset",
-                                  "name": api.Session["AVALON_ASSET"]})
 
-        self.log.debug("asset_data: {}".format(asset_data["data"]))
-        for instance in context.data["instances"]:
-            self.log.debug("checking instance: {}".format(instance))
 
-            node = instance[0]
-            if node.Class() != "Read":
-                continue
+        self.log.debug("checking instance: {}".format(instance))
 
-            file_path = node["file"].value()
-            file_name = os.path.basename(file_path)
-            items = file_name.split(".")
+        node = instance[0]
+        if node.Class() != "Read":
+            return
 
-            if len(items) < 2:
-                raise ValueError
+        file_path = node["file"].value()
+        file_name = os.path.basename(file_path)
+        items = file_name.split(".")
 
-            ext = items[-1]
+        if len(items) < 2:
+            raise ValueError
 
-            # # Get frame range
-            first_frame = node['first'].value()
-            last_frame = node['last'].value()
+        ext = items[-1]
 
-            # # Easier way to sequence - Not tested
-            # isSequence = True
-            # if first_frame == last_frame:
-            # isSequence = False
+        # Get frame range
+        handle_start = instance.context.data["handleStart"]
+        handle_end = instance.context.data["handleEnd"]
+        first_frame = node['first'].value()
+        last_frame = node['last'].value()
 
-            isSequence = False
-            if len(items) > 1:
-                sequence = items[-2]
-                hash_regex = re.compile(r'([#*])')
-                seq_regex = re.compile('[%0-9*d]')
-                hash_match = re.match(hash_regex, sequence)
-                seq_match = re.match(seq_regex, sequence)
-                if hash_match or seq_match:
-                    isSequence = True
+        # colorspace
+        colorspace = node["colorspace"].value()
+        if "default" in colorspace:
+            colorspace = colorspace.replace("default (", "").replace(")", "")
 
-            # get source path
-            path = nuke.filename(node)
-            source_dir = os.path.dirname(path)
-            self.log.debug('source dir: {}'.format(source_dir))
+        # # Easier way to sequence - Not tested
+        # isSequence = True
+        # if first_frame == last_frame:
+        # isSequence = False
 
-            if isSequence:
-                source_files = os.listdir(source_dir)
-            else:
-                source_files = file_name
+        isSequence = False
+        if len(items) > 1:
+            sequence = items[-2]
+            hash_regex = re.compile(r'([#*])')
+            seq_regex = re.compile(r'[%0-9*d]')
+            hash_match = re.match(hash_regex, sequence)
+            seq_match = re.match(seq_regex, sequence)
+            if hash_match or seq_match:
+                isSequence = True
 
-            # Include start and end render frame in label
-            name = node.name()
-            label = "{0} ({1}-{2})".format(
-                name,
-                int(first_frame),
-                int(last_frame)
-            )
+        # get source path
+        path = nuke.filename(node)
+        source_dir = os.path.dirname(path)
+        self.log.debug('source dir: {}'.format(source_dir))
 
-            self.log.debug("collected_frames: {}".format(label))
+        if isSequence:
+            source_files = [f for f in os.listdir(source_dir)
+                            if ext in f
+                            if items[0] in f]
+        else:
+            source_files = file_name
 
-            if "representations" not in instance.data:
-                instance.data["representations"] = []
+        # Include start and end render frame in label
+        name = node.name()
+        label = "{0} ({1}-{2})".format(
+            name,
+            int(first_frame),
+            int(last_frame)
+        )
 
-            representation = {
-                'name': ext,
-                'ext': "." + ext,
-                'files': source_files,
-                "stagingDir": source_dir,
-            }
-            instance.data["representations"].append(representation)
+        self.log.debug("collected_frames: {}".format(label))
 
-            transfer = False
-            if "publish" in node.knobs():
-                transfer = node["publish"]
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
 
-            instance.data['transfer'] = transfer
+        representation = {
+            'name': ext,
+            'ext': ext,
+            'files': source_files,
+            "stagingDir": source_dir,
+            "frameStart": "%0{}d".format(
+                len(str(last_frame))) % first_frame
+        }
+        instance.data["representations"].append(representation)
 
-            self.log.debug("checking for error: {}".format(label))
-            instance.data.update({
-                "path": path,
-                "stagingDir": source_dir,
-                "ext": ext,
-                "label": label,
-                "frameStart": first_frame,
-                "frameEnd": last_frame,
-                "colorspace": node["colorspace"].value(),
-                "handles": int(asset_data["data"].get("handles", 0)),
-                "step": 1,
-                "fps": int(nuke.root()['fps'].value())
-            })
+        transfer = False
+        if "publish" in node.knobs():
+            transfer = node["publish"]
 
-            self.log.debug("instance.data: {}".format(instance.data))
+        instance.data['transfer'] = transfer
 
-        self.log.debug("context: {}".format(context))
+        # Add version data to instance
+        version_data = {
+            "handles": handle_start,
+            "handleStart": handle_start,
+            "handleEnd": handle_end,
+            "frameStart": first_frame + handle_start,
+            "frameEnd": last_frame - handle_end,
+            "colorspace": colorspace,
+            "families": [instance.data["family"]],
+            "subset": instance.data["subset"],
+            "fps": instance.context.data["fps"]
+        }
 
+        instance.data.update({
+            "versionData": version_data,
+            "path": path,
+            "stagingDir": source_dir,
+            "ext": ext,
+            "label": label,
+            "frameStart": first_frame,
+            "frameEnd": last_frame,
+            "colorspace": colorspace,
+            "handles": int(asset_data["data"].get("handles", 0)),
+            "step": 1,
+            "fps": int(nuke.root()['fps'].value())
+        })
 
+        self.log.debug("instance.data: {}".format(instance.data))

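Two pieces of the rewritten collector above are easy to misread: the regex check that decides whether the Read node's filename is a frame sequence, and the zero-padded frameStart stored on the representation. The sketch below re-expresses both outside the plugin (names are local to the example; the behaviour is mirrored from the hunk):

    import re

    def detect_sequence(file_name):
        """True when the token before the extension looks like a frame
        pattern, e.g. '####' or a printf-style '%04d' (mirrors the hunk)."""
        items = file_name.split(".")
        if len(items) <= 1:
            return False
        sequence = items[-2]
        hash_regex = re.compile(r'([#*])')
        seq_regex = re.compile(r'[%0-9*d]')
        return bool(re.match(hash_regex, sequence) or
                    re.match(seq_regex, sequence))

    def padded_first_frame(first_frame, last_frame):
        """Pad the first frame number to the width of the last one,
        the same formatting the representation uses for 'frameStart'."""
        return "%0{}d".format(len(str(last_frame))) % first_frame

    print(detect_sequence("plate.%04d.exr"))  # True
    print(detect_sequence("plate.####.exr"))  # True
    print(detect_sequence("backplate.exr"))   # False
    print(padded_first_frame(1, 1100))        # '0001'

The version data, by contrast, derives its frame range by trimming the context handles off the Read node's range, exactly as written in the hunk (`first_frame + handle_start`, `last_frame - handle_end`).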