mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
Merge branch 'hotfix/nuke_publish_create_load' into hotfix/nuke_new_attr_changes
This commit is contained in:
commit
aa2efc7bf3
11 changed files with 72 additions and 120 deletions
|
|
@ -197,7 +197,7 @@ def version_up(filepath):
|
|||
dirname = os.path.dirname(filepath)
|
||||
basename, ext = os.path.splitext(os.path.basename(filepath))
|
||||
|
||||
regex = "[._]v\d+"
|
||||
regex = r"[._]v\d+"
|
||||
matches = re.findall(regex, str(basename), re.IGNORECASE)
|
||||
if not matches:
|
||||
log.info("Creating version...")
|
||||
|
|
@ -205,7 +205,7 @@ def version_up(filepath):
|
|||
new_basename = "{}{}".format(basename, new_label)
|
||||
else:
|
||||
label = matches[-1]
|
||||
version = re.search("\d+", label).group()
|
||||
version = re.search(r"\d+", label).group()
|
||||
padding = len(version)
|
||||
|
||||
new_version = int(version) + 1
|
||||
|
|
|
|||
|
|
@ -321,16 +321,7 @@ def create_write_node(name, data, prenodes=None):
|
|||
lnk.makeLink(write_node.name(), "Render")
|
||||
lnk.setName("Render")
|
||||
GN.addKnob(lnk)
|
||||
|
||||
# linking knobs to group property panel
|
||||
linking_knobs = ["first", "last", "use_limit"]
|
||||
for k in linking_knobs:
|
||||
lnk = nuke.Link_Knob(k)
|
||||
lnk.makeLink(write_node.name(), k)
|
||||
lnk.setName(k.replace('_', ' ').capitalize())
|
||||
lnk.clearFlag(nuke.STARTLINE)
|
||||
GN.addKnob(lnk)
|
||||
|
||||
|
||||
return GN
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -410,6 +410,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
Args:
|
||||
instance: the instance to integrate
|
||||
"""
|
||||
transfers = instance.data.get("transfers", list())
|
||||
|
||||
for src, dest in transfers:
|
||||
if os.path.normpath(src) != os.path.normpath(dest):
|
||||
self.copy_file(src, dest)
|
||||
|
||||
# Produce hardlinked copies
|
||||
# Note: hardlink can only be produced between two files on the same
|
||||
|
|
|
|||
|
|
@ -125,6 +125,24 @@ class CreateWritePrerender(avalon.nuke.Creator):
|
|||
write_data.update({
|
||||
"fpath_template": "{work}/prerenders/{subset}/{subset}.{frame}.{ext}"})
|
||||
|
||||
create_write_node(self.data["subset"], write_data)
|
||||
# get group node
|
||||
group_node = create_write_node(self.data["subset"], write_data)
|
||||
|
||||
# open group node
|
||||
group_node.begin()
|
||||
for n in nuke.allNodes():
|
||||
# get write node
|
||||
if n.Class() in "Write":
|
||||
write_node = n
|
||||
group_node.end()
|
||||
|
||||
# linking knobs to group property panel
|
||||
linking_knobs = ["first", "last", "use_limit"]
|
||||
for k in linking_knobs:
|
||||
lnk = nuke.Link_Knob(k)
|
||||
lnk.makeLink(write_node.name(), k)
|
||||
lnk.setName(k.replace('_', ' ').capitalize())
|
||||
lnk.clearFlag(nuke.STARTLINE)
|
||||
group_node.addKnob(lnk)
|
||||
|
||||
return
|
||||
|
|
|
|||
|
|
@ -92,6 +92,8 @@ class LoadSequence(api.Loader):
|
|||
version = context['version']
|
||||
version_data = version.get("data", {})
|
||||
|
||||
log.info("version_data: {}\n".format(version_data))
|
||||
|
||||
first = version_data.get("frameStart", None)
|
||||
last = version_data.get("frameEnd", None)
|
||||
handles = version_data.get("handles", 0)
|
||||
|
|
@ -103,9 +105,9 @@ class LoadSequence(api.Loader):
|
|||
handle_start = handles
|
||||
handle_end = handles
|
||||
|
||||
# create handles offset
|
||||
first -= handle_start
|
||||
last += handle_end
|
||||
# # create handles offset
|
||||
# first -= handle_start
|
||||
# last += handle_end
|
||||
|
||||
# Fallback to asset name when namespace is None
|
||||
if namespace is None:
|
||||
|
|
|
|||
|
|
@ -19,3 +19,5 @@ class CollectAssetInfo(pyblish.api.ContextPlugin):
|
|||
self.log.info("asset_data: {}".format(asset_data))
|
||||
|
||||
context.data['handles'] = int(asset_data["data"].get("handles", 0))
|
||||
context.data["handleStart"] = int(asset_data["data"].get("handleStart", 0))
|
||||
context.data["handleEnd"] = int(asset_data["data"].get("handleEnd", 0))
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import os
|
|||
import nuke
|
||||
import pyblish.api
|
||||
from avalon import io, api
|
||||
from avalon.nuke.lib import get_avalon_knob_data
|
||||
from avalon.nuke import get_avalon_knob_data
|
||||
|
||||
|
||||
@pyblish.api.log
|
||||
|
|
@ -18,23 +18,26 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
|
|||
asset_data = io.find_one({"type": "asset",
|
||||
"name": api.Session["AVALON_ASSET"]})
|
||||
|
||||
# add handles into context
|
||||
context.data['handles'] = context.data['handles']
|
||||
|
||||
self.log.debug("asset_data: {}".format(asset_data["data"]))
|
||||
instances = []
|
||||
# creating instances per write node
|
||||
for node in nuke.allNodes():
|
||||
|
||||
self.log.debug("nuke.allNodes(): {}".format(nuke.allNodes()))
|
||||
for node in nuke.allNodes():
|
||||
try:
|
||||
if node["disable"].value():
|
||||
continue
|
||||
except Exception:
|
||||
except Exception as E:
|
||||
self.log.warning(E)
|
||||
continue
|
||||
|
||||
# get data from avalon knob
|
||||
self.log.debug("node[name]: {}".format(node['name'].value()))
|
||||
avalon_knob_data = get_avalon_knob_data(node)
|
||||
|
||||
self.log.debug("avalon_knob_data: {}".format(avalon_knob_data))
|
||||
|
||||
if not avalon_knob_data:
|
||||
continue
|
||||
|
||||
|
|
|
|||
|
|
@ -14,17 +14,24 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
|
|||
families = ["render", "render.local", "render.farm"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
# if not instance.data["publish"]:
|
||||
# continue
|
||||
|
||||
node = instance[0]
|
||||
group = instance[0]
|
||||
|
||||
if node.Class() != "Write":
|
||||
if group.Class() != "Group":
|
||||
return
|
||||
|
||||
self.log.debug("checking instance: {}".format(instance))
|
||||
|
||||
group.begin()
|
||||
|
||||
for n in nuke.allNodes():
|
||||
if n.Class() != "Write":
|
||||
continue
|
||||
node = n
|
||||
group.end()
|
||||
|
||||
# Determine defined file type
|
||||
ext = node["file_type"].value()
|
||||
|
||||
|
|
@ -34,7 +41,9 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
|
|||
output_type = "mov"
|
||||
|
||||
# Get frame range
|
||||
handles = instance.context.data.get('handles', 0)
|
||||
handles = instance.context.data['handles']
|
||||
handle_start = instance.context.data["handleStart"]
|
||||
handle_end = instance.context.data["handleEnd"]
|
||||
first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
last_frame = int(nuke.root()["last_frame"].getValue())
|
||||
|
||||
|
|
@ -85,7 +94,20 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
|
|||
if 'render.local' in instance.data['families']:
|
||||
instance.data['families'].append('ftrack')
|
||||
|
||||
# Add version data to instance
|
||||
version_data = {
|
||||
"handles": handle_start,
|
||||
"handleStart": handle_start,
|
||||
"handleEnd": handle_end,
|
||||
"version": int(version),
|
||||
"colorspace": node["colorspace"].value(),
|
||||
"families": [instance.data["family"]],
|
||||
"subset": instance.data["subset"],
|
||||
"fps": instance.context.data["fps"]
|
||||
}
|
||||
|
||||
instance.data.update({
|
||||
"versionData": version_data,
|
||||
"path": path,
|
||||
"outputDir": output_dir,
|
||||
"ext": ext,
|
||||
|
|
@ -96,5 +118,5 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
|
|||
"outputType": output_type,
|
||||
"colorspace": node["colorspace"].value(),
|
||||
})
|
||||
|
||||
instance[0] = node
|
||||
self.log.debug("instance.data: {}".format(instance.data))
|
||||
|
|
|
|||
|
|
@ -1,91 +0,0 @@
|
|||
import os
|
||||
import json
|
||||
import datetime
|
||||
import time
|
||||
|
||||
import clique
|
||||
from pprint import pformat
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class ExtractJSON(pyblish.api.ContextPlugin):
    """Extract all instances to a serialized file on disk.

    Writes a timestamped dump of the publish context's data into a
    ``workspace/instances`` folder next to the current workfile.

    NOTE(review): despite the plugin name and the ``.json`` extension,
    the payload is produced with ``pprint.pformat`` (Python repr), not
    ``json.dumps`` — downstream readers must parse it accordingly.
    """

    order = pyblish.api.IntegratorOrder + 1
    label = "Extract to JSON"
    families = ["write"]

    def process(self, context):
        """Dump context data (with instances unwrapped) to a file.

        Args:
            context: pyblish context; must carry ``currentFile`` and
                ``instances`` in its data.
        """
        workspace = os.path.join(
            os.path.dirname(context.data["currentFile"]), "workspace",
            "instances")

        if not os.path.exists(workspace):
            os.makedirs(workspace)

        # Shallow-copy so the live context is not mutated; instances are
        # plugin objects, keep only their plain data dicts.
        context_data = context.data.copy()
        context_data["instances"] = [
            instance.data for instance in context_data["instances"]]

        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime("%Y%m%d-%H%M%S")
        filename = timestamp + "_instances.json"

        with open(os.path.join(workspace, filename), "w") as outfile:
            outfile.write(pformat(context_data, depth=20))

    def serialize(self, data):
        """
        Convert all nested content to serialized objects.

        Record/instance/result entries are dropped (set to ``None``),
        plugin-module objects are reduced to their module path, dicts and
        lists/tuples are serialized recursively, and everything else is
        stringified with forward slashes.

        Args:
            data (dict): nested data

        Returns:
            dict: nested data
        """

        def encoding_obj(value):
            # Best-effort stringification; keep original value on failure.
            try:
                value = str(value).replace("\\", "/")
            except Exception:
                pass
            return value

        # Iterate a snapshot so entries can be reassigned while looping.
        for key, value in dict(data).items():
            if key in ["records", "instances", "results"]:
                # escape all record objects
                data[key] = None
                continue

            if hasattr(value, '__module__'):
                # only deals with module objects
                if "plugins" in value.__module__:
                    # only dealing with plugin objects
                    data[key] = str(value.__module__)
                elif ".lib." in value.__module__:
                    # will allow only anatomy dict
                    data[key] = self.serialize(value)
                else:
                    data[key] = None
                continue

            if isinstance(value, dict):
                # Recurse into dictionaries.
                # BUGFIX: 'continue' added — previously fell through and the
                # serialized result was clobbered by encoding_obj(value).
                data[key] = self.serialize(value)
                continue

            # BUGFIX: was isinstance(value, (list or tuple)), which evaluates
            # to isinstance(value, list) and silently skipped tuples.
            if isinstance(value, (list, tuple)):
                # Recurse into sequences; tuples come back as lists, which is
                # fine for a serialized dump.
                data[key] = [self.serialize(item) for item in value]
                continue

            data[key] = encoding_obj(value)

        return data
|
||||
|
|
@ -29,7 +29,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
|
|||
|
||||
# root = nuke.root()
|
||||
# node_subset_name = instance.data.get("name", None)
|
||||
node = instance[0]
|
||||
node = instance[1]
|
||||
|
||||
DEADLINE_REST_URL = os.environ.get("DEADLINE_REST_URL",
|
||||
"http://localhost:8082")
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ class RepairNukeWriteNodeAction(pyblish.api.Action):
|
|||
instances = pype.utils.filter_instances(context, plugin)
|
||||
|
||||
for instance in instances:
|
||||
node = instance[0]
|
||||
node = instance[1]
|
||||
correct_data = nukelib.get_write_node_template_attr(node)
|
||||
for k, v in correct_data.items():
|
||||
node[k].setValue(v)
|
||||
|
|
@ -33,7 +33,7 @@ class ValidateNukeWriteNode(pyblish.api.InstancePlugin):
|
|||
|
||||
def process(self, instance):
|
||||
|
||||
node = instance[0]
|
||||
node = instance[1]
|
||||
correct_data = nukelib.get_write_node_template_attr(node)
|
||||
|
||||
check = []
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue