Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit 958aeab2e6 (parent f445864e01): wip improving publishing version

16 changed files with 237 additions and 115 deletions
@@ -26,7 +26,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        'render': 'render'
    }

-   exclude = ["prerender.frames"]
+   exclude = []

    def process(self, instance):
        for ex in self.exclude:
@@ -135,7 +135,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        #  \|________|
        #
        root = api.registered_root()
-       hierarchy = io.find_one({"type":'asset', "name":ASSET})['data']['parents']
+       hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']
        if hierarchy:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = os.path.join(*hierarchy)
@@ -226,17 +226,17 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
            # Imprint shortcut to context
            # for performance reasons.
            "context": {
-               "root": root,
-               "project": PROJECT,
-               "projectcode": project['data']['code'],
-               'task': api.Session["AVALON_TASK"],
-               "silo": asset['silo'],
-               "asset": ASSET,
-               "family": instance.data['family'],
-               "subset": subset["name"],
-               "version": version["name"],
-               "hierarchy": hierarchy,
-               "representation": ext[1:]
+               "root": root,
+               "project": PROJECT,
+               "projectcode": project['data']['code'],
+               'task': api.Session["AVALON_TASK"],
+               "silo": asset['silo'],
+               "asset": ASSET,
+               "family": instance.data['family'],
+               "subset": subset["name"],
+               "version": version["name"],
+               "hierarchy": hierarchy,
+               "representation": ext[1:]
            }
        }

pype/plugins/nuke/_publish_unused/integrate_staging_dir.py (new file, 27 lines)

@@ -0,0 +1,27 @@
import pyblish.api
import shutil
import os


class CopyStagingDir(pyblish.api.InstancePlugin):
    """Copy data rendered into temp local directory
    """

    order = pyblish.api.IntegratorOrder - 2
    label = "Copy data from temp dir"
    hosts = ["nuke", "nukeassist"]
    families = ["render.local"]

    def process(self, instance):
        temp_dir = instance.data.get("stagingDir")
        output_dir = instance.data.get("outputDir")

        # copy data to correct dir
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
            self.log.info("output dir has been created")

        for f in os.listdir(temp_dir):
            self.log.info("copy file to correct destination: {}".format(f))
            shutil.copy(os.path.join(temp_dir, os.path.basename(f)),
                        os.path.join(output_dir, os.path.basename(f)))
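Aside (not part of the diff): a minimal smoke test for the new CopyStagingDir integrator outside a full pyblish run. The FakeInstance stand-in, directory layout and frame name are invented for illustration; `self.log` is expected to come from pyblish's base Plugin class.

    import os
    import tempfile

    class FakeInstance(object):
        """Bare-bones stand-in exposing only the `data` dict the plugin reads."""
        def __init__(self, staging_dir, output_dir):
            self.data = {"stagingDir": staging_dir, "outputDir": output_dir}

    staging = tempfile.mkdtemp()
    output = os.path.join(tempfile.mkdtemp(), "publish")

    # drop a dummy frame into the staging dir
    with open(os.path.join(staging, "frame.0001.exr"), "w") as f:
        f.write("dummy")

    # run the integrator directly; it creates the output dir and copies the frame over
    CopyStagingDir().process(FakeInstance(staging, output))
    assert os.listdir(output) == ["frame.0001.exr"]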
pype/plugins/nuke/publish/collect_families.py (new file, 49 lines)

@@ -0,0 +1,49 @@
import pyblish.api


@pyblish.api.log
class CollectInstanceFamilies(pyblish.api.ContextPlugin):
    """Collect families for all instances"""

    order = pyblish.api.CollectorOrder + 0.2
    label = "Define Families"
    hosts = ["nuke", "nukeassist"]

    def process(self, context):
        self.log.info('context.data["instances"]: {}'.format(context.data["instances"]))
        for instance in context.data["instances"]:

            if "write" in instance.data["family"]:
                if not instance.data["publish"]:
                    continue

                node = instance[0]

                if not node["render"].value():
                    families = [
                        "{}.frames".format(
                            instance.data["avalonKnob"]["families"]),
                        'ftrack']
                    # to ignore staging dir op in integrate
                    instance.data['transfer'] = False
                else:
                    # dealing with local/farm rendering
                    if node["render_farm"].value():
                        families = [
                            "{}.farm".format(
                                instance.data["avalonKnob"]["families"]),
                            'ftrack']
                    else:
                        families = [
                            "{}.local".format(
                                instance.data["avalonKnob"]["families"]),
                            'ftrack']

                instance.data.update({"families": families})

        # Sort/grouped by family (preserving local index)
        context[:] = sorted(context, key=self.sort_by_family)

    def sort_by_family(self, instance):
        """Sort by family"""
        return instance.data.get("families", instance.data.get("family"))
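Aside (not part of the diff): the branching above boils down to a small mapping from the write node's render knobs to a family suffix. A condensed, hypothetical restatement with the Nuke knob lookups replaced by plain booleans (function and argument names are illustrative only):

    def resolve_families(base_family, render_enabled, render_on_farm):
        # no rendering requested: reuse frames already on disk
        if not render_enabled:
            suffix = "frames"
        # render requested on the farm
        elif render_on_farm:
            suffix = "farm"
        # render locally during publish
        else:
            suffix = "local"
        return ["{}.{}".format(base_family, suffix), "ftrack"]

    assert resolve_families("render", False, False) == ["render.frames", "ftrack"]
    assert resolve_families("render", True, True) == ["render.farm", "ftrack"]
    assert resolve_families("render", True, False) == ["render.local", "ftrack"]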
@@ -2,6 +2,7 @@ import os

import nuke
import pyblish.api
from avalon import io, api
from pype.nuke.lib import get_avalon_knob_data

@@ -9,11 +10,14 @@ from pype.nuke.lib import get_avalon_knob_data
class CollectNukeInstances(pyblish.api.ContextPlugin):
    """Collect all nodes with Avalon knob."""

-   order = pyblish.api.CollectorOrder
+   order = pyblish.api.CollectorOrder + 0.01
    label = "Collect Instances"
    hosts = ["nuke", "nukeassist"]

    def process(self, context):
+       asset_data = io.find_one({"type": "asset",
+                                 "name": api.Session["AVALON_ASSET"]})
+       self.log.debug("asset_data: {}".format(asset_data["data"]))
        instances = []
        # creating instances per write node
        for node in nuke.allNodes():
@@ -44,10 +48,12 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):
                "label": node.name(),
                "name": node.name(),
                "subset": subset,
                "families": [avalon_knob_data["families"]],
                "family": avalon_knob_data["family"],
                "avalonKnob": avalon_knob_data,
-               "publish": node.knob('publish')
+               "publish": node.knob('publish').value(),
                "handles": int(asset_data["data"].get("handles", 0)),
                "step": 1,
                "fps": int(nuke.root()['fps'].value())

            })
            self.log.info("collected instance: {}".format(instance.data))
@@ -55,11 +61,4 @@ class CollectNukeInstances(pyblish.api.ContextPlugin):

        context.data["instances"] = instances

-       # Sort/grouped by family (preserving local index)
-       context[:] = sorted(context, key=self.sort_by_family)
-
        self.log.debug("context: {}".format(context))
-
-   def sort_by_family(self, instance):
-       """Sort by family"""
-       return instance.data.get("families", instance.data.get("family"))
pype/plugins/nuke/publish/collect_script.py (new file, 40 lines)

@@ -0,0 +1,40 @@
from avalon import api, io
import nuke
import pyblish.api
import os


class CollectScript(pyblish.api.ContextPlugin):
    """Publish current script version."""

    order = pyblish.api.CollectorOrder + 0.1
    label = "Collect Script to publish"
    hosts = ['nuke']

    def process(self, context):
        asset_data = io.find_one({"type": "asset",
                                  "name": api.Session["AVALON_ASSET"]})
        self.log.debug("asset_data: {}".format(asset_data["data"]))

        # creating instances per write node
        file_path = nuke.root()['name'].value()
        base_name = os.path.basename(file_path)
        subset = base_name.split("_v")[0]

        # Create instance
        instance = context.create_instance(subset)

        instance.data.update({
            "subset": subset,
            "asset": os.environ["AVALON_ASSET"],
            "label": base_name,
            "name": base_name,
            "subset": subset,
            "family": "script",
            "handles": int(asset_data["data"].get("handles", 0)),
            "step": 1,
            "fps": int(nuke.root()['fps'].value())

        })
        self.log.info('Publishing script version')
        context.data["instances"].append(instance)
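Aside (not part of the diff): the subset for the script instance is derived straight from the work file name by cutting at the version token. A quick illustration with a made-up path:

    import os

    file_path = "/proj/shots/sh010/work/sh010_compositing_v003.nk"   # example only
    base_name = os.path.basename(file_path)    # "sh010_compositing_v003.nk"
    subset = base_name.split("_v")[0]          # "sh010_compositing"
    print(subset)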
@@ -3,7 +3,7 @@ import tempfile
import nuke
import pyblish.api
import logging
from avalon import io, api


log = logging.getLogger(__name__)
@@ -17,10 +17,11 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
    hosts = ["nuke", "nukeassist"]

    def process(self, context):
        asset_data = io.find_one({"type": "asset",
                                  "name": api.Session["AVALON_ASSET"]})
        self.log.debug("asset_data: {}".format(asset_data["data"]))
        for instance in context.data["instances"]:

            if not instance.data["publish"]:
                continue

            self.log.debug("checking instance: {}".format(instance))
            node = instance[0]

@@ -48,10 +49,9 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
            output_dir = os.path.dirname(path)
            self.log.debug('output dir: {}'.format(output_dir))

-           instance.data.update({"stagingDir": output_dir})
-           # Include start and end render frame in label
+           # create label
            name = node.name()
-
+           # Include start and end render frame in label
            label = "{0} ({1}-{2})".format(
                name,
                int(first_frame),
@@ -59,55 +59,35 @@ class CollectNukeWrites(pyblish.api.ContextPlugin):
            )

-           # preredered frames
-           if not node["render"].value():
-               try:
-                   families = [
-                       "{}.frames".format(
-                           instance.data["avalonKnob"]["families"]),
-                       'ftrack'
-                   ]
-                   collected_frames = os.listdir(output_dir)
-                   self.log.debug("collected_frames: {}".format(label))
-                   if "files" not in instance.data:
-                       instance.data["files"] = list()
-                   instance.data["files"].append(collected_frames)
-                   instance.data['transfer'] = False
-               except Exception:
-                   node["render"].setValue(True)
-                   raise AttributeError(
-                       "Files in `{}`. Needs to refresh the publishing".format(output_dir))
-           else:
-               # dealing with local/farm rendering
-               if node["render_farm"].value():
-                   families = [
-                       "{}.farm".format(instance.data["avalonKnob"]["families"])]
-               else:
-                   families = [
-                       "{}.local".format(instance.data["avalonKnob"]["families"])
-                   ]
-               # adding for local renderings
-               instance.data.update({"stagingDir": tempfile.mkdtemp().replace("\\", "/")})
+           # collect frames by try
+           # collect families in next file
+           if "files" not in instance.data:
+               instance.data["files"] = list()
+
+           try:
+               collected_frames = os.listdir(output_dir)
+               self.log.debug("collected_frames: {}".format(label))
+
+               instance.data["files"].append(collected_frames)
+           except Exception:
+               pass
+
+           # adding stage dir for faster local renderings
+           staging_dir = tempfile.mkdtemp().replace("\\", "/")
+           instance.data.update({"stagingDir": staging_dir})
+           self.log.debug('staging_dir: {}'.format(staging_dir))

            self.log.debug("checking for error: {}".format(label))
            instance.data.update({
                "path": path,
                "outputDir": output_dir,
                "ext": ext,
                "label": label,
                "families": families,
                "startFrame": first_frame,
                "endFrame": last_frame,
                "outputType": output_type,
                "colorspace": node["colorspace"].value(),
                "handles": int(asset_data["data"].get("handles", 0)),
                "step": 1,
                "fps": int(nuke.root()['fps'].value())

            })

            self.log.debug("instance.data: {}".format(instance.data))

            self.log.debug("context: {}".format(context))

    def sort_by_family(self, instance):
        """Sort by family"""
        return instance.data.get("families", instance.data.get("family"))
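Aside (not part of the diff): the collector appends the whole frame list to instance.data["files"] as a single element, so downstream integrators see a list of sequences rather than a flat list of file names. A tiny sketch with invented frame names:

    # frame names are invented; the append mirrors the collector above
    collected_frames = ["sh010_comp_v001.{:04d}.exr".format(f) for f in range(1, 4)]

    files = []                      # instance.data["files"] starts out empty
    files.append(collected_frames)  # one element per write node / sequence

    print(len(files))     # 1  -> one sequence
    print(len(files[0]))  # 3  -> three frames inside it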
@@ -6,7 +6,7 @@ import pyblish.api
class ExtractOutputDirectory(pyblish.api.InstancePlugin):
    """Extracts the output path for any collection or single output_path."""

-   order = pyblish.api.ExtractorOrder - 0.1
+   order = pyblish.api.ExtractorOrder - 0.05
    label = "Output Directory"
    optional = True

@@ -16,9 +16,6 @@ class ExtractOutputDirectory(pyblish.api.InstancePlugin):

        path = None

        if "collection" in instance.data.keys():
            path = instance.data["collection"].format()

        if "output_path" in instance.data.keys():
            path = instance.data["path"]

@ -52,21 +52,8 @@ class NukeRenderLocal(pyblish.api.InstancePlugin):
|
|||
int(last_frame)
|
||||
)
|
||||
|
||||
# copy data to correct dir
|
||||
if not os.path.exists(output_dir):
|
||||
os.makedirs(output_dir)
|
||||
self.log.info("output dir has been created")
|
||||
|
||||
for f in os.listdir(temp_dir):
|
||||
self.log.info(f)
|
||||
shutil.copy(os.path.join(temp_dir, os.path.basename(f)),
|
||||
os.path.join(output_dir, os.path.basename(f)))
|
||||
|
||||
# swap path back to publish path
|
||||
path = node['file'].value()
|
||||
node['file'].setValue(path.replace(temp_dir, output_dir))
|
||||
|
||||
# swith to prerendered.frames
|
||||
instance[0]["render"].setValue(False)
|
||||
|
||||
self.log.info('Finished render')
|
||||
|
|
@@ -6,7 +6,7 @@ class ExtractScriptSave(pyblish.api.Extractor):
    """
    """
    label = 'Script Save'
-   order = pyblish.api.Extractor.order - 0.45
+   order = pyblish.api.Extractor.order - 0.1
    hosts = ['nuke']

    def process(self, instance):
pype/plugins/nuke/publish/extract_write_next_render.py (new file, 23 lines)

@@ -0,0 +1,23 @@
import pyblish.api


class WriteToRender(pyblish.api.InstancePlugin):
    """Swith Render knob on write instance to on,
    so next time publish will be set to render
    """

    order = pyblish.api.ExtractorOrder + 0.1
    label = "Write to render next"
    optional = True
    hosts = ["nuke", "nukeassist"]
    families = ["write"]

    def process(self, instance):
        if [f for f in instance.data["families"]
                if ".frames" in f]:
            instance[0]["render"].setValue(True)
            self.log.info("Swith write node render to `on`")
        else:
            # swith to
            instance[0]["render"].setValue(False)
            self.log.info("Swith write node render to `Off`")
@@ -24,7 +24,8 @@ class IntegrateFrames(pyblish.api.InstancePlugin):

    label = "Integrate Frames"
    order = pyblish.api.IntegratorOrder
-   families = ["render.frames", "still.frames", "prerender.frames"]
+   families = ["render.frames", "still.frames", "prerender.frames",
+               "render.local", "still.local", "prerender.local"]

    def process(self, instance):

@ -97,7 +98,6 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
|
|||
|
||||
assumed_data = instance.data["assumedTemplateData"]
|
||||
assumed_version = assumed_data["version"]
|
||||
|
||||
if assumed_version != next_version:
|
||||
raise AttributeError("Assumed version 'v{0:03d}' does not match"
|
||||
"next version in database "
|
||||
|
|
@@ -112,11 +112,9 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
                                 locations=[LOCATION],
                                 data=version_data)

-       self.log.debug("version: {}".format(version))
+       self.log.debug("Creating version ...")

        version_id = io.insert_one(version).inserted_id
-       self.log.debug("version_id: {}".format(version_id))
-
        # Write to disk
        #          _
        #         | |
@@ -129,11 +127,10 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
        #
        root = api.registered_root()
        hierarchy = io.find_one({"type": 'asset', "name": ASSET})['data']['parents']

        if hierarchy:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = os.path.join(*hierarchy)
            self.log.debug("hierarchy: {}".format(hierarchy))

        template_data = {"root": root,
                         "project": {"name": PROJECT,
                                     "code": project['data']['code']},
@ -199,6 +196,7 @@ class IntegrateFrames(pyblish.api.InstancePlugin):
|
|||
# |_______|
|
||||
#
|
||||
fname = files
|
||||
self.log.info("fname: {}".format(fname))
|
||||
assert not os.path.isabs(fname), (
|
||||
"Given file name is a full path"
|
||||
)
|
||||
|
|
|
|||
|
|
@@ -6,7 +6,7 @@ import pyblish.api
class IncrementScriptVersion(pyblish.api.InstancePlugin):
    """Increment current script version."""

-   order = pyblish.api.IntegratorOrder + 9
+   order = pyblish.api.IntegratorOrder + 0.9
    label = "Increment Current Script Version"
    optional = True
    hosts = ['nuke']
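Aside (not part of the diff): the order tweaks in this commit (CollectorOrder + 0.01 / 0.1 / 0.2, ExtractorOrder - 0.05 / + 0.1, IntegratorOrder - 2 / + 0.9) only make sense relative to each other, because pyblish sorts plugins by their order attribute before running them. A small sketch that prints the resulting run order for some of the plugins touched here, using the values from the hunks above and pyblish.api's standard order constants:

    import pyblish.api

    # order values as they appear in this commit (new values where changed)
    orders = {
        "CollectNukeInstances":    pyblish.api.CollectorOrder + 0.01,
        "CollectScript":           pyblish.api.CollectorOrder + 0.1,
        "CollectInstanceFamilies": pyblish.api.CollectorOrder + 0.2,
        "ValidateWriteFamilies":   pyblish.api.ValidatorOrder,
        "ValidateCollection":      pyblish.api.ValidatorOrder + 0.1,
        "ExtractOutputDirectory":  pyblish.api.ExtractorOrder - 0.05,
        "WriteToRender":           pyblish.api.ExtractorOrder + 0.1,
        "CopyStagingDir":          pyblish.api.IntegratorOrder - 2,
        "IntegrateFrames":         pyblish.api.IntegratorOrder,
        "IncrementScriptVersion":  pyblish.api.IntegratorOrder + 0.9,
    }

    # plugins run in ascending order of their `order` value
    for name, value in sorted(orders.items(), key=lambda item: item[1]):
        print("{:6.2f}  {}".format(value, name))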
@@ -23,9 +23,9 @@ class RepairCollectionAction(pyblish.api.Action):
class ValidateCollection(pyblish.api.InstancePlugin):
    """ Validates file output. """

-   order = pyblish.api.ValidatorOrder
-   # optional = True
-   families = ['prerendered.frames']
+   order = pyblish.api.ValidatorOrder + 0.1
+   families = ["render.frames", "still.frames", "prerender.frames"]
+
    label = "Check prerendered frames"
    hosts = ["nuke"]
    actions = [RepairCollectionAction]
@@ -47,11 +47,12 @@ class RepairCollectionAction(pyblish.api.Action):

        basename, ext = os.path.splitext(list(collections[0])[0])
        assert all(ext == os.path.splitext(name)[1]
-                  for name in collections[0]), self.log.info(
+                  for name in collections[0]), self.log.info(
            "Files had varying suffixes"
        )

-       assert not any(os.path.isabs(name) for name in collections[0]), self.log.info("some file name are absolute")
+       assert not any(os.path.isabs(name)
+                      for name in collections[0]), self.log.info("some file name are absolute")

        self.log.info('frame_length: {}'.format(frame_length))
        self.log.info('len(list(instance.data["files"])): {}'.format(
pype/plugins/nuke/publish/validate_write_families.py (new file, 38 lines)

@@ -0,0 +1,38 @@
import os
import pyblish.api
import clique


@pyblish.api.log
class RepairWriteFamiliesAction(pyblish.api.Action):
    label = "Fix Write's render attributes"
    on = "failed"
    icon = "wrench"

    def process(self, context, plugin):
        context[0][0]["render"].setValue(True)
        self.log.info("Rendering toggled ON")


class ValidateWriteFamilies(pyblish.api.InstancePlugin):
    """ Validates write families. """

    order = pyblish.api.ValidatorOrder
    label = "Check correct writes families"
    hosts = ["nuke"]
    families = ["write"]
    actions = [RepairWriteFamiliesAction]

    def process(self, instance):
        self.log.debug('instance.data["files"]: {}'.format(instance.data['files']))

        if not [f for f in instance.data["families"]
                if ".frames" in f]:
            return

        assert instance.data["files"], self.log.info(
            "`{}`: Swith `Render` on! \n"
            "No available frames to add to database. \n"
            "Use repair to render all frames".format(__name__))

        self.log.info("Checked correct writes families")
(deleted file, 17 lines)

@@ -1,17 +0,0 @@
import pyblish.api


class WriteToRender(pyblish.api.InstancePlugin):
    """Swith Render knob on write instance to on,
    so next time publish will be set to render
    """

    order = pyblish.api.IntegratorOrder + 11
    label = "Write to render next"
    optional = True
    hosts = ["nuke", "nukeassist"]
    families = ["render.frames", "still.frames", "prerender.frames"]

    def process(self, instance):
        instance[0]["render"].setValue(True)
        self.log.info("Swith write node render to `on`")