adding pyblish plugins from kredenc

This commit is contained in:
Jakub Jezek 2018-10-03 19:06:08 +02:00
parent 2dc510a3ab
commit 8b931f5207
13 changed files with 383 additions and 100 deletions

.gitignore (vendored): 93 lines changed
View file

@@ -4,96 +4,3 @@
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# dotenv
.env
# virtualenv
.venv
venv/
ENV/
# Spyder project settings
.spyderproject
# Rope project settings
.ropeproject
# Pycharm IDE settings
.idea

View file

@@ -2,7 +2,7 @@ import os
 from avalon import api as avalon
 from pyblish import api as pyblish
-from avalon.tools import workfiles
+# from avalon.tools import workfiles
 PARENT_DIR = os.path.dirname(__file__)
 PACKAGE_DIR = os.path.dirname(PARENT_DIR)
@@ -31,10 +31,10 @@ def install():
     avalon.data["familiesStateDefault"] = False
     avalon.data["familiesStateToggled"] = family_states
-    # work files start at app start
-    workfiles.show(
-        os.environ["AVALON_WORKDIR"]
-    )
+    # # work files start at app start
+    # workfiles.show(
+    #     os.environ["AVALON_WORKDIR"]
+    # )
 def uninstall():
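
The validators added later in this commit are standard Pyblish plugins, so they only take effect once their directory is registered with Pyblish, typically from an install() like the one above. A minimal, hypothetical sketch of that registration (the path below is an assumption, not taken from this diff):

import pyblish.api

# Hypothetical location of the plugins added by this commit; the directory
# actually registered by pype's install() is not shown in this diff.
PUBLISH_PATH = "/path/to/pype/plugins/nuke/publish"

pyblish.api.register_plugin_path(PUBLISH_PATH)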

View file

@@ -14,7 +14,7 @@ def _get_script():
     # todo: use a more elegant way to get the python script
     try:
-        from pype.scripts import publish_filesequence
+        from pype.fusion.scripts import publish_filesequence
     except Exception:
         raise RuntimeError("Expected module 'publish_filesequence' "
                            "to be available")

View file

@@ -11,7 +11,7 @@ import pyblish.api
 def _get_script():
     """Get path to the image sequence script"""
     try:
-        from pype.scripts import publish_filesequence
+        from pype.fusion.scripts import publish_filesequence
     except Exception as e:
         raise RuntimeError("Expected module 'publish_filesequence' "
                            "to be available")

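Both hunks above only swap the import path used inside `_get_script()`. For readers unfamiliar with the pattern, a hedged sketch of how such a helper typically resolves the script path from the imported module (an illustration under that assumption, not the repository's actual implementation):

import os


def _get_script():
    """Return the filesystem path of the publish_filesequence script.

    Illustrative only; the real helper in this repository may differ.
    """
    try:
        from pype.fusion.scripts import publish_filesequence
    except ImportError:
        raise RuntimeError("Expected module 'publish_filesequence' "
                           "to be available")

    # __file__ may point at the compiled .pyc; prefer the .py source.
    module_path = publish_filesequence.__file__
    if module_path.endswith(".pyc"):
        module_path = module_path[:-1]

    return os.path.abspath(module_path)
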
View file

@@ -0,0 +1,68 @@
import nuke
import os
import pyblish.api
import avalon.io as io


# TODO: add repair function
@pyblish.api.log
class ValidateSettingsNuke(pyblish.api.Validator):
    """ Validates settings """

    families = ['scene']
    hosts = ['nuke']
    optional = True
    label = 'Settings'

    def process(self, instance):

        asset = io.find_one({"name": os.environ['AVALON_ASSET']})

        try:
            avalon_resolution = asset["data"].get("resolution", '')
            avalon_pixel_aspect = asset["data"].get("pixel_aspect", '')
            avalon_fps = asset["data"].get("fps", '')
            avalon_first = asset["data"].get("edit_in", '')
            avalon_last = asset["data"].get("edit_out", '')
            avalon_crop = asset["data"].get("crop", '')
        except KeyError:
            print(
                "No resolution information found for \"{0}\".".format(
                    os.environ['AVALON_ASSET']
                )
            )
            return

        # validating first frame
        local_first = nuke.root()['first_frame'].value()
        msg = 'First frame is incorrect.'
        msg += '\n\nLocal first: %s' % local_first
        msg += '\n\nOnline first: %s' % avalon_first
        assert local_first == avalon_first, msg

        # validating last frame
        local_last = nuke.root()['last_frame'].value()
        msg = 'Last frame is incorrect.'
        msg += '\n\nLocal last: %s' % local_last
        msg += '\n\nOnline last: %s' % avalon_last
        assert local_last == avalon_last, msg

        # validating fps
        local_fps = nuke.root()['fps'].value()
        msg = 'FPS is incorrect.'
        msg += '\n\nLocal fps: %s' % local_fps
        msg += '\n\nOnline fps: %s' % avalon_fps
        assert local_fps == avalon_fps, msg

        # validating resolution width
        local_width = nuke.root().format().width()
        msg = 'Width is incorrect.'
        msg += '\n\nLocal width: %s' % local_width
        msg += '\n\nOnline width: %s' % avalon_resolution[0]
        assert local_width == avalon_resolution[0], msg

        # validating resolution height
        local_height = nuke.root().format().height()
        msg = 'Height is incorrect.'
        msg += '\n\nLocal height: %s' % local_height
        msg += '\n\nOnline height: %s' % avalon_resolution[1]
        assert local_height == avalon_resolution[1], msg
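
The `# TODO: add repair function` note above is left open by this commit. A hedged sketch of what such a Pyblish repair action could look like, assuming the same Avalon asset keys the validator reads (the class and its behaviour are illustrative, not part of this commit):

import os

import nuke
import pyblish.api
import avalon.io as io


class RepairNukeSettingsAction(pyblish.api.Action):
    """Hypothetical repair: copy frame range and fps from Avalon to Nuke."""

    label = "Repair"
    on = "failed"
    icon = "wrench"

    def process(self, context, plugin):
        asset = io.find_one({"name": os.environ["AVALON_ASSET"]})
        data = asset["data"]

        root = nuke.root()
        root["first_frame"].setValue(data["edit_in"])
        root["last_frame"].setValue(data["edit_out"])
        root["fps"].setValue(data["fps"])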

View file

@@ -0,0 +1,20 @@
import os
import pyblish.api


@pyblish.api.log
class ValidatePrerendersOutput(pyblish.api.Validator):
    """Validate that write nodes render into an 'output' directory"""

    families = ['write.prerender']
    hosts = ['nuke']
    label = 'Pre-renders output'

    def process(self, instance):

        path = os.path.dirname(instance[0]['file'].value())

        if 'output' not in path:
            name = instance[0].name()
            msg = 'Output directory for %s is not in an "output" folder.' % name
            raise ValueError(msg)
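
To make the check above concrete: it only requires the substring `output` somewhere in the directory part of the write node's `file` knob. A small illustration with hypothetical paths:

import os

# Hypothetical file-knob values, for illustration only.
good = "/proj/sh010/output/prerender/sh010_prerender.%04d.exr"
bad = "/proj/sh010/renders/sh010_prerender.%04d.exr"

print('output' in os.path.dirname(good))  # True  -> instance passes
print('output' in os.path.dirname(bad))   # False -> ValueError is raised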

View file

@@ -0,0 +1,56 @@
import os
import pyblish.api
import pype.utils


@pyblish.api.log
class RepairNukeWriteNodeAction(pyblish.api.Action):
    label = "Repair"
    on = "failed"
    icon = "wrench"

    def process(self, context, plugin):

        instances = pype.utils.filter_instances(context, plugin)

        for instance in instances:
            if "create_directories" in instance[0].knobs():
                instance[0]['create_directories'].setValue(True)
            else:
                path, file = os.path.split(instance.data['outputFilename'])
                self.log.info(path)
                if not os.path.exists(path):
                    os.makedirs(path)

            if "metadata" in instance[0].knobs().keys():
                instance[0]["metadata"].setValue("all metadata")


class ValidateNukeWriteNode(pyblish.api.InstancePlugin):
    """ Validates file output. """

    order = pyblish.api.ValidatorOrder
    optional = True
    families = ["write.render"]
    label = "Write Node"
    actions = [RepairNukeWriteNodeAction]
    hosts = ["nuke"]

    def process(self, instance):

        # Validate that the output directory exists, or that Nuke is set
        # to create it. The existence of the knob is queried because
        # previous versions of Nuke did not have this feature.
        if "create_directories" in instance[0].knobs():
            msg = "Use Create Directories"
            assert instance[0].knobs()['create_directories'].value() is True, msg
        else:
            path, file = os.path.split(instance.data['outputFilename'])
            msg = "Output directory doesn't exist: \"{0}\"".format(path)
            assert os.path.exists(path), msg

        # Validate metadata knob
        if "metadata" in instance[0].knobs().keys():
            msg = "Metadata needs to be set to \"all metadata\"."
            assert instance[0]["metadata"].value() == "all metadata", msg
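
The same knob checks can be exercised by hand from Nuke's Script Editor, which helps when debugging why the validator fails; a short hypothetical sketch (the node name is an assumption):

import nuke

# Hypothetical Write node; substitute the name of a failing instance.
write = nuke.toNode("Write1")

if "create_directories" in write.knobs():
    # Newer Nuke builds can create missing output directories themselves,
    # which is what the Repair action above switches on.
    write["create_directories"].setValue(True)

if "metadata" in write.knobs():
    write["metadata"].setValue("all metadata")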

pype/utils/__init__.py (new file, 98 lines)
View file

@@ -0,0 +1,98 @@
from .lib import *


def load_capture_preset(path):
    import capture_gui
    import capture

    preset = capture_gui.lib.load_json(path)
    print(preset)

    options = dict()

    # CODEC
    id = 'Codec'
    for key in preset[id]:
        options[str(key)] = preset[id][key]

    # GENERIC
    id = 'Generic'
    for key in preset[id]:
        if key.startswith('isolate'):
            pass
            # options['isolate'] = preset[id][key]
        else:
            options[str(key)] = preset[id][key]

    # RESOLUTION
    id = 'Resolution'
    options['height'] = preset[id]['height']
    options['width'] = preset[id]['width']

    # DISPLAY OPTIONS
    id = 'Display Options'
    disp_options = {}
    for key in preset['Display Options']:
        if key.startswith('background'):
            disp_options[key] = preset['Display Options'][key]
        else:
            disp_options['displayGradient'] = True

    options['display_options'] = disp_options

    # VIEWPORT OPTIONS
    temp_options = {}
    id = 'Renderer'
    for key in preset[id]:
        temp_options[str(key)] = preset[id][key]

    temp_options2 = {}
    id = 'Viewport Options'
    light_options = {0: "default",
                     1: 'all',
                     2: 'selected',
                     3: 'flat',
                     4: 'nolights'}
    for key in preset[id]:
        if key == 'high_quality':
            temp_options2['multiSampleEnable'] = True
            temp_options2['multiSampleCount'] = 4
            temp_options2['textureMaxResolution'] = 512
            temp_options2['enableTextureMaxRes'] = True

        if key == 'alphaCut':
            temp_options2['transparencyAlgorithm'] = 5
            temp_options2['transparencyQuality'] = 1

        if key == 'headsUpDisplay':
            temp_options['headsUpDisplay'] = True

        if key == 'displayLights':
            temp_options[str(key)] = light_options[preset[id][key]]
        else:
            temp_options[str(key)] = preset[id][key]

    for key in ['override_viewport_options', 'high_quality', 'alphaCut']:
        temp_options.pop(key, None)

    options['viewport_options'] = temp_options
    options['viewport2_options'] = temp_options2

    # use active sound track
    scene = capture.parse_active_scene()
    options['sound'] = scene['sound']

    cam_options = dict()
    cam_options['overscan'] = 1.0
    cam_options['displayFieldChart'] = False
    cam_options['displayFilmGate'] = False
    cam_options['displayFilmOrigin'] = False
    cam_options['displayFilmPivot'] = False
    cam_options['displayGateMask'] = False
    cam_options['displayResolution'] = False
    cam_options['displaySafeAction'] = False
    cam_options['displaySafeTitle'] = False

    # options['display_options'] = temp_options

    return options
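
For reference, the dictionary returned by `load_capture_preset()` is shaped so it can be unpacked straight into the `capture` module's `capture()` call; a hedged usage sketch (the preset path, camera and output file are assumptions):

import capture

from pype.utils import load_capture_preset

# Hypothetical preset exported from capture_gui.
options = load_capture_preset("/studio/presets/playblast_default.json")

capture.capture(
    camera="persp",              # hypothetical camera
    filename="/tmp/playblast",   # hypothetical output
    overwrite=True,
    **options
)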

pype/utils/lib.py (new file, 107 lines)
View file

@@ -0,0 +1,107 @@
import re
import tempfile
import json
import os
import sys

import pyblish.api

print('pyblish_utils loaded')


def save_preset(path, preset):
    """Save options to path"""
    with open(path, "w") as f:
        json.dump(preset, f)


def load_preset(path):
    """Load options json from path"""
    with open(path, "r") as f:
        return json.load(f)


def temp_dir(context):
    """Provide a temporary directory in which to store extracted files"""
    extract_dir = context.data('extractDir')
    if not extract_dir:
        extract_dir = tempfile.mkdtemp()
        context.set_data('extractDir', value=extract_dir)
    return extract_dir


def version_get(string, prefix, suffix=None):
    """Extract version information from filenames.

    Code from Foundry's nukescripts.version_get().
    """
    if string is None:
        raise ValueError("Empty version string - no match")

    regex = "[/_.]" + prefix + "\d+"
    matches = re.findall(regex, string, re.IGNORECASE)
    if not len(matches):
        msg = "No \"_" + prefix + "#\" found in \"" + string + "\""
        raise ValueError(msg)
    return (matches[-1:][0][1], re.search("\d+", matches[-1:][0]).group())


def version_set(string, prefix, oldintval, newintval):
    """Change version information in filenames.

    Code from Foundry's nukescripts.version_set().
    """
    regex = "[/_.]" + prefix + "\d+"
    matches = re.findall(regex, string, re.IGNORECASE)
    if not len(matches):
        return ""

    # Filter to retain only version strings with matching numbers
    matches = filter(lambda s: int(s[2:]) == oldintval, matches)

    # Replace all version strings with matching numbers
    for match in matches:
        # use expression instead of expr so 0 prefix does not make octal
        fmt = "%%(#)0%dd" % (len(match) - 2)
        newfullvalue = match[0] + prefix + str(fmt % {"#": newintval})
        string = re.sub(match, newfullvalue, string)
    return string


def version_up(string):
    try:
        (prefix, v) = version_get(string, 'v')
        v = int(v)
        file = version_set(string, prefix, v, v + 1)
    except:
        raise ValueError('Unable to version up File')

    return file


def open_folder(path):
    """Open the given path in the platform's file browser"""
    import subprocess

    path = os.path.abspath(path)
    if sys.platform == 'win32':
        subprocess.Popen('explorer "%s"' % path)
    elif sys.platform == 'darwin':  # macOS
        subprocess.Popen(['open', path])
    else:  # linux
        try:
            subprocess.Popen(['xdg-open', path])
        except OSError:
            raise OSError('unsupported xdg-open call??')


def filter_instances(context, plugin):
    """Return the context's instances that the given plug-in is compatible with"""
    # Get the errored instances
    allInstances = []
    for result in context.data["results"]:
        if (result["instance"] is not None and
                result["instance"] not in allInstances):
            allInstances.append(result["instance"])

    # Apply pyblish.logic to get the instances for the plug-in
    instances = pyblish.api.instances_by_plugin(allInstances, plugin)
    return instances
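
A quick illustration of the versioning helpers above, using a hypothetical script path (the printed values follow directly from the regex logic in the code):

from pype.utils import version_get, version_set, version_up

path = "/shots/sh010/comp/sh010_comp_v012.nk"

print(version_get(path, "v"))          # ('v', '012')
print(version_set(path, "v", 12, 13))  # /shots/sh010/comp/sh010_comp_v013.nk
print(version_up(path))                # /shots/sh010/comp/sh010_comp_v013.nk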

View file

@@ -0,0 +1,27 @@
import nuke

# auto fix version paths in write nodes following root name of script
cmd = '''
import os
import re
rootVersion = re.search('[vV]\d+', os.path.split(nuke.root().name())[1]).group()
for each in nuke.allNodes():
    if each.Class() == 'Write':
        each['file'].setValue(re.sub('[vV]\d+', rootVersion, each['file'].value()))
'''
nuke.knobDefault('onScriptSave', cmd)
print('\n>>> menu.py: Function for automatic check of version in write nodes is added\n')

ffmpeg_cmd = '''if nuke.env['LINUX']:
    nuke.tcl('load ffmpegReader')
    nuke.tcl('load ffmpegWriter')
else:
    nuke.tcl('load movReader')
    nuke.tcl('load movWriter')'''
nuke.knobDefault('onScriptLoad', ffmpeg_cmd)

# run avalon's tool Workfiles
workfiles = '''import os
from avalon.tools import workfiles
if nuke.Root().name() == 'Root':
    nuke.scriptClose()
    workfiles.show(os.environ["AVALON_WORKDIR"])'''
nuke.knobDefault('onCreate', workfiles)
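
As a design note, the same version-sync behaviour could also be registered through Nuke's Python callback API rather than a knobDefault string; a hedged alternative sketch (not what this commit does):

import os
import re

import nuke


def _sync_write_versions():
    """Match the vNNN token in every Write node's path to the script name."""
    script_name = os.path.split(nuke.root().name())[1]
    root_version = re.search(r'[vV]\d+', script_name).group()
    for node in nuke.allNodes('Write'):
        node['file'].setValue(
            re.sub(r'[vV]\d+', root_version, node['file'].value()))


nuke.addOnScriptSave(_sync_write_versions)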