feat(glob,nk): remove old files/dirs

Jakub Jezek 2020-03-04 15:09:02 +01:00
parent 083d184250
commit a22ca26655
17 changed files with 0 additions and 940 deletions

@@ -1,60 +0,0 @@
import os
import subprocess

import pyblish.api

CREATE_NO_WINDOW = 0x08000000


def deadline_command(cmd):
    # Find Deadline
    path = os.environ.get("DEADLINE_PATH", None)
    assert path is not None, "Variable 'DEADLINE_PATH' must be set"

    executable = os.path.join(path, "deadlinecommand")
    if os.name == "nt":
        executable += ".exe"
    assert os.path.exists(
        executable), "Deadline executable not found at %s" % executable

    assert cmd, "Must have a command"
    query = (executable, cmd)

    process = subprocess.Popen(
        query,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
        # creationflags is Windows-only; Popen rejects a non-zero
        # value on other platforms.
        creationflags=CREATE_NO_WINDOW if os.name == "nt" else 0
    )
    out, err = process.communicate()
    return out


class CollectDeadlineUser(pyblish.api.ContextPlugin):
    """Retrieve the local active Deadline user"""

    order = pyblish.api.CollectorOrder + 0.499
    label = "Deadline User"
    hosts = ['maya', 'fusion', 'nuke']
    families = [
        "renderlayer",
        "saver.deadline",
        "imagesequence"
    ]

    def process(self, context):
        """Inject the current working file"""
        user = None
        try:
            user = deadline_command("GetCurrentUserName").strip()
        except Exception:
            # A bare "except:" would also swallow SystemExit/KeyboardInterrupt.
            self.log.warning("Deadline command seems not to be working")

        if not user:
            self.log.warning("No Deadline user found. "
                             "Do you have Deadline installed?")
            return

        self.log.info("Found Deadline user: {}".format(user))
        context.data['deadlineUser'] = user

@@ -1,127 +0,0 @@
import os
import json
import re

import pyblish.api
import clique


class CollectJSON(pyblish.api.ContextPlugin):
    """Collecting the json files in current directory."""

    label = "JSON"
    order = pyblish.api.CollectorOrder
    hosts = ['maya']

    def version_get(self, string, prefix):
        """Extract version information from filenames. Code from Foundry's
        nukescripts.version_get()
        """
        regex = r"[/_.]{}\d+".format(prefix)
        matches = re.findall(regex, string, re.IGNORECASE)

        if not matches:
            msg = "No '_{}#' found in '{}'".format(prefix, string)
            raise ValueError(msg)
        return matches[-1][1], re.search(r"\d+", matches[-1]).group()

    def process(self, context):
        current_file = context.data.get("currentFile", '')

        # Skip if current file is not a directory
        if not os.path.isdir(current_file):
            return

        # Traverse directory and collect collections from json files.
        instances = []
        for root, dirs, files in os.walk(current_file):
            for f in files:
                if f.endswith(".json"):
                    with open(os.path.join(root, f)) as json_data:
                        for data in json.load(json_data):
                            instances.append(data)

        # Validate instance based on supported families.
        valid_families = ["img", "cache", "scene", "mov"]
        valid_data = []
        for data in instances:
            families = data.get("families", []) + [data["family"]]
            family_type = list(set(families) & set(valid_families))
            if family_type:
                valid_data.append(data)

        # Create existing output instance.
        scanned_dirs = []
        files = []
        collections = []
        for data in valid_data:
            if "collection" not in data.keys():
                continue
            if data["collection"] is None:
                continue

            instance_collection = clique.parse(data["collection"])

            try:
                version = self.version_get(
                    os.path.basename(instance_collection.format()), "v"
                )[1]
            except ValueError:
                # version_get() raises ValueError for unversioned names;
                # ignore any output that is not versioned.
                continue

            # Getting collections of all previous versions and current version
            for count in range(1, int(version) + 1):
                # Generate collection
                version_string = "v" + str(count).zfill(len(version))
                head = instance_collection.head.replace(
                    "v" + version, version_string
                )
                collection = clique.Collection(
                    head=head.replace("\\", "/"),
                    padding=instance_collection.padding,
                    tail=instance_collection.tail
                )
                collection.version = count

                # Scan collection directory
                scan_dir = os.path.dirname(collection.head)
                if scan_dir not in scanned_dirs and os.path.exists(scan_dir):
                    for f in os.listdir(scan_dir):
                        file_path = os.path.join(scan_dir, f)
                        files.append(file_path.replace("\\", "/"))
                    scanned_dirs.append(scan_dir)

                # Match files to collection and add
                for f in files:
                    if collection.match(f):
                        collection.add(f)

                # Skip if no files were found in the collection
                if not list(collection):
                    continue

                # Skip existing collections
                if collection in collections:
                    continue

                instance = context.create_instance(name=data["name"])
                version = self.version_get(
                    os.path.basename(collection.format()), "v"
                )[1]

                basename = os.path.basename(collection.format())
                instance.data["label"] = "{0} - {1}".format(
                    data["name"], basename
                )

                families = data["families"] + [data["family"]]
                family = list(set(valid_families) & set(families))[0]
                instance.data["family"] = family
                instance.data["families"] = ["output"]
                instance.data["collection"] = collection
                instance.data["version"] = int(version)
                instance.data["publish"] = False

                collections.append(collection)
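For reference, a quick sketch of what version_get() returns; the filename is hypothetical, not from the repository:

# The regex "[/_.]v<digits>" matches "_v012"; the method returns the
# prefix letter plus the zero-padded digit string.
prefix, digits = CollectJSON().version_get("render_v012.exr", "v")
assert (prefix, digits) == ("v", "012")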

@@ -1,88 +0,0 @@
import os

import pyblish.api
from avalon import io

texture_extensions = ['.tif', '.tiff', '.jpg', '.jpeg', '.tx', '.png', '.tga',
                      '.psd', '.dpx', '.hdr', '.hdri', '.exr', '.sxr', '.psb']


class CollectTextures(pyblish.api.ContextPlugin):
    """
    Gather all texture files in working directory, traversing whole structure.
    """

    order = pyblish.api.CollectorOrder
    targets = ["texture"]
    label = "Textures"
    hosts = ["shell"]

    def process(self, context):
        if os.environ.get("PYPE_PUBLISH_PATHS"):
            paths = os.environ["PYPE_PUBLISH_PATHS"].split(os.pathsep)
        else:
            cwd = context.get("workspaceDir", os.getcwd())
            paths = [cwd]

        textures = []
        for root, dirs, files in os.walk(path):
            pass  # placeholder removed below; see loop over paths
        for path in paths:
            for root, dirs, files in os.walk(path):
                textures.extend(
                    os.path.join(root, x) for x in files
                    if os.path.splitext(x)[1].lower() in texture_extensions)

        self.log.info("Got {} texture files.".format(len(textures)))
        if len(textures) < 1:
            raise RuntimeError("No textures found.")

        asset_name = os.environ.get("AVALON_ASSET")
        family = 'texture'
        subset = 'Main'

        project = io.find_one({'type': 'project'})
        asset = io.find_one({
            'type': 'asset',
            'name': asset_name
        })

        context.data['project'] = project
        context.data['asset'] = asset

        for tex in textures:
            self.log.info("Processing: {}".format(tex))
            name, ext = os.path.splitext(tex)
            simple_name = os.path.splitext(os.path.basename(tex))[0]

            instance = context.create_instance(simple_name)

            instance.data.update({
                "subset": subset,
                "asset": asset_name,
                "label": simple_name,
                "name": simple_name,
                "family": family,
                "families": [family, 'ftrack'],
            })
            instance.data['destination_list'] = list()
            instance.data['representations'] = list()
            instance.data['source'] = 'pype command'

            texture_data = {}
            texture_data['anatomy_template'] = 'texture'
            texture_data["ext"] = ext
            texture_data["label"] = simple_name
            texture_data["name"] = "texture"
            texture_data["stagingDir"] = os.path.dirname(tex)
            texture_data["files"] = os.path.basename(tex)
            texture_data["thumbnail"] = False
            texture_data["preview"] = False

            instance.data["representations"].append(texture_data)
            self.log.info("collected instance: {}".format(instance.data))

        self.log.info("All collected.")

@@ -1,51 +0,0 @@
import os
import json
import datetime
import time

import pyblish.api
import clique


class ExtractJSON(pyblish.api.ContextPlugin):
    """Extract all instances to a serialized json file."""

    order = pyblish.api.IntegratorOrder
    label = "JSON"
    hosts = ['maya']

    def process(self, context):
        workspace = os.path.join(
            os.path.dirname(context.data["currentFile"]), "workspace",
            "instances")

        if not os.path.exists(workspace):
            os.makedirs(workspace)

        output_data = []
        for instance in context:
            self.log.debug(instance.data)

            data = {}
            for key, value in instance.data.items():
                if isinstance(value, clique.Collection):
                    value = value.format()

                try:
                    json.dumps(value)
                    data[key] = value
                except TypeError:
                    # json.dumps raises TypeError on unserializable values.
                    msg = "\"{0}\"".format(value)
                    msg += " in instance.data[\"{0}\"]".format(key)
                    msg += " could not be serialized."
                    self.log.debug(msg)

            output_data.append(data)

        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime("%Y%m%d-%H%M%S")
        filename = timestamp + "_instances.json"

        with open(os.path.join(workspace, filename), "w") as outfile:
            outfile.write(json.dumps(output_data, indent=4, sort_keys=True))

@@ -1,86 +0,0 @@
import os
import subprocess

import pyblish.api
import clique


class ExtractQuicktimeEXR(pyblish.api.InstancePlugin):
    """Resolve any dependency issues.

    This plug-in resolves any paths which, if not updated might break
    the published file.

    The order of families is important, when working with lookdev you want to
    first publish the texture, update the texture paths in the nodes and then
    publish the shading network. Same goes for file dependent assets.
    """

    label = "Extract Quicktime"
    order = pyblish.api.ExtractorOrder
    families = ["imagesequence", "render", "write", "source"]
    hosts = ["shell"]

    def process(self, instance):
        # The whole body is kept commented out; "pass" keeps the method
        # syntactically valid.
        pass
        # fps = instance.data.get("fps")
        # start = instance.data.get("startFrame")
        # stagingdir = os.path.normpath(instance.data.get("stagingDir"))
        #
        # collected_frames = os.listdir(stagingdir)
        # collections, remainder = clique.assemble(collected_frames)
        #
        # full_input_path = os.path.join(
        #     stagingdir, collections[0].format('{head}{padding}{tail}')
        # )
        # self.log.info("input {}".format(full_input_path))
        #
        # filename = collections[0].format('{head}')
        # if not filename.endswith('.'):
        #     filename += "."
        # movFile = filename + "mov"
        # full_output_path = os.path.join(stagingdir, movFile)
        #
        # self.log.info("output {}".format(full_output_path))
        #
        # config_data = instance.context.data['output_repre_config']
        #
        # proj_name = os.environ.get('AVALON_PROJECT', '__default__')
        # profile = config_data.get(proj_name, config_data['__default__'])
        #
        # input_args = []
        # # overrides output file
        # input_args.append("-y")
        # # preset's input data
        # input_args.extend(profile.get('input', []))
        # # necessary input data
        # input_args.append("-start_number {}".format(start))
        # input_args.append("-i {}".format(full_input_path))
        # input_args.append("-framerate {}".format(fps))
        #
        # output_args = []
        # # preset's output data
        # output_args.extend(profile.get('output', []))
        # # output filename
        # output_args.append(full_output_path)
        # mov_args = [
        #     "ffmpeg",
        #     " ".join(input_args),
        #     " ".join(output_args)
        # ]
        # subprocess_mov = " ".join(mov_args)
        # sub_proc = subprocess.Popen(subprocess_mov)
        # sub_proc.wait()
        #
        # if not os.path.isfile(full_output_path):
        #     raise RuntimeError("Quicktime wasn't created successfully")
        #
        # if "representations" not in instance.data:
        #     instance.data["representations"] = []
        #
        # representation = {
        #     'name': 'mov',
        #     'ext': 'mov',
        #     'files': movFile,
        #     "stagingDir": stagingdir,
        #     "preview": True
        # }
        # instance.data["representations"].append(representation)

@@ -1,153 +0,0 @@
import os
import subprocess

import pyblish.api
import filelink


class ExtractTranscode(pyblish.api.InstancePlugin):
    """Extracts review movie from image sequence.

    Offset to get images to transcode from.
    """

    order = pyblish.api.ExtractorOrder + 0.1
    label = "Transcode"
    optional = True
    families = ["review"]

    def find_previous_index(self, index, indexes):
        """Finds the closest previous value in a list from a value."""
        data = []
        for i in indexes:
            if i >= index:
                continue
            data.append(index - i)

        return indexes[data.index(min(data))]

    def process(self, instance):
        if "collection" in instance.data.keys():
            self.process_image(instance)
        if "output_path" in instance.data.keys():
            self.process_movie(instance)

    def process_image(self, instance):
        collection = instance.data.get("collection", [])
        if not list(collection):
            msg = "Skipping \"{0}\" because no frames were found."
            self.log.warning(msg.format(instance.data["name"]))
            return

        # Temporarily fill the missing frames.
        missing = collection.holes()
        if not collection.is_contiguous():
            pattern = collection.format("{head}{padding}{tail}")
            for index in missing.indexes:
                dst = pattern % index
                src_index = self.find_previous_index(
                    index, list(collection.indexes)
                )
                src = pattern % src_index

                filelink.create(src, dst)

        # Generate args.
        # Has to be yuv420p for compatibility with older players and smooth
        # playback. This does come with a sacrifice of more visible banding
        # issues.
        # -crf 18 is visually lossless.
        args = [
            "ffmpeg", "-y",
            "-start_number", str(min(collection.indexes)),
            "-framerate", str(instance.context.data["framerate"]),
            "-i", collection.format("{head}{padding}{tail}"),
            "-pix_fmt", "yuv420p",
            "-crf", "18",
            "-timecode", "00:00:00:01",
            "-vframes",
            str(max(collection.indexes) - min(collection.indexes) + 1),
            "-vf",
            "scale=trunc(iw/2)*2:trunc(ih/2)*2",
        ]

        if instance.data.get("baked_colorspace_movie"):
            args = [
                "ffmpeg", "-y",
                "-i", instance.data["baked_colorspace_movie"],
                "-pix_fmt", "yuv420p",
                "-crf", "18",
                "-timecode", "00:00:00:01",
            ]

        args.append(collection.format("{head}.mov"))

        self.log.debug("Executing args: {0}".format(args))

        # Can't use subprocess.check_output, cause Houdini doesn't like that.
        p = subprocess.Popen(
            args,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE,
            cwd=os.path.dirname(args[-1])
        )

        output = p.communicate()[0]

        # Remove temporary frame fillers
        for f in missing:
            os.remove(f)

        if p.returncode != 0:
            raise ValueError(output)

        self.log.debug(output)

    def process_movie(self, instance):
        # Generate args.
        # Has to be yuv420p for compatibility with older players and smooth
        # playback. This does come with a sacrifice of more visible banding
        # issues.
        args = [
            "ffmpeg", "-y",
            "-i", instance.data["output_path"],
            "-pix_fmt", "yuv420p",
            "-crf", "18",
            "-timecode", "00:00:00:01",
        ]

        if instance.data.get("baked_colorspace_movie"):
            args = [
                "ffmpeg", "-y",
                "-i", instance.data["baked_colorspace_movie"],
                "-pix_fmt", "yuv420p",
                "-crf", "18",
                "-timecode", "00:00:00:01",
            ]

        split = os.path.splitext(instance.data["output_path"])
        args.append(split[0] + "_review.mov")

        self.log.debug("Executing args: {0}".format(args))

        # Can't use subprocess.check_output, cause Houdini doesn't like that.
        p = subprocess.Popen(
            args,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE,
            cwd=os.path.dirname(args[-1])
        )

        output = p.communicate()[0]

        if p.returncode != 0:
            raise ValueError(output)

        self.log.debug(output)
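To make the hole-filling step concrete, a small sketch with hypothetical frame numbers; each missing index resolves to the closest existing frame below it, which is the frame that gets hard-linked into the gap:

existing = [1, 2, 3, 7]  # frames on disk; holes at 4, 5 and 6
plugin = ExtractTranscode()
for missing_index in (4, 5, 6):
    # All three holes resolve to frame 3, so ffmpeg sees a contiguous
    # sequence built from links to frame 3.
    assert plugin.find_previous_index(missing_index, existing) == 3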

@@ -1 +0,0 @@
# usually used for matte painting

@@ -1,46 +0,0 @@
import pyblish.api


@pyblish.api.log
class CollectRenderTarget(pyblish.api.InstancePlugin):
    """Collect families for all instances"""

    order = pyblish.api.CollectorOrder + 0.2
    label = "Collect Render Target"
    hosts = ["nuke", "nukeassist"]
    families = ['write']

    def process(self, instance):
        node = instance[0]
        self.log.info('processing {}'.format(node))

        families = []
        if instance.data.get('families'):
            families += instance.data['families']

        # set for ftrack to accept
        # instance.data["families"] = ["ftrack"]

        if node["render"].value():
            # dealing with local/farm rendering
            if node["render_farm"].value():
                families.append("render.farm")
            else:
                families.append("render.local")
        else:
            families.append("render.frames")
            # to ignore staging dir op in integrate
            instance.data['transfer'] = False

        families.append('ftrack')
        instance.data["families"] = families

        # Sort/grouped by family (preserving local index)
        instance.context[:] = sorted(instance.context, key=self.sort_by_family)

    def sort_by_family(self, instance):
        """Sort by family"""
        return instance.data.get("families", instance.data.get("family"))

@@ -1,147 +0,0 @@
import os
import json
import getpass

from avalon import api
from avalon.vendor import requests

import pyblish.api


class NukeSubmitDeadline(pyblish.api.InstancePlugin):
    # TODO: rewrite docstring to nuke
    """Submit current Comp to Deadline

    Renders are submitted to a Deadline Web Service as
    supplied via the environment variable DEADLINE_REST_URL
    """

    label = "Submit to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["nuke"]
    families = ["write", "render.deadline"]

    def process(self, instance):
        context = instance.context

        key = "__hasRun{}".format(self.__class__.__name__)
        if context.data.get(key, False):
            return
        else:
            context.data[key] = True

        DEADLINE_REST_URL = api.Session.get("DEADLINE_REST_URL",
                                            "http://localhost:8082")
        assert DEADLINE_REST_URL, "Requires DEADLINE_REST_URL"

        # Collect all saver instances in context that are to be rendered
        write_instances = []
        for instance in context[:]:
            if self.families[0] not in instance.data.get("families", []):
                # Allow only saver family instances
                continue
            if not instance.data.get("publish", True):
                # Skip inactive instances
                continue
            self.log.debug(instance.data["name"])
            write_instances.append(instance)

        if not write_instances:
            raise RuntimeError("No instances found for Deadline submission")

        hostVersion = int(context.data["hostVersion"])
        filepath = context.data["currentFile"]
        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")
        deadline_user = context.data.get("deadlineUser", getpass.getuser())

        # Documentation for keys available at:
        # https://docs.thinkboxsoftware.com
        #    /products/deadline/8.0/1_User%20Manual/manual
        #    /manual-submission.html#job-info-file-options
        payload = {
            "JobInfo": {
                # Top-level group name
                "BatchName": filename,

                # Job name, as seen in Monitor
                "Name": filename,

                # User, as seen in Monitor
                "UserName": deadline_user,

                # Use a default submission pool for Nuke
                "Pool": "nuke",

                "Plugin": "Nuke",
                "Frames": "{start}-{end}".format(
                    start=int(instance.data["frameStart"]),
                    end=int(instance.data["frameEnd"])
                ),

                "Comment": comment,
            },
            "PluginInfo": {
                # Input
                "FlowFile": filepath,

                # Mandatory for Deadline
                "Version": str(hostVersion),

                # Render in high quality
                "HighQuality": True,

                # Whether saver output should be checked after rendering
                # is complete
                "CheckOutput": True,

                # Proxy: higher numbers smaller images for faster test renders
                # 1 = no proxy quality
                "Proxy": 1,
            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Enable going to rendered frames from Deadline Monitor
        for index, instance in enumerate(write_instances):
            path = instance.data["path"]
            folder, filename = os.path.split(path)
            payload["JobInfo"]["OutputDirectory%d" % index] = folder
            payload["JobInfo"]["OutputFilename%d" % index] = filename

        # Include critical variables with submission
        keys = [
            # TODO: This won't work if the slaves don't have access to
            # these paths, such as if slaves are running Linux and the
            # submitter is on Windows.
            "PYTHONPATH",
            "NUKE_PATH"
            # "OFX_PLUGIN_PATH",
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)
        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })

        self.log.info("Submitting...")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs
        url = "{}/api/jobs".format(DEADLINE_REST_URL)
        response = requests.post(url, json=payload)
        if not response.ok:
            raise Exception(response.text)

        # Store the response for dependent job submission plug-ins
        for instance in write_instances:
            instance.data["deadlineSubmissionJob"] = response.json()
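The stored response is what lets later plug-ins chain onto this job. A minimal sketch of such a dependent submission, assuming Deadline's standard "_id" job field and its generic Python plugin; no dependent-submission code like this exists in the repository:

submission = instance.data["deadlineSubmissionJob"]
payload = {
    "JobInfo": {
        "Name": "publish: {}".format(filename),
        # Wait for the render job to finish first.
        "JobDependency0": submission["_id"],
        "Plugin": "Python",
    },
    "PluginInfo": {},
    "AuxFiles": [],
}
requests.post("{}/api/jobs".format(DEADLINE_REST_URL), json=payload)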

@@ -1,24 +0,0 @@
import pyblish.api


class IncrementTestPlugin(pyblish.api.ContextPlugin):
    """Increment current script version."""

    order = pyblish.api.CollectorOrder + 0.5
    label = "Test Plugin"
    hosts = ['nuke']

    def process(self, context):
        instances = context[:]

        prerender_check = list()
        families_check = list()
        for instance in instances:
            if "prerender" in str(instance):
                prerender_check.append(instance)
            if instance.data.get("families", None):
                families_check.append(True)

        if len(prerender_check) != len(families_check):
            self.log.info(prerender_check)
            self.log.info(families_check)

@@ -1,68 +0,0 @@
import os

import nuke
import pyblish.api
from avalon import io


# TODO: add repair function
@pyblish.api.log
class ValidateSettingsNuke(pyblish.api.Validator):
    """Validates settings"""

    families = ['scene']
    hosts = ['nuke']
    optional = True
    label = 'Settings'

    def process(self, instance):
        asset = io.find_one({"name": os.environ['AVALON_ASSET']})

        try:
            avalon_resolution = asset["data"].get("resolution", '')
            avalon_pixel_aspect = asset["data"].get("pixelAspect", '')
            avalon_fps = asset["data"].get("fps", '')
            avalon_first = asset["data"].get("frameStart", '')
            avalon_last = asset["data"].get("frameEnd", '')
            avalon_crop = asset["data"].get("crop", '')
        except KeyError:
            print(
                "No resolution information found for \"{0}\".".format(
                    asset["name"]
                )
            )
            return

        # validating first frame
        local_first = nuke.root()['first_frame'].value()
        msg = 'First frame is incorrect.'
        msg += '\n\nLocal first: %s' % local_first
        msg += '\n\nOnline first: %s' % avalon_first
        assert local_first == avalon_first, msg

        # validating last frame
        local_last = nuke.root()['last_frame'].value()
        msg = 'Last frame is incorrect.'
        msg += '\n\nLocal last: %s' % local_last
        msg += '\n\nOnline last: %s' % avalon_last
        assert local_last == avalon_last, msg

        # validating fps
        local_fps = nuke.root()['fps'].value()
        msg = 'FPS is incorrect.'
        msg += '\n\nLocal fps: %s' % local_fps
        msg += '\n\nOnline fps: %s' % avalon_fps
        assert local_fps == avalon_fps, msg

        # validating resolution width
        local_width = nuke.root().format().width()
        msg = 'Width is incorrect.'
        msg += '\n\nLocal width: %s' % local_width
        msg += '\n\nOnline width: %s' % avalon_resolution[0]
        assert local_width == avalon_resolution[0], msg

        # validating resolution height
        local_height = nuke.root().format().height()
        msg = 'Height is incorrect.'
        msg += '\n\nLocal height: %s' % local_height
        msg += '\n\nOnline height: %s' % avalon_resolution[1]
        assert local_height == avalon_resolution[1], msg

@@ -1,33 +0,0 @@
import nuke
import pyblish.api


class RepairNukeProxyModeAction(pyblish.api.Action):
    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        nuke.root()["proxy"].setValue(0)


class ValidateNukeProxyMode(pyblish.api.ContextPlugin):
    """Validates against having proxy mode on."""

    order = pyblish.api.ValidatorOrder
    optional = True
    label = "Proxy Mode"
    actions = [RepairNukeProxyModeAction]
    hosts = ["nuke", "nukeassist"]
    # targets = ["default", "process"]

    def process(self, context):
        msg = (
            "Proxy mode is not supported. Please disable Proxy Mode in the "
            "Project settings."
        )
        assert not nuke.root()["proxy"].getValue(), msg

@@ -1,54 +0,0 @@
'''
Simple socket server using threads
'''
import socket
import sys
import threading
import StringIO  # Python 2 module; this script targets Nuke's Python 2
import contextlib

import nuke

HOST = ''
PORT = 8888


@contextlib.contextmanager
def stdoutIO(stdout=None):
    old = sys.stdout
    if stdout is None:
        stdout = StringIO.StringIO()
    sys.stdout = stdout
    yield stdout
    sys.stdout = old


def _exec(data):
    with stdoutIO() as s:
        exec(data)
    return s.getvalue()


def server_start():
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((HOST, PORT))
    s.listen(5)

    while 1:
        client, address = s.accept()
        try:
            data = client.recv(4096)
            if data:
                # "args" must be a tuple; "(data)" is just a string.
                result = nuke.executeInMainThreadWithResult(
                    _exec, args=(data,)
                )
                client.send(str(result))
        except SystemExit:
            # "self" does not exist in this function; send the text directly.
            result = 'SERVER: Shutting down...'
            client.send(str(result))
            raise
        finally:
            client.close()


t = threading.Thread(None, server_start)
t.setDaemon(True)
t.start()
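For context, any plain TCP client can drive this server; a minimal Python 2 sketch to match, with host and port assumed to be the values above:

import socket

client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(("localhost", 8888))
# The payload is Python source; it runs in Nuke's main thread and
# whatever it prints comes back as the reply.
client.send("print(nuke.root().name())")
print(client.recv(4096))
client.close()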

@@ -1,6 +1,5 @@
import os
import sys
import atom_server
import KnobScripter
from pype.nuke.lib import (