rename config folder

Jakub Jezek 2018-10-02 22:55:04 +02:00
parent bb17ef7221
commit 7667674317
182 changed files with 272 additions and 272 deletions

View file

@@ -0,0 +1,33 @@
from avalon import api, style
class CopyFile(api.Loader):
"""Copy the published file to be pasted at the desired location"""
representations = ["*"]
families = ["*"]
label = "Copy File"
order = 10
icon = "copy"
color = style.colors.default
def load(self, context, name=None, namespace=None, data=None):
self.log.info("Added copy to clipboard: {0}".format(self.fname))
self.copy_file_to_clipboard(self.fname)
@staticmethod
def copy_file_to_clipboard(path):
from avalon.vendor.Qt import QtCore, QtWidgets
app = QtWidgets.QApplication.instance()
assert app, "Must have running QApplication instance"
# Build mime data for clipboard
data = QtCore.QMimeData()
url = QtCore.QUrl.fromLocalFile(path)
data.setUrls([url])
# Set to Clipboard
clipboard = app.clipboard()
clipboard.setMimeData(data)

View file

@@ -0,0 +1,29 @@
import os
from avalon import api
class CopyFilePath(api.Loader):
"""Copy published file path to clipboard"""
representations = ["*"]
families = ["*"]
label = "Copy File Path"
order = 20
icon = "clipboard"
color = "#999999"
def load(self, context, name=None, namespace=None, data=None):
self.log.info("Added file path to clipboard: {0}".format(self.fname))
self.copy_path_to_clipboard(self.fname)
@staticmethod
def copy_path_to_clipboard(path):
from avalon.vendor.Qt import QtCore, QtWidgets
app = QtWidgets.QApplication.instance()
assert app, "Must have running QApplication instance"
# Set to Clipboard
clipboard = app.clipboard()
clipboard.setText(os.path.normpath(path))

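The two loaders above differ only in the clipboard payload: CopyFile puts a file URL on the clipboard so a file browser can paste the file itself, while CopyFilePath puts plain text so any text field can paste the path. A minimal standalone sketch of both, assuming a running QApplication; the path is illustrative:

from avalon.vendor.Qt import QtCore, QtWidgets

app = QtWidgets.QApplication.instance() or QtWidgets.QApplication([])

# Paste-as-file: wrap the path in a QUrl and hand it over as mime data
mime = QtCore.QMimeData()
mime.setUrls([QtCore.QUrl.fromLocalFile("/projects/demo/v001/demo.ma")])
app.clipboard().setMimeData(mime)

# Paste-as-text: a plain string, usable in any text field
app.clipboard().setText("/projects/demo/v001/demo.ma")
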
View file

@@ -0,0 +1,49 @@
import sys
import os
import subprocess
from avalon import api
def open_with_default_app(filepath):
"""Open file with system default executable"""
if sys.platform.startswith('darwin'):
subprocess.call(('open', filepath))
elif os.name == 'nt':
os.startfile(filepath)
elif os.name == 'posix':
subprocess.call(('xdg-open', filepath))
class PlayImageSequence(api.Loader):
"""Open Image Sequence with system default"""
families = ["studio.imagesequence"]
representations = ["*"]
label = "Play sequence"
order = -10
icon = "play-circle"
color = "orange"
def load(self, context, name, namespace, data):
directory = self.fname
from avalon.vendor import clique
pattern = clique.PATTERNS["frames"]
files = os.listdir(directory)
collections, remainder = clique.assemble(files,
patterns=[pattern],
minimum_items=1)
assert not remainder, ("There shouldn't have been a remainder for "
"'%s': %s" % (directory, remainder))
        sequence = collections[0]
        first_image = list(sequence)[0]
filepath = os.path.normpath(os.path.join(directory, first_image))
self.log.info("Opening : {}".format(filepath))
        open_with_default_app(filepath)

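For reference, clique's "frames" pattern groups files by their trailing frame number, which is what the assemble() call above relies on. A small sketch with made-up filenames:

from avalon.vendor import clique

files = ["shot01.0001.exr", "shot01.0002.exr", "shot01.0003.exr"]
collections, remainder = clique.assemble(files,
                                         patterns=[clique.PATTERNS["frames"]],
                                         minimum_items=1)

sequence = collections[0]
print(sequence)           # e.g. "shot01.%04d.exr [1-3]"
print(list(sequence)[0])  # first member: "shot01.0001.exr"
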
View file

@@ -0,0 +1,33 @@
import os
import shutil
import pyblish.api
class CleanUp(pyblish.api.InstancePlugin):
"""Cleans up the staging directory after a successful publish.
    The removal only happens for staging directories located inside the
    temporary folder; any other staging directory is left untouched.
"""
order = pyblish.api.IntegratorOrder + 10
label = "Clean Up"
def process(self, instance):
import tempfile
staging_dir = instance.data.get("stagingDir", None)
if not staging_dir or not os.path.exists(staging_dir):
self.log.info("No staging directory found: %s" % staging_dir)
return
temp_root = tempfile.gettempdir()
if not os.path.normpath(staging_dir).startswith(temp_root):
self.log.info("Skipping cleanup. Staging directory is not in the "
"temp folder: %s" % staging_dir)
return
self.log.info("Removing temporary folder ...")
shutil.rmtree(staging_dir)

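One caveat: startswith() is a plain string prefix test, so a sibling such as "/tmpfoo" would also pass for a temp root of "/tmp". A stricter containment check, sketched here as a Python 3 alternative (not what the plug-in currently does), compares whole path components instead:

import os
import tempfile

def is_in_temp(staging_dir):
    """Return True only if staging_dir lives under the temp root."""
    temp_root = os.path.normpath(tempfile.gettempdir())
    staging = os.path.normpath(staging_dir)
    # commonpath compares whole components, so "/tmpfoo" is not inside "/tmp"
    return os.path.commonpath([temp_root, staging]) == temp_root
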
View file

@@ -0,0 +1,108 @@
import pyblish.api
import os
from avalon import io, api
class CollectAssumedDestination(pyblish.api.InstancePlugin):
"""Generate the assumed destination path where the file will be stored"""
label = "Collect Assumed Destination"
order = pyblish.api.CollectorOrder + 0.499
def process(self, instance):
self.create_destination_template(instance)
template_data = instance.data["assumedTemplateData"]
template = instance.data["template"]
mock_template = template.format(**template_data)
# For now assume resources end up in a "resources" folder in the
# published folder
mock_destination = os.path.join(os.path.dirname(mock_template),
"resources")
# Clean the path
mock_destination = os.path.abspath(os.path.normpath(mock_destination))
# Define resource destination and transfers
resources = instance.data.get("resources", list())
transfers = instance.data.get("transfers", list())
for resource in resources:
# Add destination to the resource
source_filename = os.path.basename(resource["source"])
destination = os.path.join(mock_destination, source_filename)
resource['destination'] = destination
# Collect transfers for the individual files of the resource
# e.g. all individual files of a cache or UDIM textures.
files = resource['files']
for fsrc in files:
fname = os.path.basename(fsrc)
fdest = os.path.join(mock_destination, fname)
transfers.append([fsrc, fdest])
instance.data["resources"] = resources
instance.data["transfers"] = transfers
def create_destination_template(self, instance):
"""Create a filepath based on the current data available
Example template:
{root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
{subset}.{representation}
Args:
instance: the instance to publish
        Returns:
            None. The publish template and the assumed template data are
            stored on the instance.
"""
# get all the stuff from the database
subset_name = instance.data["subset"]
asset_name = instance.data["asset"]
project_name = api.Session["AVALON_PROJECT"]
project = io.find_one({"type": "project",
"name": project_name},
projection={"config": True})
template = project["config"]["template"]["publish"]
asset = io.find_one({"type": "asset",
"name": asset_name,
"parent": project["_id"]})
assert asset, ("No asset found by the name '{}' "
"in project '{}'".format(asset_name, project_name))
silo = asset['silo']
subset = io.find_one({"type": "subset",
"name": subset_name,
"parent": asset["_id"]})
# assume there is no version yet, we start at `1`
version = None
version_number = 1
if subset is not None:
version = io.find_one({"type": "version",
"parent": subset["_id"]},
sort=[("name", -1)])
            # if there is a subset there ought to be a version
if version is not None:
version_number += version["name"]
template_data = {"root": api.Session["AVALON_PROJECTS"],
"project": project_name,
"silo": silo,
"asset": asset_name,
"subset": subset_name,
"version": version_number,
"representation": "TEMP"}
instance.data["assumedTemplateData"] = template_data
instance.data["template"] = template

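To make the template step concrete, here is a hedged sketch of the formatting that produces the assumed destination; the template matches the docstring example and the values are made up (the real ones come from the project document and the session):

template = ("{root}/{project}/{silo}/{asset}/publish/"
            "{subset}/v{version:0>3}/{subset}.{representation}")
template_data = {"root": "/projects",
                 "project": "demo",
                 "silo": "assets",
                 "asset": "hero",
                 "subset": "modelDefault",
                 "version": 1,
                 "representation": "TEMP"}

print(template.format(**template_data))
# /projects/demo/assets/hero/publish/modelDefault/v001/modelDefault.TEMP
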
View file

@@ -0,0 +1,11 @@
import pyblish.api
class CollectColorbleedComment(pyblish.api.ContextPlugin):
"""This plug-ins displays the comment dialog box per default"""
label = "Collect Comment"
order = pyblish.api.CollectorOrder
def process(self, context):
context.data["comment"] = ""

View file

@@ -0,0 +1,22 @@
import os
import pyblish.api
class CollectContextLabel(pyblish.api.ContextPlugin):
"""Labelize context using the registered host and current file"""
order = pyblish.api.CollectorOrder + 0.25
label = "Context Label"
def process(self, context):
# Get last registered host
host = pyblish.api.registered_hosts()[-1]
# Get scene name from "currentFile"
path = context.data.get("currentFile") or "<Unsaved>"
base = os.path.basename(path)
# Set label
label = "{host} - {scene}".format(host=host.title(), scene=base)
context.data["label"] = label

View file

@@ -0,0 +1,14 @@
import os
import pyblish.api
class CollectCurrentShellFile(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.5
label = "Current File"
hosts = ["shell"]
def process(self, context):
"""Inject the current working file"""
context.data["currentFile"] = os.path.join(os.getcwd(), "<shell>")

View file

@@ -0,0 +1,52 @@
import os
import subprocess
import pyblish.api
CREATE_NO_WINDOW = 0x08000000
def deadline_command(cmd):
# Find Deadline
path = os.environ.get("DEADLINE_PATH", None)
assert path is not None, "Variable 'DEADLINE_PATH' must be set"
executable = os.path.join(path, "deadlinecommand")
if os.name == "nt":
executable += ".exe"
assert os.path.exists(
executable), "Deadline executable not found at %s" % executable
assert cmd, "Must have a command"
query = (executable, cmd)
process = subprocess.Popen(query, stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
creationflags=CREATE_NO_WINDOW)
out, err = process.communicate()
return out
class CollectDeadlineUser(pyblish.api.ContextPlugin):
"""Retrieve the local active Deadline user"""
order = pyblish.api.CollectorOrder + 0.499
label = "Deadline User"
hosts = ['maya', 'fusion']
families = ["studio.renderlayer", "studio.saver.deadline"]
def process(self, context):
"""Inject the current working file"""
user = deadline_command("GetCurrentUserName").strip()
if not user:
self.log.warning("No Deadline user found. "
"Do you have Deadline installed?")
return
self.log.info("Found Deadline user: {}".format(user))
context.data['deadlineUser'] = user

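deadline_command() is a thin wrapper around Deadline's deadlinecommand CLI, so GetCurrentUserName is the same query you could run by hand. A usage sketch with the helper defined above; the install path is illustrative only:

import os

# Illustrative location; point this at your actual Deadline client install
os.environ["DEADLINE_PATH"] = r"C:\Program Files\Thinkbox\Deadline10\bin"

print(deadline_command("GetCurrentUserName").strip())
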
View file

@@ -0,0 +1,184 @@
import os
import re
import copy
import json
import pprint
import pyblish.api
from avalon import api
def collect(root,
regex=None,
exclude_regex=None,
startFrame=None,
endFrame=None):
"""Collect sequence collections in root"""
from avalon.vendor import clique
files = list()
for filename in os.listdir(root):
# Must have extension
ext = os.path.splitext(filename)[1]
if not ext:
continue
# Only files
if not os.path.isfile(os.path.join(root, filename)):
continue
# Include and exclude regex
if regex and not re.search(regex, filename):
continue
if exclude_regex and re.search(exclude_regex, filename):
continue
files.append(filename)
# Match collections
# Support filenames like: projectX_shot01_0010.tiff with this regex
pattern = r"(?P<index>(?P<padding>0*)\d+)\.\D+\d?$"
collections, remainder = clique.assemble(files,
patterns=[pattern],
minimum_items=1)
# Ignore any remainders
if remainder:
print("Skipping remainder {}".format(remainder))
# Exclude any frames outside start and end frame.
for collection in collections:
for index in list(collection.indexes):
if startFrame is not None and index < startFrame:
collection.indexes.discard(index)
continue
if endFrame is not None and index > endFrame:
collection.indexes.discard(index)
continue
# Keep only collections that have at least a single frame
collections = [c for c in collections if c.indexes]
return collections
class CollectFileSequences(pyblish.api.ContextPlugin):
"""Gather file sequences from working directory
When "FILESEQUENCE" environment variable is set these paths (folders or
.json files) are parsed for image sequences. Otherwise the current
working directory is searched for file sequences.
The json configuration may have the optional keys:
asset (str): The asset to publish to. If not provided fall back to
api.Session["AVALON_ASSET"]
subset (str): The subset to publish to. If not provided the sequence's
head (up to frame number) will be used.
startFrame (int): The start frame for the sequence
endFrame (int): The end frame for the sequence
root (str): The path to collect from (can be relative to the .json)
regex (str): A regex for the sequence filename
exclude_regex (str): A regex for filename to exclude from collection
metadata (dict): Custom metadata for instance.data["metadata"]
"""
order = pyblish.api.CollectorOrder
targets = ["filesequence"]
label = "File Sequences"
def process(self, context):
if os.environ.get("FILESEQUENCE"):
paths = os.environ["FILESEQUENCE"].split(os.pathsep)
else:
            cwd = context.data.get("workspaceDir", os.getcwd())
paths = [cwd]
for path in paths:
self.log.info("Loading: {}".format(path))
if path.endswith(".json"):
# Search using .json configuration
with open(path, "r") as f:
try:
data = json.load(f)
except Exception as exc:
self.log.error("Error loading json: "
"{} - Exception: {}".format(path, exc))
raise
cwd = os.path.dirname(path)
root_override = data.get("root")
if root_override:
if os.path.isabs(root_override):
root = root_override
else:
root = os.path.join(cwd, root_override)
else:
root = cwd
else:
# Search in directory
data = dict()
root = path
self.log.info("Collecting: {}".format(root))
regex = data.get("regex")
if regex:
self.log.info("Using regex: {}".format(regex))
collections = collect(root=root,
regex=regex,
exclude_regex=data.get("exclude_regex"),
startFrame=data.get("startFrame"),
endFrame=data.get("endFrame"))
self.log.info("Found collections: {}".format(collections))
if data.get("subset"):
# If subset is provided for this json then it must be a single
# collection.
if len(collections) > 1:
self.log.error("Forced subset can only work with a single "
"found sequence")
raise RuntimeError("Invalid sequence")
# Get family from the data
families = data.get("families", ["studio.imagesequence"])
assert isinstance(families, (list, tuple)), "Must be iterable"
assert families, "Must have at least a single family"
for collection in collections:
instance = context.create_instance(str(collection))
self.log.info("Collection: %s" % list(collection))
# Ensure each instance gets a unique reference to the data
data = copy.deepcopy(data)
# If no subset provided, get it from collection's head
subset = data.get("subset", collection.head.rstrip("_. "))
# If no start or end frame provided, get it from collection
indices = list(collection.indexes)
start = data.get("startFrame", indices[0])
end = data.get("endFrame", indices[-1])
instance.data.update({
"name": str(collection),
"family": families[0], # backwards compatibility / pyblish
"families": list(families),
"subset": subset,
"asset": data.get("asset", api.Session["AVALON_ASSET"]),
"stagingDir": root,
"files": [list(collection)],
"startFrame": start,
"endFrame": end
})
instance.append(collection)
self.log.debug("Collected instance:\n"
"{}".format(pprint.pformat(instance.data)))

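A minimal example of the .json configuration the docstring describes. The key names come from the plug-in; every value is illustrative. Pointing the FILESEQUENCE environment variable at the written file makes this collector parse it:

import json

config = {
    "asset": "sh010",              # omit to fall back to AVALON_ASSET
    "subset": "renderBeauty",      # omit to use the sequence's head
    "startFrame": 1001,
    "endFrame": 1100,
    "root": "renders/beauty",      # resolved relative to the .json file
    "regex": r"beauty.*\.exr$",
    "metadata": {"colorspace": "linear"}
}

with open("publish_job.json", "w") as f:
    json.dump(config, f, indent=4)
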
View file

@@ -0,0 +1,14 @@
import os
import pyblish.api
class CollectShellWorkspace(pyblish.api.ContextPlugin):
"""Inject the current workspace into context"""
order = pyblish.api.CollectorOrder - 0.5
label = "Shell Workspace"
hosts = ["shell"]
def process(self, context):
context.data["workspaceDir"] = os.getcwd()

View file

@@ -0,0 +1,12 @@
import pyblish.api
from avalon import api
class CollectMindbenderTime(pyblish.api.ContextPlugin):
"""Store global time at the time of publish"""
label = "Collect Current Time"
order = pyblish.api.CollectorOrder
def process(self, context):
context.data["time"] = api.time()

View file

@@ -0,0 +1,349 @@
import os
import logging
import shutil
import errno
import pyblish.api
from avalon import api, io
log = logging.getLogger(__name__)
class IntegrateAsset(pyblish.api.InstancePlugin):
"""Resolve any dependency issies
This plug-in resolves any paths which, if not updated might break
the published file.
    The order of families is important: when working with lookdev you want to
    first publish the textures, update the texture paths in the nodes and then
    publish the shading network. The same goes for file-dependent assets.
"""
label = "Integrate Asset"
order = pyblish.api.IntegratorOrder
families = ["studio.animation",
"studio.camera",
"studio.imagesequence",
"studio.look",
"pype.mayaAscii",
"studio.model",
"studio.pointcache",
"studio.setdress",
"studio.rig",
"studio.vrayproxy",
"studio.yetiRig",
"studio.yeticache"]
def process(self, instance):
self.register(instance)
self.log.info("Integrating Asset in to the database ...")
self.integrate(instance)
def register(self, instance):
# Required environment variables
PROJECT = api.Session["AVALON_PROJECT"]
ASSET = instance.data.get("asset") or api.Session["AVALON_ASSET"]
LOCATION = api.Session["AVALON_LOCATION"]
context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
assert all(result["success"] for result in context.data["results"]), (
"Atomicity not held, aborting.")
# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
stagingdir = instance.data.get("stagingDir")
assert stagingdir, ("Incomplete instance \"%s\": "
"Missing reference to staging area." % instance)
        # log the staging directory in use (its existence is assumed here)
self.log.debug("Establishing staging directory @ %s" % stagingdir)
        project = io.find_one({"type": "project"},
                              projection={"config.template.publish": True})
asset = io.find_one({"type": "asset",
"name": ASSET,
"parent": project["_id"]})
assert all([project, asset]), ("Could not find current project or "
"asset '%s'" % ASSET)
subset = self.get_subset(asset, instance)
# get next version
latest_version = io.find_one({"type": "version",
"parent": subset["_id"]},
{"name": True},
sort=[("name", -1)])
next_version = 1
if latest_version is not None:
next_version += latest_version["name"]
self.log.info("Verifying version from assumed destination")
assumed_data = instance.data["assumedTemplateData"]
assumed_version = assumed_data["version"]
if assumed_version != next_version:
            raise AttributeError("Assumed version 'v{0:03d}' does not match "
                                 "the next version in the database "
                                 "('v{1:03d}')".format(assumed_version,
                                                       next_version))
self.log.debug("Next version: v{0:03d}".format(next_version))
version_data = self.create_version_data(context, instance)
version = self.create_version(subset=subset,
version_number=next_version,
locations=[LOCATION],
data=version_data)
self.log.debug("Creating version ...")
version_id = io.insert_one(version).inserted_id
# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
root = api.registered_root()
template_data = {"root": root,
"project": PROJECT,
"silo": asset['silo'],
"asset": ASSET,
"subset": subset["name"],
"version": version["name"]}
template_publish = project["config"]["template"]["publish"]
# Find the representations to transfer amongst the files
# Each should be a single representation (as such, a single extension)
representations = []
for files in instance.data["files"]:
# Collection
# _______
# |______|\
# | |\|
# | ||
# | ||
# | ||
# |_______|
#
if isinstance(files, list):
collection = files
# Assert that each member has identical suffix
_, ext = os.path.splitext(collection[0])
assert all(ext == os.path.splitext(name)[1]
for name in collection), (
"Files had varying suffixes, this is a bug"
)
assert not any(os.path.isabs(name) for name in collection)
template_data["representation"] = ext[1:]
for fname in collection:
src = os.path.join(stagingdir, fname)
dst = os.path.join(
template_publish.format(**template_data),
fname
)
instance.data["transfers"].append([src, dst])
else:
# Single file
# _______
# | |\
# | |
# | |
# | |
# |_______|
#
fname = files
assert not os.path.isabs(fname), (
"Given file name is a full path"
)
_, ext = os.path.splitext(fname)
template_data["representation"] = ext[1:]
src = os.path.join(stagingdir, fname)
dst = template_publish.format(**template_data)
instance.data["transfers"].append([src, dst])
representation = {
"schema": "avalon-core:representation-2.0",
"type": "representation",
"parent": version_id,
"name": ext[1:],
"data": {},
"dependencies": instance.data.get("dependencies", "").split(),
# Imprint shortcut to context
# for performance reasons.
"context": {
"project": PROJECT,
"asset": ASSET,
"silo": asset['silo'],
"subset": subset["name"],
"version": version["name"],
"representation": ext[1:]
}
}
representations.append(representation)
self.log.info("Registering {} items".format(len(representations)))
io.insert_many(representations)
def integrate(self, instance):
"""Move the files
Through `instance.data["transfers"]`
Args:
instance: the instance to integrate
"""
transfers = instance.data["transfers"]
for src, dest in transfers:
self.log.info("Copying file .. {} -> {}".format(src, dest))
self.copy_file(src, dest)
def copy_file(self, src, dst):
""" Copy given source to destination
Arguments:
src (str): the source file which needs to be copied
            dst (str): the destination of the source file
Returns:
None
"""
dirname = os.path.dirname(dst)
try:
os.makedirs(dirname)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
self.log.critical("An unexpected error occurred.")
raise
shutil.copy(src, dst)
def get_subset(self, asset, instance):
subset = io.find_one({"type": "subset",
"parent": asset["_id"],
"name": instance.data["subset"]})
if subset is None:
subset_name = instance.data["subset"]
self.log.info("Subset '%s' not found, creating.." % subset_name)
_id = io.insert_one({
"schema": "avalon-core:subset-2.0",
"type": "subset",
"name": subset_name,
"data": {},
"parent": asset["_id"]
}).inserted_id
subset = io.find_one({"_id": _id})
return subset
def create_version(self, subset, version_number, locations, data=None):
""" Copy given source to destination
Args:
subset (dict): the registered subset of the asset
version_number (int): the version number
locations (list): the currently registered locations
Returns:
dict: collection of data to create a version
"""
# Imprint currently registered location
version_locations = [location for location in locations if
location is not None]
return {"schema": "avalon-core:version-2.0",
"type": "version",
"parent": subset["_id"],
"name": version_number,
"locations": version_locations,
"data": data}
def create_version_data(self, context, instance):
"""Create the data collection for the version
Args:
context: the current context
instance: the current instance being published
Returns:
            dict: the version data, gathered from the context and the instance
"""
families = []
current_families = instance.data.get("families", list())
instance_family = instance.data.get("family", None)
if instance_family is not None:
families.append(instance_family)
families += current_families
# create relative source path for DB
relative_path = os.path.relpath(context.data["currentFile"],
api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment")}
        # Include optional data if present in the instance
optionals = ["startFrame", "endFrame", "step", "handles"]
for key in optionals:
if key in instance.data:
version_data[key] = instance.data[key]
return version_data

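Note that register() only records what should happen; integrate() then walks instance.data["transfers"] and copies each pair. A hedged illustration of the data shape, with made-up paths:

# Each transfer is a [src, dst] pair resolved from the publish template
transfers = [
    ["/tmp/pyblish_staging/modelDefault.ma",
     "/projects/demo/assets/hero/publish/modelDefault/v003/modelDefault.ma"],
]
for src, dst in transfers:
    print("copy {} -> {}".format(src, dst))
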
View file

@@ -0,0 +1,316 @@
import os
import json
import pprint
import re
from avalon import api, io
from avalon.vendor import requests, clique
import pyblish.api
def _get_script():
"""Get path to the image sequence script"""
try:
from pype.scripts import publish_filesequence
    except ImportError:
        raise RuntimeError("Expected module 'publish_filesequence' "
                           "to be available")
module_path = publish_filesequence.__file__
if module_path.endswith(".pyc"):
module_path = module_path[:-len(".pyc")] + ".py"
return module_path
# Helpers to retrieve previously published files when extending frames
def get_latest_version(asset_name, subset_name, family):
    # Get asset
    asset = io.find_one({"type": "asset",
                         "name": asset_name},
                        projection={"name": True})
    subset = io.find_one({"type": "subset",
                          "name": subset_name,
                          "parent": asset["_id"]},
                         projection={"_id": True, "name": True})
    # Check if the subset actually exists (pre-run check)
    assert subset, "No subset found, please publish with `extendFrames` off"
# Get version
version_projection = {"name": True,
"data.startFrame": True,
"data.endFrame": True,
"parent": True}
version = io.find_one({"type": "version",
"parent": subset["_id"],
"data.families": family},
projection=version_projection,
sort=[("name", -1)])
assert version, "No version found, this is a bug"
return version
def get_resources(version, extension=None):
"""
Get the files from the specific version
"""
query = {"type": "representation", "parent": version["_id"]}
if extension:
query["name"] = extension
representation = io.find_one(query)
    assert representation, "No representation found, this is a bug"
directory = api.get_representation_path(representation)
print("Source: ", directory)
resources = sorted([os.path.normpath(os.path.join(directory, fname))
for fname in os.listdir(directory)])
return resources
def get_resource_files(resources, frame_range, override=True):
res_collections, _ = clique.assemble(resources)
assert len(res_collections) == 1, "Multiple collections found"
res_collection = res_collections[0]
    # Remove frames that are about to be re-rendered from the copy list
if override:
for frame in frame_range:
if frame not in res_collection.indexes:
continue
res_collection.indexes.remove(frame)
return list(res_collection)
class SubmitDependentImageSequenceJobDeadline(pyblish.api.InstancePlugin):
"""Submit image sequence publish jobs to Deadline.
These jobs are dependent on a deadline job submission prior to this
plug-in.
    Renders are submitted to a Deadline Web Service whose address is
    supplied via the environment variable AVALON_DEADLINE.
Options in instance.data:
- deadlineSubmission (dict, Required): The returned .json
data from the job submission to deadline.
- outputDir (str, Required): The output directory where the metadata
            file should be generated. It's assumed that this will also be the
            final folder containing the output files.
- ext (str, Optional): The extension (including `.`) that is required
in the output filename to be picked up for image sequence
publishing.
- publishJobState (str, Optional): "Active" or "Suspended"
This defaults to "Suspended"
This requires a "startFrame" and "endFrame" to be present in instance.data
or in context.data.
"""
label = "Submit image sequence jobs to Deadline"
order = pyblish.api.IntegratorOrder + 0.1
hosts = ["fusion", "maya"]
families = ["studio.saver.deadline", "studio.renderlayer"]
def process(self, instance):
AVALON_DEADLINE = api.Session.get("AVALON_DEADLINE",
"http://localhost:8082")
assert AVALON_DEADLINE, "Requires AVALON_DEADLINE"
# Get a submission job
job = instance.data.get("deadlineSubmissionJob")
if not job:
raise RuntimeError("Can't continue without valid deadline "
"submission prior to this plug-in.")
data = instance.data.copy()
subset = data["subset"]
state = data.get("publishJobState", "Suspended")
job_name = "{batch} - {subset} [publish image sequence]".format(
batch=job["Props"]["Name"],
subset=subset
)
# Add in start/end frame
context = instance.context
start = instance.data.get("startFrame", context.data["startFrame"])
end = instance.data.get("endFrame", context.data["endFrame"])
resources = []
# Add in regex for sequence filename
        # This assumes the output files start with the subset name and end
        # with a file extension.
if "ext" in instance.data:
ext = re.escape(instance.data["ext"])
else:
ext = "\.\D+"
regex = "^{subset}.*\d+{ext}$".format(subset=re.escape(subset),
ext=ext)
# Write metadata for publish job
render_job = data.pop("deadlineSubmissionJob")
metadata = {
"regex": regex,
"startFrame": start,
"endFrame": end,
"families": ["studio.imagesequence"],
# Optional metadata (for debugging)
"metadata": {
"instance": data,
"job": job,
"session": api.Session.copy()
}
}
# Ensure output dir exists
output_dir = instance.data["outputDir"]
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
if data.get("extendFrames", False):
family = "studio.imagesequence"
override = data["overrideExistingFrame"]
# override = data.get("overrideExistingFrame", False)
out_file = render_job.get("OutFile")
if not out_file:
raise RuntimeError("OutFile not found in render job!")
extension = os.path.splitext(out_file[0])[1]
_ext = extension[1:]
# Frame comparison
prev_start = None
prev_end = None
resource_range = range(int(start), int(end)+1)
# Gather all the subset files (one subset per render pass!)
subset_names = [data["subset"]]
subset_names.extend(data.get("renderPasses", []))
for subset_name in subset_names:
version = get_latest_version(asset_name=data["asset"],
subset_name=subset_name,
family=family)
                # Set prev start / end frames for comparison on the first
                # subset; "is None" so a frame of 0 isn't treated as unset
                if prev_start is None and prev_end is None:
prev_start = version["data"]["startFrame"]
prev_end = version["data"]["endFrame"]
subset_resources = get_resources(version, _ext)
resource_files = get_resource_files(subset_resources,
resource_range,
override)
resources.extend(resource_files)
updated_start = min(start, prev_start)
updated_end = max(end, prev_end)
# Update metadata and instance start / end frame
self.log.info("Updating start / end frame : "
"{} - {}".format(updated_start, updated_end))
# TODO : Improve logic to get new frame range for the
# publish job (publish_filesequence.py)
# The current approach is not following Pyblish logic which is based
# on Collect / Validate / Extract.
# ---- Collect Plugins ---
# Collect Extend Frames - Only run if extendFrames is toggled
# # # Store in instance:
# # # Previous rendered files per subset based on frames
# # # --> Add to instance.data[resources]
# # # Update publish frame range
# ---- Validate Plugins ---
# Validate Extend Frames
# # # Check if instance has the requirements to extend frames
# There might have been some things which can be added to the list
# Please do so when fixing this.
# Start frame
metadata["startFrame"] = updated_start
metadata["metadata"]["instance"]["startFrame"] = updated_start
# End frame
metadata["endFrame"] = updated_end
metadata["metadata"]["instance"]["endFrame"] = updated_end
metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(output_dir, metadata_filename)
with open(metadata_path, "w") as f:
json.dump(metadata, f, indent=4, sort_keys=True)
# Generate the payload for Deadline submission
payload = {
"JobInfo": {
"Plugin": "Python",
"BatchName": job["Props"]["Batch"],
"Name": job_name,
"JobType": "Normal",
"JobDependency0": job["_id"],
"UserName": job["Props"]["User"],
"Comment": instance.context.data.get("comment", ""),
"InitialStatus": state
},
"PluginInfo": {
"Version": "3.6",
"ScriptFile": _get_script(),
"Arguments": '--path "{}"'.format(metadata_path),
"SingleFrameOnly": "True"
},
# Mandatory for Deadline, may be empty
"AuxFiles": []
}
# Transfer the environment from the original job to this dependent
# job so they use the same environment
environment = job["Props"].get("Env", {})
payload["JobInfo"].update({
"EnvironmentKeyValue%d" % index: "{key}={value}".format(
key=key,
value=environment[key]
) for index, key in enumerate(environment)
})
self.log.info("Submitting..")
self.log.info(json.dumps(payload, indent=4, sort_keys=True))
url = "{}/api/jobs".format(AVALON_DEADLINE)
response = requests.post(url, json=payload)
if not response.ok:
raise Exception(response.text)
        # Copy files from previous render if extendFrames is True
if data.get("extendFrames", False):
self.log.info("Preparing to copy ..")
import shutil
dest_path = data["outputDir"]
for source in resources:
src_file = os.path.basename(source)
dest = os.path.join(dest_path, src_file)
shutil.copy(source, dest)
self.log.info("Finished copying %i files" % len(resources))

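For orientation, the metadata file written above lands next to the rendered frames and is what the dependent publish job reads back via --path. A trimmed, illustrative example of its contents (keys from the code above, values made up):

metadata = {
    "regex": r"^renderBeauty.*\d+\.exr$",
    "startFrame": 1001,
    "endFrame": 1100,
    "families": ["studio.imagesequence"],
    "metadata": {
        "instance": {"subset": "renderBeauty", "asset": "sh010"},
        "job": {"Props": {"Name": "sh010 - renderBeauty"}},
        "session": {"AVALON_PROJECT": "demo"}
    }
}
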
View file

@@ -0,0 +1,15 @@
import pyblish.api
class ValidateCurrentSaveFile(pyblish.api.ContextPlugin):
"""File must be saved before publishing"""
label = "Validate File Saved"
order = pyblish.api.ValidatorOrder - 0.1
hosts = ["maya", "houdini"]
def process(self, context):
current_file = context.data["currentFile"]
if not current_file:
raise RuntimeError("File not saved")

View file

@@ -0,0 +1,34 @@
import pyblish.api
class ValidateSequenceFrames(pyblish.api.InstancePlugin):
"""Ensure the sequence of frames is complete
The files found in the folder are checked against the startFrame and
    endFrame of the instance. If the first or last file does not correspond
    to the first or last frame, the instance is flagged as invalid.
"""
order = pyblish.api.ValidatorOrder
label = "Validate Sequence Frames"
families = ["studio.imagesequence"]
hosts = ["shell"]
def process(self, instance):
collection = instance[0]
self.log.info(collection)
frames = list(collection.indexes)
current_range = (frames[0], frames[-1])
required_range = (instance.data["startFrame"],
instance.data["endFrame"])
if current_range != required_range:
raise ValueError("Invalid frame range: {0} - "
"expected: {1}".format(current_range,
required_range))
missing = collection.holes().indexes
assert not missing, "Missing frames: %s" % (missing,)
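clique reports gaps through holes(), which returns a collection whose indexes are the missing frames; a quick illustration with made-up frames:

from avalon.vendor import clique

collection = clique.Collection(head="shot01.", tail=".exr", padding=4,
                               indexes={1001, 1002, 1005})
missing = collection.holes().indexes
print(sorted(missing))  # [1003, 1004]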