Merge remote-tracking branch 'origin/develop' into develop

Milan Kolar 2020-03-23 18:13:44 +01:00
commit 378a3fbb68
11 changed files with 757 additions and 46 deletions

View file

@@ -1,6 +1,7 @@
[flake8]
# ignore = D203
ignore = BLK100
ignore = W504
max-line-length = 79
exclude =
.git,

View file

@@ -229,6 +229,8 @@ def is_latest(representation):
"""
version = io.find_one({"_id": representation['parent']})
if version["type"] == "master_version":
return True
# Get highest version under the parent
highest_version = io.find_one({

View file

@@ -1573,10 +1573,9 @@ class ExporterReviewMov(ExporterReview):
self.nodes = {}
# deal with lut defined in the viewer
if hasattr(klass, "viewer_lut_raw"):
self.viewer_lut_raw = klass.viewer_lut_raw
else:
self.viewer_lut_raw = False
self.viewer_lut_raw = klass.viewer_lut_raw
self.bake_colorspace_fallback = klass.bake_colorspace_fallback
self.bake_colorspace_main = klass.bake_colorspace_main
self.name = name or "baked"
self.ext = ext or "mov"
@@ -1637,8 +1636,26 @@ class ExporterReviewMov(ExporterReview):
self.log.debug("ViewProcess... `{}`".format(self._temp_nodes))
if not self.viewer_lut_raw:
# OCIODisplay node
dag_node = nuke.createNode("OCIODisplay")
colorspaces = [
self.bake_colorspace_main, self.bake_colorspace_fallback
]
if any(colorspaces):
# OCIOColorSpace with controlled output
dag_node = nuke.createNode("OCIOColorSpace")
for c in colorspaces:
test = dag_node["out_colorspace"].setValue(str(c))
if test:
self.log.info(
"Baking in colorspace... `{}`".format(c))
break
if not test:
dag_node = nuke.createNode("OCIODisplay")
else:
# OCIODisplay
dag_node = nuke.createNode("OCIODisplay")
# connect
dag_node.setInput(0, self.previous_node)
self._temp_nodes.append(dag_node)
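The branch added above reads as: prefer an OCIOColorSpace node whose output is the first preset colorspace the knob accepts, otherwise fall back to a plain OCIODisplay node. A minimal sketch of that priority logic outside of Nuke (`pick_bake_colorspace` and `accepts` are illustrative names):

def pick_bake_colorspace(main, fallback, accepts):
    # `accepts` stands in for the knob's setValue() returning True
    # when the colorspace name exists in the loaded OCIO config
    for colorspace in (main, fallback):
        if colorspace and accepts(colorspace):
            return colorspace
    # neither preset applied -> caller creates an OCIODisplay instead
    return None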

View file

@@ -0,0 +1,604 @@
import os
import copy
import clique
import errno
import shutil
from pymongo import InsertOne, ReplaceOne
import pyblish.api
from avalon import api, io, schema
from avalon.vendor import filelink
class IntegrateMasterVersion(pyblish.api.InstancePlugin):
label = "Integrate Master Version"
# Must happen after IntegrateNew
order = pyblish.api.IntegratorOrder + 0.1
optional = True
families = [
"model",
"rig",
"setdress",
"look",
"pointcache",
"animation"
]
# Can specify representation names that will be ignored (lower case)
ignored_representation_names = []
db_representation_context_keys = [
"project", "asset", "task", "subset", "representation",
"family", "hierarchy", "task", "username"
]
# TODO add family filtering
# QUESTION/TODO this process should happen on the server if it crashed
# due to a permissions error on files (files were in use or the user
# didn't have permissions)
# *but all other plugins must be successfully completed
def process(self, instance):
self.log.debug(
"--- Integration of Master version for subset `{}` begins.".format(
instance.data.get("subset", str(instance))
)
)
published_repres = instance.data.get("published_representations")
if not published_repres:
self.log.debug(
"*** There are not published representations on the instance."
)
return
project_name = api.Session["AVALON_PROJECT"]
# TODO raise error if master not set?
anatomy = instance.context.data["anatomy"]
if "master" not in anatomy.templates:
self.log.warning("!!! Anatomy does not have set `master` key!")
return
if "path" not in anatomy.templates["master"]:
self.log.warning((
"!!! There is not set `path` template in `master` anatomy"
" for project \"{}\"."
).format(project_name))
return
master_template = anatomy.templates["master"]["path"]
self.log.debug("`Master` template check was successful. `{}`".format(
master_template
))
master_publish_dir = self.get_publish_dir(instance)
src_version_entity = instance.data.get("versionEntity")
filtered_repre_ids = []
for repre_id, repre_info in published_repres.items():
repre = repre_info["representation"]
if repre["name"].lower() in self.ignored_representation_names:
self.log.debug(
"Filtering representation with name: `{}`".format(
repre["name"].lower()
)
)
filtered_repre_ids.append(repre_id)
for repre_id in filtered_repre_ids:
published_repres.pop(repre_id, None)
if not published_repres:
self.log.debug(
"*** All published representations were filtered by name."
)
return
if src_version_entity is None:
self.log.debug((
"Published version entity was not sent in representation data."
" Querying entity from database."
))
src_version_entity = (
self.version_from_representations(published_repres)
)
if not src_version_entity:
self.log.warning((
"!!! Can't find origin version in database."
" Skipping Master version publish."
))
return
all_copied_files = []
transfers = instance.data.get("transfers", list())
for _src, dst in transfers:
dst = os.path.normpath(dst)
if dst not in all_copied_files:
all_copied_files.append(dst)
hardlinks = instance.data.get("hardlinks", list())
for _src, dst in hardlinks:
dst = os.path.normpath(dst)
if dst not in all_copied_files:
all_copied_files.append(dst)
all_repre_file_paths = []
for repre_info in published_repres.values():
published_files = repre_info.get("published_files") or []
for file_path in published_files:
file_path = os.path.normpath(file_path)
if file_path not in all_repre_file_paths:
all_repre_file_paths.append(file_path)
# TODO this is not the best practice for getting publish resources
# WARNING due to this we must remove all files from master publish dir
instance_publish_dir = os.path.normpath(
instance.data["publishDir"]
)
other_file_paths_mapping = []
for file_path in all_copied_files:
# Check if it is from publishDir
if not file_path.startswith(instance_publish_dir):
continue
if file_path in all_repre_file_paths:
continue
dst_filepath = file_path.replace(
instance_publish_dir, master_publish_dir
)
other_file_paths_mapping.append((file_path, dst_filepath))
# Current version
old_version, old_repres = (
self.current_master_ents(src_version_entity)
)
old_repres_by_name = {
repre["name"].lower(): repre for repre in old_repres
}
if old_version:
new_version_id = old_version["_id"]
else:
new_version_id = io.ObjectId()
new_master_version = {
"_id": new_version_id,
"version_id": src_version_entity["_id"],
"parent": src_version_entity["parent"],
"type": "master_version",
"schema": "pype:master_version-1.0"
}
schema.validate(new_master_version)
# Don't make changes in database until everything is O.K.
bulk_writes = []
if old_version:
self.log.debug("Replacing old master version.")
bulk_writes.append(
ReplaceOne(
{"_id": new_master_version["_id"]},
new_master_version
)
)
else:
self.log.debug("Creating first master version.")
bulk_writes.append(
InsertOne(new_master_version)
)
# Separate old representations into `to replace` and `to delete`
old_repres_to_replace = {}
old_repres_to_delete = {}
for repre_info in published_repres.values():
repre = repre_info["representation"]
repre_name_low = repre["name"].lower()
if repre_name_low in old_repres_by_name:
old_repres_to_replace[repre_name_low] = (
old_repres_by_name.pop(repre_name_low)
)
if old_repres_by_name:
old_repres_to_delete = old_repres_by_name
archived_repres = list(io.find({
# Find archived representations of the master version
"type": "archived_representation",
"parent": new_version_id
}))
archived_repres_by_name = {}
for repre in archived_repres:
repre_name_low = repre["name"].lower()
archived_repres_by_name[repre_name_low] = repre
backup_master_publish_dir = None
if os.path.exists(master_publish_dir):
backup_master_publish_dir = master_publish_dir + ".BACKUP"
max_idx = 10
idx = 0
_backup_master_publish_dir = backup_master_publish_dir
while os.path.exists(_backup_master_publish_dir):
self.log.debug((
"Backup folder already exists."
" Trying to remove \"{}\""
).format(_backup_master_publish_dir))
try:
shutil.rmtree(_backup_master_publish_dir)
backup_master_publish_dir = _backup_master_publish_dir
break
except Exception:
self.log.info((
"Could not remove previous backup folder."
" Trying to add index to folder name"
))
_backup_master_publish_dir = (
backup_master_publish_dir + str(idx)
)
if not os.path.exists(_backup_master_publish_dir):
backup_master_publish_dir = _backup_master_publish_dir
break
if idx > max_idx:
raise AssertionError((
"Backup folder indexes are exhausted (max index \"{}\")."
).format(max_idx))
idx += 1
self.log.debug("Backup folder path is \"{}\"".format(
backup_master_publish_dir
))
try:
os.rename(master_publish_dir, backup_master_publish_dir)
except PermissionError:
raise AssertionError((
"Could not create master version because it is not"
" possible to replace current master files."
))
try:
src_to_dst_file_paths = []
for repre_info in published_repres.values():
# Skip if new repre does not have published repre files
published_files = repre_info["published_files"]
if len(published_files) == 0:
continue
# Prepare anatomy data
anatomy_data = repre_info["anatomy_data"]
anatomy_data.pop("version", None)
# Get filled path to repre context
anatomy_filled = anatomy.format(anatomy_data)
template_filled = anatomy_filled["master"]["path"]
repre_data = {
"path": str(template_filled),
"template": master_template
}
repre_context = template_filled.used_values
for key in self.db_representation_context_keys:
if (
key in repre_context or
key not in anatomy_data
):
continue
repre_context[key] = anatomy_data[key]
# Prepare new repre
repre = copy.deepcopy(repre_info["representation"])
repre["parent"] = new_master_version["_id"]
repre["context"] = repre_context
repre["data"] = repre_data
repre.pop("_id", None)
schema.validate(repre)
repre_name_low = repre["name"].lower()
# Replace current representation
if repre_name_low in old_repres_to_replace:
old_repre = old_repres_to_replace.pop(repre_name_low)
repre["_id"] = old_repre["_id"]
bulk_writes.append(
ReplaceOne(
{"_id": old_repre["_id"]},
repre
)
)
# Unarchive representation
elif repre_name_low in archived_repres_by_name:
archived_repre = archived_repres_by_name.pop(
repre_name_low
)
old_id = archived_repre["old_id"]
repre["_id"] = old_id
bulk_writes.append(
ReplaceOne(
{"old_id": old_id},
repre
)
)
# Create representation
else:
repre["_id"] = io.ObjectId()
bulk_writes.append(
InsertOne(repre)
)
# Prepare paths of source and destination files
if len(published_files) == 1:
src_to_dst_file_paths.append(
(published_files[0], template_filled)
)
continue
collections, remainders = clique.assemble(published_files)
if remainders or not collections or len(collections) > 1:
raise Exception((
"Integrity error. Files of published representation "
"is combination of frame collections and single files."
"Collections: `{}` Single files: `{}`"
).format(str(collections), str(remainders)))
src_col = collections[0]
# Get head and tail for collection
frame_splitter = "_-_FRAME_SPLIT_-_"
anatomy_data["frame"] = frame_splitter
_anatomy_filled = anatomy.format(anatomy_data)
_template_filled = _anatomy_filled["master"]["path"]
head, tail = _template_filled.split(frame_splitter)
padding = (
anatomy.templates["render"]["padding"]
)
dst_col = clique.Collection(
head=head, padding=padding, tail=tail
)
dst_col.indexes.clear()
dst_col.indexes.update(src_col.indexes)
for src_file, dst_file in zip(src_col, dst_col):
src_to_dst_file_paths.append(
(src_file, dst_file)
)
self.path_checks = []
# Copy(hardlink) paths of source and destination files
# TODO should we *only* create hardlinks?
# TODO should we keep files for deletion until this is successful?
for src_path, dst_path in src_to_dst_file_paths:
self.copy_file(src_path, dst_path)
for src_path, dst_path in other_file_paths_mapping:
self.copy_file(src_path, dst_path)
# Archive not replaced old representations
for repre_name_low, repre in old_repres_to_delete.items():
# Replace archived representation (this is a backup)
# - having both a repre and an archived repre should not happen
if repre_name_low in archived_repres_by_name:
archived_repre = archived_repres_by_name.pop(
repre_name_low
)
repre["old_id"] = repre["_id"]
repre["_id"] = archived_repre["_id"]
repre["type"] = archived_repre["type"]
bulk_writes.append(
ReplaceOne(
{"_id": archived_repre["_id"]},
repre
)
)
else:
repre["old_id"] = repre["_id"]
repre["_id"] = io.ObjectId()
repre["type"] = "archived_representation"
bulk_writes.append(
InsertOne(repre)
)
if bulk_writes:
io._database[io.Session["AVALON_PROJECT"]].bulk_write(
bulk_writes
)
# Remove backed-up previous master
if (
backup_master_publish_dir is not None and
os.path.exists(backup_master_publish_dir)
):
shutil.rmtree(backup_master_publish_dir)
except Exception:
if (
backup_master_publish_dir is not None and
os.path.exists(backup_master_publish_dir)
):
os.rename(backup_master_publish_dir, master_publish_dir)
self.log.error((
"!!! Creating of Master version failed."
" Previous master version maybe lost some data!"
))
raise
self.log.debug((
"--- Master version integration for subset `{}`"
" seems to be successful."
).format(
instance.data.get("subset", str(instance))
))
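# In short, the safety pattern above: rename the current master folder
# to a ".BACKUP" sibling, copy/hardlink the new files, apply all
# database changes in a single bulk_write and only then remove the
# backup; on any exception the backup is renamed back, restoring the
# previous master version.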
def get_all_files_from_path(self, path):
files = []
for (dir_path, dir_names, file_names) in os.walk(path):
for file_name in file_names:
_path = os.path.join(dir_path, file_name)
files.append(_path)
return files
def get_publish_dir(self, instance):
anatomy = instance.context.data["anatomy"]
template_data = copy.deepcopy(instance.data["anatomyData"])
if "folder" in anatomy.templates["master"]:
anatomy_filled = anatomy.format(template_data)
publish_folder = anatomy_filled["master"]["folder"]
else:
# This is for cases of Deprecated anatomy without `folder`
# TODO remove when all clients have solved this issue
template_data.update({
"frame": "FRAME_TEMP",
"representation": "TEMP"
})
anatomy_filled = anatomy.format(template_data)
# solve deprecated situation when `folder` key is not underneath
# `master` anatomy
project_name = api.Session["AVALON_PROJECT"]
self.log.warning((
"Deprecation warning: Anatomy does not have `folder`"
" key set underneath `master` (globally or for project `{}`)."
).format(project_name))
file_path = anatomy_filled["master"]["path"]
# Directory
publish_folder = os.path.dirname(file_path)
publish_folder = os.path.normpath(publish_folder)
self.log.debug("Master publish dir: \"{}\"".format(publish_folder))
return publish_folder
def copy_file(self, src_path, dst_path):
# TODO check if drives are the same to decide if hardlink can be used
dst_path = self.path_root_check(dst_path)
src_path = self.path_root_check(src_path)
dirname = os.path.dirname(dst_path)
try:
os.makedirs(dirname)
self.log.debug("Folder(s) created: \"{}\"".format(dirname))
except OSError as exc:
if exc.errno != errno.EEXIST:
self.log.error("An unexpected error occurred.", exc_info=True)
raise
self.log.debug("Folder already exists: \"{}\"".format(dirname))
self.log.debug("Copying file \"{}\" to \"{}\"".format(
src_path, dst_path
))
# First try hardlink, then copy if paths are cross-drive
try:
filelink.create(src_path, dst_path, filelink.HARDLINK)
# Return when successful
return
except OSError as exc:
# re-raise exception if it is not a cross-drive error
if exc.errno != errno.EXDEV:
raise
shutil.copy(src_path, dst_path)
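# The hardlink-then-copy pattern above in isolation (a sketch assuming
# POSIX-style errno behaviour; `src` and `dst` are illustrative):
#
#     try:
#         os.link(src, dst)  # same drive: instant, no extra disk space
#     except OSError as exc:
#         if exc.errno != errno.EXDEV:  # EXDEV = cross-device link
#             raise
#         shutil.copy(src, dst)  # cross drive: fall back to full copy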
def path_root_check(self, path):
normalized_path = os.path.normpath(path)
forward_slash_path = normalized_path.replace("\\", "/")
drive, _path = os.path.splitdrive(normalized_path)
if os.path.exists(drive + "/"):
key = "drive_check{}".format(drive)
if key not in self.path_checks:
self.log.debug(
"Drive \"{}\" exist. Nothing to change.".format(drive)
)
self.path_checks.append(key)
return normalized_path
path_env_key = "PYPE_STUDIO_PROJECTS_PATH"
mount_env_key = "PYPE_STUDIO_PROJECTS_MOUNT"
missing_envs = []
if path_env_key not in os.environ:
missing_envs.append(path_env_key)
if mount_env_key not in os.environ:
missing_envs.append(mount_env_key)
if missing_envs:
key = "missing_envs"
if key not in self.path_checks:
self.path_checks.append(key)
_add_s = ""
if len(missing_envs) > 1:
_add_s = "s"
self.log.warning((
"Can't replace MOUNT drive path to UNC path due to missing"
" environment variable{}: `{}`. This may cause issues"
" during publishing process."
).format(_add_s, ", ".join(missing_envs)))
return normalized_path
unc_root = os.environ[path_env_key].replace("\\", "/")
mount_root = os.environ[mount_env_key].replace("\\", "/")
# --- Remove slashes at the end of mount and unc roots ---
while unc_root.endswith("/"):
unc_root = unc_root[:-1]
while mount_root.endswith("/"):
mount_root = mount_root[:-1]
# ---
if forward_slash_path.startswith(unc_root):
self.log.debug((
"Path already starts with UNC root: \"{}\""
).format(unc_root))
return normalized_path
if not forward_slash_path.startswith(mount_root):
self.log.warning((
"Path do not start with MOUNT root \"{}\" "
"set in environment variable \"{}\""
).format(unc_root, mount_env_key))
return normalized_path
# Replace Mount root with Unc root
path = unc_root + forward_slash_path[len(mount_root):]
return os.path.normpath(path)
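# Example of the replacement above (values are illustrative):
#   PYPE_STUDIO_PROJECTS_MOUNT = "P:/projects"
#   PYPE_STUDIO_PROJECTS_PATH = "//studio/projects"
#   "P:/projects/show/sh010" -> "//studio/projects/show/sh010"
# os.path.normpath then converts separators for the current platform.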
def version_from_representations(self, repres):
for repre_info in repres.values():
version = io.find_one(
{"_id": repre_info["representation"]["parent"]}
)
if version:
return version
def current_master_ents(self, version):
master_version = io.find_one({
"parent": version["parent"],
"type": "master_version"
})
if not master_version:
return (None, [])
master_repres = list(io.find({
"parent": master_version["_id"],
"type": "representation"
}))
return (master_version, master_repres)
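The frame-splitter technique in `process` above (fill the template once with a sentinel in place of the frame number, split on it, and rebuild the destination collection) can be shown in isolation. A minimal sketch with made-up paths and padding:

import clique

frame_splitter = "_-_FRAME_SPLIT_-_"
# pretend this is the master template filled with the sentinel as "frame"
template_filled = "/proj/asset/master/render.{}.exr".format(frame_splitter)
head, tail = template_filled.split(frame_splitter)

dst_col = clique.Collection(head=head, padding=4, tail=tail)
dst_col.indexes.update([1001, 1002, 1003])
for path in dst_col:
    print(path)  # /proj/asset/master/render.1001.exr, ...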

View file

@@ -162,6 +162,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
)
subset = self.get_subset(asset_entity, instance)
instance.data["subsetEntity"] = subset
version_number = instance.data["version"]
self.log.debug("Next version: v{}".format(version_number))
@@ -236,6 +237,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
bulk_writes
)
version = io.find_one({"_id": version_id})
instance.data["versionEntity"] = version
existing_repres = list(io.find({
"parent": version_id,
"type": "archived_representation"
@@ -260,11 +264,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
if 'transfers' not in instance.data:
instance.data['transfers'] = []
published_representations = {}
for idx, repre in enumerate(instance.data["representations"]):
published_files = []
# create template data for Anatomy
template_data = copy.deepcopy(anatomy_data)
if intent is not None:
template_data["intent"] = intent
if intent_value is not None:
template_data["intent"] = intent_value
resolution_width = repre.get("resolutionWidth")
resolution_height = repre.get("resolutionHeight")
@@ -368,6 +375,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("source: {}".format(src))
instance.data["transfers"].append([src, dst])
published_files.append(dst)
# for adding first frame into db
if not dst_start_frame:
dst_start_frame = dst_padding
@@ -375,7 +384,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst = "{0}{1}{2}".format(
dst_head,
dst_start_frame,
dst_tail).replace("..", ".")
dst_tail
).replace("..", ".")
repre['published_path'] = self.unc_convert(dst)
else:
@@ -403,9 +413,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
instance.data["transfers"].append([src, dst])
published_files.append(dst)
repre['published_path'] = self.unc_convert(dst)
self.log.debug("__ dst: {}".format(dst))
repre["publishedFiles"] = published_files
for key in self.db_representation_context_keys:
value = template_data.get(key)
if not value:
@@ -452,6 +465,11 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("__ destination_list: {}".format(destination_list))
instance.data['destination_list'] = destination_list
representations.append(representation)
published_representations[repre_id] = {
"representation": representation,
"anatomy_data": template_data,
"published_files": published_files
}
self.log.debug("__ representations: {}".format(representations))
# Remove old representations if there are any (before insertion of new)
@@ -466,7 +484,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("__ represNAME: {}".format(rep['name']))
self.log.debug("__ represPATH: {}".format(rep['published_path']))
io.insert_many(representations)
instance.data["published_representations"] = representations
instance.data["published_representations"] = (
published_representations
)
# self.log.debug("Representation: {}".format(representations))
self.log.info("Registered {} items".format(len(representations)))

View file

@@ -18,17 +18,23 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
order = pyblish.api.IntegratorOrder + 0.01
families = ["review"]
required_context_keys = [
"project", "asset", "task", "subset", "version"
]
def process(self, instance):
if not os.environ.get("AVALON_THUMBNAIL_ROOT"):
self.log.info("AVALON_THUMBNAIL_ROOT is not set."
" Skipping thumbnail integration.")
self.log.warning(
"AVALON_THUMBNAIL_ROOT is not set."
" Skipping thumbnail integration."
)
return
published_repres = instance.data.get("published_representations")
if not published_repres:
self.log.debug(
"There are not published representation ids on the instance."
"There are no published representations on the instance."
)
return
@@ -36,21 +42,22 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
anatomy = instance.context.data["anatomy"]
if "publish" not in anatomy.templates:
raise AssertionError("Anatomy does not have set publish key!")
self.log.warning("Anatomy is missing the \"publish\" key!")
return
if "thumbnail" not in anatomy.templates["publish"]:
raise AssertionError((
"There is not set \"thumbnail\" template for project \"{}\""
self.log.warning((
"There is no \"thumbnail\" template set for the project \"{}\""
).format(project_name))
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
io.install()
return
thumb_repre = None
for repre in published_repres:
thumb_repre_anatomy_data = None
for repre_info in published_repres.values():
repre = repre_info["representation"]
if repre["name"].lower() == "thumbnail":
thumb_repre = repre
thumb_repre_anatomy_data = repre_info["anatomy_data"]
break
if not thumb_repre:
@@ -59,6 +66,10 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
)
return
io.install()
thumbnail_template = anatomy.templates["publish"]["thumbnail"]
version = io.find_one({"_id": thumb_repre["parent"]})
if not version:
raise AssertionError(
@@ -80,7 +91,7 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
thumbnail_id = ObjectId()
# Prepare anatomy template fill data
template_data = copy.deepcopy(thumb_repre["context"])
template_data = copy.deepcopy(thumb_repre_anatomy_data)
template_data.update({
"_id": str(thumbnail_id),
"thumbnail_root": os.environ.get("AVALON_THUMBNAIL_ROOT"),
@@ -89,15 +100,9 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
})
anatomy_filled = anatomy.format(template_data)
final_path = anatomy_filled.get("publish", {}).get("thumbnail")
if not final_path:
raise AssertionError((
"Anatomy template was not filled with entered data"
"\nTemplate: {} "
"\nData: {}"
).format(thumbnail_template, str(template_data)))
template_filled = anatomy_filled["publish"]["thumbnail"]
dst_full_path = os.path.normpath(final_path)
dst_full_path = os.path.normpath(str(template_filled))
self.log.debug(
"Copying file .. {} -> {}".format(src_full_path, dst_full_path)
)
@@ -115,13 +120,20 @@ class IntegrateThumbnails(pyblish.api.InstancePlugin):
template_data.pop("_id")
template_data.pop("thumbnail_root")
repre_context = template_filled.used_values
for key in self.required_context_keys:
value = template_data.get(key)
if not value:
continue
repre_context[key] = template_data[key]
thumbnail_entity = {
"_id": thumbnail_id,
"type": "thumbnail",
"schema": "pype:thumbnail-1.0",
"data": {
"template": thumbnail_template,
"template_data": template_data
"template_data": repre_context
}
}
# Create thumbnail entity

View file

@@ -170,7 +170,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"review": ["lutPath"],
"render.farm": ["bakeScriptPath", "bakeRenderPath",
"bakeWriteNodeName", "version"]
}
}
# list of family names to transfer to new family if present
families_transfer = ["render3d", "render2d", "ftrack", "slate"]
@@ -276,7 +276,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# if override remove all frames we are expecting to be rendered
# so we'll copy only those missing from current render
if instance.data.get("overrideExistingFrame"):
for frame in range(start, end+1):
for frame in range(start, end + 1):
if frame not in r_col.indexes:
continue
r_col.indexes.remove(frame)
@@ -348,10 +348,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
assert len(cols) == 1, "only one image sequence type is expected"
# create subset name `familyTaskSubset_AOV`
subset_name = 'render{}{}{}{}_{}'.format(
group_name = 'render{}{}{}{}'.format(
task[0].upper(), task[1:],
subset[0].upper(), subset[1:],
aov)
subset[0].upper(), subset[1:])
subset_name = '{}_{}'.format(group_name, aov)
staging = os.path.dirname(list(cols[0])[0])
@@ -366,6 +367,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
new_instance = copy(instance_data)
new_instance["subset"] = subset_name
new_instance["subsetGroup"] = group_name
ext = cols[0].tail.lstrip(".")
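The grouping change above, with illustrative values:

task, subset, aov = "lighting", "main", "beauty"
group_name = 'render{}{}{}{}'.format(
    task[0].upper(), task[1:],
    subset[0].upper(), subset[1:])
subset_name = '{}_{}'.format(group_name, aov)
# group_name  -> "renderLightingMain"
# subset_name -> "renderLightingMain_beauty"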

View file

@@ -211,19 +211,23 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
"attachTo": attachTo,
"setMembers": layer_name,
"publish": True,
"frameStart": int(context.data["assetEntity"]['data']['frameStart']),
"frameEnd": int(context.data["assetEntity"]['data']['frameEnd']),
"frameStartHandle": int(self.get_render_attribute("startFrame",
layer=layer_name)),
"frameEndHandle": int(self.get_render_attribute("endFrame",
layer=layer_name)),
"frameStart": int(
context.data["assetEntity"]['data']['frameStart']),
"frameEnd": int(
context.data["assetEntity"]['data']['frameEnd']),
"frameStartHandle": int(
self.get_render_attribute("startFrame", layer=layer_name)),
"frameEndHandle": int(
self.get_render_attribute("endFrame", layer=layer_name)),
"byFrameStep": int(
self.get_render_attribute("byFrameStep",
layer=layer_name)),
"renderer": self.get_render_attribute("currentRenderer",
layer=layer_name),
"handleStart": int(context.data["assetEntity"]['data']['handleStart']),
"handleEnd": int(context.data["assetEntity"]['data']['handleEnd']),
"handleStart": int(
context.data["assetEntity"]['data']['handleStart']),
"handleEnd": int(
context.data["assetEntity"]['data']['handleEnd']),
# instance subset
"family": "renderlayer",

View file

@@ -3,7 +3,7 @@ import pyblish.api
from avalon.nuke import lib as anlib
from pype.nuke import lib as pnlib
import pype
reload(pnlib)
class ExtractReviewDataMov(pype.api.Extractor):
"""Extracts movie and thumbnail with baked in luts
@@ -18,6 +18,11 @@ class ExtractReviewDataMov(pype.api.Extractor):
families = ["review", "render", "render.local"]
hosts = ["nuke"]
# presets
viewer_lut_raw = None
bake_colorspace_fallback = None
bake_colorspace_main = None
def process(self, instance):
families = instance.data["families"]
self.log.info("Creating staging dir...")

View file

@@ -296,7 +296,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
args=args,
overwrite=overwrite
)
print(command)
# print(command)
proc = subprocess.Popen(command, shell=True)
proc.communicate()

View file

@@ -0,0 +1,44 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "pype:master_version-1.0",
"description": "Master version of asset",
"type": "object",
"additionalProperties": true,
"required": [
"version_id",
"schema",
"type",
"parent"
],
"properties": {
"_id": {
"description": "Document's id (database will create it's if not entered)",
"example": "ObjectId(592c33475f8c1b064c4d1696)"
},
"version_id": {
"description": "The version ID from which it was created",
"example": "ObjectId(592c33475f8c1b064c4d1695)"
},
"schema": {
"description": "The schema associated with this document",
"type": "string",
"enum": ["avalon-core:master_version-1.0", "pype:master_version-1.0"],
"example": "pype:master_version-1.0"
},
"type": {
"description": "The type of document",
"type": "string",
"enum": ["master_version"],
"example": "master_version"
},
"parent": {
"description": "Unique identifier to parent document",
"example": "ObjectId(592c33475f8c1b064c4d1697)"
}
}
}