Merge branch 'develop' into feature/PYPE-654-nks-cut-reference-videos
This commit is contained in: commit eb28198319
49 changed files with 1493 additions and 1524 deletions
@@ -68,6 +68,9 @@ class FusionSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": filename,

# Asset dependency to wait for at least the scene file to sync.
"AssetDependency0": filepath,

# Job name, as seen in Monitor
"Name": filename,
@@ -1,13 +1,14 @@
"""Collect Anatomy and global anatomy data.
"""Collect global context Anatomy data.

Requires:
context -> anatomy
context -> projectEntity
context -> assetEntity
context -> username
context -> datetimeData
session -> AVALON_TASK
projectEntity, assetEntity -> collect_avalon_entities *(pyblish.api.CollectorOrder)
username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001)
datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder)

Provides:
context -> anatomy (pypeapp.Anatomy)
context -> anatomyData
"""
@@ -15,45 +16,51 @@ import os
import json

from avalon import api, lib
from pypeapp import Anatomy
import pyblish.api


class CollectAnatomy(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
class CollectAnatomyContextData(pyblish.api.ContextPlugin):
"""Collect Anatomy Context data.

Example:
context.data["anatomyData"] = {
"project": {
"name": "MyProject",
"code": "myproj"
},
"asset": "AssetName",
"hierarchy": "path/to/asset",
"task": "Working",
"username": "MeDespicable",

*** OPTIONAL ***
"app": "maya"  # Current application base name
+ multiple keys from `datetimeData`  # see its collector
}
"""

order = pyblish.api.CollectorOrder + 0.002
label = "Collect Anatomy"
label = "Collect Anatomy Context Data"

def process(self, context):
root_path = api.registered_root()
task_name = api.Session["AVALON_TASK"]

project_entity = context.data["projectEntity"]
asset_entity = context.data["assetEntity"]

project_name = project_entity["name"]

context.data["anatomy"] = Anatomy(project_name)
self.log.info(
"Anatomy object collected for project \"{}\".".format(project_name)
)

hierarchy_items = asset_entity["data"]["parents"]
hierarchy = ""
if hierarchy_items:
hierarchy = os.path.join(*hierarchy_items)

context_data = {
"root": root_path,
"project": {
"name": project_name,
"name": project_entity["name"],
"code": project_entity["data"].get("code")
},
"asset": asset_entity["name"],
"hierarchy": hierarchy.replace("\\", "/"),
"task": task_name,

"username": context.data["user"]
}
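For illustration, a minimal runnable sketch of how the `hierarchy` value in `anatomyData` is derived from the asset's parents; the asset document below is a hypothetical stand-in for `context.data["assetEntity"]`:

```python
import os

# Hypothetical stand-in for the Avalon asset entity the collector reads.
asset_entity = {
    "name": "AssetName",
    "data": {"parents": ["path", "to", "asset"]},
}

hierarchy_items = asset_entity["data"]["parents"]
hierarchy = ""
if hierarchy_items:
    # os.path.join uses the platform separator, hence the backslash
    # normalization mirrored from the collector.
    hierarchy = os.path.join(*hierarchy_items)

print(hierarchy.replace("\\", "/"))  # path/to/asset
```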
@@ -28,11 +28,11 @@ from avalon import io
import pyblish.api


class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
"""Fill templates with data needed for publish"""
class CollectAnatomyInstanceData(pyblish.api.InstancePlugin):
"""Collect Instance specific Anatomy data."""

order = pyblish.api.CollectorOrder + 0.49
label = "Collect instance anatomy data"
label = "Collect Anatomy Instance data"

def process(self, instance):
# get all the stuff from the database

pype/plugins/global/publish/collect_anatomy_object.py (new file, 32 lines)
@@ -0,0 +1,32 @@
"""Collect Anatomy object.

Requires:
os.environ -> AVALON_PROJECT

Provides:
context -> anatomy (pypeapp.Anatomy)
"""
import os
from pypeapp import Anatomy
import pyblish.api


class CollectAnatomyObject(pyblish.api.ContextPlugin):
"""Collect Anatomy object into Context"""

order = pyblish.api.CollectorOrder - 0.4
label = "Collect Anatomy Object"

def process(self, context):
project_name = os.environ.get("AVALON_PROJECT")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` is not set."
" Could not initialize project's Anatomy."
)

context.data["anatomy"] = Anatomy(project_name)

self.log.info(
"Anatomy object collected for project \"{}\".".format(project_name)
)
@@ -15,7 +15,7 @@ import pyblish.api
class CollectAvalonEntities(pyblish.api.ContextPlugin):
"""Collect Avalon entities into Context"""

order = pyblish.api.CollectorOrder - 0.02
order = pyblish.api.CollectorOrder - 0.1
label = "Collect Avalon Entities"

def process(self, context):
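The order changes in this commit are what sequence the new anatomy collectors. As a hedged illustration of how pyblish schedules them (offset values copied from the plugins above):

```python
# Pyblish runs plugins in ascending `order`, so these offsets (taken from
# the collectors in this commit) encode the dependency chain their
# docstrings describe.
orders = {
    "CollectAnatomyObject": -0.4,        # provides context["anatomy"]
    "CollectRenderedFiles": -0.2,        # needs anatomy to fill roots
    "CollectAvalonEntities": -0.1,       # provides project/asset entities
    "CollectAnatomyContextData": 0.002,  # needs entities, user, datetime data
}
for name in sorted(orders, key=orders.get):
    print("{:>7} {}".format(orders[name], name))
```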
@@ -1,11 +1,18 @@
"""Loads publishing context from json and continues in publish process.

Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)

Provides:
context, instances -> All data from previous publishing process.
"""

import os
import json

import pyblish.api
from avalon import api

from pypeapp import PypeLauncher


class CollectRenderedFiles(pyblish.api.ContextPlugin):
"""
@@ -13,14 +20,17 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
`PYPE_PUBLISH_DATA`. Those files _MUST_ share same context.

"""
order = pyblish.api.CollectorOrder - 0.1
order = pyblish.api.CollectorOrder - 0.2
targets = ["filesequence"]
label = "Collect rendered frames"

_context = None

def _load_json(self, path):
assert os.path.isfile(path), ("path to json file doesn't exist")
path = path.strip('\"')
assert os.path.isfile(path), (
"Path to json file doesn't exist. \"{}\"".format(path)
)
data = None
with open(path, "r") as json_file:
try:
@@ -32,7 +42,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
)
return data

def _process_path(self, data):
def _fill_staging_dir(self, data_object, anatomy):
staging_dir = data_object.get("stagingDir")
if staging_dir:
data_object["stagingDir"] = anatomy.fill_root(staging_dir)

def _process_path(self, data, anatomy):
# validate basic necessary data
data_err = "invalid json file - missing data"
required = ["asset", "user", "comment",
@@ -66,14 +81,23 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
os.environ["FTRACK_SERVER"] = ftrack["FTRACK_SERVER"]

# now we can just add instances from json file and we are done
for instance in data.get("instances"):
for instance_data in data.get("instances"):
self.log.info("  - processing instance for {}".format(
instance.get("subset")))
i = self._context.create_instance(instance.get("subset"))
self.log.info("remapping paths ...")
i.data["representations"] = [PypeLauncher().path_remapper(
data=r) for r in instance.get("representations")]
i.data.update(instance)
instance_data.get("subset")))
instance = self._context.create_instance(
instance_data.get("subset")
)
self.log.info("Filling stagingDir...")

self._fill_staging_dir(instance_data, anatomy)
instance.data.update(instance_data)

representations = []
for repre_data in instance_data.get("representations") or []:
self._fill_staging_dir(repre_data, anatomy)
representations.append(repre_data)

instance.data["representations"] = representations

def process(self, context):
self._context = context
@@ -82,13 +106,39 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
"Missing `PYPE_PUBLISH_DATA`")
paths = os.environ["PYPE_PUBLISH_DATA"].split(os.pathsep)

session_set = False
for path in paths:
data = self._load_json(path)
if not session_set:
self.log.info("Setting session using data from file")
api.Session.update(data.get("session"))
os.environ.update(data.get("session"))
session_set = True
assert data, "failed to load json file"
self._process_path(data)
project_name = os.environ.get("AVALON_PROJECT")
if project_name is None:
raise AssertionError(
"Environment `AVALON_PROJECT` was not found."
" Could not set project `root` which may cause issues."
)

# TODO root filling should happen after collect Anatomy
self.log.info("Getting root setting for project \"{}\"".format(
project_name
))

anatomy = context.data["anatomy"]
self.log.info("anatomy: {}".format(anatomy.roots))
try:
session_is_set = False
for path in paths:
path = anatomy.fill_root(path)
data = self._load_json(path)
assert data, "failed to load json file"
if not session_is_set:
session_data = data["session"]
remapped = anatomy.roots_obj.path_remapper(
session_data["AVALON_WORKDIR"]
)
if remapped:
session_data["AVALON_WORKDIR"] = remapped

self.log.info("Setting session using data from file")
api.Session.update(session_data)
os.environ.update(session_data)
session_is_set = True
self._process_path(data, anatomy)
except Exception as e:
self.log.error(e, exc_info=True)
raise Exception("Error") from e
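A hedged sketch of what `anatomy.fill_root` accomplishes for the publish-data paths handled above; the `{root[...]}` token shape and the roots mapping are assumptions for illustration, the real values come from the project's Anatomy configuration:

```python
# Assumed token shape and roots mapping, for illustration only.
def fill_root(path, roots):
    # Replace each known root token with its configured value.
    for name, value in roots.items():
        path = path.replace("{root[%s]}" % name, value)
    return path

path = "{root[work]}/MyProject/renders/metadata.json"
print(fill_root(path, {"work": "P:/projects"}))
# P:/projects/MyProject/renders/metadata.json
```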
@@ -193,7 +193,6 @@ class ExtractBurnin(pype.api.Extractor):
self.log.debug("Output: {}".format(output))

repre_update = {
"anatomy_template": repre.get("anatomy_template", "render"),
"files": movieFileBurnin,
"name": repre["name"],
"tags": [x for x in repre["tags"] if x != "delete"]
@@ -481,9 +481,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):

def copy_file(self, src_path, dst_path):
# TODO check drives if are the same to check if can hardlink
dst_path = self.path_root_check(dst_path)
src_path = self.path_root_check(src_path)

dirname = os.path.dirname(dst_path)

try:
@@ -513,75 +510,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):

shutil.copy(src_path, dst_path)

def path_root_check(self, path):
normalized_path = os.path.normpath(path)
forward_slash_path = normalized_path.replace("\\", "/")

drive, _path = os.path.splitdrive(normalized_path)
if os.path.exists(drive + "/"):
key = "drive_check{}".format(drive)
if key not in self.path_checks:
self.log.debug(
"Drive \"{}\" exists. Nothing to change.".format(drive)
)
self.path_checks.append(key)

return normalized_path

path_env_key = "PYPE_STUDIO_PROJECTS_PATH"
mount_env_key = "PYPE_STUDIO_PROJECTS_MOUNT"
missing_envs = []
if path_env_key not in os.environ:
missing_envs.append(path_env_key)

if mount_env_key not in os.environ:
missing_envs.append(mount_env_key)

if missing_envs:
key = "missing_envs"
if key not in self.path_checks:
self.path_checks.append(key)
_add_s = ""
if len(missing_envs) > 1:
_add_s = "s"

self.log.warning((
"Can't replace MOUNT drive path with UNC path due to missing"
" environment variable{}: `{}`. This may cause issues"
" during publishing process."
).format(_add_s, ", ".join(missing_envs)))

return normalized_path

unc_root = os.environ[path_env_key].replace("\\", "/")
mount_root = os.environ[mount_env_key].replace("\\", "/")

# --- Remove slashes at the end of mount and unc roots ---
while unc_root.endswith("/"):
unc_root = unc_root[:-1]

while mount_root.endswith("/"):
mount_root = mount_root[:-1]
# ---

if forward_slash_path.startswith(unc_root):
self.log.debug((
"Path already starts with UNC root: \"{}\""
).format(unc_root))
return normalized_path

if not forward_slash_path.startswith(mount_root):
self.log.warning((
"Path does not start with MOUNT root \"{}\""
" set in environment variable \"{}\""
).format(unc_root, mount_env_key))
return normalized_path

# Replace Mount root with Unc root
path = unc_root + forward_slash_path[len(mount_root):]

return os.path.normpath(path)

def version_from_representations(self, repres):
for repre in repres:
version = io.find_one({"_id": repre["parent"]})
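For reference, a runnable sketch of the mount-to-UNC prefix swap the removed `path_root_check` helper performed, reading the same two roots; the concrete paths are illustrative only:

```python
import os

# Illustrative mount -> UNC swap; real roots came from
# PYPE_STUDIO_PROJECTS_MOUNT / PYPE_STUDIO_PROJECTS_PATH.
def to_unc(path, mount_root, unc_root):
    forward = os.path.normpath(path).replace("\\", "/")
    mount_root = mount_root.replace("\\", "/").rstrip("/")
    unc_root = unc_root.replace("\\", "/").rstrip("/")
    if forward.startswith(unc_root):
        return os.path.normpath(path)   # already a UNC path
    if not forward.startswith(mount_root):
        return os.path.normpath(path)   # not under the mount root
    return os.path.normpath(unc_root + forward[len(mount_root):])

print(to_unc("P:/projects/show/scene.ma",
             "P:/projects", "//studio/share/projects"))
```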
@@ -41,10 +41,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
'name': representation name (usually the same as extension)
'ext': file extension
optional data
'anatomy_template': 'publish' or 'render', etc.
template from anatomy that should be used for
integrating this file. Only the first level can
be specified right now.
"frameStart"
"frameEnd"
'fps'
@@ -93,6 +89,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"family", "hierarchy", "task", "username"
]
default_template_name = "publish"
template_name_profiles = None

def process(self, instance):
@@ -269,6 +266,8 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
if 'transfers' not in instance.data:
instance.data['transfers'] = []

template_name = self.template_name_from_instance(instance)

published_representations = {}
for idx, repre in enumerate(instance.data["representations"]):
published_files = []
@@ -293,9 +292,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
if repre.get('stagingDir'):
stagingdir = repre['stagingDir']

template_name = (
repre.get('anatomy_template') or self.default_template_name
)
if repre.get("outputName"):
template_data["output"] = repre['outputName']
@@ -332,6 +328,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
test_dest_files.append(
os.path.normpath(template_filled)
)
template_data["frame"] = repre_context["frame"]

self.log.debug(
"test_dest_files: {}".format(str(test_dest_files)))
@@ -395,7 +392,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst_start_frame,
dst_tail
).replace("..", ".")
repre['published_path'] = self.unc_convert(dst)
repre['published_path'] = dst

else:
# Single file
@@ -423,7 +420,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
instance.data["transfers"].append([src, dst])

published_files.append(dst)
repre['published_path'] = self.unc_convert(dst)
repre['published_path'] = dst
self.log.debug("__ dst: {}".format(dst))

repre["publishedFiles"] = published_files
@@ -527,23 +524,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("Hardlinking file .. {} -> {}".format(src, dest))
self.hardlink_file(src, dest)

def unc_convert(self, path):
self.log.debug("> __ path: `{}`".format(path))
drive, _path = os.path.splitdrive(path)
self.log.debug("> __ drive, _path: `{}`, `{}`".format(drive, _path))

if not os.path.exists(drive + "/"):
self.log.info("Converting to unc from environments ..")

path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH")
path_mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT")

if "/" in path_mount:
path = path.replace(path_mount[0:-1], path_replace)
else:
path = path.replace(path_mount, path_replace)
return path

def copy_file(self, src, dst):
""" Copy given source to destination

@@ -553,8 +533,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
Returns:
None
"""
src = self.unc_convert(src)
dst = self.unc_convert(dst)
src = os.path.normpath(src)
dst = os.path.normpath(dst)
self.log.debug("Copying file .. {} -> {}".format(src, dst))
@@ -582,9 +560,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
def hardlink_file(self, src, dst):
dirname = os.path.dirname(dst)

src = self.unc_convert(src)
dst = self.unc_convert(dst)

try:
os.makedirs(dirname)
except OSError as e:
@@ -669,30 +644,35 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
families.append(instance_family)
families += current_families

self.log.debug("Registered root: {}".format(api.registered_root()))

# create relative source path for DB
try:
source = instance.data['source']
except KeyError:
if "source" in instance.data:
source = instance.data["source"]
else:
source = context.data["currentFile"]
self.log.debug("source: {}".format(source))
source = str(source).replace(
os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"),
api.registered_root()
anatomy = instance.context.data["anatomy"]
success, rootless_path = (
anatomy.roots_obj.find_root_template_from_path(source)
)
relative_path = os.path.relpath(source, api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
if success:
source = rootless_path
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(source))

self.log.debug("Source: {}".format(source))
version_data = {"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment"),
"machine": context.data.get("machine"),
"fps": context.data.get(
"fps", instance.data.get("fps"))}
version_data = {
"families": families,
"time": context.data["time"],
"author": context.data["user"],
"source": source,
"comment": context.data.get("comment"),
"machine": context.data.get("machine"),
"fps": context.data.get(
"fps", instance.data.get("fps")
)
}

intent_value = instance.context.data.get("intent")
if intent_value and isinstance(intent_value, dict):
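A minimal stand-in for `roots_obj.find_root_template_from_path`, assuming it returns `(True, path with a root template token)` when the path sits under a configured root; the real method lives in pypeapp's Anatomy:

```python
# Stand-in only: return shape and token format are assumptions.
def find_root_template_from_path(path, roots):
    normalized = path.replace("\\", "/")
    for name, root in roots.items():
        root = root.replace("\\", "/").rstrip("/")
        if normalized.startswith(root + "/"):
            # Keep the path portable by swapping the root for a token.
            return True, "{root[%s]}%s" % (name, normalized[len(root):])
    return False, path

ok, rootless = find_root_template_from_path(
    "P:/projects/show/work/scene.ma", {"work": "P:/projects"}
)
print(ok, rootless)  # True {root[work]}/show/work/scene.ma
```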
@@ -711,3 +691,70 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
version_data[key] = instance.data[key]

return version_data

def main_family_from_instance(self, instance):
"""Returns main family of entered instance."""
family = instance.data.get("family")
if not family:
family = instance.data["families"][0]
return family

def template_name_from_instance(self, instance):
template_name = self.default_template_name
if not self.template_name_profiles:
self.log.debug((
"Template name profiles are not set."
" Using default \"{}\""
).format(template_name))
return template_name

# Task name from session?
task_name = io.Session.get("AVALON_TASK")
family = self.main_family_from_instance(instance)

matching_profiles = {}
highest_value = -1
self.log.info(self.template_name_profiles)
for name, filters in self.template_name_profiles.items():
value = 0
families = filters.get("families")
if families:
if family not in families:
continue
value += 1

tasks = filters.get("tasks")
if tasks:
if task_name not in tasks:
continue
value += 1

if value > highest_value:
matching_profiles = {}
highest_value = value

if value == highest_value:
matching_profiles[name] = filters

if len(matching_profiles) == 1:
template_name = list(matching_profiles.keys())[0]
self.log.debug(
"Using template name \"{}\".".format(template_name)
)

elif len(matching_profiles) > 1:
template_name = list(matching_profiles.keys())[0]
self.log.warning((
"More than one template profile matched"
" Family \"{}\" and Task: \"{}\"."
" Using first matched template name \"{}\"."
).format(family, task_name, template_name))

else:
self.log.debug((
"None of the template profiles matched"
" Family \"{}\" and Task: \"{}\"."
" Using default template name \"{}\""
).format(family, task_name, template_name))

return template_name
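A condensed, runnable version of the profile scoring introduced above: a profile earns a point per matching filter, is skipped when a filter excludes it, and the highest score wins (ties keep the first profile seen):

```python
def match_template_name(profiles, family, task_name, default="publish"):
    # Score each profile; a filter either excludes the profile outright
    # or adds one point when it matches.
    best_name, best_value = default, -1
    for name, filters in profiles.items():
        value = 0
        families = filters.get("families")
        if families:
            if family not in families:
                continue
            value += 1
        tasks = filters.get("tasks")
        if tasks:
            if task_name not in tasks:
                continue
            value += 1
        if value > best_value:
            best_name, best_value = name, value
    return best_name

profiles = {"render": {"families": ["render"]}, "publish": {}}
print(match_template_name(profiles, "render", "compositing"))  # render
```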
@@ -1,3 +1,6 @@
# -*- coding: utf-8 -*-
"""Submit publishing job to farm."""

import os
import json
import re
@@ -10,7 +13,7 @@ import pyblish.api


def _get_script():
"""Get path to the image sequence script"""
"""Get path to the image sequence script."""
try:
from pype.scripts import publish_filesequence
except Exception:
@@ -20,17 +23,11 @@ def _get_script():
if module_path.endswith(".pyc"):
module_path = module_path[: -len(".pyc")] + ".py"

module_path = os.path.normpath(module_path)
mount_root = os.path.normpath(os.environ["PYPE_STUDIO_CORE_MOUNT"])
network_root = os.path.normpath(os.environ["PYPE_STUDIO_CORE_PATH"])

module_path = module_path.replace(mount_root, network_root)

return module_path
return os.path.normpath(module_path)


# Logic to retrieve latest files concerning extendFrames
def get_latest_version(asset_name, subset_name, family):
"""Retrieve latest files concerning extendFrame feature."""
# Get asset
asset_name = io.find_one(
{"type": "asset", "name": asset_name}, projection={"name": True}
@@ -64,9 +61,7 @@ def get_latest_version(asset_name, subset_name, family):


def get_resources(version, extension=None):
"""
Get the files from the specific version
"""
"""Get the files from the specific version."""
query = {"type": "representation", "parent": version["_id"]}
if extension:
query["name"] = extension
@@ -86,14 +81,25 @@ def get_resources(version, extension=None):
return resources


def get_resource_files(resources, frame_range, override=True):
def get_resource_files(resources, frame_range=None):
"""Get resource files at given path.

If `frame_range` is specified, those outside it will be removed.

Arguments:
resources (list): List of resources
frame_range (list): Frame range to apply override

Returns:
list of str: list of collected resources

"""
res_collections, _ = clique.assemble(resources)
assert len(res_collections) == 1, "Multiple collections found"
res_collection = res_collections[0]

# Remove any frames
if override:
if frame_range is not None:
for frame in frame_range:
if frame not in res_collection.indexes:
continue
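A sketch of the clique mechanics `get_resource_files` relies on; the hunk above is truncated before the removal line, so the trimming direction below illustrates the API rather than the exact filter:

```python
import clique

# Assemble a file sequence into a single collection, then drop frames
# from its index set; iterating the collection yields file names.
files = ["shot.1001.exr", "shot.1002.exr", "shot.1003.exr", "shot.1004.exr"]
collections, remainder = clique.assemble(files)
assert len(collections) == 1, "Multiple collections found"
collection = collections[0]

for frame in [1003, 1004]:
    if frame not in collection.indexes:
        continue
    collection.indexes.remove(frame)

print(list(collection))  # ['shot.1001.exr', 'shot.1002.exr']
```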
@@ -146,16 +152,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
aov_filter = {"maya": ["beauty"]}

enviro_filter = [
"PATH",
"PYTHONPATH",
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER",
"PYPE_ROOT",
"PYPE_METADATA_FILE",
"PYPE_STUDIO_PROJECTS_PATH",
"PYPE_STUDIO_PROJECTS_MOUNT",
"AVALON_PROJECT"
"AVALON_PROJECT",
"PYPE_LOG_NO_COLORS"
]

# pool used to do the publishing job
@@ -177,10 +179,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
families_transfer = ["render3d", "render2d", "ftrack", "slate"]

def _submit_deadline_post_job(self, instance, job):
"""
"""Submit publish job to Deadline.

Deadline specific code separated from :meth:`process` for sake of
more universal code. Muster post job is sent directly by Muster
submitter, so this type of code isn't necessary for it.

"""
data = instance.data.copy()
subset = data["subset"]
@@ -188,14 +192,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
batch=job["Props"]["Name"], subset=subset
)

metadata_filename = "{}_metadata.json".format(subset)
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)

metadata_path = os.path.normpath(metadata_path)
mount_root = os.path.normpath(os.environ["PYPE_STUDIO_PROJECTS_MOUNT"])
network_root = os.environ["PYPE_STUDIO_PROJECTS_PATH"]
metadata_path = metadata_path.replace(mount_root, network_root)
# Convert output dir to `{root}/rest/of/path/...` with Anatomy
success, rootless_path = (
self.anatomy.roots_obj.find_root_template_from_path(output_dir)
)
if not success:
# `rootless_path` is not set to `output_dir` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(output_dir))
rootless_path = output_dir

# Generate the payload for Deadline submission
payload = {
@@ -222,6 +230,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

# Transfer the environment from the original job to this dependent
# job so they use the same environment
metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(rootless_path, metadata_filename)

environment = job["Props"].get("Env", {})
environment["PYPE_METADATA_FILE"] = metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
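A short sketch of why the dependent job's metadata path stays portable: the rootless form keeps the root template token, so the farm machine resolves its own root. All values below are illustrative:

```python
import os

# Hypothetical subset name and rootless output directory.
subset = "renderCompositingMain"
rootless_path = "{root[work]}/show/renders/sh010"

metadata_filename = "{}_metadata.json".format(subset)
metadata_path = os.path.join(rootless_path, metadata_filename)

environment = {"PYPE_METADATA_FILE": metadata_path}
print(environment["PYPE_METADATA_FILE"])
# {root[work]}/show/renders/sh010/renderCompositingMain_metadata.json (on POSIX)
```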
@@ -256,14 +267,17 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
raise Exception(response.text)

def _copy_extend_frames(self, instance, representation):
"""
"""Copy existing frames from latest version.

This will copy all existing frames from subset's latest version back
to render directory and rename them to what renderer is expecting.

:param instance: instance to get required data from
:type instance: pyblish.plugin.Instance
"""
Arguments:
instance (pyblish.plugin.Instance): instance to get required
data from
representation (dict): representation to operate on

"""
import speedcopy

self.log.info("Preparing to copy ...")
@@ -303,9 +317,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# type
assert fn is not None, "padding string wasn't found"
# list of tuples (source, destination)
staging = representation.get("stagingDir")
staging = self.anatomy.fill_roots(staging)
resource_files.append(
(frame,
os.path.join(representation.get("stagingDir"),
os.path.join(staging,
"{}{}{}".format(pre,
fn.group("frame"),
post)))
@@ -325,19 +341,20 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"Finished copying %i files" % len(resource_files))

def _create_instances_for_aov(self, instance_data, exp_files):
"""
"""Create instance for each AOV found.

This will create new instance for every aov it can detect in expected
files list.

:param instance_data: skeleton data for instance (those needed) later
by collector
:type instance_data: pyblish.plugin.Instance
:param exp_files: list of expected files divided by aovs
:type exp_files: list
:returns: list of instances
:rtype: list(publish.plugin.Instance)
"""
Arguments:
instance_data (pyblish.plugin.Instance): skeleton data for instance
(those needed) later by collector
exp_files (list): list of expected files divided by aovs

Returns:
list of instances

"""
task = os.environ["AVALON_TASK"]
subset = instance_data["subset"]
instances = []
@@ -361,6 +378,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
subset_name = '{}_{}'.format(group_name, aov)

staging = os.path.dirname(list(cols[0])[0])
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))

self.log.info("Creating data for: {}".format(subset_name))
@@ -386,7 +413,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"frameEnd": int(instance_data.get("frameEndHandle")),
# If expectedFile are absolute, we need only filenames
"stagingDir": staging,
"anatomy_template": "render",
"fps": new_instance.get("fps"),
"tags": ["review"] if preview else []
}
@@ -404,26 +430,28 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
return instances

def _get_representations(self, instance, exp_files):
"""
"""Create representations for file sequences.

This will return representations of expected files if they are not
in hierarchy of aovs. There should be only one sequence of files for
most cases, but if not - we create representation from each of them.

:param instance: instance for which we are setting representations
:type instance: pyblish.plugin.Instance
:param exp_files: list of expected files
:type exp_files: list
:returns: list of representations
:rtype: list(dict)
"""
Arguments:
instance (pyblish.plugin.Instance): instance for which we are
setting representations
exp_files (list): list of expected files

Returns:
list of representations

"""
representations = []
cols, rem = clique.assemble(exp_files)
collections, remainders = clique.assemble(exp_files)
bake_render_path = instance.get("bakeRenderPath")

# create representation for every collected sequence
for c in cols:
ext = c.tail.lstrip(".")
for collection in collections:
ext = collection.tail.lstrip(".")
preview = False
# if filtered aov name is found in filename, toggle it for
# preview video rendering
@@ -432,7 +460,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
for aov in self.aov_filter[app]:
if re.match(
r".+(?:\.|_)({})(?:\.|_).*".format(aov),
list(c)[0]
list(collection)[0]
):
preview = True
break
@@ -441,15 +469,26 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if bake_render_path:
preview = False

staging = os.path.dirname(list(collection)[0])
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))

rep = {
"name": ext,
"ext": ext,
"files": [os.path.basename(f) for f in list(c)],
"files": [os.path.basename(f) for f in list(collection)],
"frameStart": int(instance.get("frameStartHandle")),
"frameEnd": int(instance.get("frameEndHandle")),
# If expectedFile are absolute, we need only filenames
"stagingDir": os.path.dirname(list(c)[0]),
"anatomy_template": "render",
"stagingDir": staging,
"fps": instance.get("fps"),
"tags": ["review", "preview"] if preview else [],
}
@@ -462,19 +501,30 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
self._solve_families(instance, preview)

# add remainders as representations
for r in rem:
ext = r.split(".")[-1]
for remainder in remainders:
ext = remainder.split(".")[-1]

staging = os.path.dirname(remainder)
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(staging)
)
if success:
staging = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging))

rep = {
"name": ext,
"ext": ext,
"files": os.path.basename(r),
"stagingDir": os.path.dirname(r),
"anatomy_template": "publish"
"files": os.path.basename(remainder),
"stagingDir": os.path.dirname(remainder),
}
if r in bake_render_path:
if remainder in bake_render_path:
rep.update({
"fps": instance.get("fps"),
"anatomy_template": "render",
"tags": ["review", "delete"]
})
# solve families with `preview` attributes
@@ -496,7 +546,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
instance["families"] = families

def process(self, instance):
"""
"""Process plugin.

Detect type of renderfarm submission and create and post dependent job
in case of Deadline. It creates json file with metadata needed for
publishing in directory of render.
@@ -507,6 +558,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
data = instance.data.copy()
context = instance.context
self.context = context
self.anatomy = instance.context.data["anatomy"]

if hasattr(instance, "_log"):
data['_log'] = instance._log
@@ -566,11 +618,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
except KeyError:
source = context.data["currentFile"]

source = source.replace(
os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"), api.registered_root()
success, rootless_path = (
self.anatomy.roots_obj.find_root_template_from_path(source)
)
relative_path = os.path.relpath(source, api.registered_root())
source = os.path.join("{root}", relative_path).replace("\\", "/")
if success:
source = rootless_path

else:
# `rootless_path` is not set to `source` if none of roots match
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues."
).format(source))

families = ["render"]
@@ -621,13 +680,29 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

# look into instance data for representations that have the tag
# `publish_on_farm` and include them
for r in instance.data.get("representations", []):
if "publish_on_farm" in r.get("tags"):
for repre in instance.data.get("representations", []):
staging_dir = repre.get("stagingDir")
if staging_dir:
success, rootless_staging_dir = (
self.anatomy.roots_obj.find_root_template_from_path(
staging_dir
)
)
if success:
repre["stagingDir"] = rootless_staging_dir
else:
self.log.warning((
"Could not find root path for remapping \"{}\"."
" This may cause issues on farm."
).format(staging_dir))
repre["stagingDir"] = staging_dir

if "publish_on_farm" in repre.get("tags"):
# create representations attribute if not there
if "representations" not in instance_skeleton_data.keys():
instance_skeleton_data["representations"] = []

instance_skeleton_data["representations"].append(r)
instance_skeleton_data["representations"].append(repre)

instances = None
assert data.get("expectedFiles"), ("Submission from old Pype version"
@@ -764,12 +839,21 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
with open(metadata_path, "w") as f:
json.dump(publish_job, f, indent=4, sort_keys=True)

def _extend_frames(self, asset, subset, start, end, override):
"""
This will get latest version of asset and update frame range based
on minimum and maximum values
"""
def _extend_frames(self, asset, subset, start, end):
"""Get latest version of asset and update frame range.

Based on minimum and maximum values.

Arguments:
asset (str): asset name
subset (str): subset name
start (int): start frame
end (int): end frame

Returns:
(int, int): updated frame start/end

"""
# Frame comparison
prev_start = None
prev_end = None
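With illustrative numbers, what the reworked `_extend_frames` computes: widen the submitted range by the previous version's range.

```python
# Illustrative values only; the real ones come from the latest
# published version and the current submission.
prev_start, prev_end = 1001, 1050  # latest published version
start, end = 1040, 1100            # current submission

updated_start = min(start, prev_start)
updated_end = max(end, prev_end)
print(updated_start, updated_end)  # 1001 1100
```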
@@ -122,6 +122,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
workspace = context.data["workspaceDir"]

self._rs = renderSetup.instance()
current_layer = self._rs.getVisibleRenderLayer()
maya_render_layers = {l.name(): l for l in self._rs.getRenderLayers()}

self.maya_layers = maya_render_layers
@@ -306,6 +307,10 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
instance.data.update(data)
self.log.debug("data: {}".format(json.dumps(data, indent=4)))

# Restore current layer.
self.log.info("Restoring to {}".format(current_layer.name()))
self._rs.switchToLayer(current_layer)

def parse_options(self, render_globals):
"""Get all overrides with a value, skip those without
@@ -400,6 +405,8 @@ class ExpectedFiles:
multipart = False

def get(self, renderer, layer):
renderSetup.instance().switchToLayerUsingLegacyName(layer)

if renderer.lower() == "arnold":
return self._get_files(ExpectedFilesArnold(layer))
elif renderer.lower() == "vray":
@@ -73,7 +73,6 @@ class ExtractYetiCache(pype.api.Extractor):
'ext': 'fur',
'files': cache_files[0] if len(cache_files) == 1 else cache_files,
'stagingDir': dirname,
'anatomy_template': 'publish',
'frameStart': int(start_frame),
'frameEnd': int(end_frame)
}
@@ -84,8 +83,7 @@ class ExtractYetiCache(pype.api.Extractor):
'name': 'fursettings',
'ext': 'fursettings',
'files': os.path.basename(data_file),
'stagingDir': dirname,
'anatomy_template': 'publish'
'stagingDir': dirname
}
)
@@ -169,8 +169,7 @@ class ExtractYetiRig(pype.api.Extractor):
'name': "ma",
'ext': 'ma',
'files': "yeti_rig.ma",
'stagingDir': dirname,
'anatomy_template': 'publish'
'stagingDir': dirname
}
)
self.log.info("settings file: {}".format("yeti.rigsettings"))
@@ -179,8 +178,7 @@ class ExtractYetiRig(pype.api.Extractor):
'name': 'rigsettings',
'ext': 'rigsettings',
'files': 'yeti.rigsettings',
'stagingDir': dirname,
'anatomy_template': 'publish'
'stagingDir': dirname
}
)
@@ -1,6 +1,17 @@
# -*- coding: utf-8 -*-
"""Submitting render job to Deadline.

This module takes care of submitting a job from Maya to Deadline. It
creates the job and sets correct environments. Its behavior is controlled
by the `DEADLINE_REST_URL` environment variable - pointing to the Deadline
Web Service - and the `MayaSubmitDeadline.use_published (bool)` property
telling Deadline whether to use the published scene workfile or not.
"""

import os
import json
import getpass
import re
import clique

from maya import cmds
@@ -14,7 +25,7 @@ import pype.maya.lib as lib


def get_renderer_variables(renderlayer=None):
"""Retrieve the extension which has been set in the VRay settings
"""Retrieve the extension which has been set in the VRay settings.

Will return None if the current renderer is not VRay
For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which
@@ -25,16 +36,21 @@ def get_renderer_variables(renderlayer=None):

Returns:
dict
"""

"""
renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])

padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
render_attrs["padding"]))

filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]

filename_0 = cmds.renderSettings(
fullPath=True,
gin="#" * int(padding),
lut=True,
layer=renderlayer or lib.get_current_renderlayer())[0]
filename_0 = filename_0.replace('_<RenderPass>', '_beauty')
prefix_attr = "defaultRenderGlobals.imageFilePrefix"
if renderer == "vray":
# Maya's renderSettings function does not return V-Ray file extension
# so we get the extension from vraySettings
@@ -46,62 +62,33 @@ def get_renderer_variables(renderlayer=None):
if extension is None:
extension = "png"

filename_prefix = "<Scene>/<Scene>_<Layer>/<Layer>"
if extension == "exr (multichannel)" or extension == "exr (deep)":
extension = "exr"

prefix_attr = "vraySettings.fileNamePrefix"
elif renderer == "renderman":
prefix_attr = "rmanGlobals.imageFileFormat"
elif renderer == "redshift":
# mapping redshift extension dropdown values to strings
ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"]
extension = ext_mapping[
cmds.getAttr("redshiftOptions.imageFormat")
]
else:
# Get the extension, getAttr defaultRenderGlobals.imageFormat
# returns an index number.
filename_base = os.path.basename(filename_0)
extension = os.path.splitext(filename_base)[-1].strip(".")
filename_prefix = cmds.getAttr("defaultRenderGlobals.imageFilePrefix")

filename_prefix = cmds.getAttr(prefix_attr)
return {"ext": extension,
"filename_prefix": filename_prefix,
"padding": padding,
"filename_0": filename_0}


def preview_fname(folder, scene, layer, padding, ext):
"""Return output file path with #### for padding.

Deadline requires the path to be formatted with # in place of numbers.
For example `/path/to/render.####.png`

Args:
folder (str): The root output folder (image path)
scene (str): The scene name
layer (str): The layer name to be rendered
padding (int): The padding length
ext (str): The output file extension

Returns:
str

"""
fileprefix = cmds.getAttr("defaultRenderGlobals.imageFilePrefix")
output = fileprefix + ".{number}.{ext}"
# RenderPass is currently hardcoded to "beauty" because it's not important
# for the deadline submission, but we will need something to replace
# "<RenderPass>".
mapping = {
"<Scene>": "{scene}",
"<RenderLayer>": "{layer}",
"RenderPass": "beauty"
}
for key, value in mapping.items():
output = output.replace(key, value)
output = output.format(
scene=scene,
layer=layer,
number="#" * padding,
ext=ext
)

return os.path.join(folder, output)


class MayaSubmitDeadline(pyblish.api.InstancePlugin):
"""Submit available render layers to Deadline
"""Submit available render layers to Deadline.

Renders are submitted to a Deadline Web Service as
supplied via the environment variable DEADLINE_REST_URL
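A runnable walk-through of `preview_fname` above, assuming a typical Maya image file prefix in place of the `cmds.getAttr("defaultRenderGlobals.imageFilePrefix")` call:

```python
import os

# Assumed prefix; in Maya this comes from defaultRenderGlobals.
fileprefix = "<Scene>/<Scene>_<RenderLayer>/<RenderLayer>"
output = fileprefix + ".{number}.{ext}"
mapping = {
    "<Scene>": "{scene}",
    "<RenderLayer>": "{layer}",
    "RenderPass": "beauty",
}
for key, value in mapping.items():
    output = output.replace(key, value)

output = output.format(scene="sh010_comp", layer="beauty",
                       number="#" * 4, ext="exr")
print(os.path.join("renders", output))
# renders/sh010_comp/sh010_comp_beauty/beauty.####.exr  (on POSIX)
```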
@@ -194,22 +181,22 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):

filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
scene = os.path.splitext(filename)[0]
dirname = os.path.join(workspace, "renders")
renderlayer = instance.data['setMembers']  # rs_beauty
renderlayer_name = instance.data['subset']  # beauty
# renderlayer_globals = instance.data["renderGlobals"]
# legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
deadline_user = context.data.get("deadlineUser", getpass.getuser())
jobname = "%s - %s" % (filename, instance.name)

# Get the variables depending on the renderer
render_variables = get_renderer_variables(renderlayer)
output_filename_0 = preview_fname(folder=dirname,
scene=scene,
layer=renderlayer_name,
padding=render_variables["padding"],
ext=render_variables["ext"])
filename_0 = render_variables["filename_0"]
if self.use_published:
new_scene = os.path.splitext(filename)[0]
orig_scene = os.path.splitext(
os.path.basename(context.data["currentFile"]))[0]
filename_0 = render_variables["filename_0"].replace(
orig_scene, new_scene)

output_filename_0 = filename_0

try:
# Ensure render folder exists
@@ -226,6 +213,9 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": filename,

# Asset dependency to wait for at least the scene file to sync.
"AssetDependency0": filepath,

# Job name, as seen in Monitor
"Name": jobname,
@@ -284,7 +274,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
for aov, files in exp[0].items():
col = clique.assemble(files)[0][0]
outputFile = col.format('{head}{padding}{tail}')
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile  # noqa: E501
OutputFilenames[expIndex] = outputFile
expIndex += 1
else:
@@ -293,7 +283,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
# OutputFilenames[expIndex] = outputFile


# We need those to pass them to pype for it to set correct context
keys = [
"FTRACK_API_KEY",
@@ -334,7 +323,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
raise Exception(response.text)

# Store output dir for unified publisher (filesequence)
instance.data["outputDir"] = os.path.dirname(output_filename_0)
instance.data["outputDir"] = os.path.dirname(filename_0)
instance.data["deadlineSubmissionJob"] = response.json()

def preflight_check(self, instance):
@@ -309,14 +309,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)

# replace path for UNC / network share paths, so PYPE is found
# over network. It assumes PYPE is located somewhere in
# PYPE_STUDIO_CORE_PATH
pype_root = os.environ["PYPE_ROOT"].replace(
os.path.normpath(
os.environ['PYPE_STUDIO_CORE_MOUNT']),  # noqa
os.path.normpath(
os.environ['PYPE_STUDIO_CORE_PATH']))  # noqa
pype_root = os.environ["PYPE_SETUP_PATH"]

# we must provide either full path to executable or use musters own
# python named MPython.exe, residing directly in muster bin
@@ -517,33 +510,25 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
for key in environment:
for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
to_process = environment[key]
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = environment[key]
elif "://" in environment[key]:
clean_path = environment[key]
elif os.pathsep not in to_process:
try:
path = environment[key]
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
if "://" in value:
clean_path = value
else:
for path in environment[key].split(os.pathsep):
valid_paths = []
for path in value.split(os.pathsep):
if not path:
continue
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep
valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')

# this should replace paths so they are pointing to network share
clean_path = clean_path.replace(
os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
os.path.normpath(environment['PYPE_STUDIO_CORE_PATH']))
if valid_paths:
clean_path = os.pathsep.join(valid_paths)

clean_environment[key] = clean_path

return clean_environment
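A condensed Python 3 version of the cleaning loop above: URL-like values pass through untouched, everything else is split on `os.pathsep`, normalized, and re-joined (the py2-era `decode('UTF-8')` check is dropped here since `str` is already unicode in Python 3):

```python
import os

def clean_value(value):
    # URL-like values (e.g. mongodb://...) must not be path-normalized.
    if "://" in value:
        return value
    valid_paths = [
        os.path.normpath(path)
        for path in value.split(os.pathsep)
        if path
    ]
    return os.pathsep.join(valid_paths)

print(clean_value("C:/tools//bin" + os.pathsep + "C:/other/"))
```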
@@ -237,7 +237,7 @@ class LoadSequence(api.Loader):

repr_cont = representation["context"]

file = self.fname
file = api.get_representation_path(representation)

if not file:
repr_id = representation["_id"]
@@ -79,8 +79,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
representation = {
'name': ext,
'ext': ext,
"stagingDir": output_dir,
"anatomy_template": "render"
"stagingDir": output_dir
}

try:
@@ -71,8 +71,7 @@ class NukeRenderLocal(pype.api.Extractor):
'ext': ext,
'frameStart': "%0{}d".format(len(str(last_frame))) % first_frame,
'files': collected_frames,
"stagingDir": out_dir,
"anatomy_template": "render"
"stagingDir": out_dir
}
instance.data["representations"].append(repre)
@@ -130,7 +130,6 @@ class ExtractThumbnail(pype.api.Extractor):
"stagingDir": staging_dir,
"frameStart": first_frame,
"frameEnd": last_frame,
"anatomy_template": "render",
"tags": tags
}
instance.data["representations"].append(repre)
@@ -128,6 +128,9 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
# Top-level group name
"BatchName": script_name,

# Asset dependency to wait for at least the scene file to sync.
"AssetDependency0": script_path,

# Job name, as seen in Monitor
"Name": jobname,
@@ -201,40 +204,32 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]

environment["PATH"] = os.environ["PATH"]
# environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
for key in environment:
for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
to_process = environment[key]
if key == "PYPE_STUDIO_CORE_MOUNT":
clean_path = environment[key]
elif "://" in environment[key]:
clean_path = environment[key]
elif os.pathsep not in to_process:
try:
path = environment[key]
path.decode('UTF-8', 'strict')
clean_path = os.path.normpath(path)
except UnicodeDecodeError:
print('path contains non UTF characters')
if "://" in value:
clean_path = value
else:
for path in environment[key].split(os.pathsep):
valid_paths = []
for path in value.split(os.pathsep):
if not path:
continue
try:
path.decode('UTF-8', 'strict')
clean_path += os.path.normpath(path) + os.pathsep
valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')

if valid_paths:
clean_path = os.pathsep.join(valid_paths)

if key == "PYTHONPATH":
clean_path = clean_path.replace('python2', 'python3')

clean_path = clean_path.replace(
os.path.normpath(
environment['PYPE_STUDIO_CORE_MOUNT']),  # noqa
os.path.normpath(
environment['PYPE_STUDIO_CORE_PATH']))  # noqa
self.log.debug("clean path: {}".format(clean_path))
clean_environment[key] = clean_path

environment = clean_environment
@@ -46,7 +46,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
in_data = json.load(f)

asset_name = in_data["asset"]
family_preset_key = in_data.get("family_preset_key", "")
family = in_data["family"]
subset = in_data["subset"]
@@ -57,15 +56,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):

presets = config.get_presets()

# Get from presets anatomy key that will be used for getting template
# - default integrate new is used if not set
anatomy_key = (
presets.get("standalone_publish", {})
.get("families", {})
.get(family_preset_key, {})
.get("anatomy_template")
)

project = io.find_one({"type": "project"})
asset = io.find_one({"type": "asset", "name": asset_name})
context.data["project"] = project
@@ -98,12 +88,9 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
instance.data["source"] = "standalone publisher"

for component in in_data["representations"]:

component["destination"] = component["files"]
component["stagingDir"] = component["stagingDir"]
# Do not set anatomy_template if not specified
if anatomy_key:
component["anatomy_template"] = anatomy_key

if isinstance(component["files"], list):
collections, remainder = clique.assemble(component["files"])
self.log.debug("collecting sequence: {}".format(collections))