Merge maya_new_publisher
Commit c83589b0c4
37 changed files with 2407 additions and 1012 deletions
@@ -2,7 +2,7 @@
 """Tools to work with FBX."""
 import logging

-from pyblish.api import Instance
+import pyblish.api

 from maya import cmds  # noqa
 import maya.mel as mel  # noqa
@@ -141,7 +141,7 @@ class FBXExtractor:
         return options

     def set_options_from_instance(self, instance):
-        # type: (Instance) -> None
+        # type: (pyblish.api.Instance) -> None
         """Sets FBX export options from data in the instance.

         Args:
@@ -295,6 +295,13 @@ class CollectMayaRender(pyblish.api.InstancePlugin):
                 "colorspaceView": colorspace_data["view"],
             }

+            rr_settings = (
+                context.data["system_settings"]["modules"]["royalrender"]
+            )
+            if rr_settings["enabled"]:
+                data["rrPathName"] = instance.data.get("rrPathName")
+                self.log.info(data["rrPathName"])
+
             if self.sync_workfile_version:
                 data["version"] = context.data["version"]
                 for instance in context:
@@ -22,6 +22,10 @@ from openpype.pipeline.publish import (
     KnownPublishError,
     OpenPypePyblishPluginMixin
 )
+from openpype.pipeline.publish.lib import (
+    replace_published_scene,
+    get_published_workfile_instance
+)

 JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)

@@ -430,7 +434,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
         file_path = None
         if self.use_published:
             if not self.import_reference:
-                file_path = self.from_published_scene()
+                file_path = self.from_published_scene(context)
             else:
                 self.log.info("use the scene with imported reference for rendering")  # noqa
                 file_path = context.data["currentFile"]
@@ -525,72 +529,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
            published.

        """
-        instance = self._instance
-        workfile_instance = self._get_workfile_instance(instance.context)
-        if workfile_instance is None:
-            return
-
-        # determine published path from Anatomy.
-        template_data = workfile_instance.data.get("anatomyData")
-        rep = workfile_instance.data["representations"][0]
-        template_data["representation"] = rep.get("name")
-        template_data["ext"] = rep.get("ext")
-        template_data["comment"] = None
-
-        anatomy = instance.context.data['anatomy']
-        template_obj = anatomy.templates_obj["publish"]["path"]
-        template_filled = template_obj.format_strict(template_data)
-        file_path = os.path.normpath(template_filled)
-
-        self.log.info("Using published scene for render {}".format(file_path))
-
-        if not os.path.exists(file_path):
-            self.log.error("published scene does not exist!")
-            raise
-
-        if not replace_in_path:
-            return file_path
-
-        # now we need to switch scene in expected files
-        # because <scene> token will now point to published
-        # scene file and that might differ from current one
-        def _clean_name(path):
-            return os.path.splitext(os.path.basename(path))[0]
-
-        new_scene = _clean_name(file_path)
-        orig_scene = _clean_name(instance.context.data["currentFile"])
-        expected_files = instance.data.get("expectedFiles")
-
-        if isinstance(expected_files[0], dict):
-            # we have aovs and we need to iterate over them
-            new_exp = {}
-            for aov, files in expected_files[0].items():
-                replaced_files = []
-                for f in files:
-                    replaced_files.append(
-                        str(f).replace(orig_scene, new_scene)
-                    )
-                new_exp[aov] = replaced_files
-            # [] might be too much here, TODO
-            instance.data["expectedFiles"] = [new_exp]
-        else:
-            new_exp = []
-            for f in expected_files:
-                new_exp.append(
-                    str(f).replace(orig_scene, new_scene)
-                )
-            instance.data["expectedFiles"] = new_exp
-
-        metadata_folder = instance.data.get("publishRenderMetadataFolder")
-        if metadata_folder:
-            metadata_folder = metadata_folder.replace(orig_scene,
-                                                      new_scene)
-            instance.data["publishRenderMetadataFolder"] = metadata_folder
-        self.log.info("Scene name was switched {} -> {}".format(
-            orig_scene, new_scene
-        ))
-
-        return file_path
+        return replace_published_scene(self._instance, replace_in_path=True)

     def assemble_payload(
             self, job_info=None, plugin_info=None, aux_files=None):
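The scene-swapping logic deleted above now lives behind `replace_published_scene` in `openpype.pipeline.publish.lib`. As a standalone sketch of the core renaming step it replaces (hypothetical helper names, derived only from the deleted lines):

import os


def _clean_name(path):
    """Strip directory and extension, leaving only the scene name."""
    return os.path.splitext(os.path.basename(path))[0]


def swap_scene_in_expected_files(expected_files, current_file, published_file):
    """Replace the work-scene name with the published-scene name.

    Expected files may be a flat list, or a list holding one dict keyed
    by AOV, mirroring the two branches in the removed code.
    """
    new_scene = _clean_name(published_file)
    orig_scene = _clean_name(current_file)

    if isinstance(expected_files[0], dict):
        # AOV layout: [{"beauty": [...], "Z": [...]}]
        return [{
            aov: [str(f).replace(orig_scene, new_scene) for f in files]
            for aov, files in expected_files[0].items()
        }]
    return [str(f).replace(orig_scene, new_scene) for f in expected_files]


# Example: the <scene> token in render outputs follows the published name.
files = ["/out/shot010_v001.0001.exr", "/out/shot010_v001.0002.exr"]
print(swap_scene_in_expected_files(
    files, "/work/shot010_v001.ma", "/publish/shot010_v002.ma"))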
@@ -651,22 +590,3 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin,
         self._instance.data["deadlineSubmissionJob"] = result

         return result["_id"]
-
-    @staticmethod
-    def _get_workfile_instance(context):
-        """Find workfile instance in context"""
-        for instance in context:
-
-            is_workfile = (
-                "workfile" in instance.data.get("families", []) or
-                instance.data["family"] == "workfile"
-            )
-            if not is_workfile:
-                continue
-
-            # test if there is instance of workfile waiting
-            # to be published.
-            assert instance.data.get("publish", True) is True, (
-                "Workfile (scene) must be published along")
-
-            return instance
@@ -47,6 +47,7 @@ from openpype_modules.deadline import abstract_submit_deadline
 from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo
 from openpype.tests.lib import is_in_tests
 from openpype.lib import is_running_from_build
+from openpype.pipeline.farm.tools import iter_expected_files


 def _validate_deadline_bool_value(instance, attribute, value):
@@ -238,7 +239,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
         # Add list of expected files to job
         # ---------------------------------
         exp = instance.data.get("expectedFiles")
-        for filepath in self._iter_expected_files(exp):
+        for filepath in iter_expected_files(exp):
             job_info.OutputDirectory += os.path.dirname(filepath)
             job_info.OutputFilename += os.path.basename(filepath)

@@ -296,7 +297,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
         # TODO: Avoid the need for this logic here, needed for submit publish
         # Store output dir for unified publisher (filesequence)
         expected_files = instance.data["expectedFiles"]
-        first_file = next(self._iter_expected_files(expected_files))
+        first_file = next(iter_expected_files(expected_files))
         output_dir = os.path.dirname(first_file)
         instance.data["outputDir"] = output_dir
         instance.data["toBeRenderedOn"] = "deadline"
@@ -815,16 +816,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
             end=int(self._instance.data["frameEndHandle"]),
         )

-    @staticmethod
-    def _iter_expected_files(exp):
-        if isinstance(exp[0], dict):
-            for _aov, files in exp[0].items():
-                for file in files:
-                    yield file
-        else:
-            for file in exp:
-                yield file
-
     @classmethod
     def get_attribute_defs(cls):
         defs = super(MayaSubmitDeadline, cls).get_attribute_defs()
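The removed static method was promoted to the shared helper imported above as `openpype.pipeline.farm.tools.iter_expected_files`. Judging from the deleted body, a behavior-equivalent sketch is:

def iter_expected_files(exp):
    """Yield every expected file, whether grouped by AOV or flat.

    `exp` is either [{"aov": [files...]}] or a plain list of files.
    """
    if isinstance(exp[0], dict):
        for _aov, files in exp[0].items():
            for file in files:
                yield file
    else:
        for file in exp:
            yield file


# Both layouts yield the same flat stream of paths.
assert list(iter_expected_files([{"beauty": ["a.exr"], "Z": ["z.exr"]}])) == ["a.exr", "z.exr"]
assert list(iter_expected_files(["a.exr", "z.exr"])) == ["a.exr", "z.exr"]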
@@ -863,7 +854,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,

         return defs

-
 def _format_tiles(
         filename,
         index,
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """Submit publishing job to farm."""
-
 import os
 import json
 import re
@@ -12,47 +11,22 @@ import pyblish.api

-from openpype.client import (
-    get_last_version_by_subset_name,
-    get_representations,
-)
 from openpype.pipeline import (
     get_representation_path,
     legacy_io,
 )
 from openpype.pipeline.publish import OpenPypePyblishPluginMixin
 from openpype.lib import EnumDef
 from openpype.tests.lib import is_in_tests
 from openpype.pipeline.farm.patterning import match_aov_pattern
 from openpype.lib import is_running_from_build
 from openpype.pipeline import publish
-
-
-def get_resources(project_name, version, extension=None):
-    """Get the files from the specific version."""
-
-    # TODO this functions seems to be weird
-    #   - it's looking for representation with one extension or first (any)
-    #     representation from a version?
-    #   - not sure how this should work, maybe it does for specific use cases
-    #     but probably can't be used for all resources from 2D workflows
-    extensions = None
-    if extension:
-        extensions = [extension]
-    repre_docs = list(get_representations(
-        project_name, version_ids=[version["_id"]], extensions=extensions
-    ))
-    assert repre_docs, "This is a bug"
-
-    representation = repre_docs[0]
-    directory = get_representation_path(representation)
-    print("Source: ", directory)
-    resources = sorted(
-        [
-            os.path.normpath(os.path.join(directory, fname))
-            for fname in os.listdir(directory)
-        ]
-    )
-
-    return resources
+from openpype.pipeline.farm.pyblish_functions import (
+    create_skeleton_instance,
+    create_instances_for_aov,
+    attach_instances_to_subset,
+    prepare_representations,
+    create_metadata_path
+)


 def get_resource_files(resources, frame_range=None):
@@ -186,36 +160,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
     # poor man exclusion
     skip_integration_repre_list = []

-    def _create_metadata_path(self, instance):
-        ins_data = instance.data
-        # Ensure output dir exists
-        output_dir = ins_data.get(
-            "publishRenderMetadataFolder", ins_data["outputDir"])
-
-        try:
-            if not os.path.isdir(output_dir):
-                os.makedirs(output_dir)
-        except OSError:
-            # directory is not available
-            self.log.warning("Path is unreachable: `{}`".format(output_dir))
-
-        metadata_filename = "{}_metadata.json".format(ins_data["subset"])
-
-        metadata_path = os.path.join(output_dir, metadata_filename)
-
-        # Convert output dir to `{root}/rest/of/path/...` with Anatomy
-        success, rootless_mtdt_p = self.anatomy.find_root_template_from_path(
-            metadata_path)
-        if not success:
-            # `rootless_path` is not set to `output_dir` if none of roots match
-            self.log.warning((
-                "Could not find root path for remapping \"{}\"."
-                " This may cause issues on farm."
-            ).format(output_dir))
-            rootless_mtdt_p = metadata_path
-
-        return metadata_path, rootless_mtdt_p
-
     def _submit_deadline_post_job(self, instance, job, instances):
         """Submit publish job to Deadline.

@@ -230,6 +174,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         subset = data["subset"]
         job_name = "Publish - {subset}".format(subset=subset)

+        anatomy = instance.context.data['anatomy']
+
         # instance.data.get("subset") != instances[0]["subset"]
         # 'Main' vs 'renderMain'
         override_version = None
@@ -237,7 +183,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         if instance_version != 1:
             override_version = instance_version
         output_dir = self._get_publish_folder(
-            instance.context.data['anatomy'],
+            anatomy,
             deepcopy(instance.data["anatomyData"]),
             instance.data.get("asset"),
             instances[0]["subset"],
@@ -248,7 +194,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         # Transfer the environment from the original job to this dependent
         # job so they use the same environment
         metadata_path, rootless_metadata_path = \
-            self._create_metadata_path(instance)
+            create_metadata_path(instance, anatomy)

         environment = {
             "AVALON_PROJECT": legacy_io.Session["AVALON_PROJECT"],
@@ -335,13 +281,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             self.log.info("Adding tile assembly jobs as dependencies...")
             job_index = 0
             for assembly_id in instance.data.get("assemblySubmissionJobs"):
-                payload["JobInfo"]["JobDependency{}".format(job_index)] = assembly_id  # noqa: E501
+                payload["JobInfo"]["JobDependency{}".format(
+                    job_index)] = assembly_id  # noqa: E501
                 job_index += 1
         elif instance.data.get("bakingSubmissionJobs"):
             self.log.info("Adding baking submission jobs as dependencies...")
             job_index = 0
             for assembly_id in instance.data["bakingSubmissionJobs"]:
-                payload["JobInfo"]["JobDependency{}".format(job_index)] = assembly_id  # noqa: E501
+                payload["JobInfo"]["JobDependency{}".format(
+                    job_index)] = assembly_id  # noqa: E501
                 job_index += 1
         else:
             payload["JobInfo"]["JobDependency0"] = job["_id"]
@@ -369,413 +317,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,

         return deadline_publish_job_id

-    def _copy_extend_frames(self, instance, representation):
-        """Copy existing frames from latest version.
-
-        This will copy all existing frames from subset's latest version back
-        to render directory and rename them to what renderer is expecting.
-
-        Arguments:
-            instance (pyblish.plugin.Instance): instance to get required
-                data from
-            representation (dict): representation to operate on
-
-        """
-        import speedcopy
-
-        self.log.info("Preparing to copy ...")
-        start = instance.data.get("frameStart")
-        end = instance.data.get("frameEnd")
-        project_name = legacy_io.active_project()
-
-        # get latest version of subset
-        # this will stop if subset wasn't published yet
-        project_name = legacy_io.active_project()
-        version = get_last_version_by_subset_name(
-            project_name,
-            instance.data.get("subset"),
-            asset_name=instance.data.get("asset")
-        )
-
-        # get its files based on extension
-        subset_resources = get_resources(
-            project_name, version, representation.get("ext")
-        )
-        r_col, _ = clique.assemble(subset_resources)
-
-        # if override remove all frames we are expecting to be rendered
-        # so we'll copy only those missing from current render
-        if instance.data.get("overrideExistingFrame"):
-            for frame in range(start, end + 1):
-                if frame not in r_col.indexes:
-                    continue
-                r_col.indexes.remove(frame)
-
-        # now we need to translate published names from representation
-        # back. This is tricky, right now we'll just use same naming
-        # and only switch frame numbers
-        resource_files = []
-        r_filename = os.path.basename(
-            representation.get("files")[0])  # first file
-        op = re.search(self.R_FRAME_NUMBER, r_filename)
-        pre = r_filename[:op.start("frame")]
-        post = r_filename[op.end("frame"):]
-        assert op is not None, "padding string wasn't found"
-        for frame in list(r_col):
-            fn = re.search(self.R_FRAME_NUMBER, frame)
-            # silencing linter as we need to compare to True, not to
-            # type
-            assert fn is not None, "padding string wasn't found"
-            # list of tuples (source, destination)
-            staging = representation.get("stagingDir")
-            staging = self.anatomy.fill_root(staging)
-            resource_files.append(
-                (frame,
-                 os.path.join(staging,
-                              "{}{}{}".format(pre,
-                                              fn.group("frame"),
-                                              post)))
-            )
-
-        # test if destination dir exists and create it if not
-        output_dir = os.path.dirname(representation.get("files")[0])
-        if not os.path.isdir(output_dir):
-            os.makedirs(output_dir)
-
-        # copy files
-        for source in resource_files:
-            speedcopy.copy(source[0], source[1])
-            self.log.info("  > {}".format(source[1]))
-
-        self.log.info(
-            "Finished copying %i files" % len(resource_files))
-
-    def _create_instances_for_aov(
-            self, instance_data, exp_files, additional_data, do_not_add_review
-    ):
-        """Create instance for each AOV found.
-
-        This will create new instance for every aov it can detect in expected
-        files list.
-
-        Arguments:
-            instance_data (pyblish.plugin.Instance): skeleton data for instance
-                (those needed) later by collector
-            exp_files (list): list of expected files divided by aovs
-            additional_data (dict):
-            do_not_add_review (bool): explicitly skip review
-
-        Returns:
-            list of instances
-
-        """
-        task = os.environ["AVALON_TASK"]
-        subset = instance_data["subset"]
-        cameras = instance_data.get("cameras", [])
-        instances = []
-        # go through aovs in expected files
-        for aov, files in exp_files[0].items():
-            cols, rem = clique.assemble(files)
-            # we shouldn't have any remainders. And if we do, it should
-            # be just one item for single frame renders.
-            if not cols and rem:
-                assert len(rem) == 1, ("Found multiple non related files "
-                                       "to render, don't know what to do "
-                                       "with them.")
-                col = rem[0]
-                ext = os.path.splitext(col)[1].lstrip(".")
-            else:
-                # but we really expect only one collection.
-                # Nothing else makes sense.
-                assert len(cols) == 1, "only one image sequence type is expected"  # noqa: E501
-                ext = cols[0].tail.lstrip(".")
-                col = list(cols[0])
-
-            self.log.debug(col)
-            # create subset name `familyTaskSubset_AOV`
-            group_name = 'render{}{}{}{}'.format(
-                task[0].upper(), task[1:],
-                subset[0].upper(), subset[1:])
-
-            cam = [c for c in cameras if c in col.head]
-            if cam:
-                if aov:
-                    subset_name = '{}_{}_{}'.format(group_name, cam, aov)
-                else:
-                    subset_name = '{}_{}'.format(group_name, cam)
-            else:
-                if aov:
-                    subset_name = '{}_{}'.format(group_name, aov)
-                else:
-                    subset_name = '{}'.format(group_name)
-
-            if isinstance(col, (list, tuple)):
-                staging = os.path.dirname(col[0])
-            else:
-                staging = os.path.dirname(col)
-
-            success, rootless_staging_dir = (
-                self.anatomy.find_root_template_from_path(staging)
-            )
-            if success:
-                staging = rootless_staging_dir
-            else:
-                self.log.warning((
-                    "Could not find root path for remapping \"{}\"."
-                    " This may cause issues on farm."
-                ).format(staging))
-
-            self.log.info("Creating data for: {}".format(subset_name))
-
-            app = os.environ.get("AVALON_APP", "")
-
-            if isinstance(col, list):
-                render_file_name = os.path.basename(col[0])
-            else:
-                render_file_name = os.path.basename(col)
-            aov_patterns = self.aov_filter
-
-            preview = match_aov_pattern(app, aov_patterns, render_file_name)
-            # toggle preview on if multipart is on
-
-            if instance_data.get("multipartExr"):
-                self.log.debug("Adding preview tag because its multipartExr")
-                preview = True
-            self.log.debug("preview:{}".format(preview))
-            new_instance = deepcopy(instance_data)
-            new_instance["subset"] = subset_name
-            new_instance["subsetGroup"] = group_name
-
-            preview = preview and not do_not_add_review
-            if preview:
-                new_instance["review"] = True
-
-            # create representation
-            if isinstance(col, (list, tuple)):
-                files = [os.path.basename(f) for f in col]
-            else:
-                files = os.path.basename(col)
-
-            # Copy render product "colorspace" data to representation.
-            colorspace = ""
-            products = additional_data["renderProducts"].layer_data.products
-            for product in products:
-                if product.productName == aov:
-                    colorspace = product.colorspace
-                    break
-
-            rep = {
-                "name": ext,
-                "ext": ext,
-                "files": files,
-                "frameStart": int(instance_data.get("frameStartHandle")),
-                "frameEnd": int(instance_data.get("frameEndHandle")),
-                # If expectedFile are absolute, we need only filenames
-                "stagingDir": staging,
-                "fps": new_instance.get("fps"),
-                "tags": ["review"] if preview else [],
-                "colorspaceData": {
-                    "colorspace": colorspace,
-                    "config": {
-                        "path": additional_data["colorspaceConfig"],
-                        "template": additional_data["colorspaceTemplate"]
-                    },
-                    "display": additional_data["display"],
-                    "view": additional_data["view"]
-                }
-            }
-
-            # support conversion from tiled to scanline
-            if instance_data.get("convertToScanline"):
-                self.log.info("Adding scanline conversion.")
-                rep["tags"].append("toScanline")
-
-            # poor man exclusion
-            if ext in self.skip_integration_repre_list:
-                rep["tags"].append("delete")
-
-            self._solve_families(new_instance, preview)
-
-            new_instance["representations"] = [rep]
-
-            # if extending frames from existing version, copy files from there
-            # into our destination directory
-            if new_instance.get("extendFrames", False):
-                self._copy_extend_frames(new_instance, rep)
-            instances.append(new_instance)
-            self.log.debug("instances:{}".format(instances))
-        return instances
-
-    def _get_representations(self, instance_data, exp_files,
-                             do_not_add_review):
-        """Create representations for file sequences.
-
-        This will return representations of expected files if they are not
-        in hierarchy of aovs. There should be only one sequence of files for
-        most cases, but if not - we create representation from each of them.
-
-        Arguments:
-            instance_data (dict): instance.data for which we are
-                setting representations
-            exp_files (list): list of expected files
-            do_not_add_review (bool): explicitly skip review
-
-        Returns:
-            list of representations
-
-        """
-        representations = []
-        host_name = os.environ.get("AVALON_APP", "")
-        collections, remainders = clique.assemble(exp_files)
-
-        # create representation for every collected sequence
-        for collection in collections:
-            ext = collection.tail.lstrip(".")
-            preview = False
-            # TODO 'useSequenceForReview' is temporary solution which does
-            #   not work for 100% of cases. We must be able to tell what
-            #   expected files contains more explicitly and from what
-            #   should be review made.
-            # - "review" tag is never added when is set to 'False'
-            if instance_data["useSequenceForReview"]:
-                # toggle preview on if multipart is on
-                if instance_data.get("multipartExr", False):
-                    self.log.debug(
-                        "Adding preview tag because its multipartExr"
-                    )
-                    preview = True
-                else:
-                    render_file_name = list(collection)[0]
-                    # if filtered aov name is found in filename, toggle it for
-                    # preview video rendering
-                    preview = match_aov_pattern(
-                        host_name, self.aov_filter, render_file_name
-                    )
-
-            staging = os.path.dirname(list(collection)[0])
-            success, rootless_staging_dir = (
-                self.anatomy.find_root_template_from_path(staging)
-            )
-            if success:
-                staging = rootless_staging_dir
-            else:
-                self.log.warning((
-                    "Could not find root path for remapping \"{}\"."
-                    " This may cause issues on farm."
-                ).format(staging))
-
-            frame_start = int(instance_data.get("frameStartHandle"))
-            if instance_data.get("slate"):
-                frame_start -= 1
-
-            preview = preview and not do_not_add_review
-            rep = {
-                "name": ext,
-                "ext": ext,
-                "files": [os.path.basename(f) for f in list(collection)],
-                "frameStart": frame_start,
-                "frameEnd": int(instance_data.get("frameEndHandle")),
-                # If expectedFile are absolute, we need only filenames
-                "stagingDir": staging,
-                "fps": instance_data.get("fps"),
-                "tags": ["review"] if preview else [],
-            }
-
-            # poor man exclusion
-            if ext in self.skip_integration_repre_list:
-                rep["tags"].append("delete")
-
-            if instance_data.get("multipartExr", False):
-                rep["tags"].append("multipartExr")
-
-            # support conversion from tiled to scanline
-            if instance_data.get("convertToScanline"):
-                self.log.info("Adding scanline conversion.")
-                rep["tags"].append("toScanline")
-
-            representations.append(rep)
-
-            self._solve_families(instance_data, preview)
-
-        # add remainders as representations
-        for remainder in remainders:
-            ext = remainder.split(".")[-1]
-
-            staging = os.path.dirname(remainder)
-            success, rootless_staging_dir = (
-                self.anatomy.find_root_template_from_path(staging)
-            )
-            if success:
-                staging = rootless_staging_dir
-            else:
-                self.log.warning((
-                    "Could not find root path for remapping \"{}\"."
-                    " This may cause issues on farm."
-                ).format(staging))
-
-            rep = {
-                "name": ext,
-                "ext": ext,
-                "files": os.path.basename(remainder),
-                "stagingDir": staging,
-            }
-
-            preview = match_aov_pattern(
-                host_name, self.aov_filter, remainder
-            )
-            preview = preview and not do_not_add_review
-            if preview:
-                rep.update({
-                    "fps": instance_data.get("fps"),
-                    "tags": ["review"]
-                })
-            self._solve_families(instance_data, preview)
-
-            already_there = False
-            for repre in instance_data.get("representations", []):
-                # might be added explicitly before by publish_on_farm
-                already_there = repre.get("files") == rep["files"]
-                if already_there:
-                    self.log.debug("repre {} already_there".format(repre))
-                    break
-
-            if not already_there:
-                representations.append(rep)
-
-        for rep in representations:
-            # inject colorspace data
-            self.set_representation_colorspace(
-                rep, self.context,
-                colorspace=instance_data["colorspace"]
-            )
-
-        return representations
-
-    def _solve_families(self, instance, preview=False):
-        families = instance.get("families")
-
-        # if we have one representation with preview tag
-        # flag whole instance for review and for ftrack
-        if preview:
-            if "ftrack" not in families:
-                if os.environ.get("FTRACK_SERVER"):
-                    self.log.debug(
-                        "Adding \"ftrack\" to families because of preview tag."
-                    )
-                    families.append("ftrack")
-            if "review" not in families:
-                self.log.debug(
-                    "Adding \"review\" to families because of preview tag."
-                )
-                families.append("review")
-            instance["families"] = families
-
     def process(self, instance):
         # type: (pyblish.api.Instance) -> None
         """Process plugin.

-        Detect type of renderfarm submission and create and post dependent job
-        in case of Deadline. It creates json file with metadata needed for
+        Detect type of render farm submission and create and post dependent
+        job in case of Deadline. It creates json file with metadata needed for
         publishing in directory of render.

         Args:
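For reference, the deleted `_create_instances_for_aov` composed per-AOV subset names as `render{Task}{Subset}` plus optional camera and AOV suffixes; the replacement `create_instances_for_aov` presumably keeps the same scheme. A minimal sketch of just that naming rule (hypothetical function name, logic taken from the removed lines):

def build_aov_subset_name(task, subset, camera=None, aov=None):
    """Compose the `renderTaskSubset[_camera][_AOV]` subset name."""
    group_name = "render{}{}{}{}".format(
        task[0].upper(), task[1:],
        subset[0].upper(), subset[1:])

    parts = [group_name]
    if camera:
        parts.append(camera)
    if aov:
        parts.append(aov)
    return "_".join(parts)


print(build_aov_subset_name("animation", "main"))            # renderAnimationMain
print(build_aov_subset_name("animation", "main", aov="Z"))   # renderAnimationMain_Z
print(build_aov_subset_name("animation", "main", "camA", "beauty"))
# renderAnimationMain_camA_beauty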
@@ -786,151 +333,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             self.log.debug("Skipping local instance.")
             return

-        data = instance.data.copy()
-        context = instance.context
-        self.context = context
-        self.anatomy = instance.context.data["anatomy"]
-
-        asset = data.get("asset") or legacy_io.Session["AVALON_ASSET"]
-        subset = data.get("subset")
-
-        start = instance.data.get("frameStart")
-        if start is None:
-            start = context.data["frameStart"]
-
-        end = instance.data.get("frameEnd")
-        if end is None:
-            end = context.data["frameEnd"]
-
-        handle_start = instance.data.get("handleStart")
-        if handle_start is None:
-            handle_start = context.data["handleStart"]
-
-        handle_end = instance.data.get("handleEnd")
-        if handle_end is None:
-            handle_end = context.data["handleEnd"]
-
-        fps = instance.data.get("fps")
-        if fps is None:
-            fps = context.data["fps"]
-
-        if data.get("extendFrames", False):
-            start, end = self._extend_frames(
-                asset,
-                subset,
-                start,
-                end,
-                data["overrideExistingFrame"])
-
-        try:
-            source = data["source"]
-        except KeyError:
-            source = context.data["currentFile"]
-
-        success, rootless_path = (
-            self.anatomy.find_root_template_from_path(source)
-        )
-        if success:
-            source = rootless_path
-        else:
-            # `rootless_path` is not set to `source` if none of roots match
-            self.log.warning((
-                "Could not find root path for remapping \"{}\"."
-                " This may cause issues."
-            ).format(source))
-
-        family = "render"
-        if ("prerender" in instance.data["families"] or
-                "prerender.farm" in instance.data["families"]):
-            family = "prerender"
-        families = [family]
-
-        # pass review to families if marked as review
-        do_not_add_review = False
-        if data.get("review"):
-            families.append("review")
-        elif data.get("review") == False:
-            self.log.debug("Instance has review explicitly disabled.")
-            do_not_add_review = True
-
-        instance_skeleton_data = {
-            "family": family,
-            "subset": subset,
-            "families": families,
-            "asset": asset,
-            "frameStart": start,
-            "frameEnd": end,
-            "handleStart": handle_start,
-            "handleEnd": handle_end,
-            "frameStartHandle": start - handle_start,
-            "frameEndHandle": end + handle_end,
-            "comment": instance.data["comment"],
-            "fps": fps,
-            "source": source,
-            "extendFrames": data.get("extendFrames"),
-            "overrideExistingFrame": data.get("overrideExistingFrame"),
-            "pixelAspect": data.get("pixelAspect", 1),
-            "resolutionWidth": data.get("resolutionWidth", 1920),
-            "resolutionHeight": data.get("resolutionHeight", 1080),
-            "multipartExr": data.get("multipartExr", False),
-            "jobBatchName": data.get("jobBatchName", ""),
-            "useSequenceForReview": data.get("useSequenceForReview", True),
-            # map inputVersions `ObjectId` -> `str` so json supports it
-            "inputVersions": list(map(str, data.get("inputVersions", []))),
-            "colorspace": instance.data.get("colorspace")
-        }
-
-        # skip locking version if we are creating v01
-        instance_version = instance.data.get("version")  # take this if exists
-        if instance_version != 1:
-            instance_skeleton_data["version"] = instance_version
-
-        # transfer specific families from original instance to new render
-        for item in self.families_transfer:
-            if item in instance.data.get("families", []):
-                instance_skeleton_data["families"] += [item]
-
-        # transfer specific properties from original instance based on
-        # mapping dictionary `instance_transfer`
-        for key, values in self.instance_transfer.items():
-            if key in instance.data.get("families", []):
-                for v in values:
-                    instance_skeleton_data[v] = instance.data.get(v)
-
-        # look into instance data if representations are not having any
-        # which are having tag `publish_on_farm` and include them
-        for repre in instance.data.get("representations", []):
-            staging_dir = repre.get("stagingDir")
-            if staging_dir:
-                success, rootless_staging_dir = (
-                    self.anatomy.find_root_template_from_path(
-                        staging_dir
-                    )
-                )
-                if success:
-                    repre["stagingDir"] = rootless_staging_dir
-                else:
-                    self.log.warning((
-                        "Could not find root path for remapping \"{}\"."
-                        " This may cause issues on farm."
-                    ).format(staging_dir))
-                    repre["stagingDir"] = staging_dir
-
-            if "publish_on_farm" in repre.get("tags"):
-                # create representations attribute if not there
-                if "representations" not in instance_skeleton_data.keys():
-                    instance_skeleton_data["representations"] = []
-
-                instance_skeleton_data["representations"].append(repre)
-
-        instances = None
-        assert data.get("expectedFiles"), ("Submission from old Pype version"
-                                           " - missing expectedFiles")
+        anatomy = instance.context.data["anatomy"]
+
+        instance_skeleton_data = create_skeleton_instance(
+            instance, families_transfer=self.families_transfer,
+            instance_transfer=self.instance_transfer)
         """
-        if content of `expectedFiles` are dictionaries, we will handle
-        it as list of AOVs, creating instance from every one of them.
+        if content of `expectedFiles` list are dictionaries, we will handle
+        it as list of AOVs, creating instance for every one of them.

         Example:
         --------
@@ -952,7 +362,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
        This will create instances for `beauty` and `Z` subset
        adding those files to their respective representations.

-        If we've got only list of files, we collect all filesequences.
+        If we have only list of files, we collect all file sequences.
        More than one probably doesn't make sense, but we'll handle it
        like creating one instance with multiple representations.

@@ -969,57 +379,23 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
        This will result in one instance with two representations:
        `foo` and `xxx`
        """
+        do_not_add_review = False
+        if instance.data.get("review") is False:
+            self.log.debug("Instance has review explicitly disabled.")
+            do_not_add_review = True

-        self.log.info(data.get("expectedFiles"))
-
-        if isinstance(data.get("expectedFiles")[0], dict):
-            # we cannot attach AOVs to other subsets as we consider every
-            # AOV subset of its own.
-
-            additional_data = {
-                "renderProducts": instance.data["renderProducts"],
-                "colorspaceConfig": instance.data["colorspaceConfig"],
-                "display": instance.data["colorspaceDisplay"],
-                "view": instance.data["colorspaceView"]
-            }
-
-            # Get templated path from absolute config path.
-            anatomy = instance.context.data["anatomy"]
-            colorspaceTemplate = instance.data["colorspaceConfig"]
-            success, rootless_staging_dir = (
-                anatomy.find_root_template_from_path(colorspaceTemplate)
-            )
-            if success:
-                colorspaceTemplate = rootless_staging_dir
-            else:
-                self.log.warning((
-                    "Could not find root path for remapping \"{}\"."
-                    " This may cause issues on farm."
-                ).format(colorspaceTemplate))
-            additional_data["colorspaceTemplate"] = colorspaceTemplate
-
-            if len(data.get("attachTo")) > 0:
-                assert len(data.get("expectedFiles")[0].keys()) == 1, (
-                    "attaching multiple AOVs or renderable cameras to "
-                    "subset is not supported")
-
-            # create instances for every AOV we found in expected files.
-            # note: this is done for every AOV and every render camera (if
-            #   there are multiple renderable cameras in scene)
-            instances = self._create_instances_for_aov(
-                instance_skeleton_data,
-                data.get("expectedFiles"),
-                additional_data,
-                do_not_add_review
-            )
-            self.log.info("got {} instance{}".format(
-                len(instances),
-                "s" if len(instances) > 1 else ""))
-
+        if isinstance(instance.data.get("expectedFiles")[0], dict):
+            instances = create_instances_for_aov(
+                instance, instance_skeleton_data,
+                self.aov_filter, self.skip_integration_repre_list,
+                do_not_add_review)
         else:
-            representations = self._get_representations(
+            representations = prepare_representations(
                 instance_skeleton_data,
-                data.get("expectedFiles"),
+                instance.data.get("expectedFiles"),
+                anatomy,
+                self.aov_filter,
+                self.skip_integration_repre_list,
                 do_not_add_review
             )

@@ -1030,25 +406,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         instance_skeleton_data["representations"] += representations
         instances = [instance_skeleton_data]

-        # if we are attaching to other subsets, create copy of existing
-        # instances, change data to match its subset and replace
-        # existing instances with modified data
+        # attach instances to subset
         if instance.data.get("attachTo"):
             self.log.info("Attaching render to subset:")
-            new_instances = []
-            for at in instance.data.get("attachTo"):
-                for i in instances:
-                    new_i = copy(i)
-                    new_i["version"] = at.get("version")
-                    new_i["subset"] = at.get("subset")
-                    new_i["family"] = at.get("family")
-                    new_i["append"] = True
-                    # don't set subsetGroup if we are attaching
-                    new_i.pop("subsetGroup")
-                    new_instances.append(new_i)
-                    self.log.info("  - {} / v{}".format(
-                        at.get("subset"), at.get("version")))
-            instances = new_instances
+            instances = attach_instances_to_subset(
+                instance.data.get("attachTo"), instances
+            )

         r''' SUBMiT PUBLiSH JOB 2 D34DLiN3
              ____
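The inlined attach loop removed above moved into `attach_instances_to_subset`, imported from `openpype.pipeline.farm.pyblish_functions`. Based on the deleted lines, its behavior amounts to this sketch (the actual signature in the module may differ):

from copy import copy


def attach_instances_to_subset(attach_to, instances):
    """Copy each instance once per attach target, retargeting subset data.

    `attach_to` is a list of {"subset", "family", "version"} dicts.
    """
    new_instances = []
    for at in attach_to:
        for i in instances:
            new_i = copy(i)  # shallow copy, mirroring the removed code
            new_i["version"] = at.get("version")
            new_i["subset"] = at.get("subset")
            new_i["family"] = at.get("family")
            new_i["append"] = True
            # subsetGroup must not carry over when attaching; the removed
            # code assumed the key exists in the skeleton data
            new_i.pop("subsetGroup")
            new_instances.append(new_i)
    return new_instances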
@@ -1063,11 +425,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         render_job = None
         submission_type = ""
         if instance.data.get("toBeRenderedOn") == "deadline":
-            render_job = data.pop("deadlineSubmissionJob", None)
+            render_job = instance.data.pop("deadlineSubmissionJob", None)
             submission_type = "deadline"

         if instance.data.get("toBeRenderedOn") == "muster":
-            render_job = data.pop("musterSubmissionJob", None)
+            render_job = instance.data.pop("musterSubmissionJob", None)
             submission_type = "muster"

         if not render_job and instance.data.get("tileRendering") is False:
@@ -1089,10 +451,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             render_job["Props"]["Batch"] = instance.data.get(
                 "jobBatchName")
         else:
-            render_job["Props"]["Batch"] = os.path.splitext(
-                os.path.basename(context.data.get("currentFile")))[0]
+            batch = os.path.splitext(os.path.basename(
+                instance.context.data.get("currentFile")))[0]
+            render_job["Props"]["Batch"] = batch
         # User is deadline user
-        render_job["Props"]["User"] = context.data.get(
+        render_job["Props"]["User"] = instance.context.data.get(
             "deadlineUser", getpass.getuser())

         render_job["Props"]["Env"] = {
@@ -1118,15 +481,15 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,

         # publish job file
         publish_job = {
-            "asset": asset,
-            "frameStart": start,
-            "frameEnd": end,
-            "fps": context.data.get("fps", None),
-            "source": source,
-            "user": context.data["user"],
-            "version": context.data["version"],  # this is workfile version
-            "intent": context.data.get("intent"),
-            "comment": context.data.get("comment"),
+            "asset": instance_skeleton_data["asset"],
+            "frameStart": instance_skeleton_data["frameStart"],
+            "frameEnd": instance_skeleton_data["frameEnd"],
+            "fps": instance_skeleton_data["fps"],
+            "source": instance_skeleton_data["source"],
+            "user": instance.context.data["user"],
+            "version": instance.context.data["version"],  # workfile version
+            "intent": instance.context.data.get("intent"),
+            "comment": instance.context.data.get("comment"),
             "job": render_job or None,
             "session": legacy_io.Session.copy(),
             "instances": instances
@@ -1136,7 +499,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
         publish_job["deadline_publish_job_id"] = deadline_publish_job_id

         # add audio to metadata file if available
-        audio_file = context.data.get("audioFile")
+        audio_file = instance.context.data.get("audioFile")
         if audio_file and os.path.isfile(audio_file):
             publish_job.update({"audio": audio_file})

@@ -1149,54 +512,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
             }
             publish_job.update({"ftrack": ftrack})

-        metadata_path, rootless_metadata_path = self._create_metadata_path(
-            instance)
+        metadata_path, rootless_metadata_path = \
+            create_metadata_path(instance, anatomy)

         self.log.info("Writing json file: {}".format(metadata_path))
         with open(metadata_path, "w") as f:
             json.dump(publish_job, f, indent=4, sort_keys=True)

-    def _extend_frames(self, asset, subset, start, end):
-        """Get latest version of asset and update frame range.
-
-        Based on minimum and maximum values.
-
-        Arguments:
-            asset (str): asset name
-            subset (str): subset name
-            start (int): start frame
-            end (int): end frame
-
-        Returns:
-            (int, int): updated frame start/end
-
-        """
-        # Frame comparison
-        prev_start = None
-        prev_end = None
-
-        project_name = legacy_io.active_project()
-        version = get_last_version_by_subset_name(
-            project_name,
-            subset,
-            asset_name=asset
-        )
-
-        # Set prev start / end frames for comparison
-        if not prev_start and not prev_end:
-            prev_start = version["data"]["frameStart"]
-            prev_end = version["data"]["frameEnd"]
-
-        updated_start = min(start, prev_start)
-        updated_end = max(end, prev_end)
-
-        self.log.info(
-            "Updating start / end frame : "
-            "{} - {}".format(updated_start, updated_end)
-        )
-
-        return updated_start, updated_end
-
     def _get_publish_folder(self, anatomy, template_data,
                             asset, subset,
                             family='render', version=None):
@@ -3,10 +3,10 @@
 import sys
 import os

-from openpype.settings import get_project_settings
-from openpype.lib.local_settings import OpenPypeSettingsRegistry
 from openpype.lib import Logger, run_subprocess
 from .rr_job import RRJob, SubmitFile, SubmitterParameter
+from openpype.lib.vendor_bin_utils import find_tool_in_custom_paths


 class Api:
@@ -15,69 +15,57 @@ class Api:
     RR_SUBMIT_CONSOLE = 1
     RR_SUBMIT_API = 2

-    def __init__(self, settings, project=None):
+    def __init__(self, rr_path=None):
         self.log = Logger.get_logger("RoyalRender")
-        self._settings = settings
-        self._initialize_rr(project)
+        self._rr_path = rr_path
+        os.environ["RR_ROOT"] = rr_path

-    def _initialize_rr(self, project=None):
-        # type: (str) -> None
-        """Initialize RR Path.
+    @staticmethod
+    def get_rr_bin_path(rr_root, tool_name=None):
+        # type: (str, str) -> str
+        """Get path to RR bin folder.

         Args:
-            project (str, Optional): Project name to set RR api in
-                context.
+            tool_name (str): Name of RR executable you want.
+            rr_root (str): Custom RR root if needed.
+
+        Returns:
+            str: Path to the tool based on current platform.

         """
-        if project:
-            project_settings = get_project_settings(project)
-            rr_path = (
-                project_settings
-                ["royalrender"]
-                ["rr_paths"]
-            )
-        else:
-            rr_path = (
-                self._settings
-                ["modules"]
-                ["royalrender"]
-                ["rr_path"]
-                ["default"]
-            )
-        os.environ["RR_ROOT"] = rr_path
-        self._rr_path = rr_path
-
-    def _get_rr_bin_path(self, rr_root=None):
-        # type: (str) -> str
-        """Get path to RR bin folder."""
-        rr_root = rr_root or self._rr_path
         is_64bit_python = sys.maxsize > 2 ** 32

-        rr_bin_path = ""
+        rr_bin_parts = [rr_root, "bin"]
         if sys.platform.lower() == "win32":
-            rr_bin_path = "/bin/win64"
-            if not is_64bit_python:
-                # we are using 64bit python
-                rr_bin_path = "/bin/win"
-            rr_bin_path = rr_bin_path.replace(
-                "/", os.path.sep
-            )
+            rr_bin_parts.append("win")

         if sys.platform.lower() == "darwin":
-            rr_bin_path = "/bin/mac64"
-            if not is_64bit_python:
-                rr_bin_path = "/bin/mac"
+            rr_bin_parts.append("mac")

-        if sys.platform.lower() == "linux":
-            rr_bin_path = "/bin/lx64"
+        if sys.platform.lower().startswith("linux"):
+            rr_bin_parts.append("lx")

-        return os.path.join(rr_root, rr_bin_path)
+        rr_bin_path = os.sep.join(rr_bin_parts)
+
+        paths_to_check = []
+        # if we use 64bit python, append 64bit specific path first
+        if is_64bit_python:
+            if not tool_name:
+                return rr_bin_path + "64"
+            paths_to_check.append(rr_bin_path + "64")
+
+        # otherwise use 32bit
+        if not tool_name:
+            return rr_bin_path
+        paths_to_check.append(rr_bin_path)
+
+        return find_tool_in_custom_paths(paths_to_check, tool_name)

     def _initialize_module_path(self):
         # type: () -> None
         """Set RR modules for Python."""
         # default for linux
-        rr_bin = self._get_rr_bin_path()
+        rr_bin = self.get_rr_bin_path(self._rr_path)
         rr_module_path = os.path.join(rr_bin, "lx64/lib")

         if sys.platform.lower() == "win32":
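The new `get_rr_bin_path` assembles the platform folder from parts (`win`/`mac`/`lx`) and prefers the 64-bit variant under 64-bit Python. A trimmed sketch of that resolution order, with the `find_tool_in_custom_paths` lookup left out (hypothetical helper name):

import os
import sys


def candidate_bin_paths(rr_root):
    """Return RR bin folders to search, 64-bit first on 64-bit Python."""
    is_64bit_python = sys.maxsize > 2 ** 32

    parts = [rr_root, "bin"]
    if sys.platform.lower() == "win32":
        parts.append("win")
    elif sys.platform.lower() == "darwin":
        parts.append("mac")
    elif sys.platform.lower().startswith("linux"):
        parts.append("lx")

    base = os.sep.join(parts)
    return [base + "64", base] if is_64bit_python else [base]


# e.g. on 64-bit Linux: ['/opt/rr/bin/lx64', '/opt/rr/bin/lx']
print(candidate_bin_paths("/opt/rr"))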
@@ -91,51 +79,46 @@ class Api:

         sys.path.append(os.path.join(self._rr_path, rr_module_path))

-    def create_submission(self, jobs, submitter_attributes, file_name=None):
-        # type: (list[RRJob], list[SubmitterParameter], str) -> SubmitFile
+    @staticmethod
+    def create_submission(jobs, submitter_attributes):
+        # type: (list[RRJob], list[SubmitterParameter]) -> SubmitFile
         """Create jobs submission file.

         Args:
             jobs (list): List of :class:`RRJob`
             submitter_attributes (list): List of submitter attributes
                 :class:`SubmitterParameter` for whole submission batch.
-            file_name (str, optional): File path to write data to.

         Returns:
             str: XML data of job submission files.

         """
-        raise NotImplementedError
+        return SubmitFile(SubmitterParameters=submitter_attributes, Jobs=jobs)

     def submit_file(self, file, mode=RR_SUBMIT_CONSOLE):
         # type: (SubmitFile, int) -> None
         if mode == self.RR_SUBMIT_CONSOLE:
             self._submit_using_console(file)
             return

-        # RR v7 supports only Python 2.7 so we bail out in fear
+        # RR v7 supports only Python 2.7, so we bail out in fear
         # until there is support for Python 3 😰
         raise NotImplementedError(
             "Submission via RoyalRender API is not supported yet")
         # self._submit_using_api(file)

-    def _submit_using_console(self, file):
-        # type: (SubmitFile) -> bool
-        rr_console = os.path.join(
-            self._get_rr_bin_path(),
-            "rrSubmitterconsole"
-        )
+    def _submit_using_console(self, job_file):
+        # type: (SubmitFile) -> None
+        rr_start_local = self.get_rr_bin_path(
+            self._rr_path, "rrStartLocal")

-        if sys.platform.lower() == "darwin":
-            if "/bin/mac64" in rr_console:
-                rr_console = rr_console.replace("/bin/mac64", "/bin/mac")
+        self.log.info("rr_console: {}".format(rr_start_local))

-        if sys.platform.lower() == "win32":
-            if "/bin/win64" in rr_console:
-                rr_console = rr_console.replace("/bin/win64", "/bin/win")
-            rr_console += ".exe"
-
-        args = [rr_console, file]
-        run_subprocess(" ".join(args), logger=self.log)
+        args = [rr_start_local, "rrSubmitterconsole", job_file]
+        self.log.info("Executing: {}".format(" ".join(args)))
+        env = os.environ
+        env["RR_ROOT"] = self._rr_path
+        run_subprocess(args, logger=self.log, env=env)

     def _submit_using_api(self, file):
         # type: (SubmitFile) -> None

openpype/modules/royalrender/lib.py (new file, 301 lines)

@ -0,0 +1,301 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Submitting render job to RoyalRender."""
|
||||
import os
|
||||
import re
|
||||
import platform
|
||||
from datetime import datetime
|
||||
|
||||
import pyblish.api
|
||||
from openpype.tests.lib import is_in_tests
|
||||
from openpype.pipeline.publish.lib import get_published_workfile_instance
|
||||
from openpype.pipeline.publish import KnownPublishError
|
||||
from openpype.modules.royalrender.api import Api as rrApi
|
||||
from openpype.modules.royalrender.rr_job import (
|
||||
RRJob, CustomAttribute, get_rr_platform)
|
||||
from openpype.lib import (
|
||||
is_running_from_build,
|
||||
BoolDef,
|
||||
NumberDef,
|
||||
)
|
||||
from openpype.pipeline import OpenPypePyblishPluginMixin
|
||||
|
||||
|
||||
class BaseCreateRoyalRenderJob(pyblish.api.InstancePlugin,
|
||||
OpenPypePyblishPluginMixin):
|
||||
"""Creates separate rendering job for Royal Render"""
|
||||
label = "Create Nuke Render job in RR"
|
||||
order = pyblish.api.IntegratorOrder + 0.1
|
||||
hosts = ["nuke"]
|
||||
families = ["render", "prerender"]
|
||||
targets = ["local"]
|
||||
optional = True
|
||||
|
||||
priority = 50
|
||||
chunk_size = 1
|
||||
concurrent_tasks = 1
|
||||
use_gpu = True
|
||||
use_published = True
|
||||
|
||||
@classmethod
|
||||
def get_attribute_defs(cls):
|
||||
return [
|
||||
NumberDef(
|
||||
"priority",
|
||||
label="Priority",
|
||||
default=cls.priority,
|
||||
decimals=0
|
||||
),
|
||||
NumberDef(
|
||||
"chunk",
|
||||
label="Frames Per Task",
|
||||
default=cls.chunk_size,
|
||||
decimals=0,
|
||||
minimum=1,
|
||||
maximum=1000
|
||||
),
|
||||
NumberDef(
|
||||
"concurrency",
|
||||
label="Concurrency",
|
||||
default=cls.concurrent_tasks,
|
||||
decimals=0,
|
||||
minimum=1,
|
||||
maximum=10
|
||||
),
|
||||
BoolDef(
|
||||
"use_gpu",
|
||||
default=cls.use_gpu,
|
||||
label="Use GPU"
|
||||
),
|
||||
BoolDef(
|
||||
"suspend_publish",
|
||||
default=False,
|
||||
label="Suspend publish"
|
||||
),
|
||||
BoolDef(
|
||||
"use_published",
|
||||
default=cls.use_published,
|
||||
label="Use published workfile"
|
||||
)
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._instance = None
|
||||
self._rr_root = None
|
||||
self.scene_path = None
|
||||
self.job = None
|
||||
self.submission_parameters = None
|
||||
self.rr_api = None
|
||||
|
||||
def process(self, instance):
|
||||
if not instance.data.get("farm"):
|
||||
self.log.info("Skipping local instance.")
|
||||
return
|
||||
|
||||
instance.data["attributeValues"] = self.get_attr_values_from_data(
|
||||
instance.data)
|
||||
|
||||
# add suspend_publish attributeValue to instance data
|
||||
instance.data["suspend_publish"] = instance.data["attributeValues"][
|
||||
"suspend_publish"]
|
||||
|
||||
context = instance.context
|
||||
self._instance = instance
|
||||
|
||||
self._rr_root = self._resolve_rr_path(context, instance.data.get(
|
||||
"rrPathName")) # noqa
|
||||
self.log.debug(self._rr_root)
|
||||
if not self._rr_root:
|
||||
raise KnownPublishError(
|
||||
("Missing RoyalRender root. "
|
||||
"You need to configure RoyalRender module."))
|
||||
|
||||
self.rr_api = rrApi(self._rr_root)
|
||||
|
||||
self.scene_path = context.data["currentFile"]
|
||||
if self.use_published:
|
||||
file_path = get_published_workfile_instance(context)
|
||||
|
||||
# fallback if nothing was set
|
||||
if not file_path:
|
||||
self.log.warning("Falling back to workfile")
|
||||
file_path = context.data["currentFile"]
|
||||
|
||||
self.scene_path = file_path
|
||||
self.log.info(
|
||||
"Using published scene for render {}".format(self.scene_path)
|
||||
)
|
||||
|
||||
if not self._instance.data.get("expectedFiles"):
|
||||
self._instance.data["expectedFiles"] = []
|
||||
|
||||
if not self._instance.data.get("rrJobs"):
|
||||
self._instance.data["rrJobs"] = []
|
||||
|
||||
self._instance.data["outputDir"] = os.path.dirname(
|
||||
self._instance.data["path"]).replace("\\", "/")
|
||||
|
||||
def get_job(self, instance, script_path, render_path, node_name):
|
||||
"""Get RR job based on current instance.
|
||||
|
||||
Args:
|
||||
script_path (str): Path to Nuke script.
|
||||
render_path (str): Output path.
|
||||
node_name (str): Name of the render node.
|
||||
|
||||
Returns:
|
||||
RRJob: RoyalRender Job instance.
|
||||
|
||||
"""
|
||||
start_frame = int(instance.data["frameStartHandle"])
|
||||
end_frame = int(instance.data["frameEndHandle"])
|
||||
|
||||
batch_name = os.path.basename(script_path)
|
||||
jobname = "%s - %s" % (batch_name, self._instance.name)
|
||||
if is_in_tests():
|
||||
batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
|
||||
|
||||
render_dir = os.path.normpath(os.path.dirname(render_path))
|
||||
output_filename_0 = self.preview_fname(render_path)
|
||||
file_name, file_ext = os.path.splitext(
|
||||
os.path.basename(output_filename_0))
|
||||
|
||||
custom_attributes = []
|
||||
if is_running_from_build():
|
||||
custom_attributes = [
|
||||
CustomAttribute(
|
||||
name="OpenPypeVersion",
|
||||
value=os.environ.get("OPENPYPE_VERSION"))
|
||||
]
|
||||
|
||||
# this will append expected files to instance as needed.
|
||||
expected_files = self.expected_files(
|
||||
instance, render_path, start_frame, end_frame)
|
||||
instance.data["expectedFiles"].extend(expected_files)
|
||||
|
||||
job = RRJob(
|
||||
Software="",
|
||||
Renderer="",
|
||||
SeqStart=int(start_frame),
|
||||
SeqEnd=int(end_frame),
|
||||
SeqStep=int(instance.data.get("byFrameStep", 1)),
|
||||
SeqFileOffset=0,
|
||||
Version=0,
|
||||
SceneName=script_path,
|
||||
IsActive=True,
|
||||
ImageDir=render_dir.replace("\\", "/"),
|
||||
ImageFilename=file_name,
|
||||
ImageExtension=file_ext,
|
||||
ImagePreNumberLetter="",
|
||||
ImageSingleOutputFile=False,
|
||||
SceneOS=get_rr_platform(),
|
||||
Layer=node_name,
|
||||
SceneDatabaseDir=script_path,
|
||||
CustomSHotName=jobname,
|
||||
CompanyProjectName=instance.context.data["projectName"],
|
||||
ImageWidth=instance.data["resolutionWidth"],
|
||||
ImageHeight=instance.data["resolutionHeight"],
|
||||
CustomAttributes=custom_attributes
|
||||
)
|
||||
|
||||
return job
|
||||
|
||||
def update_job_with_host_specific(self, instance, job):
|
||||
"""Host specific mapping for RRJob"""
|
||||
raise NotImplementedError
|
||||
|
||||
@staticmethod
|
||||
def _resolve_rr_path(context, rr_path_name):
|
||||
# type: (pyblish.api.Context, str) -> str
|
||||
rr_settings = (
|
||||
context.data
|
||||
["system_settings"]
|
||||
["modules"]
|
||||
["royalrender"]
|
||||
)
|
||||
try:
|
||||
default_servers = rr_settings["rr_paths"]
|
||||
project_servers = (
|
||||
context.data
|
||||
["project_settings"]
|
||||
["royalrender"]
|
||||
["rr_paths"]
|
||||
)
|
||||
rr_servers = {
|
||||
k: default_servers[k]
|
||||
for k in project_servers
|
||||
if k in default_servers
|
||||
}
|
||||
|
||||
except (AttributeError, KeyError):
|
||||
# Handle situation were we had only one url for royal render.
|
||||
return context.data["defaultRRPath"][platform.system().lower()]
|
||||
|
||||
return rr_servers[rr_path_name][platform.system().lower()]
|
||||
|
||||
    def expected_files(self, instance, path, start_frame, end_frame):
        """Get expected files.

        This function generates expected files from the provided
        path and start/end frames.

        It was taken from the Deadline module, but this should
        probably be handled in a collector to support more
        flexible scenarios.

        Args:
            instance (Instance)
            path (str): Output path.
            start_frame (int): Start frame.
            end_frame (int): End frame.

        Returns:
            list: List of expected files.

        """
        if instance.data.get("expectedFiles"):
            return instance.data["expectedFiles"]

        dir_name = os.path.dirname(path)
        file = os.path.basename(path)

        expected_files = []

        if "#" in file:
            pparts = file.split("#")
            padding = "%0{}d".format(len(pparts) - 1)
            file = pparts[0] + padding + pparts[-1]

        if "%" not in file:
            expected_files.append(path)
            return expected_files

        if self._instance.data.get("slate"):
            start_frame -= 1

        expected_files.extend(
            os.path.join(dir_name, (file % i)).replace("\\", "/")
            for i in range(start_frame, (end_frame + 1))
        )
        return expected_files

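For illustration, a hash-padded path expands like this (a sketch, not actual
plugin output):

    # path = "/out/beauty.####.exr", start_frame = 1001, end_frame = 1003
    # "####" becomes "%04d", which expands to:
    #   /out/beauty.1001.exr
    #   /out/beauty.1002.exr
    #   /out/beauty.1003.exr
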
    def preview_fname(self, path):
        """Return output file path with #### for padding.

        RR requires the path to be formatted with # in place of numbers.
        For example `/path/to/render.####.png`

        Args:
            path (str): path to rendered images

        Returns:
            str

        """
        self.log.debug("_ path: `{}`".format(path))
        if "%" in path:
            match = re.search(r"%0(\d+)d", path)
            self.log.debug("_ match: `{}`".format(match))
            # replace printf style padding (e.g. `%04d`) with hashes
            # so the returned path matches the documented `####` format
            if match:
                return path.replace(
                    match.group(0), "#" * int(match.group(1)))
        if "#" in path:
            self.log.debug("_ path: `{}`".format(path))
        return path

@ -1,23 +0,0 @@
# -*- coding: utf-8 -*-
"""Collect default Royal Render path."""
import pyblish.api


class CollectDefaultRRPath(pyblish.api.ContextPlugin):
    """Collect default Royal Render path."""

    order = pyblish.api.CollectorOrder
    label = "Default Royal Render Path"

    def process(self, context):
        try:
            rr_module = context.data.get(
                "openPypeModules")["royalrender"]
        except AttributeError:
            msg = "Cannot get OpenPype Royal Render module."
            self.log.error(msg)
            raise AssertionError(msg)

        # get default Royal Render path from the Royal Render module
        self.log.debug(rr_module.rr_paths)
        context.data["defaultRRPath"] = rr_module.rr_paths["default"]  # noqa: E501

@ -10,24 +10,26 @@ class CollectRRPathFromInstance(pyblish.api.InstancePlugin):
    families = ["render", "prerender", "renderlayer"]

    def process(self, instance):
        instance.data["rrPath"] = self._collect_rr_path(instance)
        instance.data["rrPathName"] = self._collect_rr_path_name(instance)
        self.log.info(
            "Using {} for submission.".format(instance.data["rrPath"]))
            "Using '{}' for submission.".format(instance.data["rrPathName"]))

    @staticmethod
    def _collect_rr_path(render_instance):
    def _collect_rr_path_name(instance):
        # type: (pyblish.api.Instance) -> str
        """Get Royal Render path from render instance."""
        """Get Royal Render path name from render instance."""
        rr_settings = (
            render_instance.context.data
            instance.context.data
            ["system_settings"]
            ["modules"]
            ["royalrender"]
        )
        if not instance.data.get("rrPaths"):
            return "default"
        try:
            default_servers = rr_settings["rr_paths"]
            project_servers = (
                render_instance.context.data
                instance.context.data
                ["project_settings"]
                ["royalrender"]
                ["rr_paths"]

@ -40,10 +42,6 @@ class CollectRRPathFromInstance(pyblish.api.InstancePlugin):

        except (AttributeError, KeyError):
            # Handle situation where we had only one url for Royal Render.
            return render_instance.context.data["defaultRRPath"]
            return rr_settings["rr_paths"]["default"]

        return rr_servers[
            list(rr_servers.keys())[
                int(render_instance.data.get("rrPaths"))
            ]
        ]
        return list(rr_servers.keys())[int(instance.data.get("rrPaths"))]

@ -71,7 +71,7 @@ class CollectSequencesFromJob(pyblish.api.ContextPlugin):
    """Gather file sequences from job directory.

    When "OPENPYPE_PUBLISH_DATA" environment variable is set these paths
    (folders or .json files) are parsed for image sequences. Otherwise the
    (folders or .json files) are parsed for image sequences. Otherwise, the
    current working directory is searched for file sequences.

    """

@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
"""Submitting render job to RoyalRender."""
import os

from maya.OpenMaya import MGlobal

from openpype.modules.royalrender import lib
from openpype.pipeline.farm.tools import iter_expected_files


class CreateMayaRoyalRenderJob(lib.BaseCreateRoyalRenderJob):
    label = "Create Maya Render job in RR"
    families = ["renderlayer"]

    def update_job_with_host_specific(self, instance, job):
        job.Software = "Maya"
        job.Version = "{0:.2f}".format(MGlobal.apiVersion() / 10000)
        job.Camera = instance.data["cameras"][0]
        workspace = instance.context.data["workspaceDir"]
        job.SceneDatabaseDir = workspace

        return job

    def process(self, instance):
        """Plugin entry point."""
        super(CreateMayaRoyalRenderJob, self).process(instance)

        expected_files = self._instance.data["expectedFiles"]
        first_file_path = next(iter_expected_files(expected_files))
        output_dir = os.path.dirname(first_file_path)
        self._instance.data["outputDir"] = output_dir

        layer = self._instance.data["setMembers"]  # type: str
        # strip Maya's render setup prefix (str.removeprefix needs py3.9+)
        layer_name = layer.removeprefix("rs_")

        job = self.get_job(instance, self.scene_path, first_file_path,
                           layer_name)
        job = self.update_job_with_host_specific(instance, job)

        instance.data["rrJobs"].append(job)

@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
"""Submitting render job to RoyalRender."""
import re

from openpype.modules.royalrender import lib


class CreateNukeRoyalRenderJob(lib.BaseCreateRoyalRenderJob):
    """Creates separate rendering job for Royal Render."""
    label = "Create Nuke Render job in RR"
    hosts = ["nuke"]
    families = ["render", "prerender"]

    def process(self, instance):
        super(CreateNukeRoyalRenderJob, self).process(instance)

        # redefinition of families
        if "render" in self._instance.data["family"]:
            self._instance.data["family"] = "write"
            self._instance.data["families"].insert(0, "render2d")
        elif "prerender" in self._instance.data["family"]:
            self._instance.data["family"] = "write"
            self._instance.data["families"].insert(0, "prerender")

        jobs = self.create_jobs(self._instance)
        for job in jobs:
            job = self.update_job_with_host_specific(instance, job)

        instance.data["rrJobs"] += jobs

    def update_job_with_host_specific(self, instance, job):
        nuke_version = re.search(
            r"\d+\.\d+", self._instance.context.data.get("hostVersion"))

        job.Software = "Nuke"
        job.Version = nuke_version.group()

        return job

    def create_jobs(self, instance):
        """Nuke creates multiple RR jobs - for baking etc."""
        # get output path
        render_path = instance.data['path']
        self.log.info("render::{}".format(render_path))
        self.log.info(
            "expected::{}".format(instance.data.get("expectedFiles")))
        script_path = self.scene_path
        node = self._instance.data["transientData"]["node"]

        # main job
        jobs = [
            self.get_job(
                instance,
                script_path,
                render_path,
                node.name()
            )
        ]

        for baking_script in self._instance.data.get("bakingNukeScripts", []):
            render_path = baking_script["bakeRenderPath"]
            script_path = baking_script["bakeScriptPath"]
            exe_node_name = baking_script["bakeWriteNodeName"]

            jobs.append(self.get_job(
                instance,
                script_path,
                render_path,
                exe_node_name
            ))

        return jobs

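A sketch of the `bakingNukeScripts` items `create_jobs` iterates over; the
keys match the code above, the values are made up:

    # hypothetical data, for illustration only
    instance.data["bakingNukeScripts"] = [
        {
            "bakeRenderPath": "/renders/sh010_baked.%04d.exr",
            "bakeScriptPath": "/work/sh010_bake.nk",
            "bakeWriteNodeName": "Write_baked",
        },
    ]
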
@ -0,0 +1,282 @@
# -*- coding: utf-8 -*-
"""Create publishing job on RoyalRender."""
import os
import attr
import json
import re

import pyblish.api

from openpype.modules.royalrender.rr_job import (
    RRJob,
    RREnvList,
    get_rr_platform
)
from openpype.pipeline.publish import KnownPublishError
from openpype.pipeline import (
    legacy_io,
)
from openpype.pipeline.farm.pyblish_functions import (
    create_skeleton_instance,
    create_instances_for_aov,
    attach_instances_to_subset,
    prepare_representations,
    create_metadata_path
)


class CreatePublishRoyalRenderJob(pyblish.api.InstancePlugin):
    """Creates job which publishes rendered files to publish area.

    The job waits until all rendering jobs are finished, then triggers the
    `publish` command, which reads the prepared .json file with metadata
    about what should be published, renames the prepared images and
    publishes them.

    When triggered it produces a .log file next to the .json file in the
    work area.
    """
    label = "Create publish job in RR"
    order = pyblish.api.IntegratorOrder + 0.2
    icon = "tractor"
    targets = ["local"]
    hosts = ["fusion", "maya", "nuke", "celaction", "aftereffects", "harmony"]
    families = ["render.farm", "prerender.farm",
                "renderlayer", "imagesequence", "vrayscene"]
    aov_filter = {"maya": [r".*([Bb]eauty).*"],
                  "aftereffects": [r".*"],  # for everything from AE
                  "harmony": [r".*"],  # for everything from Harmony
                  "celaction": [r".*"]}

    skip_integration_repre_list = []

    # mapping of instance properties to be transferred to new instance
    # for every specified family
    instance_transfer = {
        "slate": ["slateFrames", "slate"],
        "review": ["lutPath"],
        "render2d": ["bakingNukeScripts", "version"],
        "renderlayer": ["convertToScanline"]
    }

    # list of family names to transfer to new family if present
    families_transfer = ["render3d", "render2d", "ftrack", "slate"]

    environ_job_filter = [
        "OPENPYPE_METADATA_FILE"
    ]

    environ_keys = [
        "FTRACK_API_USER",
        "FTRACK_API_KEY",
        "FTRACK_SERVER",
        "AVALON_APP_NAME",
        "OPENPYPE_USERNAME",
        "OPENPYPE_SG_USER",
        "OPENPYPE_MONGO"
    ]
    priority = 50

    def process(self, instance):
        context = instance.context
        self.context = context
        self.anatomy = instance.context.data["anatomy"]

        if not instance.data.get("farm"):
            self.log.info("Skipping local instance.")
            return

        instance_skeleton_data = create_skeleton_instance(
            instance,
            families_transfer=self.families_transfer,
            instance_transfer=self.instance_transfer)

        do_not_add_review = False
        if instance.data.get("review") is False:
            self.log.debug("Instance has review explicitly disabled.")
            do_not_add_review = True

        if isinstance(instance.data.get("expectedFiles")[0], dict):
            instances = create_instances_for_aov(
                instance, instance_skeleton_data,
                self.aov_filter, self.skip_integration_repre_list,
                do_not_add_review)

        else:
            representations = prepare_representations(
                instance_skeleton_data,
                instance.data.get("expectedFiles"),
                self.anatomy,
                self.aov_filter,
                self.skip_integration_repre_list,
                do_not_add_review
            )

            if "representations" not in instance_skeleton_data.keys():
                instance_skeleton_data["representations"] = []

            # add representation
            instance_skeleton_data["representations"] += representations
            instances = [instance_skeleton_data]

        # attach instances to subset
        if instance.data.get("attachTo"):
            instances = attach_instances_to_subset(
                instance.data.get("attachTo"), instances
            )

        self.log.info("Creating RoyalRender Publish job ...")

        if not instance.data.get("rrJobs"):
            self.log.error(("There is no prior RoyalRender "
                            "job on the instance."))
            raise KnownPublishError(
                "Can't create publish job without prior rendering jobs first")

        rr_job = self.get_job(instance, instances)
        instance.data["rrJobs"].append(rr_job)

        # publish job file
        publish_job = {
            "asset": instance_skeleton_data["asset"],
            "frameStart": instance_skeleton_data["frameStart"],
            "frameEnd": instance_skeleton_data["frameEnd"],
            "fps": instance_skeleton_data["fps"],
            "source": instance_skeleton_data["source"],
            "user": instance.context.data["user"],
            "version": instance.context.data["version"],  # workfile version
            "intent": instance.context.data.get("intent"),
            "comment": instance.context.data.get("comment"),
            "job": attr.asdict(rr_job),
            "session": legacy_io.Session.copy(),
            "instances": instances
        }

        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, self.anatomy)

        self.log.info("Writing json file: {}".format(metadata_path))
        with open(metadata_path, "w") as f:
            json.dump(publish_job, f, indent=4, sort_keys=True)

    def get_job(self, instance, instances):
        """Create RR publishing job.

        Based on provided original instance and additional instances,
        create publishing job and return it to be submitted to farm.

        Args:
            instance (Instance): Original instance.
            instances (list of Instance): List of instances to
                be published on farm.

        Returns:
            RRJob: RoyalRender publish job.

        """
        data = instance.data.copy()
        subset = data["subset"]
        jobname = "Publish - {subset}".format(subset=subset)

        # Transfer the environment from the original job to this dependent
        # job, so they use the same environment
        metadata_path, rootless_metadata_path = \
            create_metadata_path(instance, self.anatomy)

        anatomy_data = instance.context.data["anatomyData"]

        environment = RREnvList({
            "AVALON_PROJECT": anatomy_data["project"]["name"],
            "AVALON_ASSET": anatomy_data["asset"],
            "AVALON_TASK": anatomy_data["task"]["name"],
            "OPENPYPE_USERNAME": anatomy_data["user"]
        })

        # add environments from self.environ_keys
        for env_key in self.environ_keys:
            if os.getenv(env_key):
                environment[env_key] = os.environ[env_key]

        # pass environment keys from self.environ_job_filter
        # and collect all pre_ids to wait for
        job_environ = {}
        jobs_pre_ids = []
        for job in instance.data["rrJobs"]:  # type: RRJob
            if job.rrEnvList:
                job_environ.update(
                    dict(RREnvList.parse(job.rrEnvList))
                )
            jobs_pre_ids.append(job.PreID)

        for env_j_key in self.environ_job_filter:
            if job_environ.get(env_j_key):
                environment[env_j_key] = job_environ[env_j_key]

        priority = self.priority or instance.data.get("priority", 50)

        # RR requires an absolute path or all jobs won't show up in rrControl
        abs_metadata_path = self.anatomy.fill_root(rootless_metadata_path)

        # command line set in E01__OpenPype__PublishJob.cfg, here only
        # additional logging
        args = [
            ">", os.path.join(os.path.dirname(abs_metadata_path),
                              "rr_out.log"),
            "2>&1"
        ]

        job = RRJob(
            Software="OpenPype",
            Renderer="Once",
            SeqStart=1,
            SeqEnd=1,
            SeqStep=1,
            SeqFileOffset=0,
            Version=self._sanitize_version(os.environ.get("OPENPYPE_VERSION")),
            SceneName=abs_metadata_path,
            # command line arguments
            CustomAddCmdFlags=" ".join(args),
            IsActive=True,
            ImageFilename="execOnce.file",
            ImageDir="<SceneFolder>",
            ImageExtension="",
            ImagePreNumberLetter="",
            SceneOS=get_rr_platform(),
            rrEnvList=environment.serialize(),
            Priority=priority,
            CustomSHotName=jobname,
            CompanyProjectName=instance.context.data["projectName"]
        )

        # add assembly jobs as dependencies
        if instance.data.get("tileRendering"):
            self.log.info("Adding tile assembly jobs as dependencies...")
            job.WaitForPreIDs += instance.data.get("assemblySubmissionJobs")
        elif instance.data.get("bakingSubmissionJobs"):
            self.log.info("Adding baking submission jobs as dependencies...")
            job.WaitForPreIDs += instance.data["bakingSubmissionJobs"]
        else:
            job.WaitForPreIDs += jobs_pre_ids

        return job

    def _sanitize_version(self, version):
        """Return version in format MAJOR.MINORPATCH.

        3.15.7-nightly.2 >> 3.157
        """
        VERSION_REGEX = re.compile(
            r"(?P<major>0|[1-9]\d*)"
            r"\.(?P<minor>0|[1-9]\d*)"
            r"\.(?P<patch>0|[1-9]\d*)"
            r"(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?"
            r"(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?"
        )

        valid_parts = VERSION_REGEX.findall(version)
        if len(valid_parts) != 1:
            # Return the string unchanged if it isn't a valid version
            return version

        # Unpack found version
        major, minor, patch, pre, post = valid_parts[0]

        return "{}.{}{}".format(major, minor, patch)

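A sketch of what `_sanitize_version` yields for a few inputs, derived from
the regex above (inputs illustrative):

    # "3.15.7"           -> "3.157"
    # "3.15.7-nightly.2" -> "3.157"  (prerelease part is dropped)
    # "not-a-version"    -> "not-a-version"  (returned unchanged)
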
@ -0,0 +1,131 @@
# -*- coding: utf-8 -*-
"""Submit jobs to RoyalRender."""
import tempfile
import platform

import pyblish.api
from openpype.modules.royalrender.api import (
    RRJob,
    Api as rrApi,
    SubmitterParameter
)
from openpype.pipeline.publish import KnownPublishError


class SubmitJobsToRoyalRender(pyblish.api.ContextPlugin):
    """Find all jobs, create submission XML and submit it to RoyalRender."""
    label = "Submit jobs to RoyalRender"
    order = pyblish.api.IntegratorOrder + 0.3
    targets = ["local"]

    def __init__(self):
        super(SubmitJobsToRoyalRender, self).__init__()
        self._rr_root = None
        self._rr_api = None
        self._submission_parameters = []

    def process(self, context):
        rr_settings = (
            context.data
            ["system_settings"]
            ["modules"]
            ["royalrender"]
        )

        if rr_settings["enabled"] is not True:
            self.log.warning("RoyalRender module is disabled.")
            return

        # iterate over all instances and try to find RRJobs
        jobs = []
        instance_rr_path = None
        for instance in context:
            if isinstance(instance.data.get("rrJob"), RRJob):
                jobs.append(instance.data.get("rrJob"))
            if instance.data.get("rrJobs"):
                if all(
                        isinstance(job, RRJob)
                        for job in instance.data.get("rrJobs")):
                    jobs += instance.data.get("rrJobs")
            if instance.data.get("rrPathName"):
                instance_rr_path = instance.data["rrPathName"]

        if jobs:
            self._rr_root = self._resolve_rr_path(context, instance_rr_path)
            if not self._rr_root:
                raise KnownPublishError(
                    ("Missing RoyalRender root. "
                     "You need to configure RoyalRender module."))
            self._rr_api = rrApi(self._rr_root)
            self._submission_parameters = self.get_submission_parameters()
            self.process_submission(jobs)
            return

        self.log.info("No RoyalRender jobs found")

    def process_submission(self, jobs):
        # type: ([RRJob]) -> None

        idx_pre_id = 0
        for job in jobs:
            job.PreID = idx_pre_id
            if idx_pre_id > 0:
                job.WaitForPreIDs.append(idx_pre_id - 1)
            idx_pre_id += 1

        submission = rrApi.create_submission(
            jobs,
            self._submission_parameters)

        xml = tempfile.NamedTemporaryFile(suffix=".xml", delete=False)
        with open(xml.name, "w") as f:
            f.write(submission.serialize())

        self.log.info("submitting job(s) file: {}".format(xml.name))
        self._rr_api.submit_file(file=xml.name)

    def create_file(self, name, ext, contents=None):
        temp = tempfile.NamedTemporaryFile(
            dir=self.tempdir,
            suffix=ext,
            prefix=name + '.',
            delete=False,
        )

        if contents:
            with open(temp.name, 'w') as f:
                f.write(contents)

        return temp.name

    def get_submission_parameters(self):
        return [SubmitterParameter("RequiredMemory", "0")]

    @staticmethod
    def _resolve_rr_path(context, rr_path_name):
        # type: (pyblish.api.Context, str) -> str
        rr_settings = (
            context.data
            ["system_settings"]
            ["modules"]
            ["royalrender"]
        )
        try:
            default_servers = rr_settings["rr_paths"]
            project_servers = (
                context.data
                ["project_settings"]
                ["royalrender"]
                ["rr_paths"]
            )
            rr_servers = {
                k: default_servers[k]
                for k in project_servers
                if k in default_servers
            }

        except (AttributeError, KeyError):
            # Handle situation where we had only one url for Royal Render.
            return context.data["defaultRRPath"][platform.system().lower()]

        return rr_servers[rr_path_name][platform.system().lower()]

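A sketch of the dependency chain `process_submission` builds for a
hypothetical three-job submission (only PreID/WaitForPreIDs shown):

    # job 0: PreID=0, WaitForPreIDs=[]
    # job 1: PreID=1, WaitForPreIDs=[0]
    # job 2: PreID=2, WaitForPreIDs=[1]
    # each job waits for the previous one, so the publish job appended
    # last runs only after all render jobs are finished
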
@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
"""Python wrapper for RoyalRender XML job file."""
import sys
from xml.dom import minidom as md
import attr
from collections import namedtuple, OrderedDict

@ -8,8 +9,36 @@ from collections import namedtuple, OrderedDict
CustomAttribute = namedtuple("CustomAttribute", ["name", "value"])


def get_rr_platform():
    # type: () -> str
    """Return the name of the platform used in RR jobs."""
    if sys.platform.lower() in ["win32", "win64"]:
        return "windows"
    elif sys.platform.lower() == "darwin":
        return "mac"
    else:
        return "linux"


class RREnvList(dict):
    def serialize(self):
        # <rrEnvList>VariableA=ValueA~~~VariableB=ValueB</rrEnvList>
        return "~~~".join(
            ["{}={}".format(k, v) for k, v in sorted(self.items())])

    @staticmethod
    def parse(data):
        # type: (str) -> RREnvList
        """Parse rrEnvList string and return it as RREnvList object."""
        out = RREnvList()
        for var in data.split("~~~"):
            k, v = var.split("=")
            out[k] = v
        return out

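A quick round-trip sketch for RREnvList (values illustrative):

    env = RREnvList({"AVALON_PROJECT": "demo", "AVALON_TASK": "comp"})
    env.serialize()
    # -> "AVALON_PROJECT=demo~~~AVALON_TASK=comp"
    RREnvList.parse("AVALON_PROJECT=demo~~~AVALON_TASK=comp")
    # -> {"AVALON_PROJECT": "demo", "AVALON_TASK": "comp"}
    # note: parse() splits each item on "=", so a value containing "="
    # would raise ValueError here
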
@attr.s
class RRJob:
class RRJob(object):
    """Mapping of Royal Render job file to a data class."""

    # Required

@ -35,7 +64,7 @@ class RRJob:

    # Is the job enabled for submission?
    # enabled by default
    IsActive = attr.ib()  # type: str
    IsActive = attr.ib()  # type: bool

    # Sequence settings of this job
    SeqStart = attr.ib()  # type: int

@ -60,7 +89,7 @@ class RRJob:

    # If you render a single file, e.g. Quicktime or Avi, then you have to
    # set this value. Videos have to be rendered at once on one client.
    ImageSingleOutputFile = attr.ib(default="false")  # type: str
    ImageSingleOutputFile = attr.ib(default=False)  # type: bool

    # Semi-Required (required for some render applications)
    # -----------------------------------------------------

@ -87,7 +116,7 @@ class RRJob:

    # Frame Padding of the frame number in the rendered filename.
    # Some render config files are setting the padding at render time.
    ImageFramePadding = attr.ib(default=None)  # type: str
    ImageFramePadding = attr.ib(default=None)  # type: int

    # Some render applications support overriding the image format at
    # the render commandline.

@ -108,7 +137,7 @@ class RRJob:
    # jobs sent from this machine. If a job with the PreID was found, then
    # this job waits for the other job. Note: This flag can be used multiple
    # times to wait for multiple jobs.
    WaitForPreID = attr.ib(default=None)  # type: int
    WaitForPreIDs = attr.ib(factory=list)  # type: list

    # List of submitter options per job
    # list item must be of `SubmitterParameter` type

@ -120,6 +149,9 @@ class RRJob:
    # list item must be of `CustomAttribute` named tuple
    CustomAttributes = attr.ib(factory=list)  # type: list

    # This is used to hold command line arguments for Execute job
    CustomAddCmdFlags = attr.ib(default=None)  # type: str

    # Additional information for subsequent publish script and
    # for better display in rrControl
    UserName = attr.ib(default=None)  # type: str

@ -129,6 +161,7 @@ class RRJob:
    CustomUserInfo = attr.ib(default=None)  # type: str
    SubmitMachine = attr.ib(default=None)  # type: str
    Color_ID = attr.ib(default=2)  # type: int
    CompanyProjectName = attr.ib(default=None)  # type: str

    RequiredLicenses = attr.ib(default=None)  # type: str


@ -137,6 +170,10 @@ class RRJob:
    TotalFrames = attr.ib(default=None)  # type: int
    Tiled = attr.ib(default=None)  # type: str

    # Environment
    # only used in RR 8.3 and newer
    rrEnvList = attr.ib(default=None)  # type: str


class SubmitterParameter:
    """Wrapper for Submitter Parameters."""


@ -160,7 +197,7 @@ class SubmitterParameter:


@attr.s
class SubmitFile:
class SubmitFile(object):
    """Class wrapping Royal Render submission XML file."""

    # Syntax version of the submission file.

@ -169,11 +206,11 @@ class SubmitFile:
    # Delete submission file after processing
    DeleteXML = attr.ib(default=1)  # type: int

    # List of submitter options per job
    # List of the submitter options per job.
    # list item must be of `SubmitterParameter` type
    SubmitterParameters = attr.ib(factory=list)  # type: list

    # List of job is submission batch.
    # List of the jobs in submission batch.
    # list item must be of type `RRJob`
    Jobs = attr.ib(factory=list)  # type: list


@ -225,7 +262,7 @@ class SubmitFile:
        # <SubmitterParameter>foo=bar~baz~goo</SubmitterParameter>
        self._process_submitter_parameters(
            self.SubmitterParameters, root, job_file)

        root.appendChild(job_file)
        for job in self.Jobs:  # type: RRJob
            if not isinstance(job, RRJob):
                raise AttributeError(

@ -241,16 +278,28 @@ class SubmitFile:
                job, dict_factory=OrderedDict, filter=filter_data)
            serialized_job.pop("CustomAttributes")
            serialized_job.pop("SubmitterParameters")
            # we are handling `WaitForPreIDs` separately.
            wait_pre_ids = serialized_job.pop("WaitForPreIDs", [])

            for custom_attr in job_custom_attributes:  # type: CustomAttribute
                serialized_job["Custom{}".format(
                    custom_attr.name)] = custom_attr.value

            for item, value in serialized_job.items():
                xml_attr = root.create(item)
                xml_attr = root.createElement(item)
                xml_attr.appendChild(
                    root.createTextNode(value)
                    root.createTextNode(str(value))
                )
                xml_job.appendChild(xml_attr)

            # WaitForPreID - can be used multiple times
            for pre_id in wait_pre_ids:
                xml_attr = root.createElement("WaitForPreID")
                xml_attr.appendChild(
                    root.createTextNode(str(pre_id))
                )
                xml_job.appendChild(xml_attr)

            job_file.appendChild(xml_job)

        return root.toprettyxml(indent="\t")

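A sketch of the XML fragment this serialization emits for one job; element
values are illustrative and the indentation is simplified:

    <Job>
        <Software>Maya</Software>
        <SeqStart>1001</SeqStart>
        <SeqEnd>1100</SeqEnd>
        <WaitForPreID>0</WaitForPreID>
        <WaitForPreID>1</WaitForPreID>
    </Job>
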
Binary file not shown (new PNG icon, 2 KiB).

@ -0,0 +1,71 @@
IconApp= E01__OpenPype.png
Name= OpenPype
rendererName= Once
Version= 1
Version_Minor= 0
Type=Execute
TYPEv9=Execute
ExecuteJobType=Once


################################# [Windows] [Linux] [Osx] ##################################

CommandLine=<envFileExecute <rrEnvFile>>
CommandLine=<rrEnvLine>

::win CommandLine= set "CUDA_VISIBLE_DEVICES=<GpuListC>"
::lx CommandLine= setenv CUDA_VISIBLE_DEVICES <GpuListC>
::osx CommandLine= setenv CUDA_VISIBLE_DEVICES <GpuListC>

CommandLine=
<SetEnvGlobal>
CommandLine=
<SetEnvSoft>
CommandLine=
<ResetExitCode>
CommandLine= "<Exe>" --headless publish <Scene>
--targets royalrender
--targets farm
<AdditionalCommandlineParam>
<CustomFlags>
CommandLine=
<CheckExitCode>


################################## Render Settings ##################################


################################## Submitter Settings ##################################
StartMultipleInstances= 0~0
SceneFileExtension= *.json
AllowImageNameChange= 0
AllowImageDirChange= 0
SequenceDivide= 0~1
PPSequenceCheck=0~0
PPCreateSmallVideo=0~0
PPCreateFullVideo=0~0
AllowLocalRenderOut= 0~0


################################## Client Settings ##################################

IconApp=E01__OpenPype.png

licenseFailLine=

errorSearchLine=

permanentErrorSearchLine =

Frozen_MinCoreUsage=0.3
Frozen_Minutes=30

@ -0,0 +1,2 @@
IconApp= E01__OpenPype.png
Name= OpenPype

@ -0,0 +1,12 @@
[Windows]
Executable= openpype_console.exe
Path= OS; <ProgramFiles(x86)>\OpenPype\*\openpype_console.exe
Path= 32; <ProgramFiles(x86)>\OpenPype\*\openpype_console.exe

[Linux]
Executable= openpype_console
Path= OS; /opt/openpype/*/openpype_console

[Mac]
Executable= openpype_console
Path= OS; /Applications/OpenPype*/Content/MacOS/openpype_console

@ -0,0 +1,11 @@
PrePostType= pre
CommandLine=
<ResetExitCode>
CommandLine= <OsxApp "<rrBin64>rrPythonconsole" > "<RR_DIR>render_apps/_prepost_scripts/PreOpenPypeInjectEnvironments.py"

CommandLine=
<CheckExitCode> <FN>

CommandLine= "<RenderAppPath:OpenPype>"
CommandLine=
<CheckExitCode> <FN>

@ -0,0 +1,4 @@
# -*- coding: utf-8 -*-
import os

os.environ["OPENYPYPE_TESTVAR"] = "OpenPype was here"

openpype/pipeline/farm/pyblish_functions.py (new file, 871 lines)

@ -0,0 +1,871 @@
import copy
import attr
import pyblish.api
import os
import clique
from copy import deepcopy
import re
import warnings

from openpype.pipeline import (
    get_current_project_name,
    get_representation_path,
    Anatomy,
)
from openpype.client import (
    get_last_version_by_subset_name,
    get_representations
)
from openpype.lib import Logger
from openpype.pipeline.publish import KnownPublishError
from openpype.pipeline.farm.patterning import match_aov_pattern


@attr.s
class TimeData(object):
    """Structure used to handle time related data."""
    start = attr.ib(type=int)
    end = attr.ib(type=int)
    fps = attr.ib()
    step = attr.ib(default=1, type=int)
    handle_start = attr.ib(default=0, type=int)
    handle_end = attr.ib(default=0, type=int)

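A minimal usage sketch for TimeData (values illustrative):

    time_data = TimeData(start=1001, end=1100, fps=25.0,
                         handle_start=5, handle_end=5)
    # full range including handles: 996-1105
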
def remap_source(path, anatomy):
    """Try to remap path to rootless path.

    Args:
        path (str): Path to be remapped to rootless.
        anatomy (Anatomy): Anatomy object to handle remapping
            itself.

    Returns:
        str: Remapped path.

    Throws:
        ValueError: if the root cannot be found.

    """
    success, rootless_path = (
        anatomy.find_root_template_from_path(path)
    )
    if success:
        source = rootless_path
    else:
        raise ValueError(
            "Root from template path cannot be found: {}".format(path))
    return source

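For illustration, under a hypothetical root configuration the remap turns an
absolute path into a rootless one (paths made up):

    # "/mnt/projects/demo/shots/sh010/render.exr"
    # -> "{root[work]}/demo/shots/sh010/render.exr"
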
def extend_frames(asset, subset, start, end):
    """Get the latest version of asset and update the frame range.

    Based on minimum and maximum values.

    Arguments:
        asset (str): asset name
        subset (str): subset name
        start (int): start frame
        end (int): end frame

    Returns:
        (int, int): updated frame start/end

    """
    # Frame comparison
    prev_start = None
    prev_end = None

    project_name = get_current_project_name()
    version = get_last_version_by_subset_name(
        project_name,
        subset,
        asset_name=asset
    )

    # Set prev start / end frames for comparison
    if not prev_start and not prev_end:
        prev_start = version["data"]["frameStart"]
        prev_end = version["data"]["frameEnd"]

    updated_start = min(start, prev_start)
    updated_end = max(end, prev_end)

    return updated_start, updated_end

def get_time_data_from_instance_or_context(instance):
    """Get time data from instance (or context).

    If time data is not found on instance, data from context will be used.

    Args:
        instance (pyblish.api.Instance): Source instance.

    Returns:
        TimeData: dataclass holding time information.

    """
    return TimeData(
        start=(instance.data.get("frameStart") or
               instance.context.data.get("frameStart")),
        end=(instance.data.get("frameEnd") or
             instance.context.data.get("frameEnd")),
        fps=(instance.data.get("fps") or
             instance.context.data.get("fps")),
        handle_start=(instance.data.get("handleStart") or
                      instance.context.data.get("handleStart")),  # noqa: E501
        handle_end=(instance.data.get("handleEnd") or
                    instance.context.data.get("handleEnd"))
    )

def get_transferable_representations(instance):
    """Transfer representations from original instance.

    This will get all representations on the original instance that
    are flagged with `publish_on_farm` and return them to be included
    on the skeleton instance if needed.

    Args:
        instance (pyblish.api.Instance): Original instance to be processed.

    Return:
        list of dicts: List of transferable representations.

    """
    anatomy = instance.context.data["anatomy"]  # type: Anatomy
    to_transfer = []

    for representation in instance.data.get("representations", []):
        if "publish_on_farm" not in representation.get("tags"):
            continue

        trans_rep = representation.copy()

        staging_dir = trans_rep.get("stagingDir")

        if staging_dir:
            try:
                trans_rep["stagingDir"] = remap_source(staging_dir, anatomy)
            except ValueError:
                log = Logger.get_logger("farm_publishing")
                log.warning(
                    ("Could not find root path for remapping \"{}\". "
                     "This may cause issues on farm.").format(staging_dir))

        to_transfer.append(trans_rep)
    return to_transfer

def create_skeleton_instance(
        instance, families_transfer=None, instance_transfer=None):
    # type: (pyblish.api.Instance, list, dict) -> dict
    """Create skeleton instance from original instance data.

    This will create a dictionary containing the skeleton (common) data
    used for publishing rendered instances.
    This skeleton instance is then extended with additional data
    and serialized to be processed by the farm job.

    Args:
        instance (pyblish.api.Instance): Original instance to
            be used as a source of data.
        families_transfer (list): List of family names to transfer
            from the original instance to the skeleton.
        instance_transfer (dict): Dict with keys as families and
            values as a list of property names to transfer to the
            new skeleton.

    Returns:
        dict: Dictionary with skeleton instance data.

    """
    context = instance.context
    data = instance.data.copy()
    anatomy = instance.context.data["anatomy"]  # type: Anatomy

    # get time related data from instance (or context)
    time_data = get_time_data_from_instance_or_context(instance)

    if data.get("extendFrames", False):
        time_data.start, time_data.end = extend_frames(
            data["asset"],
            data["subset"],
            time_data.start,
            time_data.end,
        )

    source = data.get("source") or context.data.get("currentFile")
    success, rootless_path = (
        anatomy.find_root_template_from_path(source)
    )
    if success:
        source = rootless_path
    else:
        # `rootless_path` is not set to `source` if none of roots match
        log = Logger.get_logger("farm_publishing")
        log.warning(("Could not find root path for remapping \"{}\". "
                     "This may cause issues.").format(source))

    family = ("render"
              if "prerender" not in instance.data["families"]
              else "prerender")
    families = [family]

    # pass review to families if marked as review
    if data.get("review"):
        families.append("review")

    instance_skeleton_data = {
        "family": family,
        "subset": data["subset"],
        "families": families,
        "asset": data["asset"],
        "frameStart": time_data.start,
        "frameEnd": time_data.end,
        "handleStart": time_data.handle_start,
        "handleEnd": time_data.handle_end,
        "frameStartHandle": time_data.start - time_data.handle_start,
        "frameEndHandle": time_data.end + time_data.handle_end,
        "comment": data.get("comment"),
        "fps": time_data.fps,
        "source": source,
        "extendFrames": data.get("extendFrames"),
        "overrideExistingFrame": data.get("overrideExistingFrame"),
        "pixelAspect": data.get("pixelAspect", 1),
        "resolutionWidth": data.get("resolutionWidth", 1920),
        "resolutionHeight": data.get("resolutionHeight", 1080),
        "multipartExr": data.get("multipartExr", False),
        "jobBatchName": data.get("jobBatchName", ""),
        "useSequenceForReview": data.get("useSequenceForReview", True),
        # map inputVersions `ObjectId` -> `str` so json supports it
        "inputVersions": list(map(str, data.get("inputVersions", [])))
    }

    # skip locking version if we are creating v01
    instance_version = data.get("version")  # take this if exists
    if instance_version != 1:
        instance_skeleton_data["version"] = instance_version

    # transfer specific families from original instance to new render
    for item in families_transfer:
        if item in instance.data.get("families", []):
            instance_skeleton_data["families"] += [item]

    # transfer specific properties from original instance based on
    # mapping dictionary `instance_transfer`
    for key, values in instance_transfer.items():
        if key in instance.data.get("families", []):
            for v in values:
                instance_skeleton_data[v] = instance.data.get(v)

    representations = get_transferable_representations(instance)
    instance_skeleton_data["representations"] = []
    instance_skeleton_data["representations"] += representations

    return instance_skeleton_data

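For example, with frameStart=1001, frameEnd=1100 and 5-frame handles, the
skeleton gets:

    # frameStartHandle = 1001 - 5 = 996
    # frameEndHandle   = 1100 + 5 = 1105
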
def _add_review_families(families):
    """Add review flag to families.

    Handles the situation when new instances are created which should have
    review in families. In that case they should have 'ftrack' too.

    TODO: This is ugly and needs to be refactored. Ftrack family should be
        added in a different way (based on whether the module is enabled?)

    """
    # if we have one representation with preview tag
    # flag whole instance for review and for ftrack
    if "ftrack" not in families and os.environ.get("FTRACK_SERVER"):
        families.append("ftrack")
    if "review" not in families:
        families.append("review")
    return families

def prepare_representations(instance, exp_files, anatomy, aov_filter,
                            skip_integration_repre_list,
                            do_not_add_review):
    """Create representations for file sequences.

    This will return representations of expected files if they are not
    in the hierarchy of AOVs. There should be only one sequence of files
    for most cases, but if not - we create a representation for each of
    them.

    Arguments:
        instance (dict): instance data for which we are
            setting representations
        exp_files (list): list of expected files
        anatomy (Anatomy):
        aov_filter (dict): add review for specific aov names
        skip_integration_repre_list (list): exclude specific extensions
        do_not_add_review (bool): explicitly skip review

    Returns:
        list of representations

    """
    representations = []
    host_name = os.environ.get("AVALON_APP", "")
    collections, remainders = clique.assemble(exp_files)

    log = Logger.get_logger("farm_publishing")

    # create representation for every collected sequence
    for collection in collections:
        ext = collection.tail.lstrip(".")
        preview = False
        # TODO 'useSequenceForReview' is temporary solution which does
        #   not work for 100% of cases. We must be able to tell more
        #   explicitly what expected files contain and what the review
        #   should be made from.
        # - "review" tag is never added when it is set to 'False'
        if instance["useSequenceForReview"]:
            # toggle preview on if multipart is on
            if instance.get("multipartExr", False):
                log.debug(
                    "Adding preview tag because it is multipartExr"
                )
                preview = True
            else:
                render_file_name = list(collection)[0]
                # if filtered aov name is found in filename, toggle it for
                # preview video rendering
                preview = match_aov_pattern(
                    host_name, aov_filter, render_file_name
                )

        staging = os.path.dirname(list(collection)[0])
        success, rootless_staging_dir = (
            anatomy.find_root_template_from_path(staging)
        )
        if success:
            staging = rootless_staging_dir
        else:
            log.warning((
                "Could not find root path for remapping \"{}\"."
                " This may cause issues on farm."
            ).format(staging))

        frame_start = int(instance.get("frameStartHandle"))
        if instance.get("slate"):
            frame_start -= 1

        # review explicitly disabled by user
        preview = preview and not do_not_add_review
        rep = {
            "name": ext,
            "ext": ext,
            "files": [os.path.basename(f) for f in list(collection)],
            "frameStart": frame_start,
            "frameEnd": int(instance.get("frameEndHandle")),
            # If expectedFile are absolute, we need only filenames
            "stagingDir": staging,
            "fps": instance.get("fps"),
            "tags": ["review"] if preview else [],
        }

        # poor man's exclusion
        if ext in skip_integration_repre_list:
            rep["tags"].append("delete")

        if instance.get("multipartExr", False):
            rep["tags"].append("multipartExr")

        # support conversion from tiled to scanline
        if instance.get("convertToScanline"):
            log.info("Adding scanline conversion.")
            rep["tags"].append("toScanline")

        representations.append(rep)

        if preview:
            instance["families"] = _add_review_families(instance["families"])

    # add remainders as representations
    for remainder in remainders:
        ext = remainder.split(".")[-1]

        staging = os.path.dirname(remainder)
        success, rootless_staging_dir = (
            anatomy.find_root_template_from_path(staging)
        )
        if success:
            staging = rootless_staging_dir
        else:
            log.warning((
                "Could not find root path for remapping \"{}\"."
                " This may cause issues on farm."
            ).format(staging))

        rep = {
            "name": ext,
            "ext": ext,
            "files": os.path.basename(remainder),
            "stagingDir": staging,
        }

        preview = match_aov_pattern(
            host_name, aov_filter, remainder
        )
        preview = preview and not do_not_add_review
        if preview:
            rep.update({
                "fps": instance.get("fps"),
                "tags": ["review"]
            })
            instance["families"] = _add_review_families(instance["families"])

        already_there = False
        for repre in instance.get("representations", []):
            # might be added explicitly before by publish_on_farm
            already_there = repre.get("files") == rep["files"]
            if already_there:
                log.debug("repre {} already_there".format(repre))
                break

        if not already_there:
            representations.append(rep)

    return representations

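A sketch of how clique.assemble() splits the expected files (file names
illustrative):

    files = ["beauty.1001.exr", "beauty.1002.exr", "workfile.ma"]
    collections, remainders = clique.assemble(files)
    # collections -> one sequence ("beauty.1001-1002.exr")
    # remainders  -> ["workfile.ma"]
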
def create_instances_for_aov(instance, skeleton, aov_filter,
                             skip_integration_repre_list,
                             do_not_add_review):
    """Create instances from AOVs.

    This will create new pyblish.api.Instances by going over expected
    files defined on the original instance.

    Args:
        instance (pyblish.api.Instance): Original instance.
        skeleton (dict): Skeleton instance data.
        aov_filter (dict): AOV filter.
        skip_integration_repre_list (list): list of extensions that
            shouldn't be published.
        do_not_add_review (bool): explicitly disable review.

    Returns:
        list of pyblish.api.Instance: Instances created from
            expected files.

    """
    # we cannot attach AOVs to other subsets as we consider every
    # AOV subset of its own.

    log = Logger.get_logger("farm_publishing")
    additional_color_data = {
        "renderProducts": instance.data["renderProducts"],
        "colorspaceConfig": instance.data["colorspaceConfig"],
        "display": instance.data["colorspaceDisplay"],
        "view": instance.data["colorspaceView"]
    }

    # Get templated path from absolute config path.
    anatomy = instance.context.data["anatomy"]
    colorspace_template = instance.data["colorspaceConfig"]
    try:
        additional_color_data["colorspaceTemplate"] = remap_source(
            colorspace_template, anatomy)
    except ValueError as e:
        log.warning(e)
        additional_color_data["colorspaceTemplate"] = colorspace_template

    # if there are subsets to attach to and more than one AOV,
    # we cannot proceed.
    if (
        len(instance.data.get("attachTo", [])) > 0
        and len(instance.data.get("expectedFiles")[0].keys()) != 1
    ):
        raise KnownPublishError(
            "attaching multiple AOVs or renderable cameras to "
            "subset is not supported yet.")

    # create instances for every AOV we found in expected files.
    # NOTE: this is done for every AOV and every render camera (if
    #       there are multiple renderable cameras in scene)
    return _create_instances_for_aov(
        instance,
        skeleton,
        aov_filter,
        additional_color_data,
        skip_integration_repre_list,
        do_not_add_review
    )

def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
                              skip_integration_repre_list, do_not_add_review):
    """Create instance for each AOV found.

    This will create a new instance for every AOV it can detect in the
    expected files list.

    Args:
        instance (pyblish.api.Instance): Original instance.
        skeleton (dict): Skeleton data for instance (those needed) later
            by collector.
        additional_data (dict): Additional color management data.
        skip_integration_repre_list (list): list of extensions that shouldn't
            be published
        do_not_add_review (bool): explicitly disable review

    Returns:
        list of instances

    Throws:
        ValueError:

    """
    # TODO: this needs to be taking the task from context or instance
    task = os.environ["AVALON_TASK"]

    anatomy = instance.context.data["anatomy"]
    subset = skeleton["subset"]
    cameras = instance.data.get("cameras", [])
    exp_files = instance.data["expectedFiles"]
    log = Logger.get_logger("farm_publishing")

    instances = []
    # go through AOVs in expected files
    for aov, files in exp_files[0].items():
        cols, rem = clique.assemble(files)
        # we shouldn't have any remainders. And if we do, it should
        # be just one item for single frame renders.
        if not cols and rem:
            if len(rem) != 1:
                raise ValueError("Found multiple non related files "
                                 "to render, don't know what to do "
                                 "with them.")
            col = rem[0]
            ext = os.path.splitext(col)[1].lstrip(".")
        else:
            # but we really expect only one collection.
            # Nothing else makes sense.
            if len(cols) != 1:
                raise ValueError("Only one image sequence type is expected.")  # noqa: E501
            ext = cols[0].tail.lstrip(".")
            col = list(cols[0])

        # create subset name `familyTaskSubset_AOV`
        group_name = 'render{}{}{}{}'.format(
            task[0].upper(), task[1:],
            subset[0].upper(), subset[1:])

        # if there are multiple cameras, we need to add camera name
        if isinstance(col, (list, tuple)):
            cam = [c for c in cameras if c in col[0]]
        else:
            # in case of single frame
            cam = [c for c in cameras if c in col]
        if cam:
            if aov:
                subset_name = '{}_{}_{}'.format(group_name, cam, aov)
            else:
                subset_name = '{}_{}'.format(group_name, cam)
        else:
            if aov:
                subset_name = '{}_{}'.format(group_name, aov)
            else:
                subset_name = '{}'.format(group_name)

        if isinstance(col, (list, tuple)):
            staging = os.path.dirname(col[0])
        else:
            staging = os.path.dirname(col)

        try:
            staging = remap_source(staging, anatomy)
        except ValueError as e:
            log.warning(e)

        log.info("Creating data for: {}".format(subset_name))

        app = os.environ.get("AVALON_APP", "")

        if isinstance(col, list):
            render_file_name = os.path.basename(col[0])
        else:
            render_file_name = os.path.basename(col)
        aov_patterns = aov_filter

        preview = match_aov_pattern(app, aov_patterns, render_file_name)
        # toggle preview on if multipart is on
        if instance.data.get("multipartExr"):
            log.debug("Adding preview tag because it is multipartExr")
            preview = True

        new_instance = deepcopy(skeleton)
        new_instance["subset"] = subset_name
        new_instance["subsetGroup"] = group_name

        # review explicitly disabled by user
        preview = preview and not do_not_add_review
        if preview:
            new_instance["review"] = True

        # create representation
        if isinstance(col, (list, tuple)):
            files = [os.path.basename(f) for f in col]
        else:
            files = os.path.basename(col)

        # Copy render product "colorspace" data to representation.
        colorspace = ""
        products = additional_data["renderProducts"].layer_data.products
        for product in products:
            if product.productName == aov:
                colorspace = product.colorspace
                break

        rep = {
            "name": ext,
            "ext": ext,
            "files": files,
            "frameStart": int(skeleton["frameStartHandle"]),
            "frameEnd": int(skeleton["frameEndHandle"]),
            # If expectedFile are absolute, we need only filenames
            "stagingDir": staging,
            "fps": new_instance.get("fps"),
            "tags": ["review"] if preview else [],
            "colorspaceData": {
                "colorspace": colorspace,
                "config": {
                    "path": additional_data["colorspaceConfig"],
                    "template": additional_data["colorspaceTemplate"]
                },
                "display": additional_data["display"],
                "view": additional_data["view"]
            }
        }

        # support conversion from tiled to scanline
        if instance.data.get("convertToScanline"):
            log.info("Adding scanline conversion.")
            rep["tags"].append("toScanline")

        # poor man's exclusion
        if ext in skip_integration_repre_list:
            rep["tags"].append("delete")

        if preview:
            new_instance["families"] = _add_review_families(
                new_instance["families"])

        new_instance["representations"] = [rep]

        # if extending frames from existing version, copy files from there
        # into our destination directory
        if new_instance.get("extendFrames", False):
            copy_extend_frames(new_instance, rep)
        instances.append(new_instance)
        log.debug("instances:{}".format(instances))
    return instances

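A sketch of the subset names composed above for task "compositing" and
subset "main" (names illustrative):

    # AOV "beauty", no camera: renderCompositingMain_beauty
    # no AOV, no camera:       renderCompositingMain
    # with a camera, its name is inserted between group name and AOV
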
def get_resources(project_name, version, extension=None):
    """Get the files from the specific version.

    This will return all files from the representation.

    Todo:
        This is a really weird function, and its use is
        highly controversial. First, it will probably not work
        at all in the final release of AYON; second, the logic isn't sound.
        It should try to find a representation matching the current one -
        because it is used to pull out files from a previous version to
        be included in this one.

    .. deprecated:: 3.15.5
        This won't work in AYON and even the logic must be refactored.

    Args:
        project_name (str): Name of the project.
        version (dict): Version document.
        extension (str): extension used to filter
            representations.

    Returns:
        list: of files

    """
    warnings.warn((
        "This won't work in AYON and even "
        "the logic must be refactored."), DeprecationWarning)
    extensions = []
    if extension:
        extensions = [extension]

    # there is a `context_filter` argument that probably won't work in
    # the final release of AYON, so we'd rather not use it
    repre_docs = list(get_representations(
        project_name, version_ids=[version["_id"]]))

    filtered = []
    for doc in repre_docs:
        if doc["context"]["ext"] in extensions:
            filtered.append(doc)

    representation = filtered[0]
    directory = get_representation_path(representation)
    print("Source: ", directory)
    resources = sorted(
        [
            os.path.normpath(os.path.join(directory, file_name))
            for file_name in os.listdir(directory)
        ]
    )

    return resources

def copy_extend_frames(instance, representation):
    """Copy existing frames from latest version.

    This will copy all existing frames from the subset's latest version back
    to the render directory and rename them to what the renderer is
    expecting.

    Arguments:
        instance (pyblish.plugin.Instance): instance to get required
            data from
        representation (dict): representation to operate on

    """
    import speedcopy

    R_FRAME_NUMBER = re.compile(
        r".+\.(?P<frame>[0-9]+)\..+")

    log = Logger.get_logger("farm_publishing")
    log.info("Preparing to copy ...")
    start = instance.data.get("frameStart")
    end = instance.data.get("frameEnd")
    project_name = instance.context.data["project"]
    anatomy = instance.context.data["anatomy"]  # type: Anatomy

    # get latest version of subset
    # this will stop if subset wasn't published yet

    version = get_last_version_by_subset_name(
        project_name,
        instance.data.get("subset"),
        asset_name=instance.data.get("asset")
    )

    # get its files based on extension
    subset_resources = get_resources(
        project_name, version, representation.get("ext")
    )
    r_col, _ = clique.assemble(subset_resources)

    # if override remove all frames we are expecting to be rendered,
    # so we'll copy only those missing from current render
    if instance.data.get("overrideExistingFrame"):
        for frame in range(start, end + 1):
            if frame not in r_col.indexes:
                continue
            r_col.indexes.remove(frame)

    # now we need to translate published names from representation
    # back. This is tricky, right now we'll just use same naming
    # and only switch frame numbers
    resource_files = []
    r_filename = os.path.basename(
        representation.get("files")[0])  # first file
    op = re.search(R_FRAME_NUMBER, r_filename)
    assert op is not None, "padding string wasn't found"
    pre = r_filename[:op.start("frame")]
    post = r_filename[op.end("frame"):]
    for frame in list(r_col):
        fn = re.search(R_FRAME_NUMBER, frame)
        assert fn is not None, "padding string wasn't found"
        # list of tuples (source, destination)
        staging = representation.get("stagingDir")
        staging = anatomy.fill_root(staging)
        resource_files.append(
            (frame, os.path.join(
                staging, "{}{}{}".format(pre, fn["frame"], post)))
        )

    # test if destination dir exists and create it if not
    output_dir = os.path.dirname(representation.get("files")[0])
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    # copy files
    for source in resource_files:
        speedcopy.copy(source[0], source[1])
        log.info("  > {}".format(source[1]))

    log.info("Finished copying %i files" % len(resource_files))

def attach_instances_to_subset(attach_to, instances):
    """Attach instance to subset.

    If we are attaching to other subsets, create a copy of existing
    instances, change their data to match the target subset and replace
    the existing instances with the modified data.

    Args:
        attach_to (list): List of instances to attach to.
        instances (list): List of instances to attach.

    Returns:
        list: List of attached instances.

    """
    new_instances = []
    for attach_instance in attach_to:
        for i in instances:
            new_inst = copy.deepcopy(i)
            new_inst["version"] = attach_instance.get("version")
            new_inst["subset"] = attach_instance.get("subset")
            new_inst["family"] = attach_instance.get("family")
            new_inst["append"] = True
            # don't set subsetGroup if we are attaching
            new_inst.pop("subsetGroup")
            new_instances.append(new_inst)
    return new_instances


def create_metadata_path(instance, anatomy):
    ins_data = instance.data
    # Ensure output dir exists
    output_dir = ins_data.get(
        "publishRenderMetadataFolder", ins_data["outputDir"])

    log = Logger.get_logger("farm_publishing")

    try:
        if not os.path.isdir(output_dir):
            os.makedirs(output_dir)
    except OSError:
        # directory is not available
        log.warning("Path is unreachable: `{}`".format(output_dir))

    metadata_filename = "{}_metadata.json".format(ins_data["subset"])

    metadata_path = os.path.join(output_dir, metadata_filename)

    # Convert output dir to `{root}/rest/of/path/...` with Anatomy
    success, rootless_mtdt_p = anatomy.find_root_template_from_path(
        metadata_path)
    if not success:
        # `rootless_path` is not set to `output_dir` if none of roots match
        log.warning((
            "Could not find root path for remapping \"{}\"."
            " This may cause issues on farm."
        ).format(output_dir))
        rootless_mtdt_p = metadata_path

    return metadata_path, rootless_mtdt_p

24 openpype/pipeline/farm/pyblish_functions.pyi Normal file

@ -0,0 +1,24 @@
import pyblish.api
from openpype.pipeline import Anatomy
from typing import Tuple, Union, List


class TimeData:
    start: int
    end: int
    fps: Union[float, int]
    step: int
    handle_start: int
    handle_end: int

    def __init__(self, start: int, end: int, fps: Union[float, int], step: int, handle_start: int, handle_end: int):
        ...


def remap_source(source: str, anatomy: Anatomy): ...
def extend_frames(asset: str, subset: str, start: int, end: int) -> Tuple[int, int]: ...
def get_time_data_from_instance_or_context(instance: pyblish.api.Instance) -> TimeData: ...
def get_transferable_representations(instance: pyblish.api.Instance) -> list: ...
def create_skeleton_instance(instance: pyblish.api.Instance, families_transfer: list = ..., instance_transfer: dict = ...) -> dict: ...
def create_instances_for_aov(instance: pyblish.api.Instance, skeleton: dict, aov_filter: dict) -> List[pyblish.api.Instance]: ...
def attach_instances_to_subset(attach_to: list, instances: list) -> list: ...

112 openpype/pipeline/farm/tools.py Normal file

@ -0,0 +1,112 @@
import os


def get_published_workfile_instance(context):
    """Find workfile instance in context."""
    for i in context:
        is_workfile = (
            "workfile" in i.data.get("families", []) or
            i.data["family"] == "workfile"
        )
        if not is_workfile:
            continue

        # test if there is an instance of the workfile waiting
        # to be published.
        if i.data["publish"] is not True:
            continue

        return i


def from_published_scene(instance, replace_in_path=True):
    """Switch work scene for published scene.

    If rendering/exporting from published scenes is enabled, this will
    replace paths from the working scene with the published scene.

    Args:
        instance (pyblish.api.Instance): Instance data to process.
        replace_in_path (bool): If True, it will try to find the
            old scene name in the paths of expected files and replace
            it with the name of the published scene.

    Returns:
        str: Published scene path.
        None: If no published scene is found.

    Note:
        The published scene path is actually determined from the project
        Anatomy, as at the time this plugin is running the scene can
        still be un-published.

    """
    workfile_instance = get_published_workfile_instance(instance.context)
    if workfile_instance is None:
        return

    # determine published path from Anatomy.
    template_data = workfile_instance.data.get("anatomyData")
    rep = workfile_instance.data["representations"][0]
    template_data["representation"] = rep.get("name")
    template_data["ext"] = rep.get("ext")
    template_data["comment"] = None

    anatomy = instance.context.data['anatomy']
    template_obj = anatomy.templates_obj["publish"]["path"]
    template_filled = template_obj.format_strict(template_data)
    file_path = os.path.normpath(template_filled)

    if not os.path.exists(file_path):
        # a bare `raise` here would fail with "No active exception",
        # so raise an explicit error instead
        raise RuntimeError(
            "Published scene does not exist: {}".format(file_path))

    if not replace_in_path:
        return file_path

    # now we need to switch scene in expected files
    # because <scene> token will now point to published
    # scene file and that might differ from current one
    def _clean_name(path):
        return os.path.splitext(os.path.basename(path))[0]

    new_scene = _clean_name(file_path)
    orig_scene = _clean_name(instance.context.data["currentFile"])
    expected_files = instance.data.get("expectedFiles")

    if isinstance(expected_files[0], dict):
        # we have aovs and we need to iterate over them
        new_exp = {}
        for aov, files in expected_files[0].items():
            replaced_files = []
            for f in files:
                replaced_files.append(
                    str(f).replace(orig_scene, new_scene)
                )
            new_exp[aov] = replaced_files
        # [] might be too much here, TODO
        instance.data["expectedFiles"] = [new_exp]
    else:
        new_exp = []
        for f in expected_files:
            new_exp.append(
                str(f).replace(orig_scene, new_scene)
            )
        instance.data["expectedFiles"] = new_exp

    metadata_folder = instance.data.get("publishRenderMetadataFolder")
    if metadata_folder:
        metadata_folder = metadata_folder.replace(orig_scene,
                                                  new_scene)
        instance.data["publishRenderMetadataFolder"] = metadata_folder

    return file_path


def iter_expected_files(exp):
    if isinstance(exp[0], dict):
        for _aov, files in exp[0].items():
            for file in files:
                yield file
    else:
        for file in exp:
            yield file

@ -864,6 +864,109 @@ def _validate_transient_template(project_name, template_name, anatomy):
    ).format(template_name, project_name))


def get_published_workfile_instance(context):
    """Find workfile instance in context."""
    for i in context:
        is_workfile = (
            "workfile" in i.data.get("families", []) or
            i.data["family"] == "workfile"
        )
        if not is_workfile:
            continue

        # test if there is an instance of the workfile waiting
        # to be published.
        if not i.data.get("publish", True):
            continue

        return i


def replace_published_scene(instance, replace_in_path=True):
    """Switch work scene for published scene.

    If rendering/exporting from published scenes is enabled, this will
    replace paths from the working scene with the published scene.

    Args:
        instance (pyblish.api.Instance): Pyblish instance.
        replace_in_path (bool): If True, it will try to find the
            old scene name in the paths of expected files and replace
            it with the name of the published scene.

    Returns:
        str: Published scene path.
        None: If no published scene is found.

    Note:
        The published scene path is actually determined from the project
        Anatomy, as at the time this plugin is running the scene may not
        be published yet.

    """
    log = Logger.get_logger("published_workfile")
    workfile_instance = get_published_workfile_instance(instance.context)
    if workfile_instance is None:
        return

    # determine published path from Anatomy.
    template_data = workfile_instance.data.get("anatomyData")
    rep = workfile_instance.data["representations"][0]
    template_data["representation"] = rep.get("name")
    template_data["ext"] = rep.get("ext")
    template_data["comment"] = None

    anatomy = instance.context.data['anatomy']
    anatomy_filled = anatomy.format(template_data)
    template_filled = anatomy_filled["publish"]["path"]
    file_path = os.path.normpath(template_filled)

    log.info("Using published scene for render {}".format(file_path))

    if not os.path.exists(file_path):
        log.error("published scene does not exist!")
        # a bare `raise` would fail here ("No active exception"),
        # so raise an explicit error instead
        raise RuntimeError(
            "Published scene does not exist: {}".format(file_path))

    if not replace_in_path:
        return file_path

    # now we need to switch scene in expected files
    # because <scene> token will now point to published
    # scene file and that might differ from current one
    def _clean_name(path):
        return os.path.splitext(os.path.basename(path))[0]

    new_scene = _clean_name(file_path)
    orig_scene = _clean_name(instance.context.data["currentFile"])
    expected_files = instance.data.get("expectedFiles")

    if isinstance(expected_files[0], dict):
        # we have aovs and we need to iterate over them
        new_exp = {}
        for aov, files in expected_files[0].items():
            replaced_files = []
            for f in files:
                replaced_files.append(
                    str(f).replace(orig_scene, new_scene)
                )
            new_exp[aov] = replaced_files
        # [] might be too much here, TODO
        instance.data["expectedFiles"] = [new_exp]
    else:
        new_exp = []
        for f in expected_files:
            new_exp.append(
                str(f).replace(orig_scene, new_scene)
            )
        instance.data["expectedFiles"] = new_exp

    metadata_folder = instance.data.get("publishRenderMetadataFolder")
    if metadata_folder:
        metadata_folder = metadata_folder.replace(orig_scene,
                                                  new_scene)
        instance.data["publishRenderMetadataFolder"] = metadata_folder

    log.info("Scene name was switched {} -> {}".format(
        orig_scene, new_scene
    ))

    return file_path


def add_repre_files_for_cleanup(instance, repre):
    """ Explicitly mark repre files to be deleted.


@ -1,4 +1,7 @@
{
    "rr_paths": [
        "default"
    ],
    "publish": {
        "CollectSequencesFromJob": {
            "review": true

@ -1,6 +1,6 @@
{
    "shotgrid_project_id": 0,
    "shotgrid_server": "",
    "shotgrid_server": [],
    "event": {
        "enabled": false
    },

@ -185,9 +185,9 @@
    "enabled": false,
    "rr_paths": {
        "default": {
            "windows": "",
            "darwin": "",
            "linux": ""
            "windows": "C:\\RR8",
            "darwin": "/Volumes/share/RR8",
            "linux": "/mnt/studio/RR8"
        }
    }
},

@ -107,7 +107,8 @@ from .enum_entity import (
    TaskTypeEnumEntity,
    DeadlineUrlEnumEntity,
    AnatomyTemplatesEnumEntity,
    ShotgridUrlEnumEntity
    ShotgridUrlEnumEntity,
    RoyalRenderRootEnumEntity
)

from .list_entity import ListEntity

@ -170,6 +171,7 @@ __all__ = (
    "TaskTypeEnumEntity",
    "DeadlineUrlEnumEntity",
    "ShotgridUrlEnumEntity",
    "RoyalRenderRootEnumEntity",
    "AnatomyTemplatesEnumEntity",

    "ListEntity",

@ -1,3 +1,5 @@
import abc
import six
import copy
from .input_entities import InputEntity
from .exceptions import EntitySchemaError

@ -477,8 +479,9 @@ class TaskTypeEnumEntity(BaseEnumEntity):
        self.set(value_on_not_set)


class DeadlineUrlEnumEntity(BaseEnumEntity):
    schema_types = ["deadline_url-enum"]
@six.add_metaclass(abc.ABCMeta)
class FarmRootEnumEntity(BaseEnumEntity):
    schema_types = []

    def _item_initialization(self):
        self.multiselection = self.schema_data.get("multiselection", True)

@ -496,22 +499,8 @@ class DeadlineUrlEnumEntity(BaseEnumEntity):
        # GUI attribute
        self.placeholder = self.schema_data.get("placeholder")

    def _get_enum_values(self):
        deadline_urls_entity = self.get_entity_from_path(
            "system_settings/modules/deadline/deadline_urls"
        )

        valid_keys = set()
        enum_items_list = []
        for server_name, url_entity in deadline_urls_entity.items():
            enum_items_list.append(
                {server_name: "{}: {}".format(server_name, url_entity.value)}
            )
            valid_keys.add(server_name)
        return enum_items_list, valid_keys

    def set_override_state(self, *args, **kwargs):
        super(DeadlineUrlEnumEntity, self).set_override_state(*args, **kwargs)
        super(FarmRootEnumEntity, self).set_override_state(*args, **kwargs)

        self.enum_items, self.valid_keys = self._get_enum_values()
        if self.multiselection:

@ -528,22 +517,50 @@ class DeadlineUrlEnumEntity(BaseEnumEntity):
        elif self._current_value not in self.valid_keys:
            self._current_value = tuple(self.valid_keys)[0]

    @abc.abstractmethod
    def _get_enum_values(self):
        pass

class ShotgridUrlEnumEntity(BaseEnumEntity):

class DeadlineUrlEnumEntity(FarmRootEnumEntity):
    schema_types = ["deadline_url-enum"]

    def _get_enum_values(self):
        deadline_urls_entity = self.get_entity_from_path(
            "system_settings/modules/deadline/deadline_urls"
        )

        valid_keys = set()
        enum_items_list = []
        for server_name, url_entity in deadline_urls_entity.items():
            enum_items_list.append(
                {server_name: "{}: {}".format(server_name, url_entity.value)}
            )
            valid_keys.add(server_name)
        return enum_items_list, valid_keys


class RoyalRenderRootEnumEntity(FarmRootEnumEntity):
    schema_types = ["rr_root-enum"]

    def _get_enum_values(self):
        rr_root_entity = self.get_entity_from_path(
            "system_settings/modules/royalrender/rr_paths"
        )

        valid_keys = set()
        enum_items_list = []
        for server_name, url_entity in rr_root_entity.items():
            enum_items_list.append(
                {server_name: "{}: {}".format(server_name, url_entity.value)}
            )
            valid_keys.add(server_name)
        return enum_items_list, valid_keys


class ShotgridUrlEnumEntity(FarmRootEnumEntity):
    schema_types = ["shotgrid_url-enum"]

    def _item_initialization(self):
        self.multiselection = False

        self.enum_items = []
        self.valid_keys = set()

        self.valid_value_types = (STRING_TYPE,)
        self.value_on_not_set = ""

        # GUI attribute
        self.placeholder = self.schema_data.get("placeholder")

    def _get_enum_values(self):
        shotgrid_settings = self.get_entity_from_path(
            "system_settings/modules/shotgrid/shotgrid_settings"

@ -562,16 +579,6 @@ class ShotgridUrlEnumEntity(BaseEnumEntity):
            valid_keys.add(server_name)
        return enum_items_list, valid_keys

    def set_override_state(self, *args, **kwargs):
        super(ShotgridUrlEnumEntity, self).set_override_state(*args, **kwargs)

        self.enum_items, self.valid_keys = self._get_enum_values()
        if not self.valid_keys:
            self._current_value = ""

        elif self._current_value not in self.valid_keys:
            self._current_value = tuple(self.valid_keys)[0]


class AnatomyTemplatesEnumEntity(BaseEnumEntity):
    schema_types = ["anatomy-templates-enum"]

@ -5,6 +5,12 @@
    "collapsible": true,
    "is_file": true,
    "children": [
        {
            "type": "rr_root-enum",
            "key": "rr_paths",
            "label": "Royal Render Roots",
            "multiselect": true
        },
        {
            "type": "dict",
            "collapsible": true,

68 poetry.lock generated

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry and should not be changed by hand.
# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.

[[package]]
name = "acre"

@ -1456,11 +1456,13 @@ python-versions = ">=3.6"
files = [
    {file = "lief-0.12.3-cp310-cp310-macosx_10_14_arm64.whl", hash = "sha256:66724f337e6a36cea1a9380f13b59923f276c49ca837becae2e7be93a2e245d9"},
    {file = "lief-0.12.3-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:6d18aafa2028587c98f6d4387bec94346e92f2b5a8a5002f70b1cf35b1c045cc"},
    {file = "lief-0.12.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d4f69d125caaa8d5ddb574f29cc83101e165ebea1a9f18ad042eb3544081a797"},
    {file = "lief-0.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c078d6230279ffd3bca717c79664fb8368666f610b577deb24b374607936e9c1"},
    {file = "lief-0.12.3-cp310-cp310-win32.whl", hash = "sha256:e3a6af926532d0aac9e7501946134513d63217bacba666e6f7f5a0b7e15ba236"},
    {file = "lief-0.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:0750b72e3aa161e1fb0e2e7f571121ae05d2428aafd742ff05a7656ad2288447"},
    {file = "lief-0.12.3-cp311-cp311-macosx_10_14_arm64.whl", hash = "sha256:b5c123cb99a7879d754c059e299198b34e7e30e3b64cf22e8962013db0099f47"},
    {file = "lief-0.12.3-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:8bc58fa26a830df6178e36f112cb2bbdd65deff593f066d2d51434ff78386ba5"},
    {file = "lief-0.12.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74ac6143ac6ccd813c9b068d9c5f1f9d55c8813c8b407387eb57de01c3db2d74"},
    {file = "lief-0.12.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04eb6b70d646fb5bd6183575928ee23715550f161f2832cbcd8c6ff2071fb408"},
    {file = "lief-0.12.3-cp311-cp311-win32.whl", hash = "sha256:7e2d0a53c403769b04adcf8df92e83c5e25f9103a052aa7f17b0a9cf057735fb"},
    {file = "lief-0.12.3-cp311-cp311-win_amd64.whl", hash = "sha256:7f6395c12ee1bc4a5162f567cba96d0c72dfb660e7902e84d4f3029daf14fe33"},

@ -1480,6 +1482,7 @@ files = [
    {file = "lief-0.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:b00667257b43e93d94166c959055b6147d46d302598f3ee55c194b40414c89cc"},
    {file = "lief-0.12.3-cp39-cp39-macosx_10_14_arm64.whl", hash = "sha256:e6a1b5b389090d524621c2455795e1262f62dc9381bedd96f0cd72b878c4066d"},
    {file = "lief-0.12.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:ae773196df814202c0c51056163a1478941b299512b09660a3c37be3c7fac81e"},
    {file = "lief-0.12.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:66ddf88917ec7b00752687c476bb2771dc8ec19bd7e4c0dcff1f8ef774cad4e9"},
    {file = "lief-0.12.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:4a47f410032c63ac3be051d963d0337d6b47f0e94bfe8e946ab4b6c428f4d0f8"},
    {file = "lief-0.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbd11367c2259bd1131a6c8755dcde33314324de5ea029227bfbc7d3755871e6"},
    {file = "lief-0.12.3-cp39-cp39-win32.whl", hash = "sha256:2ce53e311918c3e5b54c815ef420a747208d2a88200c41cd476f3dd1eb876bcf"},

@ -1676,6 +1679,65 @@ files = [
    {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
]

[[package]]
name = "mypy"
version = "1.2.0"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
    {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"},
    {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"},
    {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"},
    {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"},
    {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"},
    {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"},
    {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"},
    {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"},
    {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"},
    {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"},
    {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"},
    {file = "mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"},
    {file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"},
    {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"},
    {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"},
    {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"},
    {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"},
    {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"},
    {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"},
    {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"},
    {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"},
    {file = "mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"},
    {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"},
    {file = "mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"},
    {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"},
    {file = "mypy-1.2.0.tar.gz", hash = "sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"},
]

[package.dependencies]
mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=3.10"

[package.extras]
dmypy = ["psutil (>=4.0)"]
install-types = ["pip"]
python2 = ["typed-ast (>=1.4.0,<2)"]
reports = ["lxml"]

[[package]]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
category = "dev"
optional = false
python-versions = ">=3.5"
files = [
    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]

[[package]]
name = "nodeenv"
version = "1.7.0"

@ -2352,7 +2414,7 @@ files = [
cffi = ">=1.4.1"

[package.extras]
docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"]
docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]

[[package]]

@ -3462,4 +3524,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
[metadata]
lock-version = "2.0"
python-versions = ">=3.9.1,<3.10"
content-hash = "02daca205796a0f29a0d9f50707544e6804f32027eba493cd2aa7f175a00dcea"
content-hash = "9d3a574b1b6f42ae05d4f0fa6d65677ee54a51c53d984dd3f44d02f234962dbb"

@ -94,6 +94,7 @@ wheel = "*"
enlighten = "*" # cool terminal progress bars
toml = "^0.10.2" # for parsing pyproject.toml
pre-commit = "*"
mypy = "*" # for better types

[tool.poetry.urls]
"Bug Tracker" = "https://github.com/pypeclub/openpype/issues"

@ -1,10 +0,0 @@
# -*- coding: utf-8 -*-
"""Test suite for User Settings."""
# import pytest
# from openpype.modules import ModulesManager


def test_rr_job():
    # manager = ModulesManager()
    # rr_module = manager.modules_by_name["royalrender"]
    ...

@ -102,6 +102,10 @@ workstation that should be submitting render jobs to muster via OpenPype.

**`templates mapping`** - you can customize Muster templates to match your existing setup here.

### Royal Render

**`Royal Render Root Paths`** - multi-platform paths to the Royal Render installation, one entry per named root (a sketch of the expected mapping follows).
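
For reference, the defaults shipped in the system settings map each named root to per-platform paths. A minimal sketch of that mapping, written as a Python dict purely for illustration (the actual setting is stored as JSON, and the values below only mirror the shipped defaults):

```python
# Shape of the "Royal Render Root Paths" setting: one entry per
# named root, each mapping a platform to the RR installation path.
# These values mirror the shipped defaults; replace them with your
# studio's actual mounts.
rr_paths = {
    "default": {
        "windows": "C:\\RR8",
        "darwin": "/Volumes/share/RR8",
        "linux": "/mnt/studio/RR8",
    }
}
```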

### Clockify

**`Workspace Name`** - name of the Clockify workspace where you would like to send all the timelogs.

37 website/docs/module_royalrender.md Normal file

@ -0,0 +1,37 @@
---
id: module_royalrender
title: Royal Render Administration
sidebar_label: Royal Render
---

import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';


## Preparation

For [Royal Render](https://www.royalrender.de/) support you need to set a few things up in both OpenPype and Royal Render itself:

1. Deploy the OpenPype executable to all nodes of the Royal Render farm. See [Install & Run](admin_use.md).

2. Enable the Royal Render Module in the [OpenPype Admin Settings](admin_settings_system.md#royal-render).

3. Point OpenPype to your Royal Render installation in the [OpenPype Admin Settings](admin_settings_system.md#royal-render).

4. Install our custom plugin and scripts to your RR repository. It should be as simple as copying the content of `openpype/modules/royalrender/rr_root` to `path/to/your/royalrender/repository` (see the copy sketch after this list).
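
A minimal sketch of step 4 in Python; both paths are placeholders to adjust to your deployment:

```python
import shutil

# Placeholder paths - point these at your actual OpenPype checkout
# and your Royal Render repository.
OPENPYPE_RR_ROOT = "openpype/modules/royalrender/rr_root"
RR_REPOSITORY = "path/to/your/royalrender/repository"

# Copy the plugin and scripts into the RR repository, keeping any
# files already present there (dirs_exist_ok needs Python 3.8+).
shutil.copytree(OPENPYPE_RR_ROOT, RR_REPOSITORY, dirs_exist_ok=True)
```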

## Configuration

The OpenPype integration for Royal Render consists of pointing RR to the location of the OpenPype executable. That is done by copying `_install_paths/OpenPype.cfg` to the RR root folder. This file contains reasonable defaults; they can be changed in the file itself or modified per render app in `rrControl`. A sketch of the copy step follows.
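
A minimal sketch of that copy; the source location is an assumption (adjust it to wherever `_install_paths/OpenPype.cfg` lives after the Preparation step) and the RR root is a placeholder:

```python
import shutil

# Assumed source location of the shipped config - adjust as needed.
source_cfg = (
    "path/to/your/royalrender/repository/_install_paths/OpenPype.cfg")
# Placeholder RR root folder, matching the root configured above.
rr_root = "/mnt/studio/RR8"

shutil.copy(source_cfg, rr_root)
```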

## Debugging

The current implementation uses a dynamically built `.xml` file which is stored in a temporary folder accessible by RR. In case of unforeseen issues, it might make sense to take this OpenPype-built file and try to run it via the `*__rrServerConsole` executable from the command line.

## Known issues

Currently, environment values set in OpenPype are not propagated into render jobs on RR. For now, it is the studio's responsibility to synchronize environment variables from OpenPype with all render nodes.

@ -111,6 +111,7 @@ module.exports = {
    "module_site_sync",
    "module_deadline",
    "module_muster",
    "module_royalrender",
    "module_clockify",
    "module_slack"
],