Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-02 00:44:52 +01:00)
Merge branch 'develop' into feature/OP-2893_Add-description-during-publish

Commit 053c2df59e
15 changed files with 190 additions and 119 deletions

@@ -1,6 +1,7 @@
 import os
 from avalon import api
 import pyblish.api
+from openpype.lib import get_subset_name_with_asset_doc


 class CollectWorkfile(pyblish.api.ContextPlugin):
@@ -38,7 +39,14 @@ class CollectWorkfile(pyblish.api.ContextPlugin):

         # workfile instance
         family = "workfile"
-        subset = family + task.capitalize()
+        subset = get_subset_name_with_asset_doc(
+            family,
+            "",
+            context.data["anatomyData"]["task"]["name"],
+            context.data["assetEntity"],
+            context.data["anatomyData"]["project"]["name"],
+            host_name=context.data["hostName"]
+        )
         # Create instance
         instance = context.create_instance(subset)

@@ -3,6 +3,8 @@
 import pyblish.api
 import os

+from openpype.lib import get_subset_name_with_asset_doc
+

 class CollectWorkfile(pyblish.api.ContextPlugin):
     """Collect current script for publish."""
@@ -14,10 +16,15 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
     def process(self, context):
         """Plugin entry point."""
         family = "workfile"
-        task = os.getenv("AVALON_TASK", None)
-        sanitized_task_name = task[0].upper() + task[1:]
         basename = os.path.basename(context.data["currentFile"])
-        subset = "{}{}".format(family, sanitized_task_name)
+        subset = get_subset_name_with_asset_doc(
+            family,
+            "",
+            context.data["anatomyData"]["task"]["name"],
+            context.data["assetEntity"],
+            context.data["anatomyData"]["project"]["name"],
+            host_name=context.data["hostName"]
+        )

         # Create instance
         instance = context.create_instance(subset)
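Note: both workfile collectors above now delegate subset naming to the shared helper instead of concatenating family and task by hand. A minimal standalone sketch of that call, using only the argument order and context keys visible in the hunks above (the wrapper name, and reading the empty second argument as the variant, are assumptions):

    from openpype.lib import get_subset_name_with_asset_doc

    def workfile_subset_from_context(context):
        # Illustrative wrapper; the data keys mirror the collectors above.
        return get_subset_name_with_asset_doc(
            "workfile",                                      # family
            "",                                              # assumed variant, left empty as above
            context.data["anatomyData"]["task"]["name"],     # task name
            context.data["assetEntity"],                     # asset document
            context.data["anatomyData"]["project"]["name"],  # project name
            host_name=context.data["hostName"]
        )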
@@ -252,6 +252,7 @@ class CreateRender(plugin.Creator):
         """Create instance settings."""
         # get pools
         pool_names = []
+        default_priority = 50

         self.server_aliases = list(self.deadline_servers.keys())
         self.data["deadlineServers"] = self.server_aliases
@@ -260,7 +261,8 @@ class CreateRender(plugin.Creator):
         self.data["extendFrames"] = False
         self.data["overrideExistingFrame"] = True
         # self.data["useLegacyRenderLayers"] = True
-        self.data["priority"] = 50
+        self.data["priority"] = default_priority
+        self.data["tile_priority"] = default_priority
         self.data["framesPerTask"] = 1
         self.data["whitelist"] = False
         self.data["machineList"] = ""
@@ -294,6 +296,16 @@ class CreateRender(plugin.Creator):
                 deadline_url = next(iter(self.deadline_servers.values()))

             pool_names = self._get_deadline_pools(deadline_url)
+            maya_submit_dl = self._project_settings.get(
+                "deadline", {}).get(
+                "publish", {}).get(
+                "MayaSubmitDeadline", {})
+            priority = maya_submit_dl.get("priority", default_priority)
+            self.data["priority"] = priority
+
+            tile_priority = maya_submit_dl.get("tile_priority",
+                                               default_priority)
+            self.data["tile_priority"] = tile_priority

         if muster_enabled:
             self.log.info(">>> Loading Muster credentials ...")
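Note: the creator now seeds both priorities from project settings and falls back to a hard-coded 50. A minimal sketch of that lookup in isolation, assuming the nested settings layout used above and in the deadline defaults further down (the function name is illustrative):

    def deadline_submit_priorities(project_settings, default_priority=50):
        # Same nested .get() chain as in CreateRender above; every level
        # falls back to an empty dict so missing settings yield the default.
        maya_submit = (
            project_settings
            .get("deadline", {})
            .get("publish", {})
            .get("MayaSubmitDeadline", {})
        )
        return (
            maya_submit.get("priority", default_priority),
            maya_submit.get("tile_priority", default_priority),
        )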
@@ -40,7 +40,14 @@ class ValidateCameraContents(pyblish.api.InstancePlugin):
         # list when there are no actual cameras results in
         # still an empty 'invalid' list
         if len(cameras) < 1:
-            raise RuntimeError("No cameras in instance.")
+            if members:
+                # If there are members in the instance return all of
+                # them as 'invalid' so the user can still select invalid
+                cls.log.error("No cameras found in instance "
+                              "members: {}".format(members))
+                return members
+
+            raise RuntimeError("No cameras found in empty instance.")

         # non-camera shapes
         valid_shapes = cmds.ls(shapes, type=('camera', 'locator'), long=True)
@@ -123,7 +123,7 @@ class ExtractReviewDataMov(openpype.api.Extractor):
         if generated_repres:
             # assign to representations
             instance.data["representations"] += generated_repres
-            instance.data["hasReviewableRepresentations"] = True
+            instance.data["useSequenceForReview"] = False
         else:
             instance.data["families"].remove("review")
             self.log.info((
@@ -37,6 +37,8 @@ IGNORED_DEFAULT_FILENAMES = (
     "__init__.py",
     "base.py",
     "interfaces.py",
+    "example_addons",
+    "default_modules",
 )


@@ -303,7 +305,16 @@ def _load_modules():
         fullpath = os.path.join(current_dir, filename)
         basename, ext = os.path.splitext(filename)

-        if not os.path.isdir(fullpath) and ext not in (".py", ):
+        if os.path.isdir(fullpath):
+            # Check existence of init file
+            init_path = os.path.join(fullpath, "__init__.py")
+            if not os.path.exists(init_path):
+                log.debug((
+                    "Module directory does not contain __init__.py file {}"
+                ).format(fullpath))
+                continue
+
+        elif ext not in (".py", ):
             continue

         try:
@@ -341,7 +352,16 @@ def _load_modules():
             fullpath = os.path.join(dirpath, filename)
             basename, ext = os.path.splitext(filename)

-            if not os.path.isdir(fullpath) and ext not in (".py", ):
+            if os.path.isdir(fullpath):
+                # Check existence of init file
+                init_path = os.path.join(fullpath, "__init__.py")
+                if not os.path.exists(init_path):
+                    log.debug((
+                        "Module directory does not contain __init__.py file {}"
+                    ).format(fullpath))
+                    continue
+
+            elif ext not in (".py", ):
                 continue

             # TODO add more logic how to define if folder is module or not
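Note: both discovery loops now apply the same rule: a directory counts as a loadable module only when it is a real package, and a file only when it is a Python source. A standalone sketch of that filter, assuming nothing beyond the standard library (the function name is illustrative):

    import os

    def is_loadable_module_entry(fullpath):
        # Directories qualify only when they contain an __init__.py,
        # mirroring the checks added to _load_modules() above.
        if os.path.isdir(fullpath):
            return os.path.exists(os.path.join(fullpath, "__init__.py"))
        # Plain files qualify only when they are Python sources.
        return os.path.splitext(fullpath)[1] == ".py"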
@@ -254,7 +254,11 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
     use_published = True
     tile_assembler_plugin = "OpenPypeTileAssembler"
     asset_dependencies = False
+    priority = 50
+    tile_priority = 50
     limit_groups = []
+    jobInfo = {}
+    pluginInfo = {}
     group = "none"

     def process(self, instance):
@@ -272,37 +276,12 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
         self.deadline_url = instance.data.get("deadlineUrl")
         assert self.deadline_url, "Requires Deadline Webservice URL"

-        self._job_info = (
-            context.data["project_settings"].get(
-                "deadline", {}).get(
-                "publish", {}).get(
-                "MayaSubmitDeadline", {}).get(
-                "jobInfo", {})
-        )
+        # just using existing names from Setting
+        self._job_info = self.jobInfo

-        self._plugin_info = (
-            context.data["project_settings"].get(
-                "deadline", {}).get(
-                "publish", {}).get(
-                "MayaSubmitDeadline", {}).get(
-                "pluginInfo", {})
-        )
+        self._plugin_info = self.pluginInfo

-        self.limit_groups = (
-            context.data["project_settings"].get(
-                "deadline", {}).get(
-                "publish", {}).get(
-                "MayaSubmitDeadline", {}).get(
-                "limit", [])
-        )
+        self.limit_groups = self.limit

-        self.group = (
-            context.data["project_settings"].get(
-                "deadline", {}).get(
-                "publish", {}).get(
-                "MayaSubmitDeadline", {}).get(
-                "group", "none")
-        )
-
         context = instance.context
         workspace = context.data["workspaceDir"]
@@ -465,7 +444,7 @@
         self.payload_skeleton["JobInfo"]["UserName"] = deadline_user
         # Set job priority
         self.payload_skeleton["JobInfo"]["Priority"] = \
-            self._instance.data.get("priority", 50)
+            self._instance.data.get("priority", self.priority)

         if self.group != "none" and self.group:
             self.payload_skeleton["JobInfo"]["Group"] = self.group
@@ -635,7 +614,7 @@
         }
         assembly_payload["JobInfo"].update(output_filenames)
         assembly_payload["JobInfo"]["Priority"] = self._instance.data.get(
-            "priority", 50)
+            "tile_priority", self.tile_priority)
         assembly_payload["JobInfo"]["UserName"] = deadline_user

         frame_payloads = []
@@ -235,6 +235,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         if mongo_url:
             environment["OPENPYPE_MONGO"] = mongo_url

+        priority = self.deadline_priority or instance.data.get("priority", 50)
+
         args = [
             "--headless",
             'publish',
@@ -254,7 +256,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):

             "Department": self.deadline_department,
             "ChunkSize": self.deadline_chunk_size,
-            "Priority": job["Props"]["Pri"],
+            "Priority": priority,

             "Group": self.deadline_group,
             "Pool": self.deadline_pool,
@@ -524,26 +526,31 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         for collection in collections:
             ext = collection.tail.lstrip(".")
             preview = False
-            # if filtered aov name is found in filename, toggle it for
-            # preview video rendering
-            for app in self.aov_filter.keys():
-                if os.environ.get("AVALON_APP", "") == app:
-                    # no need to add review if `hasReviewableRepresentations`
-                    if instance.get("hasReviewableRepresentations"):
-                        break
+            # TODO 'useSequenceForReview' is temporary solution which does
+            # not work for 100% of cases. We must be able to tell what
+            # expected files contains more explicitly and from what
+            # should be review made.
+            # - "review" tag is never added when is set to 'False'
+            use_sequence_for_review = instance.get(
+                "useSequenceForReview", True
+            )
+            if use_sequence_for_review:
+                # if filtered aov name is found in filename, toggle it for
+                # preview video rendering
+                for app in self.aov_filter.keys():
+                    if os.environ.get("AVALON_APP", "") == app:
+                        # iterate all aov filters
+                        for aov in self.aov_filter[app]:
+                            if re.match(
+                                aov,
+                                list(collection)[0]
+                            ):
+                                preview = True
+                                break

-                    # iteratre all aov filters
-                    for aov in self.aov_filter[app]:
-                        if re.match(
-                            aov,
-                            list(collection)[0]
-                        ):
-                            preview = True
-                            break
-
-            # toggle preview on if multipart is on
-            if instance.get("multipartExr", False):
-                preview = True
+            # toggle preview on if multipart is on
+            if instance.get("multipartExr", False):
+                preview = True

             staging = os.path.dirname(list(collection)[0])
             success, rootless_staging_dir = (
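Note: the preview toggle above matches each collection's first file against per-host regular expressions from aov_filter, and multipart EXRs are always previewed. A condensed sketch of that decision on its own, using the same environment variable and filter shape (the function name is illustrative):

    import os
    import re

    def wants_preview(first_file, aov_filter, multipart_exr=False):
        # aov_filter maps a host name ("maya", "nuke", ...) to regex
        # patterns, as in the ProcessSubmittedJobOnFarm settings below.
        host = os.environ.get("AVALON_APP", "")
        patterns = aov_filter.get(host, [])
        if any(re.match(pattern, first_file) for pattern in patterns):
            return True
        # Multipart EXRs are always treated as reviewable.
        return bool(multipart_exr)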
@@ -730,8 +737,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "resolutionHeight": data.get("resolutionHeight", 1080),
             "multipartExr": data.get("multipartExr", False),
             "jobBatchName": data.get("jobBatchName", ""),
-            "hasReviewableRepresentations": data.get(
-                "hasReviewableRepresentations")
+            "useSequenceForReview": data.get("useSequenceForReview")
         }

         if "prerender" in instance.data["families"]:
@@ -923,12 +929,6 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
         # User is deadline user
         render_job["Props"]["User"] = context.data.get(
             "deadlineUser", getpass.getuser())
-        # Priority is now not handled at all
-
-        if self.deadline_priority:
-            render_job["Props"]["Pri"] = self.deadline_priority
-        else:
-            render_job["Props"]["Pri"] = instance.data.get("priority")

         render_job["Props"]["Env"] = {
             "FTRACK_API_USER": os.environ.get("FTRACK_API_USER"),
@@ -1,11 +1,6 @@
 import os
 from openpype_modules.ftrack.lib import BaseAction, statics_icon
-from avalon import lib as avalonlib
-from openpype.api import (
-    Anatomy,
-    get_project_settings
-)
-from openpype.lib import ApplicationManager
+from openpype.api import Anatomy


 class CreateFolders(BaseAction):
@@ -389,7 +389,8 @@ class PythonInterpreterWidget(QtWidgets.QWidget):

         self._append_lines([openpype_art])

-        self.setStyleSheet(load_stylesheet())
+        self._first_show = True
+        self._splitter_size_ratio = None

         self._init_from_registry()

@@ -416,9 +417,9 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
         self.resize(width, height)

         try:
-            sizes = setting_registry.get_item("splitter_sizes")
-            if len(sizes) == len(self._widgets_splitter.sizes()):
-                self._widgets_splitter.setSizes(sizes)
+            self._splitter_size_ratio = (
+                setting_registry.get_item("splitter_sizes")
+            )

         except ValueError:
             pass
@@ -627,8 +628,29 @@ class PythonInterpreterWidget(QtWidgets.QWidget):
     def showEvent(self, event):
         self._line_check_timer.start()
         super(PythonInterpreterWidget, self).showEvent(event)
+        # First show setup
+        if self._first_show:
+            self._first_show = False
+            self._on_first_show()
+
         self._output_widget.scroll_to_bottom()

+    def _on_first_show(self):
+        # Change stylesheet
+        self.setStyleSheet(load_stylesheet())
+        # Check if splitter size ratio is set
+        # - first store value to local variable and then unset it
+        splitter_size_ratio = self._splitter_size_ratio
+        self._splitter_size_ratio = None
+        # Skip if it is not set
+        if not splitter_size_ratio:
+            return
+
+        # Skip if number of size items does not match the splitter
+        splitters_count = len(self._widgets_splitter.sizes())
+        if len(splitter_size_ratio) == splitters_count:
+            self._widgets_splitter.setSizes(splitter_size_ratio)
+
     def closeEvent(self, event):
         self.save_registry()
         super(PythonInterpreterWidget, self).closeEvent(event)
@@ -2,8 +2,8 @@ import pyblish.api
 from openpype.pipeline import PublishValidationError


-class ValidateContainers(pyblish.api.InstancePlugin):
-    """Validate existence of asset asset documents on instances.
+class ValidateAssetDocs(pyblish.api.InstancePlugin):
+    """Validate existence of asset documents on instances.

     Without asset document it is not possible to publish the instance.

@@ -22,10 +22,10 @@ class ValidateContainers(pyblish.api.InstancePlugin):
             return

         if instance.data.get("assetEntity"):
-            self.log.info("Instance have set asset document in it's data.")
+            self.log.info("Instance has set asset document in its data.")

         else:
             raise PublishValidationError((
-                "Instance \"{}\" don't have set asset"
-                " document which is needed for publishing."
+                "Instance \"{}\" doesn't have asset document "
+                "set which is needed for publishing."
             ).format(instance.data["name"]))
@@ -15,33 +15,6 @@
                 "deadline"
             ]
         },
-        "ProcessSubmittedJobOnFarm": {
-            "enabled": true,
-            "deadline_department": "",
-            "deadline_pool": "",
-            "deadline_group": "",
-            "deadline_chunk_size": 1,
-            "deadline_priority": 50,
-            "publishing_script": "",
-            "skip_integration_repre_list": [],
-            "aov_filter": {
-                "maya": [
-                    ".+(?:\\.|_)([Bb]eauty)(?:\\.|_).*"
-                ],
-                "nuke": [
-                    ".*"
-                ],
-                "aftereffects": [
-                    ".*"
-                ],
-                "celaction": [
-                    ".*"
-                ],
-                "harmony": [
-                    ".*"
-                ]
-            }
-        },
         "MayaSubmitDeadline": {
             "enabled": true,
             "optional": false,
@@ -49,6 +22,8 @@
             "tile_assembler_plugin": "OpenPypeTileAssembler",
             "use_published": true,
             "asset_dependencies": true,
+            "priority": 50,
+            "tile_priority": 50,
             "group": "none",
             "limit": [],
             "jobInfo": {},
@@ -96,6 +71,33 @@
             "group": "",
             "department": "",
             "multiprocess": true
+        },
+        "ProcessSubmittedJobOnFarm": {
+            "enabled": true,
+            "deadline_department": "",
+            "deadline_pool": "",
+            "deadline_group": "",
+            "deadline_chunk_size": 1,
+            "deadline_priority": 50,
+            "publishing_script": "",
+            "skip_integration_repre_list": [],
+            "aov_filter": {
+                "maya": [
+                    ".+(?:\\.|_)([Bb]eauty)(?:\\.|_).*"
+                ],
+                "nuke": [
+                    ".*"
+                ],
+                "aftereffects": [
+                    ".*"
+                ],
+                "celaction": [
+                    ".*"
+                ],
+                "harmony": [
+                    ".*"
+                ]
+            }
         }
     }
 }
@@ -279,6 +279,15 @@
             "tasks": [],
             "template": "{family}{variant}"
         },
+        {
+            "families": [
+                "workfile"
+            ],
+            "hosts": [],
+            "task_types": [],
+            "tasks": [],
+            "template": "{family}{Task}"
+        },
         {
             "families": [
                 "render"
@@ -117,6 +117,16 @@
                 "key": "asset_dependencies",
                 "label": "Use Asset dependencies"
             },
+            {
+                "type": "number",
+                "key": "priority",
+                "label": "Priority"
+            },
+            {
+                "type": "number",
+                "key": "tile_priority",
+                "label": "Tile Assembler Priority"
+            },
             {
                 "type": "text",
                 "key": "group",
@@ -24,16 +24,18 @@ class DBAssert:
             else:
                 args[key] = val

-        msg = None
-        no_of_docs = dbcon.count_documents(args)
-        if expected != no_of_docs:
-            msg = "Not expected no of versions. "\
-                  "Expected {}, found {}".format(expected, no_of_docs)
-
         args.pop("type")
         detail_str = " "
         if args:
-            detail_str = " with {}".format(args)
+            detail_str = " with '{}'".format(args)
+
+        msg = None
+        no_of_docs = dbcon.count_documents(args)
+        if expected != no_of_docs:
+            msg = "Not expected no of '{}'{}."\
+                  "Expected {}, found {}".format(queried_type,
+                                                 detail_str,
+                                                 expected, no_of_docs)

         status = "successful"
         if msg:
|
||||||
print("Comparing count of {}{} {}".format(queried_type,
|
print("Comparing count of {}{} {}".format(queried_type,
|
||||||
detail_str,
|
detail_str,
|
||||||
status))
|
status))
|
||||||
if msg:
|
|
||||||
print(msg)
|
|
||||||
|
|
||||||
return msg
|
return msg
|
||||||
|
|
|
||||||