Merge branch 'develop' of github.com:pypeclub/pype into feature/sync_server_settings_changes

Petr Kalis 2021-05-10 13:32:06 +02:00
commit c039d7fe5d
15 changed files with 179 additions and 124 deletions


@@ -47,6 +47,10 @@ class CreateRender(openpype.api.Creator):
self.data["members"] = [item.id]
self.data["uuid"] = item.id # for SubsetManager
self.data["subset"] = self.data["subset"]\
.replace(stub.PUBLISH_ICON, '')\
.replace(stub.LOADED_ICON, '')
stub.imprint(item, self.data)
stub.set_label_color(item.id, 14) # Cyan options 0 - 16
stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"])


@@ -19,7 +19,6 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"rig",
"camerarig"]
representations = ["ma", "abc", "fbx", "mb"]
tool_names = ["loader"]
label = "Reference"
order = -10


@@ -34,7 +34,7 @@ class ExtractThumbnail(openpype.api.Extractor):
capture_preset = ""
capture_preset = (
instance.context.data["project_settings"]['maya']['publish']['ExtractPlayblast']
instance.context.data["project_settings"]['maya']['publish']['ExtractPlayblast']['capture_preset']
)
try:


@@ -373,21 +373,16 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
prev_node = None
with GN:
connections = list()
if input:
input_name = str(input.name()).replace(" ", "")
# if connected input node was defined
connections.append({
"node": input,
"inputName": input.name()})
prev_node = nuke.createNode(
"Input", "name {}".format(input.name()))
prev_node.hideControlPanel()
"Input", "name {}".format(input_name))
else:
# generic input node connected to nothing
prev_node = nuke.createNode(
"Input", "name {}".format("rgba"))
prev_node.hideControlPanel()
prev_node.hideControlPanel()
# creating pre-write nodes `prenodes`
if prenodes:
for name, klass, properties, set_output_to in prenodes:
@@ -416,18 +411,12 @@ def create_write_node(name, data, input=None, prenodes=None, review=True):
input_node = nuke.createNode(
"Input", "name {}".format(node_name))
input_node.hideControlPanel()
connections.append({
"node": nuke.toNode(node_name),
"inputName": node_name})
now_node.setInput(1, input_node)
elif isinstance(set_output_to, str):
input_node = nuke.createNode(
"Input", "name {}".format(node_name))
input_node.hideControlPanel()
connections.append({
"node": nuke.toNode(set_output_to),
"inputName": set_output_to})
now_node.setInput(0, input_node)
else:


@@ -74,6 +74,9 @@ class CreateImage(openpype.api.Creator):
for group in groups:
long_names = []
group.name = group.name.replace(stub.PUBLISH_ICON, ''). \
replace(stub.LOADED_ICON, '')
if group.long_name:
for directory in group.long_name[::-1]:
name = directory.replace(stub.PUBLISH_ICON, '').\


@@ -194,7 +194,7 @@ def set_plugin_attributes_from_settings(
# map plugin superclass to preset json. Currently supported are load and
# create (avalon.api.Loader and avalon.api.Creator)
plugin_type = None
if superclass.__name__.split(".")[-1] == "Loader":
if superclass.__name__.split(".")[-1] in ("Loader", "SubsetLoader"):
plugin_type = "load"
elif superclass.__name__.split(".")[-1] == "Creator":
plugin_type = "create"


@@ -14,10 +14,15 @@ import avalon.pipeline
from openpype.api import Anatomy
class DeleteOldVersions(api.Loader):
class DeleteOldVersions(api.SubsetLoader):
"""Deletes specific number of old version"""
is_multiple_contexts_compatible = True
sequence_splitter = "__sequence_splitter__"
representations = ["*"]
families = ["*"]
tool_names = ["library_loader"]
label = "Delete Old Versions"
order = 35
@@ -259,9 +264,11 @@ class DeleteOldVersions(api.Loader):
)
if not version_ids:
msg = "Skipping processing. Nothing to delete."
msg = "Skipping processing. Nothing to delete on {}/{}".format(
asset["name"], subset["name"]
)
self.log.info(msg)
self.message(msg)
print(msg)
return
repres = list(self.dbcon.find({
@@ -397,25 +404,30 @@ class DeleteOldVersions(api.Loader):
self.log.error(msg)
self.message(msg)
msg = "Total size of files: " + self.sizeof_fmt(size)
self.log.info(msg)
self.message(msg)
return size
def load(self, context, name=None, namespace=None, options=None):
def load(self, contexts, name=None, namespace=None, options=None):
try:
versions_to_keep = 2
remove_publish_folder = False
if options:
versions_to_keep = options.get(
"versions_to_keep", versions_to_keep
)
remove_publish_folder = options.get(
"remove_publish_folder", remove_publish_folder
)
size = 0
for count, context in enumerate(contexts):
versions_to_keep = 2
remove_publish_folder = False
if options:
versions_to_keep = options.get(
"versions_to_keep", versions_to_keep
)
remove_publish_folder = options.get(
"remove_publish_folder", remove_publish_folder
)
data = self.get_data(context, versions_to_keep)
data = self.get_data(context, versions_to_keep)
self.main(data, remove_publish_folder)
size += self.main(data, remove_publish_folder)
print("Progressing {}/{}".format(count + 1, len(contexts)))
msg = "Total size of files: " + self.sizeof_fmt(size)
self.log.info(msg)
self.message(msg)
except Exception:
self.log.error("Failed to delete versions.", exc_info=True)
@@ -425,6 +437,7 @@ class CalculateOldVersions(DeleteOldVersions):
"""Calculate file size of old versions"""
label = "Calculate Old Versions"
order = 30
tool_names = ["library_loader"]
options = [
qargparse.Integer(
@@ -438,6 +451,9 @@
def main(self, data, remove_publish_folder):
size = 0
if not data:
return size
if remove_publish_folder:
size = self.delete_whole_dir_paths(
data["dir_paths"].values(), delete=False
@@ -447,6 +463,4 @@
data["dir_paths"], data["file_paths_by_dir"], delete=False
)
msg = "Total size of files: " + self.sizeof_fmt(size)
self.log.info(msg)
self.message(msg)
return size


@@ -297,7 +297,14 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
else:
orig_transfers = list(instance.data['transfers'])
template_name = self.template_name_from_instance(instance)
task_name = io.Session.get("AVALON_TASK")
family = self.main_family_from_instance(instance)
key_values = {"families": family, "tasks": task_name}
profile = filter_profiles(self.template_name_profiles, key_values,
logger=self.log)
if profile:
template_name = profile["template_name"]
published_representations = {}
for idx, repre in enumerate(instance.data["representations"]):
@@ -853,68 +860,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
family = instance.data["families"][0]
return family
def template_name_from_instance(self, instance):
template_name = self.default_template_name
if not self.template_name_profiles:
self.log.debug((
"Template name profiles are not set."
" Using default \"{}\""
).format(template_name))
return template_name
# Task name from session?
task_name = io.Session.get("AVALON_TASK")
family = self.main_family_from_instance(instance)
matching_profiles = {}
highest_value = -1
self.log.debug(
"Template name profiles:\n{}".format(self.template_name_profiles)
)
for name, filters in self.template_name_profiles.items():
value = 0
families = filters.get("families")
if families:
if family not in families:
continue
value += 1
tasks = filters.get("tasks")
if tasks:
if task_name not in tasks:
continue
value += 1
if value > highest_value:
matching_profiles = {}
highest_value = value
if value == highest_value:
matching_profiles[name] = filters
if len(matching_profiles) == 1:
template_name = tuple(matching_profiles.keys())[0]
self.log.debug(
"Using template name \"{}\".".format(template_name)
)
elif len(matching_profiles) > 1:
template_name = tuple(matching_profiles.keys())[0]
self.log.warning((
"More than one template profiles matched"
" Family \"{}\" and Task: \"{}\"."
" Using first template name in row \"{}\"."
).format(family, task_name, template_name))
else:
self.log.debug((
"None of template profiles matched"
" Family \"{}\" and Task: \"{}\"."
" Using default template name \"{}\""
).format(family, task_name, template_name))
return template_name
def get_rootless_path(self, anatomy, path):
""" Returns, if possible, path without absolute portion from host
(eg. 'c:\' or '/opt/..')


@@ -11,7 +11,7 @@
"path": "{@folder}/{@file}"
},
"render": {
"folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/render/{subset}/{@version}",
"folder": "{root[work]}/{project[name]}/{hierarchy}/{asset}/publish/{family}/{subset}/{@version}",
"file": "{project[code]}_{asset}_{subset}_{@version}<_{output}><.{@frame}>.{ext}",
"path": "{@folder}/{@file}"
},


@@ -1,7 +1,18 @@
{
"publish": {
"IntegrateHeroVersion": {
"enabled": true
"enabled": true,
"optional": true,
"families": [
"model",
"rig",
"look",
"pointcache",
"animation",
"setdress",
"layout",
"mayaAscii"
]
},
"ExtractJpegEXR": {
"enabled": true,
@@ -116,19 +127,22 @@
]
},
"IntegrateAssetNew": {
"template_name_profiles": {
"publish": {
"template_name_profiles": [
{
"families": [],
"tasks": []
"tasks": [],
"template_name": "publish"
},
"render": {
{
"families": [
"review",
"render",
"prerender"
]
],
"tasks": [],
"template_name": "render"
}
},
],
"subset_grouping_profiles": [
{
"families": [],


@@ -16,6 +16,17 @@
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"type": "boolean",
"key": "optional",
"label": "Optional"
},
{
"key": "families",
"label": "Families",
"type": "list",
"object_type": "text"
}
]
},
@@ -420,9 +431,39 @@
"is_group": true,
"children": [
{
"type": "raw-json",
"type": "list",
"key": "template_name_profiles",
"label": "template_name_profiles"
"label": "Template name profiles",
"use_label_wrap": true,
"object_type": {
"type": "dict",
"children": [
{
"type": "label",
"label": ""
},
{
"key": "families",
"label": "Families",
"type": "list",
"object_type": "text"
},
{
"key": "tasks",
"label": "Task names",
"type": "list",
"object_type": "text"
},
{
"type": "separator"
},
{
"type": "text",
"key": "template_name",
"label": "Template name"
}
]
}
},
{
"type": "list",

(Two new binary image files added, 14 KiB and 19 KiB; content not shown.)

@@ -13,32 +13,45 @@ Project settings can have project specific values. Each new project is using stu
Projects always use default project values unless they have a [project override](../admin_settings#project-overrides) (orange colour). Any changes in the default project may affect all existing projects.
:::
## Profile filters
Many of the settings use the concept of **Profile filters**.
You can define multiple profiles to choose from for different contexts. Each profile's filters are evaluated and the
profile whose filters match the current context best is used.
You can define a profile without any filters and use it as the **default**.
Only **one profile (or none)** will be returned per context.
All context filters are lists which may contain strings or Regular expressions (RegEx).
- **`hosts`** - Host from which publishing was triggered. `["maya", "nuke"]`
- **`families`** - Main family of processed subset. `["plate", "model"]`
- **`tasks`** - Currently processed task. `["modeling", "animation"]`
:::important Filtering
Filters are optional. When multiple profiles match the current context, a profile with a higher number of matched filters has higher priority than a profile without filters.
(E.g. the order in which filters are added doesn't matter, only the precision of the match does.)
:::
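
The selection rule can be illustrated with a short sketch: count one point for every filter a profile explicitly matches, disqualify profiles with a non-matching filter, and pick the highest score. This is only an illustration (hypothetical `select_profile` helper, RegEx support omitted), not the actual OpenPype implementation:

```python
def select_profile(profiles, context):
    """Illustration only: return the profile whose filters match `context` best."""
    best_profile, best_score = None, -1
    for profile in profiles:
        score = 0
        for key, value in context.items():
            allowed = profile.get(key)
            if not allowed:
                # An empty filter matches anything but adds no point.
                continue
            if value not in allowed:
                # A filled filter that does not match disqualifies the profile.
                score = -1
                break
            score += 1
        if score > best_score:
            best_profile, best_score = profile, score
    return best_profile


profiles = [
    {"families": [], "tasks": [], "template_name": "publish"},
    {"families": ["render", "prerender"], "tasks": [], "template_name": "render"},
]
# One explicitly matched filter beats the catch-all default profile.
print(select_profile(profiles, {"families": "render", "tasks": "compositing"}))
```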
## Publish plugins
Publish plugins used across all integrations.
### Extract Review
Plugin responsible for automatic FFmpeg conversion to a variety of formats.
Extract review is using profile filtering to be able to render different outputs for different situations.
Extract review is using [profile filtering](#profile-filters) to be able to render different outputs for different situations.
**Profile filters**
You can define multiple profiles for different contexts. Profile with filters matching the current context the most, is used. You can define profile without filters and use it as **default**. Only **one or none** profile will be processed per instance.
All context filters are lists which may contain strings or Regular expressions (RegEx).
- **`hosts`** - Host from which publishing was triggered. `["maya", "nuke"]`
- **`families`** - Main family of processed instance. `["plate", "model"]`
:::important Filtering
Filters are optional. In case when multiple profiles match current context, profile with higher number of matched filters has higher priority that profile without filters.
:::
Applicable context filters:
- **`hosts`** - Host from which publishing was triggered. `["maya", "nuke"]`
- **`families`** - Main family of processed subset. `["plate", "model"]`
![global_extract_review_profiles](assets/global_extract_review_profiles.png)
**Output Definitions**
A profile may generate multiple outputs from a single input. Each output must define a unique name and an output extension (use the extension without a dot, e.g. **mp4**). All other settings of an output definition are optional.
![global_extract_review_output_defs](assets/global_extract_review_output_defs.png)
@@ -67,3 +80,36 @@ Profile may generate multiple outputs from a single input. Each output must defi
![global_extract_review_letter_box_settings](assets/global_extract_review_letter_box_settings.png)
![global_extract_review_letter_box](assets/global_extract_review_letter_box.png)
### IntegrateAssetNew
Saves information about all published subsets into the DB; published assets are then available to other hosts, tools and tasks.
#### Template name profiles
Allows selecting an [anatomy template](admin_settings_project_anatomy.md#templates) based on the context of the subset being published.
For example, for the `render` profile you might want to publish and store assets in a different location (based on the anatomy settings) than for the `publish` profile.
[Profile filtering](#profile-filters) is used to select the appropriate template for each context of published subsets; a small example follows below the image.
Applicable context filters:
- **`hosts`** - Host from which publishing was triggered. `["maya", "nuke"]`
- **`tasks`** - Current task. `["modeling", "animation"]`
![global_integrate_new_template_name_profile](assets/global_integrate_new_template_name_profile.png)
(This image shows a use case where the `render` anatomy template is used for subsets with families `['review', 'render', 'prerender']` and the `publish` template is chosen for all others.)
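
A minimal sketch of how this resolution can be exercised with the default profiles added in this commit. The `filter_profiles` call mirrors the `IntegrateAssetNew` change shown earlier; its import path and the `"publish"` fallback are assumptions, not confirmed by this diff:

```python
from openpype.lib import filter_profiles  # import path assumed

# Default template name profiles added by this commit.
template_name_profiles = [
    {"families": [], "tasks": [], "template_name": "publish"},
    {
        "families": ["review", "render", "prerender"],
        "tasks": [],
        "template_name": "render",
    },
]

# Same key/value pairs IntegrateAssetNew builds from the instance context.
key_values = {"families": "render", "tasks": "compositing"}

profile = filter_profiles(template_name_profiles, key_values)
template_name = profile["template_name"] if profile else "publish"  # fallback assumed
print(template_name)  # -> "render"
```

Note that this commit also changes the `render` anatomy folder template to use `{family}` instead of a hard-coded `render` directory, so the resolved template groups published files by family.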
#### Subset grouping profiles
Published subsets might be grouped together for cleaner and easier selection in the **[Loader](artist_tools.md#subset-groups)**.
The group name is chosen using [profile filtering](#profile-filters).
Applicable context filters:
- **`families`** - Main family of processed subset. `["plate", "model"]`
- **`hosts`** - Host from which publishing was triggered. `["maya", "nuke"]`
- **`tasks`** - Current task. `["modeling", "animation"]`
![global_integrate_new_subset_group](assets/global_integrate_new_subset_group.png)
(This image shows a use case where only assets published from 'photoshop', for all families and all tasks, are grouped under a capitalized name of the task they were published from.)
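
For completeness, a sketch of what a grouping profile for the Photoshop use case above might look like. Apart from `families` (visible in the settings diff earlier), the key names and the `"{Task}"` group-name template are assumptions for illustration only:

```python
# Hypothetical subset grouping profile; key names other than "families" are assumed.
subset_grouping_profile = {
    "families": [],           # any family
    "hosts": ["photoshop"],   # only publishes triggered from Photoshop
    "tasks": [],              # any task
    "template": "{Task}",     # assumed: group under the capitalized task name
}
```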