Merge branch 'develop' into enhancement/3dsmax-use-custom-modifier-attributes

Ondřej Samohel 2023-06-19 18:37:44 +02:00 committed by GitHub
commit 449cd662dd
32 changed files with 1190 additions and 345 deletions

View file

@ -35,6 +35,7 @@ body:
label: Version
description: What version are you running? Look to OpenPype Tray
options:
- 3.15.11-nightly.3
- 3.15.11-nightly.2
- 3.15.11-nightly.1
- 3.15.10
@ -134,7 +135,6 @@ body:
- 3.14.3-nightly.7
- 3.14.3-nightly.6
- 3.14.3-nightly.5
- 3.14.3-nightly.4
validations:
required: true
- type: dropdown

View file

@ -220,7 +220,6 @@ def new_representation_doc(
"parent": version_id,
"name": name,
"data": data,
# Imprint shortcut to context for performance reasons.
"context": context
}
@ -708,7 +707,11 @@ class OperationsSession(object):
return operation
def create_project(project_name, project_code, library_project=False):
def create_project(
project_name,
project_code,
library_project=False,
):
"""Create project using OpenPype settings.
This project creation function is not validating project document on
@ -752,7 +755,7 @@ def create_project(project_name, project_code, library_project=False):
"name": project_name,
"data": {
"code": project_code,
"library_project": library_project
"library_project": library_project,
},
"schema": CURRENT_PROJECT_SCHEMA
}

View file

@ -35,9 +35,15 @@ class ArnoldStandinLoader(load.LoaderPlugin):
color = "orange"
def load(self, context, name, namespace, options):
if not cmds.pluginInfo("mtoa", query=True, loaded=True):
cmds.loadPlugin("mtoa")
# Create defaultArnoldRenderOptions before creating aiStandin
# which tries to connect it. Since we load the plugin and directly
# create aiStandin without the defaultArnoldRenderOptions,
# we need to create the render options for aiStandin creation.
from mtoa.core import createOptions
createOptions()
# Make sure to load arnold before importing `mtoa.ui.arnoldmenu`
cmds.loadPlugin("mtoa", quiet=True)
import mtoa.ui.arnoldmenu
version = context['version']

View file

@ -2020,11 +2020,11 @@ class WorkfileSettings(object):
# TODO: backward compatibility for old projects - remove later
# perhaps old project overrides is having it set to older version
# with use of `customOCIOConfigPath`
resolved_path = None
if workfile_settings.get("customOCIOConfigPath"):
unresolved_path = workfile_settings["customOCIOConfigPath"]
ocio_paths = unresolved_path[platform.system().lower()]
resolved_path = None
for ocio_p in ocio_paths:
resolved_path = str(ocio_p).format(**os.environ)
if not os.path.exists(resolved_path):
@ -2054,9 +2054,9 @@ class WorkfileSettings(object):
self._root_node["colorManagement"].setValue("OCIO")
# we don't need the key anymore
workfile_settings.pop("customOCIOConfigPath")
workfile_settings.pop("colorManagement")
workfile_settings.pop("OCIO_config")
workfile_settings.pop("customOCIOConfigPath", None)
workfile_settings.pop("colorManagement", None)
workfile_settings.pop("OCIO_config", None)
# then set the rest
for knob, value_ in workfile_settings.items():
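For reference, a minimal standalone sketch (with made-up settings values) of the `dict.pop` default behaviour this hunk switches to:

```python
# Illustrative only: 'pop' with a default never raises when the key is missing.
workfile_settings = {"colorManagement": "OCIO"}

workfile_settings.pop("customOCIOConfigPath", None)  # missing key -> returns None, no error
workfile_settings.pop("colorManagement", None)       # present key -> removed and returned
# workfile_settings.pop("OCIO_config")               # without a default this would raise KeyError
print(workfile_settings)                             # {}
```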

View file

@ -222,7 +222,6 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
"label": subset,
"name": subset,
"family": in_data["family"],
# "version": in_data.get("version", 1),
"frameStart": in_data.get("representations", [None])[0].get(
"frameStart", None
),
@ -232,6 +231,14 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
"families": instance_families
}
)
# Fill version only if 'use_next_available_version' is disabled
# and version is filled in instance data
version = in_data.get("version")
use_next_available_version = in_data.get(
"use_next_available_version", True)
if not use_next_available_version and version is not None:
instance.data["version"] = version
self.log.info("collected instance: {}".format(pformat(instance.data)))
self.log.info("parsing data: {}".format(pformat(in_data)))

View file

@ -1,4 +1,14 @@
from openpype.lib.attribute_definitions import FileDef
from openpype.client import (
get_assets,
get_subsets,
get_last_versions,
)
from openpype.lib.attribute_definitions import (
FileDef,
BoolDef,
NumberDef,
UISeparatorDef,
)
from openpype.lib.transcoding import IMAGE_EXTENSIONS, VIDEO_EXTENSIONS
from openpype.pipeline.create import (
Creator,
@ -94,6 +104,7 @@ class TrayPublishCreator(Creator):
class SettingsCreator(TrayPublishCreator):
create_allow_context_change = True
create_allow_thumbnail = True
allow_version_control = False
extensions = []
@ -101,8 +112,18 @@ class SettingsCreator(TrayPublishCreator):
# Pass precreate data to creator attributes
thumbnail_path = pre_create_data.pop(PRE_CREATE_THUMBNAIL_KEY, None)
# Fill 'version_to_use' if version control is enabled
if self.allow_version_control:
asset_name = data["asset"]
subset_docs_by_asset_id = self._prepare_next_versions(
[asset_name], [subset_name])
version = subset_docs_by_asset_id[asset_name].get(subset_name)
pre_create_data["version_to_use"] = version
data["_previous_last_version"] = version
data["creator_attributes"] = pre_create_data
data["settings_creator"] = True
# Create new instance
new_instance = CreatedInstance(self.family, subset_name, data, self)
@ -111,7 +132,158 @@ class SettingsCreator(TrayPublishCreator):
if thumbnail_path:
self.set_instance_thumbnail_path(new_instance.id, thumbnail_path)
def _prepare_next_versions(self, asset_names, subset_names):
"""Prepare next versions for given asset and subset names.
Todos:
Expect combination of subset names by asset name to avoid
unnecessary server calls for unused subsets.
Args:
asset_names (Iterable[str]): Asset names.
subset_names (Iterable[str]): Subset names.
Returns:
dict[str, dict[str, int]]: Last versions by asset
and subset names.
"""
# Prepare all versions for all combinations to '1'
subset_docs_by_asset_id = {
asset_name: {
subset_name: 1
for subset_name in subset_names
}
for asset_name in asset_names
}
if not asset_names or not subset_names:
return subset_docs_by_asset_id
asset_docs = get_assets(
self.project_name,
asset_names=asset_names,
fields=["_id", "name"]
)
asset_names_by_id = {
asset_doc["_id"]: asset_doc["name"]
for asset_doc in asset_docs
}
subset_docs = list(get_subsets(
self.project_name,
asset_ids=asset_names_by_id.keys(),
subset_names=subset_names,
fields=["_id", "name", "parent"]
))
subset_ids = {subset_doc["_id"] for subset_doc in subset_docs}
last_versions = get_last_versions(
self.project_name,
subset_ids,
fields=["name", "parent"])
for subset_doc in subset_docs:
asset_id = subset_doc["parent"]
asset_name = asset_names_by_id[asset_id]
subset_name = subset_doc["name"]
subset_id = subset_doc["_id"]
last_version = last_versions.get(subset_id)
version = 0
if last_version is not None:
version = last_version["name"]
subset_docs_by_asset_id[asset_name][subset_name] += version
return subset_docs_by_asset_id
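A small self-contained sketch of the next-version arithmetic above; the subset names and last published versions are illustrative only:

```python
# Every asset/subset combination starts at 1, then the last published version
# is added on top, so the result is the next free version number.
subset_names = ["modelMain", "renderMain"]
next_versions = {subset_name: 1 for subset_name in subset_names}

last_versions = {"modelMain": 3}  # 'renderMain' has no published version yet
for subset_name, last_version in last_versions.items():
    next_versions[subset_name] += last_version

print(next_versions)  # {'modelMain': 4, 'renderMain': 1}
```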
def _fill_next_versions(self, instances_data):
"""Fill next version for instances.
Instances also store the previously computed next version so it is
possible to recognize whether the user entered a different version.
If the user did not change the version, or set it to '0', the next
version is refreshed from the current database state.
"""
filtered_instance_data = []
for instance in instances_data:
previous_last_version = instance.get("_previous_last_version")
creator_attributes = instance["creator_attributes"]
use_next_version = creator_attributes.get(
"use_next_version", True)
version = creator_attributes.get("version_to_use", 0)
if (
use_next_version
or version == 0
or version == previous_last_version
):
filtered_instance_data.append(instance)
asset_names = {
instance["asset"]
for instance in filtered_instance_data}
subset_names = {
instance["subset"]
for instance in filtered_instance_data}
subset_docs_by_asset_id = self._prepare_next_versions(
asset_names, subset_names
)
for instance in filtered_instance_data:
asset_name = instance["asset"]
subset_name = instance["subset"]
version = subset_docs_by_asset_id[asset_name][subset_name]
instance["creator_attributes"]["version_to_use"] = version
instance["_previous_last_version"] = version
def collect_instances(self):
"""Collect instances from host.
Overridden to manage version control attributes. If version control is
disabled, the attributes are removed from instances; if it is enabled,
the next versions are filled in.
"""
instances_by_identifier = cache_and_get_instances(
self, SHARED_DATA_KEY, list_instances
)
instances = instances_by_identifier[self.identifier]
if not instances:
return
if self.allow_version_control:
self._fill_next_versions(instances)
for instance_data in instances:
# Make sure that there is no data related to version control
# if plugin does not support it
if not self.allow_version_control:
instance_data.pop("_previous_last_version", None)
creator_attributes = instance_data["creator_attributes"]
creator_attributes.pop("version_to_use", None)
creator_attributes.pop("use_next_version", None)
instance = CreatedInstance.from_existing(instance_data, self)
self._add_instance_to_context(instance)
def get_instance_attr_defs(self):
defs = self.get_pre_create_attr_defs()
if self.allow_version_control:
defs += [
UISeparatorDef(),
BoolDef(
"use_next_version",
default=True,
label="Use next version",
),
NumberDef(
"version_to_use",
default=1,
minimum=0,
maximum=999,
label="Version to use",
)
]
return defs
def get_pre_create_attr_defs(self):
# Use same attributes as for instance attributes
return [
FileDef(
"representation_files",
@ -132,10 +304,6 @@ class SettingsCreator(TrayPublishCreator):
)
]
def get_pre_create_attr_defs(self):
# Use same attributes as for instance attrobites
return self.get_instance_attr_defs()
@classmethod
def from_settings(cls, item_data):
identifier = item_data["identifier"]
@ -155,6 +323,8 @@ class SettingsCreator(TrayPublishCreator):
"extensions": item_data["extensions"],
"allow_sequences": item_data["allow_sequences"],
"allow_multiple_items": item_data["allow_multiple_items"],
"default_variants": item_data["default_variants"]
"allow_version_control": item_data.get(
"allow_version_control", False),
"default_variants": item_data["default_variants"],
}
)

View file

@ -47,6 +47,8 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin):
"Created temp staging directory for instance {}. {}"
).format(instance_label, tmp_folder))
self._fill_version(instance, instance_label)
# Store filepaths for validation of their existence
source_filepaths = []
# Make sure there are no representations with same name
@ -93,6 +95,28 @@ class CollectSettingsSimpleInstances(pyblish.api.InstancePlugin):
)
)
def _fill_version(self, instance, instance_label):
"""Fill instance version under which will be instance integrated.
Instance must have set 'use_next_version' to 'False'
and 'version_to_use' to version to use.
Args:
instance (pyblish.api.Instance): Instance to fill version for.
instance_label (str): Label of instance to fill version for.
"""
creator_attributes = instance.data["creator_attributes"]
use_next_version = creator_attributes.get("use_next_version", True)
# If 'version_to_use' is '0' it means that next version should be used
version_to_use = creator_attributes.get("version_to_use", 0)
if use_next_version or not version_to_use:
return
instance.data["version"] = version_to_use
self.log.debug(
"Version for instance \"{}\" was set to \"{}\"".format(
instance_label, version_to_use))
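A hedged, standalone sketch of when an explicit version would be filled; the attribute names follow the creator attributes above, the values are made up:

```python
# Mirrors the condition above: an explicit version is used only when
# 'use_next_version' is disabled and 'version_to_use' is non-zero.
creator_attributes = {"use_next_version": False, "version_to_use": 7}

use_next_version = creator_attributes.get("use_next_version", True)
version_to_use = creator_attributes.get("version_to_use", 0)
if not use_next_version and version_to_use:
    print("explicit version:", version_to_use)   # explicit version: 7
else:
    print("next available version will be used")
```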
def _create_main_representations(
self,
instance,

View file

@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
<error id="main">
<title>Version already exists</title>
<description>
## Version already exists
Version {version} you have set on instance '{subset_name}' under '{asset_name}' already exists. This validation is enabled by default to prevent accidental override of existing versions.
### How to repair?
- Click on 'Repair' action -> this will change version to next available.
- Disable validation on the instance if you are sure you want to override the version.
- Reset publishing and manually change the version number.
</description>
</error>
</root>

View file

@ -0,0 +1,57 @@
import pyblish.api
from openpype.pipeline.publish import (
ValidateContentsOrder,
PublishXmlValidationError,
OptionalPyblishPluginMixin,
RepairAction,
)
class ValidateExistingVersion(
OptionalPyblishPluginMixin,
pyblish.api.InstancePlugin
):
label = "Validate Existing Version"
order = ValidateContentsOrder
hosts = ["traypublisher"]
actions = [RepairAction]
settings_category = "traypublisher"
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
version = instance.data.get("version")
if version is None:
return
last_version = instance.data.get("latestVersion")
if last_version is None or last_version < version:
return
subset_name = instance.data["subset"]
msg = "Version {} already exists for subset {}.".format(
version, subset_name)
formatting_data = {
"subset_name": subset_name,
"asset_name": instance.data["asset"],
"version": version
}
raise PublishXmlValidationError(
self, msg, formatting_data=formatting_data)
@classmethod
def repair(cls, instance):
create_context = instance.context.data["create_context"]
created_instance = create_context.get_instance_by_id(
instance.data["instance_id"])
creator_attributes = created_instance["creator_attributes"]
# Disable version override
creator_attributes["use_next_version"] = True
create_context.save_changes()
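A self-contained sketch of the conflict check performed above; the version numbers are illustrative:

```python
# A requested version conflicts when it is not greater than the latest
# published version (None on either side means there is nothing to check).
def version_exists(version, latest_version):
    if version is None or latest_version is None:
        return False
    return latest_version >= version

print(version_exists(3, 5))  # True  -> would raise the validation error
print(version_exists(6, 5))  # False -> publish can continue
```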

View file

@ -113,12 +113,19 @@ def pack_project(
project_name
))
if only_documents and not destination_dir:
raise ValueError((
"Destination directory must be defined"
" when only documents should be packed."
))
root_path = None
source_root = {}
project_source_path = None
if not only_documents:
roots = project_doc["config"]["roots"]
# Determine root directory of project
source_root = None
source_root_name = None
for root_name, root_value in roots.items():
if source_root is not None:
@ -141,6 +148,11 @@ def pack_project(
if not destination_dir:
destination_dir = root_path
if not destination_dir:
raise ValueError(
"Project {} does not have any roots.".format(project_name)
)
destination_dir = os.path.normpath(destination_dir)
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)

View file

@ -138,7 +138,8 @@ class ClockifyModule(
"publish": [],
"create": [],
"load": [],
"actions": []
"actions": [],
"inventory": []
}

View file

@ -740,15 +740,16 @@ class ModulesManager:
Unknown keys are logged out.
Returns:
dict: Output is dictionary with keys "publish", "create", "load"
and "actions" each containing list of paths.
dict: Output is dictionary with keys "publish", "create", "load",
"actions" and "inventory" each containing list of paths.
"""
# Output structure
output = {
"publish": [],
"create": [],
"load": [],
"actions": []
"actions": [],
"inventory": []
}
unknown_keys_by_module = {}
for module in self.get_enabled_modules():
@ -853,6 +854,21 @@ class ModulesManager:
host_name
)
def collect_inventory_action_paths(self, host_name):
"""Helper to collect load plugin paths from modules.
Args:
host_name (str): For which host are load plugins meant.
Returns:
list: List of pyblish plugin paths.
"""
return self._collect_plugin_paths(
"get_inventory_action_paths",
host_name
)
def get_host_module(self, host_name):
"""Find host module by host name.

View file

@ -59,7 +59,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
render_path).replace("\\", "/")
instance.data["publishJobState"] = "Suspended"
instance.context.data['ftrackStatus'] = "Render"
# adding 2d render specific family for version identification in Loader
instance.data["families"] = ["render2d"]

View file

@ -109,8 +109,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
for status in asset_version_statuses
}
self._set_task_status(instance, project_entity, task_entity, session)
# Prepare AssetTypes
asset_types_by_short = self._ensure_asset_types_exists(
session, component_list
@ -180,45 +178,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
if asset_version not in instance.data[asset_versions_key]:
instance.data[asset_versions_key].append(asset_version)
def _set_task_status(self, instance, project_entity, task_entity, session):
if not project_entity:
self.log.info("Task status won't be set, project is not known.")
return
if not task_entity:
self.log.info("Task status won't be set, task is not known.")
return
status_name = instance.context.data.get("ftrackStatus")
if not status_name:
self.log.info("Ftrack status name is not set.")
return
self.log.debug(
"Ftrack status name will be (maybe) set to \"{}\"".format(
status_name
)
)
project_schema = project_entity["project_schema"]
task_statuses = project_schema.get_statuses(
"Task", task_entity["type_id"]
)
task_statuses_by_low_name = {
status["name"].lower(): status for status in task_statuses
}
status = task_statuses_by_low_name.get(status_name.lower())
if not status:
self.log.warning((
"Task status \"{}\" won't be set,"
" status is now allowed on task type \"{}\"."
).format(status_name, task_entity["type"]["name"]))
return
self.log.info("Setting task status to \"{}\"".format(status_name))
task_entity["status"] = status
session.commit()
def _fill_component_locations(self, session, component_list):
components_by_location_name = collections.defaultdict(list)
components_by_location_id = collections.defaultdict(list)

View file

@ -1,150 +0,0 @@
import pyblish.api
from openpype.lib import filter_profiles
class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin):
"""Change task status when should be published on farm.
Instance which has set "farm" key in data to 'True' is considered as will
be rendered on farm thus it's status should be changed.
"""
order = pyblish.api.IntegratorOrder + 0.48
label = "Integrate Ftrack Farm Status"
farm_status_profiles = []
def process(self, context):
# Quick end
if not self.farm_status_profiles:
project_name = context.data["projectName"]
self.log.info((
"Status profiles are not filled for project \"{}\". Skipping"
).format(project_name))
return
filtered_instances = self.filter_instances(context)
instances_with_status_names = self.get_instances_with_statuse_names(
context, filtered_instances
)
if instances_with_status_names:
self.fill_statuses(context, instances_with_status_names)
def filter_instances(self, context):
filtered_instances = []
for instance in context:
# Skip disabled instances
if instance.data.get("publish") is False:
continue
subset_name = instance.data["subset"]
msg_start = "Skipping instance {}.".format(subset_name)
if not instance.data.get("farm"):
self.log.debug(
"{} Won't be rendered on farm.".format(msg_start)
)
continue
task_entity = instance.data.get("ftrackTask")
if not task_entity:
self.log.debug(
"{} Does not have filled task".format(msg_start)
)
continue
filtered_instances.append(instance)
return filtered_instances
def get_instances_with_statuse_names(self, context, instances):
instances_with_status_names = []
for instance in instances:
family = instance.data["family"]
subset_name = instance.data["subset"]
task_entity = instance.data["ftrackTask"]
host_name = context.data["hostName"]
task_name = task_entity["name"]
task_type = task_entity["type"]["name"]
status_profile = filter_profiles(
self.farm_status_profiles,
{
"hosts": host_name,
"task_types": task_type,
"task_names": task_name,
"families": family,
"subsets": subset_name,
},
logger=self.log
)
if not status_profile:
# There already is log in 'filter_profiles'
continue
status_name = status_profile["status_name"]
if status_name:
instances_with_status_names.append((instance, status_name))
return instances_with_status_names
def fill_statuses(self, context, instances_with_status_names):
# Prepare available task statuses on the project
project_name = context.data["projectName"]
session = context.data["ftrackSession"]
project_entity = session.query((
"select project_schema from Project where full_name is \"{}\""
).format(project_name)).one()
project_schema = project_entity["project_schema"]
task_type_ids = set()
for item in instances_with_status_names:
instance, _ = item
task_entity = instance.data["ftrackTask"]
task_type_ids.add(task_entity["type"]["id"])
task_statuses_by_type_id = {
task_type_id: project_schema.get_statuses("Task", task_type_id)
for task_type_id in task_type_ids
}
# Keep track if anything has changed
skipped_status_names = set()
status_changed = False
for item in instances_with_status_names:
instance, status_name = item
task_entity = instance.data["ftrackTask"]
task_statuses = task_statuses_by_type_id[task_entity["type"]["id"]]
status_name_low = status_name.lower()
status_id = None
status_name = None
# Skip if status name was already tried to be found
for status in task_statuses:
if status["name"].lower() == status_name_low:
status_id = status["id"]
status_name = status["name"]
break
if status_id is None:
if status_name_low not in skipped_status_names:
skipped_status_names.add(status_name_low)
joined_status_names = ", ".join({
'"{}"'.format(status["name"])
for status in task_statuses
})
self.log.warning((
"Status \"{}\" is not available on project \"{}\"."
" Available statuses are {}"
).format(status_name, project_name, joined_status_names))
continue
# Change task status id
if status_id != task_entity["status_id"]:
task_entity["status_id"] = status_id
status_changed = True
path = "/".join([
item["name"]
for item in task_entity["link"]
])
self.log.debug("Set status \"{}\" to \"{}\"".format(
status_name, path
))
if status_changed:
session.commit()

View file

@ -0,0 +1,433 @@
import copy
import pyblish.api
from openpype.lib import filter_profiles
def create_chunks(iterable, chunk_size=None):
"""Separate iterable into multiple chunks by size.
Args:
iterable(list|tuple|set): Object that will be separated into chunks.
chunk_size(int): Size of one chunk. Default value is 200.
Returns:
list[tuple]: Chunked items.
"""
chunks = []
tupled_iterable = tuple(iterable)
if not tupled_iterable:
return chunks
iterable_size = len(tupled_iterable)
if chunk_size is None:
chunk_size = 200
if chunk_size < 1:
chunk_size = 1
for idx in range(0, iterable_size, chunk_size):
chunks.append(tupled_iterable[idx:idx + chunk_size])
return chunks
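A standalone usage sketch of the chunking behaviour; `create_chunks` is re-stated here purely for illustration:

```python
# Simplified re-statement of the function above so the example runs on its own.
def create_chunks(iterable, chunk_size=None):
    items = tuple(iterable)
    if chunk_size is None:
        chunk_size = 200
    if chunk_size < 1:
        chunk_size = 1
    return [
        items[idx:idx + chunk_size]
        for idx in range(0, len(items), chunk_size)
    ]

task_ids = ["task-{}".format(idx) for idx in range(450)]
print([len(chunk) for chunk in create_chunks(task_ids)])  # [200, 200, 50]
```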
class CollectFtrackTaskStatuses(pyblish.api.ContextPlugin):
"""Collect available task statuses on the project.
This is preparation for integration of task statuses.
Requirements:
ftrackSession (ftrack_api.Session): Prepared ftrack session.
Provides:
ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
task statuses on project by task type id.
ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
statuses by task id. Status on task can be set only once.
Value should be a name of status.
"""
# After 'CollectFtrackApi'
order = pyblish.api.CollectorOrder + 0.4992
label = "Collect Ftrack Task Statuses"
settings_category = "ftrack"
def process(self, context):
ftrack_session = context.data("ftrackSession")
if ftrack_session is None:
self.log.info("Ftrack session is not created.")
return
# Prepare available task statuses on the project
project_name = context.data["projectName"]
project_entity = ftrack_session.query((
"select project_schema from Project where full_name is \"{}\""
).format(project_name)).one()
project_schema = project_entity["project_schema"]
task_type_ids = {
task_type["id"]
for task_type in ftrack_session.query("select id from Type").all()
}
task_statuses_by_type_id = {
task_type_id: project_schema.get_statuses("Task", task_type_id)
for task_type_id in task_type_ids
}
context.data["ftrackTaskStatuses"] = task_statuses_by_type_id
context.data["ftrackStatusByTaskId"] = {}
self.log.info("Collected ftrack task statuses.")
class IntegrateFtrackStatusBase(pyblish.api.InstancePlugin):
"""Base plugin for status collection.
Requirements:
projectName (str): Name of the project.
hostName (str): Name of the host.
ftrackSession (ftrack_api.Session): Prepared ftrack session.
ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
task statuses on project by task type id.
ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
statuses by task id. Status on task can be set only once.
Value should be a name of status.
"""
active = False
settings_key = None
status_profiles = []
@classmethod
def apply_settings(cls, project_settings):
settings_key = cls.settings_key
if settings_key is None:
settings_key = cls.__name__
try:
settings = project_settings["ftrack"]["publish"][settings_key]
except KeyError:
return
for key, value in settings.items():
setattr(cls, key, value)
def process(self, instance):
context = instance.context
# No profiles -> skip
profiles = self.get_status_profiles()
if not profiles:
project_name = context.data["projectName"]
self.log.info((
"Status profiles are not filled for project \"{}\". Skipping"
).format(project_name))
return
# Task statuses were not collected -> skip
task_statuses_by_type_id = context.data.get("ftrackTaskStatuses")
if not task_statuses_by_type_id:
self.log.info(
"Ftrack task statuses are not collected. Skipping.")
return
self.prepare_status_names(context, instance, profiles)
def get_status_profiles(self):
"""List of profiles to determine status name.
Example profile item:
{
"host_names": ["nuke"],
"task_types": ["Compositing"],
"task_names": ["Comp"],
"families": ["render"],
"subset_names": ["renderComp"],
"status_name": "Rendering",
}
Returns:
list[dict[str, Any]]: List of profiles.
"""
return self.status_profiles
def prepare_status_names(self, context, instance, profiles):
if not self.is_valid_instance(context, instance):
return
filter_data = self.get_profile_filter_data(context, instance)
status_profile = filter_profiles(
profiles,
filter_data,
logger=self.log
)
if not status_profile:
return
status_name = status_profile["status_name"]
if status_name:
self.fill_status(context, instance, status_name)
def get_profile_filter_data(self, context, instance):
task_entity = instance.data["ftrackTask"]
return {
"host_names": context.data["hostName"],
"task_types": task_entity["type"]["name"],
"task_names": task_entity["name"],
"families": instance.data["family"],
"subset_names": instance.data["subset"],
}
def is_valid_instance(self, context, instance):
"""Filter instances that should be processed.
Ignore instances that are not enabled for publishing or don't have
filled task. Also skip instances with tasks that already have defined
status.
Subclasses should implement any additional filtering specific to their logic.
Args:
context (pyblish.api.Context): Pyblish context.
instance (pyblish.api.Instance): Instance to process.
Returns:
bool: True if the instance should be processed.
"""
ftrack_status_by_task_id = context.data["ftrackStatusByTaskId"]
# Skip disabled instances
if instance.data.get("publish") is False:
return False
task_entity = instance.data.get("ftrackTask")
if not task_entity:
self.log.debug(
"Skipping instance Does not have filled task".format(
instance.data["subset"]))
return False
task_id = task_entity["id"]
if task_id in ftrack_status_by_task_id:
self.log.debug("Status for task {} was already defined".format(
task_entity["name"]
))
return False
return True
def fill_status(self, context, instance, status_name):
"""Fill status for instance task.
If the task already has a status set, it will be skipped.
Args:
context (pyblish.api.Context): Pyblish context.
instance (pyblish.api.Instance): Pyblish instance.
status_name (str): Name of status to set.
"""
task_entity = instance.data["ftrackTask"]
task_id = task_entity["id"]
ftrack_status_by_task_id = context.data["ftrackStatusByTaskId"]
if task_id in ftrack_status_by_task_id:
self.log.debug("Status for task {} was already defined".format(
task_entity["name"]
))
return
ftrack_status_by_task_id[task_id] = status_name
self.log.info((
"Task {} will be set to \"{}\" status."
).format(task_entity["name"], status_name))
class IntegrateFtrackFarmStatus(IntegrateFtrackStatusBase):
"""Collect task status names for instances that are sent to farm.
An instance that has the "farm" key in its data set to 'True' is considered
to be rendered on farm, thus its status should be changed.
Requirements:
projectName (str): Name of the project.
hostName (str): Name of the host.
ftrackSession (ftrack_api.Session): Prepared ftrack session.
ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
task statuses on project by task type id.
ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
statuses by task id. Status on task can be set only once.
Value should be a name of status.
"""
order = pyblish.api.IntegratorOrder + 0.48
label = "Ftrack Task Status To Farm Status"
active = True
farm_status_profiles = []
status_profiles = None
def is_valid_instance(self, context, instance):
if not instance.data.get("farm"):
self.log.debug("{} Won't be rendered on farm.".format(
instance.data["subset"]
))
return False
return super(IntegrateFtrackFarmStatus, self).is_valid_instance(
context, instance)
def get_status_profiles(self):
if self.status_profiles is None:
profiles = copy.deepcopy(self.farm_status_profiles)
for profile in profiles:
profile["host_names"] = profile.pop("hosts")
profile["subset_names"] = profile.pop("subsets")
self.status_profiles = profiles
return self.status_profiles
class IntegrateFtrackLocalStatus(IntegrateFtrackStatusBase):
"""Collect task status names for instances that are published locally.
Instances that have the "farm" key in their data set to 'True' are skipped
here, as their status is handled by the farm status plugin.
Requirements:
projectName (str): Name of the project.
hostName (str): Name of the host.
ftrackSession (ftrack_api.Session): Prepared ftrack session.
ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
task statuses on project by task type id.
ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
statuses by task id. Status on task can be set only once.
Value should be a name of status.
"""
order = IntegrateFtrackFarmStatus.order + 0.001
label = "Ftrack Task Status Local Publish"
active = True
targets = ["local"]
settings_key = "ftrack_task_status_local_publish"
def is_valid_instance(self, context, instance):
if instance.data.get("farm"):
self.log.debug("{} Will be rendered on farm.".format(
instance.data["subset"]
))
return False
return super(IntegrateFtrackLocalStatus, self).is_valid_instance(
context, instance)
class IntegrateFtrackOnFarmStatus(IntegrateFtrackStatusBase):
"""Collect task status names for instances that are published on farm.
Requirements:
projectName (str): Name of the project.
hostName (str): Name of the host.
ftrackSession (ftrack_api.Session): Prepared ftrack session.
ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
task statuses on project by task type id.
ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
statuses by task id. Status on task can be set only once.
Value should be a name of status.
"""
order = IntegrateFtrackLocalStatus.order + 0.001
label = "Ftrack Task Status On Farm Status"
active = True
targets = ["farm"]
settings_key = "ftrack_task_status_on_farm_publish"
class IntegrateFtrackTaskStatus(pyblish.api.ContextPlugin):
# Use order of Integrate Ftrack Api plugin and offset it before or after
base_order = pyblish.api.IntegratorOrder + 0.499
# By default is after Integrate Ftrack Api
order = base_order + 0.0001
label = "Integrate Ftrack Task Status"
@classmethod
def apply_settings(cls, project_settings):
"""Apply project settings to plugin.
Args:
project_settings (dict[str, Any]): Project settings.
"""
settings = (
project_settings["ftrack"]["publish"]["IntegrateFtrackTaskStatus"]
)
diff = 0.001
if not settings["after_version_statuses"]:
diff = -diff
cls.order = cls.base_order + diff
def process(self, context):
task_statuses_by_type_id = context.data.get("ftrackTaskStatuses")
if not task_statuses_by_type_id:
self.log.info("Ftrack task statuses are not collected. Skipping.")
return
status_by_task_id = self._get_status_by_task_id(context)
if not status_by_task_id:
self.log.info("No statuses to set. Skipping.")
return
ftrack_session = context.data["ftrackSession"]
task_entities = self._get_task_entities(
ftrack_session, status_by_task_id)
for task_entity in task_entities:
task_path = "/".join([
item["name"] for item in task_entity["link"]
])
task_id = task_entity["id"]
type_id = task_entity["type_id"]
new_status = None
status_name = status_by_task_id[task_id]
self.log.debug(
"Status to set {} on task {}.".format(status_name, task_path))
status_name_low = status_name.lower()
available_statuses = task_statuses_by_type_id[type_id]
for status in available_statuses:
if status["name"].lower() == status_name_low:
new_status = status
break
if new_status is None:
joined_statuses = ", ".join([
"'{}'".format(status["name"])
for status in available_statuses
])
self.log.debug((
"Status '{}' was not found in available statuses: {}."
).format(status_name, joined_statuses))
continue
if task_entity["status_id"] != new_status["id"]:
task_entity["status_id"] = new_status["id"]
self.log.debug("Changing status of task '{}' to '{}'".format(
task_path, status_name
))
ftrack_session.commit()
def _get_status_by_task_id(self, context):
status_by_task_id = context.data["ftrackStatusByTaskId"]
return {
task_id: status_name
for task_id, status_name in status_by_task_id.items()
if status_name
}
def _get_task_entities(self, ftrack_session, status_by_task_id):
task_entities = []
for chunk_ids in create_chunks(status_by_task_id.keys()):
joined_ids = ",".join(
['"{}"'.format(task_id) for task_id in chunk_ids]
)
task_entities.extend(ftrack_session.query((
"select id, type_id, status_id, link from Task"
" where id in ({})"
).format(joined_ids)).all())
return task_entities
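An illustrative sketch of how the chunked `id in (...)` filter above is assembled; the ids are made up:

```python
# Quote each id and join them into a single filter expression for one chunk.
task_ids = ["a1b2", "c3d4", "e5f6"]
joined_ids = ",".join('"{}"'.format(task_id) for task_id in task_ids)
query = (
    "select id, type_id, status_id, link from Task"
    " where id in ({})"
).format(joined_ids)
print(query)
```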

View file

@ -63,7 +63,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"""
order = pyblish.api.IntegratorOrder - 0.04
label = 'Integrate Hierarchy To Ftrack'
label = "Integrate Hierarchy To Ftrack"
families = ["shot"]
hosts = [
"hiero",
@ -94,14 +94,13 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
"Project \"{}\" was not found on ftrack.".format(project_name)
)
self.context = context
self.session = session
self.ft_project = project
self.task_types = self.get_all_task_types(project)
self.task_statuses = self.get_task_statuses(project)
# import ftrack hierarchy
self.import_to_ftrack(project_name, hierarchy_context)
self.import_to_ftrack(context, project_name, hierarchy_context)
def query_ftrack_entitites(self, session, ft_project):
project_id = ft_project["id"]
@ -227,7 +226,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
return output
def import_to_ftrack(self, project_name, hierarchy_context):
def import_to_ftrack(self, context, project_name, hierarchy_context):
# Pre-query hierarchical custom attributes
hier_attrs = get_pype_attr(self.session)[1]
hier_attr_by_key = {
@ -258,7 +257,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
self.session, matching_entities, hier_attrs)
# Get ftrack api module (as they are different per python version)
ftrack_api = self.context.data["ftrackPythonModule"]
ftrack_api = context.data["ftrackPythonModule"]
# Use queue of hierarchy items to process
import_queue = collections.deque()
@ -292,7 +291,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
# CUSTOM ATTRIBUTES
custom_attributes = entity_data.get('custom_attributes', {})
instances = []
for instance in self.context:
for instance in context:
instance_asset_name = instance.data.get("asset")
if (
instance_asset_name
@ -369,6 +368,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
if task_name:
instances_by_task_name[task_name.lower()].append(instance)
ftrack_status_by_task_id = context.data["ftrackStatusByTaskId"]
tasks = entity_data.get('tasks', [])
existing_tasks = []
tasks_to_create = []
@ -389,11 +389,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
for task_name, task_type in tasks_to_create:
task_entity = self.create_task(
name=task_name,
task_type=task_type,
parent=entity
task_name,
task_type,
entity,
ftrack_status_by_task_id
)
for instance in instances_by_task_name[task_name.lower()]:
instance.data["ftrackTask"] = task_entity
@ -481,7 +481,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
for status in task_workflow_statuses
}
def create_task(self, name, task_type, parent):
def create_task(self, name, task_type, parent, ftrack_status_by_task_id):
filter_data = {
"task_names": name,
"task_types": task_type
@ -491,12 +491,14 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
filter_data
)
status_id = None
status_name = None
if profile:
status_name = profile["status_name"]
status_name_low = status_name.lower()
for _status_id, status in self.task_statuses.items():
if status["name"].lower() == status_name_low:
status_id = _status_id
status_name = status["name"]
break
if status_id is None:
@ -523,6 +525,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
self.session._configure_locations()
six.reraise(tp, value, tb)
if status_id is not None:
ftrack_status_by_task_id[task["id"]] = None
return task
def _get_active_assets(self, context):

View file

@ -33,8 +33,8 @@ class OpenPypeInterface:
class IPluginPaths(OpenPypeInterface):
"""Module has plugin paths to return.
Expected result is dictionary with keys "publish", "create", "load" or
"actions" and values as list or string.
Expected result is dictionary with keys "publish", "create", "load",
"actions" or "inventory" and values as list or string.
{
"publish": ["path/to/publish_plugins"]
}
@ -109,6 +109,21 @@ class IPluginPaths(OpenPypeInterface):
return self._get_plugin_paths_by_type("publish")
def get_inventory_action_paths(self, host_name):
"""Receive inventory action paths.
Give addons ability to add inventory action plugin paths.
Notes:
Default implementation uses 'get_plugin_paths' and always returns
all inventory action plugin paths.
Args:
host_name (str): For which host are the plugins meant.
"""
return self._get_plugin_paths_by_type("inventory")
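A hypothetical addon sketch showing how the new 'inventory' key would be exposed through `get_plugin_paths`; the class name and paths are invented for illustration:

```python
# Hypothetical addon: returns plugin paths by type, including the new
# 'inventory' key that get_inventory_action_paths picks up.
class MyStudioAddon:
    def get_plugin_paths(self):
        return {
            "publish": ["/studio/addon/plugins/publish"],
            "inventory": ["/studio/addon/plugins/inventory"],
        }

print(MyStudioAddon().get_plugin_paths()["inventory"])
```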
class ILaunchHookPaths(OpenPypeInterface):
"""Module has launch hook paths to return.
@ -395,13 +410,11 @@ class ITrayService(ITrayModule):
class ISettingsChangeListener(OpenPypeInterface):
"""Module has plugin paths to return.
"""Module tries to listen to settings changes.
Only settings changes in the current process are propagated.
Changes made in other processes or machines won't trigger the callbacks.
Expected result is dictionary with keys "publish", "create", "load" or
"actions" and values as list or string.
{
"publish": ["path/to/publish_plugins"]
}
"""
@abstractmethod

View file

@ -312,7 +312,8 @@ def get_views_data_subprocess(config_path):
def get_imageio_config(
project_name, host_name,
project_name,
host_name,
project_settings=None,
anatomy_data=None,
anatomy=None
@ -325,12 +326,9 @@ def get_imageio_config(
Args:
project_name (str): project name
host_name (str): host name
project_settings (dict, optional): project settings.
Defaults to None.
anatomy_data (dict, optional): anatomy formatting data.
Defaults to None.
anatomy (lib.Anatomy, optional): Anatomy object.
Defaults to None.
project_settings (Optional[dict]): Project settings.
anatomy_data (Optional[dict]): anatomy formatting data.
anatomy (Optional[Anatomy]): Anatomy object.
Returns:
dict: config path data or empty dict
@ -345,37 +343,36 @@ def get_imageio_config(
formatting_data = deepcopy(anatomy_data)
# add project roots to anatomy data
# Add project roots to anatomy data
formatting_data["root"] = anatomy.roots
formatting_data["platform"] = platform.system().lower()
# get colorspace settings
# check if global settings group is having activate_global_color_management
# key at all. If it does't then default it to False
# this is for backward compatibility only
# TODO: in future rewrite this to be more explicit
# Get colorspace settings
imageio_global, imageio_host = _get_imageio_settings(
project_settings, host_name)
activate_color_management = (
imageio_global.get("activate_global_color_management", False)
# for already saved overrides from previous version
# TODO: remove this in future - backward compatibility
or imageio_host.get("ocio_config").get("enabled")
)
# Host 'ocio_config' is optional
host_ocio_config = imageio_host.get("ocio_config") or {}
# Global color management must be enabled to be able to use host settings
activate_color_management = imageio_global.get(
"activate_global_color_management")
# TODO: remove this in future - backward compatibility
# For already saved overrides from previous version look for 'enabled'
# on host settings.
if activate_color_management is None:
activate_color_management = host_ocio_config.get("enabled", False)
if not activate_color_management:
# if global settings are disabled return empty dict because
# it is expected that no colorspace management is needed
log.info(
"Colorspace management is disabled globally."
)
log.info("Colorspace management is disabled globally.")
return {}
# check if host settings group is having activate_host_color_management
# if it does not have activation key then default it to True so it uses
# global settings
# this is for backward compatibility
# Check if the host settings group has 'activate_host_color_management'
# - if the activation key is missing, default it to True so that global
# settings are used
# This is for backward compatibility.
# TODO: in future rewrite this to be more explicit
activate_host_color_management = imageio_host.get(
"activate_host_color_management", True)
@ -389,21 +386,18 @@ def get_imageio_config(
)
return {}
config_host = imageio_host.get("ocio_config", {})
# get config path from either global or host_name
# get config path from either global or host settings
# depending on override flag
# TODO: in future rewrite this to be more explicit
config_data = None
override_global_config = (
config_host.get("override_global_config")
override_global_config = host_ocio_config.get("override_global_config")
if override_global_config is None:
# for already saved overrides from previous version
# TODO: remove this in future - backward compatibility
or config_host.get("enabled")
)
override_global_config = host_ocio_config.get("enabled")
if override_global_config:
config_data = _get_config_data(
config_host["filepath"], formatting_data
host_ocio_config["filepath"], formatting_data
)
else:
# get config path from global
@ -507,34 +501,35 @@ def get_imageio_file_rules(project_name, host_name, project_settings=None):
frules_host = imageio_host.get("file_rules", {})
# compile file rules dictionary
activate_host_rules = (
frules_host.get("activate_host_rules")
activate_host_rules = frules_host.get("activate_host_rules")
if activate_host_rules is None:
# TODO: remove this in future - backward compatibility
or frules_host.get("enabled")
)
activate_host_rules = frules_host.get("enabled", False)
# return host rules if activated or global rules
return frules_host["rules"] if activate_host_rules else global_rules
def get_remapped_colorspace_to_native(
ocio_colorspace_name, host_name, imageio_host_settings):
ocio_colorspace_name, host_name, imageio_host_settings
):
"""Return native colorspace name.
Args:
ocio_colorspace_name (str | None): ocio colorspace name
host_name (str): Host name.
imageio_host_settings (dict[str, Any]): ImageIO host settings.
Returns:
str: native colorspace name defined in remapping or None
Union[str, None]: native colorspace name defined in remapping or None
"""
if not CashedData.remapping.get(host_name, {}).get("to_native"):
CashedData.remapping.setdefault(host_name, {})
if CashedData.remapping[host_name].get("to_native") is None:
remapping_rules = imageio_host_settings["remapping"]["rules"]
CashedData.remapping[host_name] = {
"to_native": {
rule["ocio_name"]: input["host_native_name"]
for rule in remapping_rules
}
CashedData.remapping[host_name]["to_native"] = {
rule["ocio_name"]: rule["host_native_name"]
for rule in remapping_rules
}
return CashedData.remapping[host_name]["to_native"].get(
@ -542,23 +537,25 @@ def get_remapped_colorspace_to_native(
def get_remapped_colorspace_from_native(
host_native_colorspace_name, host_name, imageio_host_settings):
host_native_colorspace_name, host_name, imageio_host_settings
):
"""Return ocio colorspace name remapped from host native used name.
Args:
host_native_colorspace_name (str): host native colorspace name
host_name (str): Host name.
imageio_host_settings (dict[str, Any]): ImageIO host settings.
Returns:
str: ocio colorspace name defined in remapping or None
Union[str, None]: Ocio colorspace name defined in remapping or None.
"""
if not CashedData.remapping.get(host_name, {}).get("from_native"):
CashedData.remapping.setdefault(host_name, {})
if CashedData.remapping[host_name].get("from_native") is None:
remapping_rules = imageio_host_settings["remapping"]["rules"]
CashedData.remapping[host_name] = {
"from_native": {
input["host_native_name"]: rule["ocio_name"]
for rule in remapping_rules
}
CashedData.remapping[host_name]["from_native"] = {
rule["host_native_name"]: rule["ocio_name"]
for rule in remapping_rules
}
return CashedData.remapping[host_name]["from_native"].get(
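A hedged, standalone sketch of the remapping lookup built from the rules above; the settings content is illustrative:

```python
# Build the ocio-name -> host-native-name mapping from hypothetical rules.
imageio_host_settings = {
    "remapping": {
        "rules": [
            {"ocio_name": "ACEScg", "host_native_name": "scene_linear"},
        ]
    }
}
to_native = {
    rule["ocio_name"]: rule["host_native_name"]
    for rule in imageio_host_settings["remapping"]["rules"]
}
print(to_native.get("ACEScg"))   # scene_linear
print(to_native.get("unknown"))  # None
```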

View file

@ -181,6 +181,11 @@ def install_openpype_plugins(project_name=None, host_name=None):
for path in load_plugin_paths:
register_loader_plugin_path(path)
inventory_action_paths = modules_manager.collect_inventory_action_paths(
host_name)
for path in inventory_action_paths:
register_inventory_action_path(path)
if project_name is None:
project_name = os.environ.get("AVALON_PROJECT")

View file

@ -1441,6 +1441,19 @@ class CreateContext:
"""Access to global publish attributes."""
return self._publish_attributes
def get_instance_by_id(self, instance_id):
"""Receive instance by id.
Args:
instance_id (str): Instance id.
Returns:
Union[CreatedInstance, None]: Instance or None if instance with
given id is not available.
"""
return self._instances_by_id.get(instance_id)
def get_sorted_creators(self, identifiers=None):
"""Sorted creators by 'order' attribute.

View file

@ -16,7 +16,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
order = pyblish.api.CollectorOrder - 0.5
def process(self, context):
create_context = context.data.pop("create_context", None)
create_context = context.data.get("create_context")
if not create_context:
host = registered_host()
if isinstance(host, IPublishHost):

View file

@ -356,6 +356,13 @@ class PypeCommands:
def pack_project(self, project_name, dirpath, database_only):
from openpype.lib.project_backpack import pack_project
if database_only and not dirpath:
raise ValueError((
"Destination dir must be defined when using --dbonly."
" Use '--dirpath {output dir path}' flag"
" to specify directory."
))
pack_project(project_name, dirpath, database_only)
def unpack_project(self, zip_filepath, new_root, database_only):

View file

@ -493,7 +493,29 @@
"upload_reviewable_with_origin_name": false
},
"IntegrateFtrackFarmStatus": {
"farm_status_profiles": []
"farm_status_profiles": [
{
"hosts": [
"celaction"
],
"task_types": [],
"task_names": [],
"families": [
"render"
],
"subsets": [],
"status_name": "Render"
}
]
},
"ftrack_task_status_local_publish": {
"status_profiles": []
},
"ftrack_task_status_on_farm_publish": {
"status_profiles": []
},
"IntegrateFtrackTaskStatus": {
"after_version_statuses": true
}
}
}

View file

@ -23,6 +23,7 @@
"detailed_description": "Workfiles are full scenes from any application that are directly edited by artists. They represent a state of work on a task at a given point and are usually not directly referenced into other scenes.",
"allow_sequences": false,
"allow_multiple_items": false,
"allow_version_control": false,
"extensions": [
".ma",
".mb",
@ -57,6 +58,7 @@
"detailed_description": "Models should only contain geometry data, without any extras like cameras, locators or bones.\n\nKeep in mind that models published from tray publisher are not validated for correctness. ",
"allow_sequences": false,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": [
".ma",
".mb",
@ -82,6 +84,7 @@
"detailed_description": "Alembic or bgeo cache of animated data",
"allow_sequences": true,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": [
".abc",
".bgeo",
@ -105,6 +108,7 @@
"detailed_description": "Any type of image seqeuence coming from outside of the studio. Usually camera footage, but could also be animatics used for reference.",
"allow_sequences": true,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": [
".exr",
".png",
@ -127,6 +131,7 @@
"detailed_description": "Sequence or single file renders",
"allow_sequences": true,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": [
".exr",
".png",
@ -150,6 +155,7 @@
"detailed_description": "Ideally this should be only camera itself with baked animation, however, it can technically also include helper geometry.",
"allow_sequences": false,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": [
".abc",
".ma",
@ -174,6 +180,7 @@
"detailed_description": "Any image data can be published as image family. References, textures, concept art, matte paints. This is a fallback 2d family for everything that doesn't fit more specific family.",
"allow_sequences": false,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": [
".exr",
".jpg",
@ -197,6 +204,7 @@
"detailed_description": "Hierarchical data structure for the efficient storage and manipulation of sparse volumetric data discretized on three-dimensional grids",
"allow_sequences": true,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": [
".vdb"
]
@ -215,6 +223,7 @@
"detailed_description": "Script exported from matchmoving application to be later processed into a tracked camera with additional data",
"allow_sequences": false,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": []
},
{
@ -227,6 +236,7 @@
"detailed_description": "CG rigged character or prop. Rig should be clean of any extra data and directly loadable into it's respective application\t",
"allow_sequences": false,
"allow_multiple_items": false,
"allow_version_control": false,
"extensions": [
".ma",
".blend",
@ -244,6 +254,7 @@
"detailed_description": "Texture files with Unreal Engine naming conventions",
"allow_sequences": false,
"allow_multiple_items": true,
"allow_version_control": false,
"extensions": []
}
],
@ -322,6 +333,11 @@
"enabled": true,
"optional": true,
"active": true
},
"ValidateExistingVersion": {
"enabled": true,
"optional": true,
"active": true
}
}
}

View file

@ -1058,7 +1058,7 @@
{
"type": "dict",
"key": "IntegrateFtrackFarmStatus",
"label": "Integrate Ftrack Farm Status",
"label": "Ftrack Status To Farm",
"children": [
{
"type": "label",
@ -1068,7 +1068,7 @@
"type": "list",
"collapsible": true,
"key": "farm_status_profiles",
"label": "Farm status profiles",
"label": "Profiles",
"use_label_wrap": true,
"object_type": {
"type": "dict",
@ -1114,6 +1114,142 @@
}
}
]
},
{
"type": "dict",
"key": "ftrack_task_status_local_publish",
"label": "Ftrack Status Local Integration",
"children": [
{
"type": "label",
"label": "Change status of task when is integrated locally"
},
{
"type": "list",
"collapsible": true,
"key": "status_profiles",
"label": "Profiles",
"use_label_wrap": true,
"object_type": {
"type": "dict",
"children": [
{
"key": "host_names",
"label": "Host names",
"type": "hosts-enum",
"multiselection": true
},
{
"key": "task_types",
"label": "Task types",
"type": "task-types-enum"
},
{
"key": "task_names",
"label": "Task names",
"type": "list",
"object_type": "text"
},
{
"key": "families",
"label": "Families",
"type": "list",
"object_type": "text"
},
{
"key": "subset_names",
"label": "Subset names",
"type": "list",
"object_type": "text"
},
{
"type": "separator"
},
{
"key": "status_name",
"label": "Status name",
"type": "text"
}
]
}
}
]
},
{
"type": "dict",
"key": "ftrack_task_status_on_farm_publish",
"label": "Ftrack Status On Farm",
"children": [
{
"type": "label",
"label": "Change status of task when it's subset is integrated on farm"
},
{
"type": "list",
"collapsible": true,
"key": "status_profiles",
"label": "Profiles",
"use_label_wrap": true,
"object_type": {
"type": "dict",
"children": [
{
"key": "host_names",
"label": "Host names",
"type": "hosts-enum",
"multiselection": true
},
{
"key": "task_types",
"label": "Task types",
"type": "task-types-enum"
},
{
"key": "task_names",
"label": "Task names",
"type": "list",
"object_type": "text"
},
{
"key": "families",
"label": "Families",
"type": "list",
"object_type": "text"
},
{
"key": "subset_names",
"label": "Subset names",
"type": "list",
"object_type": "text"
},
{
"type": "separator"
},
{
"key": "status_name",
"label": "Status name",
"type": "text"
}
]
}
}
]
},
{
"type": "dict",
"key": "IntegrateFtrackTaskStatus",
"label": "Integrate Ftrack Task Status",
"children": [
{
"type": "label",
"label": "Apply collected task statuses. This plugin can run before or after version integration. Some status automations may conflict with status changes on versions because of wrong order."
},
{
"type": "boolean",
"key": "after_version_statuses",
"label": "After version integration"
}
]
}
]
}

View file

@ -85,6 +85,12 @@
"label": "Allow multiple items",
"type": "boolean"
},
{
"type": "boolean",
"key": "allow_version_control",
"label": "Allow version control",
"default": false
},
{
"type": "list",
"key": "extensions",
@ -346,6 +352,10 @@
{
"key": "ValidateFrameRange",
"label": "Validate frame range"
},
{
"key": "ValidateExistingVersion",
"label": "Validate Existing Version"
}
]
}

View file

@ -53,6 +53,9 @@ class CreatorsModel(QtGui.QStandardItemModel):
index = self.index(row, 0)
item_id = index.data(ITEM_ID_ROLE)
creator_plugin = self._creators_by_id.get(item_id)
if creator_plugin and creator_plugin.family == family:
if creator_plugin and (
creator_plugin.label.lower() == family.lower()
or creator_plugin.family.lower() == family.lower()
):
indexes.append(index)
return indexes

View file

@ -1,4 +1,5 @@
import re
import platform
from openpype.client import get_projects, create_project
from .constants import (
@ -8,13 +9,16 @@ from .constants import (
from openpype.client.operations import (
PROJECT_NAME_ALLOWED_SYMBOLS,
PROJECT_NAME_REGEX,
OperationsSession,
)
from openpype.style import load_stylesheet
from openpype.pipeline import AvalonMongoDB
from openpype.tools.utils import (
PlaceholderLineEdit,
get_warning_pixmap
get_warning_pixmap,
PixmapLabel,
)
from openpype.settings.lib import get_default_anatomy_settings
from qtpy import QtWidgets, QtCore, QtGui
@ -35,7 +39,7 @@ class NameTextEdit(QtWidgets.QLineEdit):
sub_regex = "[^{}]+".format(NAME_ALLOWED_SYMBOLS)
new_before_text = re.sub(sub_regex, "", before_text)
new_after_text = re.sub(sub_regex, "", after_text)
idx -= (len(before_text) - len(new_before_text))
idx -= len(before_text) - len(new_before_text)
self.setText(new_before_text + new_after_text)
self.setCursorPosition(idx)
@ -141,13 +145,40 @@ class CreateProjectDialog(QtWidgets.QDialog):
inputs_widget = QtWidgets.QWidget(self)
project_name_input = QtWidgets.QLineEdit(inputs_widget)
project_code_input = QtWidgets.QLineEdit(inputs_widget)
project_width_input = NumScrollWidget(0, 9999999)
project_height_input = NumScrollWidget(0, 9999999)
project_fps_input = FloatScrollWidget(1, 9999999, decimals=3, step=1)
project_aspect_input = FloatScrollWidget(
0, 9999999, decimals=2, step=0.1
)
project_frame_start_input = NumScrollWidget(-9999999, 9999999)
project_frame_end_input = NumScrollWidget(-9999999, 9999999)
default_project_data = self.get_default_attributes()
project_width_input.setValue(default_project_data["resolutionWidth"])
project_height_input.setValue(default_project_data["resolutionHeight"])
project_fps_input.setValue(default_project_data["fps"])
project_aspect_input.setValue(default_project_data["pixelAspect"])
project_frame_start_input.setValue(default_project_data["frameStart"])
project_frame_end_input.setValue(default_project_data["frameEnd"])
library_project_input = QtWidgets.QCheckBox(inputs_widget)
inputs_layout = QtWidgets.QFormLayout(inputs_widget)
if platform.system() == "Darwin":
inputs_layout.setFieldGrowthPolicy(
QtWidgets.QFormLayout.AllNonFixedFieldsGrow
)
inputs_layout.setContentsMargins(0, 0, 0, 0)
inputs_layout.addRow("Project name:", project_name_input)
inputs_layout.addRow("Project code:", project_code_input)
inputs_layout.addRow("Library project:", library_project_input)
inputs_layout.addRow("Width:", project_width_input)
inputs_layout.addRow("Height:", project_height_input)
inputs_layout.addRow("FPS:", project_fps_input)
inputs_layout.addRow("Aspect:", project_aspect_input)
inputs_layout.addRow("Frame Start:", project_frame_start_input)
inputs_layout.addRow("Frame End:", project_frame_end_input)
project_name_label = QtWidgets.QLabel(self)
project_code_label = QtWidgets.QLabel(self)
@ -183,6 +214,12 @@ class CreateProjectDialog(QtWidgets.QDialog):
self.project_name_input = project_name_input
self.project_code_input = project_code_input
self.library_project_input = library_project_input
self.project_width_input = project_width_input
self.project_height_input = project_height_input
self.project_fps_input = project_fps_input
self.project_aspect_input = project_aspect_input
self.project_frame_start_input = project_frame_start_input
self.project_frame_end_input = project_frame_end_input
self.ok_btn = ok_btn
@ -190,6 +227,10 @@ class CreateProjectDialog(QtWidgets.QDialog):
def project_name(self):
return self.project_name_input.text()
def get_default_attributes(self):
settings = get_default_anatomy_settings()
return settings["attributes"]
def _on_project_name_change(self, value):
if self._project_code_value is None:
self._ignore_code_change = True
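
get_default_attributes() above simply exposes the "attributes" section of the studio-default anatomy settings, which is where the setValue() calls take their initial resolution, fps, aspect and frame range. A hedged illustration of reading those keys with fallbacks (the numeric defaults below are placeholders, not OpenPype's shipped values):

from openpype.settings.lib import get_default_anatomy_settings

FALLBACK_ATTRIBUTES = {
    "resolutionWidth": 1920,
    "resolutionHeight": 1080,
    "fps": 25.0,
    "pixelAspect": 1.0,
    "frameStart": 1001,
    "frameEnd": 1100,
}


def get_project_defaults():
    """Anatomy attributes used to pre-fill the dialog, with fallbacks."""
    attributes = get_default_anatomy_settings().get("attributes") or {}
    return {
        key: attributes.get(key, default)
        for key, default in FALLBACK_ATTRIBUTES.items()
    }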
@ -215,12 +256,12 @@ class CreateProjectDialog(QtWidgets.QDialog):
is_valid = False
elif value in self.invalid_project_names:
message = "Project name \"{}\" already exist".format(value)
message = 'Project name "{}" already exists'.format(value)
is_valid = False
elif not PROJECT_NAME_REGEX.match(value):
message = (
"Project name \"{}\" contain not supported symbols"
'Project name "{}" contains unsupported symbols'
).format(value)
is_valid = False
@ -237,12 +278,12 @@ class CreateProjectDialog(QtWidgets.QDialog):
is_valid = False
elif value in self.invalid_project_names:
message = "Project code \"{}\" already exist".format(value)
message = 'Project code "{}" already exists'.format(value)
is_valid = False
elif not PROJECT_NAME_REGEX.match(value):
message = (
"Project code \"{}\" contain not supported symbols"
'Project code "{}" contains unsupported symbols'
).format(value)
is_valid = False
@ -264,9 +305,35 @@ class CreateProjectDialog(QtWidgets.QDialog):
project_name = self.project_name_input.text()
project_code = self.project_code_input.text()
library_project = self.library_project_input.isChecked()
create_project(project_name, project_code, library_project)
project_width = self.project_width_input.value()
project_height = self.project_height_input.value()
project_fps = self.project_fps_input.value()
project_aspect = self.project_aspect_input.value()
project_frame_start = self.project_frame_start_input.value()
project_frame_end = self.project_frame_end_input.value()
library_project = self.library_project_input.isChecked()
project_doc = create_project(
project_name,
project_code,
library_project,
)
update_data = {
"data.resolutionWidth": project_width,
"data.resolutionHeight": project_height,
"data.fps": project_fps,
"data.pixelAspect": project_aspect,
"data.frameStart": project_frame_start,
"data.frameEnd": project_frame_end,
}
session = OperationsSession()
session.update_entity(
project_name,
project_doc["type"],
project_doc["_id"],
update_data,
)
session.commit()
self.done(1)
def _get_existing_projects(self):
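
Because create_project() only stores the name, code and library flag, the dialog pushes the remaining attributes in a second step through an OperationsSession. A condensed sketch of that two-step flow outside the dialog (project name, code and values are illustrative):

from openpype.client import create_project
from openpype.client.operations import OperationsSession

project_doc = create_project("demo_project", "demo")

# Dot-notation keys update nested fields of the project document.
update_data = {
    "data.resolutionWidth": 1920,
    "data.resolutionHeight": 1080,
    "data.fps": 25.0,
    "data.pixelAspect": 1.0,
    "data.frameStart": 1001,
    "data.frameEnd": 1100,
}

session = OperationsSession()
session.update_entity(
    "demo_project",
    project_doc["type"],
    project_doc["_id"],
    update_data,
)
session.commit()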
@ -288,45 +355,15 @@ class CreateProjectDialog(QtWidgets.QDialog):
return project_names, project_codes
# TODO PixmapLabel should be moved to 'utils' in other future PR so should be
# imported from there
class PixmapLabel(QtWidgets.QLabel):
"""Label resizing image to height of font."""
def __init__(self, pixmap, parent):
super(PixmapLabel, self).__init__(parent)
self._empty_pixmap = QtGui.QPixmap(0, 0)
self._source_pixmap = pixmap
def set_source_pixmap(self, pixmap):
"""Change source image."""
self._source_pixmap = pixmap
self._set_resized_pix()
class ProjectManagerPixmapLabel(PixmapLabel):
def _get_pix_size(self):
size = self.fontMetrics().height() * 4
return size, size
def _set_resized_pix(self):
if self._source_pixmap is None:
self.setPixmap(self._empty_pixmap)
return
width, height = self._get_pix_size()
self.setPixmap(
self._source_pixmap.scaled(
width,
height,
QtCore.Qt.KeepAspectRatio,
QtCore.Qt.SmoothTransformation
)
)
def resizeEvent(self, event):
self._set_resized_pix()
super(PixmapLabel, self).resizeEvent(event)
class ConfirmProjectDeletion(QtWidgets.QDialog):
"""Dialog which confirms deletion of a project."""
def __init__(self, project_name, parent):
super(ConfirmProjectDeletion, self).__init__(parent)
@ -335,23 +372,26 @@ class ConfirmProjectDeletion(QtWidgets.QDialog):
top_widget = QtWidgets.QWidget(self)
warning_pixmap = get_warning_pixmap()
warning_icon_label = PixmapLabel(warning_pixmap, top_widget)
warning_icon_label = ProjectManagerPixmapLabel(
warning_pixmap, top_widget
)
message_label = QtWidgets.QLabel(top_widget)
message_label.setWordWrap(True)
message_label.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction)
message_label.setText((
"<b>WARNING: This cannot be undone.</b><br/><br/>"
"Project <b>\"{}\"</b> with all related data will be"
" permanently removed from the database. (This action won't remove"
" any files on disk.)"
).format(project_name))
message_label.setText(
(
"<b>WARNING: This cannot be undone.</b><br/><br/>"
'Project <b>"{}"</b> with all related data will be'
" permanently removed from the database."
" (This action won't remove any files on disk.)"
).format(project_name)
)
top_layout = QtWidgets.QHBoxLayout(top_widget)
top_layout.setContentsMargins(0, 0, 0, 0)
top_layout.addWidget(
warning_icon_label, 0,
QtCore.Qt.AlignTop | QtCore.Qt.AlignHCenter
warning_icon_label, 0, QtCore.Qt.AlignTop | QtCore.Qt.AlignHCenter
)
top_layout.addWidget(message_label, 1)
@ -359,7 +399,7 @@ class ConfirmProjectDeletion(QtWidgets.QDialog):
confirm_input = PlaceholderLineEdit(self)
confirm_input.setPlaceholderText(
"Type \"{}\" to confirm...".format(project_name)
'Type "{}" to confirm...'.format(project_name)
)
cancel_btn = QtWidgets.QPushButton("Cancel", self)
@ -429,6 +469,7 @@ class ConfirmProjectDeletion(QtWidgets.QDialog):
class SpinBoxScrollFixed(QtWidgets.QSpinBox):
"""QSpinBox which only allow edits change with scroll wheel when active"""
def __init__(self, *args, **kwargs):
super(SpinBoxScrollFixed, self).__init__(*args, **kwargs)
self.setFocusPolicy(QtCore.Qt.StrongFocus)
@ -442,6 +483,7 @@ class SpinBoxScrollFixed(QtWidgets.QSpinBox):
class DoubleSpinBoxScrollFixed(QtWidgets.QDoubleSpinBox):
"""QDoubleSpinBox which only allow edits with scroll wheel when active"""
def __init__(self, *args, **kwargs):
super(DoubleSpinBoxScrollFixed, self).__init__(*args, **kwargs)
self.setFocusPolicy(QtCore.Qt.StrongFocus)
@ -451,3 +493,22 @@ class DoubleSpinBoxScrollFixed(QtWidgets.QDoubleSpinBox):
event.ignore()
else:
super(DoubleSpinBoxScrollFixed, self).wheelEvent(event)
class NumScrollWidget(SpinBoxScrollFixed):
def __init__(self, minimum, maximum):
super(NumScrollWidget, self).__init__()
self.setMaximum(maximum)
self.setMinimum(minimum)
self.setButtonSymbols(QtWidgets.QSpinBox.NoButtons)
class FloatScrollWidget(DoubleSpinBoxScrollFixed):
def __init__(self, minimum, maximum, decimals, step=None):
super(FloatScrollWidget, self).__init__()
self.setMaximum(maximum)
self.setMinimum(minimum)
self.setDecimals(decimals)
if step is not None:
self.setSingleStep(step)
self.setButtonSymbols(QtWidgets.QSpinBox.NoButtons)
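
NumScrollWidget and FloatScrollWidget only bundle range, precision and hidden-buttons setup on top of the scroll-safe spin boxes. A short usage sketch mirroring how the dialog instantiates them (the classes are the ones defined just above; values are illustrative):

from qtpy import QtWidgets

app = QtWidgets.QApplication([])

width_input = NumScrollWidget(0, 9999999)
width_input.setValue(1920)

fps_input = FloatScrollWidget(1, 9999999, decimals=3, step=1)
fps_input.setValue(25.0)

print(width_input.value(), fps_input.value())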

View file

@ -453,7 +453,11 @@ class PublisherWindow(QtWidgets.QDialog):
return
save_match = event.matches(QtGui.QKeySequence.Save)
if save_match == QtGui.QKeySequence.ExactMatch:
# PySide2 and PySide6 support
if not isinstance(save_match, bool):
save_match = save_match == QtGui.QKeySequence.ExactMatch
if save_match:
if not self._controller.publish_has_started:
self._save_changes(True)
event.accept()
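
event.matches() does not return the same type in every Qt binding; the added guard treats a boolean result as final and only compares enum results against ExactMatch. A hedged helper capturing that normalization (the per-binding behaviour is taken from the comment in the change, not verified against every Qt version):

from qtpy import QtGui


def is_save_shortcut(event):
    """Return True when a QKeyEvent matches the standard Save shortcut."""
    match = event.matches(QtGui.QKeySequence.Save)
    if isinstance(match, bool):
        # Some bindings already return a bool.
        return match
    # Others return a QKeySequence.SequenceMatch value.
    return match == QtGui.QKeySequence.ExactMatch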

View file

@ -128,7 +128,8 @@ class FamilyWidget(QtWidgets.QWidget):
'family_preset_key': key,
'family': family,
'subset': self.input_result.text(),
'version': self.version_spinbox.value()
'version': self.version_spinbox.value(),
'use_next_available_version': self.version_checkbox.isChecked(),
}
return data
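
The new key lets downstream collectors decide between pinning the version chosen in the UI and letting the integrator pick the next available one. A hypothetical sketch of how a collector could honor the flag (the function and key handling are assumptions for illustration):

def apply_version_choice(instance_data, family_data):
    """Pin an explicit version only when auto-versioning is disabled."""
    if family_data.get("use_next_available_version", True):
        # Leave 'version' unset so the next available version is used.
        instance_data.pop("version", None)
    else:
        instance_data["version"] = family_data.get("version", 1)
    return instance_data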

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.11-nightly.2"
__version__ = "3.15.11-nightly.3"