Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit 6d00f70337

Merge branch 'develop' into bugfix/OP-6150_Maya-arnoldExportAss-errors-out-during-extraction-in-Extract-Arnold-Scene-Source

11 changed files with 636 additions and 208 deletions
@@ -113,6 +113,12 @@ def pack_project(
             project_name
         ))

+    if only_documents and not destination_dir:
+        raise ValueError((
+            "Destination directory must be defined"
+            " when only documents should be packed."
+        ))
+
     root_path = None
     source_root = {}
     project_source_path = None
@@ -141,6 +147,11 @@ def pack_project(
     if not destination_dir:
         destination_dir = root_path

+    if not destination_dir:
+        raise ValueError(
+            "Project {} does not have any roots.".format(project_name)
+        )
+
     destination_dir = os.path.normpath(destination_dir)
     if not os.path.exists(destination_dir):
         os.makedirs(destination_dir)
@@ -59,7 +59,6 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
             render_path).replace("\\", "/")

         instance.data["publishJobState"] = "Suspended"
-        instance.context.data['ftrackStatus'] = "Render"

         # adding 2d render specific family for version identification in Loader
         instance.data["families"] = ["render2d"]
@@ -109,8 +109,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
             for status in asset_version_statuses
         }

-        self._set_task_status(instance, project_entity, task_entity, session)
-
         # Prepare AssetTypes
         asset_types_by_short = self._ensure_asset_types_exists(
             session, component_list
@@ -180,45 +178,6 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
         if asset_version not in instance.data[asset_versions_key]:
             instance.data[asset_versions_key].append(asset_version)

-    def _set_task_status(self, instance, project_entity, task_entity, session):
-        if not project_entity:
-            self.log.info("Task status won't be set, project is not known.")
-            return
-
-        if not task_entity:
-            self.log.info("Task status won't be set, task is not known.")
-            return
-
-        status_name = instance.context.data.get("ftrackStatus")
-        if not status_name:
-            self.log.info("Ftrack status name is not set.")
-            return
-
-        self.log.debug(
-            "Ftrack status name will be (maybe) set to \"{}\"".format(
-                status_name
-            )
-        )
-
-        project_schema = project_entity["project_schema"]
-        task_statuses = project_schema.get_statuses(
-            "Task", task_entity["type_id"]
-        )
-        task_statuses_by_low_name = {
-            status["name"].lower(): status for status in task_statuses
-        }
-        status = task_statuses_by_low_name.get(status_name.lower())
-        if not status:
-            self.log.warning((
-                "Task status \"{}\" won't be set,"
-                " status is now allowed on task type \"{}\"."
-            ).format(status_name, task_entity["type"]["name"]))
-            return
-
-        self.log.info("Setting task status to \"{}\"".format(status_name))
-        task_entity["status"] = status
-        session.commit()
-
     def _fill_component_locations(self, session, component_list):
         components_by_location_name = collections.defaultdict(list)
         components_by_location_id = collections.defaultdict(list)
@@ -1,150 +0,0 @@
-import pyblish.api
-from openpype.lib import filter_profiles
-
-
-class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin):
-    """Change task status when should be published on farm.
-
-    Instance which has set "farm" key in data to 'True' is considered as will
-    be rendered on farm thus it's status should be changed.
-    """
-
-    order = pyblish.api.IntegratorOrder + 0.48
-    label = "Integrate Ftrack Farm Status"
-
-    farm_status_profiles = []
-
-    def process(self, context):
-        # Quick end
-        if not self.farm_status_profiles:
-            project_name = context.data["projectName"]
-            self.log.info((
-                "Status profiles are not filled for project \"{}\". Skipping"
-            ).format(project_name))
-            return
-
-        filtered_instances = self.filter_instances(context)
-        instances_with_status_names = self.get_instances_with_statuse_names(
-            context, filtered_instances
-        )
-        if instances_with_status_names:
-            self.fill_statuses(context, instances_with_status_names)
-
-    def filter_instances(self, context):
-        filtered_instances = []
-        for instance in context:
-            # Skip disabled instances
-            if instance.data.get("publish") is False:
-                continue
-            subset_name = instance.data["subset"]
-            msg_start = "Skipping instance {}.".format(subset_name)
-            if not instance.data.get("farm"):
-                self.log.debug(
-                    "{} Won't be rendered on farm.".format(msg_start)
-                )
-                continue
-
-            task_entity = instance.data.get("ftrackTask")
-            if not task_entity:
-                self.log.debug(
-                    "{} Does not have filled task".format(msg_start)
-                )
-                continue
-
-            filtered_instances.append(instance)
-        return filtered_instances
-
-    def get_instances_with_statuse_names(self, context, instances):
-        instances_with_status_names = []
-        for instance in instances:
-            family = instance.data["family"]
-            subset_name = instance.data["subset"]
-            task_entity = instance.data["ftrackTask"]
-            host_name = context.data["hostName"]
-            task_name = task_entity["name"]
-            task_type = task_entity["type"]["name"]
-            status_profile = filter_profiles(
-                self.farm_status_profiles,
-                {
-                    "hosts": host_name,
-                    "task_types": task_type,
-                    "task_names": task_name,
-                    "families": family,
-                    "subsets": subset_name,
-                },
-                logger=self.log
-            )
-            if not status_profile:
-                # There already is log in 'filter_profiles'
-                continue
-
-            status_name = status_profile["status_name"]
-            if status_name:
-                instances_with_status_names.append((instance, status_name))
-        return instances_with_status_names
-
-    def fill_statuses(self, context, instances_with_status_names):
-        # Prepare available task statuses on the project
-        project_name = context.data["projectName"]
-        session = context.data["ftrackSession"]
-        project_entity = session.query((
-            "select project_schema from Project where full_name is \"{}\""
-        ).format(project_name)).one()
-        project_schema = project_entity["project_schema"]
-
-        task_type_ids = set()
-        for item in instances_with_status_names:
-            instance, _ = item
-            task_entity = instance.data["ftrackTask"]
-            task_type_ids.add(task_entity["type"]["id"])
-
-        task_statuses_by_type_id = {
-            task_type_id: project_schema.get_statuses("Task", task_type_id)
-            for task_type_id in task_type_ids
-        }
-
-        # Keep track if anything has changed
-        skipped_status_names = set()
-        status_changed = False
-        for item in instances_with_status_names:
-            instance, status_name = item
-            task_entity = instance.data["ftrackTask"]
-            task_statuses = task_statuses_by_type_id[task_entity["type"]["id"]]
-            status_name_low = status_name.lower()
-
-            status_id = None
-            status_name = None
-            # Skip if status name was already tried to be found
-            for status in task_statuses:
-                if status["name"].lower() == status_name_low:
-                    status_id = status["id"]
-                    status_name = status["name"]
-                    break
-
-            if status_id is None:
-                if status_name_low not in skipped_status_names:
-                    skipped_status_names.add(status_name_low)
-                    joined_status_names = ", ".join({
-                        '"{}"'.format(status["name"])
-                        for status in task_statuses
-                    })
-                    self.log.warning((
-                        "Status \"{}\" is not available on project \"{}\"."
-                        " Available statuses are {}"
-                    ).format(status_name, project_name, joined_status_names))
-                continue
-
-            # Change task status id
-            if status_id != task_entity["status_id"]:
-                task_entity["status_id"] = status_id
-                status_changed = True
-                path = "/".join([
-                    item["name"]
-                    for item in task_entity["link"]
-                ])
-                self.log.debug("Set status \"{}\" to \"{}\"".format(
-                    status_name, path
-                ))
-
-        if status_changed:
-            session.commit()
@@ -0,0 +1,433 @@
+import copy
+
+import pyblish.api
+from openpype.lib import filter_profiles
+
+
+def create_chunks(iterable, chunk_size=None):
+    """Separate iterable into multiple chunks by size.
+
+    Args:
+        iterable(list|tuple|set): Object that will be separated into chunks.
+        chunk_size(int): Size of one chunk. Default value is 200.
+
+    Returns:
+        list<list>: Chunked items.
+    """
+    chunks = []
+
+    tupled_iterable = tuple(iterable)
+    if not tupled_iterable:
+        return chunks
+    iterable_size = len(tupled_iterable)
+    if chunk_size is None:
+        chunk_size = 200
+
+    if chunk_size < 1:
+        chunk_size = 1
+
+    for idx in range(0, iterable_size, chunk_size):
+        chunks.append(tupled_iterable[idx:idx + chunk_size])
+    return chunks
+
+
+class CollectFtrackTaskStatuses(pyblish.api.ContextPlugin):
+    """Collect available task statuses on the project.
+
+    This is preparation for integration of task statuses.
+
+    Requirements:
+        ftrackSession (ftrack_api.Session): Prepared ftrack session.
+
+    Provides:
+        ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
+            task statuses on project by task type id.
+        ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
+            statuses by task id. Status on task can be set only once.
+            Value should be a name of status.
+    """
+
+    # After 'CollectFtrackApi'
+    order = pyblish.api.CollectorOrder + 0.4992
+    label = "Collect Ftrack Task Statuses"
+    settings_category = "ftrack"
+
+    def process(self, context):
+        ftrack_session = context.data("ftrackSession")
+        if ftrack_session is None:
+            self.log.info("Ftrack session is not created.")
+            return
+
+        # Prepare available task statuses on the project
+        project_name = context.data["projectName"]
+        project_entity = ftrack_session.query((
+            "select project_schema from Project where full_name is \"{}\""
+        ).format(project_name)).one()
+        project_schema = project_entity["project_schema"]
+
+        task_type_ids = {
+            task_type["id"]
+            for task_type in ftrack_session.query("select id from Type").all()
+        }
+        task_statuses_by_type_id = {
+            task_type_id: project_schema.get_statuses("Task", task_type_id)
+            for task_type_id in task_type_ids
+        }
+        context.data["ftrackTaskStatuses"] = task_statuses_by_type_id
+        context.data["ftrackStatusByTaskId"] = {}
+        self.log.info("Collected ftrack task statuses.")
+
+
+class IntegrateFtrackStatusBase(pyblish.api.InstancePlugin):
+    """Base plugin for status collection.
+
+    Requirements:
+        projectName (str): Name of the project.
+        hostName (str): Name of the host.
+        ftrackSession (ftrack_api.Session): Prepared ftrack session.
+        ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
+            task statuses on project by task type id.
+        ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
+            statuses by task id. Status on task can be set only once.
+            Value should be a name of status.
+    """
+
+    active = False
+    settings_key = None
+    status_profiles = []
+
+    @classmethod
+    def apply_settings(cls, project_settings):
+        settings_key = cls.settings_key
+        if settings_key is None:
+            settings_key = cls.__name__
+
+        try:
+            settings = project_settings["ftrack"]["publish"][settings_key]
+        except KeyError:
+            return
+
+        for key, value in settings.items():
+            setattr(cls, key, value)
+
+    def process(self, instance):
+        context = instance.context
+        # No profiles -> skip
+        profiles = self.get_status_profiles()
+        if not profiles:
+            project_name = context.data["projectName"]
+            self.log.info((
+                "Status profiles are not filled for project \"{}\". Skipping"
+            ).format(project_name))
+            return
+
+        # Task statuses were not collected -> skip
+        task_statuses_by_type_id = context.data.get("ftrackTaskStatuses")
+        if not task_statuses_by_type_id:
+            self.log.info(
+                "Ftrack task statuses are not collected. Skipping.")
+            return
+
+        self.prepare_status_names(context, instance, profiles)
+
+    def get_status_profiles(self):
+        """List of profiles to determine status name.
+
+        Example profile item:
+            {
+                "host_names": ["nuke"],
+                "task_types": ["Compositing"],
+                "task_names": ["Comp"],
+                "families": ["render"],
+                "subset_names": ["renderComp"],
+                "status_name": "Rendering",
+            }
+
+        Returns:
+            list[dict[str, Any]]: List of profiles.
+        """
+
+        return self.status_profiles
+
+    def prepare_status_names(self, context, instance, profiles):
+        if not self.is_valid_instance(context, instance):
+            return
+
+        filter_data = self.get_profile_filter_data(context, instance)
+        status_profile = filter_profiles(
+            profiles,
+            filter_data,
+            logger=self.log
+        )
+        if not status_profile:
+            return
+
+        status_name = status_profile["status_name"]
+        if status_name:
+            self.fill_status(context, instance, status_name)
+
+    def get_profile_filter_data(self, context, instance):
+        task_entity = instance.data["ftrackTask"]
+        return {
+            "host_names": context.data["hostName"],
+            "task_types": task_entity["type"]["name"],
+            "task_names": task_entity["name"],
+            "families": instance.data["family"],
+            "subset_names": instance.data["subset"],
+        }
+
+    def is_valid_instance(self, context, instance):
+        """Filter instances that should be processed.
+
+        Ignore instances that are not enabled for publishing or don't have
+        filled task. Also skip instances with tasks that already have defined
+        status.
+
+        Plugin should do more filtering which is custom for plugin logic.
+
+        Args:
+            context (pyblish.api.Context): Pyblish context.
+            instance (pyblish.api.Instance): Instance to process.
+
+        Returns:
+            list[pyblish.api.Instance]: List of instances that should be
+                processed.
+        """
+
+        ftrack_status_by_task_id = context.data["ftrackStatusByTaskId"]
+        # Skip disabled instances
+        if instance.data.get("publish") is False:
+            return False
+
+        task_entity = instance.data.get("ftrackTask")
+        if not task_entity:
+            self.log.debug(
+                "Skipping instance Does not have filled task".format(
+                    instance.data["subset"]))
+            return False
+
+        task_id = task_entity["id"]
+        if task_id in ftrack_status_by_task_id:
+            self.log.debug("Status for task {} was already defined".format(
+                task_entity["name"]
+            ))
+            return False
+
+        return True
+
+    def fill_status(self, context, instance, status_name):
+        """Fill status for instance task.
+
+        If task already had set status, it will be skipped.
+
+        Args:
+            context (pyblish.api.Context): Pyblish context.
+            instance (pyblish.api.Instance): Pyblish instance.
+            status_name (str): Name of status to set.
+        """
+
+        task_entity = instance.data["ftrackTask"]
+        task_id = task_entity["id"]
+        ftrack_status_by_task_id = context.data["ftrackStatusByTaskId"]
+        if task_id in ftrack_status_by_task_id:
+            self.log.debug("Status for task {} was already defined".format(
+                task_entity["name"]
+            ))
+            return
+
+        ftrack_status_by_task_id[task_id] = status_name
+        self.log.info((
+            "Task {} will be set to \"{}\" status."
+        ).format(task_entity["name"], status_name))
+
+
+class IntegrateFtrackFarmStatus(IntegrateFtrackStatusBase):
+    """Collect task status names for instances that are sent to farm.
+
+    Instance which has set "farm" key in data to 'True' is considered as will
+    be rendered on farm thus it's status should be changed.
+
+    Requirements:
+        projectName (str): Name of the project.
+        hostName (str): Name of the host.
+        ftrackSession (ftrack_api.Session): Prepared ftrack session.
+        ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
+            task statuses on project by task type id.
+        ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
+            statuses by task id. Status on task can be set only once.
+            Value should be a name of status.
+    """
+
+    order = pyblish.api.IntegratorOrder + 0.48
+    label = "Ftrack Task Status To Farm Status"
+    active = True
+
+    farm_status_profiles = []
+    status_profiles = None
+
+    def is_valid_instance(self, context, instance):
+        if not instance.data.get("farm"):
+            self.log.debug("{} Won't be rendered on farm.".format(
+                instance.data["subset"]
+            ))
+            return False
+        return super(IntegrateFtrackFarmStatus, self).is_valid_instance(
+            context, instance)
+
+    def get_status_profiles(self):
+        if self.status_profiles is None:
+            profiles = copy.deepcopy(self.farm_status_profiles)
+            for profile in profiles:
+                profile["host_names"] = profile.pop("hosts")
+                profile["subset_names"] = profile.pop("subsets")
+            self.status_profiles = profiles
+        return self.status_profiles
+
+
+class IntegrateFtrackLocalStatus(IntegrateFtrackStatusBase):
+    """Collect task status names for instances that are published locally.
+
+    Instance which has set "farm" key in data to 'True' is considered as will
+    be rendered on farm thus it's status should be changed.
+
+    Requirements:
+        projectName (str): Name of the project.
+        hostName (str): Name of the host.
+        ftrackSession (ftrack_api.Session): Prepared ftrack session.
+        ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
+            task statuses on project by task type id.
+        ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
+            statuses by task id. Status on task can be set only once.
+            Value should be a name of status.
+    """
+
+    order = IntegrateFtrackFarmStatus.order + 0.001
+    label = "Ftrack Task Status Local Publish"
+    active = True
+    targets = ["local"]
+    settings_key = "ftrack_task_status_local_publish"
+
+    def is_valid_instance(self, context, instance):
+        if instance.data.get("farm"):
+            self.log.debug("{} Will be rendered on farm.".format(
+                instance.data["subset"]
+            ))
+            return False
+        return super(IntegrateFtrackLocalStatus, self).is_valid_instance(
+            context, instance)
+
+
+class IntegrateFtrackOnFarmStatus(IntegrateFtrackStatusBase):
+    """Collect task status names for instances that are published on farm.
+
+    Requirements:
+        projectName (str): Name of the project.
+        hostName (str): Name of the host.
+        ftrackSession (ftrack_api.Session): Prepared ftrack session.
+        ftrackTaskStatuses (dict[str, list[Any]]): Dictionary of available
+            task statuses on project by task type id.
+        ftrackStatusByTaskId (dict[str, str]): Empty dictionary of task
+            statuses by task id. Status on task can be set only once.
+            Value should be a name of status.
+    """
+
+    order = IntegrateFtrackLocalStatus.order + 0.001
+    label = "Ftrack Task Status On Farm Status"
+    active = True
+    targets = ["farm"]
+    settings_key = "ftrack_task_status_on_farm_publish"
+
+
+class IntegrateFtrackTaskStatus(pyblish.api.ContextPlugin):
+    # Use order of Integrate Ftrack Api plugin and offset it before or after
+    base_order = pyblish.api.IntegratorOrder + 0.499
+    # By default is after Integrate Ftrack Api
+    order = base_order + 0.0001
+    label = "Integrate Ftrack Task Status"
+
+    @classmethod
+    def apply_settings(cls, project_settings):
+        """Apply project settings to plugin.
+
+        Args:
+            project_settings (dict[str, Any]): Project settings.
+        """
+
+        settings = (
+            project_settings["ftrack"]["publish"]["IntegrateFtrackTaskStatus"]
+        )
+        diff = 0.001
+        if not settings["after_version_statuses"]:
+            diff = -diff
+        cls.order = cls.base_order + diff
+
+    def process(self, context):
+        task_statuses_by_type_id = context.data.get("ftrackTaskStatuses")
+        if not task_statuses_by_type_id:
+            self.log.info("Ftrack task statuses are not collected. Skipping.")
+            return
+
+        status_by_task_id = self._get_status_by_task_id(context)
+        if not status_by_task_id:
+            self.log.info("No statuses to set. Skipping.")
+            return
+
+        ftrack_session = context.data["ftrackSession"]
+
+        task_entities = self._get_task_entities(
+            ftrack_session, status_by_task_id)
+
+        for task_entity in task_entities:
+            task_path = "/".join([
+                item["name"] for item in task_entity["link"]
+            ])
+            task_id = task_entity["id"]
+            type_id = task_entity["type_id"]
+            new_status = None
+            status_name = status_by_task_id[task_id]
+            self.log.debug(
+                "Status to set {} on task {}.".format(status_name, task_path))
+            status_name_low = status_name.lower()
+            available_statuses = task_statuses_by_type_id[type_id]
+            for status in available_statuses:
+                if status["name"].lower() == status_name_low:
+                    new_status = status
+                    break
+
+            if new_status is None:
+                joined_statuses = ", ".join([
+                    "'{}'".format(status["name"])
+                    for status in available_statuses
+                ])
+                self.log.debug((
+                    "Status '{}' was not found in available statuses: {}."
+                ).format(status_name, joined_statuses))
+                continue
+
+            if task_entity["status_id"] != new_status["id"]:
+                task_entity["status_id"] = new_status["id"]
+
+                self.log.debug("Changing status of task '{}' to '{}'".format(
+                    task_path, status_name
+                ))
+        ftrack_session.commit()
+
+    def _get_status_by_task_id(self, context):
+        status_by_task_id = context.data["ftrackStatusByTaskId"]
+        return {
+            task_id: status_name
+            for task_id, status_name in status_by_task_id.items()
+            if status_name
+        }
+
+    def _get_task_entities(self, ftrack_session, status_by_task_id):
+        task_entities = []
+        for chunk_ids in create_chunks(status_by_task_id.keys()):
+            joined_ids = ",".join(
+                ['"{}"'.format(task_id) for task_id in chunk_ids]
+            )
+            task_entities.extend(ftrack_session.query((
+                "select id, type_id, status_id, link from Task"
+                " where id in ({})"
+            ).format(joined_ids)).all())
+        return task_entities
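A small usage sketch of the create_chunks helper added above, assuming it is run in the same module where the helper is defined; the task ids are made-up placeholders. It mirrors how _get_task_entities batches its ftrack queries so the "where id in (...)" clause stays a manageable size:

task_ids = ["task-{}".format(idx) for idx in range(450)]

# Default chunk size is 200, so this yields batches of 200, 200 and 50.
for chunk in create_chunks(task_ids):
    joined_ids = ",".join('"{}"'.format(task_id) for task_id in chunk)
    query = (
        "select id, type_id, status_id, link from Task"
        " where id in ({})"
    ).format(joined_ids)
    print(len(chunk), query[:60])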
@@ -63,7 +63,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
     """

     order = pyblish.api.IntegratorOrder - 0.04
-    label = 'Integrate Hierarchy To Ftrack'
+    label = "Integrate Hierarchy To Ftrack"
     families = ["shot"]
     hosts = [
         "hiero",
@@ -94,14 +94,13 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                 "Project \"{}\" was not found on ftrack.".format(project_name)
             )

-        self.context = context
         self.session = session
         self.ft_project = project
         self.task_types = self.get_all_task_types(project)
         self.task_statuses = self.get_task_statuses(project)

         # import ftrack hierarchy
-        self.import_to_ftrack(project_name, hierarchy_context)
+        self.import_to_ftrack(context, project_name, hierarchy_context)

     def query_ftrack_entitites(self, session, ft_project):
         project_id = ft_project["id"]
@@ -227,7 +226,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

         return output

-    def import_to_ftrack(self, project_name, hierarchy_context):
+    def import_to_ftrack(self, context, project_name, hierarchy_context):
        # Prequery hiearchical custom attributes
        hier_attrs = get_pype_attr(self.session)[1]
        hier_attr_by_key = {
@@ -258,7 +257,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             self.session, matching_entities, hier_attrs)

         # Get ftrack api module (as they are different per python version)
-        ftrack_api = self.context.data["ftrackPythonModule"]
+        ftrack_api = context.data["ftrackPythonModule"]

         # Use queue of hierarchy items to process
         import_queue = collections.deque()
@@ -292,7 +291,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             # CUSTOM ATTRIBUTES
             custom_attributes = entity_data.get('custom_attributes', {})
             instances = []
-            for instance in self.context:
+            for instance in context:
                 instance_asset_name = instance.data.get("asset")
                 if (
                     instance_asset_name
@@ -369,6 +368,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                 if task_name:
                     instances_by_task_name[task_name.lower()].append(instance)

+            ftrack_status_by_task_id = context.data["ftrackStatusByTaskId"]
             tasks = entity_data.get('tasks', [])
             existing_tasks = []
             tasks_to_create = []
@@ -389,11 +389,11 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

             for task_name, task_type in tasks_to_create:
                 task_entity = self.create_task(
-                    name=task_name,
-                    task_type=task_type,
-                    parent=entity
+                    task_name,
+                    task_type,
+                    entity,
+                    ftrack_status_by_task_id
                 )
-
                 for instance in instances_by_task_name[task_name.lower()]:
                     instance.data["ftrackTask"] = task_entity

@@ -481,7 +481,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             for status in task_workflow_statuses
         }

-    def create_task(self, name, task_type, parent):
+    def create_task(self, name, task_type, parent, ftrack_status_by_task_id):
         filter_data = {
             "task_names": name,
             "task_types": task_type
@@ -491,12 +491,14 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             filter_data
         )
         status_id = None
+        status_name = None
         if profile:
             status_name = profile["status_name"]
             status_name_low = status_name.lower()
             for _status_id, status in self.task_statuses.items():
                 if status["name"].lower() == status_name_low:
                     status_id = _status_id
+                    status_name = status["name"]
                     break

         if status_id is None:
@@ -523,6 +525,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
             self.session._configure_locations()
             six.reraise(tp, value, tb)

+        if status_id is not None:
+            ftrack_status_by_task_id[task["id"]] = None
         return task

     def _get_active_assets(self, context):
@@ -356,6 +356,13 @@ class PypeCommands:
     def pack_project(self, project_name, dirpath, database_only):
         from openpype.lib.project_backpack import pack_project

+        if database_only and not dirpath:
+            raise ValueError((
+                "Destination dir must be defined when using --dbonly."
+                " Use '--dirpath {output dir path}' flag"
+                " to specify directory."
+            ))
+
         pack_project(project_name, dirpath, database_only)

     def unpack_project(self, zip_filepath, new_root, database_only):
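A minimal sketch of how the added guards behave when pack_project is called the way this CLI wrapper calls it (positional project name, destination directory, documents-only flag); the project name and path below are made up:

from openpype.lib.project_backpack import pack_project

# Documents-only packing without a destination now raises ValueError,
# both in the CLI wrapper above and inside pack_project itself.
try:
    pack_project("demo_project", None, True)
except ValueError as exc:
    print(exc)

# With an explicit destination directory the documents-only pack can proceed.
pack_project("demo_project", "/tmp/project_backups", True)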
@@ -493,7 +493,29 @@
             "upload_reviewable_with_origin_name": false
         },
         "IntegrateFtrackFarmStatus": {
-            "farm_status_profiles": []
+            "farm_status_profiles": [
+                {
+                    "hosts": [
+                        "celaction"
+                    ],
+                    "task_types": [],
+                    "task_names": [],
+                    "families": [
+                        "render"
+                    ],
+                    "subsets": [],
+                    "status_name": "Render"
+                }
+            ]
         },
+        "ftrack_task_status_local_publish": {
+            "status_profiles": []
+        },
+        "ftrack_task_status_on_farm_publish": {
+            "status_profiles": []
+        },
+        "IntegrateFtrackTaskStatus": {
+            "after_version_statuses": true
+        }
     }
 }
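A hedged sketch of how the default celaction profile above would be resolved by filter_profiles, the same helper the ftrack plugins in this commit import from openpype.lib; the task and subset values are only illustrative:

from openpype.lib import filter_profiles

farm_status_profiles = [
    {
        "hosts": ["celaction"],
        "task_types": [],
        "task_names": [],
        "families": ["render"],
        "subsets": [],
        "status_name": "Render",
    }
]

# Empty lists in a profile act as wildcards, so only host and family
# have to match for this profile to win.
profile = filter_profiles(
    farm_status_profiles,
    {
        "hosts": "celaction",
        "task_types": "Animation",
        "task_names": "anim",
        "families": "render",
        "subsets": "renderAnim",
    },
)
print(profile["status_name"])  # -> "Render"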
@@ -1058,7 +1058,7 @@
         {
             "type": "dict",
             "key": "IntegrateFtrackFarmStatus",
-            "label": "Integrate Ftrack Farm Status",
+            "label": "Ftrack Status To Farm",
             "children": [
                 {
                     "type": "label",
@@ -1068,7 +1068,7 @@
                     "type": "list",
                     "collapsible": true,
                     "key": "farm_status_profiles",
-                    "label": "Farm status profiles",
+                    "label": "Profiles",
                     "use_label_wrap": true,
                     "object_type": {
                         "type": "dict",
@@ -1114,6 +1114,142 @@
                     }
                 }
             ]
         },
+        {
+            "type": "dict",
+            "key": "ftrack_task_status_local_publish",
+            "label": "Ftrack Status Local Integration",
+            "children": [
+                {
+                    "type": "label",
+                    "label": "Change status of task when is integrated locally"
+                },
+                {
+                    "type": "list",
+                    "collapsible": true,
+                    "key": "status_profiles",
+                    "label": "Profiles",
+                    "use_label_wrap": true,
+                    "object_type": {
+                        "type": "dict",
+                        "children": [
+                            {
+                                "key": "host_names",
+                                "label": "Host names",
+                                "type": "hosts-enum",
+                                "multiselection": true
+                            },
+                            {
+                                "key": "task_types",
+                                "label": "Task types",
+                                "type": "task-types-enum"
+                            },
+                            {
+                                "key": "task_names",
+                                "label": "Task names",
+                                "type": "list",
+                                "object_type": "text"
+                            },
+                            {
+                                "key": "families",
+                                "label": "Families",
+                                "type": "list",
+                                "object_type": "text"
+                            },
+                            {
+                                "key": "subset_names",
+                                "label": "Subset names",
+                                "type": "list",
+                                "object_type": "text"
+                            },
+                            {
+                                "type": "separator"
+                            },
+                            {
+                                "key": "status_name",
+                                "label": "Status name",
+                                "type": "text"
+                            }
+                        ]
+                    }
+                }
+            ]
+        },
+        {
+            "type": "dict",
+            "key": "ftrack_task_status_on_farm_publish",
+            "label": "Ftrack Status On Farm",
+            "children": [
+                {
+                    "type": "label",
+                    "label": "Change status of task when it's subset is integrated on farm"
+                },
+                {
+                    "type": "list",
+                    "collapsible": true,
+                    "key": "status_profiles",
+                    "label": "Profiles",
+                    "use_label_wrap": true,
+                    "object_type": {
+                        "type": "dict",
+                        "children": [
+                            {
+                                "key": "host_names",
+                                "label": "Host names",
+                                "type": "hosts-enum",
+                                "multiselection": true
+                            },
+                            {
+                                "key": "task_types",
+                                "label": "Task types",
+                                "type": "task-types-enum"
+                            },
+                            {
+                                "key": "task_names",
+                                "label": "Task names",
+                                "type": "list",
+                                "object_type": "text"
+                            },
+                            {
+                                "key": "families",
+                                "label": "Families",
+                                "type": "list",
+                                "object_type": "text"
+                            },
+                            {
+                                "key": "subset_names",
+                                "label": "Subset names",
+                                "type": "list",
+                                "object_type": "text"
+                            },
+                            {
+                                "type": "separator"
+                            },
+                            {
+                                "key": "status_name",
+                                "label": "Status name",
+                                "type": "text"
+                            }
+                        ]
+                    }
+                }
+            ]
+        },
+        {
+            "type": "dict",
+            "key": "IntegrateFtrackTaskStatus",
+            "label": "Integrate Ftrack Task Status",
+            "children": [
+                {
+                    "type": "label",
+                    "label": "Apply collected task statuses. This plugin can run before or after version integration. Some status automations may conflict with status changes on versions because of wrong order."
+                },
+                {
+                    "type": "boolean",
+                    "key": "after_version_statuses",
+                    "label": "After version integration"
+                }
+            ]
+        }
     ]
 }
@@ -53,6 +53,9 @@ class CreatorsModel(QtGui.QStandardItemModel):
             index = self.index(row, 0)
             item_id = index.data(ITEM_ID_ROLE)
             creator_plugin = self._creators_by_id.get(item_id)
-            if creator_plugin and creator_plugin.family == family:
+            if creator_plugin and (
+                creator_plugin.label.lower() == family.lower()
+                or creator_plugin.family.lower() == family.lower()
+            ):
                 indexes.append(index)
         return indexes
@@ -453,7 +453,11 @@ class PublisherWindow(QtWidgets.QDialog):
             return

         save_match = event.matches(QtGui.QKeySequence.Save)
-        if save_match == QtGui.QKeySequence.ExactMatch:
+        # PySide2 and PySide6 support
+        if not isinstance(save_match, bool):
+            save_match = save_match == QtGui.QKeySequence.ExactMatch
+
+        if save_match:
             if not self._controller.publish_has_started:
                 self._save_changes(True)
             event.accept()
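The keyPressEvent change above normalizes the result of QKeySequence matching, which some Qt bindings return as a plain bool and others as an "exact match" enum value (per the PySide2/PySide6 comment in the diff). A standalone sketch of that pattern, with hypothetical names:

def is_exact_match(match_result, exact_value):
    # Some bindings already hand back a bool; others return an enum member
    # that has to be compared against the exact-match value explicitly.
    if isinstance(match_result, bool):
        return match_result
    return match_result == exact_value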