mirror of https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
use product type and name in rest of pipeline functions
This commit is contained in:
parent 88786d8160
commit 993b7619a7
8 changed files with 250 additions and 232 deletions
@@ -58,16 +58,16 @@ def remap_source(path, anatomy):
     return source


-def extend_frames(asset, subset, start, end):
+def extend_frames(folder_path, product_name, start, end):
    """Get latest version of asset and update frame range.

    Based on minimum and maximum values.

    Arguments:
-        asset (str): asset name
-        subset (str): subset name
-        start (int): start frame
-        end (int): end frame
+        folder_path (str): Folder path.
+        product_name (str): Product name.
+        start (int): Start frame.
+        end (int): End frame.

    Returns:
        (int, int): update frame start/end
@@ -80,8 +80,8 @@ def extend_frames(asset, subset, start, end):
     project_name = get_current_project_name()
     version = get_last_version_by_subset_name(
         project_name,
-        subset,
-        asset_name=asset
+        product_name,
+        asset_name=folder_path
     )

     # Set prev start / end frames for comparison
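For orientation, the renamed extend_frames still compares the requested range with the last published version and keeps the widest span. Below is a minimal runnable sketch of that behaviour, not the actual ayon-core implementation (which resolves the version from the database as shown above); the folder, product, and frame values are all made up.

def extend_frames_sketch(folder_path, product_name, start, end, last_version_attribs):
    # Widen the requested range by the previously published one,
    # mirroring "Based on minimum and maximum values" from the docstring.
    prev_start = last_version_attribs["frameStart"]
    prev_end = last_version_attribs["frameEnd"]
    return min(start, prev_start), max(end, prev_end)

print(extend_frames_sketch(
    "/shots/sh010", "renderMain", 1005, 1050,
    {"frameStart": 1001, "frameEnd": 1040},
))  # -> (1001, 1050)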
@@ -198,7 +198,7 @@ def create_skeleton_instance(
     if data.get("extendFrames", False):
         time_data.start, time_data.end = extend_frames(
             data["folderPath"],
-            data["subset"],
+            data["productName"],
             time_data.start,
             time_data.end,
         )
@@ -215,18 +215,18 @@ def create_skeleton_instance(
         log.warning(("Could not find root path for remapping \"{}\". "
                      "This may cause issues.").format(source))

-    family = ("render"
+    product_type = ("render"
               if "prerender.farm" not in instance.data["families"]
               else "prerender")
-    families = [family]
+    families = [product_type]

     # pass review to families if marked as review
     if data.get("review"):
         families.append("review")

     instance_skeleton_data = {
-        "family": family,
-        "subset": data["subset"],
+        "productType": product_type,
+        "productName": data["productName"],
         "families": families,
         "folderPath": data["folderPath"],
         "frameStart": time_data.start,
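To make the key rename concrete, here is the same branching applied to a fabricated instance payload; every value below is hypothetical.

data = {
    "productName": "renderCompositingMain",
    "folderPath": "/shots/sh010",
    "review": True,
}
product_type = "render"  # "prerender" for "prerender.farm" instances
families = [product_type]
if data.get("review"):
    families.append("review")

instance_skeleton_data = {
    "productType": product_type,         # previously "family"
    "productName": data["productName"],  # previously "subset": data["subset"]
    "families": families,
    "folderPath": data["folderPath"],
}
print(instance_skeleton_data["families"])  # ['render', 'review']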
@@ -472,8 +472,8 @@ def create_instances_for_aov(instance, skeleton, aov_filter,
         expected files.

     """
-    # we cannot attach AOVs to other subsets as we consider every
-    # AOV subset of its own.
+    # we cannot attach AOVs to other products as we consider every
+    # AOV product of its own.

     log = Logger.get_logger("farm_publishing")
     additional_color_data = {
@@ -493,7 +493,7 @@ def create_instances_for_aov(instance, skeleton, aov_filter,
             log.warning(e)
         additional_color_data["colorspaceTemplate"] = colorspace_template

-    # if there are subset to attach to and more than one AOV,
+    # if there are products to attach to and more than one AOV,
     # we cannot proceed.
     if (
         len(instance.data.get("attachTo", [])) > 0
@@ -501,7 +501,7 @@ def create_instances_for_aov(instance, skeleton, aov_filter,
     ):
         raise KnownPublishError(
             "attaching multiple AOVs or renderable cameras to "
-            "subset is not supported yet.")
+            "product is not supported yet.")

     # create instances for every AOV we found in expected files.
     # NOTE: this is done for every AOV and every render camera (if
@@ -544,7 +544,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
     task = os.environ["AYON_TASK_NAME"]

     anatomy = instance.context.data["anatomy"]
-    subset = skeleton["subset"]
+    s_product_name = skeleton["productName"]
     cameras = instance.data.get("cameras", [])
     exp_files = instance.data["expectedFiles"]
     log = Logger.get_logger("farm_publishing")
@@ -570,34 +570,33 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
         ext = cols[0].tail.lstrip(".")
         col = list(cols[0])

-        # create subset name `familyTaskSubset_AOV`
+        # create product name `<product type><Task><Product name>`
         # TODO refactor/remove me
-        family = skeleton["family"]
-        if not subset.startswith(family):
+        product_type = skeleton["productType"]
+        if not s_product_name.startswith(product_type):
             group_name = '{}{}{}{}{}'.format(
-                family,
+                product_type,
                 task[0].upper(), task[1:],
-                subset[0].upper(), subset[1:])
+                s_product_name[0].upper(), s_product_name[1:])
         else:
-            group_name = subset
+            group_name = s_product_name

         # if there are multiple cameras, we need to add camera name
         expected_filepath = col[0] if isinstance(col, (list, tuple)) else col
         cams = [cam for cam in cameras if cam in expected_filepath]
         if cams:
             for cam in cams:
-                if aov:
-                    if not aov.startswith(cam):
-                        subset_name = '{}_{}_{}'.format(group_name, cam, aov)
-                    else:
-                        subset_name = "{}_{}".format(group_name, aov)
+                if not aov:
+                    product_name = '{}_{}'.format(group_name, cam)
+                elif not aov.startswith(cam):
+                    product_name = '{}_{}_{}'.format(group_name, cam, aov)
                 else:
-                    subset_name = '{}_{}'.format(group_name, cam)
+                    product_name = "{}_{}".format(group_name, aov)
         else:
             if aov:
-                subset_name = '{}_{}'.format(group_name, aov)
+                product_name = '{}_{}'.format(group_name, aov)
             else:
-                subset_name = '{}'.format(group_name)
+                product_name = '{}'.format(group_name)

         if isinstance(col, (list, tuple)):
             staging = os.path.dirname(col[0])
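The naming branch above is easiest to follow with concrete inputs. Here is a standalone sketch of the same logic; all sample values (task, camera, and AOV names) are fabricated.

def compose_product_name(product_type, task, s_product_name, cam=None, aov=None):
    # Group name: either `<product type><Task><Product name>` or the
    # product name itself when it already starts with the product type.
    if not s_product_name.startswith(product_type):
        group_name = '{}{}{}{}{}'.format(
            product_type,
            task[0].upper(), task[1:],
            s_product_name[0].upper(), s_product_name[1:])
    else:
        group_name = s_product_name
    # Camera and AOV suffixes mirror the branching in the hunk above.
    if cam:
        if not aov:
            return '{}_{}'.format(group_name, cam)
        if not aov.startswith(cam):
            return '{}_{}_{}'.format(group_name, cam, aov)
        return '{}_{}'.format(group_name, aov)
    return '{}_{}'.format(group_name, aov) if aov else group_name

print(compose_product_name("render", "compositing", "main", aov="beauty"))
# -> renderCompositingMain_beauty
print(compose_product_name("render", "compositing", "renderMain", cam="camMain"))
# -> renderMain_camMain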
@@ -609,7 +608,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
             except ValueError as e:
                 log.warning(e)

-        log.info("Creating data for: {}".format(subset_name))
+        log.info("Creating data for: {}".format(product_name))

         app = os.environ.get("AYON_HOST_NAME", "")

@@ -626,7 +625,7 @@ def _create_instances_for_aov(instance, skeleton, aov_filter, additional_data,
             preview = True

         new_instance = deepcopy(skeleton)
-        new_instance["subset"] = subset_name
+        new_instance["productName"] = product_name
         new_instance["subsetGroup"] = group_name

         # explicitly disable review by user
@@ -778,7 +777,7 @@ def create_skeleton_instance_cache(instance):
     if data.get("extendFrames", False):
         time_data.start, time_data.end = extend_frames(
             data["folderPath"],
-            data["subset"],
+            data["productName"],
             time_data.start,
             time_data.end,
         )
@@ -795,15 +794,15 @@ def create_skeleton_instance_cache(instance):
         log.warning(("Could not find root path for remapping \"{}\". "
                      "This may cause issues.").format(source))

-    family = instance.data["family"]
+    product_type = instance.data["productType"]
     # Make sure "render" is in the families to go through
     # validating expected and rendered files
     # during publishing job.
-    families = ["render", family]
+    families = ["render", product_type]

     instance_skeleton_data = {
-        "family": family,
-        "subset": data["subset"],
+        "productType": product_type,
+        "productName": data["productName"],
         "families": families,
         "folderPath": data["folderPath"],
         "frameStart": time_data.start,
@@ -910,8 +909,8 @@ def create_instances_for_cache(instance, skeleton):

     """
     anatomy = instance.context.data["anatomy"]
-    subset = skeleton["subset"]
-    family = skeleton["family"]
+    product_name = skeleton["productName"]
+    product_type = skeleton["productType"]
     exp_files = instance.data["expectedFiles"]
     log = Logger.get_logger("farm_publishing")

@@ -948,9 +947,9 @@ def create_instances_for_cache(instance, skeleton):

         new_instance = deepcopy(skeleton)

-        new_instance["subset"] = subset
-        log.info("Creating data for: {}".format(subset))
-        new_instance["family"] = family
+        new_instance["productName"] = product_name
+        log.info("Creating data for: {}".format(product_name))
+        new_instance["productType"] = product_type
         new_instance["families"] = skeleton["families"]
         # create representation
         if isinstance(col, (list, tuple)):
@@ -984,7 +983,7 @@ def create_instances_for_cache(instance, skeleton):
 def copy_extend_frames(instance, representation):
     """Copy existing frames from latest version.

-    This will copy all existing frames from subset's latest version back
+    This will copy all existing frames from product's latest version back
     to render directory and rename them to what renderer is expecting.

     Arguments:
@@ -1005,20 +1004,20 @@ def copy_extend_frames(instance, representation):
     project_name = instance.context.data["project"]
     anatomy = instance.context.data["anatomy"]  # type: Anatomy

-    # get latest version of subset
-    # this will stop if subset wasn't published yet
+    # get latest version of product
+    # this will stop if product wasn't published yet

     version = get_last_version_by_subset_name(
         project_name,
-        instance.data.get("subset"),
+        instance.data.get("productName"),
         asset_name=instance.data.get("folderPath")
     )

     # get its files based on extension
-    subset_resources = get_resources(
+    product_resources = get_resources(
         project_name, version, representation.get("ext")
     )
-    r_col, _ = clique.assemble(subset_resources)
+    r_col, _ = clique.assemble(product_resources)

     # if override remove all frames we are expecting to be rendered,
     # so we'll copy only those missing from current render
@@ -1064,11 +1063,11 @@ def copy_extend_frames(instance, representation):
     log.info("Finished copying %i files" % len(resource_files))


-def attach_instances_to_subset(attach_to, instances):
-    """Attach instance to subset.
+def attach_instances_to_product(attach_to, instances):
+    """Attach instance to product.

-    If we are attaching to other subsets, create copy of existing
-    instances, change data to match its subset and replace
+    If we are attaching to other products, create copy of existing
+    instances, change data to match its product and replace
     existing instances with modified data.

     Args:
@@ -1084,8 +1083,8 @@ def attach_instances_to_subset(attach_to, instances):
     for i in instances:
         new_inst = copy.deepcopy(i)
         new_inst["version"] = attach_instance.get("version")
-        new_inst["subset"] = attach_instance.get("subset")
-        new_inst["family"] = attach_instance.get("family")
+        new_inst["productName"] = attach_instance.get("productName")
+        new_inst["productType"] = attach_instance.get("productType")
         new_inst["append"] = True
         # don't set subsetGroup if we are attaching
         new_inst.pop("subsetGroup")
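A runnable sketch of what the renamed attach helper copies per instance; the attach_instance dict stands in for a real publish instance and is entirely made up.

import copy

attach_instance = {"version": 7, "productName": "modelMain",
                   "productType": "model"}
instances = [{"productName": "renderMain", "subsetGroup": "renderMain"}]

new_instances = []
for i in instances:
    new_inst = copy.deepcopy(i)
    new_inst["version"] = attach_instance.get("version")
    new_inst["productName"] = attach_instance.get("productName")  # was "subset"
    new_inst["productType"] = attach_instance.get("productType")  # was "family"
    new_inst["append"] = True
    new_inst.pop("subsetGroup")  # don't set subsetGroup if we are attaching
    new_instances.append(new_inst)

print(new_instances[0]["productName"])  # modelMain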
@@ -1108,7 +1107,7 @@ def create_metadata_path(instance, anatomy):
         # directory is not available
         log.warning("Path is unreachable: `{}`".format(output_dir))

-    metadata_filename = "{}_metadata.json".format(ins_data["subset"])
+    metadata_filename = "{}_metadata.json".format(ins_data["productName"])

     metadata_path = os.path.join(output_dir, metadata_filename)

@@ -16,9 +16,9 @@ class TimeData:
     ...

 def remap_source(source: str, anatomy: Anatomy): ...
-def extend_frames(asset: str, subset: str, start: int, end: int) -> Tuple[int, int]: ...
+def extend_frames(folder_path: str, product_name: str, start: int, end: int) -> Tuple[int, int]: ...
 def get_time_data_from_instance_or_context(instance: pyblish.api.Instance) -> TimeData: ...
 def get_transferable_representations(instance: pyblish.api.Instance) -> list: ...
 def create_skeleton_instance(instance: pyblish.api.Instance, families_transfer: list = ..., instance_transfer: dict = ...) -> dict: ...
 def create_instances_for_aov(instance: pyblish.api.Instance, skeleton: dict, aov_filter: dict) -> List[pyblish.api.Instance]: ...
-def attach_instances_to_subset(attach_to: list, instances: list) -> list: ...
+def attach_instances_to_product(attach_to: list, instances: list) -> list: ...
@@ -5,8 +5,9 @@ def get_published_workfile_instance(context):
     """Find workfile instance in context"""
     for i in context:
         is_workfile = (
-            "workfile" in i.data.get("families", []) or
-            i.data["family"] == "workfile"
+            i.data["productType"] == "workfile"
+            or "workfile" in i.data.get("families", [])
+
         )
         if not is_workfile:
             continue
@@ -28,10 +28,11 @@ class RenderInstance(object):
     time = attr.ib()  # time of instance creation (get_formatted_current_time)
     source = attr.ib()  # path to source scene file
     label = attr.ib()  # label to show in GUI
-    subset = attr.ib()  # subset name
-    task = attr.ib()  # task name
+    productType = attr.ib()  # product type
+    productName = attr.ib()  # product name
     folderPath = attr.ib()  # folder path
-    attachTo = attr.ib()  # subset name to attach render to
+    task = attr.ib()  # task name
+    attachTo = attr.ib()  # product name to attach render to
     setMembers = attr.ib()  # list of nodes/members producing render output
     publish = attr.ib()  # bool, True to publish instance
     name = attr.ib()  # instance name
@@ -60,7 +61,7 @@ class RenderInstance(object):
     review = attr.ib(default=None)  # False - explicitly skip review
     priority = attr.ib(default=50)  # job priority on farm

-    family = attr.ib(default="renderlayer")
+    # family = attr.ib(default="renderlayer")
     families = attr.ib(default=["renderlayer"])  # list of families
     # True if should be rendered on farm, eg not integrate
     farm = attr.ib(default=False)
@@ -153,13 +154,13 @@ class AbstractCollectRender(pyblish.api.ContextPlugin):
             exp_files = self.get_expected_files(render_instance)
             assert exp_files, "no file names were generated, this is bug"

-            # if we want to attach render to subset, check if we have AOV's
+            # if we want to attach render to product, check if we have AOV's
             # in expectedFiles. If so, raise error as we cannot attach AOV
-            # (considered to be subset on its own) to another subset
+            # (considered to be product on its own) to another product
             if render_instance.attachTo:
                 assert isinstance(exp_files, list), (
                     "attaching multiple AOVs or renderable cameras to "
-                    "subset is not supported"
+                    "product is not supported"
                 )

             frame_start_render = int(render_instance.frameStart)
@@ -105,7 +105,7 @@ def get_hero_template_name_profiles(
 def get_publish_template_name(
     project_name,
     host_name,
-    family,
+    product_type,
     task_name,
     task_type,
     project_settings=None,
@@ -123,7 +123,7 @@ def get_publish_template_name(
     Args:
         project_name (str): Name of project where to look for settings.
         host_name (str): Name of host integration.
-        family (str): Family for which should be found template.
+        product_type (str): Product type for which should be found template.
         task_name (str): Task name on which is instance working.
         task_type (str): Task type on which is instance working.
         project_settings (Dict[str, Any]): Prepared project settings.
@@ -138,7 +138,7 @@ def get_publish_template_name(
     template = None
     filter_criteria = {
         "hosts": host_name,
-        "product_types": family,
+        "product_types": product_type,
         "task_names": task_name,
         "task_types": task_type,
     }
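The criteria dict is consumed by filter_profiles. The real helper is more elaborate (wildcards, specificity scoring); this simplified sketch only illustrates how the renamed product_types value participates in exact-match filtering, with fabricated profiles.

def pick_profile(profiles, criteria):
    # Return the first profile whose every constrained key contains the
    # requested value; unset keys match anything.
    for profile in profiles:
        if all(
            not profile.get(key) or value in profile[key]
            for key, value in criteria.items()
        ):
            return profile
    return None

profiles = [{"product_types": ["render"], "template_name": "publish_render"}]
criteria = {"product_types": "render", "task_names": "compositing"}
print(pick_profile(profiles, criteria))  # matches the render profile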
@@ -701,19 +701,26 @@ def get_publish_repre_path(instance, repre, only_published=False):
     return None


-def get_custom_staging_dir_info(project_name, host_name, family, task_name,
-                                task_type, subset_name,
-                                project_settings=None,
-                                anatomy=None, log=None):
+def get_custom_staging_dir_info(
+    project_name,
+    host_name,
+    product_type,
+    task_name,
+    task_type,
+    product_name,
+    project_settings=None,
+    anatomy=None,
+    log=None
+):
     """Checks profiles if context should use special custom dir as staging.

     Args:
         project_name (str)
         host_name (str)
-        family (str)
+        product_type (str)
         task_name (str)
         task_type (str)
-        subset_name (str)
+        product_name (str)
         project_settings(Dict[str, Any]): Prepared project settings.
         anatomy (Dict[str, Any])
         log (Logger) (optional)
@@ -736,10 +743,10 @@ def get_custom_staging_dir_info(project_name, host_name, family, task_name,

     filtering_criteria = {
         "hosts": host_name,
-        "families": family,
+        "families": product_type,
         "task_names": task_name,
         "task_types": task_type,
-        "subsets": subset_name
+        "subsets": product_name
     }
     profile = filter_profiles(custom_staging_dir_profiles,
                               filtering_criteria,
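Worth noting: only the function arguments were renamed here; the profile keys on the settings side still use the legacy "families"/"subsets" names. A small illustration with fabricated values:

product_type = "render"
product_name = "renderLightingMain"
filtering_criteria = {
    "hosts": "maya",
    "families": product_type,   # settings key kept for compatibility
    "task_names": "lighting",
    "task_types": "Lighting",
    "subsets": product_name,    # settings key kept for compatibility
}
print(filtering_criteria)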
@@ -780,18 +787,18 @@ def _validate_transient_template(project_name, template_name, anatomy):
 def get_published_workfile_instance(context):
     """Find workfile instance in context"""
     for i in context:
-        is_workfile = (
-            "workfile" in i.data.get("families", []) or
-            i.data["family"] == "workfile"
-        )
-        if not is_workfile:
-            continue
-
         # test if there is instance of workfile waiting
         # to be published.
         if not i.data.get("publish", True):
             continue

+        if not (
+            i.data["productType"] == "workfile"
+            # QUESTION Is check in 'families' valid?
+            or "workfile" in i.data.get("families", [])
+        ):
+            continue
+
         return i
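The reordered check now consults productType first and keeps the families list as a fallback (see the QUESTION comment above). A minimal sketch with fabricated instance data; note the real code indexes i.data["productType"] directly, while this sketch uses .get for a self-contained demo.

def is_workfile_instance(data):
    # productType takes precedence; 'families' remains as a fallback.
    return (
        data.get("productType") == "workfile"
        or "workfile" in data.get("families", [])
    )

print(is_workfile_instance({"productType": "workfile"}))                # True
print(is_workfile_instance({"productType": "render",
                            "families": ["workfile"]}))                 # True
print(is_workfile_instance({"productType": "render", "families": []}))  # False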
@@ -917,7 +924,7 @@ def get_publish_instance_label(instance):
     is used string conversion of instance object -> 'instance._name'.

     Todos:
-        Maybe 'subset' key could be used too.
+        Maybe 'productName' key could be used too.

     Args:
         instance (pyblish.api.Instance): Pyblish instance.
@@ -936,8 +943,8 @@ def get_publish_instance_label(instance):
 def get_publish_instance_families(instance):
     """Get all families of the instance.

-    Look for families under 'family' and 'families' keys in instance data.
-    Value of 'family' is used as first family and then all other families
+    Look for families under 'productType' and 'families' keys in instance data.
+    Value of 'productType' is used as first family and then all other families
     in random order.

     Args:
@@ -947,11 +954,11 @@ def get_publish_instance_families(instance):
         list[str]: List of families.
     """

-    family = instance.data.get("family")
+    product_type = instance.data.get("productType")
     families = set(instance.data.get("families") or [])
     output = []
-    if family:
-        output.append(family)
-        families.discard(family)
+    if product_type:
+        output.append(product_type)
+        families.discard(product_type)
     output.extend(families)
     return output
@@ -7,8 +7,8 @@ def get_versioning_start(
     host_name,
     task_name=None,
     task_type=None,
-    family=None,
-    subset=None,
+    product_type=None,
+    product_name=None,
     project_settings=None,
 ):
     """Get anatomy versioning start"""
@@ -22,12 +22,14 @@ def get_versioning_start(
     if not profiles:
         return version_start

+    # TODO use 'product_types' and 'product_name' instead of
+    #   'families' and 'subsets'
     filtering_criteria = {
         "host_names": host_name,
-        "families": family,
+        "families": product_type,
         "task_names": task_name,
         "task_types": task_type,
-        "subsets": subset
+        "subsets": product_name
     }
     profile = filter_profiles(profiles, filtering_criteria)

@@ -48,18 +48,18 @@ class BuildWorkfile:
         return self._log

     @staticmethod
-    def map_subsets_by_family(subsets):
-        subsets_by_family = collections.defaultdict(list)
-        for subset in subsets:
-            family = subset["data"].get("family")
-            if not family:
-                families = subset["data"].get("families")
+    def map_products_by_type(subset_docs):
+        products_by_type = collections.defaultdict(list)
+        for subset_doc in subset_docs:
+            product_type = subset_doc["data"].get("family")
+            if not product_type:
+                families = subset_doc["data"].get("families")
                 if not families:
                     continue
-                family = families[0]
+                product_type = families[0]

-            subsets_by_family[family].append(subset)
-        return subsets_by_family
+            products_by_type[product_type].append(subset_doc)
+        return products_by_type

     def process(self):
         """Main method of this wrapper.
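A worked example of the renamed helper, with fabricated subset documents exercising both branches (explicit "family" and the "families"[0] fallback).

import collections

def map_products_by_type(subset_docs):
    products_by_type = collections.defaultdict(list)
    for subset_doc in subset_docs:
        product_type = subset_doc["data"].get("family")
        if not product_type:
            families = subset_doc["data"].get("families")
            if not families:
                continue
            product_type = families[0]
        products_by_type[product_type].append(subset_doc)
    return products_by_type

docs = [
    {"name": "modelMain", "data": {"family": "model"}},
    {"name": "renderMain", "data": {"families": ["render", "review"]}},
    {"name": "broken", "data": {}},  # skipped: no family info at all
]
print({k: [d["name"] for d in v]
       for k, v in map_products_by_type(docs).items()})
# {'model': ['modelMain'], 'render': ['renderMain']}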
@@ -80,17 +80,17 @@ class BuildWorkfile:
         stored in Workfile profiles from presets. Profiles are set by host,
         filtered by current task name and used by families.

-        Each family can specify representation names and loaders for
+        Each product type can specify representation names and loaders for
         representations and first available and successful loaded
         representation is returned as container.

         At the end you'll get list of loaded containers per each asset.

         loaded_containers [{
-            "asset_entity": <AssetEntity1>,
+            "asset_doc": <AssetEntity1>,
             "containers": [<Container1>, <Container2>, ...]
         }, {
-            "asset_entity": <AssetEntity2>,
+            "asset_doc": <AssetEntity2>,
             "containers": [<Container3>, ...]
         }, {
             ...
@@ -110,14 +110,14 @@ class BuildWorkfile:

         # Get current asset name and entity
         project_name = get_current_project_name()
-        current_asset_name = get_current_asset_name()
-        current_asset_entity = get_asset_by_name(
-            project_name, current_asset_name
+        current_folder_path = get_current_asset_name()
+        current_asset_doc = get_asset_by_name(
+            project_name, current_folder_path
         )
         # Skip if asset was not found
-        if not current_asset_entity:
-            print("Asset entity with name `{}` was not found".format(
-                current_asset_name
+        if not current_asset_doc:
+            print("Folder entity `{}` was not found".format(
+                current_folder_path
             ))
             return loaded_containers

@@ -143,7 +143,7 @@ class BuildWorkfile:

         # Load workfile presets for task
         self.build_presets = self.get_build_presets(
-            current_task_name, current_asset_entity
+            current_task_name, current_asset_doc
         )

         # Skip if there aren't any presets for task
@@ -155,9 +155,9 @@ class BuildWorkfile:
             )
             return loaded_containers

-        # Get presets for loading current asset
+        # Get presets for loading current folder
         current_context_profiles = self.build_presets.get("current_context")
-        # Get presets for loading linked assets
+        # Get presets for loading linked folders
         link_context_profiles = self.build_presets.get("linked_assets")
         # Skip if both are missing
         if not current_context_profiles and not link_context_profiles:
@@ -177,38 +177,38 @@ class BuildWorkfile:
         elif not link_context_profiles:
             self.log.warning((
                 "Current task `{}` doesn't have any "
-                "loading preset for it's linked assets."
+                "loading preset for its linked folders."
             ).format(current_task_name))

         # Prepare assets to process by workfile presets
-        assets = []
-        current_asset_id = None
+        asset_docs = []
+        current_folder_id = None
         if current_context_profiles:
             # Add current asset entity if preset has current context set
-            assets.append(current_asset_entity)
-            current_asset_id = current_asset_entity["_id"]
+            asset_docs.append(current_asset_doc)
+            current_folder_id = current_asset_doc["_id"]

         if link_context_profiles:
             # Find and append linked assets if preset has set linked mapping
-            link_assets = get_linked_assets(project_name, current_asset_entity)
+            link_assets = get_linked_assets(project_name, current_asset_doc)
             if link_assets:
-                assets.extend(link_assets)
+                asset_docs.extend(link_assets)

         # Skip if there are no assets. This can happen if only linked mapping
         # is set and there are no links for this asset.
-        if not assets:
+        if not asset_docs:
             self.log.warning(
                 "Asset does not have linked assets. Nothing to process."
             )
             return loaded_containers

         # Prepare entities from database for assets
-        prepared_entities = self._collect_last_version_repres(assets)
+        prepared_entities = self._collect_last_version_repres(asset_docs)

         # Load containers by prepared entities and presets
         # - Current asset containers
-        if current_asset_id and current_asset_id in prepared_entities:
-            current_context_data = prepared_entities.pop(current_asset_id)
+        if current_folder_id and current_folder_id in prepared_entities:
+            current_context_data = prepared_entities.pop(current_folder_id)
             loaded_data = self.load_containers_by_asset_data(
                 current_context_data, current_context_profiles, loaders_by_name
             )
@@ -281,7 +281,7 @@ class BuildWorkfile:
             with valid values.
         - "loaders" expects list of strings representing possible loaders.
         - "families" expects list of strings for filtering
-            by main subset family.
+            by product type.
         - "repre_names" expects list of strings for filtering by
             representation name.
@@ -349,33 +349,33 @@ class BuildWorkfile:

         return valid_profiles

-    def _prepare_profile_for_subsets(self, subsets, profiles):
-        """Select profile for each subset by it's data.
+    def _prepare_profile_for_products(self, subset_docs, profiles):
+        """Select profile for each product by its data.

-        Profiles are filtered for each subset individually.
-        Profile is filtered by subset's family, optionally by name regex and
+        Profiles are filtered for each product individually.
+        Profile is filtered by product type, optionally by name regex and
         representation names set in profile.
-        It is possible to not find matching profile for subset, in that case
-        subset is skipped and it is possible that none of subsets have
+        It is possible to not find matching profile for product, in that case
+        product is skipped and it is possible that none of products have
         matching profile.

         Args:
-            subsets (List[Dict[str, Any]]): Subset documents.
+            subset_docs (List[Dict[str, Any]]): Subset documents.
             profiles (List[Dict[str, Any]]): Build profiles.

         Returns:
-            Dict[str, Any]: Profile by subset's id.
+            Dict[str, Any]: Profile by product id.
         """

-        # Prepare subsets
-        subsets_by_family = self.map_subsets_by_family(subsets)
+        # Prepare products
+        products_by_type = self.map_products_by_type(subset_docs)

-        profiles_per_subset_id = {}
-        for family, subsets in subsets_by_family.items():
-            family_low = family.lower()
+        profiles_by_product_id = {}
+        for product_type, subset_docs in products_by_type.items():
+            product_type_low = product_type.lower()
             for profile in profiles:
-                # Skip profile if does not contain family
-                if family_low not in profile["product_types_lowered"]:
+                # Skip profile if does not contain product type
+                if product_type_low not in profile["product_types_lowered"]:
                     continue

                 # Precompile name filters as regexes
@@ -387,31 +387,31 @@ class BuildWorkfile:
                 profile_regexes = _profile_regexes

                 # TODO prepare regex compilation
-                for subset in subsets:
+                for subset_doc in subset_docs:
                     # Verify regex filtering (optional)
                     if profile_regexes:
                         valid = False
                         for pattern in profile_regexes:
-                            if re.match(pattern, subset["name"]):
+                            if re.match(pattern, subset_doc["name"]):
                                 valid = True
                                 break

                         if not valid:
                             continue

-                    profiles_per_subset_id[subset["_id"]] = profile
+                    profiles_by_product_id[subset_doc["_id"]] = profile

                 # break profiles loop on finding the first matching profile
                 break
-        return profiles_per_subset_id
+        return profiles_by_product_id

     def load_containers_by_asset_data(
-        self, asset_entity_data, build_profiles, loaders_by_name
+        self, asset_doc_data, build_profiles, loaders_by_name
     ):
         """Load containers for entered asset entity by Build profiles.

         Args:
-            asset_entity_data (Dict[str, Any]): Prepared data with subsets,
+            asset_doc_data (Dict[str, Any]): Prepared data with products,
                 last versions and representations for specific asset.
             build_profiles (Dict[str, Any]): Build profiles.
             loaders_by_name (Dict[str, LoaderPlugin]): Available loaders
@@ -423,10 +423,10 @@ class BuildWorkfile:
         """

         # Make sure all data are not empty
-        if not asset_entity_data or not build_profiles or not loaders_by_name:
+        if not asset_doc_data or not build_profiles or not loaders_by_name:
             return

-        asset_entity = asset_entity_data["asset_entity"]
+        asset_doc = asset_doc_data["asset_doc"]

         valid_profiles = self._filter_build_profiles(
             build_profiles, loaders_by_name
@@ -439,53 +439,53 @@ class BuildWorkfile:

         self.log.debug("Valid Workfile profiles: {}".format(valid_profiles))

-        subsets_by_id = {}
-        version_by_subset_id = {}
+        products_by_id = {}
+        version_by_product_id = {}
         repres_by_version_id = {}
-        for subset_id, in_data in asset_entity_data["subsets"].items():
-            subset_entity = in_data["subset_entity"]
-            subsets_by_id[subset_entity["_id"]] = subset_entity
+        for product_id, in_data in asset_doc_data["subsets"].items():
+            subset_doc = in_data["subset_doc"]
+            products_by_id[subset_doc["_id"]] = subset_doc

             version_data = in_data["version"]
-            version_entity = version_data["version_entity"]
-            version_by_subset_id[subset_id] = version_entity
-            repres_by_version_id[version_entity["_id"]] = (
+            version_doc = version_data["version_doc"]
+            version_by_product_id[product_id] = version_doc
+            repres_by_version_id[version_doc["_id"]] = (
                 version_data["repres"]
             )

-        if not subsets_by_id:
-            self.log.warning("There are not subsets for asset {0}".format(
-                asset_entity["name"]
+        if not products_by_id:
+            self.log.warning("There are no products for folder {0}".format(
+                asset_doc["name"]
             ))
             return

-        profiles_per_subset_id = self._prepare_profile_for_subsets(
-            subsets_by_id.values(), valid_profiles
+        profiles_by_product_id = self._prepare_profile_for_products(
+            products_by_id.values(), valid_profiles
         )
-        if not profiles_per_subset_id:
-            self.log.warning("There are not valid subsets.")
+        if not profiles_by_product_id:
+            self.log.warning("There are no valid products.")
             return

-        valid_repres_by_subset_id = collections.defaultdict(list)
-        for subset_id, profile in profiles_per_subset_id.items():
+        valid_repres_by_product_id = collections.defaultdict(list)
+        for product_id, profile in profiles_by_product_id.items():
             profile_repre_names = profile["repre_names_lowered"]

-            version_entity = version_by_subset_id[subset_id]
-            version_id = version_entity["_id"]
+            version_doc = version_by_product_id[product_id]
+            version_id = version_doc["_id"]
             repres = repres_by_version_id[version_id]
             for repre in repres:
                 repre_name_low = repre["name"].lower()
                 if repre_name_low in profile_repre_names:
-                    valid_repres_by_subset_id[subset_id].append(repre)
+                    valid_repres_by_product_id[product_id].append(repre)

         # DEBUG message
-        msg = "Valid representations for Asset: `{}`".format(
-            asset_entity["name"]
+        msg = "Valid representations for Folder: `{}`".format(
+            asset_doc["name"]
         )
-        for subset_id, repres in valid_repres_by_subset_id.items():
-            subset = subsets_by_id[subset_id]
-            msg += "\n# Subset Name/ID: `{}`/{}".format(
-                subset["name"], subset_id
+        for product_id, repres in valid_repres_by_product_id.items():
+            subset_doc = products_by_id[product_id]
+            msg += "\n# Product Name/ID: `{}`/{}".format(
+                subset_doc["name"], product_id
             )
             for repre in repres:
                 msg += "\n## Repre name: `{}`".format(repre["name"])
@@ -493,37 +493,37 @@ class BuildWorkfile:
         self.log.debug(msg)

         containers = self._load_containers(
-            valid_repres_by_subset_id, subsets_by_id,
-            profiles_per_subset_id, loaders_by_name
+            valid_repres_by_product_id, products_by_id,
+            profiles_by_product_id, loaders_by_name
         )

         return {
-            "asset_entity": asset_entity,
+            "asset_doc": asset_doc,
             "containers": containers
         }

     def _load_containers(
-        self, repres_by_subset_id, subsets_by_id,
-        profiles_per_subset_id, loaders_by_name
+        self, repres_by_product_id, products_by_id,
+        profiles_by_product_id, loaders_by_name
     ):
         """Real load by collected data happens here.

-        Loading of representations per subset happens here. Each subset can
+        Loading of representations per product happens here. Each product can
         load one representation. Loading is tried in specific order.
         Representations are tried to load by names defined in configuration.
-        If subset has representation matching representation name each loader
+        If product has representation matching representation name each loader
         is tried to load it until any is successful. If none of them was
         successful then next representation name is tried.
         Subset process loop ends when any representation is loaded or
         all matching representations were already tried.

         Args:
-            repres_by_subset_id (Dict[str, Dict[str, Any]]): Available
-                representations mapped by their parent (subset) id.
-            subsets_by_id (Dict[str, Dict[str, Any]]): Subset documents
+            repres_by_product_id (Dict[str, Dict[str, Any]]): Available
+                representations mapped by their parent (product) id.
+            products_by_id (Dict[str, Dict[str, Any]]): Subset documents
                 mapped by their id.
-            profiles_per_subset_id (Dict[str, Dict[str, Any]]): Build profiles
-                mapped by subset id.
+            profiles_by_product_id (Dict[str, Dict[str, Any]]): Build profiles
+                mapped by product id.
             loaders_by_name (Dict[str, LoaderPlugin]): Available loaders
                 per name.
@@ -533,38 +533,40 @@ class BuildWorkfile:

         loaded_containers = []

-        # Get subset id order from build presets.
+        # Get product id order from build presets.
         build_presets = self.build_presets.get("current_context", [])
         build_presets += self.build_presets.get("linked_assets", [])
-        subset_ids_ordered = []
+        product_ids_ordered = []
         for preset in build_presets:
-            for preset_family in preset["product_types"]:
-                for id, subset in subsets_by_id.items():
-                    if preset_family not in subset["data"].get("families", []):
+            for product_type in preset["product_types"]:
+                for product_id, subset_doc in products_by_id.items():
+                    # TODO 'families' is not available on product
+                    families = subset_doc["data"].get("families") or []
+                    if product_type not in families:
                         continue

-                    subset_ids_ordered.append(id)
+                    product_ids_ordered.append(product_id)

-        # Order representations from subsets.
-        print("repres_by_subset_id", repres_by_subset_id)
+        # Order representations from products.
+        print("repres_by_product_id", repres_by_product_id)
         representations_ordered = []
         representations = []
-        for id in subset_ids_ordered:
-            for subset_id, repres in repres_by_subset_id.items():
+        for ordered_product_id in product_ids_ordered:
+            for product_id, repres in repres_by_product_id.items():
                 if repres in representations:
                     continue

-                if id == subset_id:
-                    representations_ordered.append((subset_id, repres))
+                if ordered_product_id == product_id:
+                    representations_ordered.append((product_id, repres))
                     representations.append(repres)

         print("representations", representations)

         # Load ordered representations.
-        for subset_id, repres in representations_ordered:
-            subset_name = subsets_by_id[subset_id]["name"]
+        for product_id, repres in representations_ordered:
+            product_name = products_by_id[product_id]["name"]

-            profile = profiles_per_subset_id[subset_id]
+            profile = profiles_by_product_id[product_id]
             loaders_last_idx = len(profile["loaders"]) - 1
             repre_names_last_idx = len(profile["repre_names_lowered"]) - 1
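The two ordering passes above are subtle: product ids are first ordered by the preset product types, then representation lists are paired in that order. A condensed, runnable sketch of the same passes with fabricated ids and types:

products_by_id = {
    "p1": {"data": {"families": ["render"]}},
    "p2": {"data": {"families": ["model"]}},
}
repres_by_product_id = {"p1": ["exr"], "p2": ["abc"]}
presets = [{"product_types": ["model", "render"]}]

# Pass 1: order product ids by preset product types.
product_ids_ordered = []
for preset in presets:
    for product_type in preset["product_types"]:
        for product_id, subset_doc in products_by_id.items():
            if product_type in (subset_doc["data"].get("families") or []):
                product_ids_ordered.append(product_id)

# Pass 2: pair representation lists in that order, skipping duplicates.
representations_ordered = []
seen = []
for ordered_product_id in product_ids_ordered:
    for product_id, repres in repres_by_product_id.items():
        if repres not in seen and ordered_product_id == product_id:
            representations_ordered.append((product_id, repres))
            seen.append(repres)

print(representations_ordered)  # [('p2', ['abc']), ('p1', ['exr'])]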
@@ -595,7 +597,7 @@ class BuildWorkfile:
                     container = load_container(
                         loader,
                         repre["_id"],
-                        name=subset_name
+                        name=product_name
                     )
                     loaded_containers.append(container)
                     is_loaded = True
@@ -618,8 +620,8 @@ class BuildWorkfile:
                     msg += " Trying next loader."
                 elif repre_name_idx < repre_names_last_idx:
                     msg += (
-                        " Loading of subset `{}` was not successful."
-                    ).format(subset_name)
+                        " Loading of product `{}` was not successful."
+                    ).format(product_name)
                 else:
                     msg += " Trying next representation."
                 self.log.info(msg)
@@ -627,7 +629,7 @@ class BuildWorkfile:
         return loaded_containers

     def _collect_last_version_repres(self, asset_docs):
-        """Collect subsets, versions and representations for asset_entities.
+        """Collect products, versions and representations for asset_entities.

         Args:
             asset_docs (List[Dict[str, Any]]): Asset entities for which
@@ -640,12 +642,12 @@ class BuildWorkfile:
         ```
         {
             {Asset ID}: {
-                "asset_entity": <AssetEntity>,
+                "asset_doc": <AssetEntity>,
                 "subsets": {
                     {Subset ID}: {
-                        "subset_entity": <SubsetEntity>,
+                        "subset_doc": <SubsetEntity>,
                         "version": {
-                            "version_entity": <VersionEntity>,
+                            "version_doc": <VersionEntity>,
                             "repres": [
                                 <RepreEntity1>, <RepreEntity2>, ...
                             ]
@@ -656,7 +658,7 @@ class BuildWorkfile:
             },
             ...
         }
-        output[asset_id]["subsets"][subset_id]["version"]["repres"]
+        output[folder_id]["subsets"][product_id]["version"]["repres"]
         ```
         """
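A short sketch of walking the documented structure; the nested dict below is fabricated to match the shape shown above.

output = {
    "folder-1": {
        "asset_doc": {"name": "sh010"},
        "subsets": {
            "product-1": {
                "subset_doc": {"name": "renderMain"},
                "version": {"version_doc": {"name": 3}, "repres": ["exr"]},
            }
        },
    }
}
for folder_id, folder_data in output.items():
    for product_id, product_data in folder_data["subsets"].items():
        repres = product_data["version"]["repres"]
        print(folder_id, product_id, repres)  # folder-1 product-1 ['exr']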
@@ -666,20 +668,26 @@ class BuildWorkfile:
         if not asset_docs:
             return output

-        asset_docs_by_ids = {asset["_id"]: asset for asset in asset_docs}
+        asset_docs_by_ids = {
+            asset_doc["_id"]: asset_doc
+            for asset_doc in asset_docs
+        }

         project_name = get_current_project_name()
-        subsets = list(get_subsets(
+        subset_docs = list(get_subsets(
             project_name, asset_ids=asset_docs_by_ids.keys()
         ))
-        subset_entity_by_ids = {subset["_id"]: subset for subset in subsets}
+        subset_docs_by_id = {
+            subset_doc["_id"]: subset_doc
+            for subset_doc in subset_docs
+        }

-        last_version_by_subset_id = get_last_versions(
-            project_name, subset_entity_by_ids.keys()
+        last_version_by_product_id = get_last_versions(
+            project_name, subset_docs_by_id.keys()
         )
         last_version_docs_by_id = {
             version["_id"]: version
-            for version in last_version_by_subset_id.values()
+            for version in last_version_by_product_id.values()
         }
         repre_docs = get_representations(
             project_name, version_ids=last_version_docs_by_id.keys()
@@ -689,28 +697,28 @@ class BuildWorkfile:
             version_id = repre_doc["parent"]
             version_doc = last_version_docs_by_id[version_id]

-            subset_id = version_doc["parent"]
-            subset_doc = subset_entity_by_ids[subset_id]
+            product_id = version_doc["parent"]
+            subset_doc = subset_docs_by_id[product_id]

-            asset_id = subset_doc["parent"]
-            asset_doc = asset_docs_by_ids[asset_id]
+            folder_id = subset_doc["parent"]
+            asset_doc = asset_docs_by_ids[folder_id]

-            if asset_id not in output:
-                output[asset_id] = {
-                    "asset_entity": asset_doc,
+            if folder_id not in output:
+                output[folder_id] = {
+                    "asset_doc": asset_doc,
                     "subsets": {}
                 }

-            if subset_id not in output[asset_id]["subsets"]:
-                output[asset_id]["subsets"][subset_id] = {
-                    "subset_entity": subset_doc,
+            if product_id not in output[folder_id]["subsets"]:
+                output[folder_id]["subsets"][product_id] = {
+                    "subset_doc": subset_doc,
                     "version": {
-                        "version_entity": version_doc,
+                        "version_doc": version_doc,
                         "repres": []
                     }
                 }

-            output[asset_id]["subsets"][subset_id]["version"]["repres"].append(
+            output[folder_id]["subsets"][product_id]["version"]["repres"].append(
                 repre_doc
             )
@@ -321,7 +321,7 @@ def get_last_workfile(
         data["app"],
         task_name=data["task"]["name"],
         task_type=data["task"]["type"],
-        family="workfile"
+        product_type="workfile"
     )
     data.pop("comment", None)
     if not data.get("ext"):