Mirror of https://github.com/ynput/ayon-core.git, synced 2026-01-01 16:34:53 +01:00
feat(ps): adding psd batch workflow to standalone publisher
This commit is contained in:
parent 0b4e01eca7
commit c214e7516c

5 changed files with 190 additions and 58 deletions
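For orientation, here is a minimal sketch of the kind of batch payload the updated collector below reads from its temporary JSON file. The field names (asset, family, family_preset_key, subset, representations with ext and files) are taken from the collector code in this diff; the concrete values are illustrative assumptions, not actual output of the standalone publisher UI.

# Hypothetical in_data payload (illustrative values only).
in_data = {
    "asset": "sh010",                  # asset name looked up via io.find_one
    "family": "image",
    "family_preset_key": "psd_batch",  # routes the data through multiple_instances()
    "subset": "Main",
    "representations": [
        {
            "ext": ".psd",             # leading dot is stripped via repr["ext"][1:]
            "files": ["sh010_bg.psd", "sh020_bg.psd"],
        }
    ],
}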
@@ -17,10 +17,8 @@ import os
import pyblish.api
from avalon import io
import json
import logging
import clique

log = logging.getLogger("collector")
from pprint import pformat


class CollectContextDataSAPublish(pyblish.api.ContextPlugin):

@@ -33,6 +31,9 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
    order = pyblish.api.CollectorOrder - 0.49
    hosts = ["standalonepublisher"]

    # presets
    batch_extensions = ["edl", "xml", "psd"]

    def process(self, context):
        # get json paths from os and load them
        io.install()

@@ -52,73 +53,80 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
        with open(input_json_path, "r") as f:
            in_data = json.load(f)
        self.log.debug(f"_ in_data: {in_data}")
        self.log.debug(f"_ in_data: {pformat(in_data)}")

        self.asset_name = in_data["asset"]
        self.family = in_data["family"]
        self.families = ["ftrack"]
        self.family_preset_key = in_data["family_preset_key"]
        asset = io.find_one({"type": "asset", "name": self.asset_name})
        context.data["asset"] = asset

        # exception for editorial
        if "editorial" in self.family:
            # avoid subset name duplicity
            if not context.data.get("subsetNamesCheck"):
                context.data["subsetNamesCheck"] = list()

            in_data_list = list()
            representations = in_data.pop("representations")
            for repr in representations:
                in_data_copy = in_data.copy()
                ext = repr["ext"][1:]
                subset = in_data_copy["subset"]
                # filter out non editorial files
                if ext not in ["edl", "xml"]:
                    in_data_copy["representations"] = [repr]
                    in_data_copy["subset"] = f"{ext}{subset}"
                    in_data_list.append(in_data_copy)

                files = repr.pop("files")

                # delete unneeded keys
                delete_repr_keys = ["frameStart", "frameEnd"]
                for k in delete_repr_keys:
                    if repr.get(k):
                        repr.pop(k)

                # convert files to list if it isnt
                if not isinstance(files, list):
                    files = [files]

                self.log.debug(f"_ files: {files}")
                for index, f in enumerate(files):
                    index += 1
                    # copy dictionaries
                    in_data_copy = in_data_copy.copy()
                    repr_new = repr.copy()

                    repr_new["files"] = f
                    repr_new["name"] = ext
                    in_data_copy["representations"] = [repr_new]

                    # create subset Name
                    new_subset = f"{ext}{index}{subset}"
                    while new_subset in context.data["subsetNamesCheck"]:
                        index += 1
                        new_subset = f"{ext}{index}{subset}"

                    context.data["subsetNamesCheck"].append(new_subset)
                    in_data_copy["subset"] = new_subset
                    in_data_list.append(in_data_copy)
                    self.log.info(f"Creating subset: {ext}{index}{subset}")
        if self.family_preset_key in ["editorial", "psd_batch"]:
            in_data_list = self.multiple_instances(context, in_data)
        else:
            in_data_list = [in_data]

        self.log.debug(f"_ in_data_list: {in_data_list}")
        self.log.debug(f"_ in_data_list: {pformat(in_data_list)}")

        for in_data in in_data_list:
            # create instance
            self.create_instance(context, in_data)

    def multiple_instances(self, context, in_data):
        # avoid subset name duplicity
        if not context.data.get("subsetNamesCheck"):
            context.data["subsetNamesCheck"] = list()

        in_data_list = list()
        representations = in_data.pop("representations")
        for repr in representations:
            in_data_copy = in_data.copy()
            ext = repr["ext"][1:]
            subset = in_data_copy["subset"]
            # filter out non editorial files
            if ext not in self.batch_extensions:
                in_data_copy["representations"] = [repr]
                in_data_copy["subset"] = f"{ext}{subset}"
                in_data_list.append(in_data_copy)

            files = repr.get("files")

            # delete unneeded keys
            delete_repr_keys = ["frameStart", "frameEnd"]
            for k in delete_repr_keys:
                if repr.get(k):
                    repr.pop(k)

            # convert files to list if it isnt
            if not isinstance(files, list):
                files = [files]

            self.log.debug(f"_ files: {files}")
            for index, f in enumerate(files):
                index += 1
                # copy dictionaries
                in_data_copy = in_data_copy.copy()
                repr_new = repr.copy()

                repr_new["files"] = f
                repr_new["name"] = ext
                in_data_copy["representations"] = [repr_new]

                # create subset Name
                new_subset = f"{ext}{index}{subset}"
                while new_subset in context.data["subsetNamesCheck"]:
                    index += 1
                    new_subset = f"{ext}{index}{subset}"

                context.data["subsetNamesCheck"].append(new_subset)
                in_data_copy["subset"] = new_subset
                in_data_list.append(in_data_copy)
                self.log.info(f"Creating subset: {ext}{index}{subset}")

        return in_data_list

    def create_instance(self, context, in_data):
        subset = in_data["subset"]

@@ -138,11 +146,11 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
                "frameEnd": in_data.get("representations", [None])[0].get(
                    "frameEnd", None
                ),
                "families": [self.family, "ftrack"],
                "families": self.families + [self.family_preset_key],
            }
        )
        self.log.info("collected instance: {}".format(instance.data))
        self.log.info("parsing data: {}".format(in_data))
        self.log.info("collected instance: {}".format(pformat(instance.data)))
        self.log.info("parsing data: {}".format(pformat(in_data)))

        instance.data["destination_list"] = list()
        instance.data["representations"] = list()

@@ -165,4 +173,9 @@ class CollectContextDataSAPublish(pyblish.api.ContextPlugin):
                component["tags"] = ["review"]
                self.log.debug("Adding review family")

            if "psd" in component["name"]:
                instance.data["source"] = component["files"]
                component["thumbnail"] = True
                self.log.debug("Adding image:psd_batch family")

            instance.data["representations"].append(component)
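To make the fan-out in the new multiple_instances() method concrete, here is a small standalone sketch of just its subset-naming loop. name_subsets is a hypothetical helper, not part of the plugin; it only reproduces the f"{ext}{index}{subset}" naming and the duplicate check, not the representation copying or key clean-up.

# Hypothetical helper mirroring the subset naming in multiple_instances().
def name_subsets(ext, files, subset, used_names):
    for index, _ in enumerate(files, start=1):
        new_subset = f"{ext}{index}{subset}"
        # bump the index until the name is unique, as the plugin does
        while new_subset in used_names:
            index += 1
            new_subset = f"{ext}{index}{subset}"
        used_names.append(new_subset)
        yield new_subset


print(list(name_subsets("psd", ["bg_a.psd", "bg_b.psd"], "Main", [])))
# ['psd1Main', 'psd2Main']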
@@ -162,6 +162,7 @@ class CollectHierarchyContext(pyblish.api.ContextPlugin):
    label = "Collect Hierarchy Context"
    order = pyblish.api.CollectorOrder + 0.102
    hosts = ["standalonepublisher"]
    families = ["shot"]

    def update_dict(self, ex_dict, new_dict):
        for key in ex_dict:
@@ -0,0 +1,26 @@
import os
import pyblish.api
from pprint import pformat


class CollectMatchingAssetToInstance(pyblish.api.InstancePlugin):
    """
    Match a project asset to the instance by looking for an asset name
    in the instance's source file name.
    """

    label = "Collect Matching Asset to Instance"
    order = pyblish.api.CollectorOrder - 0.05
    hosts = ["standalonepublisher"]
    family = ["image"]

    def process(self, instance):
        project_assets = instance.context.data["projectAssets"]
        source_file = os.path.basename(instance.data["source"])
        asset = next((project_assets[name] for name in project_assets
                      if name in source_file), None)

        if asset:
            instance.data["asset"] = asset["name"]
            instance.data["assetEntity"] = asset
            self.log.info(f"Matching asset assigned: {pformat(asset)}")
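A short sketch of the matching rule in CollectMatchingAssetToInstance.process(): the first project asset whose name appears in the source file name is assigned. The asset documents below are trimmed-down stand-ins for the database documents collected into context.data["projectAssets"]; the file path is illustrative.

import os

# Stand-in for context.data["projectAssets"] (real entries are full asset documents).
project_assets = {
    "sh010": {"name": "sh010"},
    "sh020": {"name": "sh020"},
}
source_file = os.path.basename("/projects/demo/input/sh020_background_v001.psd")

asset = next((project_assets[name] for name in project_assets
              if name in source_file), None)
print(asset)  # {'name': 'sh020'}

Note that when several asset names match the file name, the result depends on dictionary iteration order; the plugin simply takes the first hit.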
@@ -0,0 +1,21 @@
import pyblish.api
from avalon import io
from pprint import pformat


class CollectProjectAssets(pyblish.api.ContextPlugin):
    """
    Collect all available project assets to context data.
    """

    label = "Collect Project Assets"
    order = pyblish.api.CollectorOrder - 0.1
    hosts = ["standalonepublisher"]

    def process(self, context):
        project_assets = {
            asset_doc["name"]: asset_doc
            for asset_doc in io.find({"type": "asset"})
        }
        context.data["projectAssets"] = project_assets
        self.log.debug(f"_ project_assets: {pformat(project_assets)}")
@@ -0,0 +1,71 @@
import pyblish.api
from pprint import pformat


class CollectPsdInstances(pyblish.api.InstancePlugin):
    """
    Collect all available instances from psd batch.
    """

    label = "Collect Psd Instances"
    order = pyblish.api.CollectorOrder + 0.492
    hosts = ["standalonepublisher"]
    families = ["psd_batch"]

    # presets
    subsets = {
        "imageForLayout": {
            "task": "background",
            "family": "imageForLayout"
        },
        "imageForComp": {
            "task": "background",
            "family": "imageForComp"
        },
        "workfileBackground": {
            "task": "background",
            "family": "workfile"
        },
    }

    def process(self, instance):
        context = instance.context
        asset_data = instance.data["assetEntity"]
        asset = instance.data["asset"]
        anatomy_data = instance.data["anatomyData"].copy()

        for subset_name, subset_data in self.subsets.items():
            instance_name = f"{asset}_{subset_name}"
            task = subset_data.get("task", "background")

            # create new instance
            new_instance = context.create_instance(instance_name)
            # add original instance data except name key
            new_instance.data.update({k: v for k, v in instance.data.items()
                                      if k not in "name"})
            # add subset data from preset
            new_instance.data.update(subset_data)

            label = f"{instance_name}"
            new_instance.data["label"] = label
            new_instance.data["subset"] = subset_name
            new_instance.data["families"].append("image")

            # fix anatomy data
            anatomy_data_new = anatomy_data.copy()
            # updating hierarchy data
            anatomy_data_new.update({
                "asset": asset_data["name"],
                "task": task,
                "subset": subset_name
            })
            new_instance.data["anatomyData"] = anatomy_data_new

            inst_data = new_instance.data
            self.log.debug(
                f"_ inst_data: {pformat(inst_data)}")

            self.log.info(f"Created new instance: {instance_name}")

        # delete original instance
        context.remove(instance)
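Finally, a sketch of the fan-out performed by CollectPsdInstances: one incoming psd_batch instance is expanded into one instance per entry of the subsets preset, named f"{asset}_{subset_name}". The subset, family, and task values below are taken from the preset in the plugin above; the asset name is an illustrative assumption.

# Expansion of the subsets preset for a hypothetical asset.
subsets = {
    "imageForLayout": {"task": "background", "family": "imageForLayout"},
    "imageForComp": {"task": "background", "family": "imageForComp"},
    "workfileBackground": {"task": "background", "family": "workfile"},
}

asset = "sh010"
for subset_name, subset_data in subsets.items():
    print(f"{asset}_{subset_name}", subset_data["family"], subset_data["task"])
# sh010_imageForLayout imageForLayout background
# sh010_imageForComp imageForComp background
# sh010_workfileBackground workfile background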