PYPE-1901 - New plugin for remote publishing (webpublish)

Extracted _load_json method into a shared parse_json helper
Petr Kalis 2021-10-08 17:25:42 +02:00
parent f92ee311b8
commit b90bcd9877
3 changed files with 109 additions and 19 deletions

View file

@@ -0,0 +1,88 @@
import os

import pyblish.api

from avalon import photoshop
from openpype.lib import prepare_template_data
from openpype.lib.plugin_tools import parse_json

class CollectRemoteInstances(pyblish.api.ContextPlugin):
    """Gather instances from layers marked with a configured color code.

    Used in remote (headless) publishing when artists mark publishable
    layers by color-coding them.

    Identifier:
        id (str): "pyblish.avalon.instance"
    """
    order = pyblish.api.CollectorOrder + 0.100
    label = "Instances"
    hosts = ["photoshop"]

    # configurable by Settings
    families = ["background"]
    color_code = ["red"]
    subset_template_name = ""
    def process(self, context):
        self.log.info("CollectRemoteInstances")
        if not os.environ.get("IS_HEADLESS"):
            self.log.debug("Not headless publishing, skipping.")
            return

        # parse variant if used in webpublishing, comes from webpublisher batch
        batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA")
        variant = "Main"
        if batch_dir and os.path.exists(batch_dir):
            # TODO check if batch manifest is same as tasks manifests
            task_data = parse_json(os.path.join(batch_dir,
                                                "manifest.json"))
            variant = task_data["variant"]
        stub = photoshop.stub()
        layers = stub.get_layers()

        instance_names = []
        for layer in layers:
            self.log.info("!!!Layer:: {}".format(layer))
            if layer.color_code not in self.color_code:
                self.log.debug("Not marked, skip")
                continue
            if layer.parents:
                self.log.debug("Not a top layer, skip")
                continue

            instance = context.create_instance(layer.name)
            instance.append(layer)
            instance.data["family"] = self.families[0]
            instance.data["publish"] = layer.visible

            # populate data from context, coming from outside?? TODO
            # TEMP
            self.log.info("asset {}".format(context.data["assetEntity"]))
            self.log.info("taskType {}".format(context.data["taskType"]))
            instance.data["asset"] = context.data["assetEntity"]["name"]
            instance.data["task"] = context.data["taskType"]

            fill_pairs = {
                "variant": variant,
                "family": instance.data["family"],
                "task": instance.data["task"],
                "layer": layer.name
            }
            subset = self.subset_template_name.format(
                **prepare_template_data(fill_pairs))
            instance.data["subset"] = subset

            instance_names.append(layer.name)

            # Produce diagnostic message for any graphical
            # user interface interested in visualising it.
            self.log.info("Found: \"%s\" " % instance.data["name"])
            self.log.info("instance: {} ".format(instance.data))

        if len(instance_names) != len(set(instance_names)):
            self.log.warning("Duplicate instances found. " +
                             "Remove unwanted via SubsetManager")

View file

@@ -15,6 +15,7 @@ import tempfile
import pyblish.api
from avalon import io
from openpype.lib import prepare_template_data
from openpype.lib.plugin_tools import parse_json
class CollectPublishedFiles(pyblish.api.ContextPlugin):
@@ -33,22 +34,6 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
# from Settings
task_type_to_family = {}
    def _load_json(self, path):
        path = path.strip('\"')
        assert os.path.isfile(path), (
            "Path to json file doesn't exist. \"{}\"".format(path)
        )
        data = None
        with open(path, "r") as json_file:
            try:
                data = json.load(json_file)
            except Exception as exc:
                self.log.error(
                    "Error loading json: "
                    "{} - Exception: {}".format(path, exc)
                )
        return data
    def _process_batch(self, dir_url):
        task_subfolders = [
            os.path.join(dir_url, o)
@@ -56,8 +41,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
            if os.path.isdir(os.path.join(dir_url, o))]
        self.log.info("task_sub:: {}".format(task_subfolders))
        for task_dir in task_subfolders:
            task_data = self._load_json(os.path.join(task_dir,
                                                     "manifest.json"))
            task_data = parse_json(os.path.join(task_dir,
                                                "manifest.json"))
            self.log.info("task_data:: {}".format(task_data))
            ctx = task_data["context"]
            task_type = "default_task_type"
@@ -261,7 +246,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
        assert batch_dir, (
            "Missing `OPENPYPE_PUBLISH_DATA`")
        assert batch_dir, \
        assert os.path.exists(batch_dir), \
            "Folder {} doesn't exist".format(batch_dir)
        project_name = os.environ.get("AVALON_PROJECT")
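For reference, a minimal usage sketch of the extracted helper as it is now imported above: it writes a throwaway batch manifest and reads it back. Only the `variant` key is shown because that is all this commit reads from the batch-level manifest (per-task manifests additionally carry a `context` entry); real webpublisher manifests contain more fields.

import json
import os
import tempfile

from openpype.lib.plugin_tools import parse_json

# Throwaway batch folder standing in for OPENPYPE_PUBLISH_DATA.
batch_dir = tempfile.mkdtemp()
manifest_path = os.path.join(batch_dir, "manifest.json")
with open(manifest_path, "w") as fp:
    json.dump({"variant": "Main"}, fp)

task_data = parse_json(manifest_path)
print(task_data["variant"])  # Main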

View file

@@ -483,3 +483,20 @@ def should_decompress(file_url):
            "compression: \"dwab\"" in output
    return False

def parse_json(path):
    """Parse json file at 'path', return its content or None on failure."""
    path = path.strip('\"')
    assert os.path.isfile(path), (
        "Path to json file doesn't exist. \"{}\"".format(path)
    )
    data = None
    with open(path, "r") as json_file:
        try:
            data = json.load(json_file)
        except Exception as exc:
            log.error(
                "Error loading json: "
                "{} - Exception: {}".format(path, exc)
            )
    return data
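One caller-facing detail worth noting: on malformed JSON the helper logs the exception and returns None rather than raising (only a missing file raises, via the assert), so call sites should guard against None. The function also relies on os, json and a module-level log that this hunk does not show but that are expected to already exist in plugin_tools.py. The snippet below is purely illustrative; the manifest path is a placeholder.

# Purely illustrative call site; the path is a placeholder.
task_data = parse_json("/tmp/batch/manifest.json")
if task_data is None:
    # parse_json already logged the exception; the caller decides how to fail.
    raise ValueError("Could not parse manifest.json")
variant = task_data.get("variant", "Main")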