mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-24 21:04:40 +01:00
Merge remote-tracking branch 'origin/bugfix/fusion-optional-validation' into bugfix/fusion-optional-validation
This commit is contained in:
commit
4da00c20f7
26 changed files with 1432 additions and 398 deletions
|
|
@ -217,7 +217,11 @@ class ExtractPlayblast(publish.Extractor):
|
|||
instance.data["panel"], edit=True, **viewport_defaults
|
||||
)
|
||||
|
||||
cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom)
|
||||
try:
|
||||
cmds.setAttr(
|
||||
"{}.panZoomEnabled".format(preset["camera"]), pan_zoom)
|
||||
except RuntimeError:
|
||||
self.log.warning("Cannot restore Pan/Zoom settings.")
|
||||
|
||||
collected_files = os.listdir(stagingdir)
|
||||
patterns = [clique.PATTERNS["frames"]]
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ import pyblish.api
|
|||
|
||||
from openpype.hosts.maya.api.lib import set_attribute
|
||||
from openpype.pipeline.publish import (
|
||||
RepairContextAction,
|
||||
RepairAction,
|
||||
ValidateContentsOrder,
|
||||
)
|
||||
|
||||
|
|
@ -26,7 +26,7 @@ class ValidateAttributes(pyblish.api.InstancePlugin):
|
|||
order = ValidateContentsOrder
|
||||
label = "Attributes"
|
||||
hosts = ["maya"]
|
||||
actions = [RepairContextAction]
|
||||
actions = [RepairAction]
|
||||
optional = True
|
||||
|
||||
attributes = None
|
||||
|
|
@ -81,7 +81,7 @@ class ValidateAttributes(pyblish.api.InstancePlugin):
|
|||
if node_name not in attributes:
|
||||
continue
|
||||
|
||||
for attr_name, expected in attributes.items():
|
||||
for attr_name, expected in attributes[node_name].items():
|
||||
|
||||
# Skip if attribute does not exist
|
||||
if not cmds.attributeQuery(attr_name, node=node, exists=True):
|
||||
|
|
|
|||
|
|
@ -7,28 +7,26 @@ from openpype.pipeline import (
|
|||
from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances
|
||||
|
||||
|
||||
class PSWorkfileCreator(AutoCreator):
|
||||
identifier = "workfile"
|
||||
family = "workfile"
|
||||
|
||||
default_variant = "Main"
|
||||
|
||||
class PSAutoCreator(AutoCreator):
|
||||
"""Generic autocreator to extend."""
|
||||
def get_instance_attr_defs(self):
|
||||
return []
|
||||
|
||||
def collect_instances(self):
|
||||
for instance_data in cache_and_get_instances(self):
|
||||
creator_id = instance_data.get("creator_identifier")
|
||||
|
||||
if creator_id == self.identifier:
|
||||
subset_name = instance_data["subset"]
|
||||
instance = CreatedInstance(
|
||||
self.family, subset_name, instance_data, self
|
||||
instance = CreatedInstance.from_existing(
|
||||
instance_data, self
|
||||
)
|
||||
self._add_instance_to_context(instance)
|
||||
|
||||
def update_instances(self, update_list):
|
||||
# nothing to change on workfiles
|
||||
pass
|
||||
self.log.debug("update_list:: {}".format(update_list))
|
||||
for created_inst, _changes in update_list:
|
||||
api.stub().imprint(created_inst.get("instance_id"),
|
||||
created_inst.data_to_store())
|
||||
|
||||
def create(self, options=None):
|
||||
existing_instance = None
|
||||
|
|
@ -58,6 +56,9 @@ class PSWorkfileCreator(AutoCreator):
|
|||
project_name, host_name, None
|
||||
))
|
||||
|
||||
if not self.active_on_create:
|
||||
data["active"] = False
|
||||
|
||||
new_instance = CreatedInstance(
|
||||
self.family, subset_name, data, self
|
||||
)
|
||||
120
openpype/hosts/photoshop/plugins/create/create_flatten_image.py
Normal file
120
openpype/hosts/photoshop/plugins/create/create_flatten_image.py
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
from openpype.pipeline import CreatedInstance
|
||||
|
||||
from openpype.lib import BoolDef
|
||||
import openpype.hosts.photoshop.api as api
|
||||
from openpype.hosts.photoshop.lib import PSAutoCreator
|
||||
from openpype.pipeline.create import get_subset_name
|
||||
from openpype.client import get_asset_by_name
|
||||
|
||||
|
||||
class AutoImageCreator(PSAutoCreator):
|
||||
"""Creates flatten image from all visible layers.
|
||||
|
||||
Used in simplified publishing as auto created instance.
|
||||
Must be enabled in Setting and template for subset name provided
|
||||
"""
|
||||
identifier = "auto_image"
|
||||
family = "image"
|
||||
|
||||
# Settings
|
||||
default_variant = ""
|
||||
# - Mark by default instance for review
|
||||
mark_for_review = True
|
||||
active_on_create = True
|
||||
|
||||
def create(self, options=None):
|
||||
existing_instance = None
|
||||
for instance in self.create_context.instances:
|
||||
if instance.creator_identifier == self.identifier:
|
||||
existing_instance = instance
|
||||
break
|
||||
|
||||
context = self.create_context
|
||||
project_name = context.get_current_project_name()
|
||||
asset_name = context.get_current_asset_name()
|
||||
task_name = context.get_current_task_name()
|
||||
host_name = context.host_name
|
||||
asset_doc = get_asset_by_name(project_name, asset_name)
|
||||
|
||||
if existing_instance is None:
|
||||
subset_name = get_subset_name(
|
||||
self.family, self.default_variant, task_name, asset_doc,
|
||||
project_name, host_name
|
||||
)
|
||||
|
||||
publishable_ids = [layer.id for layer in api.stub().get_layers()
|
||||
if layer.visible]
|
||||
data = {
|
||||
"asset": asset_name,
|
||||
"task": task_name,
|
||||
# ids are "virtual" layers, won't get grouped as 'members' do
|
||||
# same difference in color coded layers in WP
|
||||
"ids": publishable_ids
|
||||
}
|
||||
|
||||
if not self.active_on_create:
|
||||
data["active"] = False
|
||||
|
||||
creator_attributes = {"mark_for_review": self.mark_for_review}
|
||||
data.update({"creator_attributes": creator_attributes})
|
||||
|
||||
new_instance = CreatedInstance(
|
||||
self.family, subset_name, data, self
|
||||
)
|
||||
self._add_instance_to_context(new_instance)
|
||||
api.stub().imprint(new_instance.get("instance_id"),
|
||||
new_instance.data_to_store())
|
||||
|
||||
elif ( # existing instance from different context
|
||||
existing_instance["asset"] != asset_name
|
||||
or existing_instance["task"] != task_name
|
||||
):
|
||||
subset_name = get_subset_name(
|
||||
self.family, self.default_variant, task_name, asset_doc,
|
||||
project_name, host_name
|
||||
)
|
||||
|
||||
existing_instance["asset"] = asset_name
|
||||
existing_instance["task"] = task_name
|
||||
existing_instance["subset"] = subset_name
|
||||
|
||||
api.stub().imprint(existing_instance.get("instance_id"),
|
||||
existing_instance.data_to_store())
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
return [
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review",
|
||||
default=self.mark_for_review
|
||||
)
|
||||
]
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review"
|
||||
)
|
||||
]
|
||||
|
||||
def apply_settings(self, project_settings, system_settings):
|
||||
plugin_settings = (
|
||||
project_settings["photoshop"]["create"]["AutoImageCreator"]
|
||||
)
|
||||
|
||||
self.active_on_create = plugin_settings["active_on_create"]
|
||||
self.default_variant = plugin_settings["default_variant"]
|
||||
self.mark_for_review = plugin_settings["mark_for_review"]
|
||||
self.enabled = plugin_settings["enabled"]
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Creator for flatten image.
|
||||
|
||||
Studio might configure simple publishing workflow. In that case
|
||||
`image` instance is automatically created which will publish flat
|
||||
image from all visible layers.
|
||||
|
||||
Artist might disable this instance from publishing or from creating
|
||||
review for it though.
|
||||
"""
|
||||
|
|
@ -23,6 +23,11 @@ class ImageCreator(Creator):
|
|||
family = "image"
|
||||
description = "Image creator"
|
||||
|
||||
# Settings
|
||||
default_variants = ""
|
||||
mark_for_review = False
|
||||
active_on_create = True
|
||||
|
||||
def create(self, subset_name_from_ui, data, pre_create_data):
|
||||
groups_to_create = []
|
||||
top_layers_to_wrap = []
|
||||
|
|
@ -94,6 +99,12 @@ class ImageCreator(Creator):
|
|||
data.update({"layer_name": layer_name})
|
||||
data.update({"long_name": "_".join(layer_names_in_hierarchy)})
|
||||
|
||||
creator_attributes = {"mark_for_review": self.mark_for_review}
|
||||
data.update({"creator_attributes": creator_attributes})
|
||||
|
||||
if not self.active_on_create:
|
||||
data["active"] = False
|
||||
|
||||
new_instance = CreatedInstance(self.family, subset_name, data,
|
||||
self)
|
||||
|
||||
|
|
@ -134,11 +145,6 @@ class ImageCreator(Creator):
|
|||
self.host.remove_instance(instance)
|
||||
self._remove_instance_from_context(instance)
|
||||
|
||||
def get_default_variants(self):
|
||||
return [
|
||||
"Main"
|
||||
]
|
||||
|
||||
def get_pre_create_attr_defs(self):
|
||||
output = [
|
||||
BoolDef("use_selection", default=True,
|
||||
|
|
@ -148,10 +154,34 @@ class ImageCreator(Creator):
|
|||
label="Create separate instance for each selected"),
|
||||
BoolDef("use_layer_name",
|
||||
default=False,
|
||||
label="Use layer name in subset")
|
||||
label="Use layer name in subset"),
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Create separate review",
|
||||
default=False
|
||||
)
|
||||
]
|
||||
return output
|
||||
|
||||
def get_instance_attr_defs(self):
|
||||
return [
|
||||
BoolDef(
|
||||
"mark_for_review",
|
||||
label="Review"
|
||||
)
|
||||
]
|
||||
|
||||
def apply_settings(self, project_settings, system_settings):
|
||||
plugin_settings = (
|
||||
project_settings["photoshop"]["create"]["ImageCreator"]
|
||||
)
|
||||
|
||||
self.active_on_create = plugin_settings["active_on_create"]
|
||||
self.default_variants = plugin_settings["default_variants"]
|
||||
self.mark_for_review = plugin_settings["mark_for_review"]
|
||||
self.enabled = plugin_settings["enabled"]
|
||||
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Creator for Image instances
|
||||
|
||||
|
|
@ -180,6 +210,11 @@ class ImageCreator(Creator):
|
|||
but layer name should be used (set explicitly in UI or implicitly if
|
||||
multiple images should be created), it is added in capitalized form
|
||||
as a suffix to subset name.
|
||||
|
||||
Each image could have its separate review created if necessary via
|
||||
`Create separate review` toggle.
|
||||
But more use case is to use separate `review` instance to create review
|
||||
from all published items.
|
||||
"""
|
||||
|
||||
def _handle_legacy(self, instance_data):
|
||||
|
|
|
|||
28
openpype/hosts/photoshop/plugins/create/create_review.py
Normal file
28
openpype/hosts/photoshop/plugins/create/create_review.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
from openpype.hosts.photoshop.lib import PSAutoCreator
|
||||
|
||||
|
||||
class ReviewCreator(PSAutoCreator):
|
||||
"""Creates review instance which might be disabled from publishing."""
|
||||
identifier = "review"
|
||||
family = "review"
|
||||
|
||||
default_variant = "Main"
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Auto creator for review.
|
||||
|
||||
Photoshop review is created from all published images or from all
|
||||
visible layers if no `image` instances got created.
|
||||
|
||||
Review might be disabled by an artist (instance shouldn't be deleted as
|
||||
it will get recreated in next publish either way).
|
||||
"""
|
||||
|
||||
def apply_settings(self, project_settings, system_settings):
|
||||
plugin_settings = (
|
||||
project_settings["photoshop"]["create"]["ReviewCreator"]
|
||||
)
|
||||
|
||||
self.default_variant = plugin_settings["default_variant"]
|
||||
self.active_on_create = plugin_settings["active_on_create"]
|
||||
self.enabled = plugin_settings["enabled"]
|
||||
28
openpype/hosts/photoshop/plugins/create/create_workfile.py
Normal file
28
openpype/hosts/photoshop/plugins/create/create_workfile.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
from openpype.hosts.photoshop.lib import PSAutoCreator
|
||||
|
||||
|
||||
class WorkfileCreator(PSAutoCreator):
|
||||
identifier = "workfile"
|
||||
family = "workfile"
|
||||
|
||||
default_variant = "Main"
|
||||
|
||||
def get_detail_description(self):
|
||||
return """Auto creator for workfile.
|
||||
|
||||
It is expected that each publish will also publish its source workfile
|
||||
for safekeeping. This creator triggers automatically without need for
|
||||
an artist to remember and trigger it explicitly.
|
||||
|
||||
Workfile instance could be disabled if it is not required to publish
|
||||
workfile. (Instance shouldn't be deleted though as it will be recreated
|
||||
in next publish automatically).
|
||||
"""
|
||||
|
||||
def apply_settings(self, project_settings, system_settings):
|
||||
plugin_settings = (
|
||||
project_settings["photoshop"]["create"]["WorkfileCreator"]
|
||||
)
|
||||
|
||||
self.active_on_create = plugin_settings["active_on_create"]
|
||||
self.enabled = plugin_settings["enabled"]
|
||||
101
openpype/hosts/photoshop/plugins/publish/collect_auto_image.py
Normal file
101
openpype/hosts/photoshop/plugins/publish/collect_auto_image.py
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
import pyblish.api
|
||||
|
||||
from openpype.hosts.photoshop import api as photoshop
|
||||
from openpype.pipeline.create import get_subset_name
|
||||
|
||||
|
||||
class CollectAutoImage(pyblish.api.ContextPlugin):
|
||||
"""Creates auto image in non artist based publishes (Webpublisher).
|
||||
|
||||
'remotepublish' should be renamed to 'autopublish' or similar in the future
|
||||
"""
|
||||
|
||||
label = "Collect Auto Image"
|
||||
order = pyblish.api.CollectorOrder
|
||||
hosts = ["photoshop"]
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
|
||||
targets = ["remotepublish"]
|
||||
|
||||
def process(self, context):
|
||||
family = "image"
|
||||
for instance in context:
|
||||
creator_identifier = instance.data.get("creator_identifier")
|
||||
if creator_identifier and creator_identifier == "auto_image":
|
||||
self.log.debug("Auto image instance found, won't create new")
|
||||
return
|
||||
|
||||
project_name = context.data["anatomyData"]["project"]["name"]
|
||||
proj_settings = context.data["project_settings"]
|
||||
task_name = context.data["anatomyData"]["task"]["name"]
|
||||
host_name = context.data["hostName"]
|
||||
asset_doc = context.data["assetEntity"]
|
||||
asset_name = asset_doc["name"]
|
||||
|
||||
auto_creator = proj_settings.get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"AutoImageCreator", {})
|
||||
|
||||
if not auto_creator or not auto_creator["enabled"]:
|
||||
self.log.debug("Auto image creator disabled, won't create new")
|
||||
return
|
||||
|
||||
stub = photoshop.stub()
|
||||
stored_items = stub.get_layers_metadata()
|
||||
for item in stored_items:
|
||||
if item.get("creator_identifier") == "auto_image":
|
||||
if not item.get("active"):
|
||||
self.log.debug("Auto_image instance disabled")
|
||||
return
|
||||
|
||||
layer_items = stub.get_layers()
|
||||
|
||||
publishable_ids = [layer.id for layer in layer_items
|
||||
if layer.visible]
|
||||
|
||||
# collect stored image instances
|
||||
instance_names = []
|
||||
for layer_item in layer_items:
|
||||
layer_meta_data = stub.read(layer_item, stored_items)
|
||||
|
||||
# Skip layers without metadata.
|
||||
if layer_meta_data is None:
|
||||
continue
|
||||
|
||||
# Skip containers.
|
||||
if "container" in layer_meta_data["id"]:
|
||||
continue
|
||||
|
||||
# active might not be in legacy meta
|
||||
if layer_meta_data.get("active", True) and layer_item.visible:
|
||||
instance_names.append(layer_meta_data["subset"])
|
||||
|
||||
if len(instance_names) == 0:
|
||||
variants = proj_settings.get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"CreateImage", {}).get(
|
||||
"default_variants", [''])
|
||||
family = "image"
|
||||
|
||||
variant = context.data.get("variant") or variants[0]
|
||||
|
||||
subset_name = get_subset_name(
|
||||
family, variant, task_name, asset_doc,
|
||||
project_name, host_name
|
||||
)
|
||||
|
||||
instance = context.create_instance(subset_name)
|
||||
instance.data["family"] = family
|
||||
instance.data["asset"] = asset_name
|
||||
instance.data["subset"] = subset_name
|
||||
instance.data["ids"] = publishable_ids
|
||||
instance.data["publish"] = True
|
||||
instance.data["creator_identifier"] = "auto_image"
|
||||
|
||||
if auto_creator["mark_for_review"]:
|
||||
instance.data["creator_attributes"] = {"mark_for_review": True}
|
||||
instance.data["families"] = ["review"]
|
||||
|
||||
self.log.info("auto image instance: {} ".format(instance.data))
|
||||
|
|
@ -0,0 +1,92 @@
|
|||
"""
|
||||
Requires:
|
||||
None
|
||||
|
||||
Provides:
|
||||
instance -> family ("review")
|
||||
"""
|
||||
import pyblish.api
|
||||
|
||||
from openpype.hosts.photoshop import api as photoshop
|
||||
from openpype.pipeline.create import get_subset_name
|
||||
|
||||
|
||||
class CollectAutoReview(pyblish.api.ContextPlugin):
|
||||
"""Create review instance in non artist based workflow.
|
||||
|
||||
Called only if PS is triggered in Webpublisher or in tests.
|
||||
"""
|
||||
|
||||
label = "Collect Auto Review"
|
||||
hosts = ["photoshop"]
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
targets = ["remotepublish"]
|
||||
|
||||
publish = True
|
||||
|
||||
def process(self, context):
|
||||
family = "review"
|
||||
has_review = False
|
||||
for instance in context:
|
||||
if instance.data["family"] == family:
|
||||
self.log.debug("Review instance found, won't create new")
|
||||
has_review = True
|
||||
|
||||
creator_attributes = instance.data.get("creator_attributes", {})
|
||||
if (creator_attributes.get("mark_for_review") and
|
||||
"review" not in instance.data["families"]):
|
||||
instance.data["families"].append("review")
|
||||
|
||||
if has_review:
|
||||
return
|
||||
|
||||
stub = photoshop.stub()
|
||||
stored_items = stub.get_layers_metadata()
|
||||
for item in stored_items:
|
||||
if item.get("creator_identifier") == family:
|
||||
if not item.get("active"):
|
||||
self.log.debug("Review instance disabled")
|
||||
return
|
||||
|
||||
auto_creator = context.data["project_settings"].get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"ReviewCreator", {})
|
||||
|
||||
if not auto_creator or not auto_creator["enabled"]:
|
||||
self.log.debug("Review creator disabled, won't create new")
|
||||
return
|
||||
|
||||
variant = (context.data.get("variant") or
|
||||
auto_creator["default_variant"])
|
||||
|
||||
project_name = context.data["anatomyData"]["project"]["name"]
|
||||
proj_settings = context.data["project_settings"]
|
||||
task_name = context.data["anatomyData"]["task"]["name"]
|
||||
host_name = context.data["hostName"]
|
||||
asset_doc = context.data["assetEntity"]
|
||||
asset_name = asset_doc["name"]
|
||||
|
||||
subset_name = get_subset_name(
|
||||
family,
|
||||
variant,
|
||||
task_name,
|
||||
asset_doc,
|
||||
project_name,
|
||||
host_name=host_name,
|
||||
project_settings=proj_settings
|
||||
)
|
||||
|
||||
instance = context.create_instance(subset_name)
|
||||
instance.data.update({
|
||||
"subset": subset_name,
|
||||
"label": subset_name,
|
||||
"name": subset_name,
|
||||
"family": family,
|
||||
"families": [],
|
||||
"representations": [],
|
||||
"asset": asset_name,
|
||||
"publish": self.publish
|
||||
})
|
||||
|
||||
self.log.debug("auto review created::{}".format(instance.data))
|
||||
|
|
@ -0,0 +1,99 @@
|
|||
import os
|
||||
import pyblish.api
|
||||
|
||||
from openpype.hosts.photoshop import api as photoshop
|
||||
from openpype.pipeline.create import get_subset_name
|
||||
|
||||
|
||||
class CollectAutoWorkfile(pyblish.api.ContextPlugin):
|
||||
"""Collect current script for publish."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.2
|
||||
label = "Collect Workfile"
|
||||
hosts = ["photoshop"]
|
||||
|
||||
targets = ["remotepublish"]
|
||||
|
||||
def process(self, context):
|
||||
family = "workfile"
|
||||
file_path = context.data["currentFile"]
|
||||
_, ext = os.path.splitext(file_path)
|
||||
staging_dir = os.path.dirname(file_path)
|
||||
base_name = os.path.basename(file_path)
|
||||
workfile_representation = {
|
||||
"name": ext[1:],
|
||||
"ext": ext[1:],
|
||||
"files": base_name,
|
||||
"stagingDir": staging_dir,
|
||||
}
|
||||
|
||||
for instance in context:
|
||||
if instance.data["family"] == family:
|
||||
self.log.debug("Workfile instance found, won't create new")
|
||||
instance.data.update({
|
||||
"label": base_name,
|
||||
"name": base_name,
|
||||
"representations": [],
|
||||
})
|
||||
|
||||
# creating representation
|
||||
_, ext = os.path.splitext(file_path)
|
||||
instance.data["representations"].append(
|
||||
workfile_representation)
|
||||
|
||||
return
|
||||
|
||||
stub = photoshop.stub()
|
||||
stored_items = stub.get_layers_metadata()
|
||||
for item in stored_items:
|
||||
if item.get("creator_identifier") == family:
|
||||
if not item.get("active"):
|
||||
self.log.debug("Workfile instance disabled")
|
||||
return
|
||||
|
||||
project_name = context.data["anatomyData"]["project"]["name"]
|
||||
proj_settings = context.data["project_settings"]
|
||||
auto_creator = proj_settings.get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"WorkfileCreator", {})
|
||||
|
||||
if not auto_creator or not auto_creator["enabled"]:
|
||||
self.log.debug("Workfile creator disabled, won't create new")
|
||||
return
|
||||
|
||||
# context.data["variant"] might come only from collect_batch_data
|
||||
variant = (context.data.get("variant") or
|
||||
auto_creator["default_variant"])
|
||||
|
||||
task_name = context.data["anatomyData"]["task"]["name"]
|
||||
host_name = context.data["hostName"]
|
||||
asset_doc = context.data["assetEntity"]
|
||||
asset_name = asset_doc["name"]
|
||||
|
||||
subset_name = get_subset_name(
|
||||
family,
|
||||
variant,
|
||||
task_name,
|
||||
asset_doc,
|
||||
project_name,
|
||||
host_name=host_name,
|
||||
project_settings=proj_settings
|
||||
)
|
||||
|
||||
# Create instance
|
||||
instance = context.create_instance(subset_name)
|
||||
instance.data.update({
|
||||
"subset": subset_name,
|
||||
"label": base_name,
|
||||
"name": base_name,
|
||||
"family": family,
|
||||
"families": [],
|
||||
"representations": [],
|
||||
"asset": asset_name
|
||||
})
|
||||
|
||||
# creating representation
|
||||
instance.data["representations"].append(workfile_representation)
|
||||
|
||||
self.log.debug("auto workfile review created:{}".format(instance.data))
|
||||
|
|
@ -1,116 +0,0 @@
|
|||
import pprint
|
||||
|
||||
import pyblish.api
|
||||
|
||||
from openpype.settings import get_project_settings
|
||||
from openpype.hosts.photoshop import api as photoshop
|
||||
from openpype.lib import prepare_template_data
|
||||
from openpype.pipeline import legacy_io
|
||||
|
||||
|
||||
class CollectInstances(pyblish.api.ContextPlugin):
|
||||
"""Gather instances by LayerSet and file metadata
|
||||
|
||||
Collects publishable instances from file metadata or enhance
|
||||
already collected by creator (family == "image").
|
||||
|
||||
If no image instances are explicitly created, it looks if there is value
|
||||
in `flatten_subset_template` (configurable in Settings), in that case it
|
||||
produces flatten image with all visible layers.
|
||||
|
||||
Identifier:
|
||||
id (str): "pyblish.avalon.instance"
|
||||
"""
|
||||
|
||||
label = "Collect Instances"
|
||||
order = pyblish.api.CollectorOrder
|
||||
hosts = ["photoshop"]
|
||||
families_mapping = {
|
||||
"image": []
|
||||
}
|
||||
# configurable in Settings
|
||||
flatten_subset_template = ""
|
||||
|
||||
def process(self, context):
|
||||
instance_by_layer_id = {}
|
||||
for instance in context:
|
||||
if (
|
||||
instance.data["family"] == "image" and
|
||||
instance.data.get("members")):
|
||||
layer_id = str(instance.data["members"][0])
|
||||
instance_by_layer_id[layer_id] = instance
|
||||
|
||||
stub = photoshop.stub()
|
||||
layer_items = stub.get_layers()
|
||||
layers_meta = stub.get_layers_metadata()
|
||||
instance_names = []
|
||||
|
||||
all_layer_ids = []
|
||||
for layer_item in layer_items:
|
||||
layer_meta_data = stub.read(layer_item, layers_meta)
|
||||
all_layer_ids.append(layer_item.id)
|
||||
|
||||
# Skip layers without metadata.
|
||||
if layer_meta_data is None:
|
||||
continue
|
||||
|
||||
# Skip containers.
|
||||
if "container" in layer_meta_data["id"]:
|
||||
continue
|
||||
|
||||
# active might not be in legacy meta
|
||||
if not layer_meta_data.get("active", True):
|
||||
continue
|
||||
|
||||
instance = instance_by_layer_id.get(str(layer_item.id))
|
||||
if instance is None:
|
||||
instance = context.create_instance(layer_meta_data["subset"])
|
||||
|
||||
instance.data["layer"] = layer_item
|
||||
instance.data.update(layer_meta_data)
|
||||
instance.data["families"] = self.families_mapping[
|
||||
layer_meta_data["family"]
|
||||
]
|
||||
instance.data["publish"] = layer_item.visible
|
||||
instance_names.append(layer_meta_data["subset"])
|
||||
|
||||
# Produce diagnostic message for any graphical
|
||||
# user interface interested in visualising it.
|
||||
self.log.info("Found: \"%s\" " % instance.data["name"])
|
||||
self.log.info("instance: {} ".format(
|
||||
pprint.pformat(instance.data, indent=4)))
|
||||
|
||||
if len(instance_names) != len(set(instance_names)):
|
||||
self.log.warning("Duplicate instances found. " +
|
||||
"Remove unwanted via Publisher")
|
||||
|
||||
if len(instance_names) == 0 and self.flatten_subset_template:
|
||||
project_name = context.data["projectEntity"]["name"]
|
||||
variants = get_project_settings(project_name).get(
|
||||
"photoshop", {}).get(
|
||||
"create", {}).get(
|
||||
"CreateImage", {}).get(
|
||||
"defaults", [''])
|
||||
family = "image"
|
||||
task_name = legacy_io.Session["AVALON_TASK"]
|
||||
asset_name = context.data["assetEntity"]["name"]
|
||||
|
||||
variant = context.data.get("variant") or variants[0]
|
||||
fill_pairs = {
|
||||
"variant": variant,
|
||||
"family": family,
|
||||
"task": task_name
|
||||
}
|
||||
|
||||
subset = self.flatten_subset_template.format(
|
||||
**prepare_template_data(fill_pairs))
|
||||
|
||||
instance = context.create_instance(subset)
|
||||
instance.data["family"] = family
|
||||
instance.data["asset"] = asset_name
|
||||
instance.data["subset"] = subset
|
||||
instance.data["ids"] = all_layer_ids
|
||||
instance.data["families"] = self.families_mapping[family]
|
||||
instance.data["publish"] = True
|
||||
|
||||
self.log.info("flatten instance: {} ".format(instance.data))
|
||||
|
|
@ -14,10 +14,7 @@ from openpype.pipeline.create import get_subset_name
|
|||
|
||||
|
||||
class CollectReview(pyblish.api.ContextPlugin):
|
||||
"""Gather the active document as review instance.
|
||||
|
||||
Triggers once even if no 'image' is published as by defaults it creates
|
||||
flatten image from a workfile.
|
||||
"""Adds review to families for instances marked to be reviewable.
|
||||
"""
|
||||
|
||||
label = "Collect Review"
|
||||
|
|
@ -28,25 +25,8 @@ class CollectReview(pyblish.api.ContextPlugin):
|
|||
publish = True
|
||||
|
||||
def process(self, context):
|
||||
family = "review"
|
||||
subset = get_subset_name(
|
||||
family,
|
||||
context.data.get("variant", ''),
|
||||
context.data["anatomyData"]["task"]["name"],
|
||||
context.data["assetEntity"],
|
||||
context.data["anatomyData"]["project"]["name"],
|
||||
host_name=context.data["hostName"],
|
||||
project_settings=context.data["project_settings"]
|
||||
)
|
||||
|
||||
instance = context.create_instance(subset)
|
||||
instance.data.update({
|
||||
"subset": subset,
|
||||
"label": subset,
|
||||
"name": subset,
|
||||
"family": family,
|
||||
"families": [],
|
||||
"representations": [],
|
||||
"asset": os.environ["AVALON_ASSET"],
|
||||
"publish": self.publish
|
||||
})
|
||||
for instance in context:
|
||||
creator_attributes = instance.data["creator_attributes"]
|
||||
if (creator_attributes.get("mark_for_review") and
|
||||
"review" not in instance.data["families"]):
|
||||
instance.data["families"].append("review")
|
||||
|
|
|
|||
|
|
@ -14,50 +14,19 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
|
|||
default_variant = "Main"
|
||||
|
||||
def process(self, context):
|
||||
existing_instance = None
|
||||
for instance in context:
|
||||
if instance.data["family"] == "workfile":
|
||||
self.log.debug("Workfile instance found, won't create new")
|
||||
existing_instance = instance
|
||||
break
|
||||
file_path = context.data["currentFile"]
|
||||
_, ext = os.path.splitext(file_path)
|
||||
staging_dir = os.path.dirname(file_path)
|
||||
base_name = os.path.basename(file_path)
|
||||
|
||||
family = "workfile"
|
||||
# context.data["variant"] might come only from collect_batch_data
|
||||
variant = context.data.get("variant") or self.default_variant
|
||||
subset = get_subset_name(
|
||||
family,
|
||||
variant,
|
||||
context.data["anatomyData"]["task"]["name"],
|
||||
context.data["assetEntity"],
|
||||
context.data["anatomyData"]["project"]["name"],
|
||||
host_name=context.data["hostName"],
|
||||
project_settings=context.data["project_settings"]
|
||||
)
|
||||
|
||||
file_path = context.data["currentFile"]
|
||||
staging_dir = os.path.dirname(file_path)
|
||||
base_name = os.path.basename(file_path)
|
||||
|
||||
# Create instance
|
||||
if existing_instance is None:
|
||||
instance = context.create_instance(subset)
|
||||
instance.data.update({
|
||||
"subset": subset,
|
||||
"label": base_name,
|
||||
"name": base_name,
|
||||
"family": family,
|
||||
"families": [],
|
||||
"representations": [],
|
||||
"asset": os.environ["AVALON_ASSET"]
|
||||
})
|
||||
else:
|
||||
instance = existing_instance
|
||||
|
||||
# creating representation
|
||||
_, ext = os.path.splitext(file_path)
|
||||
instance.data["representations"].append({
|
||||
"name": ext[1:],
|
||||
"ext": ext[1:],
|
||||
"files": base_name,
|
||||
"stagingDir": staging_dir,
|
||||
})
|
||||
# creating representation
|
||||
_, ext = os.path.splitext(file_path)
|
||||
instance.data["representations"].append({
|
||||
"name": ext[1:],
|
||||
"ext": ext[1:],
|
||||
"files": base_name,
|
||||
"stagingDir": staging_dir,
|
||||
})
|
||||
return
|
||||
|
|
|
|||
|
|
@ -47,32 +47,42 @@ class ExtractReview(publish.Extractor):
|
|||
layers = self._get_layers_from_image_instances(instance)
|
||||
self.log.info("Layers image instance found: {}".format(layers))
|
||||
|
||||
repre_name = "jpg"
|
||||
repre_skeleton = {
|
||||
"name": repre_name,
|
||||
"ext": "jpg",
|
||||
"stagingDir": staging_dir,
|
||||
"tags": self.jpg_options['tags'],
|
||||
}
|
||||
|
||||
if instance.data["family"] != "review":
|
||||
# enable creation of review, without this jpg review would clash
|
||||
# with jpg of the image family
|
||||
output_name = repre_name
|
||||
repre_name = "{}_{}".format(repre_name, output_name)
|
||||
repre_skeleton.update({"name": repre_name,
|
||||
"outputName": output_name})
|
||||
|
||||
if self.make_image_sequence and len(layers) > 1:
|
||||
self.log.info("Extract layers to image sequence.")
|
||||
img_list = self._save_sequence_images(staging_dir, layers)
|
||||
|
||||
instance.data["representations"].append({
|
||||
"name": "jpg",
|
||||
"ext": "jpg",
|
||||
"files": img_list,
|
||||
repre_skeleton.update({
|
||||
"frameStart": 0,
|
||||
"frameEnd": len(img_list),
|
||||
"fps": fps,
|
||||
"stagingDir": staging_dir,
|
||||
"tags": self.jpg_options['tags'],
|
||||
"files": img_list,
|
||||
})
|
||||
instance.data["representations"].append(repre_skeleton)
|
||||
processed_img_names = img_list
|
||||
else:
|
||||
self.log.info("Extract layers to flatten image.")
|
||||
img_list = self._save_flatten_image(staging_dir, layers)
|
||||
|
||||
instance.data["representations"].append({
|
||||
"name": "jpg",
|
||||
"ext": "jpg",
|
||||
"files": img_list, # cannot be [] for single frame
|
||||
"stagingDir": staging_dir,
|
||||
"tags": self.jpg_options['tags']
|
||||
repre_skeleton.update({
|
||||
"files": img_list,
|
||||
})
|
||||
instance.data["representations"].append(repre_skeleton)
|
||||
processed_img_names = [img_list]
|
||||
|
||||
ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
|
||||
|
|
|
|||
|
|
@ -1,15 +1,20 @@
|
|||
import os
|
||||
import shutil
|
||||
from time import sleep
|
||||
|
||||
from openpype.client.entities import (
|
||||
get_last_version_by_subset_id,
|
||||
get_representations,
|
||||
get_subsets,
|
||||
get_project
|
||||
)
|
||||
|
||||
from openpype.lib import PreLaunchHook
|
||||
from openpype.lib.local_settings import get_local_site_id
|
||||
from openpype.lib.profiles_filtering import filter_profiles
|
||||
from openpype.pipeline.load.utils import get_representation_path
|
||||
from openpype.modules.sync_server.sync_server import (
|
||||
download_last_published_workfile,
|
||||
)
|
||||
from openpype.pipeline.template_data import get_template_data
|
||||
from openpype.pipeline.workfile.path_resolving import (
|
||||
get_workfile_template_key,
|
||||
)
|
||||
from openpype.settings.lib import get_project_settings
|
||||
|
||||
|
||||
|
|
@ -22,7 +27,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
|
|||
|
||||
# Before `AddLastWorkfileToLaunchArgs`
|
||||
order = -1
|
||||
app_groups = ["blender", "photoshop", "tvpaint", "aftereffects"]
|
||||
# any DCC could be used but TrayPublisher and other specials
|
||||
app_groups = ["blender", "photoshop", "tvpaint", "aftereffects",
|
||||
"nuke", "nukeassist", "nukex", "hiero", "nukestudio",
|
||||
"maya", "harmony", "celaction", "flame", "fusion",
|
||||
"houdini", "tvpaint"]
|
||||
|
||||
def execute(self):
|
||||
"""Check if local workfile doesn't exist, else copy it.
|
||||
|
|
@ -31,11 +40,11 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
|
|||
2- Check if workfile in work area doesn't exist
|
||||
3- Check if published workfile exists and is copied locally in publish
|
||||
4- Substitute copied published workfile as first workfile
|
||||
with incremented version by +1
|
||||
|
||||
Returns:
|
||||
None: This is a void method.
|
||||
"""
|
||||
|
||||
sync_server = self.modules_manager.get("sync_server")
|
||||
if not sync_server or not sync_server.enabled:
|
||||
self.log.debug("Sync server module is not enabled or available")
|
||||
|
|
@ -53,6 +62,7 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
|
|||
|
||||
# Get data
|
||||
project_name = self.data["project_name"]
|
||||
asset_name = self.data["asset_name"]
|
||||
task_name = self.data["task_name"]
|
||||
task_type = self.data["task_type"]
|
||||
host_name = self.application.host_name
|
||||
|
|
@ -68,6 +78,8 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
|
|||
"hosts": host_name,
|
||||
}
|
||||
last_workfile_settings = filter_profiles(profiles, filter_data)
|
||||
if not last_workfile_settings:
|
||||
return
|
||||
use_last_published_workfile = last_workfile_settings.get(
|
||||
"use_last_published_workfile"
|
||||
)
|
||||
|
|
@ -92,57 +104,27 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
|
|||
)
|
||||
return
|
||||
|
||||
max_retries = int((sync_server.sync_project_settings[project_name]
|
||||
["config"]
|
||||
["retry_cnt"]))
|
||||
|
||||
self.log.info("Trying to fetch last published workfile...")
|
||||
|
||||
project_doc = self.data.get("project_doc")
|
||||
asset_doc = self.data.get("asset_doc")
|
||||
anatomy = self.data.get("anatomy")
|
||||
|
||||
# Check it can proceed
|
||||
if not project_doc and not asset_doc:
|
||||
return
|
||||
context_filters = {
|
||||
"asset": asset_name,
|
||||
"family": "workfile",
|
||||
"task": {"name": task_name, "type": task_type}
|
||||
}
|
||||
|
||||
# Get subset id
|
||||
subset_id = next(
|
||||
(
|
||||
subset["_id"]
|
||||
for subset in get_subsets(
|
||||
project_name,
|
||||
asset_ids=[asset_doc["_id"]],
|
||||
fields=["_id", "data.family", "data.families"],
|
||||
)
|
||||
if subset["data"].get("family") == "workfile"
|
||||
# Legacy compatibility
|
||||
or "workfile" in subset["data"].get("families", {})
|
||||
),
|
||||
None,
|
||||
)
|
||||
if not subset_id:
|
||||
self.log.debug(
|
||||
'No any workfile for asset "{}".'.format(asset_doc["name"])
|
||||
)
|
||||
return
|
||||
workfile_representations = list(get_representations(
|
||||
project_name,
|
||||
context_filters=context_filters
|
||||
))
|
||||
|
||||
# Get workfile representation
|
||||
last_version_doc = get_last_version_by_subset_id(
|
||||
project_name, subset_id, fields=["_id"]
|
||||
)
|
||||
if not last_version_doc:
|
||||
self.log.debug("Subset does not have any versions")
|
||||
return
|
||||
|
||||
workfile_representation = next(
|
||||
(
|
||||
representation
|
||||
for representation in get_representations(
|
||||
project_name, version_ids=[last_version_doc["_id"]]
|
||||
)
|
||||
if representation["context"]["task"]["name"] == task_name
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
if not workfile_representation:
|
||||
if not workfile_representations:
|
||||
self.log.debug(
|
||||
'No published workfile for task "{}" and host "{}".'.format(
|
||||
task_name, host_name
|
||||
|
|
@ -150,28 +132,55 @@ class CopyLastPublishedWorkfile(PreLaunchHook):
|
|||
)
|
||||
return
|
||||
|
||||
local_site_id = get_local_site_id()
|
||||
sync_server.add_site(
|
||||
project_name,
|
||||
workfile_representation["_id"],
|
||||
local_site_id,
|
||||
force=True,
|
||||
priority=99,
|
||||
reset_timer=True,
|
||||
filtered_repres = filter(
|
||||
lambda r: r["context"].get("version") is not None,
|
||||
workfile_representations
|
||||
)
|
||||
|
||||
while not sync_server.is_representation_on_site(
|
||||
project_name, workfile_representation["_id"], local_site_id
|
||||
):
|
||||
sleep(5)
|
||||
|
||||
# Get paths
|
||||
published_workfile_path = get_representation_path(
|
||||
workfile_representation, root=anatomy.roots
|
||||
workfile_representation = max(
|
||||
filtered_repres, key=lambda r: r["context"]["version"]
|
||||
)
|
||||
local_workfile_dir = os.path.dirname(last_workfile)
|
||||
|
||||
# Copy file and substitute path
|
||||
self.data["last_workfile_path"] = shutil.copy(
|
||||
published_workfile_path, local_workfile_dir
|
||||
last_published_workfile_path = download_last_published_workfile(
|
||||
host_name,
|
||||
project_name,
|
||||
task_name,
|
||||
workfile_representation,
|
||||
max_retries,
|
||||
anatomy=anatomy
|
||||
)
|
||||
if not last_published_workfile_path:
|
||||
self.log.debug(
|
||||
"Couldn't download {}".format(last_published_workfile_path)
|
||||
)
|
||||
return
|
||||
|
||||
project_doc = self.data["project_doc"]
|
||||
|
||||
project_settings = self.data["project_settings"]
|
||||
template_key = get_workfile_template_key(
|
||||
task_name, host_name, project_name, project_settings
|
||||
)
|
||||
|
||||
# Get workfile data
|
||||
workfile_data = get_template_data(
|
||||
project_doc, asset_doc, task_name, host_name
|
||||
)
|
||||
|
||||
extension = last_published_workfile_path.split(".")[-1]
|
||||
workfile_data["version"] = (
|
||||
workfile_representation["context"]["version"] + 1)
|
||||
workfile_data["ext"] = extension
|
||||
|
||||
anatomy_result = anatomy.format(workfile_data)
|
||||
local_workfile_path = anatomy_result[template_key]["path"]
|
||||
|
||||
# Copy last published workfile to local workfile directory
|
||||
shutil.copy(
|
||||
last_published_workfile_path,
|
||||
local_workfile_path,
|
||||
)
|
||||
|
||||
self.data["last_workfile_path"] = local_workfile_path
|
||||
# Keep source filepath for further path conformation
|
||||
self.data["source_filepath"] = last_published_workfile_path
|
||||
|
|
@ -3,10 +3,15 @@ import os
|
|||
import asyncio
|
||||
import threading
|
||||
import concurrent.futures
|
||||
from concurrent.futures._base import CancelledError
|
||||
from time import sleep
|
||||
|
||||
from .providers import lib
|
||||
from openpype.client.entity_links import get_linked_representation_id
|
||||
from openpype.lib import Logger
|
||||
from openpype.lib.local_settings import get_local_site_id
|
||||
from openpype.modules.base import ModulesManager
|
||||
from openpype.pipeline import Anatomy
|
||||
from openpype.pipeline.load.utils import get_representation_path_with_anatomy
|
||||
|
||||
from .utils import SyncStatus, ResumableError
|
||||
|
||||
|
|
@ -189,6 +194,98 @@ def _site_is_working(module, project_name, site_name, site_config):
|
|||
return handler.is_active()
|
||||
|
||||
|
||||
def download_last_published_workfile(
|
||||
host_name: str,
|
||||
project_name: str,
|
||||
task_name: str,
|
||||
workfile_representation: dict,
|
||||
max_retries: int,
|
||||
anatomy: Anatomy = None,
|
||||
) -> str:
|
||||
"""Download the last published workfile
|
||||
|
||||
Args:
|
||||
host_name (str): Host name.
|
||||
project_name (str): Project name.
|
||||
task_name (str): Task name.
|
||||
workfile_representation (dict): Workfile representation.
|
||||
max_retries (int): complete file failure only after so many attempts
|
||||
anatomy (Anatomy, optional): Anatomy (Used for optimization).
|
||||
Defaults to None.
|
||||
|
||||
Returns:
|
||||
str: last published workfile path localized
|
||||
"""
|
||||
|
||||
if not anatomy:
|
||||
anatomy = Anatomy(project_name)
|
||||
|
||||
# Get sync server module
|
||||
sync_server = ModulesManager().modules_by_name.get("sync_server")
|
||||
if not sync_server or not sync_server.enabled:
|
||||
print("Sync server module is disabled or unavailable.")
|
||||
return
|
||||
|
||||
if not workfile_representation:
|
||||
print(
|
||||
"Not published workfile for task '{}' and host '{}'.".format(
|
||||
task_name, host_name
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
last_published_workfile_path = get_representation_path_with_anatomy(
|
||||
workfile_representation, anatomy
|
||||
)
|
||||
if (not last_published_workfile_path or
|
||||
not os.path.exists(last_published_workfile_path)):
|
||||
return
|
||||
|
||||
# If representation isn't available on remote site, then return.
|
||||
if not sync_server.is_representation_on_site(
|
||||
project_name,
|
||||
workfile_representation["_id"],
|
||||
sync_server.get_remote_site(project_name),
|
||||
):
|
||||
print(
|
||||
"Representation for task '{}' and host '{}'".format(
|
||||
task_name, host_name
|
||||
)
|
||||
)
|
||||
return
|
||||
|
||||
# Get local site
|
||||
local_site_id = get_local_site_id()
|
||||
|
||||
# Add workfile representation to local site
|
||||
representation_ids = {workfile_representation["_id"]}
|
||||
representation_ids.update(
|
||||
get_linked_representation_id(
|
||||
project_name, repre_id=workfile_representation["_id"]
|
||||
)
|
||||
)
|
||||
for repre_id in representation_ids:
|
||||
if not sync_server.is_representation_on_site(project_name, repre_id,
|
||||
local_site_id):
|
||||
sync_server.add_site(
|
||||
project_name,
|
||||
repre_id,
|
||||
local_site_id,
|
||||
force=True,
|
||||
priority=99
|
||||
)
|
||||
sync_server.reset_timer()
|
||||
print("Starting to download:{}".format(last_published_workfile_path))
|
||||
# While representation unavailable locally, wait.
|
||||
while not sync_server.is_representation_on_site(
|
||||
project_name, workfile_representation["_id"], local_site_id,
|
||||
max_retries=max_retries
|
||||
):
|
||||
sleep(5)
|
||||
|
||||
return last_published_workfile_path
|
||||
|
||||
|
||||
class SyncServerThread(threading.Thread):
|
||||
"""
|
||||
Separate thread running synchronization server with asyncio loop.
|
||||
|
|
@ -358,7 +455,6 @@ class SyncServerThread(threading.Thread):
|
|||
|
||||
duration = time.time() - start_time
|
||||
self.log.debug("One loop took {:.2f}s".format(duration))
|
||||
|
||||
delay = self.module.get_loop_delay(project_name)
|
||||
self.log.debug(
|
||||
"Waiting for {} seconds to new loop".format(delay)
|
||||
|
|
@ -370,8 +466,8 @@ class SyncServerThread(threading.Thread):
|
|||
self.log.warning(
|
||||
"ConnectionResetError in sync loop, trying next loop",
|
||||
exc_info=True)
|
||||
except CancelledError:
|
||||
# just stopping server
|
||||
except asyncio.exceptions.CancelledError:
|
||||
# cancelling timer
|
||||
pass
|
||||
except ResumableError:
|
||||
self.log.warning(
|
||||
|
|
|
|||
|
|
@ -838,6 +838,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
|
||||
return ret_dict
|
||||
|
||||
def get_launch_hook_paths(self):
|
||||
"""Implementation for applications launch hooks.
|
||||
|
||||
Returns:
|
||||
(str): full absolut path to directory with hooks for the module
|
||||
"""
|
||||
|
||||
return os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)),
|
||||
"launch_hooks"
|
||||
)
|
||||
|
||||
# Needs to be refactored after Settings are updated
|
||||
# # Methods for Settings to get appriate values to fill forms
|
||||
# def get_configurable_items(self, scope=None):
|
||||
|
|
@ -1045,9 +1057,23 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
self.sync_server_thread.reset_timer()
|
||||
|
||||
def is_representation_on_site(
|
||||
self, project_name, representation_id, site_name
|
||||
self, project_name, representation_id, site_name, max_retries=None
|
||||
):
|
||||
"""Checks if 'representation_id' has all files avail. on 'site_name'"""
|
||||
"""Checks if 'representation_id' has all files avail. on 'site_name'
|
||||
|
||||
Args:
|
||||
project_name (str)
|
||||
representation_id (str)
|
||||
site_name (str)
|
||||
max_retries (int) (optional) - provide only if method used in while
|
||||
loop to bail out
|
||||
Returns:
|
||||
(bool): True if 'representation_id' has all files correctly on the
|
||||
'site_name'
|
||||
Raises:
|
||||
(ValueError) Only If 'max_retries' provided if upload/download
|
||||
failed too many times to limit infinite loop check.
|
||||
"""
|
||||
representation = get_representation_by_id(project_name,
|
||||
representation_id,
|
||||
fields=["_id", "files"])
|
||||
|
|
@ -1060,6 +1086,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
|
|||
if site["name"] != site_name:
|
||||
continue
|
||||
|
||||
if max_retries:
|
||||
tries = self._get_tries_count_from_rec(site)
|
||||
if tries >= max_retries:
|
||||
raise ValueError("Failed too many times")
|
||||
|
||||
if (site.get("progress") or site.get("error") or
|
||||
not site.get("created_dt")):
|
||||
return False
|
||||
|
|
|
|||
|
|
@ -10,23 +10,40 @@
|
|||
}
|
||||
},
|
||||
"create": {
|
||||
"CreateImage": {
|
||||
"defaults": [
|
||||
"ImageCreator": {
|
||||
"enabled": true,
|
||||
"active_on_create": true,
|
||||
"mark_for_review": false,
|
||||
"default_variants": [
|
||||
"Main"
|
||||
]
|
||||
},
|
||||
"AutoImageCreator": {
|
||||
"enabled": false,
|
||||
"active_on_create": true,
|
||||
"mark_for_review": false,
|
||||
"default_variant": ""
|
||||
},
|
||||
"ReviewCreator": {
|
||||
"enabled": true,
|
||||
"active_on_create": true,
|
||||
"default_variant": ""
|
||||
},
|
||||
"WorkfileCreator": {
|
||||
"enabled": true,
|
||||
"active_on_create": true,
|
||||
"default_variant": "Main"
|
||||
}
|
||||
},
|
||||
"publish": {
|
||||
"CollectColorCodedInstances": {
|
||||
"enabled": true,
|
||||
"create_flatten_image": "no",
|
||||
"flatten_subset_template": "",
|
||||
"color_code_mapping": []
|
||||
},
|
||||
"CollectInstances": {
|
||||
"flatten_subset_template": ""
|
||||
},
|
||||
"CollectReview": {
|
||||
"publish": true
|
||||
"enabled": true
|
||||
},
|
||||
"CollectVersion": {
|
||||
"enabled": false
|
||||
|
|
|
|||
|
|
@ -31,16 +31,126 @@
|
|||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "CreateImage",
|
||||
"key": "ImageCreator",
|
||||
"label": "Create Image",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Manually create instance from layer or group of layers. \n Separate review could be created for this image to be sent to Asset Management System."
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "active_on_create",
|
||||
"label": "Active by default"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "mark_for_review",
|
||||
"label": "Review by default"
|
||||
},
|
||||
{
|
||||
"type": "list",
|
||||
"key": "defaults",
|
||||
"label": "Default Subsets",
|
||||
"key": "default_variants",
|
||||
"label": "Default Variants",
|
||||
"object_type": "text"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "AutoImageCreator",
|
||||
"label": "Create Flatten Image",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Auto create image for all visible layers, used for simplified processing. \n Separate review could be created for this image to be sent to Asset Management System."
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "active_on_create",
|
||||
"label": "Active by default"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "mark_for_review",
|
||||
"label": "Review by default"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "default_variant",
|
||||
"label": "Default variant"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "ReviewCreator",
|
||||
"label": "Create Review",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Auto create review instance containing all published image instances or visible layers if no image instance."
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled",
|
||||
"default": true
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "active_on_create",
|
||||
"label": "Active by default"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "default_variant",
|
||||
"label": "Default variant"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "WorkfileCreator",
|
||||
"label": "Create Workfile",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Auto create workfile instance"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "active_on_create",
|
||||
"label": "Active by default"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "default_variant",
|
||||
"label": "Default variant"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
@ -56,11 +166,18 @@
|
|||
"is_group": true,
|
||||
"key": "CollectColorCodedInstances",
|
||||
"label": "Collect Color Coded Instances",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Set color for publishable layers, set its resulting family and template for subset name. \nCan create flatten image from published instances.(Applicable only for remote publishing!)"
|
||||
},
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "enabled",
|
||||
"label": "Enabled",
|
||||
"default": true
|
||||
},
|
||||
{
|
||||
"key": "create_flatten_image",
|
||||
"label": "Create flatten image",
|
||||
|
|
@ -131,40 +248,26 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "CollectInstances",
|
||||
"label": "Collect Instances",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
"label": "Name for flatten image created if no image instance present"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"key": "flatten_subset_template",
|
||||
"label": "Subset template for flatten image"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"collapsible": true,
|
||||
"key": "CollectReview",
|
||||
"label": "Collect Review",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "boolean",
|
||||
"key": "publish",
|
||||
"label": "Active"
|
||||
}
|
||||
]
|
||||
"key": "enabled",
|
||||
"label": "Enabled",
|
||||
"default": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "dict",
|
||||
"key": "CollectVersion",
|
||||
"label": "Collect Version",
|
||||
"checkbox_key": "enabled",
|
||||
"children": [
|
||||
{
|
||||
"type": "label",
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring Pype version."""
|
||||
__version__ = "3.15.6-nightly.1"
|
||||
__version__ = "3.15.6-nightly.3"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,93 @@
|
|||
import logging
|
||||
|
||||
from tests.lib.assert_classes import DBAssert
|
||||
from tests.integration.hosts.photoshop.lib import PhotoshopTestClass
|
||||
|
||||
log = logging.getLogger("test_publish_in_photoshop")
|
||||
|
||||
|
||||
class TestPublishInPhotoshopAutoImage(PhotoshopTestClass):
|
||||
"""Test for publish in Phohoshop with different review configuration.
|
||||
|
||||
Workfile contains 3 layers, auto image and review instances created.
|
||||
|
||||
Test contains updates to Settings!!!
|
||||
|
||||
"""
|
||||
PERSIST = True
|
||||
|
||||
TEST_FILES = [
|
||||
("1iLF6aNI31qlUCD1rGg9X9eMieZzxL-rc",
|
||||
"test_photoshop_publish_auto_image.zip", "")
|
||||
]
|
||||
|
||||
APP_GROUP = "photoshop"
|
||||
# keep empty to locate latest installed variant or explicit
|
||||
APP_VARIANT = ""
|
||||
|
||||
APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT)
|
||||
|
||||
TIMEOUT = 120 # publish timeout
|
||||
|
||||
def test_db_asserts(self, dbcon, publish_finished):
|
||||
"""Host and input data dependent expected results in DB."""
|
||||
print("test_db_asserts")
|
||||
failures = []
|
||||
|
||||
failures.append(DBAssert.count_of_types(dbcon, "version", 3))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "subset", 0,
|
||||
name="imageMainForeground"))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "subset", 0,
|
||||
name="imageMainBackground"))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "subset", 1,
|
||||
name="workfileTest_task"))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 5))
|
||||
|
||||
additional_args = {"context.subset": "imageMainForeground",
|
||||
"context.ext": "png"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 0,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "imageMainBackground",
|
||||
"context.ext": "png"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 0,
|
||||
additional_args=additional_args))
|
||||
|
||||
# review from image
|
||||
additional_args = {"context.subset": "imageBeautyMain",
|
||||
"context.ext": "jpg",
|
||||
"name": "jpg_jpg"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "imageBeautyMain",
|
||||
"context.ext": "jpg",
|
||||
"name": "jpg"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "review"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
additional_args=additional_args))
|
||||
|
||||
assert not any(failures)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_case = TestPublishInPhotoshopAutoImage()
|
||||
|
|
@ -0,0 +1,111 @@
|
|||
import logging
|
||||
|
||||
from tests.lib.assert_classes import DBAssert
|
||||
from tests.integration.hosts.photoshop.lib import PhotoshopTestClass
|
||||
|
||||
log = logging.getLogger("test_publish_in_photoshop")
|
||||
|
||||
|
||||
class TestPublishInPhotoshopImageReviews(PhotoshopTestClass):
|
||||
"""Test for publish in Phohoshop with different review configuration.
|
||||
|
||||
Workfile contains 2 image instance, one has review flag, second doesn't.
|
||||
|
||||
Regular `review` family is disabled.
|
||||
|
||||
Expected result is to `imageMainForeground` to have additional file with
|
||||
review, `imageMainBackground` without. No separate `review` family.
|
||||
|
||||
`test_project_test_asset_imageMainForeground_v001_jpg.jpg` is expected name
|
||||
of imageForeground review, `_jpg` suffix is needed to differentiate between
|
||||
image and review file.
|
||||
|
||||
"""
|
||||
PERSIST = True
|
||||
|
||||
TEST_FILES = [
|
||||
("12WGbNy9RJ3m9jlnk0Ib9-IZmONoxIz_p",
|
||||
"test_photoshop_publish_review.zip", "")
|
||||
]
|
||||
|
||||
APP_GROUP = "photoshop"
|
||||
# keep empty to locate latest installed variant or explicit
|
||||
APP_VARIANT = ""
|
||||
|
||||
APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT)
|
||||
|
||||
TIMEOUT = 120 # publish timeout
|
||||
|
||||
def test_db_asserts(self, dbcon, publish_finished):
|
||||
"""Host and input data dependent expected results in DB."""
|
||||
print("test_db_asserts")
|
||||
failures = []
|
||||
|
||||
failures.append(DBAssert.count_of_types(dbcon, "version", 3))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1}))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "subset", 1,
|
||||
name="imageMainForeground"))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "subset", 1,
|
||||
name="imageMainBackground"))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "subset", 1,
|
||||
name="workfileTest_task"))
|
||||
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 6))
|
||||
|
||||
additional_args = {"context.subset": "imageMainForeground",
|
||||
"context.ext": "png"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "imageMainForeground",
|
||||
"context.ext": "jpg"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 2,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "imageMainForeground",
|
||||
"context.ext": "jpg",
|
||||
"context.representation": "jpg_jpg"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "imageMainBackground",
|
||||
"context.ext": "png"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "imageMainBackground",
|
||||
"context.ext": "jpg"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 1,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "imageMainBackground",
|
||||
"context.ext": "jpg",
|
||||
"context.representation": "jpg_jpg"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 0,
|
||||
additional_args=additional_args))
|
||||
|
||||
additional_args = {"context.subset": "review"}
|
||||
failures.append(
|
||||
DBAssert.count_of_types(dbcon, "representation", 0,
|
||||
additional_args=additional_args))
|
||||
|
||||
assert not any(failures)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_case = TestPublishInPhotoshopImageReviews()
|
||||
127
website/docs/admin_hosts_photoshop.md
Normal file
127
website/docs/admin_hosts_photoshop.md
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
---
|
||||
id: admin_hosts_photoshop
|
||||
title: Photoshop Settings
|
||||
sidebar_label: Photoshop
|
||||
---
|
||||
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
## Photoshop settings
|
||||
|
||||
There is a couple of settings that could configure publishing process for **Photoshop**.
|
||||
All of them are Project based, eg. each project could have different configuration.
|
||||
|
||||
Location: Settings > Project > Photoshop
|
||||
|
||||

|
||||
|
||||
## Color Management (ImageIO)
|
||||
|
||||
Placeholder for Color Management. Currently not implemented yet.
|
||||
|
||||
## Creator plugins
|
||||
|
||||
Contains configurable items for creators used during publishing from Photoshop.
|
||||
|
||||
### Create Image
|
||||
|
||||
Provides list of [variants](artist_concepts.md#variant) that will be shown to an artist in Publisher. Default value `Main`.
|
||||
|
||||
### Create Flatten Image
|
||||
|
||||
Provides simplified publishing process. It will create single `image` instance for artist automatically. This instance will
|
||||
produce flatten image from all visible layers in a workfile.
|
||||
|
||||
- Subset template for flatten image - provide template for subset name for this instance (example `imageBeauty`)
|
||||
- Review - should be separate review created for this instance
|
||||
|
||||
### Create Review
|
||||
|
||||
Creates single `review` instance automatically. This allows artists to disable it if needed.
|
||||
|
||||
### Create Workfile
|
||||
|
||||
Creates single `workfile` instance automatically. This allows artists to disable it if needed.
|
||||
|
||||
## Publish plugins
|
||||
|
||||
Contains configurable items for publish plugins used during publishing from Photoshop.
|
||||
|
||||
### Collect Color Coded Instances
|
||||
|
||||
Used only in remote publishing!
|
||||
|
||||
Allows to create automatically `image` instances for configurable highlight color set on layer or group in the workfile.
|
||||
|
||||
#### Create flatten image
|
||||
- Flatten with images - produce additional `image` with all published `image` instances merged
|
||||
- Flatten only - produce only merged `image` instance
|
||||
- No - produce only separate `image` instances
|
||||
|
||||
#### Subset template for flatten image
|
||||
|
||||
Template used to create subset name automatically (example `image{layer}Main` - uses layer name in subset name)
|
||||
|
||||
### Collect Review
|
||||
|
||||
Disable if no review should be created
|
||||
|
||||
### Collect Version
|
||||
|
||||
If enabled it will push version from workfile name to all published items. Eg. if artist is publishing `test_asset_workfile_v005.psd`
|
||||
produced `image` and `review` files will contain `v005` (even if some previous version were skipped for particular family).
|
||||
|
||||
### Validate Containers
|
||||
|
||||
Checks if all imported assets to the workfile through `Loader` are in latest version. Limits cases that older version of asset would be used.
|
||||
|
||||
If enabled, artist might still decide to disable validation for each publish (for special use cases).
|
||||
Limit this optionality by toggling `Optional`.
|
||||
`Active` toggle denotes that by default artists sees that optional validation as enabled.
|
||||
|
||||
### Validate naming of subsets and layers
|
||||
|
||||
Subset cannot contain invalid characters or extract to file would fail
|
||||
|
||||
#### Regex pattern of invalid characters
|
||||
|
||||
Contains weird characters like `/`, `/`, these might cause an issue when file (which contains subset name) is created on OS disk.
|
||||
|
||||
#### Replacement character
|
||||
|
||||
Replace all offending characters with this one. `_` is default.
|
||||
|
||||
### Extract Image
|
||||
|
||||
Controls extension formats of published instances of `image` family. `png` and `jpg` are by default.
|
||||
|
||||
### Extract Review
|
||||
|
||||
Controls output definitions of extracted reviews to upload on Asset Management (AM).
|
||||
|
||||
#### Makes an image sequence instead of flatten image
|
||||
|
||||
If multiple `image` instances are produced, glue created images into image sequence (`mov`) to review all of them separetely.
|
||||
Without it only flatten image would be produced.
|
||||
|
||||
#### Maximum size of sources for review
|
||||
|
||||
Set Byte limit for review file. Applicable if gigantic `image` instances are produced, full image size is unnecessary to upload to AM.
|
||||
|
||||
#### Extract jpg Options
|
||||
|
||||
Handles tags for produced `.jpg` representation. `Create review` and `Add review to Ftrack` are defaults.
|
||||
|
||||
#### Extract mov Options
|
||||
|
||||
Handles tags for produced `.mov` representation. `Create review` and `Add review to Ftrack` are defaults.
|
||||
|
||||
|
||||
### Workfile Builder
|
||||
|
||||
Allows to open prepared workfile for an artist when no workfile exists. Useful to share standards, additional helpful content in the workfile.
|
||||
|
||||
Could be configured per `Task type`, eg. `composition` task type could use different `.psd` template file than `art` task.
|
||||
Workfile template must be accessible for all artists.
|
||||
(Currently not handled by [SiteSync](module_site_sync.md))
|
||||
BIN
website/docs/assets/admin_hosts_photoshop_settings.png
Normal file
BIN
website/docs/assets/admin_hosts_photoshop_settings.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 14 KiB |
|
|
@ -7,80 +7,112 @@ sidebar_label: Site Sync
|
|||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
Site Sync allows users and studios to synchronize published assets between
|
||||
multiple 'sites'. Site denotes a storage location,
|
||||
which could be a physical disk, server, cloud storage. To be able to use site
|
||||
sync, it first needs to be configured.
|
||||
|
||||
:::warning
|
||||
**This feature is** currently **in a beta stage** and it is not recommended to rely on it fully for production.
|
||||
:::
|
||||
|
||||
Site Sync allows users and studios to synchronize published assets between multiple 'sites'. Site denotes a storage location,
|
||||
which could be a physical disk, server, cloud storage. To be able to use site sync, it first needs to be configured.
|
||||
|
||||
The general idea is that each user acts as an individual site and can download and upload any published project files when they are needed. that way, artist can have access to the whole project, but only every store files that are relevant to them on their home workstation.
|
||||
The general idea is that each user acts as an individual site and can download
|
||||
and upload any published project files when they are needed. That way, artists
|
||||
can have access to the whole project, but only ever store files that are
|
||||
relevant to them on their home workstation.
|
||||
|
||||
:::note
|
||||
At the moment site sync is only able to deal with publishes files. No workfiles will be synchronized unless they are published. We are working on making workfile synchronization possible as well.
|
||||
At the moment site sync is only able to deal with publishes files. No workfiles
|
||||
will be synchronized unless they are published. We are working on making
|
||||
workfile synchronization possible as well.
|
||||
:::
|
||||
|
||||
## System Settings
|
||||
|
||||
To use synchronization, *Site Sync* needs to be enabled globally in **OpenPype Settings/System/Modules/Site Sync**.
|
||||
To use synchronization, *Site Sync* needs to be enabled globally in **OpenPype
|
||||
Settings/System/Modules/Site Sync**.
|
||||
|
||||

|
||||
|
||||
### Sites
|
||||
### Sites
|
||||
|
||||
By default there are two sites created for each OpenPype installation:
|
||||
- **studio** - default site - usually a centralized mounted disk accessible to all artists. Studio site is used if Site Sync is disabled.
|
||||
- **local** - each workstation or server running OpenPype Tray receives its own with unique site name. Workstation refers to itself as "local"however all other sites will see it under it's unique ID.
|
||||
|
||||
Artists can explore their site ID by opening OpenPype Info tool by clicking on a version number in the tray app.
|
||||
- **studio** - default site - usually a centralized mounted disk accessible to
|
||||
all artists. Studio site is used if Site Sync is disabled.
|
||||
- **local** - each workstation or server running OpenPype Tray receives its own
|
||||
with unique site name. Workstation refers to itself as "local", however all
|
||||
other sites will see it under its unique ID.
|
||||
|
||||
Many different sites can be created and configured on the system level, and some or all can be assigned to each project.
|
||||
Artists can explore their site ID by opening OpenPype Info tool by clicking on
|
||||
a version number in the tray app.
|
||||
|
||||
Each OpenPype Tray app works with two sites at one time. (Sites can be the same, and no syncing is done in this setup).
|
||||
Many different sites can be created and configured on the system level, and
|
||||
some or all can be assigned to each project.
|
||||
|
||||
Sites could be configured differently per project basis.
|
||||
Each OpenPype Tray app works with two sites at one time. (Sites can be the
|
||||
same, and no syncing is done in this setup).
|
||||
|
||||
Each new site needs to be created first in `System Settings`. Most important feature of site is its Provider, select one from already prepared Providers.
|
||||
Sites could be configured differently per project basis.
|
||||
|
||||
#### Alternative sites
|
||||
Each new site needs to be created first in `System Settings`. Most important
|
||||
feature of site is its Provider, select one from already prepared Providers.
|
||||
|
||||
#### Alternative sites
|
||||
|
||||
This attribute is meant for special use cases only.
|
||||
|
||||
One of the use cases is sftp site vendoring (exposing) same data as regular site (studio). Each site is accessible for different audience. 'studio' for artists in a studio via shared disk, 'sftp' for externals via sftp server with mounted 'studio' drive.
|
||||
One of the use cases is sftp site vendoring (exposing) same data as regular
|
||||
site (studio). Each site is accessible for different audience. 'studio' for
|
||||
artists in a studio via shared disk, 'sftp' for externals via sftp server with
|
||||
mounted 'studio' drive.
|
||||
|
||||
Change of file status on one site actually means same change on 'alternate' site occurred too. (eg. artists publish to 'studio', 'sftp' is using
|
||||
same location >> file is accessible on 'sftp' site right away, no need to sync it anyhow.)
|
||||
Change of file status on one site actually means same change on 'alternate'
|
||||
site occurred too. (eg. artists publish to 'studio', 'sftp' is using
|
||||
same location >> file is accessible on 'sftp' site right away, no need to sync
|
||||
it anyhow.)
|
||||
|
||||
##### Example
|
||||
|
||||

|
||||
Admin created new `sftp` site which is handled by `SFTP` provider. Somewhere in the studio SFTP server is deployed on a machine that has access to `studio` drive.
|
||||
Admin created new `sftp` site which is handled by `SFTP` provider. Somewhere in
|
||||
the studio SFTP server is deployed on a machine that has access to `studio`
|
||||
drive.
|
||||
|
||||
Alternative sites work both way:
|
||||
|
||||
- everything published to `studio` is accessible on a `sftp` site too
|
||||
- everything published to `sftp` (most probably via artist's local disk - artists publishes locally, representation is marked to be synced to `sftp`. Immediately after it is synced, it is marked to be available on `studio` too for artists in the studio to use.)
|
||||
- everything published to `sftp` (most probably via artist's local disk -
|
||||
artists publishes locally, representation is marked to be synced to `sftp`.
|
||||
Immediately after it is synced, it is marked to be available on `studio` too
|
||||
for artists in the studio to use.)
|
||||
|
||||
## Project Settings
|
||||
|
||||
Sites need to be made available for each project. Of course this is possible to do on the default project as well, in which case all other projects will inherit these settings until overridden explicitly.
|
||||
Sites need to be made available for each project. Of course this is possible to
|
||||
do on the default project as well, in which case all other projects will
|
||||
inherit these settings until overridden explicitly.
|
||||
|
||||
You'll find the setting in **Settings/Project/Global/Site Sync**
|
||||
|
||||
The attributes that can be configured will vary between sites and their providers.
|
||||
The attributes that can be configured will vary between sites and their
|
||||
providers.
|
||||
|
||||
## Local settings
|
||||
|
||||
Each user should configure root folder for their 'local' site via **Local Settings** in OpenPype Tray. This folder will be used for all files that the user publishes or downloads while working on a project. Artist has the option to set the folder as "default"in which case it is used for all the projects, or it can be set on a project level individually.
|
||||
Each user should configure root folder for their 'local' site via **Local
|
||||
Settings** in OpenPype Tray. This folder will be used for all files that the
|
||||
user publishes or downloads while working on a project. Artist has the option
|
||||
to set the folder as "default", in which case it is used for all the projects, or
|
||||
it can be set on a project level individually.
|
||||
|
||||
Artists can also override which site they use as active and remote if need be.
|
||||
Artists can also override which site they use as active and remote if need be.
|
||||
|
||||

|
||||
|
||||
|
||||
## Providers
|
||||
|
||||
Each site implements a so called `provider` which handles most common operations (list files, copy files etc.) and provides interface with a particular type of storage. (disk, gdrive, aws, etc.)
|
||||
Multiple configured sites could share the same provider with different settings (multiple mounted disks - each disk can be a separate site, while
|
||||
Each site implements a so called `provider` which handles most common
|
||||
operations (list files, copy files etc.) and provides interface with a
|
||||
particular type of storage. (disk, gdrive, aws, etc.)
|
||||
Multiple configured sites could share the same provider with different
|
||||
settings (multiple mounted disks - each disk can be a separate site, while
|
||||
all share the same provider).
|
||||
|
||||
**Currently implemented providers:**
|
||||
|
|
@ -89,21 +121,30 @@ all share the same provider).
|
|||
|
||||
Handles files stored on disk storage.
|
||||
|
||||
Local drive provider is the most basic one that is used for accessing all standard hard disk storage scenarios. It will work with any storage that can be mounted on your system in a standard way. This could correspond to a physical external hard drive, network mounted storage, internal drive or even VPN connected network drive. It doesn't care about how the drive is mounted, but you must be able to point to it with a simple directory path.
|
||||
Local drive provider is the most basic one that is used for accessing all
|
||||
standard hard disk storage scenarios. It will work with any storage that can be
|
||||
mounted on your system in a standard way. This could correspond to a physical
|
||||
external hard drive, network mounted storage, internal drive or even VPN
|
||||
connected network drive. It doesn't care about how the drive is mounted, but
|
||||
you must be able to point to it with a simple directory path.
|
||||
|
||||
Default sites `local` and `studio` both use local drive provider.
|
||||
|
||||
|
||||
### Google Drive
|
||||
|
||||
Handles files on Google Drive (this). GDrive is provided as a production example for implementing other cloud providers
|
||||
Handles files on Google Drive (this). GDrive is provided as a production
|
||||
example for implementing other cloud providers
|
||||
|
||||
Let's imagine a small globally distributed studio which wants all published work for all their freelancers uploaded to Google Drive folder.
|
||||
Let's imagine a small globally distributed studio which wants all published
|
||||
work for all their freelancers uploaded to Google Drive folder.
|
||||
|
||||
For this use case admin needs to configure:
|
||||
- how many times it tries to synchronize file in case of some issue (network, permissions)
|
||||
|
||||
- how many times it tries to synchronize file in case of some issue (network,
|
||||
permissions)
|
||||
- how often should synchronization check for new assets
|
||||
- sites for synchronization - 'local' and 'gdrive' (this can be overridden in local settings)
|
||||
- sites for synchronization - 'local' and 'gdrive' (this can be overridden in
|
||||
local settings)
|
||||
- user credentials
|
||||
- root folder location on Google Drive side
|
||||
|
||||
|
|
@ -111,30 +152,43 @@ Configuration would look like this:
|
|||
|
||||

|
||||
|
||||
*Site Sync* for Google Drive works using its API: https://developers.google.com/drive/api/v3/about-sdk
|
||||
*Site Sync* for Google Drive works using its
|
||||
API: https://developers.google.com/drive/api/v3/about-sdk
|
||||
|
||||
To configure Google Drive side you would need to have access to Google Cloud Platform project: https://console.cloud.google.com/
|
||||
To configure Google Drive side you would need to have access to Google Cloud
|
||||
Platform project: https://console.cloud.google.com/
|
||||
|
||||
To get working connection to Google Drive there are some necessary steps:
|
||||
- first you need to enable GDrive API: https://developers.google.com/drive/api/v3/enable-drive-api
|
||||
- next you need to create user, choose **Service Account** (for basic configuration no roles for account are necessary)
|
||||
|
||||
- first you need to enable GDrive
|
||||
API: https://developers.google.com/drive/api/v3/enable-drive-api
|
||||
- next you need to create user, choose **Service Account** (for basic
|
||||
configuration no roles for account are necessary)
|
||||
- add new key for created account and download .json file with credentials
|
||||
- share destination folder on the Google Drive with created account (directly in GDrive web application)
|
||||
- add new site back in OpenPype Settings, name as you want, provider needs to be 'gdrive'
|
||||
- share destination folder on the Google Drive with created account (directly
|
||||
in GDrive web application)
|
||||
- add new site back in OpenPype Settings, name as you want, provider needs to
|
||||
be 'gdrive'
|
||||
- distribute credentials file via shared mounted disk location
|
||||
|
||||
:::note
|
||||
If you are using regular personal GDrive for testing don't forget adding `/My Drive` as the prefix in root configuration. Business accounts and share drives don't need this.
|
||||
If you are using regular personal GDrive for testing don't forget
|
||||
adding `/My Drive` as the prefix in root configuration. Business accounts and
|
||||
share drives don't need this.
|
||||
:::
|
||||
|
||||
### SFTP
|
||||
|
||||
SFTP provider is used to connect to SFTP server. Currently authentication with `user:password` or `user:ssh key` is implemented.
|
||||
Please provide only one combination, don't forget to provide password for ssh key if ssh key was created with a passphrase.
|
||||
SFTP provider is used to connect to SFTP server. Currently authentication
|
||||
with `user:password` or `user:ssh key` is implemented.
|
||||
Please provide only one combination, don't forget to provide password for ssh
|
||||
key if ssh key was created with a passphrase.
|
||||
|
||||
(SFTP connection could be a bit finicky, use FileZilla or WinSCP for testing connection, it will be mush faster.)
|
||||
(SFTP connection could be a bit finicky, use FileZilla or WinSCP for testing
|
||||
connection, it will be much faster.)
|
||||
|
||||
Beware that ssh key expects OpenSSH format (`.pem`) not a Putty format (`.ppk`)!
|
||||
Beware that ssh key expects OpenSSH format (`.pem`) not a Putty
|
||||
format (`.ppk`)!
|
||||
|
||||
#### How to set SFTP site
|
||||
|
||||
|
|
@ -143,60 +197,101 @@ Beware that ssh key expects OpenSSH format (`.pem`) not a Putty format (`.ppk`)!
|
|||
|
||||

|
||||
|
||||
- In Projects setting enable Site Sync (on default project - all project will be synched, or on specific project)
|
||||
- Configure SFTP connection and destination folder on a SFTP server (in screenshot `/upload`)
|
||||
- In Projects setting enable Site Sync (on default project - all project will
|
||||
be synched, or on specific project)
|
||||
- Configure SFTP connection and destination folder on a SFTP server (in
|
||||
screenshot `/upload`)
|
||||
|
||||

|
||||
|
||||
- if you want to force syncing between local and sftp site for all users, use combination `active site: local`, `remote site: NAME_OF_SFTP_SITE`
|
||||
- if you want to allow only specific users to use SFTP syncing (external users, not located in the office), use `active site: studio`, `remote site: studio`.
|
||||
|
||||
- if you want to force syncing between local and sftp site for all users, use
|
||||
combination `active site: local`, `remote site: NAME_OF_SFTP_SITE`
|
||||
- if you want to allow only specific users to use SFTP syncing (external users,
|
||||
not located in the office), use `active site: studio`, `remote site: studio`.
|
||||
|
||||

|
||||
|
||||
- Each artist can decide and configure syncing from his/her local to SFTP via `Local Settings`
|
||||
- Each artist can decide and configure syncing from his/her local to SFTP
|
||||
via `Local Settings`
|
||||
|
||||

|
||||
|
||||
|
||||
### Custom providers
|
||||
|
||||
If a studio needs to use other services for cloud storage, or want to implement totally different storage providers, they can do so by writing their own provider plugin. We're working on a developer documentation, however, for now we recommend looking at `abstract_provider.py`and `gdrive.py` inside `openpype/modules/sync_server/providers` and using it as a template.
|
||||
If a studio needs to use other services for cloud storage, or want to implement
|
||||
totally different storage providers, they can do so by writing their own
|
||||
provider plugin. We're working on a developer documentation, however, for now
|
||||
we recommend looking at `abstract_provider.py` and `gdrive.py`
|
||||
inside `openpype/modules/sync_server/providers` and using it as a template.
|
||||
|
||||
### Running Site Sync in background
|
||||
|
||||
Site Sync server synchronizes new published files from artist machine into configured remote location by default.
|
||||
Site Sync server synchronizes new published files from artist machine into
|
||||
configured remote location by default.
|
||||
|
||||
There might be a use case where you need to synchronize between "non-artist" sites, for example between studio site and cloud. In this case
|
||||
you need to run Site Sync as a background process from a command line (via service etc) 24/7.
|
||||
There might be a use case where you need to synchronize between "non-artist"
|
||||
sites, for example between studio site and cloud. In this case
|
||||
you need to run Site Sync as a background process from a command line (via
|
||||
service etc) 24/7.
|
||||
|
||||
To configure all sites where all published files should be synced eventually you need to configure `project_settings/global/sync_server/config/always_accessible_on` property in Settings (per project) first.
|
||||
To configure all sites where all published files should be synced eventually
|
||||
you need to
|
||||
configure `project_settings/global/sync_server/config/always_accessible_on`
|
||||
property in Settings (per project) first.
|
||||
|
||||

|
||||
|
||||
This is an example of:
|
||||
|
||||
- Site Sync is enabled for a project
|
||||
- default active and remote sites are set to `studio` - eg. standard process: everyone is working in a studio, publishing to shared location etc.
|
||||
- (but this also allows any of the artists to work remotely, they would change their active site in their own Local Settings to `local` and configure local root.
|
||||
This would result in everything artist publishes is saved first onto his local folder AND synchronized to `studio` site eventually.)
|
||||
- default active and remote sites are set to `studio` - eg. standard process:
|
||||
everyone is working in a studio, publishing to shared location etc.
|
||||
- (but this also allows any of the artists to work remotely, they would change
|
||||
their active site in their own Local Settings to `local` and configure local
|
||||
root.
|
||||
This would result in everything artist publishes is saved first onto his
|
||||
local folder AND synchronized to `studio` site eventually.)
|
||||
- everything exported must also be eventually uploaded to `sftp` site
|
||||
|
||||
This eventual synchronization between `studio` and `sftp` sites must be physically handled by background process.
|
||||
This eventual synchronization between `studio` and `sftp` sites must be
|
||||
physically handled by background process.
|
||||
|
||||
As current implementation relies heavily on Settings and Local Settings, background process for a specific site ('studio' for example) must be configured via Tray first to `syncserver` command to work.
|
||||
As current implementation relies heavily on Settings and Local Settings,
|
||||
background process for a specific site ('studio' for example) must be
|
||||
configured via Tray first to `syncserver` command to work.
|
||||
|
||||
To do this:
|
||||
|
||||
- run OP `Tray` with environment variable OPENPYPE_LOCAL_ID set to name of active (source) site. In most use cases it would be studio (for cases of backups of everything published to studio site to different cloud site etc.)
|
||||
- run OP `Tray` with environment variable OPENPYPE_LOCAL_ID set to name of
|
||||
active (source) site. In most use cases it would be studio (for cases of
|
||||
backups of everything published to studio site to different cloud site etc.)
|
||||
- start `Tray`
|
||||
- check `Local ID` in information dialog after clicking on version number in the Tray
|
||||
- check `Local ID` in information dialog after clicking on version number in
|
||||
the Tray
|
||||
- open `Local Settings` in the `Tray`
|
||||
- configure for each project necessary active site and remote site
|
||||
- close `Tray`
|
||||
- run OP from a command line with `syncserver` and `--active_site` arguments
|
||||
|
||||
|
||||
This is an example how to trigger background syncing process where active (source) site is `studio`.
|
||||
(It is expected that OP is installed on a machine, `openpype_console` is on PATH. If not, add full path to executable.
|
||||
This is an example how to trigger background syncing process where active (
|
||||
source) site is `studio`.
|
||||
(It is expected that OP is installed on a machine, `openpype_console` is on
|
||||
PATH. If not, add full path to executable.
|
||||
)
|
||||
|
||||
```shell
|
||||
openpype_console syncserver --active_site studio
|
||||
```
|
||||
```
|
||||
|
||||
### Syncing of last published workfile
|
||||
|
||||
Some DCC might have enabled
|
||||
in `project_setting/global/tools/Workfiles/last_workfile_on_startup`, eg. open
|
||||
DCC with last opened workfile.
|
||||
|
||||
Flag `use_last_published_workfile` tells that last published workfile should be
|
||||
used if no workfile is present locally.
|
||||
This use case could happen if artists starts working on new task locally,
|
||||
doesn't have any workfile present. In that case last published will be
|
||||
synchronized locally and its version bumped by 1 (as workfile's version is
|
||||
always +1 from published version).
|
||||
|
|
@ -126,6 +126,7 @@ module.exports = {
|
|||
"admin_hosts_nuke",
|
||||
"admin_hosts_resolve",
|
||||
"admin_hosts_harmony",
|
||||
"admin_hosts_photoshop",
|
||||
"admin_hosts_aftereffects",
|
||||
"admin_hosts_tvpaint"
|
||||
],
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue