Merge branch 'develop' into feature/OP-3883_Change-extractor-usage-in-unreal

@@ -58,7 +58,7 @@ def get_projects(active=True, inactive=False, fields=None):
         yield project_doc


-def get_project(project_name, active=True, inactive=False, fields=None):
+def get_project(project_name, active=True, inactive=True, fields=None):
     # Skip if both are disabled
     if not active and not inactive:
         return None
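The only functional change in this hunk is the `inactive` default flipping from `False` to `True`, so `get_project` now also returns archived projects unless the caller opts out. A minimal sketch of the call-site impact (signature taken from the hunk above):

    project = get_project("my_project")                  # found even when archived
    project = get_project("my_project", inactive=False)  # restores the old behavior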
@@ -78,6 +78,23 @@ class Context:
     _project_doc = None


+def get_main_window():
+    """Acquire Nuke's main window."""
+    if Context.main_window is None:
+        from Qt import QtWidgets
+
+        top_widgets = QtWidgets.QApplication.topLevelWidgets()
+        name = "Foundry::UI::DockMainWindow"
+        for widget in top_widgets:
+            if (
+                widget.inherits("QMainWindow")
+                and widget.metaObject().className() == name
+            ):
+                Context.main_window = widget
+                break
+    return Context.main_window
+
+
 class Knobby(object):
     """For creating a knob whose type isn't mapped in `create_knobs`
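`get_main_window` caches its lookup on `Context.main_window`, so repeated calls are cheap. A sketch of the intended use when parenting Qt dialogs to Nuke (Qt binding assumed to be importable):

    from Qt import QtWidgets

    dialog = QtWidgets.QDialog(parent=get_main_window())
    dialog.exec_()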
@@ -2706,32 +2723,25 @@ class DirmapCache:


 @contextlib.contextmanager
-def _duplicate_node_temp():
+def node_tempfile():
     """Create a temp file where node is pasted during duplication.

     This is to avoid using clipboard for node duplication.
     """

-    duplicate_node_temp_path = os.path.join(
-        tempfile.gettempdir(),
-        "openpype_nuke_duplicate_temp_{}".format(os.getpid())
-    )
-
-    # This can happen only if 'duplicate_node' would be
-    if os.path.exists(duplicate_node_temp_path):
-        log.warning((
-            "Temp file for node duplication already exists."
-            " Trying to remove {}"
-        ).format(duplicate_node_temp_path))
-        os.remove(duplicate_node_temp_path)
+    tmp_file = tempfile.NamedTemporaryFile(
+        mode="w", prefix="openpype_nuke_temp_", suffix=".nk", delete=False
+    )
+    tmp_file.close()
+    node_tempfile_path = tmp_file.name

     try:
         # Yield the path where node can be copied
-        yield duplicate_node_temp_path
+        yield node_tempfile_path

     finally:
         # Remove the file at the end
-        os.remove(duplicate_node_temp_path)
+        os.remove(node_tempfile_path)


 def duplicate_node(node):

@@ -2740,7 +2750,7 @@ def duplicate_node(node):
     # select required node for duplication
     node.setSelected(True)

-    with _duplicate_node_temp() as filepath:
+    with node_tempfile() as filepath:
         # copy selected to temp filepath
         nuke.nodeCopy(filepath)
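`node_tempfile` swaps the fixed per-process path for `NamedTemporaryFile(delete=False)`, so every duplication gets a unique file and the stale-file warning path disappears. A sketch of the round trip `duplicate_node` performs with it (the paste half is assumed from the function name; Nuke copy/paste API as used above):

    with node_tempfile() as filepath:
        nuke.nodeCopy(filepath)   # write the selected node to the temp .nk
        nuke.nodePaste(filepath)  # paste it back as the duplicate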
@@ -2815,3 +2825,100 @@ def ls_img_sequence(path):
         }

     return False
+
+
+def get_group_io_nodes(nodes):
+    """Get the input and the output of a group of nodes."""
+
+    if not nodes:
+        raise ValueError("there are no nodes in the list")
+
+    input_node = None
+    output_node = None
+
+    if len(nodes) == 1:
+        input_node = output_node = nodes[0]
+
+    else:
+        for node in nodes:
+            if "Input" in node.name():
+                input_node = node
+
+            if "Output" in node.name():
+                output_node = node
+
+            if input_node is not None and output_node is not None:
+                break
+
+    if input_node is None:
+        raise ValueError("No Input found")
+
+    if output_node is None:
+        raise ValueError("No Output found")
+    return input_node, output_node
+
+
+def get_extreme_positions(nodes):
+    """Get the bounding box (min_x, min_y, max_x, max_y) of a group of nodes."""
+
+    if not nodes:
+        raise ValueError("there are no nodes in the list")
+
+    nodes_xpos = [n.xpos() for n in nodes] + \
+        [n.xpos() + n.screenWidth() for n in nodes]
+
+    nodes_ypos = [n.ypos() for n in nodes] + \
+        [n.ypos() + n.screenHeight() for n in nodes]
+
+    min_x, min_y = (min(nodes_xpos), min(nodes_ypos))
+    max_x, max_y = (max(nodes_xpos), max(nodes_ypos))
+    return min_x, min_y, max_x, max_y
+
+
+def refresh_node(node):
+    """Work around a refresh bug caused by Nuke's multi-threading.
+
+    Refresh the node to make sure that it takes the desired attributes.
+    """
+
+    x = node.xpos()
+    y = node.ypos()
+    nuke.autoplaceSnap(node)
+    node.setXYpos(x, y)
+
+
+def refresh_nodes(nodes):
+    for node in nodes:
+        refresh_node(node)
+
+
+def get_names_from_nodes(nodes):
+    """Get list of node names.
+
+    Args:
+        nodes (List[nuke.Node]): List of nodes to convert into names.
+
+    Returns:
+        List[str]: Names of the passed nodes.
+    """
+
+    return [
+        node.name()
+        for node in nodes
+    ]
+
+
+def get_nodes_by_names(names):
+    """Get list of nuke nodes based on their names.
+
+    Args:
+        names (List[str]): List of node names to be found.
+
+    Returns:
+        List[nuke.Node]: List of nodes found by name.
+    """
+
+    return [
+        nuke.toNode(name)
+        for name in names
+    ]
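These helpers feed the template loader added below; a sketch of combining them to inspect a selection (non-empty selection assumed):

    nodes = nuke.selectedNodes()
    input_node, output_node = get_group_io_nodes(nodes)
    min_x, min_y, max_x, max_y = get_extreme_positions(nodes)
    refresh_nodes(nodes)
    print(get_names_from_nodes(nodes))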
openpype/hosts/nuke/api/lib_template_builder.py  (new file, 220 lines)
@@ -0,0 +1,220 @@
from collections import OrderedDict

import qargparse

import nuke

from openpype.tools.utils.widgets import OptionDialog

from .lib import imprint, get_main_window


# To change as enum
build_types = ["context_asset", "linked_asset", "all_assets"]


def get_placeholder_attributes(node, enumerate=False):
    list_atts = {
        "builder_type",
        "family",
        "representation",
        "loader",
        "loader_args",
        "order",
        "asset",
        "subset",
        "hierarchy",
        "siblings",
        "last_loaded"
    }
    attributes = {}
    for attr in node.knobs().keys():
        if attr in list_atts:
            if enumerate:
                try:
                    attributes[attr] = node.knob(attr).values()
                except AttributeError:
                    attributes[attr] = node.knob(attr).getValue()
            else:
                attributes[attr] = node.knob(attr).getValue()

    return attributes
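The function filters a node's knobs down to the placeholder schema listed above; a sketch of reading an imprint back from a selected placeholder (selection assumed):

    node = nuke.selectedNodes()[0]
    attrs = get_placeholder_attributes(node)
    print(attrs.get("builder_type"), attrs.get("family"))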


def delete_placeholder_attributes(node):
    """Delete all extra placeholder attributes."""

    extra_attributes = get_placeholder_attributes(node)
    for attribute in extra_attributes.keys():
        try:
            node.removeKnob(node.knob(attribute))
        except ValueError:
            continue


def hide_placeholder_attributes(node):
    """Hide all extra placeholder attributes."""

    extra_attributes = get_placeholder_attributes(node)
    for attribute in extra_attributes.keys():
        try:
            node.knob(attribute).setVisible(False)
        except ValueError:
            continue


def create_placeholder():
    args = placeholder_window()
    if not args:
        # operation canceled, no locator created
        return

    placeholder = nuke.nodes.NoOp()
    placeholder.setName("PLACEHOLDER")
    placeholder.knob("tile_color").setValue(4278190335)

    # custom arg parse to force empty data query
    # and still imprint them on placeholder
    # and getting items when arg is of type Enumerator
    options = OrderedDict()
    for arg in args:
        if not isinstance(arg, qargparse.Separator):
            options[str(arg)] = arg._data.get("items") or arg.read()
    imprint(placeholder, options)
    imprint(placeholder, {"is_placeholder": True})
    placeholder.knob("is_placeholder").setVisible(False)


def update_placeholder():
    placeholder = nuke.selectedNodes()
    if not placeholder:
        raise ValueError("No node selected")
    if len(placeholder) > 1:
        raise ValueError("Too many selected nodes")
    placeholder = placeholder[0]

    args = placeholder_window(get_placeholder_attributes(placeholder))
    if not args:
        return  # operation canceled
    # delete placeholder attributes
    delete_placeholder_attributes(placeholder)

    options = OrderedDict()
    for arg in args:
        if not isinstance(arg, qargparse.Separator):
            options[str(arg)] = arg._data.get("items") or arg.read()
    imprint(placeholder, options)


def imprint_enum(placeholder, args):
    """Imprint method doesn't act properly with enums.

    Replacing the functionality with this for now.
    """

    enum_values = {
        str(arg): arg.read()
        for arg in args
        if arg._data.get("items")
    }
    string_to_value_enum_table = {
        build: idx
        for idx, build in enumerate(build_types)
    }
    attrs = {}
    for key, value in enum_values.items():
        attrs[key] = string_to_value_enum_table[value]


def placeholder_window(options=None):
    options = options or dict()
    dialog = OptionDialog(parent=get_main_window())
    dialog.setWindowTitle("Create Placeholder")

    args = [
        qargparse.Separator("Main attributes"),
        qargparse.Enum(
            "builder_type",
            label="Asset Builder Type",
            default=options.get("builder_type", 0),
            items=build_types,
            help="""Asset Builder Type
Builder type describes what the template loader will look for.

context_asset : Template loader will look for subsets of
the current context asset (asset "bob" will find its own subsets)

linked_asset : Template loader will look for assets linked
to the current context asset.
Linked assets are looked up in the OpenPype database under the field "inputLinks"
"""
        ),
        qargparse.String(
            "family",
            default=options.get("family", ""),
            label="OpenPype Family",
            placeholder="ex: image, plate ..."),
        qargparse.String(
            "representation",
            default=options.get("representation", ""),
            label="OpenPype Representation",
            placeholder="ex: mov, png ..."),
        qargparse.String(
            "loader",
            default=options.get("loader", ""),
            label="Loader",
            placeholder="ex: LoadClip, LoadImage ...",
            help="""Loader

Defines what OpenPype loader will be used to load assets.
Usable loaders depend on the current host's loader list.
Field is case sensitive.
"""),
        qargparse.String(
            "loader_args",
            default=options.get("loader_args", ""),
            label="Loader Arguments",
            placeholder='ex: {"camera":"persp", "lights":True}',
            help="""Loader Arguments

Defines a dictionary of arguments used to load assets.
Usable arguments depend on the current placeholder Loader.
Field should be a valid python dict. Anything else will be ignored.
"""),
        qargparse.Integer(
            "order",
            default=options.get("order", 0),
            min=0,
            max=999,
            label="Order",
            placeholder="ex: 0, 100 ... (smallest order loaded first)",
            help="""Order

Order defines asset loading priority (0 to 999).
Priority rule is: "lowest is first to load"."""),
        qargparse.Separator(
            "Optional attributes"),
        qargparse.String(
            "asset",
            default=options.get("asset", ""),
            label="Asset filter",
            placeholder="regex filtering by asset name",
            help="Filtering assets by matching the regex to the asset's name"),
        qargparse.String(
            "subset",
            default=options.get("subset", ""),
            label="Subset filter",
            placeholder="regex filtering by subset name",
            help="Filtering assets by matching the regex to the subset's name"),
        qargparse.String(
            "hierarchy",
            default=options.get("hierarchy", ""),
            label="Hierarchy filter",
            placeholder="regex filtering by asset's hierarchy",
            help="Filtering assets by matching the regex to the asset's hierarchy")
    ]
    dialog.create(args)
    if not dialog.exec_():
        return None

    return args
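Create and update share this dialog: `placeholder_window(options)` pre-fills defaults from an existing placeholder's knob values and returns the confirmed qargparse args, or `None` on cancel. A sketch of the update round trip (Nuke selection assumed):

    node = nuke.selectedNodes()[0]
    args = placeholder_window(get_placeholder_attributes(node))
    if args is not None:
        delete_placeholder_attributes(node)  # the old imprint is dropped, then re-imprinted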
@@ -22,10 +22,16 @@ from openpype.pipeline import (
     AVALON_CONTAINER_ID,
 )
 from openpype.pipeline.workfile import BuildWorkfile
+from openpype.pipeline.workfile.build_template import (
+    build_workfile_template,
+    update_workfile_template
+)
 from openpype.tools.utils import host_tools

 from .command import viewer_update_and_undo_stop
 from .lib import (
+    Context,
+    get_main_window,
     add_publish_knob,
     WorkfileSettings,
     process_workfile_builder,

@@ -33,7 +39,9 @@ from .lib import (
     check_inventory_versions,
     set_avalon_knob_data,
     read_avalon_data,
-    Context
 )
+from .lib_template_builder import (
+    create_placeholder, update_placeholder
+)

 log = Logger.get_logger(__name__)

@@ -53,23 +61,6 @@ if os.getenv("PYBLISH_GUI", None):
     pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None))


-def get_main_window():
-    """Acquire Nuke's main window"""
-    if Context.main_window is None:
-        from Qt import QtWidgets
-
-        top_widgets = QtWidgets.QApplication.topLevelWidgets()
-        name = "Foundry::UI::DockMainWindow"
-        for widget in top_widgets:
-            if (
-                widget.inherits("QMainWindow")
-                and widget.metaObject().className() == name
-            ):
-                Context.main_window = widget
-                break
-    return Context.main_window
-
-
 def reload_config():
     """Attempt to reload pipeline at run-time.

@@ -219,6 +210,24 @@ def _install_menu():
         lambda: BuildWorkfile().process()
     )

+    menu_template = menu.addMenu("Template Builder")  # creating template menu
+    menu_template.addCommand(
+        "Build Workfile from template",
+        lambda: build_workfile_template()
+    )
+    menu_template.addCommand(
+        "Update Workfile",
+        lambda: update_workfile_template()
+    )
+    menu_template.addSeparator()
+    menu_template.addCommand(
+        "Create Place Holder",
+        lambda: create_placeholder()
+    )
+    menu_template.addCommand(
+        "Update Place Holder",
+        lambda: update_placeholder()
+    )
     menu.addSeparator()
     menu.addCommand(
         "Experimental tools...",
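The new submenu wires four entry points to the functions imported above; a sketch of triggering the same actions from Nuke's Script Editor (import paths taken from the import hunks):

    from openpype.pipeline.workfile.build_template import build_workfile_template
    from openpype.hosts.nuke.api.lib_template_builder import create_placeholder

    create_placeholder()       # imprint a PLACEHOLDER NoOp in the open script
    build_workfile_template()  # expand the template for the current context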
openpype/hosts/nuke/api/template_loader.py  (new file, 639 lines)
@@ -0,0 +1,639 @@
import re
import collections

import nuke

from openpype.client import get_representations
from openpype.pipeline import legacy_io
from openpype.pipeline.workfile.abstract_template_loader import (
    AbstractPlaceholder,
    AbstractTemplateLoader,
)

from .lib import (
    find_free_space_to_paste_nodes,
    get_extreme_positions,
    get_group_io_nodes,
    imprint,
    refresh_node,
    refresh_nodes,
    reset_selection,
    get_names_from_nodes,
    get_nodes_by_names,
    select_nodes,
    duplicate_node,
    node_tempfile,
)

from .lib_template_builder import (
    delete_placeholder_attributes,
    get_placeholder_attributes,
    hide_placeholder_attributes
)

PLACEHOLDER_SET = "PLACEHOLDERS_SET"


class NukeTemplateLoader(AbstractTemplateLoader):
    """Concrete implementation of AbstractTemplateLoader for Nuke."""

    def import_template(self, path):
        """Import template into current scene.

        Block if a template is already loaded.

        Args:
            path (str): A path to current template (usually given by
                get_template_path implementation)

        Returns:
            bool: Whether the template was successfully imported or not
        """

        # TODO check if the template is already imported

        nuke.nodePaste(path)
        reset_selection()

        return True
    def preload(self, placeholder, loaders_by_name, last_representation):
        placeholder.data["nodes_init"] = nuke.allNodes()
        placeholder.data["last_repre_id"] = str(last_representation["_id"])

    def populate_template(self, ignored_ids=None):
        processed_key = "_node_processed"

        processed_nodes = []
        nodes = self.get_template_nodes()
        while nodes:
            # Mark nodes as processed so they're not re-executed
            # - that can happen if processing of placeholder node fails
            for node in nodes:
                imprint(node, {processed_key: True})
                processed_nodes.append(node)

            super(NukeTemplateLoader, self).populate_template(ignored_ids)

            # Recollect nodes to repopulate
            nodes = []
            for node in self.get_template_nodes():
                # Skip already processed nodes
                if (
                    processed_key in node.knobs()
                    and node.knob(processed_key).value()
                ):
                    continue
                nodes.append(node)

        for node in processed_nodes:
            knob = node.knob(processed_key)
            if knob is not None:
                node.removeKnob(knob)

    @staticmethod
    def get_template_nodes():
        placeholders = []
        all_groups = collections.deque()
        all_groups.append(nuke.thisGroup())
        while all_groups:
            group = all_groups.popleft()
            for node in group.nodes():
                if isinstance(node, nuke.Group):
                    all_groups.append(node)

                node_knobs = node.knobs()
                if (
                    "builder_type" not in node_knobs
                    or "is_placeholder" not in node_knobs
                    or not node.knob("is_placeholder").value()
                ):
                    continue

                if "empty" in node_knobs and node.knob("empty").value():
                    continue

                placeholders.append(node)

        return placeholders
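Placeholders are recognized purely by their imprinted knobs while groups are walked breadth-first, so nothing outside the imprint matters. A sketch of the same test for a single node (knob names taken from the loop above):

    node_knobs = node.knobs()
    is_active_placeholder = (
        "builder_type" in node_knobs
        and "is_placeholder" in node_knobs
        and node.knob("is_placeholder").value()
        and not ("empty" in node_knobs and node.knob("empty").value())
    )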
    def update_missing_containers(self):
        nodes_by_id = collections.defaultdict(list)

        for node in nuke.allNodes():
            node_knobs = node.knobs().keys()
            if "repre_id" in node_knobs:
                repre_id = node.knob("repre_id").getValue()
                nodes_by_id[repre_id].append(node.name())

            if "empty" in node_knobs:
                node.removeKnob(node.knob("empty"))
                imprint(node, {"empty": False})

        for node_names in nodes_by_id.values():
            node = None
            for node_name in node_names:
                node_by_name = nuke.toNode(node_name)
                if "builder_type" in node_by_name.knobs().keys():
                    node = node_by_name
                    break

            if node is None:
                continue

            placeholder = nuke.nodes.NoOp()
            placeholder.setName("PLACEHOLDER")
            placeholder.knob("tile_color").setValue(4278190335)
            attributes = get_placeholder_attributes(node, enumerate=True)
            imprint(placeholder, attributes)
            pos_x = int(node.knob("x").getValue())
            pos_y = int(node.knob("y").getValue())
            placeholder.setXYpos(pos_x, pos_y)
            imprint(placeholder, {"nb_children": 1})
            refresh_node(placeholder)

        self.populate_template(self.get_loaded_containers_by_id())

    def get_loaded_containers_by_id(self):
        repre_ids = set()
        for node in nuke.allNodes():
            if "repre_id" in node.knobs():
                repre_ids.add(node.knob("repre_id").getValue())

        # The set already removed duplicates
        return list(repre_ids)

    def delete_placeholder(self, placeholder):
        placeholder_node = placeholder.data["node"]
        last_loaded = placeholder.data["last_loaded"]
        if not placeholder.data["delete"]:
            if "empty" in placeholder_node.knobs().keys():
                placeholder_node.removeKnob(placeholder_node.knob("empty"))
            imprint(placeholder_node, {"empty": True})
            return

        if not last_loaded:
            nuke.delete(placeholder_node)
            return

        if "last_loaded" in placeholder_node.knobs().keys():
            for node_name in placeholder_node.knob("last_loaded").values():
                node = nuke.toNode(node_name)
                try:
                    delete_placeholder_attributes(node)
                except Exception:
                    pass

        last_loaded_names = [
            loaded_node.name()
            for loaded_node in last_loaded
        ]
        imprint(placeholder_node, {"last_loaded": last_loaded_names})

        for node in last_loaded:
            refresh_node(node)
            refresh_node(placeholder_node)
            if "builder_type" not in node.knobs().keys():
                attributes = get_placeholder_attributes(placeholder_node, True)
                imprint(node, attributes)
                imprint(node, {"is_placeholder": False})
                hide_placeholder_attributes(node)
                node.knob("is_placeholder").setVisible(False)
                imprint(
                    node,
                    {
                        "x": placeholder_node.xpos(),
                        "y": placeholder_node.ypos()
                    }
                )
                node.knob("x").setVisible(False)
                node.knob("y").setVisible(False)
        nuke.delete(placeholder_node)

class NukePlaceholder(AbstractPlaceholder):
    """Concrete implementation of AbstractPlaceholder for Nuke."""

    optional_keys = {"asset", "subset", "hierarchy"}

    def get_data(self, node):
        user_data = dict()
        node_knobs = node.knobs()
        for attr in self.required_keys.union(self.optional_keys):
            if attr in node_knobs:
                user_data[attr] = node_knobs[attr].getValue()
        user_data["node"] = node

        nb_children = 0
        if "nb_children" in node_knobs:
            nb_children = int(node_knobs["nb_children"].getValue())
        user_data["nb_children"] = nb_children

        siblings = []
        if "siblings" in node_knobs:
            siblings = node_knobs["siblings"].values()
        user_data["siblings"] = siblings

        node_full_name = node.fullName()
        user_data["group_name"] = node_full_name.rpartition(".")[0]
        user_data["last_loaded"] = []
        user_data["delete"] = False
        self.data = user_data

    def parent_in_hierarchy(self, containers):
        return

    def create_sib_copies(self):
        """Create copies of the placeholder siblings (the nodes loaded
        with it) for the newly added nodes.

        Returns:
            dict: Copied nodes by the name of the source node.
        """

        copies = {}
        siblings = get_nodes_by_names(self.data["siblings"])
        for node in siblings:
            new_node = duplicate_node(node)

            x_init = int(new_node.knob("x_init").getValue())
            y_init = int(new_node.knob("y_init").getValue())
            new_node.setXYpos(x_init, y_init)
            if isinstance(new_node, nuke.BackdropNode):
                w_init = new_node.knob("w_init").getValue()
                h_init = new_node.knob("h_init").getValue()
                new_node.knob("bdwidth").setValue(w_init)
                new_node.knob("bdheight").setValue(h_init)
            refresh_node(node)

            if "repre_id" in node.knobs().keys():
                node.removeKnob(node.knob("repre_id"))
            copies[node.name()] = new_node
        return copies
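The returned mapping is keyed by the source node's name, which is exactly how `set_copies_connections` resolves copies later. A sketch of the hand-off (fields taken from `get_data` above):

    siblings = get_nodes_by_names(self.data["siblings"])
    copies = self.create_sib_copies()
    first_copy = copies[siblings[0].name()]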
    def fix_z_order(self):
        """Fix the problem of z_order when a backdrop is loaded."""

        nodes_loaded = self.data["last_loaded"]
        loaded_backdrops = []
        bd_orders = set()
        for node in nodes_loaded:
            if isinstance(node, nuke.BackdropNode):
                loaded_backdrops.append(node)
                bd_orders.add(node.knob("z_order").getValue())

        if not bd_orders:
            return

        sib_orders = set()
        for node_name in self.data["siblings"]:
            node = nuke.toNode(node_name)
            if isinstance(node, nuke.BackdropNode):
                sib_orders.add(node.knob("z_order").getValue())

        if not sib_orders:
            return

        min_order = min(bd_orders)
        max_order = max(sib_orders)
        for backdrop_node in loaded_backdrops:
            z_order = backdrop_node.knob("z_order").getValue()
            backdrop_node.knob("z_order").setValue(
                z_order + max_order - min_order + 1)
    def update_nodes(self, nodes, considered_nodes, offset_y=None):
        """Adjust backdrop nodes dimensions and positions.

        Considering some nodes sizes.

        Args:
            nodes (list): list of nodes to update
            considered_nodes (list): list of nodes to consider while
                updating positions and dimensions
            offset_y (int): distance between copies
        """

        placeholder_node = self.data["node"]

        min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes)

        diff_x = diff_y = 0
        contained_nodes = []  # for backdrops

        if offset_y is None:
            width_ph = placeholder_node.screenWidth()
            height_ph = placeholder_node.screenHeight()
            diff_y = max_y - min_y - height_ph
            diff_x = max_x - min_x - width_ph
            contained_nodes = [placeholder_node]
            min_x = placeholder_node.xpos()
            min_y = placeholder_node.ypos()
        else:
            siblings = get_nodes_by_names(self.data["siblings"])
            minX, _, maxX, _ = get_extreme_positions(siblings)
            diff_y = max_y - min_y + 20
            diff_x = abs(max_x - min_x - maxX + minX)
            contained_nodes = considered_nodes

        if diff_y <= 0 and diff_x <= 0:
            return

        for node in nodes:
            refresh_node(node)

            if (
                node == placeholder_node
                or node in considered_nodes
            ):
                continue

            if (
                not isinstance(node, nuke.BackdropNode)
                or (
                    isinstance(node, nuke.BackdropNode)
                    and not set(contained_nodes) <= set(node.getNodes())
                )
            ):
                if offset_y is None and node.xpos() >= min_x:
                    node.setXpos(node.xpos() + diff_x)

                if node.ypos() >= min_y:
                    node.setYpos(node.ypos() + diff_y)

            else:
                width = node.screenWidth()
                height = node.screenHeight()
                node.knob("bdwidth").setValue(width + diff_x)
                node.knob("bdheight").setValue(height + diff_y)

            refresh_node(node)
    def imprint_inits(self):
        """Add initial positions and dimensions to the attributes."""

        for node in nuke.allNodes():
            refresh_node(node)
            imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()})
            node.knob("x_init").setVisible(False)
            node.knob("y_init").setVisible(False)
            width = node.screenWidth()
            height = node.screenHeight()
            if "bdwidth" in node.knobs():
                imprint(node, {"w_init": width, "h_init": height})
                node.knob("w_init").setVisible(False)
                node.knob("h_init").setVisible(False)
            refresh_node(node)

    def imprint_siblings(self):
        """Imprint sibling data on the loaded nodes.

        - add siblings names to placeholder attributes (nodes loaded with it)
        - add the representation id to the attributes of all the other nodes
        """

        loaded_nodes = self.data["last_loaded"]
        loaded_nodes_set = set(loaded_nodes)
        data = {"repre_id": str(self.data["last_repre_id"])}

        for node in loaded_nodes:
            node_knobs = node.knobs()
            if "builder_type" not in node_knobs:
                # save the id of representation for all imported nodes
                imprint(node, data)
                node.knob("repre_id").setVisible(False)
                refresh_node(node)
                continue

            if (
                "is_placeholder" not in node_knobs
                or (
                    "is_placeholder" in node_knobs
                    and node.knob("is_placeholder").value()
                )
            ):
                siblings = list(loaded_nodes_set - {node})
                siblings_name = get_names_from_nodes(siblings)
                siblings = {"siblings": siblings_name}
                imprint(node, siblings)
    def set_loaded_connections(self):
        """Set inputs and outputs of loaded nodes."""

        placeholder_node = self.data["node"]
        input_node, output_node = get_group_io_nodes(self.data["last_loaded"])
        for node in placeholder_node.dependent():
            for idx in range(node.inputs()):
                if node.input(idx) == placeholder_node:
                    node.setInput(idx, output_node)

        for node in placeholder_node.dependencies():
            for idx in range(placeholder_node.inputs()):
                if placeholder_node.input(idx) == node:
                    input_node.setInput(0, node)

    def set_copies_connections(self, copies):
        """Set inputs and outputs of the copies.

        Args:
            copies (dict): Copied nodes by their names.
        """

        last_input, last_output = get_group_io_nodes(self.data["last_loaded"])
        siblings = get_nodes_by_names(self.data["siblings"])
        siblings_input, siblings_output = get_group_io_nodes(siblings)
        copy_input = copies[siblings_input.name()]
        copy_output = copies[siblings_output.name()]

        for node_init in siblings:
            if node_init == siblings_output:
                continue

            node_copy = copies[node_init.name()]
            for node in node_init.dependent():
                for idx in range(node.inputs()):
                    if node.input(idx) != node_init:
                        continue

                    if node in siblings:
                        copies[node.name()].setInput(idx, node_copy)
                    else:
                        last_input.setInput(0, node_copy)

            for node in node_init.dependencies():
                for idx in range(node_init.inputs()):
                    if node_init.input(idx) != node:
                        continue

                    if node_init == siblings_input:
                        copy_input.setInput(idx, node)
                    elif node in siblings:
                        node_copy.setInput(idx, copies[node.name()])
                    else:
                        node_copy.setInput(idx, last_output)

        siblings_input.setInput(0, copy_output)
    def move_to_placeholder_group(self, nodes_loaded):
        """Open the placeholder's group and copy loaded nodes into it.

        Returns:
            list: The new list of pasted nodes.
        """

        groups_name = self.data["group_name"]
        reset_selection()
        select_nodes(nodes_loaded)
        if groups_name:
            with node_tempfile() as filepath:
                nuke.nodeCopy(filepath)
                for node in nuke.selectedNodes():
                    nuke.delete(node)
                group = nuke.toNode(groups_name)
                group.begin()
                nuke.nodePaste(filepath)
                nodes_loaded = nuke.selectedNodes()
        return nodes_loaded

    def clean(self):
        # deselect all selected nodes
        placeholder_node = self.data["node"]

        # getting the latest nodes added
        nodes_init = self.data["nodes_init"]
        nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init))
        self.log.debug("Loaded nodes: {}".format(nodes_loaded))
        if not nodes_loaded:
            return

        self.data["delete"] = True

        nodes_loaded = self.move_to_placeholder_group(nodes_loaded)
        self.data["last_loaded"] = nodes_loaded
        refresh_nodes(nodes_loaded)

        # positioning of the loaded nodes
        min_x, min_y, _, _ = get_extreme_positions(nodes_loaded)
        for node in nodes_loaded:
            xpos = (node.xpos() - min_x) + placeholder_node.xpos()
            ypos = (node.ypos() - min_y) + placeholder_node.ypos()
            node.setXYpos(xpos, ypos)
        refresh_nodes(nodes_loaded)

        self.fix_z_order()  # fix the problem of z_order for backdrops
        self.imprint_siblings()

        if self.data["nb_children"] == 0:
            # save initial nodes positions and dimensions, update them
            # and set inputs and outputs of loaded nodes

            self.imprint_inits()
            self.update_nodes(nuke.allNodes(), nodes_loaded)
            self.set_loaded_connections()

        elif self.data["siblings"]:
            # create copies of placeholder siblings for the new loaded nodes,
            # set their inputs and outputs and update all nodes positions and
            # dimensions and siblings names

            siblings = get_nodes_by_names(self.data["siblings"])
            refresh_nodes(siblings)
            copies = self.create_sib_copies()
            new_nodes = list(copies.values())  # copied nodes
            self.update_nodes(new_nodes, nodes_loaded)
            placeholder_node.removeKnob(placeholder_node.knob("siblings"))
            new_nodes_name = get_names_from_nodes(new_nodes)
            imprint(placeholder_node, {"siblings": new_nodes_name})
            self.set_copies_connections(copies)

            self.update_nodes(
                nuke.allNodes(),
                new_nodes + nodes_loaded,
                20
            )

            new_siblings = get_names_from_nodes(new_nodes)
            self.data["siblings"] = new_siblings

        else:
            # if the placeholder doesn't have siblings, the loaded
            # nodes will be placed in a free space

            xpointer, ypointer = find_free_space_to_paste_nodes(
                nodes_loaded, direction="bottom", offset=200
            )
            node = nuke.createNode("NoOp")
            reset_selection()
            nuke.delete(node)
            for node in nodes_loaded:
                xpos = (node.xpos() - min_x) + xpointer
                ypos = (node.ypos() - min_y) + ypointer
                node.setXYpos(xpos, ypos)

        self.data["nb_children"] += 1
        reset_selection()
        # go back to root group
        nuke.root().begin()
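`clean()` is effectively the post-load hook for a single placeholder. A sketch of the per-placeholder order of operations as implied by the calls above (sequencing assumed to be driven by the abstract loader):

    placeholder.get_data(node)              # read imprinted knobs into .data
    loader.preload(placeholder, loaders_by_name, last_representation)
    # ...the abstract loader loads the representation...
    placeholder.clean()                     # regroup, reposition, reconnect
    loader.delete_placeholder(placeholder)  # drop or recycle the NoOp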
    def get_representations(self, current_asset_doc, linked_asset_docs):
        project_name = legacy_io.active_project()

        builder_type = self.data["builder_type"]
        if builder_type == "context_asset":
            context_filters = {
                "asset": [re.compile(self.data["asset"])],
                "subset": [re.compile(self.data["subset"])],
                "hierarchy": [re.compile(self.data["hierarchy"])],
                "representations": [self.data["representation"]],
                "family": [self.data["family"]]
            }

        elif builder_type != "linked_asset":
            context_filters = {
                "asset": [
                    current_asset_doc["name"],
                    re.compile(self.data["asset"])
                ],
                "subset": [re.compile(self.data["subset"])],
                "hierarchy": [re.compile(self.data["hierarchy"])],
                "representation": [self.data["representation"]],
                "family": [self.data["family"]]
            }

        else:
            asset_regex = re.compile(self.data["asset"])
            linked_asset_names = []
            for asset_doc in linked_asset_docs:
                asset_name = asset_doc["name"]
                if asset_regex.match(asset_name):
                    linked_asset_names.append(asset_name)

            if not linked_asset_names:
                return []

            context_filters = {
                "asset": linked_asset_names,
                "subset": [re.compile(self.data["subset"])],
                "hierarchy": [re.compile(self.data["hierarchy"])],
                "representation": [self.data["representation"]],
                "family": [self.data["family"]],
            }

        return list(get_representations(
            project_name,
            context_filters=context_filters
        ))

    def err_message(self):
        return (
            "Error while trying to load a representation.\n"
            "Either the subset wasn't published or the template is malformed."
            "\n\n"
            "Builder was looking for:\n{attributes}".format(
                attributes="\n".join([
                    "{}: {}".format(key.title(), value)
                    for key, value in self.data.items()
                ])
            )
        )

@@ -1,16 +0,0 @@
# What is `testhost`
Host `testhost` was created to fake a running host for testing of the publisher.

It does not have any proper launch mechanism at the moment. There is a python script `./run_publish.py` which will show the publisher window. The script requires a few variables to be set before it can run. Execution will register the host `testhost`, register global publish plugins and register creator and publish plugins from `./plugins`.

## Data
Created instances and context data are stored into json files inside the `./api` folder. This can be easily modified to save them to a different place.

## Plugins
Test host has a few plugins to be able to test publishing.

### Creators
They are just example plugins using functions from `api` to create/remove/update data. One of them is an auto creator, which means it is triggered on each reset of the create context. The others are manual creators, both creating the same family.

### Publishers
Collectors are example plugins using `get_attribute_defs` to define attributes for specific families or for the context. Validators are there to test `PublishValidationError`.
@@ -1,43 +0,0 @@
import os
import logging
import pyblish.api

from openpype.pipeline import register_creator_plugin_path

from .pipeline import (
    ls,
    list_instances,
    update_instances,
    remove_instances,
    get_context_data,
    update_context_data,
    get_context_title
)


HOST_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")

log = logging.getLogger(__name__)


def install():
    log.info("OpenPype - Installing TestHost integration")
    pyblish.api.register_host("testhost")
    pyblish.api.register_plugin_path(PUBLISH_PATH)
    register_creator_plugin_path(CREATE_PATH)


__all__ = (
    "ls",
    "list_instances",
    "update_instances",
    "remove_instances",
    "get_context_data",
    "update_context_data",
    "get_context_title",

    "install"
)
@@ -1 +0,0 @@
{}
@@ -1,108 +0,0 @@
[
    {
        "id": "pyblish.avalon.instance",
        "active": true,
        "family": "test",
        "subset": "testMyVariant",
        "version": 1,
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "myVariant",
        "instance_id": "a485f148-9121-46a5-8157-aa64df0fb449",
        "creator_attributes": {
            "number_key": 10,
            "ha": 10
        },
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": false
            }
        },
        "creator_identifier": "test_one"
    },
    {
        "id": "pyblish.avalon.instance",
        "active": true,
        "family": "test",
        "subset": "testMyVariant2",
        "version": 1,
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "myVariant2",
        "creator_attributes": {},
        "instance_id": "a485f148-9121-46a5-8157-aa64df0fb444",
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true
            }
        },
        "creator_identifier": "test_one"
    },
    {
        "id": "pyblish.avalon.instance",
        "active": true,
        "family": "test",
        "subset": "testMain",
        "version": 1,
        "asset": "sq01_sh0010",
        "task": "Compositing",
        "variant": "Main",
        "creator_attributes": {},
        "instance_id": "3607bc95-75f6-4648-a58d-e699f413d09f",
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true
            }
        },
        "creator_identifier": "test_two"
    },
    {
        "id": "pyblish.avalon.instance",
        "active": true,
        "family": "test",
        "subset": "testMain2",
        "version": 1,
        "asset": "sq01_sh0020",
        "task": "Compositing",
        "variant": "Main2",
        "instance_id": "4ccf56f6-9982-4837-967c-a49695dbe8eb",
        "creator_attributes": {},
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true
            }
        },
        "creator_identifier": "test_two"
    },
    {
        "id": "pyblish.avalon.instance",
        "family": "test_three",
        "subset": "test_threeMain2",
        "active": true,
        "version": 1,
        "asset": "sq01_sh0020",
        "task": "Compositing",
        "variant": "Main2",
        "instance_id": "4ccf56f6-9982-4837-967c-a49695dbe8ec",
        "creator_attributes": {},
        "publish_attributes": {
            "CollectFtrackApi": {
                "add_ftrack_family": true
            }
        }
    },
    {
        "id": "pyblish.avalon.instance",
        "family": "workfile",
        "subset": "workfileMain",
        "active": true,
        "creator_identifier": "workfile",
        "version": 1,
        "asset": "Alpaca_01",
        "task": "modeling",
        "variant": "Main",
        "instance_id": "7c9ddfc7-9f9c-4c1c-b233-38c966735fb6",
        "creator_attributes": {},
        "publish_attributes": {}
    }
]
@@ -1,155 +0,0 @@
import os
import json
from openpype.client import get_asset_by_name


class HostContext:
    instances_json_path = None
    context_json_path = None

    @classmethod
    def get_context_title(cls):
        project_name = os.environ.get("AVALON_PROJECT")
        if not project_name:
            return "TestHost"

        asset_name = os.environ.get("AVALON_ASSET")
        if not asset_name:
            return project_name

        asset_doc = get_asset_by_name(
            project_name, asset_name, fields=["data.parents"]
        )

        parents = asset_doc.get("data", {}).get("parents") or []

        hierarchy = [project_name]
        hierarchy.extend(parents)
        hierarchy.append("<b>{}</b>".format(asset_name))
        task_name = os.environ.get("AVALON_TASK")
        if task_name:
            hierarchy.append(task_name)

        return "/".join(hierarchy)

    @classmethod
    def get_current_dir_filepath(cls, filename):
        return os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            filename
        )

    @classmethod
    def get_instances_json_path(cls):
        if cls.instances_json_path is None:
            cls.instances_json_path = cls.get_current_dir_filepath(
                "instances.json"
            )
        return cls.instances_json_path

    @classmethod
    def get_context_json_path(cls):
        if cls.context_json_path is None:
            cls.context_json_path = cls.get_current_dir_filepath(
                "context.json"
            )
        return cls.context_json_path

    @classmethod
    def add_instance(cls, instance):
        instances = cls.get_instances()
        instances.append(instance)
        cls.save_instances(instances)

    @classmethod
    def save_instances(cls, instances):
        json_path = cls.get_instances_json_path()
        with open(json_path, "w") as json_stream:
            json.dump(instances, json_stream, indent=4)

    @classmethod
    def get_instances(cls):
        json_path = cls.get_instances_json_path()
        if not os.path.exists(json_path):
            instances = []
            with open(json_path, "w") as json_stream:
                json.dump(instances, json_stream)
        else:
            with open(json_path, "r") as json_stream:
                instances = json.load(json_stream)
        return instances

    @classmethod
    def get_context_data(cls):
        json_path = cls.get_context_json_path()
        if not os.path.exists(json_path):
            data = {}
            with open(json_path, "w") as json_stream:
                json.dump(data, json_stream)
        else:
            with open(json_path, "r") as json_stream:
                data = json.load(json_stream)
        return data

    @classmethod
    def save_context_data(cls, data):
        json_path = cls.get_context_json_path()
        with open(json_path, "w") as json_stream:
            json.dump(data, json_stream, indent=4)


def ls():
    return []


def list_instances():
    return HostContext.get_instances()


def update_instances(update_list):
    updated_instances = {}
    for instance, _changes in update_list:
        updated_instances[instance.id] = instance.data_to_store()

    instances = HostContext.get_instances()
    for instance_data in instances:
        instance_id = instance_data["instance_id"]
        if instance_id in updated_instances:
            new_instance_data = updated_instances[instance_id]
            old_keys = set(instance_data.keys())
            new_keys = set(new_instance_data.keys())
            instance_data.update(new_instance_data)
            for key in (old_keys - new_keys):
                instance_data.pop(key)

    HostContext.save_instances(instances)


def remove_instances(instances):
    if not isinstance(instances, (tuple, list)):
        instances = [instances]

    current_instances = HostContext.get_instances()
    for instance in instances:
        instance_id = instance.data["instance_id"]
        found_idx = None
        for idx, _instance in enumerate(current_instances):
            if instance_id == _instance["instance_id"]:
                found_idx = idx
                break

        if found_idx is not None:
            current_instances.pop(found_idx)
    HostContext.save_instances(current_instances)


def get_context_data():
    return HostContext.get_context_data()


def update_context_data(data, changes):
    HostContext.save_context_data(data)


def get_context_title():
    return HostContext.get_context_title()
@@ -1,75 +0,0 @@
from openpype.lib import NumberDef
from openpype.client import get_asset_by_name
from openpype.pipeline import (
    legacy_io,
    AutoCreator,
    CreatedInstance,
)
from openpype.hosts.testhost.api import pipeline


class MyAutoCreator(AutoCreator):
    identifier = "workfile"
    family = "workfile"

    def get_instance_attr_defs(self):
        output = [
            NumberDef("number_key", label="Number")
        ]
        return output

    def collect_instances(self):
        for instance_data in pipeline.list_instances():
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                subset_name = instance_data["subset"]
                instance = CreatedInstance(
                    self.family, subset_name, instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        pipeline.update_instances(update_list)

    def create(self):
        existing_instance = None
        for instance in self.create_context.instances:
            if instance.family == self.family:
                existing_instance = instance
                break

        variant = "Main"
        project_name = legacy_io.Session["AVALON_PROJECT"]
        asset_name = legacy_io.Session["AVALON_ASSET"]
        task_name = legacy_io.Session["AVALON_TASK"]
        host_name = legacy_io.Session["AVALON_APP"]

        if existing_instance is None:
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            data = {
                "asset": asset_name,
                "task": task_name,
                "variant": variant
            }
            data.update(self.get_dynamic_data(
                variant, task_name, asset_doc, project_name, host_name
            ))

            new_instance = CreatedInstance(
                self.family, subset_name, data, self
            )
            self._add_instance_to_context(new_instance)

        elif (
            existing_instance["asset"] != asset_name
            or existing_instance["task"] != task_name
        ):
            asset_doc = get_asset_by_name(project_name, asset_name)
            subset_name = self.get_subset_name(
                variant, task_name, asset_doc, project_name, host_name
            )
            existing_instance["asset"] = asset_name
            existing_instance["task"] = task_name
@@ -1,94 +0,0 @@
import json
from openpype import resources
from openpype.hosts.testhost.api import pipeline
from openpype.lib import (
    UISeparatorDef,
    UILabelDef,
    BoolDef,
    NumberDef,
    FileDef,
)
from openpype.pipeline import (
    Creator,
    CreatedInstance,
)


class TestCreatorOne(Creator):
    identifier = "test_one"
    label = "test"
    family = "test"
    description = "Testing creator of testhost"

    create_allow_context_change = False

    def get_icon(self):
        return resources.get_openpype_splash_filepath()

    def collect_instances(self):
        for instance_data in pipeline.list_instances():
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                instance = CreatedInstance.from_existing(
                    instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        pipeline.update_instances(update_list)

    def remove_instances(self, instances):
        pipeline.remove_instances(instances)
        for instance in instances:
            self._remove_instance_from_context(instance)

    def create(self, subset_name, data, pre_create_data):
        print("Data that can be used in create:\n{}".format(
            json.dumps(pre_create_data, indent=4)
        ))
        new_instance = CreatedInstance(self.family, subset_name, data, self)
        pipeline.HostContext.add_instance(new_instance.data_to_store())
        self.log.info(new_instance.data)
        self._add_instance_to_context(new_instance)

    def get_default_variants(self):
        return [
            "myVariant",
            "variantTwo",
            "different_variant"
        ]

    def get_instance_attr_defs(self):
        output = [
            NumberDef("number_key", label="Number"),
        ]
        return output

    def get_pre_create_attr_defs(self):
        output = [
            BoolDef("use_selection", label="Use selection"),
            UISeparatorDef(),
            UILabelDef("Testing label"),
            FileDef("filepath", folders=True, label="Filepath"),
            FileDef(
                "filepath_2", multipath=True, folders=True, label="Filepath 2"
            )
        ]
        return output

    def get_detail_description(self):
        return """# Relictus funes est Nyseides currusque nunc oblita

## Causa sed

Lorem markdownum posito consumptis, *plebe Amorque*, abstitimus rogatus fictaque
gladium Circe, nos? Bos aeternum quae. Utque me, si aliquem cladis, et vestigia
arbor, sic mea ferre lacrimae agantur prospiciens hactenus. Amanti dentes pete,
vos quid laudemque rastrorumque terras in gratantibus **radix** erat cedemus?

Pudor tu ponderibus verbaque illa; ire ergo iam Venus patris certe longae
cruentum lecta, et quaeque. Sit doce nox. Anteit ad tempora magni plenaque et
videres mersit sibique auctor in tendunt mittit cunctos ventisque gravitate
volucris quemquam Aeneaden. Pectore Mensis somnus; pectora
[ferunt](http://www.mox.org/oculosbracchia)? Fertilitatis bella dulce et suum?
"""
@@ -1,74 +0,0 @@
from openpype.lib import NumberDef, TextDef
from openpype.hosts.testhost.api import pipeline
from openpype.pipeline import (
    Creator,
    CreatedInstance,
)


class TestCreatorTwo(Creator):
    identifier = "test_two"
    label = "test"
    family = "test"
    description = "A second testing creator"

    def get_icon(self):
        return "cube"

    def create(self, subset_name, data, pre_create_data):
        new_instance = CreatedInstance(self.family, subset_name, data, self)
        pipeline.HostContext.add_instance(new_instance.data_to_store())
        self.log.info(new_instance.data)
        self._add_instance_to_context(new_instance)

    def collect_instances(self):
        for instance_data in pipeline.list_instances():
            creator_id = instance_data.get("creator_identifier")
            if creator_id == self.identifier:
                instance = CreatedInstance.from_existing(
                    instance_data, self
                )
                self._add_instance_to_context(instance)

    def update_instances(self, update_list):
        pipeline.update_instances(update_list)

    def remove_instances(self, instances):
        pipeline.remove_instances(instances)
        for instance in instances:
            self._remove_instance_from_context(instance)

    def get_instance_attr_defs(self):
        output = [
            NumberDef("number_key"),
            TextDef("text_key")
        ]
        return output

    def get_detail_description(self):
        return """# Lorem ipsum, dolor sit amet. [](https://github.com/sindresorhus/awesome)

> A curated list of awesome lorem ipsum generators.

Inspired by the [awesome](https://github.com/sindresorhus/awesome) list thing.


## Table of Contents

- [Legend](#legend)
- [Practical](#briefcase-practical)
- [Whimsical](#roller_coaster-whimsical)
- [Animals](#rabbit-animals)
- [Eras](#tophat-eras)
- [Famous Individuals](#sunglasses-famous-individuals)
- [Music](#microphone-music)
- [Food and Drink](#pizza-food-and-drink)
- [Geographic and Dialects](#earth_africa-geographic-and-dialects)
- [Literature](#books-literature)
- [Miscellaneous](#cyclone-miscellaneous)
- [Sports and Fitness](#bicyclist-sports-and-fitness)
- [TV and Film](#movie_camera-tv-and-film)
- [Tools, Apps, and Extensions](#wrench-tools-apps-and-extensions)
- [Contribute](#contribute)
- [TODO](#todo)
"""
@@ -1,34 +0,0 @@
import pyblish.api

from openpype.pipeline import (
    OpenPypePyblishPluginMixin,
    attribute_definitions
)


class CollectContextDataTestHost(
    pyblish.api.ContextPlugin, OpenPypePyblishPluginMixin
):
    """Collect temp json data sent from a host context
    and the path for returning json data back to the host itself.
    """

    label = "Collect Source - Test Host"
    order = pyblish.api.CollectorOrder - 0.4
    hosts = ["testhost"]

    @classmethod
    def get_attribute_defs(cls):
        return [
            attribute_definitions.BoolDef(
                "test_bool",
                True,
                label="Bool input"
            )
        ]

    def process(self, context):
        # get json paths from os and load them
        for instance in context:
            instance.data["source"] = "testhost"
@@ -1,52 +0,0 @@
import json
import pyblish.api

from openpype.lib import attribute_definitions
from openpype.pipeline import OpenPypePyblishPluginMixin


class CollectInstanceOneTestHost(
    pyblish.api.InstancePlugin, OpenPypePyblishPluginMixin
):
    """Collect temp json data sent from a host context
    and the path for returning json data back to the host itself.
    """

    label = "Collect Instance 1 - Test Host"
    order = pyblish.api.CollectorOrder - 0.3
    hosts = ["testhost"]

    @classmethod
    def get_attribute_defs(cls):
        return [
            attribute_definitions.NumberDef(
                "version",
                default=1,
                minimum=1,
                maximum=999,
                decimals=0,
                label="Version"
            )
        ]

    def process(self, instance):
        self._debug_log(instance)

        publish_attributes = instance.data.get("publish_attributes")
        if not publish_attributes:
            return

        values = publish_attributes.get(self.__class__.__name__)
        if not values:
            return

        instance.data["version"] = values["version"]

    def _debug_log(self, instance):
        def _default_json(value):
            return str(value)

        self.log.info(
            json.dumps(instance.data, indent=4, default=_default_json)
        )
@@ -1,57 +0,0 @@
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateInstanceAssetRepair(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        pass


description = """
## Publish plugins

### Validate Scene Settings

#### Skip Resolution Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the resolution check against values from the DB.

#### Skip Timeline Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the `frameStart`, `frameEnd` check against values from the DB.

### AfterEffects Submit to Deadline

* `Use Published scene` - Set to True (green) when Deadline should use the published scene as a source instead of the uploaded local one.
* `Priority` - priority of the job on the farm
* `Primary Pool` - a list of pools fetched from the server that you can select from.
* `Secondary Pool`
* `Frames Per Task` - number of frames per chunk when the sequence is split into individual tasks (chunks)
  making up one job on the farm.
"""


class ValidateContextWithError(pyblish.api.ContextPlugin):
    """Validate the instance asset is the currently selected context asset.

    As it might happen that multiple workfiles are opened, switching
    between them would mess with the selected context.
    In that case outputs might be written under the wrong asset!

    Repair action will use the Context asset value (from Workfiles or Launcher).
    Closing and reopening with Workfiles will refresh the Context value.
    """

    label = "Validate Context With Error"
    hosts = ["testhost"]
    actions = [ValidateInstanceAssetRepair]
    order = pyblish.api.ValidatorOrder

    def process(self, context):
        raise PublishValidationError("Crashing", "Context error", description)
@ -1,57 +0,0 @@
import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateInstanceAssetRepair(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        pass


description = """
## Publish plugins

### Validate Scene Settings

#### Skip Resolution Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the resolution check against values from the DB.

#### Skip Timeline Check for Tasks

Set regex pattern(s) to look for in a Task name to skip the `frameStart`, `frameEnd` check against values from the DB.

### AfterEffects Submit to Deadline

* `Use Published scene` - Set to True (green) when Deadline should take the published scene as a source instead of the uploaded local one.
* `Priority` - priority of the job on the farm
* `Primary Pool` - list of pools fetched from the server to select from.
* `Secondary Pool`
* `Frames Per Task` - number of frames per task (chunk) the sequence is divided into,
  each making one job on the farm.
"""


class ValidateWithError(pyblish.api.InstancePlugin):
    """Validate the instance asset is the currently selected context asset.

    As it might happen that multiple workfiles are opened, switching
    between them would mess with the selected context.
    In that case outputs might be written under the wrong asset!

    Repair action will use the Context asset value (from Workfiles or Launcher).
    Closing and reopening with Workfiles will refresh the Context value.
    """

    label = "Validate With Error"
    hosts = ["testhost"]
    actions = [ValidateInstanceAssetRepair]
    order = pyblish.api.ValidatorOrder

    def process(self, instance):
        raise PublishValidationError("Crashing", "Instance error", description)

@ -1,68 +0,0 @@
import os
import sys

mongo_url = ""
project_name = ""
asset_name = ""
task_name = ""
ftrack_url = ""
ftrack_username = ""
ftrack_api_key = ""


def multi_dirname(path, times=1):
    for _ in range(times):
        path = os.path.dirname(path)
    return path


host_name = "testhost"
current_file = os.path.abspath(__file__)
openpype_dir = multi_dirname(current_file, 4)

os.environ["OPENPYPE_MONGO"] = mongo_url
os.environ["OPENPYPE_ROOT"] = openpype_dir
os.environ["AVALON_PROJECT"] = project_name
os.environ["AVALON_ASSET"] = asset_name
os.environ["AVALON_TASK"] = task_name
os.environ["AVALON_APP"] = host_name
os.environ["OPENPYPE_DATABASE_NAME"] = "openpype"
os.environ["AVALON_TIMEOUT"] = "1000"
os.environ["AVALON_DB"] = "avalon"
os.environ["FTRACK_SERVER"] = ftrack_url
os.environ["FTRACK_API_USER"] = ftrack_username
os.environ["FTRACK_API_KEY"] = ftrack_api_key

for path in [
    openpype_dir,
    r"{}\repos\avalon-core".format(openpype_dir),
    r"{}\.venv\Lib\site-packages".format(openpype_dir)
]:
    sys.path.append(path)

from Qt import QtWidgets, QtCore

from openpype.tools.publisher.window import PublisherWindow


def main():
    """Main function for testing purposes."""
    import pyblish.api
    from openpype.pipeline import install_host
    from openpype.modules import ModulesManager
    from openpype.hosts.testhost import api as testhost

    manager = ModulesManager()
    for plugin_path in manager.collect_plugin_paths()["publish"]:
        pyblish.api.register_plugin_path(plugin_path)

    install_host(testhost)

    QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)
    app = QtWidgets.QApplication([])
    window = PublisherWindow()
    window.show()
    app.exec_()


if __name__ == "__main__":
    main()

@ -17,6 +17,8 @@ from .publish_plugins import (
    RepairAction,
    RepairContextAction,

    Extractor,
)

from .lib import (

@ -58,6 +60,8 @@ __all__ = (
    "RepairAction",
    "RepairContextAction",

    "Extractor",

    "DiscoverResult",
    "publish_plugins_discover",
    "load_help_content_from_plugin",

@ -8,7 +8,8 @@ from openpype.lib import BoolDef
from .lib import (
    load_help_content_from_plugin,
    get_errored_instances_from_context,
    get_errored_plugins_from_context
    get_errored_plugins_from_context,
    get_instance_staging_dir,
)

@ -241,3 +242,25 @@ class RepairContextAction(pyblish.api.Action):
        if plugin in errored_plugins:
            self.log.info("Attempting fix ...")
            plugin.repair(context)


class Extractor(pyblish.api.InstancePlugin):
    """Extractor base class.

    The extractor base class implements a "staging_dir" function used to
    generate a temporary directory for an instance to extract to.

    This temporary directory is generated through `tempfile.mkdtemp()`.
    """

    order = 2.0

    def staging_dir(self, instance):
        """Provide a temporary directory in which to store extracted files.

        Upon calling this method the staging directory is stored inside
        the instance.data['stagingDir'].
        """

        return get_instance_staging_dir(instance)

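For orientation, a subclass would typically call `staging_dir()` from `process()` to obtain a writable output location. A minimal sketch (the `ExtractExample` class and the `example` family are placeholders, not part of the codebase):

```python
import os

from openpype.pipeline import publish


class ExtractExample(publish.Extractor):
    """Hypothetical extractor illustrating staging_dir() usage."""

    label = "Extract Example"
    families = ["example"]

    def process(self, instance):
        # Creates the temp directory and stores it
        # in instance.data["stagingDir"]
        staging_dir = self.staging_dir(instance)

        path = os.path.join(staging_dir, "output.txt")
        with open(path, "w") as stream:
            stream.write("extracted data")
```
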
@ -177,7 +177,7 @@ class AbstractTemplateLoader:
            build_info["profiles"],
            {
                "task_types": task_type,
                "tasks": task_name
                "task_names": task_name
            }
        )

@ -1,3 +1,4 @@
import os
from importlib import import_module
from openpype.lib import classes_from_module
from openpype.host import HostBase

@ -30,7 +31,7 @@ def build_workfile_template(*args):
    template_loader.populate_template()


def update_workfile_template(args):
def update_workfile_template(*args):
    template_loader = build_template_loader()
    template_loader.update_missing_containers()

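The `*args` signature matters because UI menu callbacks may pass extra positional arguments, and a plain call with no arguments would raise a `TypeError` against `def update_workfile_template(args)`. A quick illustration:

```python
def update_workfile_template(*args):
    # Any positional arguments coming from a menu callback are ignored
    pass

update_workfile_template()             # OK
update_workfile_template("menu", 123)  # also OK
```
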
@ -42,7 +43,10 @@ def build_template_loader():
    if isinstance(host, HostBase):
        host_name = host.name
    else:
        host_name = host.__name__.partition('.')[2]
        host_name = os.environ.get("AVALON_APP")
        if not host_name:
            host_name = host.__name__.split(".")[-2]

    module_path = _module_path_format.format(host=host_name)
    module = import_module(module_path)
    if not module:

@ -85,7 +85,6 @@ class InstancePlugin(pyblish.api.InstancePlugin):
        super(InstancePlugin, self).__init__(*args, **kwargs)


# NOTE: This class is used in so many places I gave up moving it
class Extractor(pyblish.api.InstancePlugin):
    """Extractor base class.

@ -8,10 +8,10 @@ import shutil
import clique
import six
import pyblish
import pyblish.api

import openpype
import openpype.api
from openpype import resources, PACKAGE_DIR
from openpype.pipeline import publish
from openpype.lib import (
    run_openpype_process,

@ -23,7 +23,7 @@ from openpype.lib import (
)


class ExtractBurnin(openpype.api.Extractor):
class ExtractBurnin(publish.Extractor):
    """
    Extractor to create video with pre-defined burnins from
    existing extracted video representation.

@ -400,7 +400,7 @@ class ExtractBurnin(openpype.api.Extractor):
        # Use OpenPype default font
        if not font_filepath:
            font_filepath = openpype.api.resources.get_liberation_font_path()
            font_filepath = resources.get_liberation_font_path()

        burnin_options["font"] = font_filepath

@ -981,7 +981,7 @@ class ExtractBurnin(openpype.api.Extractor):
        """Return path to python script for burnin processing."""
        scriptpath = os.path.normpath(
            os.path.join(
                openpype.PACKAGE_DIR,
                PACKAGE_DIR,
                "scripts",
                "otio_burnin.py"
            )

@ -1,10 +1,11 @@
import os
import pyblish.api
import openpype.api
import opentimelineio as otio

from openpype.pipeline import publish


class ExtractOTIOFile(openpype.api.Extractor):
class ExtractOTIOFile(publish.Extractor):
    """
    Extractor exporting an OTIO file
    """

@ -18,7 +18,12 @@ import os
import clique
import opentimelineio as otio
from pyblish import api
import openpype

from openpype.lib import (
    get_ffmpeg_tool_path,
    run_subprocess,
)
from openpype.pipeline import publish
from openpype.pipeline.editorial import (
    otio_range_to_frame_range,
    trim_media_range,

@ -28,7 +33,7 @@ from openpype.pipeline.editorial import (
)


class ExtractOTIOReview(openpype.api.Extractor):
class ExtractOTIOReview(publish.Extractor):
    """
    Extract OTIO timeline into one concatenated image sequence file.

@ -334,7 +339,7 @@ class ExtractOTIOReview(openpype.api.Extractor):
            otio.time.TimeRange: trimmed available range
        """
        # get rendering app path
        ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
        ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")

        # create path and frame start to destination
        output_path, out_frame_start = self._get_ffmpeg_output()

@ -397,7 +402,7 @@ class ExtractOTIOReview(openpype.api.Extractor):
        ])
        # execute
        self.log.debug("Executing: {}".format(" ".join(command)))
        output = openpype.api.run_subprocess(
        output = run_subprocess(
            command, logger=self.log
        )
        self.log.debug("Output: {}".format(output))

@ -6,18 +6,24 @@ Requires:
"""

import os
from pyblish import api
import openpype
from copy import deepcopy

import pyblish.api

from openpype.lib import (
    get_ffmpeg_tool_path,
    run_subprocess,
)
from openpype.pipeline import publish
from openpype.pipeline.editorial import frames_to_seconds


class ExtractOTIOTrimmingVideo(openpype.api.Extractor):
class ExtractOTIOTrimmingVideo(publish.Extractor):
    """
    Trim a video file longer than the required length
    """

    order = api.ExtractorOrder
    order = pyblish.api.ExtractorOrder
    label = "Extract OTIO trim longer video"
    families = ["trim"]
    hosts = ["resolve", "hiero", "flame"]

@ -70,7 +76,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor):
        """
        # get rendering app path
        ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
        ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")

        # create path to destination
        output_path = self._get_ffmpeg_output(input_file_path)

@ -96,7 +102,7 @@ class ExtractOTIOTrimmingVideo(openpype.api.Extractor):
        # execute
        self.log.debug("Executing: {}".format(" ".join(command)))
        output = openpype.api.run_subprocess(
        output = run_subprocess(
            command, logger=self.log
        )
        self.log.debug("Output: {}".format(output))

@ -1,19 +1,22 @@
import os
from pprint import pformat
import re
import openpype.api
import pyblish
from pprint import pformat

import pyblish.api

from openpype.lib import (
    path_to_subprocess_arg,
    run_subprocess,
    get_ffmpeg_tool_path,
    get_ffprobe_data,
    get_ffprobe_streams,
    get_ffmpeg_codec_args,
    get_ffmpeg_format_args,
)
from openpype.pipeline import publish


class ExtractReviewSlate(openpype.api.Extractor):
class ExtractReviewSlate(publish.Extractor):
    """
    Will add slate frame at the start of the video files
    """

@ -158,7 +161,7 @@ class ExtractReviewSlate(openpype.api.Extractor):
        input_args.extend([
            "-loop", "1",
            "-i", openpype.lib.path_to_subprocess_arg(slate_path),
            "-i", path_to_subprocess_arg(slate_path),
            "-r", str(input_frame_rate),
            "-frames:v", "1",
        ])

@ -267,7 +270,7 @@ class ExtractReviewSlate(openpype.api.Extractor):
        self.log.debug(
            "Slate Executing: {}".format(slate_subprocess_cmd)
        )
        openpype.api.run_subprocess(
        run_subprocess(
            slate_subprocess_cmd, shell=True, logger=self.log
        )

@ -348,7 +351,7 @@ class ExtractReviewSlate(openpype.api.Extractor):
            "Executing concat filter: {}".format
            (" ".join(concat_args))
        )
        openpype.api.run_subprocess(
        run_subprocess(
            concat_args, logger=self.log
        )

@ -533,7 +536,7 @@ class ExtractReviewSlate(openpype.api.Extractor):
        self.log.debug("Silent Slate Executing: {}".format(
            " ".join(slate_silent_args)
        ))
        openpype.api.run_subprocess(
        run_subprocess(
            slate_silent_args, logger=self.log
        )

@ -1,14 +1,16 @@
import os
from pprint import pformat

import pyblish.api
import openpype.api

from openpype.lib import (
    get_ffmpeg_tool_path,
    run_subprocess,
)
from pprint import pformat
from openpype.pipeline import publish


class ExtractTrimVideoAudio(openpype.api.Extractor):
class ExtractTrimVideoAudio(publish.Extractor):
    """Trim with ffmpeg "mov" and "wav" files."""

    # must be before `ExtractThumbnailSP`

@ -98,7 +100,7 @@ class ExtractTrimVideoAudio(openpype.api.Extractor):
        joined_args = " ".join(ffmpeg_args)
        self.log.info(f"Processing: {joined_args}")
        openpype.api.run_subprocess(
        run_subprocess(
            ffmpeg_args, logger=self.log
        )

@ -980,4 +980,4 @@
"ValidateNoAnimation": false
}
}
}
}

@ -325,5 +325,8 @@
            }
        ]
    },
    "templated_workfile_build": {
        "profiles": []
    },
    "filters": {}
}

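The `profiles` list is empty by default. Judging from the `AbstractTemplateLoader` filtering shown earlier, each profile is matched on `task_types` and `task_names`; any further fields (such as a template path) are assumptions here, not confirmed by this diff:

```python
# Assumed profile shape, expressed as a Python dict for illustration
templated_workfile_build = {
    "profiles": [
        {
            "task_types": ["Compositing"],           # matched by the loader
            "task_names": ["comp"],                  # matched by the loader
            "path": "/templates/comp_template.nk",   # hypothetical field
        }
    ]
}
```
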
@ -308,6 +308,10 @@
        {
            "type": "schema_template",
            "name": "template_workfile_options"
        },
        {
            "type": "schema",
            "name": "schema_templated_workfile_build"
        },
        {
            "type": "schema",
            "name": "schema_publish_gui_filter"

@ -17,7 +17,7 @@
            "type": "task-types-enum"
        },
        {
            "key": "tasks",
            "key": "task_names",
            "label": "Task names",
            "type": "list",
            "object_type": "text"

@ -81,7 +81,7 @@ class AssetModel(TreeModel):
        for asset in current_assets:
            # get label from data, otherwise use name
            data = asset.get("data", {})
            label = data.get("label", asset["name"])
            label = data.get("label") or asset["name"]
            tags = data.get("tags", [])

            # store for the asset for optimization

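The change from a `dict.get` default to an `or` fallback is subtle but deliberate: `get` only falls back when the key is missing, while `or` also falls back when the stored label is an empty string or `None`:

```python
data = {"label": ""}

data.get("label", "asset_name")    # -> "" (empty label wins)
data.get("label") or "asset_name"  # -> "asset_name"
```
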
@ -20,17 +20,17 @@ It defines:
- Colour Management
- File Formats

Anatomy is the only configuration that is always saved as project override. This is to make sure, that any updates to OpenPype or Studio default values, don't affect currently running productions.
Anatomy is the only configuration that is always saved as a project override. This is to make sure that any updates to OpenPype or Studio default values don't affect currently running productions.

![anatomy_01](assets/settings/anatomy_01.png)

## Roots

Roots define where files are stored with path to shared folder. It is required to set root path for each platform you are using in studio. All paths must point to same folder!
Roots define where files are stored with a path to a shared folder. It is required to set the root path for each platform you are using in the studio. All paths must point to the same folder!

![settings_roots](assets/settings/anatomy_roots.png)

It is possible to set multiple roots when necessary. That may be handy when you need to store specific type of data on another disk.
It is possible to set multiple roots when necessary. That may be handy when you need to store a specific type of data on another disk.
![settings_multiroots](assets/settings/anatomy_02.png)

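Roots are then referenced at the start of path templates. A simplified illustration of how a multi-root setup resolves (plain string formatting, not the actual Anatomy API):

```python
# Platform-specific root values, resolved per machine
roots = {
    "work": "/mnt/projects",    # e.g. "P:/projects" on Windows
    "render": "/mnt/renders",
}

template = "{root[work]}/{project}/{asset}"
print(template.format(root=roots, project="demo", asset="sh010"))
# -> "/mnt/projects/demo/sh010"
```
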
@ -40,7 +40,7 @@ Note how multiple roots are used here, to push different types of files to diffe
## Templates

Templates define project's folder structure and filenames.
Templates define the project's folder structure and filenames.

We have a few required anatomy templates for OpenPype to work properly, however we keep adding more when needed.

@ -100,14 +100,36 @@ We have a few required anatomy templates for OpenPype to work properly, however
</div>
</div>

### Anatomy reference keys

Anatomy templates have the ability to use "referenced keys". The best example is `path` in publish or work templates, which just contains references to `folder` and `file` (`{@folder}/{@file}`). Any change in the folder or file template is propagated to the path template. Another example is the simplification of version and frame formatting with padding. You can notice that the keys `{@version}` and `{@frame}` are used in the default templates. They reference `Anatomy` -> `Templates` -> `Version` or `Frame`, which handle version and frame formatting with padding.

So if you set `project_anatomy/templates/defaults/version_padding` to `5`, the `{@version}` key is transformed to `v{version:0>5}` automatically and version numbers in paths will have 5 digits -> `v00001`.

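To illustrate the padding mechanics (a simplified sketch, not the actual Anatomy implementation):

```python
version_padding = 5

# "{@version}" resolves to a padded format string such as "v{version:0>5}"
version_template = "v{{version:0>{}}}".format(version_padding)

print(version_template)                    # -> "v{version:0>5}"
print(version_template.format(version=1))  # -> "v00001"
```
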
### Optional keys

In some cases of template formatting not all keys are available and they should simply be ignored. For example `{frame}` should be available only for sequences, but we have a single publish template. To handle these cases it is possible to use special characters to mark a segment of the template which should be ignored if it can't be filled because of missing keys. To mark these segments use `<` and `>`.

Template `{project[code]}_{asset}_{subset}<_{output}><.{@frame}>.{ext}` can handle all 4 possible situations where the `output` and `frame` keys are available or not. The optional segments can contain additional text, like the dot (`.`) for frame and the underscore (`_`) for output in the example; those are also ignored if the keys are not available. Optional segments without formatting keys are kept untouched: `<br/>` stays as `<br/>`. It is possible to nest optional segments inside optional segments: `<{asset}<.{@frame}><br/>>` may result in an empty string if the `asset` key is not available.

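A rough sketch of the basic behaviour of optional segments (nesting is deliberately not handled here; this is illustrative only, not OpenPype's actual formatting code, and `{@frame}` is simplified to `{frame}`):

```python
import re


def format_optional(template, data):
    """Drop <...> segments whose formatting keys are missing."""
    def fill(match):
        segment = match.group(1)
        if "{" not in segment:
            # Optional segments without formatting keys stay untouched
            return match.group(0)
        try:
            return segment.format(**data)
        except KeyError:
            return ""

    return re.sub(r"<([^<>]*)>", fill, template).format(**data)


template = "{code}_{asset}<_{output}><.{frame}>.{ext}"
print(format_optional(
    template,
    {"code": "prj", "asset": "sh010", "frame": "0001", "ext": "exr"}
))
# -> "prj_sh010.0001.exr"  ("output" is missing, so "_{output}" is dropped)
```
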
## Attributes

Project attributes are used as default values for new assets created under the project, except `Applications` and `Active project`, which are project specific. Values of attributes that are **not** project specific are always used from assets. So if `tools` are not loading as expected, it is because the assets have different values.

![settings_anatomy_attributes](assets/settings/anatomy_attributes.png)

**Most attributes don't need a detailed explanation.**

| Attribute | Description |
| --- | --- |
| `Applications` | List of applications that can be used in the project. At the moment used only as a possible filter of applications. |
| `Tools` | List of application tools. This value can be overridden per asset. |
| `Active project` | Project won't be visible in tools if disabled.<br/> - To revert, check the `Show Inactive projects` checkbox in project settings. |

## Task Types

Current state of default Task descriptors.
Available task types on a project. Each task on an asset references a task type on the project, which gives access to additional task type attributes. At the moment only `short_name` is available (it can be used in templates as `{task[short_name]}`).

![settings_tasktypes](assets/settings/tasks.png)

|
### Assembling edit from published shot versions

<iframe width="512px" height="288px" src="https://www.youtube.com/embed/5Wd6X-71vbg" frameborder="0" modestbranding="1" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen="1"></iframe>


# Nuke Build Workfile
This tool initialises the Node Graph from a pre-created template.

### Add a profile
The path to the template that will be used in the initialisation must be added as a profile in the Project Settings.

![nuke_addProfile](assets/nuke_addProfile.png)

### Create Place Holder

![nuke_placeholder](assets/nuke_placeholder.png)

This tool creates a Place Holder, which is a node that will be replaced by published instances.

![nuke_createPlaceHolder](assets/nuke_createPlaceHolder.png)
#### Result
- Creates a red node called `PLACEHOLDER` which can be manipulated as needed in the Node Graph.

![nuke_placeHolderNode](assets/nuke_placeHolderNode.png)

:::note
All published instances that will replace the place holder must contain unique input and output nodes, unless they are imported as a single node.
:::

![nuke_publishedinstance](assets/nuke_publishedinstance.png)

The information about these objects is provided by the user by filling in the extra attributes of the Place Holder.

![nuke_fillingExtraAttributes](assets/nuke_fillingExtraAttributes.png)

### Update Place Holder
This tool allows the user to change the information provided in the extra attributes of the selected Place Holder.

![nuke_updatePlaceHolder](assets/nuke_updatePlaceHolder.png)

### Build Workfile from template
This tool imports the template and replaces the existing Place Holders with the corresponding published objects (which can themselves contain Place Holders). If no published items match the description given, the place holder remains in the node graph.

![nuke_buildworkfile](assets/nuke_buildworkfile.png)

#### Result
- Replaces the `PLACEHOLDER` node in the template with the published instance corresponding to the information provided in the extra attributes of the Place Holder.

![nuke_buildWorfileFromTemplate](assets/nuke_buildWorfileFromTemplate.png)

:::note
In case the instance that replaces Place Holder **A** contains another Place Holder **B** that points to many published elements, all the nodes that were imported with **A** except **B** will be duplicated for each element that replaces **B**.
:::

### Update Workfile
This tool can be used to check whether some instances were published after the last build; if so, they will be imported.

![nuke_updateWorkfile](assets/nuke_updateWorkfile.png)

:::note
Imported instances must not be deleted, because they contain extra attributes that are used to update the workfile after the place holder has been deleted.
:::

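For completeness, both actions map to the `build_workfile_template` and `update_workfile_template` functions from the templated workfile build module shown earlier in this diff. Assuming they are exposed on the Nuke host (the import path below is a guess and may differ between OpenPype versions), they could be driven from the Script Editor:

```python
# Hypothetical import path -- adjust to your OpenPype version
from openpype.hosts.nuke.api.lib_template_builder import (  # assumption
    build_workfile_template,
    update_workfile_template,
)

# Import the template and replace placeholders with published instances
build_workfile_template()

# Later on: pull in anything published since the last build
update_workfile_template()
```
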
BIN  website/docs/assets/nuke_addProfile.png  (new file, 24 KiB)
BIN  website/docs/assets/nuke_buildWorfileFromTemplate.png  (new file, 29 KiB)
BIN  website/docs/assets/nuke_buildworkfile.png  (new file, 35 KiB)
BIN  website/docs/assets/nuke_createPlaceHolder.png  (new file, 30 KiB)
BIN  website/docs/assets/nuke_fillingExtraAttributes.png  (new file, 30 KiB)
BIN  website/docs/assets/nuke_placeHolderNode.png  (new file, 3.9 KiB)
BIN  website/docs/assets/nuke_placeholder.png  (new file, 12 KiB)
BIN  website/docs/assets/nuke_publishedinstance.png  (new file, 20 KiB)
BIN  website/docs/assets/nuke_updatePlaceHolder.png  (new file, 30 KiB)
BIN  website/docs/assets/nuke_updateWorkfile.png  (new file, 30 KiB)
BIN  website/docs/assets/settings/anatomy_attributes.png  (new file, 14 KiB)