Merge branch 'develop' into master-dazzle-prod

Jakub Jezek 2020-01-20 12:03:34 +01:00
commit 3ecedad03b
21 changed files with 1244 additions and 7 deletions

pype/blender/__init__.py Normal file

@@ -0,0 +1,34 @@
import logging
from pathlib import Path
import os
import bpy
from avalon import api as avalon
from pyblish import api as pyblish
from .plugin import AssetLoader
logger = logging.getLogger("pype.blender")
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "blender", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "blender", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "blender", "create")
def install():
"""Install Blender configuration for Avalon."""
pyblish.register_plugin_path(str(PUBLISH_PATH))
avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH))
avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH))
def uninstall():
"""Uninstall Blender configuration for Avalon."""
pyblish.deregister_plugin_path(str(PUBLISH_PATH))
avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH))
avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH))

pype/blender/action.py Normal file

@@ -0,0 +1,47 @@
import bpy
import pyblish.api
from ..action import get_errored_instances_from_context
class SelectInvalidAction(pyblish.api.Action):
"""Select invalid objects in Blender when a publish plug-in failed."""
label = "Select Invalid"
on = "failed"
icon = "search"
def process(self, context, plugin):
errored_instances = get_errored_instances_from_context(context)
instances = pyblish.api.instances_by_plugin(errored_instances, plugin)
# Get the invalid nodes for the plug-ins
self.log.info("Finding invalid nodes...")
invalid = list()
for instance in instances:
invalid_nodes = plugin.get_invalid(instance)
if invalid_nodes:
if isinstance(invalid_nodes, (list, tuple)):
invalid.extend(invalid_nodes)
else:
self.log.warning(
"Failed plug-in doens't have any selectable objects."
)
bpy.ops.object.select_all(action='DESELECT')
# Make sure every node is only processed once
invalid = list(set(invalid))
if not invalid:
self.log.info("No invalid nodes found.")
return
invalid_names = [obj.name for obj in invalid]
self.log.info(
"Selecting invalid objects: %s", ", ".join(invalid_names)
)
# Select the objects and also make the last one the active object.
for obj in invalid:
obj.select_set(True)
bpy.context.view_layer.objects.active = invalid[-1]

pype/blender/plugin.py Normal file

@@ -0,0 +1,135 @@
"""Shared functionality for pipeline plugins for Blender."""
from pathlib import Path
from typing import Dict, List, Optional
import bpy
from avalon import api
VALID_EXTENSIONS = [".blend"]
def model_name(asset: str, subset: str, namespace: Optional[str] = None) -> str:
"""Return a consistent name for a model asset."""
name = f"{asset}_{subset}"
if namespace:
name = f"{namespace}:{name}"
return name
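For illustration (hypothetical asset and subset names), model_name composes its pieces like this:

    model_name("chair", "modelMain")                       # -> "chair_modelMain"
    model_name("chair", "modelMain", namespace="shot010")  # -> "shot010:chair_modelMain"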
class AssetLoader(api.Loader):
"""A basic AssetLoader for Blender
This will implement the basic logic for linking/appending assets
into another Blender scene.
The `update` method should be implemented by a sub-class, because
it's different for different types (e.g. model, rig, animation,
etc.).
"""
@staticmethod
def _get_instance_empty(instance_name: str, nodes: List) -> Optional[bpy.types.Object]:
"""Get the 'instance empty' that holds the collection instance."""
for node in nodes:
if not isinstance(node, bpy.types.Object):
continue
if (node.type == 'EMPTY' and node.instance_type == 'COLLECTION'
and node.instance_collection and node.name == instance_name):
return node
return None
@staticmethod
def _get_instance_collection(instance_name: str, nodes: List) -> Optional[bpy.types.Collection]:
"""Get the 'instance collection' (container) for this asset."""
for node in nodes:
if not isinstance(node, bpy.types.Collection):
continue
if node.name == instance_name:
return node
return None
@staticmethod
def _get_library_from_container(container: bpy.types.Collection) -> bpy.types.Library:
"""Find the library file from the container.
It traverses the objects in this collection, checks that they all
come from a single library and returns that library.
Warning:
No nested collections are supported at the moment!
"""
assert not container.children, "Nested collections are not supported."
assert container.objects, "The collection doesn't contain any objects."
libraries = set()
for obj in container.objects:
assert obj.library, f"'{obj.name}' is not linked."
libraries.add(obj.library)
assert len(libraries) == 1, f"'{container.name}' contains objects from more than 1 library."
return list(libraries)[0]
def process_asset(self,
context: dict,
name: str,
namespace: Optional[str] = None,
options: Optional[Dict] = None):
"""Must be implemented by a sub-class"""
raise NotImplementedError("Must be implemented by a sub-class")
def load(self,
context: dict,
name: Optional[str] = None,
namespace: Optional[str] = None,
options: Optional[Dict] = None) -> Optional[bpy.types.Collection]:
"""Load asset via database
Arguments:
context: Full parenthood of representation to load
name: Use pre-defined name
namespace: Use pre-defined namespace
options: Additional settings dictionary
"""
# TODO (jasper): make it possible to add the asset several times by
# just re-using the collection
assert Path(self.fname).exists(), f"{self.fname} doesn't exist."
self.process_asset(
context=context,
name=name,
namespace=namespace,
options=options,
)
# Only containerise if anything was loaded by the Loader.
nodes = self[:]
if not nodes:
return None
# Only containerise if it's not already a collection from a .blend file.
representation = context["representation"]["name"]
if representation != "blend":
from avalon.blender.pipeline import containerise
return containerise(
name=name,
namespace=namespace,
nodes=nodes,
context=context,
loader=self.__class__.__name__,
)
asset = context["asset"]["name"]
subset = context["subset"]["name"]
instance_name = model_name(asset, subset, namespace)
return self._get_instance_collection(instance_name, nodes)
def update(self, container: Dict, representation: Dict):
"""Must be implemented by a sub-class"""
raise NotImplementedError("Must be implemented by a sub-class")
def remove(self, container: Dict) -> bool:
"""Must be implemented by a sub-class"""
raise NotImplementedError("Must be implemented by a sub-class")

@@ -1581,3 +1581,70 @@ def get_dependent_nodes(nodes):
})
return connections_in, connections_out
def find_free_space_to_paste_nodes(
nodes,
group=nuke.root(),
direction="right",
offset=300):
"""
For getting coordinates in DAG (node graph) for placing new nodes
Arguments:
nodes (list): list of nuke.Node objects
group (nuke.Node) [optional]: object in which context it is
direction (str) [optional]: where we want it to be placed
[left, right, top, bottom]
offset (int) [optional]: what offset it is from rest of nodes
Returns:
xpos (int): x coordinace in DAG
ypos (int): y coordinace in DAG
"""
if len(nodes) == 0:
return 0, 0
group_xpos = list()
group_ypos = list()
# get local coordinates of all nodes
nodes_xpos = [n.xpos() for n in nodes] + \
[n.xpos() + n.screenWidth() for n in nodes]
nodes_ypos = [n.ypos() for n in nodes] + \
[n.ypos() + n.screenHeight() for n in nodes]
# get complete screen size of all nodes to be placed in
nodes_screen_width = max(nodes_xpos) - min(nodes_xpos)
nodes_screen_height = max(nodes_ypos) - min(nodes_ypos)
# get screen size (r,l,t,b) of all nodes in `group`
with group:
group_xpos = [n.xpos() for n in nuke.allNodes() if n not in nodes] + \
[n.xpos() + n.screenWidth() for n in nuke.allNodes()
if n not in nodes]
group_ypos = [n.ypos() for n in nuke.allNodes() if n not in nodes] + \
[n.ypos() + n.screenHeight() for n in nuke.allNodes()
if n not in nodes]
# calc output left
if direction in "left":
xpos = min(group_xpos) - abs(nodes_screen_width) - abs(offset)
ypos = min(group_ypos)
return xpos, ypos
# calc output right
if direction in "right":
xpos = max(group_xpos) + abs(offset)
ypos = min(group_ypos)
return xpos, ypos
# calc output top
if direction in "top":
xpos = min(group_xpos)
ypos = min(group_ypos) - abs(nodes_screen_height) - abs(offset)
return xpos, ypos
# calc output bottom
if direction in "bottom":
xpos = min(group_xpos)
ypos = max(group_ypos) + abs(offset)
return xpos, ypos
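A minimal usage sketch, assuming it runs inside a Nuke session (the LoadBackdropNodes loader later in this commit calls it the same way):

    import nuke

    nodes = nuke.selectedNodes()
    xpos, ypos = find_free_space_to_paste_nodes(
        nodes, direction="right", offset=200)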

@@ -0,0 +1,32 @@
"""Create a model asset."""
import bpy
from avalon import api
from avalon.blender import Creator, lib
class CreateModel(Creator):
"""Polygonal static geometry"""
name = "modelMain"
label = "Model"
family = "model"
icon = "cube"
def process(self):
import pype.blender
asset = self.data["asset"]
subset = self.data["subset"]
name = pype.blender.plugin.model_name(asset, subset)
collection = bpy.data.collections.new(name=name)
bpy.context.scene.collection.children.link(collection)
self.data['task'] = api.Session.get('AVALON_TASK')
lib.imprint(collection, self.data)
if (self.options or {}).get("useSelection"):
for obj in lib.get_selection():
collection.objects.link(obj)
return collection

@@ -0,0 +1,315 @@
"""Load a model asset in Blender."""
import logging
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional
import avalon.blender.pipeline
import bpy
import pype.blender
from avalon import api
logger = logging.getLogger("pype").getChild("blender").getChild("load_model")
class BlendModelLoader(pype.blender.AssetLoader):
"""Load models from a .blend file.
Because they come from a .blend file we can simply link the collection that
contains the model. There is no further need to 'containerise' it.
Warning:
Loading the same asset more than once is not properly supported at the
moment.
"""
families = ["model"]
representations = ["blend"]
label = "Link Model"
icon = "code-fork"
color = "orange"
@staticmethod
def _get_lib_collection(name: str, libpath: Path) -> Optional[bpy.types.Collection]:
"""Find the collection(s) with name, loaded from libpath.
Note:
It is assumed that only 1 matching collection is found.
"""
for collection in bpy.data.collections:
if collection.name != name:
continue
if collection.library is None:
continue
if not collection.library.filepath:
continue
collection_lib_path = str(Path(bpy.path.abspath(collection.library.filepath)).resolve())
normalized_libpath = str(Path(bpy.path.abspath(str(libpath))).resolve())
if collection_lib_path == normalized_libpath:
return collection
return None
@staticmethod
def _collection_contains_object(
collection: bpy.types.Collection, object: bpy.types.Object
) -> bool:
"""Check if the collection contains the object."""
for obj in collection.objects:
if obj == object:
return True
return False
def process_asset(
self, context: dict, name: str, namespace: Optional[str] = None,
options: Optional[Dict] = None
) -> Optional[List]:
"""
Arguments:
name: Use pre-defined name
namespace: Use pre-defined namespace
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
libpath = self.fname
asset = context["asset"]["name"]
subset = context["subset"]["name"]
lib_container = pype.blender.plugin.model_name(asset, subset)
container_name = pype.blender.plugin.model_name(
asset, subset, namespace
)
relative = bpy.context.preferences.filepaths.use_relative_paths
with bpy.data.libraries.load(
libpath, link=True, relative=relative
) as (_, data_to):
data_to.collections = [lib_container]
scene = bpy.context.scene
instance_empty = bpy.data.objects.new(
container_name, None
)
if not instance_empty.get("avalon"):
instance_empty["avalon"] = dict()
avalon_info = instance_empty["avalon"]
avalon_info.update({"container_name": container_name})
scene.collection.objects.link(instance_empty)
instance_empty.instance_type = 'COLLECTION'
container = bpy.data.collections[lib_container]
container.name = container_name
instance_empty.instance_collection = container
container.make_local()
avalon.blender.pipeline.containerise_existing(
container,
name,
namespace,
context,
self.__class__.__name__,
)
nodes = list(container.objects)
nodes.append(container)
nodes.append(instance_empty)
self[:] = nodes
return nodes
def update(self, container: Dict, representation: Dict):
"""Update the loaded asset.
This will remove all objects of the current collection, load the new
ones and add them to the collection.
If the objects of the collection are used in another collection they
will not be removed, only unlinked. Normally this should not be the
case though.
Warning:
No nested collections are supported at the moment!
"""
collection = bpy.data.collections.get(
container["objectName"]
)
libpath = Path(api.get_representation_path(representation))
extension = libpath.suffix.lower()
logger.debug(
"Container: %s\nRepresentation: %s",
pformat(container, indent=2),
pformat(representation, indent=2),
)
assert collection, (
f"The asset is not loaded: {container['objectName']}"
)
assert not (collection.children), (
"Nested collections are not supported."
)
assert libpath, (
"No existing library file found for {container['objectName']}"
)
assert libpath.is_file(), (
f"The file doesn't exist: {libpath}"
)
assert extension in pype.blender.plugin.VALID_EXTENSIONS, (
f"Unsupported file: {libpath}"
)
collection_libpath = (
self._get_library_from_container(collection).filepath
)
normalized_collection_libpath = (
str(Path(bpy.path.abspath(collection_libpath)).resolve())
)
normalized_libpath = (
str(Path(bpy.path.abspath(str(libpath))).resolve())
)
logger.debug(
"normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
normalized_collection_libpath,
normalized_libpath,
)
if normalized_collection_libpath == normalized_libpath:
logger.info("Library already loaded, not updating...")
return
# Let Blender's garbage collection take care of removing the library
# itself after removing the objects.
objects_to_remove = set()
collection_objects = list()
collection_objects[:] = collection.objects
for obj in collection_objects:
# Unlink every object
collection.objects.unlink(obj)
remove_obj = True
for coll in [
coll for coll in bpy.data.collections
if coll != collection
]:
if (
coll.objects and
self._collection_contains_object(coll, obj)
):
remove_obj = False
if remove_obj:
objects_to_remove.add(obj)
for obj in objects_to_remove:
# Only delete objects that are not used elsewhere
bpy.data.objects.remove(obj)
instance_empties = [
obj for obj in collection.users_dupli_group
if obj.name in collection.name
]
if instance_empties:
instance_empty = instance_empties[0]
container_name = instance_empty["avalon"]["container_name"]
relative = bpy.context.preferences.filepaths.use_relative_paths
with bpy.data.libraries.load(
str(libpath), link=True, relative=relative
) as (_, data_to):
data_to.collections = [container_name]
new_collection = self._get_lib_collection(container_name, libpath)
if new_collection is None:
raise ValueError(
"A matching collection '{container_name}' "
"should have been found in: {libpath}"
)
for obj in new_collection.objects:
collection.objects.link(obj)
bpy.data.collections.remove(new_collection)
# Update the representation on the collection
avalon_prop = collection[avalon.blender.pipeline.AVALON_PROPERTY]
avalon_prop["representation"] = str(representation["_id"])
def remove(self, container: Dict) -> bool:
"""Remove an existing container from a Blender scene.
Arguments:
container (avalon-core:container-1.0): Container to remove,
from `host.ls()`.
Returns:
bool: Whether the container was deleted.
Warning:
No nested collections are supported at the moment!
"""
collection = bpy.data.collections.get(
container["objectName"]
)
if not collection:
return False
assert not (collection.children), (
"Nested collections are not supported."
)
instance_parents = list(collection.users_dupli_group)
instance_objects = list(collection.objects)
for obj in instance_objects + instance_parents:
bpy.data.objects.remove(obj)
bpy.data.collections.remove(collection)
return True
class CacheModelLoader(pype.blender.AssetLoader):
"""Load cache models.
Stores the imported asset in a collection named after the asset.
Note:
At least for now it only supports Alembic files.
"""
families = ["model"]
representations = ["abc"]
label = "Link Model"
icon = "code-fork"
color = "orange"
def process_asset(
self, context: dict, name: str, namespace: Optional[str] = None,
options: Optional[Dict] = None
) -> Optional[List]:
"""
Arguments:
name: Use pre-defined name
namespace: Use pre-defined namespace
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
raise NotImplementedError("Loading of Alembic files is not yet implemented.")
# TODO (jasper): implement Alembic import.
libpath = self.fname
asset = context["asset"]["name"]
subset = context["subset"]["name"]
# TODO (jasper): evaluate use of namespace which is 'alien' to Blender.
lib_container = container_name = (
pype.blender.plugin.model_name(asset, subset, namespace)
)
relative = bpy.context.preferences.filepaths.use_relative_paths
with bpy.data.libraries.load(
libpath, link=True, relative=relative
) as (data_from, data_to):
data_to.collections = [lib_container]
scene = bpy.context.scene
instance_empty = bpy.data.objects.new(
container_name, None
)
scene.collection.objects.link(instance_empty)
instance_empty.instance_type = 'COLLECTION'
collection = bpy.data.collections[lib_container]
collection.name = container_name
instance_empty.instance_collection = collection
nodes = list(collection.objects)
nodes.append(collection)
nodes.append(instance_empty)
self[:] = nodes
return nodes

@@ -0,0 +1,16 @@
import bpy
import pyblish.api
class CollectBlenderCurrentFile(pyblish.api.ContextPlugin):
"""Inject the current working file into context"""
order = pyblish.api.CollectorOrder - 0.5
label = "Blender Current File"
hosts = ['blender']
def process(self, context):
"""Inject the current working file"""
current_file = bpy.data.filepath
context.data['currentFile'] = current_file

@@ -0,0 +1,53 @@
import typing
from typing import Generator
import bpy
import avalon.api
import pyblish.api
from avalon.blender.pipeline import AVALON_PROPERTY
class CollectModel(pyblish.api.ContextPlugin):
"""Collect the data of a model."""
hosts = ["blender"]
label = "Collect Model"
order = pyblish.api.CollectorOrder
@staticmethod
def get_model_collections() -> Generator:
"""Return all 'model' collections.
Check if the family is 'model' and if it doesn't have the
representation set. If the representation is set, it is a loaded model
and we don't want to publish it.
"""
for collection in bpy.data.collections:
avalon_prop = collection.get(AVALON_PROPERTY) or dict()
if (avalon_prop.get('family') == 'model'
and not avalon_prop.get('representation')):
yield collection
def process(self, context):
"""Collect the models from the current Blender scene."""
collections = self.get_model_collections()
for collection in collections:
avalon_prop = collection[AVALON_PROPERTY]
asset = avalon_prop['asset']
family = avalon_prop['family']
subset = avalon_prop['subset']
task = avalon_prop['task']
name = f"{asset}_{subset}"
instance = context.create_instance(
name=name,
family=family,
families=[family],
subset=subset,
asset=asset,
task=task,
)
members = list(collection.objects)
members.append(collection)
instance[:] = members
self.log.debug(instance.data)
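For reference, a hedged sketch (hypothetical values) of the Avalon metadata that CreateModel imprints and this collector reads; loaded models additionally carry a "representation" key, which is why they are skipped:

    collection[AVALON_PROPERTY] == {
        "id": "pyblish.avalon.instance",  # assumed Avalon instance id
        "family": "model",
        "asset": "chair",
        "subset": "modelMain",
        "task": "modeling",
    }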

@@ -0,0 +1,47 @@
import os
import avalon.blender.workio
import pype.api
class ExtractModel(pype.api.Extractor):
"""Extract as model."""
label = "Model"
hosts = ["blender"]
families = ["model"]
optional = True
def process(self, instance):
# Define extract output file path
stagingdir = self.staging_dir(instance)
filename = f"{instance.name}.blend"
filepath = os.path.join(stagingdir, filename)
# Perform extraction
self.log.info("Performing extraction..")
# Just save the file to a temporary location. At least for now it's no
# problem to have (possibly) extra stuff in the file.
avalon.blender.workio.save_file(filepath, copy=True)
#
# # Store reference for integration
# if "files" not in instance.data:
# instance.data["files"] = list()
#
# # instance.data["files"].append(filename)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'blend',
'ext': 'blend',
'files': filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s", instance.name, representation)

@@ -0,0 +1,49 @@
from typing import List
import bpy
import pyblish.api
import pype.blender.action
class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
"""Validate that the current mesh has UV's."""
order = pyblish.api.ValidatorOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
label = "Mesh Has UV's"
actions = [pype.blender.action.SelectInvalidAction]
optional = True
@staticmethod
def has_uvs(obj: bpy.types.Object) -> bool:
"""Check if an object has uv's."""
if not obj.data.uv_layers:
return False
for uv_layer in obj.data.uv_layers:
for polygon in obj.data.polygons:
for loop_index in polygon.loop_indices:
if not uv_layer.data[loop_index].uv:
return False
return True
@classmethod
def get_invalid(cls, instance) -> List:
invalid = []
# TODO (jasper): only check objects in the collection that will be published?
for obj in [
obj for obj in bpy.data.objects if obj.type == 'MESH'
]:
# Make sure we are in object mode.
bpy.ops.object.mode_set(mode='OBJECT')
if not cls.has_uvs(obj):
invalid.append(obj)
return invalid
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(f"Meshes found in instance without valid UV's: {invalid}")

@@ -0,0 +1,35 @@
from typing import List
import bpy
import pyblish.api
import pype.blender.action
class ValidateMeshNoNegativeScale(pyblish.api.Validator):
"""Ensure that meshes don't have a negative scale."""
order = pyblish.api.ValidatorOrder
hosts = ["blender"]
families = ["model"]
label = "Mesh No Negative Scale"
actions = [pype.blender.action.SelectInvalidAction]
@staticmethod
def get_invalid(instance) -> List:
invalid = []
# TODO (jasper): only check objects in the collection that will be published?
for obj in [
obj for obj in bpy.data.objects if obj.type == 'MESH'
]:
if any(v < 0 for v in obj.scale):
invalid.append(obj)
return invalid
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise RuntimeError(
f"Meshes found in instance with negative scale: {invalid}"
)

@@ -76,7 +76,18 @@ class CollectTemplates(pyblish.api.InstancePlugin):
"subset": subset_name,
"version": version_number,
"hierarchy": hierarchy.replace("\\", "/"),
"representation": "TEMP"}
"representation": "TEMP")}
resolution_width = instance.data.get("resolutionWidth")
resolution_height = instance.data.get("resolutionHeight")
fps = instance.data.get("fps")
if resolution_width:
template_data["resolution_width"] = resolution_width
if resolution_height:
template_data["resolution_height"] = resolution_height
if fps:
template_data["fps"] = fps
instance.data["template"] = template
instance.data["assumedTemplateData"] = template_data

@@ -281,6 +281,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
"codec": codec_args,
"_profile": profile,
"anatomy_template": "render"
"resolutionWidth": resolution_height
"resolutionWidth": resolution_width,
})
if repre_new.get('preview'):
repre_new.pop("preview")

@@ -260,6 +260,17 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
"version": int(version["name"]),
"hierarchy": hierarchy}
resolution_width = repre.get("resolutionWidth")
resolution_height = repre.get("resolutionHeight")
fps = instance.data.get("fps")
if resolution_width:
template_data["resolution_width"] = resolution_width
if resolution_height:
template_data["resolution_height"] = resolution_height
if fps:
template_data["fps"] = fps
files = repre['files']
if repre.get('stagingDir'):
stagingdir = repre['stagingDir']

@@ -21,15 +21,17 @@ class CollectAssData(pyblish.api.InstancePlugin):
objsets = instance.data['setMembers']
for objset in objsets:
objset = str(objset)
members = cmds.sets(objset, query=True)
if members is None:
self.log.warning("Skipped empty instance: \"%s\" " % objset)
continue
if objset == "content_SET":
if "content_SET" in objset:
instance.data['setMembers'] = members
elif objset == "proxy_SET":
self.log.debug('content members: {}'.format(members))
elif objset.startswith("proxy_SET"):
assert len(members) == 1, "You have multiple proxy meshes, please only use one"
instance.data['proxy'] = members
self.log.debug('proxy members: {}'.format(members))
self.log.debug("data: {}".format(instance.data))

@@ -119,11 +119,15 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
texture_filenames = []
if image_search_paths:
# TODO: Somehow this uses OS environment path separator, `:` vs `;`
# Later on check whether this is pipeline OS cross-compatible.
image_search_paths = [p for p in
image_search_paths.split(os.path.pathsep) if p]
# find all ${TOKEN} tokens and replace them with $TOKEN env. variable
image_search_paths = self._replace_tokens(image_search_paths)
# List all related textures
texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
self.log.info("Found %i texture(s)" % len(texture_filenames))
@@ -140,6 +144,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
"atttribute'" % node)
# Collect all texture files
# find all ${TOKEN} tokens and replace them with $TOKEN env. variable
texture_filenames = self._replace_tokens(texture_filenames)
for texture in texture_filenames:
files = []
@@ -283,3 +289,20 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
collection, remainder = clique.assemble(files, patterns=pattern)
return collection
def _replace_tokens(self, strings):
env_re = re.compile(r"\$\{(\w+)\}")
replaced = []
for s in strings:
matches = re.finditer(env_re, s)
for m in matches:
try:
s = s.replace(m.group(), os.environ[m.group(1)])
except KeyError:
msg = "Cannot find requested {} in environment".format(
m.group(1))
self.log.error(msg)
raise RuntimeError(msg)
replaced.append(s)
return replaced
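A short sketch of the token replacement, assuming a hypothetical environment variable:

    os.environ["YETI_TEX"] = "/mnt/textures"
    self._replace_tokens(["${YETI_TEX}/fur_diffuse.1001.tif"])
    # -> ["/mnt/textures/fur_diffuse.1001.tif"]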

@@ -17,6 +17,7 @@ class ExtractAssStandin(pype.api.Extractor):
label = "Ass Standin (.ass)"
hosts = ["maya"]
families = ["ass"]
asciiAss = False
def process(self, instance):
@@ -47,7 +48,7 @@ class ExtractAssStandin(pype.api.Extractor):
exported_files = cmds.arnoldExportAss(filename=file_path,
selected=True,
asciiAss=self.asciiAss,
shadowLinks=True,
lightLinks=True,
boundingBox=True,
@@ -59,13 +60,15 @@ class ExtractAssStandin(pype.api.Extractor):
filenames.append(os.path.split(file)[1])
self.log.info("Exported: {}".format(filenames))
else:
self.log.info("Extracting ass")
cmds.arnoldExportAss(filename=file_path,
selected=True,
asciiAss=False,
shadowLinks=True,
lightLinks=True,
boundingBox=True
)
self.log.info("Extracted {}".format(filename))
filenames = filename
optionals = [
"frameStart", "frameEnd", "step", "handles",

@@ -0,0 +1,319 @@
from avalon import api, style, io
import nuke
import nukescripts
from pype.nuke import lib as pnlib
from avalon.nuke import lib as anlib
from avalon.nuke import containerise, update_container
reload(pnlib)
class LoadBackdropNodes(api.Loader):
"""Loading Published Backdrop nodes (workfile, nukenodes)"""
representations = ["nk"]
families = ["workfile", "nukenodes"]
label = "Iport Nuke Nodes"
order = 0
icon = "eye"
color = style.colors.light
node_color = "0x7533c1ff"
def load(self, context, name, namespace, data):
"""
Loading function to import a .nk file into the script and wrap
it in a backdrop
Arguments:
context (dict): context of version
name (str): name of the version
namespace (str): asset name
data (dict): compulsory attribute > not used
Returns:
nuke node: containerised nuke node object
"""
# get main variables
version = context['version']
version_data = version.get("data", {})
vname = version.get("name", None)
first = version_data.get("frameStart", None)
last = version_data.get("frameEnd", None)
namespace = namespace or context['asset']['name']
colorspace = version_data.get("colorspace", None)
object_name = "{}_{}".format(name, namespace)
# prepare data for imprinting
# add additional metadata from the version to imprint to Avalon knob
add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
"source", "author", "fps"]
data_imprint = {"frameStart": first,
"frameEnd": last,
"version": vname,
"colorspaceInput": colorspace,
"objectName": object_name}
for k in add_keys:
data_imprint.update({k: version_data[k]})
# getting file path
file = self.fname.replace("\\", "/")
# adding nodes to node graph
# just in case we are in group lets jump out of it
nuke.endGroup()
# Get mouse position
n = nuke.createNode("NoOp")
xcursor, ycursor = (n.xpos(), n.ypos())
anlib.reset_selection()
nuke.delete(n)
bdn_frame = 50
with anlib.maintained_selection():
# add group from nk
nuke.nodePaste(file)
# get all pasted nodes
new_nodes = list()
nodes = nuke.selectedNodes()
# get pointer position in DAG
xpointer, ypointer = pnlib.find_free_space_to_paste_nodes(nodes, direction="right", offset=200+bdn_frame)
# reset position to all nodes and replace inputs and output
for n in nodes:
anlib.reset_selection()
xpos = (n.xpos() - xcursor) + xpointer
ypos = (n.ypos() - ycursor) + ypointer
n.setXYpos(xpos, ypos)
# replace Input nodes with dots
if n.Class() == "Input":
dot = nuke.createNode("Dot")
new_name = n.name().replace("INP", "DOT")
dot.setName(new_name)
dot["label"].setValue(new_name)
dot.setXYpos(xpos, ypos)
new_nodes.append(dot)
# rewire
dep = n.dependent()
for d in dep:
index = next((i for i, dpcy in enumerate(
d.dependencies())
if n is dpcy), 0)
d.setInput(index, dot)
# remove Input node
anlib.reset_selection()
nuke.delete(n)
continue
# replace Output nodes with dots
elif n.Class() == "Output":
dot = nuke.createNode("Dot")
new_name = n.name() + "_DOT"
dot.setName(new_name)
dot["label"].setValue(new_name)
dot.setXYpos(xpos, ypos)
new_nodes.append(dot)
# rewire
dep = next((d for d in n.dependencies()), None)
if dep:
dot.setInput(0, dep)
# remove Output node
anlib.reset_selection()
nuke.delete(n)
continue
else:
new_nodes.append(n)
# reselect nodes with new Dot instead of Inputs and Output
anlib.reset_selection()
anlib.select_nodes(new_nodes)
# place on backdrop
bdn = nukescripts.autoBackdrop()
# add frame offset
xpos = bdn.xpos() - bdn_frame
ypos = bdn.ypos() - bdn_frame
bdwidth = bdn["bdwidth"].value() + (bdn_frame*2)
bdheight = bdn["bdheight"].value() + (bdn_frame*2)
bdn["xpos"].setValue(xpos)
bdn["ypos"].setValue(ypos)
bdn["bdwidth"].setValue(bdwidth)
bdn["bdheight"].setValue(bdheight)
bdn["name"].setValue(object_name)
bdn["label"].setValue("Version tracked frame: \n`{}`\n\nPLEASE DO NOT REMOVE OR MOVE \nANYTHING FROM THIS FRAME!".format(object_name))
bdn["note_font_size"].setValue(20)
return containerise(
node=bdn,
name=name,
namespace=namespace,
context=context,
loader=self.__class__.__name__,
data=data_imprint)
def update(self, container, representation):
"""Update the Loader's path
Nuke automatically tries to reset some variables when changing
the loader's path to a new file. These automatic changes are
applied to its inputs.
"""
# get main variables
# Get version from io
version = io.find_one({
"type": "version",
"_id": representation["parent"]
})
# get corresponding node
GN = nuke.toNode(container['objectName'])
file = api.get_representation_path(representation).replace("\\", "/")
context = representation["context"]
name = container['name']
version_data = version.get("data", {})
vname = version.get("name", None)
first = version_data.get("frameStart", None)
last = version_data.get("frameEnd", None)
namespace = container['namespace']
colorspace = version_data.get("colorspace", None)
object_name = "{}_{}".format(name, namespace)
add_keys = ["frameStart", "frameEnd", "handleStart", "handleEnd",
"source", "author", "fps"]
data_imprint = {"representation": str(representation["_id"]),
"frameStart": first,
"frameEnd": last,
"version": vname,
"colorspaceInput": colorspace,
"objectName": object_name}
for k in add_keys:
data_imprint.update({k: version_data[k]})
# adding nodes to node graph
# just in case we are in group lets jump out of it
nuke.endGroup()
with anlib.maintained_selection():
xpos = GN.xpos()
ypos = GN.ypos()
avalon_data = anlib.get_avalon_knob_data(GN)
nuke.delete(GN)
# add group from nk
nuke.nodePaste(file)
GN = nuke.selectedNode()
anlib.set_avalon_knob_data(GN, avalon_data)
GN.setXYpos(xpos, ypos)
GN["name"].setValue(object_name)
# get all versions in list
versions = io.find({
"type": "version",
"parent": version["parent"]
}).distinct('name')
max_version = max(versions)
# change color of node
if version.get("name") not in [max_version]:
GN["tile_color"].setValue(int("0xd88467ff", 16))
else:
GN["tile_color"].setValue(int(self.node_color, 16))
self.log.info("udated to version: {}".format(version.get("name")))
return update_container(GN, data_imprint)
def connect_active_viewer(self, group_node):
"""
Finds the active viewer, places the node under it, and adds
the name of the group to the viewer's Input Process.
Arguments:
group_node (nuke node): nuke group node object
"""
group_node_name = group_node["name"].value()
viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()]
if len(viewer) > 0:
viewer = viewer[0]
else:
self.log.error("Please create Viewer node before you "
"run this action again")
return None
# get coordinates of Viewer1
xpos = viewer["xpos"].value()
ypos = viewer["ypos"].value()
ypos += 150
viewer["ypos"].setValue(ypos)
# set coordinates to group node
group_node["xpos"].setValue(xpos)
group_node["ypos"].setValue(ypos + 50)
# add group node name to Viewer Input Process
viewer["input_process_node"].setValue(group_node_name)
# put backdrop under
pnlib.create_backdrop(label="Input Process", layer=2,
nodes=[viewer, group_node], color="0x7c7faaff")
return True
def get_item(self, data, trackIndex, subTrackIndex):
return {key: val for key, val in data.items()
if subTrackIndex == val["subTrackIndex"]
if trackIndex == val["trackIndex"]}
def byteify(self, input):
"""
Converts unicode strings to strings
It recursively walks the whole dictionary
Arguments:
input (dict/str): input
Returns:
dict: with fixed values and keys
"""
if isinstance(input, dict):
return {self.byteify(key): self.byteify(value)
for key, value in input.iteritems()}
elif isinstance(input, list):
return [self.byteify(element) for element in input]
elif isinstance(input, unicode):
return input.encode('utf-8')
else:
return input
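A quick Python 2 sketch of the conversion:

    self.byteify({u"family": u"workfile", u"nodes": [u"Read1"]})
    # -> {'family': 'workfile', 'nodes': ['Read1']}  (str keys and values)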
def switch(self, container, representation):
self.update(container, representation)
def remove(self, container):
from avalon.nuke import viewer_update_and_undo_stop
node = nuke.toNode(container['objectName'])
with viewer_update_and_undo_stop():
nuke.delete(node)

@@ -39,6 +39,25 @@ def _streams(source):
return json.loads(out)['streams']
def get_fps(str_value):
if str_value == "0/0":
print("Source has \"r_frame_rate\" value set to \"0/0\".")
return "Unknown"
items = str_value.split("/")
if len(items) == 1:
fps = float(items[0])
elif len(items) == 2:
fps = float(items[0]) / float(items[1])
else:
return "Unknown"
# Check if fps is integer or float number
if int(fps) == fps:
fps = int(fps)
return str(fps)
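For example, with the r_frame_rate strings ffprobe typically reports:

    get_fps("25/1")        # -> "25"
    get_fps("24000/1001")  # -> "23.976023976023978"
    get_fps("0/0")         # -> "Unknown"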
class ModifiedBurnins(ffmpeg_burnins.Burnins):
'''
This is a modification of the OTIO FFmpeg burnins adapter.
@@ -95,6 +114,7 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
streams = _streams(source)
super().__init__(source, streams)
if options_init:
self.options_init.update(options_init)
@@ -329,6 +349,17 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
frame_start = data.get("frame_start")
frame_start_tc = data.get('frame_start_tc', frame_start)
stream = burnin._streams[0]
if "resolution_width" not in data:
data["resolution_width"] = stream.get("width", "Unknown")
if "resolution_height" not in data:
data["resolution_height"] = stream.get("height", "Unknown")
if "fps" not in data:
data["fps"] = get_fps(stream.get("r_frame_rate", "0/0"))
for align_text, preset in presets.get('burnins', {}).items():
align = None
if align_text == 'TOP_LEFT':
@@ -383,12 +414,14 @@ def burnins_from_data(input_path, codec_data, output_path, data, overwrite=True)
elif bi_func == 'timecode':
burnin.add_timecode(align, start_frame=frame_start_tc)
elif bi_func == 'text':
if not preset.get('text'):
log.error('Text is not set for text function burnin!')
return
text = preset['text'].format(**data)
burnin.add_text(text, align)
elif bi_func == "datetime":
date_format = preset["format"]
burnin.add_datetime(date_format, align)
@@ -415,4 +448,4 @@ if __name__ == '__main__':
data['codec'],
data['output'],
data['burnin_data']
)

BIN
res/app_icons/blender.png Normal file

Binary file not shown (50 KiB).

setup/blender/init.py Normal file

@@ -0,0 +1,3 @@
from pype import blender
blender.install()