Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-01 16:34:53 +01:00)

Merge branch 'develop' into feature/validate-ass-relative-paths

This commit is contained in: commit 0abfe03bc4
207 changed files with 14223 additions and 3984 deletions

@@ -140,9 +140,9 @@ class ImportMayaLoader(api.Loader):
        message = "Are you sure you want import this"
        state = QtWidgets.QMessageBox.warning(None,
                                              "Are you sure?",
                                              message,
                                              buttons=buttons,
                                              defaultButton=accept)
                                              "Are you sure?",
                                              message,
                                              buttons=buttons,
                                              defaultButton=accept)

        return state == accept

@@ -1,62 +0,0 @@
import pype.maya.plugin
import os
from pypeapp import config


class CameraLoader(pype.maya.plugin.ReferenceLoader):
    """Specific loader of Alembic for the pype.camera family"""

    families = ["camera"]
    label = "Reference camera"
    representations = ["abc", "ma"]
    order = -10
    icon = "code-fork"
    color = "orange"

    def process_reference(self, context, name, namespace, data):

        import maya.cmds as cmds
        # Get family type from the context

        try:
            family = context["representation"]["context"]["family"]
        except ValueError:
            family = "camera"

        cmds.loadPlugin("AbcImport.mll", quiet=True)
        groupName = "{}:{}".format(namespace, name)
        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          sharedReferenceFile=False,
                          groupReference=True,
                          groupName="{}:{}".format(namespace, name),
                          reference=True,
                          returnNewNodes=True)

        cameras = cmds.ls(nodes, type="camera")

        presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
        colors = presets['plugins']['maya']['load']['colors']

        c = colors.get(family)
        if c is not None:
            cmds.setAttr(groupName + ".useOutlinerColor", 1)
            cmds.setAttr(groupName + ".outlinerColor",
                         c[0], c[1], c[2])

        # Check the Maya version, lockTransform has been introduced since
        # Maya 2016.5 Ext 2
        version = int(cmds.about(version=True))
        if version >= 2016:
            for camera in cameras:
                cmds.camera(camera, edit=True, lockTransform=True)
        else:
            self.log.warning("This version of Maya does not support locking of"
                             " transforms of cameras.")

        self[:] = nodes

        return nodes

    def switch(self, container, representation):
        self.update(container, representation)

@@ -1,54 +0,0 @@
import pype.maya.plugin
import os
from pypeapp import config


class FBXLoader(pype.maya.plugin.ReferenceLoader):
    """Load the FBX"""

    families = ["fbx"]
    representations = ["fbx"]

    label = "Reference FBX"
    order = -10
    icon = "code-fork"
    color = "orange"

    def process_reference(self, context, name, namespace, data):

        import maya.cmds as cmds
        from avalon import maya

        try:
            family = context["representation"]["context"]["family"]
        except ValueError:
            family = "fbx"

        # Ensure FBX plug-in is loaded
        cmds.loadPlugin("fbxmaya", quiet=True)

        with maya.maintained_selection():
            nodes = cmds.file(self.fname,
                              namespace=namespace,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName="{}:{}".format(namespace, name))

        groupName = "{}:{}".format(namespace, name)

        presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
        colors = presets['plugins']['maya']['load']['colors']

        c = colors.get(family)
        if c is not None:
            cmds.setAttr(groupName + ".useOutlinerColor", 1)
            cmds.setAttr(groupName + ".outlinerColor",
                         c[0], c[1], c[2])

        self[:] = nodes

        return nodes

    def switch(self, container, representation):
        self.update(container, representation)

@@ -116,9 +116,11 @@ class LookLoader(pype.maya.plugin.ReferenceLoader):
                                  shapes=True))
        nodes = set(nodes_list)

        json_representation = io.find_one({"type": "representation",
                                           "parent": representation['parent'],
                                           "name": "json"})
        json_representation = io.find_one({
            "type": "representation",
            "parent": representation['parent'],
            "name": "json"
        })

        # Load relationships
        shader_relation = api.get_representation_path(json_representation)

@@ -1,68 +0,0 @@
import pype.maya.plugin
from pypeapp import config
import os


class MayaAsciiLoader(pype.maya.plugin.ReferenceLoader):
    """Load the model"""

    families = ["mayaAscii",
                "setdress",
                "layout"]
    representations = ["ma"]

    label = "Reference Maya Ascii"
    order = -10
    icon = "code-fork"
    color = "orange"

    def process_reference(self, context, name, namespace, data):

        import maya.cmds as cmds
        from avalon import maya

        try:
            family = context["representation"]["context"]["family"]
        except ValueError:
            family = "model"

        with maya.maintained_selection():
            nodes = cmds.file(self.fname,
                              namespace=namespace,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName="{}:{}".format(namespace, name))

        self[:] = nodes
        groupName = "{}:{}".format(namespace, name)

        presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
        colors = presets['plugins']['maya']['load']['colors']

        c = colors.get(family)
        if c is not None:
            cmds.setAttr(groupName + ".useOutlinerColor", 1)
            cmds.setAttr(groupName + ".outlinerColor",
                         c[0], c[1], c[2])
        cmds.setAttr(groupName + ".displayHandle", 1)
        # get bounding box
        bbox = cmds.exactWorldBoundingBox(groupName)
        # get pivot position on world space
        pivot = cmds.xform(groupName, q=True, sp=True, ws=True)
        # center of bounding box
        cx = (bbox[0] + bbox[3]) / 2
        cy = (bbox[1] + bbox[4]) / 2
        cz = (bbox[2] + bbox[5]) / 2
        # add pivot position to calculate offset
        cx = cx + pivot[0]
        cy = cy + pivot[1]
        cz = cz + pivot[2]
        # set selection handle offset to center of bounding box
        cmds.setAttr(groupName + ".selectHandleX", cx)
        cmds.setAttr(groupName + ".selectHandleY", cy)
        cmds.setAttr(groupName + ".selectHandleZ", cz)
        return nodes

    def switch(self, container, representation):
        self.update(container, representation)

@@ -1,4 +1,6 @@
import pype.maya.plugin
from avalon import api, maya
from maya import cmds
import os
from pypeapp import config


@@ -6,8 +8,15 @@ from pypeapp import config
class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
    """Load the model"""

    families = ["model", "pointcache", "animation"]
    representations = ["ma", "abc"]
    families = ["model",
                "pointcache",
                "animation",
                "mayaAscii",
                "setdress",
                "layout",
                "camera",
                "rig"]
    representations = ["ma", "abc", "fbx"]
    tool_names = ["loader"]

    label = "Reference"

@@ -37,27 +46,29 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
                              reference=True,
                              returnNewNodes=True)

        namespace = cmds.referenceQuery(nodes[0], namespace=True)
        # namespace = cmds.referenceQuery(nodes[0], namespace=True)

        shapes = cmds.ls(nodes, shapes=True, long=True)
        print(shapes)

        newNodes = (list(set(nodes) - set(shapes)))
        print(newNodes)

        current_namespace = pm.namespaceInfo(currentNamespace=True)

        if current_namespace != ":":
            groupName = current_namespace + ":" + groupName

        groupNode = pm.PyNode(groupName)
        roots = set()
        print(nodes)

        for node in newNodes:
            try:
                roots.add(pm.PyNode(node).getAllParents()[-2])
            except:
            except: # noqa: E722
                pass
        for root in roots:
            root.setParent(world=True)

        groupNode.root().zeroTransformPivots()
        groupNode.zeroTransformPivots()
        for root in roots:
            root.setParent(groupNode)

@@ -90,23 +101,39 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
        cmds.setAttr(groupName + ".selectHandleY", cy)
        cmds.setAttr(groupName + ".selectHandleZ", cz)

        if data.get("post_process", True):
            if family == "rig":
                self._post_process_rig(name, namespace, context, data)

        return newNodes

    def switch(self, container, representation):
        self.update(container, representation)

    def _post_process_rig(self, name, namespace, context, data):

# for backwards compatibility
class AbcLoader(ReferenceLoader):
    label = "Deprecated loader (don't use)"
    families = ["pointcache", "animation"]
    representations = ["abc"]
    tool_names = []
        output = next((node for node in self if
                       node.endswith("out_SET")), None)
        controls = next((node for node in self if
                         node.endswith("controls_SET")), None)

        assert output, "No out_SET in rig, this is a bug."
        assert controls, "No controls_SET in rig, this is a bug."

# for backwards compatibility
class ModelLoader(ReferenceLoader):
    label = "Deprecated loader (don't use)"
    families = ["model", "pointcache"]
    representations = ["abc"]
    tool_names = []
        # Find the roots amongst the loaded nodes
        roots = cmds.ls(self[:], assemblies=True, long=True)
        assert roots, "No root nodes in rig, this is a bug."

        asset = api.Session["AVALON_ASSET"]
        dependency = str(context["representation"]["_id"])

        self.log.info("Creating subset: {}".format(namespace))

        # Create the animation instance
        with maya.maintained_selection():
            cmds.select([output, controls] + roots, noExpand=True)
            api.create(name=namespace,
                       asset=asset,
                       family="animation",
                       options={"useSelection": True},
                       data={"dependencies": dependency})

@@ -1,95 +0,0 @@
from maya import cmds

import pype.maya.plugin
from avalon import api, maya
import os
from pypeapp import config


class RigLoader(pype.maya.plugin.ReferenceLoader):
    """Specific loader for rigs

    This automatically creates an instance for animators upon load.

    """

    families = ["rig"]
    representations = ["ma"]

    label = "Reference rig"
    order = -10
    icon = "code-fork"
    color = "orange"

    def process_reference(self, context, name, namespace, data):

        try:
            family = context["representation"]["context"]["family"]
        except ValueError:
            family = "rig"

        groupName = "{}:{}".format(namespace, name)
        nodes = cmds.file(self.fname,
                          namespace=namespace,
                          reference=True,
                          returnNewNodes=True,
                          groupReference=True,
                          groupName=groupName)

        cmds.xform(groupName, pivots=(0, 0, 0))

        presets = config.get_presets(project=os.environ['AVALON_PROJECT'])
        colors = presets['plugins']['maya']['load']['colors']

        c = colors.get(family)
        if c is not None:
            cmds.setAttr(groupName + ".useOutlinerColor", 1)
            cmds.setAttr(groupName + ".outlinerColor",
                         c[0], c[1], c[2])

        shapes = cmds.ls(nodes, shapes=True, long=True)
        print(shapes)

        newNodes = (list(set(nodes) - set(shapes)))
        print(newNodes)

        # Store for post-process
        self[:] = newNodes
        if data.get("post_process", True):
            self._post_process(name, namespace, context, data)

        return newNodes

    def _post_process(self, name, namespace, context, data):

        # TODO(marcus): We are hardcoding the name "out_SET" here.
        # Better register this keyword, so that it can be used
        # elsewhere, such as in the Integrator plug-in,
        # without duplication.

        output = next((node for node in self if
                       node.endswith("out_SET")), None)
        controls = next((node for node in self if
                         node.endswith("controls_SET")), None)

        assert output, "No out_SET in rig, this is a bug."
        assert controls, "No controls_SET in rig, this is a bug."

        # Find the roots amongst the loaded nodes
        roots = cmds.ls(self[:], assemblies=True, long=True)
        assert roots, "No root nodes in rig, this is a bug."

        asset = api.Session["AVALON_ASSET"]
        dependency = str(context["representation"]["_id"])

        # Create the animation instance
        with maya.maintained_selection():
            cmds.select([output, controls] + roots, noExpand=True)
            api.create(name=namespace,
                       asset=asset,
                       family="animation",
                       options={"useSelection": True},
                       data={"dependencies": dependency})

    def switch(self, container, representation):
        self.update(container, representation)

@@ -117,7 +117,7 @@ class VRayProxyLoader(api.Loader):
        vray_mesh = cmds.createNode('VRayMesh', name="{}_VRMS".format(name))
        mesh_shape = cmds.createNode("mesh", name="{}_GEOShape".format(name))
        vray_mat = cmds.shadingNode("VRayMeshMaterial", asShader=True,
                                    name="{}_VRMM".format(name))
                                    name="{}_VRMM".format(name))
        vray_mat_sg = cmds.sets(name="{}_VRSG".format(name),
                                empty=True,
                                renderable=True,

@@ -21,15 +21,17 @@ class CollectAssData(pyblish.api.InstancePlugin):
        objsets = instance.data['setMembers']

        for objset in objsets:
            objset = str(objset)
            members = cmds.sets(objset, query=True)
            if members is None:
                self.log.warning("Skipped empty instance: \"%s\" " % objset)
                continue
            if objset == "content_SET":
            if "content_SET" in objset:
                instance.data['setMembers'] = members
            elif objset == "proxy_SET":
                self.log.debug('content members: {}'.format(members))
            elif objset.startswith("proxy_SET"):
                assert len(members) == 1, "You have multiple proxy meshes, please only use one"
                instance.data['proxy'] = members

                self.log.debug('proxy members: {}'.format(members))

        self.log.debug("data: {}".format(instance.data))

@@ -35,7 +35,7 @@ class CollectMayaScene(pyblish.api.ContextPlugin):
            "subset": subset,
            "asset": os.getenv("AVALON_ASSET", None),
            "label": subset,
            "publish": False,
            "publish": True,
            "family": 'workfile',
            "families": ['workfile'],
            "setMembers": [current_file]

@@ -119,11 +119,15 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
        texture_filenames = []
        if image_search_paths:


            # TODO: Somehow this uses OS environment path separator, `:` vs `;`
            # Later on check whether this is pipeline OS cross-compatible.
            image_search_paths = [p for p in
                                  image_search_paths.split(os.path.pathsep) if p]

            # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
            image_search_paths = self._replace_tokens(image_search_paths)

            # List all related textures
            texture_filenames = cmds.pgYetiCommand(node, listTextures=True)
            self.log.info("Found %i texture(s)" % len(texture_filenames))

@@ -140,6 +144,8 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
                          "atttribute'" % node)

        # Collect all texture files
        # find all ${TOKEN} tokens and replace them with $TOKEN env. variable
        texture_filenames = self._replace_tokens(texture_filenames)
        for texture in texture_filenames:

            files = []

@@ -283,3 +289,20 @@ class CollectYetiRig(pyblish.api.InstancePlugin):
        collection, remainder = clique.assemble(files, patterns=pattern)

        return collection

    def _replace_tokens(self, strings):
        env_re = re.compile(r"\$\{(\w+)\}")

        replaced = []
        for s in strings:
            matches = re.finditer(env_re, s)
            for m in matches:
                try:
                    s = s.replace(m.group(), os.environ[m.group(1)])
                except KeyError:
                    msg = "Cannot find requested {} in environment".format(
                        m.group(1))
                    self.log.error(msg)
                    raise RuntimeError(msg)
            replaced.append(s)
        return replaced

@@ -17,6 +17,7 @@ class ExtractAssStandin(pype.api.Extractor):
    label = "Ass Standin (.ass)"
    hosts = ["maya"]
    families = ["ass"]
    asciiAss = False

    def process(self, instance):


@@ -47,7 +48,7 @@

            exported_files = cmds.arnoldExportAss(filename=file_path,
                                                  selected=True,
                                                  asciiAss=True,
                                                  asciiAss=self.asciiAss,
                                                  shadowLinks=True,
                                                  lightLinks=True,
                                                  boundingBox=True,

@@ -59,13 +60,15 @@
                filenames.append(os.path.split(file)[1])
            self.log.info("Exported: {}".format(filenames))
        else:
            self.log.info("Extracting ass")
            cmds.arnoldExportAss(filename=file_path,
                                 selected=True,
                                 asciiAss=True,
                                 asciiAss=False,
                                 shadowLinks=True,
                                 lightLinks=True,
                                 boundingBox=True
                                 )
            self.log.info("Extracted {}".format(filename))
            filenames = filename
        optionals = [
            "frameStart", "frameEnd", "step", "handles",

@@ -22,11 +22,11 @@ class ExtractAssembly(pype.api.Extractor):

    def process(self, instance):

        parent_dir = self.staging_dir(instance)
        staging_dir = self.staging_dir(instance)
        hierarchy_filename = "{}.abc".format(instance.name)
        hierarchy_path = os.path.join(parent_dir, hierarchy_filename)
        hierarchy_path = os.path.join(staging_dir, hierarchy_filename)
        json_filename = "{}.json".format(instance.name)
        json_path = os.path.join(parent_dir, json_filename)
        json_path = os.path.join(staging_dir, json_filename)

        self.log.info("Dumping scene data for debugging ..")
        with open(json_path, "w") as filepath:

@@ -46,8 +46,24 @@
                          "uvWrite": True,
                          "selection": True})

        instance.data["files"] = [json_filename, hierarchy_filename]
        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation_abc = {
            'name': 'abc',
            'ext': 'abc',
            'files': hierarchy_filename,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation_abc)

        representation_json = {
            'name': 'json',
            'ext': 'json',
            'files': json_filename,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation_json)
        # Remove data
        instance.data.pop("scenedata", None)


@@ -1,6 +1,7 @@
import os
import sys
import json
import copy
import tempfile
import contextlib
import subprocess

@@ -330,10 +331,9 @@ class ExtractLook(pype.api.Extractor):
                                      maya_path))

    def resource_destination(self, instance, filepath, do_maketx):

        anatomy = instance.context.data["anatomy"]

        self.create_destination_template(instance, anatomy)
        resources_dir = instance.data["resourcesDir"]

        # Compute destination location
        basename, ext = os.path.splitext(os.path.basename(filepath))

@@ -343,7 +343,7 @@
            ext = ".tx"

        return os.path.join(
            instance.data["assumedDestination"], "resources", basename + ext
            resources_dir, basename + ext
        )

    def _process_texture(self, filepath, do_maketx, staging, linearise, force):


@@ -407,88 +407,3 @@
            return converted, COPY, texture_hash

        return filepath, COPY, texture_hash

    def create_destination_template(self, instance, anatomy):
        """Create a filepath based on the current data available

        Example template:
            {root}/{project}/{silo}/{asset}/publish/{subset}/v{version:0>3}/
            {subset}.{representation}
        Args:
            instance: the instance to publish

        Returns:
            file path (str)
        """

        # get all the stuff from the database
        subset_name = instance.data["subset"]
        self.log.info(subset_name)
        asset_name = instance.data["asset"]
        project_name = api.Session["AVALON_PROJECT"]
        a_template = anatomy.templates

        project = io.find_one(
            {"type": "project", "name": project_name},
            projection={"config": True, "data": True},
        )

        template = a_template["publish"]["path"]
        # anatomy = instance.context.data['anatomy']

        asset = io.find_one(
            {"type": "asset", "name": asset_name, "parent": project["_id"]}
        )

        assert asset, ("No asset found by the name '{}' "
                       "in project '{}'").format(asset_name, project_name)
        silo = asset.get("silo")

        subset = io.find_one(
            {"type": "subset", "name": subset_name, "parent": asset["_id"]}
        )

        # assume there is no version yet, we start at `1`
        version = None
        version_number = 1
        if subset is not None:
            version = io.find_one(
                {"type": "version",
                 "parent": subset["_id"]
                 }, sort=[("name", -1)]
            )

        # if there is a subset there ought to be version
        if version is not None:
            version_number += version["name"]

        if instance.data.get("version"):
            version_number = int(instance.data.get("version"))

        padding = int(a_template["render"]["padding"])

        hierarchy = asset["data"]["parents"]
        if hierarchy:
            # hierarchy = os.path.sep.join(hierarchy)
            hierarchy = "/".join(hierarchy)

        template_data = {
            "root": api.Session["AVALON_PROJECTS"],
            "project": {"name": project_name, "code": project["data"]["code"]},
            "silo": silo,
            "family": instance.data["family"],
            "asset": asset_name,
            "subset": subset_name,
            "frame": ("#" * padding),
            "version": version_number,
            "hierarchy": hierarchy,
            "representation": "TEMP",
        }

        instance.data["assumedTemplateData"] = template_data
        self.log.info(template_data)
        instance.data["template"] = template
        # We take the parent folder of representation 'filepath'
        instance.data["assumedDestination"] = os.path.dirname(
            anatomy.format(template_data)["publish"]["path"]
        )

@@ -1,16 +1,14 @@
import os
import glob
import contextlib
import capture_gui
import clique
import capture
#
import pype.maya.lib as lib
import pype.api
#
from maya import cmds, mel
import pymel.core as pm
# import ffmpeg
# # from pype.scripts import otio_burnin
# reload(ffmpeg)


# TODO: move codec settings to presets

@@ -93,7 +91,18 @@ class ExtractQuicktime(pype.api.Extractor):
        pm.currentTime(refreshFrameInt, edit=True)

        with maintained_time():
            playblast = capture_gui.lib.capture_scene(preset)
            filename = preset.get("filename", "%TEMP%")

            # Force viewer to False in call to capture because we have our own
            # viewer opening call to allow a signal to trigger between playblast
            # and viewer
            preset['viewer'] = False

            # Remove panel key since it's internal value to capture_gui
            preset.pop("panel", None)

            path = capture.capture(**preset)
            playblast = self._fix_playblast_output_path(path)

        self.log.info("file list {}".format(playblast))


@@ -119,6 +128,46 @@
        }
        instance.data["representations"].append(representation)

    def _fix_playblast_output_path(self, filepath):
        """Workaround a bug in maya.cmds.playblast to return correct filepath.

        When the `viewer` argument is set to False and maya.cmds.playblast
        does not automatically open the playblasted file the returned
        filepath does not have the file's extension added correctly.

        To workaround this we just glob.glob() for any file extensions and
        assume the latest modified file is the correct file and return it.

        """
        # Catch cancelled playblast
        if filepath is None:
            self.log.warning("Playblast did not result in output path. "
                             "Playblast is probably interrupted.")
            return None

        # Fix: playblast not returning correct filename (with extension)
        # Lets assume the most recently modified file is the correct one.
        if not os.path.exists(filepath):
            directory = os.path.dirname(filepath)
            filename = os.path.basename(filepath)
            # check if the filepath is has frame based filename
            # example : capture.####.png
            parts = filename.split(".")
            if len(parts) == 3:
                query = os.path.join(directory, "{}.*.{}".format(parts[0],
                                                                 parts[-1]))
                files = glob.glob(query)
            else:
                files = glob.glob("{}.*".format(filepath))

            if not files:
                raise RuntimeError("Couldn't find playblast from: "
                                   "{0}".format(filepath))
            filepath = max(files, key=os.path.getmtime)

        return filepath


@contextlib.contextmanager
def maintained_time():

@@ -1,31 +1,14 @@
import os
import contextlib
import time
import sys
import glob

import capture_gui
import clique
import capture

import pype.maya.lib as lib
import pype.api

from maya import cmds
import pymel.core as pm
# import ffmpeg
# reload(ffmpeg)

import avalon.maya

# import maya_utils as mu

# from tweakHUD import master
# from tweakHUD import draft_hud as dHUD
# from tweakHUD import ftrackStrings as fStrings

#
# def soundOffsetFunc(oSF, SF, H):
#     tmOff = (oSF - H) - SF
#     return tmOff


class ExtractThumbnail(pype.api.Extractor):

@@ -47,39 +30,8 @@ class ExtractThumbnail(pype.api.Extractor):
        end = cmds.currentTime(query=True)
        self.log.info("start: {}, end: {}".format(start, end))

        members = instance.data['setMembers']
        camera = instance.data['review_camera']

        # project_code = ftrack_data['Project']['code']
        # task_type = ftrack_data['Task']['type']
        #
        # # load Preset
        # studio_repos = os.path.abspath(os.environ.get('studio_repos'))
        # shot_preset_path = os.path.join(studio_repos, 'maya',
        #                 'capture_gui_presets',
        #                (project_code + '_' + task_type + '_' + asset + '.json'))
        #
        # task_preset_path = os.path.join(studio_repos, 'maya',
        #                 'capture_gui_presets',
        #                (project_code + '_' + task_type + '.json'))
        #
        # project_preset_path = os.path.join(studio_repos, 'maya',
        #                  'capture_gui_presets',
        #                  (project_code + '.json'))
        #
        # default_preset_path = os.path.join(studio_repos, 'maya',
        #                  'capture_gui_presets',
        #                  'default.json')
        #
        # if os.path.isfile(shot_preset_path):
        #     preset_to_use = shot_preset_path
        # elif os.path.isfile(task_preset_path):
        #     preset_to_use = task_preset_path
        # elif os.path.isfile(project_preset_path):
        #     preset_to_use = project_preset_path
        # else:
        #     preset_to_use = default_preset_path

        capture_preset = ""
        capture_preset = instance.context.data['presets']['maya']['capture']
        try:

@@ -126,7 +78,18 @@
        pm.currentTime(refreshFrameInt, edit=True)

        with maintained_time():
            playblast = capture_gui.lib.capture_scene(preset)
            filename = preset.get("filename", "%TEMP%")

            # Force viewer to False in call to capture because we have our own
            # viewer opening call to allow a signal to trigger between
            # playblast and viewer
            preset['viewer'] = False

            # Remove panel key since it's internal value to capture_gui
            preset.pop("panel", None)

            path = capture.capture(**preset)
            playblast = self._fix_playblast_output_path(path)

        _, thumbnail = os.path.split(playblast)


@@ -144,6 +107,45 @@
        }
        instance.data["representations"].append(representation)

    def _fix_playblast_output_path(self, filepath):
        """Workaround a bug in maya.cmds.playblast to return correct filepath.

        When the `viewer` argument is set to False and maya.cmds.playblast
        does not automatically open the playblasted file the returned
        filepath does not have the file's extension added correctly.

        To workaround this we just glob.glob() for any file extensions and
        assume the latest modified file is the correct file and return it.

        """
        # Catch cancelled playblast
        if filepath is None:
            self.log.warning("Playblast did not result in output path. "
                             "Playblast is probably interrupted.")
            return None

        # Fix: playblast not returning correct filename (with extension)
        # Lets assume the most recently modified file is the correct one.
        if not os.path.exists(filepath):
            directory = os.path.dirname(filepath)
            filename = os.path.basename(filepath)
            # check if the filepath is has frame based filename
            # example : capture.####.png
            parts = filename.split(".")
            if len(parts) == 3:
                query = os.path.join(directory, "{}.*.{}".format(parts[0],
                                                                 parts[-1]))
                files = glob.glob(query)
            else:
                files = glob.glob("{}.*".format(filepath))

            if not files:
                raise RuntimeError("Couldn't find playblast from: "
                                   "{0}".format(filepath))
            filepath = max(files, key=os.path.getmtime)

        return filepath


@contextlib.contextmanager
def maintained_time():

@@ -110,15 +110,7 @@ class ExtractYetiRig(pype.api.Extractor):

        self.log.info("Writing metadata file")

        # Create assumed destination folder for imageSearchPath
        assumed_temp_data = instance.data["assumedTemplateData"]
        template = instance.data["template"]
        template_formatted = template.format(**assumed_temp_data)

        destination_folder = os.path.dirname(template_formatted)

        image_search_path = os.path.join(destination_folder, "resources")
        image_search_path = os.path.normpath(image_search_path)
        image_search_path = resources_dir = instance.data["resourcesDir"]

        settings = instance.data.get("rigsettings", None)
        if settings:

@@ -228,80 +228,19 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
            "AuxFiles": []
        }

        # Include critical environment variables with submission
        # We need those to pass them to pype for it to set correct context
        keys = [
            # This will trigger `userSetup.py` on the slave
            # such that proper initialisation happens the same
            # way as it does on a local machine.
            # TODO(marcus): This won't work if the slaves don't
            # have accesss to these paths, such as if slaves are
            # running Linux and the submitter is on Windows.
            "PYTHONPATH",
            "PATH",

            "MTOA_EXTENSIONS_PATH",
            "MTOA_EXTENSIONS",
            "DYLD_LIBRARY_PATH",
            "MAYA_RENDER_DESC_PATH",
            "MAYA_MODULE_PATH",
            "ARNOLD_PLUGIN_PATH",
            "AVALON_SCHEMA",
            "FTRACK_API_KEY",
            "FTRACK_API_USER",
            "FTRACK_SERVER",
            "PYBLISHPLUGINPATH",

            # todo: This is a temporary fix for yeti variables
            "PEREGRINEL_LICENSE",
            "SOLIDANGLE_LICENSE",
            "ARNOLD_LICENSE"
            "MAYA_MODULE_PATH",
            "TOOL_ENV"
            "AVALON_PROJECT",
            "AVALON_ASSET",
            "AVALON_TASK",
            "PYPE_USERNAME"
        ]

        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **api.Session)
        # self.log.debug("enviro: {}".format(pprint(environment)))
        for path in os.environ:
            if path.lower().startswith('pype_'):
                environment[path] = os.environ[path]

        environment["PATH"] = os.environ["PATH"]
        # self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
        clean_environment = {}
        for key in environment:
            clean_path = ""
            self.log.debug("key: {}".format(key))
            self.log.debug("value: {}".format(environment[key]))
            to_process = str(environment[key])
            if key == "PYPE_STUDIO_CORE_MOUNT":
                clean_path = to_process
            elif "://" in to_process:
                clean_path = to_process
            elif os.pathsep not in str(to_process):
                try:
                    path = to_process
                    path.decode('UTF-8', 'strict')
                    clean_path = os.path.normpath(path)
                except UnicodeDecodeError:
                    print('path contains non UTF characters')
            else:
                for path in to_process.split(os.pathsep):
                    try:
                        path.decode('UTF-8', 'strict')
                        clean_path += os.path.normpath(path) + os.pathsep
                    except UnicodeDecodeError:
                        print('path contains non UTF characters')

            if key == "PYTHONPATH":
                clean_path = clean_path.replace('python2', 'python3')
            clean_path = clean_path.replace(
                os.path.normpath(
                    environment['PYPE_STUDIO_CORE_MOUNT']),  # noqa
                os.path.normpath(
                    environment['PYPE_STUDIO_CORE_PATH']))  # noqa
            clean_environment[key] = clean_path

        environment = clean_environment

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(

@@ -319,7 +258,7 @@

        self.preflight_check(instance)

        self.log.info("Submitting..")
        self.log.info("Submitting ...")
        self.log.info(json.dumps(payload, indent=4, sort_keys=True))

        # E.g. http://192.168.0.1:8082/api/jobs

@@ -38,9 +38,13 @@ class ValidateNodeIDsRelated(pyblish.api.InstancePlugin):
        invalid = list()

        asset = instance.data['asset']
        asset_data = io.find_one({"name": asset,
                                  "type": "asset"},
                                 projection={"_id": True})
        asset_data = io.find_one(
            {
                "name": asset,
                "type": "asset"
            },
            projection={"_id": True}
        )
        asset_id = str(asset_data['_id'])

        # We do want to check the referenced nodes as we it might be

@@ -49,9 +49,10 @@ class ValidateRenderLayerAOVs(pyblish.api.InstancePlugin):
        """Check if subset is registered in the database under the asset"""

        asset = io.find_one({"type": "asset", "name": asset_name})
        is_valid = io.find_one({"type": "subset",
                                "name": subset_name,
                                "parent": asset["_id"]})
        is_valid = io.find_one({
            "type": "subset",
            "name": subset_name,
            "parent": asset["_id"]
        })

        return is_valid