Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-25 05:14:40 +01:00
Merge branch 'develop' into feature/PYPE-762_multi_root

commit b138713c19
18 changed files with 210 additions and 107 deletions
.flake8 (2 changed lines)
@@ -1,5 +1,7 @@
[flake8]
# ignore = D203
ignore = BLK100
max-line-length = 79
exclude =
    .git,
    __pycache__,
.hound.yml (4 changed lines, new file)
@@ -0,0 +1,4 @@
flake8:
  enabled: true
  config_file: .flake8
LICENSE (2 changed lines)
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2018 orbi tools s.r.o
Copyright (c) 2020 Orbi Tools s.r.o.

Permission is hereby granted, free of charge, to any person obtaining a copy
README.md (30 changed lines)
@@ -1,31 +1,11 @@
Pype
====

The base studio _config_ for [Avalon](https://getavalon.github.io/)
Welcome to PYPE _config_ for [Avalon](https://getavalon.github.io/)

Currently this config is dependent on our customised avalon installation so it won't work with vanilla avalon core. We're working on open sourcing all of the necessary code though. You can still get inspiration or take our individual validators and scripts, which should work just fine in other pipelines.
To get all the key information about the project, go to [PYPE.club](http://pype.club)

Currently this config is dependent on our customised avalon installation so it won't work with vanilla avalon core. To install it you'll need to download [pype-setup](github.com/pypeclub/pype-setup), which is able to deploy everything for you if you follow the documentation.

_This configuration acts as a starting point for all pype club clients with avalon deployment._

Code convention
---------------

Below are some of the standard practices applied to this repository.

- **Etiquette: PEP8**

  All code is written in PEP8. It is recommended you use a linter as you work; flake8 and pylint are both good options.

- **Etiquette: Napoleon docstrings**

  Any docstrings are written in Google Napoleon format. See [Napoleon](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html) for details.

- **Etiquette: Semantic Versioning**

  This project follows [semantic versioning](http://semver.org).

- **Etiquette: Underscore means private**

  Anything prefixed with an underscore is internal to wherever it is used. For example, a variable is only ever used in the parent function or class, and a module is not for use by the end-user. In contrast, anything without an underscore is public, but not necessarily part of the API. Members of the API reside in `api.py`.

- **API: Idempotence**

  A public function must be able to be called twice and produce the exact same result. This means no changing of state without restoring the previous state when finishing. For example, if a function requires changing the current selection in Autodesk Maya, it must restore the previous selection prior to completing (see the sketch after this hunk).
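The two convention bullets above (Napoleon docstrings and idempotent public functions) can be illustrated with a short, self-contained sketch. This is a hypothetical example written for this note, not code from the repository; the helper and variable names are made up.

    """A hypothetical illustration of the README conventions above.

    The docstring uses Google/Napoleon sections and the function is
    idempotent: it changes state (an environment variable) and restores
    it before returning, so calling it twice yields the same result.
    """
    import os
    from contextlib import contextmanager


    @contextmanager
    def _temporary_env(key, value):
        """Set an environment variable for the duration of a block.

        Args:
            key (str): Name of the environment variable to set.
            value (str): Temporary value.

        Yields:
            str: The temporary value currently in effect.
        """
        original = os.environ.get(key)
        os.environ[key] = value
        try:
            yield value
        finally:
            # Restore previous state so the helper stays idempotent.
            if original is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = original


    def render_scene(scene_name):
        """Pretend to render a scene without leaking state.

        Args:
            scene_name (str): Name of the scene to "render".

        Returns:
            str: A message describing what was rendered.
        """
        with _temporary_env("PYPE_CURRENT_SCENE", scene_name):
            return "rendered {}".format(scene_name)


    if __name__ == "__main__":
        print(render_scene("shot_010"))   # same output on every call
        print(render_scene("shot_010"))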
@@ -1,16 +1,10 @@
import logging
from pathlib import Path
import os

import bpy
import sys
import traceback

from avalon import api as avalon
from pyblish import api as pyblish

from .plugin import AssetLoader

logger = logging.getLogger("pype.blender")

PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

@@ -19,9 +13,16 @@ PUBLISH_PATH = os.path.join(PLUGINS_DIR, "blender", "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "blender", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "blender", "create")

ORIGINAL_EXCEPTHOOK = sys.excepthook


def pype_excepthook_handler(*args):
    traceback.print_exception(*args)


def install():
    """Install Blender configuration for Avalon."""
    sys.excepthook = pype_excepthook_handler
    pyblish.register_plugin_path(str(PUBLISH_PATH))
    avalon.register_plugin_path(avalon.Loader, str(LOAD_PATH))
    avalon.register_plugin_path(avalon.Creator, str(CREATE_PATH))

@@ -29,6 +30,7 @@ def install():

def uninstall():
    """Uninstall Blender configuration for Avalon."""
    sys.excepthook = ORIGINAL_EXCEPTHOOK
    pyblish.deregister_plugin_path(str(PUBLISH_PATH))
    avalon.deregister_plugin_path(avalon.Loader, str(LOAD_PATH))
    avalon.deregister_plugin_path(avalon.Creator, str(CREATE_PATH))
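The hunks above register a custom `sys.excepthook` on install and put the original back on uninstall. A minimal standalone sketch of that swap-and-restore pattern (illustrative only, independent of Blender or Avalon):

    import sys
    import traceback

    # Keep a reference to whatever handler was active before install().
    ORIGINAL_EXCEPTHOOK = sys.excepthook


    def custom_excepthook(exc_type, exc_value, exc_traceback):
        # Still print the traceback; a real handler could also log or report it.
        traceback.print_exception(exc_type, exc_value, exc_traceback)


    def install():
        sys.excepthook = custom_excepthook


    def uninstall():
        # Restore the previously active hook, as the hunk above does.
        sys.excepthook = ORIGINAL_EXCEPTHOOK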
@@ -286,7 +286,9 @@ class AppAction(BaseHandler):

        # Run SW if was found executable
        if execfile is not None:
            avalonlib.launch(executable=execfile, args=[], environment=env)
            popen = avalonlib.launch(
                executable=execfile, args=[], environment=env
            )
        else:
            return {
                'success': False,
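The change above keeps the process handle returned by `avalonlib.launch` instead of discarding it. As a rough illustration of the general idea, here is a stand-in built on `subprocess` (this is not the `avalonlib` API, just the pattern of returning the `Popen` object so callers can track or wait on the launched application):

    import subprocess
    import sys


    def launch(executable, args=None, environment=None):
        # Start the application and hand the process handle back to the caller.
        popen = subprocess.Popen(
            [executable] + list(args or []),
            env=environment,
        )
        return popen


    if __name__ == "__main__":
        proc = launch(sys.executable, ["-c", "print('launched')"])
        proc.wait()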
@@ -1,4 +1,5 @@
from avalon import api
from avalon.vendor import qargparse


def get_reference_node_parents(ref):

@@ -33,11 +34,29 @@ class ReferenceLoader(api.Loader):
    `update` logic.

    """
    def load(self,
             context,
             name=None,
             namespace=None,
             data=None):

    options = [
        qargparse.Integer(
            "count",
            label="Count",
            default=1,
            min=1,
            help="How many times to load?"
        ),
        qargparse.Double3(
            "offset",
            label="Position Offset",
            help="Offset loaded models for easier selection."
        )
    ]

    def load(
        self,
        context,
        name=None,
        namespace=None,
        options=None
    ):

        import os
        from avalon.maya import lib

@@ -46,29 +65,46 @@ class ReferenceLoader(api.Loader):
        assert os.path.exists(self.fname), "%s does not exist." % self.fname

        asset = context['asset']
        loaded_containers = []

        namespace = namespace or lib.unique_namespace(
            asset["name"] + "_",
            prefix="_" if asset["name"][0].isdigit() else "",
            suffix="_",
        )
        count = options.get("count") or 1
        for c in range(0, count):
            namespace = namespace or lib.unique_namespace(
                asset["name"] + "_",
                prefix="_" if asset["name"][0].isdigit() else "",
                suffix="_",
            )

        self.process_reference(context=context,
                               name=name,
                               namespace=namespace,
                               data=data)
            # Offset loaded subset
            if "offset" in options:
                offset = [i * c for i in options["offset"]]
                options["translate"] = offset

        # Only containerize if any nodes were loaded by the Loader
        nodes = self[:]
        if not nodes:
            return
        self.log.info(options)

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)
            self.process_reference(
                context=context,
                name=name,
                namespace=namespace,
                options=options
            )

            # Only containerize if any nodes were loaded by the Loader
            nodes = self[:]
            if not nodes:
                return

            loaded_containers.append(containerise(
                name=name,
                namespace=namespace,
                nodes=nodes,
                context=context,
                loader=self.__class__.__name__
            ))

            c += 1
            namespace = None
        return loaded_containers

    def process_reference(self, context, name, namespace, data):
        """To be implemented by subclass"""
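The reworked `load()` above loops `count` times, multiplies the `offset` by the iteration index, and resets the namespace so each copy gets a fresh one. A simplified, standalone sketch of that control flow, with mock callables in place of the Maya/Avalon calls (names are illustrative, not repository code):

    def load_copies(options, make_namespace, process_one):
        """Simplified version of the loop in the hunk above.

        options:        dict that may contain "count" (int) and "offset" (xyz list)
        make_namespace: callable returning a fresh unique namespace string
        process_one:    callable(namespace, translate) loading one copy
        """
        loaded = []
        namespace = None
        count = options.get("count") or 1
        for c in range(count):
            # A new namespace is generated every iteration because it is
            # reset to None at the end of the loop body.
            namespace = namespace or make_namespace()

            translate = None
            if "offset" in options:
                # Each copy is pushed further out: 0x, 1x, 2x ... the offset.
                translate = [i * c for i in options["offset"]]

            loaded.append(process_one(namespace, translate))
            namespace = None
        return loaded


    if __name__ == "__main__":
        counter = {"n": 0}

        def make_namespace():
            counter["n"] += 1
            return "asset_{:02d}".format(counter["n"])

        def process_one(namespace, translate):
            return (namespace, translate)

        print(load_copies({"count": 3, "offset": [10, 0, 0]},
                          make_namespace, process_one))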
@@ -128,6 +128,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        # Add custom attributes for AssetVersion
        assetversion_cust_attrs = {}
        intent_val = instance.context.data.get("intent")
        if intent_val and isinstance(intent_val, dict):
            intent_val = intent_val.get("value")

        if intent_val:
            assetversion_cust_attrs["intent"] = intent_val
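This hunk introduces a pattern that repeats throughout the commit (ExtractBurnin, IntegrateAssetNew, ExtractSlateFrame below): `intent` in the publish context may be either a plain string or a dict with `value`/`label` keys, so it is normalized before use. A small sketch of that normalization (hypothetical helper, not repository code):

    def normalize_intent(intent):
        """Return (value, label) for an intent that may be a str or a dict.

        Mirrors the pattern used across the hunks in this commit: newer
        publishes store intent as {"value": ..., "label": ...}, older ones
        as a plain string.
        """
        if intent and isinstance(intent, dict):
            return intent.get("value"), intent.get("label")
        return intent, intent


    print(normalize_intent({"value": "wip", "label": "Work in Progress"}))
    print(normalize_intent("final"))
    print(normalize_intent(None))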
@@ -1,4 +1,5 @@
import sys
import json
import pyblish.api
import six

@@ -18,6 +19,48 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
    # - note label must exist in Ftrack
    note_labels = []

    def get_intent_label(self, session, intent_value):
        if not intent_value:
            return

        intent_configurations = session.query(
            "CustomAttributeConfiguration where key is intent"
        ).all()
        if not intent_configurations:
            return

        intent_configuration = intent_configurations[0]
        if len(intent_configuration) > 1:
            self.log.warning((
                "Found more than one `intent` custom attribute."
                " Using first found."
            ))

        config = intent_configuration.get("config")
        if not config:
            return

        configuration = json.loads(config)
        items = configuration.get("data")
        if not items:
            return

        if sys.version_info[0] < 3:
            string_type = basestring
        else:
            string_type = str

        if isinstance(items, string_type):
            items = json.loads(items)

        intent_label = None
        for item in items:
            if item["value"] == intent_value:
                intent_label = item["menu"]
                break

        return intent_label

    def process(self, instance):
        comment = (instance.context.data.get("comment") or "").strip()
        if not comment:

@@ -26,17 +69,39 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):

        self.log.debug("Comment is set to `{}`".format(comment))

        session = instance.context.data["ftrackSession"]

        intent = instance.context.data.get("intent")
        if intent:
            msg = "Intent is set to `{}` and was added to comment.".format(
                intent
            )
        if intent and isinstance(intent, dict):
            intent_val = intent.get("value")
            intent_label = intent.get("label")
        else:
            intent_val = intent_label = intent

        final_label = None
        if intent_val:
            final_label = self.get_intent_label(session, intent_val)
            if final_label is None:
                final_label = intent_label

        # if intent label is set then format comment
        # - it is possible that intent_label is equal to "" (empty string)
        if final_label:
            msg = "Intent label is set to `{}`.".format(final_label)
            comment = self.note_with_intent_template.format(**{
                "intent": intent,
                "intent": final_label,
                "comment": comment
            })

        elif intent_val:
            msg = (
                "Intent is set to `{}` and was not added"
                " to comment because label is set to `{}`."
            ).format(intent_val, final_label)

        else:
            msg = "Intent is not set."

        self.log.debug(msg)

        asset_versions_key = "ftrackIntegratedAssetVersions"

@@ -45,7 +110,6 @@ class IntegrateFtrackNote(pyblish.api.InstancePlugin):
            self.log.info("There are any integrated AssetVersions")
            return

        session = instance.context.data["ftrackSession"]
        user = session.query(
            "User where username is \"{}\"".format(session.api_user)
        ).first()
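`get_intent_label` above queries the ftrack `intent` custom attribute and digs the human-readable label out of its JSON `config`. The sketch below reproduces only the JSON-parsing part with a made-up config payload so it runs standalone; the `session.query` call is the real ftrack_api usage shown in the hunk and is omitted here:

    import json

    # A made-up example of what an ftrack enumerator custom attribute
    # "config" field can look like: "data" is itself a JSON-encoded string
    # of {"value", "menu"} items, per the parsing in the hunk above.
    example_config = json.dumps({
        "data": json.dumps([
            {"value": "wip", "menu": "Work in Progress"},
            {"value": "final", "menu": "Final"},
        ])
    })


    def intent_label_from_config(config, intent_value):
        """Return the menu label matching ``intent_value``, or None."""
        configuration = json.loads(config)
        items = configuration.get("data")
        if not items:
            return None
        # "data" may still be a JSON string rather than a list.
        if isinstance(items, str):
            items = json.loads(items)
        for item in items:
            if item["value"] == intent_value:
                return item["menu"]
        return None


    print(intent_label_from_config(example_config, "wip"))    # Work in Progress
    print(intent_label_from_config(example_config, "other"))  # None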
@@ -6,10 +6,6 @@ Requires:
    username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001)
    datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder)

Optional:
    comment -> collect_comment *(pyblish.api.CollectorOrder)
    intent -> collected in pyblish-lite

Provides:
    context -> anatomy (pypeapp.Anatomy)
    context -> anatomyData
@@ -35,7 +35,7 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
    def _process_path(self, data):
        # validate basic necessary data
        data_err = "invalid json file - missing data"
        required = ["asset", "user", "intent", "comment",
        required = ["asset", "user", "comment",
                    "job", "instances", "session", "version"]
        assert all(elem in data.keys() for elem in required), data_err
@@ -51,10 +51,16 @@ class ExtractBurnin(pype.api.Extractor):
            "frame_end": frame_end_handle,
            "duration": duration,
            "version": int(version),
            "comment": instance.context.data.get("comment", ""),
            "intent": instance.context.data.get("intent", "")
            "comment": instance.context.data.get("comment", "")
        })

        intent_label = instance.context.data.get("intent")
        if intent_label and isinstance(intent_label, dict):
            intent_label = intent_label.get("label")

        if intent_label:
            prep_data["intent"] = intent_label

        # get anatomy project
        anatomy = instance.context.data['anatomy']
@@ -243,9 +243,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):

        instance.data['version'] = version['name']

        intent = context.data.get("intent")
        if intent is not None:
            anatomy_data["intent"] = intent
        intent_value = instance.context.data.get("intent")
        if intent_value and isinstance(intent_value, dict):
            intent_value = intent_value.get("value")

        if intent_value:
            anatomy_data["intent"] = intent_value

        anatomy = instance.context.data['anatomy']

@@ -653,9 +656,12 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
            "fps": context.data.get(
                "fps", instance.data.get("fps"))}

        intent = context.data.get("intent")
        if intent is not None:
            version_data["intent"] = intent
        intent_value = instance.context.data.get("intent")
        if intent_value and isinstance(intent_value, dict):
            intent_value = intent_value.get("value")

        if intent_value:
            version_data["intent"] = intent_value

        # Include optional data if present in
        optionals = [
@@ -238,8 +238,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
            )
            i += 1

        # Avoid copied pools and remove secondary pool
        payload["JobInfo"]["Pool"] = "none"
        # remove secondary pool
        payload["JobInfo"].pop("SecondaryPool", None)

        self.log.info("Submitting Deadline job ...")
@@ -24,7 +24,7 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
    icon = "code-fork"
    color = "orange"

    def process_reference(self, context, name, namespace, data):
    def process_reference(self, context, name, namespace, options):
        import maya.cmds as cmds
        from avalon import maya
        import pymel.core as pm

@@ -101,16 +101,18 @@ class ReferenceLoader(pype.maya.plugin.ReferenceLoader):
        cmds.setAttr(groupName + ".selectHandleY", cy)
        cmds.setAttr(groupName + ".selectHandleZ", cz)

        if data.get("post_process", True):
            if family == "rig":
                self._post_process_rig(name, namespace, context, data)
        if family == "rig":
            self._post_process_rig(name, namespace, context, options)
        else:
            if "translate" in options:
                cmds.setAttr(groupName + ".t", *options["translate"])

        return newNodes

    def switch(self, container, representation):
        self.update(container, representation)

    def _post_process_rig(self, name, namespace, context, data):
    def _post_process_rig(self, name, namespace, context, options):

        output = next((node for node in self if
                       node.endswith("out_SET")), None)
@@ -238,7 +238,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
            "expectedFiles": full_exp_files,
            "resolutionWidth": cmds.getAttr("defaultResolution.width"),
            "resolutionHeight": cmds.getAttr("defaultResolution.height"),
            "pixelAspect": cmds.getAttr("defaultResolution.height")
            "pixelAspect": cmds.getAttr("defaultResolution.pixelAspect")
        }

        # Apply each user defined attribute as data
@@ -94,11 +94,6 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
        step = instance.data.get("step", 1.0)
        bake_to_worldspace = instance.data("bakeToWorldSpace", True)

        # TODO: Implement a bake to non-world space
        # Currently it will always bake the resulting camera to world-space
        # and it does not allow to include the parent hierarchy, even though
        # with `bakeToWorldSpace` set to False it should include its
        # hierarchy to be correct with the family implementation.
        if not bake_to_worldspace:
            self.log.warning("Camera (Maya Ascii) export only supports world"
                             "space baked camera extractions. The disabled "

@@ -113,7 +108,7 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
                       framerange[1] + handles]

        # validate required settings
        assert len(cameras) == 1, "Not a single camera found in extraction"
        assert len(cameras) == 1, "Single camera must be found in extraction"
        assert isinstance(step, float), "Step must be a float value"
        camera = cameras[0]
        transform = cmds.listRelatives(camera, parent=True, fullPath=True)

@@ -124,21 +119,24 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
        path = os.path.join(dir_path, filename)

        # Perform extraction
        self.log.info("Performing camera bakes for: {0}".format(transform))
        with avalon.maya.maintained_selection():
            with lib.evaluation("off"):
                with avalon.maya.suspended_refresh():
                    baked = lib.bake_to_world_space(
                        transform,
                        frame_range=range_with_handles,
                        step=step
                    )
                    baked_shapes = cmds.ls(baked,
                                           type="camera",
                                           dag=True,
                                           shapes=True,
                                           long=True)

                    if bake_to_worldspace:
                        self.log.info(
                            "Performing camera bakes: {}".format(transform))
                        baked = lib.bake_to_world_space(
                            transform,
                            frame_range=range_with_handles,
                            step=step
                        )
                        baked_shapes = cmds.ls(baked,
                                               type="camera",
                                               dag=True,
                                               shapes=True,
                                               long=True)
                    else:
                        baked_shapes = cameras
                    # Fix PLN-178: Don't allow background color to be non-black
                    for cam in baked_shapes:
                        attrs = {"backgroundColorR": 0.0,

@@ -164,7 +162,8 @@ class ExtractCameraMayaAscii(pype.api.Extractor):
                          expressions=False)

        # Delete the baked hierarchy
        cmds.delete(baked)
        if bake_to_worldspace:
            cmds.delete(baked)

        massage_ma_file(path)
@@ -157,11 +157,13 @@ class ExtractSlateFrame(pype.api.Extractor):
            return

        comment = instance.context.data.get("comment")
        intent = instance.context.data.get("intent")
        intent_value = instance.context.data.get("intent")
        if intent_value and isinstance(intent_value, dict):
            intent_value = intent_value.get("value")

        try:
            node["f_submission_note"].setValue(comment)
            node["f_submitting_for"].setValue(intent)
            node["f_submitting_for"].setValue(intent_value or "")
        except NameError:
            return
        instance.data.pop("slateNode")