Merge branch 'develop' into feature/OP-3663_Hiero-export-of-large-number-of-shots-fill-up-disk

This commit is contained in:
Jakub Jezek 2023-02-10 11:22:37 +01:00
commit da46873cbc
No known key found for this signature in database
GPG key ID: 730D7C02726179A7
132 changed files with 3143 additions and 953 deletions


@@ -1,19 +0,0 @@
name: Automate Projects
on:
issues:
types: [opened, labeled]
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
jobs:
assign_one_project:
runs-on: ubuntu-latest
name: Assign to One Project
steps:
- name: Assign NEW bugs to triage
uses: srggrs/assign-one-project-github-action@1.2.0
if: contains(github.event.issue.labels.*.name, 'bug')
with:
project: 'https://github.com/pypeclub/pype/projects/2'
column_name: 'Needs triage'


@@ -13,7 +13,7 @@ jobs:
if: github.event.pull_request.milestone == null
uses: zoispag/action-assign-milestone@v1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
repo-token: "${{ secrets.YNPUT_BOT_TOKEN }}"
milestone: 'next-minor'
run_if_develop:
@@ -24,5 +24,5 @@ jobs:
if: github.event.pull_request.milestone == null
uses: zoispag/action-assign-milestone@v1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
milestone: 'next-patch'
repo-token: "${{ secrets.YNPUT_BOT_TOKEN }}"
milestone: 'next-patch'


@@ -12,7 +12,7 @@ jobs:
uses: "WyriHaximus/github-action-get-milestones@master"
id: milestones
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
- run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
id: querymilestone
@@ -31,7 +31,7 @@ jobs:
with:
title: 'next-patch'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
generate-next-minor:
runs-on: ubuntu-latest
@@ -40,7 +40,7 @@ jobs:
uses: "WyriHaximus/github-action-get-milestones@master"
id: milestones
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
- run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
id: querymilestone
@@ -59,4 +59,4 @@ jobs:
with:
title: 'next-minor'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"


@@ -14,10 +14,10 @@ jobs:
- name: 🚛 Checkout Code
uses: actions/checkout@v2
- name: 🔨 Merge develop to main
uses: everlytic/branch-merge@1.1.0
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
source_ref: 'develop'
target_branch: 'main'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
@@ -26,4 +26,4 @@ jobs:
uses: benc-uk/workflow-dispatch@v1
with:
workflow: Nightly Prerelease
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}


@@ -25,43 +25,15 @@ jobs:
- name: 🔎 Determine next version type
id: version_type
run: |
TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.GITHUB_TOKEN }})
echo ::set-output name=type::$TYPE
TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.YNPUT_BOT_TOKEN }})
echo "type=${TYPE}" >> $GITHUB_OUTPUT
- name: 💉 Inject new version into files
id: version
if: steps.version_type.outputs.type != 'skip'
run: |
RESULT=$(python ./tools/ci_tools.py --nightly --github_token ${{ secrets.GITHUB_TOKEN }})
echo ::set-output name=next_tag::$RESULT
# - name: "✏️ Generate full changelog"
# if: steps.version_type.outputs.type != 'skip'
# id: generate-full-changelog
# uses: heinrichreimer/github-changelog-generator-action@v2.3
# with:
# token: ${{ secrets.ADMIN_TOKEN }}
# addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
# issues: false
# issuesWoLabels: false
# sinceTag: "3.12.0"
# maxIssues: 100
# pullRequests: true
# prWoLabels: false
# author: false
# unreleased: true
# compareLink: true
# stripGeneratorNotice: true
# verbose: true
# unreleasedLabel: ${{ steps.version.outputs.next_tag }}
# excludeTagsRegex: "CI/.+"
# releaseBranch: "main"
- name: "🖨️ Print changelog to console"
if: steps.version_type.outputs.type != 'skip'
run: cat CHANGELOG.md
NEW_VERSION_TAG=$(python ./tools/ci_tools.py --nightly --github_token ${{ secrets.YNPUT_BOT_TOKEN }})
echo "next_tag=${NEW_VERSION_TAG}" >> $GITHUB_OUTPUT
- name: 💾 Commit and Tag
id: git_commit
@@ -80,7 +52,7 @@ jobs:
- name: Push to protected main branch
uses: CasperWA/push-protected@v2.10.0
with:
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}
branch: main
tags: true
unprotect_reviews: true
@@ -89,7 +61,7 @@ jobs:
uses: everlytic/branch-merge@1.1.0
if: steps.version_type.outputs.type != 'skip'
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
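The hunks above also migrate from the deprecated `::set-output` workflow command to appending to the file behind `$GITHUB_OUTPUT`. A minimal Python sketch of that shell pattern (the output name and value here are illustrative only):

import os

def set_step_output(name, value):
    # Same effect as the shell form: echo "name=value" >> $GITHUB_OUTPUT
    # GitHub Actions reads this file after the step finishes.
    with open(os.environ["GITHUB_OUTPUT"], "a") as stream:
        stream.write("{}={}\n".format(name, value))

set_step_output("next_tag", "3.15.1-nightly.2")  # illustrative value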


@@ -26,34 +26,12 @@ jobs:
- name: 💉 Inject new version into files
id: version
run: |
echo ::set-output name=current_version::${GITHUB_REF#refs/*/}
RESULT=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/})
LASTRELEASE=$(python ./tools/ci_tools.py --lastversion release)
NEW_VERSION=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/})
LAST_VERSION=$(python ./tools/ci_tools.py --lastversion release)
echo ::set-output name=last_release::$LASTRELEASE
echo ::set-output name=release_tag::$RESULT
# - name: "✏️ Generate full changelog"
# if: steps.version.outputs.release_tag != 'skip'
# id: generate-full-changelog
# uses: heinrichreimer/github-changelog-generator-action@v2.3
# with:
# token: ${{ secrets.ADMIN_TOKEN }}
# addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
# issues: false
# issuesWoLabels: false
# sinceTag: "3.12.0"
# maxIssues: 100
# pullRequests: true
# prWoLabels: false
# author: false
# unreleased: true
# compareLink: true
# stripGeneratorNotice: true
# verbose: true
# futureRelease: ${{ steps.version.outputs.release_tag }}
# excludeTagsRegex: "CI/.+"
# releaseBranch: "main"
echo "current_version=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
echo "last_release=${LAST_VERSION}" >> $GITHUB_OUTPUT
echo "release_tag=${NEW_VERSION}" >> $GITHUB_OUTPUT
- name: 💾 Commit and Tag
id: git_commit
@@ -70,43 +48,17 @@ jobs:
if: steps.version.outputs.release_tag != 'skip'
uses: CasperWA/push-protected@v2.10.0
with:
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}
branch: main
tags: true
unprotect_reviews: true
- name: "✏️ Generate last changelog"
if: steps.version.outputs.release_tag != 'skip'
id: generate-last-changelog
uses: heinrichreimer/github-changelog-generator-action@v2.2
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: ${{ steps.version.outputs.last_release }}
maxIssues: 100
pullRequests: true
prWoLabels: false
author: false
unreleased: true
compareLink: true
stripGeneratorNotice: true
verbose: true
futureRelease: ${{ steps.version.outputs.release_tag }}
excludeTagsRegex: "CI/.+"
releaseBranch: "main"
stripHeaders: true
base: 'none'
- name: 🚀 Github Release
if: steps.version.outputs.release_tag != 'skip'
uses: ncipollo/release-action@v1
with:
body: ${{ steps.generate-last-changelog.outputs.changelog }}
tag: ${{ steps.version.outputs.release_tag }}
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}
- name: ☠ Delete Pre-release
if: steps.version.outputs.release_tag != 'skip'
@@ -118,7 +70,7 @@ jobs:
if: steps.version.outputs.release_tag != 'skip'
uses: everlytic/branch-merge@1.1.0
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'


@@ -28,7 +28,7 @@ jobs:
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: 🧵 Install Requirements
shell: pwsh
run: |
@@ -64,27 +64,3 @@ jobs:
run: |
export SKIP_THIRD_PARTY_VALIDATION="1"
./tools/build.sh
# MacOS-latest:
# runs-on: macos-latest
# strategy:
# matrix:
# python-version: [3.9]
# steps:
# - name: 🚛 Checkout Code
# uses: actions/checkout@v2
# - name: Set up Python
# uses: actions/setup-python@v2
# with:
# python-version: ${{ matrix.python-version }}
# - name: 🧵 Install Requirements
# run: |
# ./tools/create_env.sh
# - name: 🔨 Build
# run: |
# ./tools/build.sh


@@ -9,4 +9,4 @@ repos:
- id: check-yaml
- id: check-added-large-files
- id: no-commit-to-branch
args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-]+)$).*' ]
args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
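The updated pattern additionally allows underscores in branch names (this commit's own branch, feature/OP-3663_Hiero-export-of-large-number-of-shots-fill-up-disk, needs it). A quick hedged check of the rule; note the no-commit-to-branch hook blocks any branch the pattern matches:

import re

pattern = (r"^(?!((release|enhancement|feature|bugfix|documentation"
           r"|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*")

branches = (
    "feature/OP-3663_Hiero-export-of-large-number-of-shots-fill-up-disk",
    "feature/bad name",  # space is not allowed, so this is blocked
    "develop",           # not of the "<type>/<name>" form, so blocked
)
for branch in branches:
    # The negative lookahead rejects exactly the well-formed branch names,
    # so a match means the commit would be blocked.
    blocked = re.match(pattern, branch) is not None
    print(branch, "->", "blocked" if blocked else "allowed")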


@@ -1,112 +0,0 @@
from .settings import (
get_system_settings,
get_project_settings,
get_current_project_settings,
get_anatomy_settings,
SystemSettings,
ProjectSettings
)
from .lib import (
PypeLogger,
Logger,
Anatomy,
execute,
run_subprocess,
version_up,
get_asset,
get_workdir_data,
get_version_from_path,
get_last_version_from_path,
get_app_environments_for_context,
source_hash,
get_latest_version,
get_local_site_id,
change_openpype_mongo_url,
create_project_folders,
get_project_basic_paths
)
from .lib.mongo import (
get_default_components
)
from .lib.applications import (
ApplicationManager
)
from .lib.avalon_context import (
BuildWorkfile
)
from . import resources
from .plugin import (
Extractor,
ValidatePipelineOrder,
ValidateContentsOrder,
ValidateSceneOrder,
ValidateMeshOrder,
)
# temporary fix, might
from .action import (
get_errored_instances_from_context,
RepairAction,
RepairContextAction
)
__all__ = [
"get_system_settings",
"get_project_settings",
"get_current_project_settings",
"get_anatomy_settings",
"get_project_basic_paths",
"SystemSettings",
"ProjectSettings",
"PypeLogger",
"Logger",
"Anatomy",
"execute",
"get_default_components",
"ApplicationManager",
"BuildWorkfile",
# Resources
"resources",
# plugin classes
"Extractor",
# ordering
"ValidatePipelineOrder",
"ValidateContentsOrder",
"ValidateSceneOrder",
"ValidateMeshOrder",
# action
"get_errored_instances_from_context",
"RepairAction",
"RepairContextAction",
# get contextual data
"version_up",
"get_asset",
"get_workdir_data",
"get_version_from_path",
"get_last_version_from_path",
"get_app_environments_for_context",
"source_hash",
"run_subprocess",
"get_latest_version",
"get_local_site_id",
"change_openpype_mongo_url",
"get_project_basic_paths",
"create_project_folders"
]


@@ -1,4 +1,5 @@
import os
from openpype.lib import PreLaunchHook
@@ -40,5 +41,13 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
self.log.info("Current context does not have any workfile yet.")
return
# Determine whether to open workfile post initialization.
if self.host_name == "maya":
key = "open_workfile_post_initialization"
if self.data["project_settings"]["maya"][key]:
self.log.debug("Opening workfile post initialization.")
self.data["env"]["OPENPYPE_" + key.upper()] = "1"
return
# Add path to workfile to arguments
self.launch_context.launch_args.append(last_workfile)


@@ -8,6 +8,7 @@ exists is used.
import os
from abc import ABCMeta, abstractmethod
import platform
import six
@@ -187,11 +188,19 @@ class HostDirmap(object):
self.log.debug("local overrides {}".format(active_overrides))
self.log.debug("remote overrides {}".format(remote_overrides))
current_platform = platform.system().lower()
for root_name, active_site_dir in active_overrides.items():
remote_site_dir = (
remote_overrides.get(root_name)
or sync_settings["sites"][remote_site]["root"][root_name]
)
if isinstance(remote_site_dir, dict):
remote_site_dir = remote_site_dir.get(current_platform)
if not remote_site_dir:
continue
if os.path.isdir(active_site_dir):
if "destination-path" not in mapping:
mapping["destination-path"] = []


@@ -1,3 +1,4 @@
import os
import logging
import contextlib
from abc import ABCMeta, abstractproperty
@@ -100,6 +101,30 @@ class HostBase(object):
pass
def get_current_project_name(self):
"""
Returns:
Union[str, None]: Current project name.
"""
return os.environ.get("AVALON_PROJECT")
def get_current_asset_name(self):
"""
Returns:
Union[str, None]: Current asset name.
"""
return os.environ.get("AVALON_ASSET")
def get_current_task_name(self):
"""
Returns:
Union[str, None]: Current task name.
"""
return os.environ.get("AVALON_TASK")
def get_current_context(self):
"""Get current context information.
@@ -111,19 +136,14 @@ class HostBase(object):
Default implementation returns values from 'legacy_io.Session'.
Returns:
dict: Context with 3 keys 'project_name', 'asset_name' and
'task_name'. All of them can be 'None'.
Dict[str, Union[str, None]]: Context with 3 keys 'project_name',
'asset_name' and 'task_name'. All of them can be 'None'.
"""
from openpype.pipeline import legacy_io
if legacy_io.is_installed():
legacy_io.install()
return {
"project_name": legacy_io.Session["AVALON_PROJECT"],
"asset_name": legacy_io.Session["AVALON_ASSET"],
"task_name": legacy_io.Session["AVALON_TASK"]
"project_name": self.get_current_project_name(),
"asset_name": self.get_current_asset_name(),
"task_name": self.get_current_task_name()
}
def get_context_title(self):


@@ -11,9 +11,15 @@ from openpype.pipeline import (
)
from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
from openpype.lib import prepare_template_data
from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS
class RenderCreator(Creator):
"""Creates 'render' instance for publishing.
Result of 'render' instance is video or sequence of images for particular
composition based on the configuration in its RenderQueue.
"""
identifier = "render"
label = "Render"
family = "render"
@@ -28,45 +34,6 @@
["RenderCreator"]
["defaults"])
def get_icon(self):
return resources.get_openpype_splash_filepath()
def collect_instances(self):
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='render' or 'renderLocal', use them
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family", '').replace("Local", ''))
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
instance = CreatedInstance.from_existing(
instance_data, self
)
self._add_instance_to_context(instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
api.get_stub().imprint(created_inst.get("instance_id"),
created_inst.data_to_store())
subset_change = _changes.get("subset")
if subset_change:
api.get_stub().rename_item(created_inst.data["members"][0],
subset_change[1])
def remove_instances(self, instances):
for instance in instances:
self._remove_instance_from_context(instance)
self.host.remove_instance(instance)
subset = instance.data["subset"]
comp_id = instance.data["members"][0]
comp = api.get_stub().get_item(comp_id)
if comp:
new_comp_name = comp.name.replace(subset, '')
if not new_comp_name:
new_comp_name = "dummyCompName"
api.get_stub().rename_item(comp_id,
new_comp_name)
def create(self, subset_name_from_ui, data, pre_create_data):
stub = api.get_stub() # only after After Effects is up
if pre_create_data.get("use_selection"):
@@ -82,10 +49,19 @@ class RenderCreator(Creator):
"if 'useSelection' or create at least "
"one composition."
)
use_composition_name = (pre_create_data.get("use_composition_name") or
len(comps) > 1)
for comp in comps:
if pre_create_data.get("use_composition_name"):
composition_name = comp.name
if use_composition_name:
if "{composition}" not in subset_name_from_ui.lower():
subset_name_from_ui += "{Composition}"
composition_name = re.sub(
"[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
"",
comp.name
)
dynamic_fill = prepare_template_data({"composition":
composition_name})
subset_name = subset_name_from_ui.format(**dynamic_fill)
@@ -129,8 +105,72 @@
]
return output
def get_icon(self):
return resources.get_openpype_splash_filepath()
def collect_instances(self):
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='render' or 'renderLocal', use them
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family", '').replace("Local", ''))
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
instance = CreatedInstance.from_existing(
instance_data, self
)
self._add_instance_to_context(instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
api.get_stub().imprint(created_inst.get("instance_id"),
created_inst.data_to_store())
subset_change = _changes.get("subset")
if subset_change:
api.get_stub().rename_item(created_inst.data["members"][0],
subset_change[1])
def remove_instances(self, instances):
for instance in instances:
self._remove_instance_from_context(instance)
self.host.remove_instance(instance)
subset = instance.data["subset"]
comp_id = instance.data["members"][0]
comp = api.get_stub().get_item(comp_id)
if comp:
new_comp_name = comp.name.replace(subset, '')
if not new_comp_name:
new_comp_name = "dummyCompName"
api.get_stub().rename_item(comp_id,
new_comp_name)
def get_detail_description(self):
return """Creator for Render instances"""
return """Creator for Render instances
Main publishable item in AfterEffects will be of the `render` family.
The result of this item (instance) is a picture sequence or video that
could be a final delivery product or be loaded and used in other DCCs.
Select a single composition and create an instance of the 'render'
family, or turn off 'Use selection' to create an instance for every
composition.
'Use composition name in subset' allows explicitly adding the
composition name into the created subset name.
The position of the composition name can be set in
`project_settings/global/tools/creator/subset_name_profiles` with some
form of '{composition}' placeholder.
The composition name will be used implicitly if multiple compositions
should be handled at the same time.
If the '{composition}' placeholder is not used in 'subset_name_profiles',
the composition name will be capitalized and appended to the end of the
subset name if necessary.
If the composition name is used, it will be cleaned of characters that
would cause issues in published file names.
"""
def get_dynamic_data(self, variant, task_name, asset_doc,
project_name, host_name, instance):

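A hedged illustration of the '{composition}' handling described in the creator above, using a stand-in for openpype.lib.prepare_template_data (assumed, from the code above, to expose casing variants of each key for str.format):

import re

# Assumed to mirror openpype.pipeline.create.SUBSET_NAME_ALLOWED_SYMBOLS.
SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_."

def prepare_template_data_stub(data):
    # Stand-in only: provide both "composition" and "Composition" keys.
    filled = {}
    for key, value in data.items():
        filled[key] = value
        filled[key.capitalize()] = value.capitalize()
    return filled

# Strip characters that are not allowed in subset (and file) names.
composition_name = re.sub(
    "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), "", "main comp:01"
)
dynamic_fill = prepare_template_data_stub({"composition": composition_name})
print("render{Composition}".format(**dynamic_fill))  # -> renderMaincomp01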

@@ -19,7 +19,6 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["camera"]
version = (0, 1, 0)
label = "Zero Keyframe"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]


@@ -14,7 +14,6 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
label = "Mesh Has UV's"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
optional = True


@@ -14,7 +14,6 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
label = "Mesh No Negative Scale"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]


@@ -19,7 +19,6 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model", "rig"]
version = (0, 1, 0)
label = "No Colons in names"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]


@@ -21,7 +21,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
version = (0, 1, 0)
label = "Transform Zero"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]


@@ -113,7 +113,7 @@ class HoudiniCreatorBase(object):
Dict[str, Any]: Shared data dictionary.
"""
if shared_data.get("houdini_cached_subsets") is not None:
if shared_data.get("houdini_cached_subsets") is None:
cache = dict()
cache_legacy = dict()
@@ -225,12 +225,12 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
self._add_instance_to_context(created_instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
for created_inst, changes in update_list:
instance_node = hou.node(created_inst.get("instance_node"))
new_values = {
key: new_value
for key, (_old_value, new_value) in _changes.items()
key: changes[key].new_value
for key in changes.changed_keys
}
imprint(
instance_node,

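For context on the `_changes` rewrite above: the second element of each update pair is no longer a dict of (old, new) tuples but a change-tracking object. A minimal sketch of the assumed interface:

class ChangeItem(object):
    # Assumed shape of one tracked change.
    def __init__(self, old_value, new_value):
        self.old_value = old_value
        self.new_value = new_value

class Changes(object):
    # Assumed shape of the per-instance changes object: mapping-style
    # access by key plus 'changed_keys' for keys whose value differs.
    def __init__(self, items):
        self._items = items

    @property
    def changed_keys(self):
        return [key for key, item in self._items.items()
                if item.old_value != item.new_value]

    def __getitem__(self, key):
        return self._items[key]

changes = Changes({"subset": ChangeItem("renderMain", "renderHero")})
print({key: changes[key].new_value for key in changes.changed_keys})
# -> {'subset': 'renderHero'}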

@@ -1,4 +1,5 @@
import os
import re
import logging
import platform
@@ -66,7 +67,7 @@ def generate_shelves():
)
continue
mandatory_attributes = {'name', 'script'}
mandatory_attributes = {'label', 'script'}
for tool_definition in shelf_definition.get('tools_list'):
# We verify that the label and script attributes of the tool
# are set
@@ -152,31 +153,32 @@ def get_or_create_tool(tool_definition, shelf):
Returns:
hou.Tool: The tool updated or the new one
"""
existing_tools = shelf.tools()
tool_label = tool_definition.get('label')
tool_label = tool_definition.get("label")
if not tool_label:
log.warning("Skipped shelf without label")
return
script_path = tool_definition["script"]
if not script_path or not os.path.exists(script_path):
log.warning("This path doesn't exist - {}".format(script_path))
return
existing_tools = shelf.tools()
existing_tool = next(
(tool for tool in existing_tools if tool.label() == tool_label),
None
)
with open(script_path) as stream:
script = stream.read()
tool_definition["script"] = script
if existing_tool:
tool_definition.pop('name', None)
tool_definition.pop('label', None)
tool_definition.pop("label", None)
existing_tool.setData(**tool_definition)
return existing_tool
tool_name = tool_label.replace(' ', '_').lower()
if not os.path.exists(tool_definition['script']):
log.warning(
"This path doesn't exist - {}".format(tool_definition['script'])
)
return
with open(tool_definition['script']) as f:
script = f.read()
tool_definition.update({'script': script})
new_tool = hou.shelves.newTool(name=tool_name, **tool_definition)
return new_tool
tool_name = re.sub(r"[^\w\d]+", "_", tool_label).lower()
return hou.shelves.newTool(name=tool_name, **tool_definition)


@@ -12,6 +12,11 @@ class MaxAddon(OpenPypeModule, IHostAddon):
def initialize(self, module_settings):
self.enabled = True
def add_implementation_envs(self, env, _app):
# Remove auto screen scale factor for Qt
# - let 3dsmax decide it's value
env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
def get_workfile_extensions(self):
return [".max"]


@@ -78,12 +78,12 @@ class MaxCreator(Creator, MaxCreatorBase):
self._add_instance_to_context(created_instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
for created_inst, changes in update_list:
instance_node = created_inst.get("instance_node")
new_values = {
key: new_value
for key, (_old_value, new_value) in _changes.items()
key: changes[key].new_value
for key in changes.changed_keys
}
imprint(
instance_node,


@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
"""Creator plugin for creating camera."""
from openpype.hosts.max.api import plugin
from openpype.pipeline import CreatedInstance
class CreateCamera(plugin.MaxCreator):
identifier = "io.openpype.creators.max.camera"
label = "Camera"
family = "camera"
icon = "gear"
def create(self, subset_name, instance_data, pre_create_data):
from pymxs import runtime as rt
sel_obj = list(rt.selection)
instance = super(CreateCamera, self).create(
subset_name,
instance_data,
pre_create_data) # type: CreatedInstance
container = rt.getNodeByName(instance.data.get("instance_node"))
# TODO: Disable "Add to Containers?" Panel
# parent the selected cameras into the container
for obj in sel_obj:
obj.parent = container
# for additional work on the node:
# instance_node = rt.getNodeByName(instance.get("instance_node"))


@@ -0,0 +1,49 @@
import os
from openpype.pipeline import (
load
)
class FbxLoader(load.LoaderPlugin):
"""Fbx Loader"""
families = ["camera"]
representations = ["fbx"]
order = -9
icon = "code-fork"
color = "white"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
filepath = os.path.normpath(self.fname)
fbx_import_cmd = (
f"""
FBXImporterSetParam "Animation" true
FBXImporterSetParam "Cameras" true
FBXImporterSetParam "AxisConversionMethod" true
FbxExporterSetParam "UpAxis" "Y"
FbxExporterSetParam "Preserveinstances" true
importFile @"{filepath}" #noPrompt using:FBXIMP
""")
self.log.debug(f"Executing command: {fbx_import_cmd}")
rt.execute(fbx_import_cmd)
container_name = f"{name}_CON"
asset = rt.getNodeByName(f"{name}")
# rename the container with "_CON"
container = rt.container(name=container_name)
asset.Parent = container
return container
def remove(self, container):
from pymxs import runtime as rt
node = container["node"]
rt.delete(node)


@@ -0,0 +1,50 @@
import os
from openpype.pipeline import (
load
)
class MaxSceneLoader(load.LoaderPlugin):
"""Max Scene Loader"""
families = ["camera"]
representations = ["max"]
order = -8
icon = "code-fork"
color = "green"
def load(self, context, name=None, namespace=None, data=None):
from pymxs import runtime as rt
path = os.path.normpath(self.fname)
# import the max scene by using "merge file"
path = path.replace('\\', '/')
merge_before = {
c for c in rt.rootNode.Children
if rt.classOf(c) == rt.Container
}
rt.mergeMaxFile(path)
merge_after = {
c for c in rt.rootNode.Children
if rt.classOf(c) == rt.Container
}
max_containers = merge_after.difference(merge_before)
if len(max_containers) != 1:
self.log.error("Something failed when loading.")
max_container = max_containers.pop()
container_name = f"{name}_CON"
# rename the container with "_CON"
# get the original container
container = rt.container(name=container_name)
max_container.Parent = container
return container
def remove(self, container):
from pymxs import runtime as rt
node = container["node"]
rt.delete(node)


@@ -15,7 +15,10 @@ from openpype.hosts.max.api import lib
class AbcLoader(load.LoaderPlugin):
"""Alembic loader."""
families = ["model", "animation", "pointcache"]
families = ["model",
"camera",
"animation",
"pointcache"]
label = "Load Alembic"
representations = ["abc"]
order = -10


@@ -0,0 +1,75 @@
import os
import pyblish.api
from openpype.pipeline import (
publish,
OptionalPyblishPluginMixin
)
from pymxs import runtime as rt
from openpype.hosts.max.api import (
maintained_selection,
get_all_children
)
class ExtractCameraAlembic(publish.Extractor,
OptionalPyblishPluginMixin):
"""
Extract Camera with AlembicExport
"""
order = pyblish.api.ExtractorOrder - 0.1
label = "Extract Alembic Camera"
hosts = ["max"]
families = ["camera"]
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
start = float(instance.data.get("frameStartHandle", 1))
end = float(instance.data.get("frameEndHandle", 1))
container = instance.data["instance_node"]
self.log.info("Extracting Camera ...")
stagingdir = self.staging_dir(instance)
filename = "{name}.abc".format(**instance.data)
path = os.path.join(stagingdir, filename)
# We run the alembic export
self.log.info("Writing alembic '%s' to '%s'" % (filename,
stagingdir))
export_cmd = (
f"""
AlembicExport.ArchiveType = #ogawa
AlembicExport.CoordinateSystem = #maya
AlembicExport.StartFrame = {start}
AlembicExport.EndFrame = {end}
AlembicExport.CustomAttributes = true
exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport
""")
self.log.debug(f"Executing command: {export_cmd}")
with maintained_selection():
# select and export
rt.select(get_all_children(rt.getNodeByName(container)))
rt.execute(export_cmd)
self.log.info("Performing Extraction ...")
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'abc',
'ext': 'abc',
'files': filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
path))


@@ -0,0 +1,75 @@
import os
import pyblish.api
from openpype.pipeline import (
publish,
OptionalPyblishPluginMixin
)
from pymxs import runtime as rt
from openpype.hosts.max.api import (
maintained_selection,
get_all_children
)
class ExtractCameraFbx(publish.Extractor,
OptionalPyblishPluginMixin):
"""
Extract Camera with FbxExporter
"""
order = pyblish.api.ExtractorOrder - 0.2
label = "Extract Fbx Camera"
hosts = ["max"]
families = ["camera"]
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
container = instance.data["instance_node"]
self.log.info("Extracting Camera ...")
stagingdir = self.staging_dir(instance)
filename = "{name}.fbx".format(**instance.data)
filepath = os.path.join(stagingdir, filename)
self.log.info("Writing fbx file '%s' to '%s'" % (filename,
filepath))
# Need to export:
# Animation = True
# Cameras = True
# AxisConversionMethod
fbx_export_cmd = (
f"""
FBXExporterSetParam "Animation" true
FBXExporterSetParam "Cameras" true
FBXExporterSetParam "AxisConversionMethod" "Animation"
FbxExporterSetParam "UpAxis" "Y"
FbxExporterSetParam "Preserveinstances" true
exportFile @"{filepath}" #noPrompt selectedOnly:true using:FBXEXP
""")
self.log.debug(f"Executing command: {fbx_export_cmd}")
with maintained_selection():
# select and export
rt.select(get_all_children(rt.getNodeByName(container)))
rt.execute(fbx_export_cmd)
self.log.info("Performing Extraction ...")
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'fbx',
'ext': 'fbx',
'files': filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
filepath))


@@ -0,0 +1,60 @@
import os
import pyblish.api
from openpype.pipeline import (
publish,
OptionalPyblishPluginMixin
)
from pymxs import runtime as rt
from openpype.hosts.max.api import (
maintained_selection,
get_all_children
)
class ExtractMaxSceneRaw(publish.Extractor,
OptionalPyblishPluginMixin):
"""
Extract Raw Max Scene with SaveSelected
"""
order = pyblish.api.ExtractorOrder - 0.2
label = "Extract Max Scene (Raw)"
hosts = ["max"]
families = ["camera"]
optional = True
def process(self, instance):
if not self.is_active(instance.data):
return
container = instance.data["instance_node"]
# publish the raw scene for camera
self.log.info("Extracting Raw Max Scene ...")
stagingdir = self.staging_dir(instance)
filename = "{name}.max".format(**instance.data)
max_path = os.path.join(stagingdir, filename)
self.log.info("Writing max file '%s' to '%s'" % (filename,
max_path))
if "representations" not in instance.data:
instance.data["representations"] = []
# saving max scene
with maintained_selection():
# need to figure out how to select the camera
rt.select(get_all_children(rt.getNodeByName(container)))
rt.execute(f'saveNodes selection "{max_path}" quiet:true')
self.log.info("Performing Extraction ...")
representation = {
'name': 'max',
'ext': 'max',
'files': filename,
"stagingDir": stagingdir,
}
instance.data["representations"].append(representation)
self.log.info("Extracted instance '%s' to: %s" % (instance.name,
max_path))


@@ -51,7 +51,7 @@ class ExtractAlembic(publish.Extractor):
order = pyblish.api.ExtractorOrder
label = "Extract Pointcache"
hosts = ["max"]
families = ["pointcache", "camera"]
families = ["pointcache"]
def process(self, instance):
start = float(instance.data.get("frameStartHandle", 1))


@@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
import pyblish.api
from openpype.pipeline import PublishValidationError
from pymxs import runtime as rt
class ValidateCameraContent(pyblish.api.InstancePlugin):
"""Validates Camera instance contents.
A Camera instance may only hold a SINGLE camera's transform
"""
order = pyblish.api.ValidatorOrder
families = ["camera"]
hosts = ["max"]
label = "Camera Contents"
camera_type = ["$Free_Camera", "$Target_Camera",
"$Physical_Camera", "$Target"]
def process(self, instance):
invalid = self.get_invalid(instance)
if invalid:
raise PublishValidationError("Camera instance must only include "
"camera (and camera target)")
def get_invalid(self, instance):
"""
Get invalid nodes if the instance is not a camera
"""
invalid = list()
container = instance.data["instance_node"]
self.log.info("Validating camera content for "
"{}".format(container))
con = rt.getNodeByName(container)
selection_list = list(con.Children)
for sel in selection_list:
# to avoid Attribute Error from pymxs wrapper
sel_tmp = str(sel)
found = False
for cam in self.camera_type:
if sel_tmp.startswith(cam):
found = True
break
if not found:
self.log.error("Camera not found")
invalid.append(sel)
return invalid


@@ -1,4 +1,13 @@
# -*- coding: utf-8 -*-
import os
import sys
# this might happen in some 3dsmax versions where PYTHONPATH isn't added
# to sys.path automatically
for path in os.environ["PYTHONPATH"].split(os.pathsep):
if path and path not in sys.path:
sys.path.append(path)
from openpype.hosts.max.api import MaxHost
from openpype.pipeline import install_host


@@ -5,6 +5,7 @@ import sys
import platform
import uuid
import math
import re
import json
import logging
@@ -254,11 +255,6 @@ def read(node):
return data
def _get_mel_global(name):
"""Return the value of a mel global variable"""
return mel.eval("$%s = $%s;" % (name, name))
def matrix_equals(a, b, tolerance=1e-10):
"""
Compares two matrices with an imperfection tolerance
@@ -624,15 +620,15 @@ class delete_after(object):
cmds.delete(self._nodes)
def get_current_renderlayer():
return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
def get_renderer(layer):
with renderlayer(layer):
return cmds.getAttr("defaultRenderGlobals.currentRenderer")
def get_current_renderlayer():
return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
@contextlib.contextmanager
def no_undo(flush=False):
"""Disable the undo queue during the context
@@ -1373,27 +1369,6 @@ def set_id(node, unique_id, overwrite=False):
cmds.setAttr(attr, unique_id, type="string")
# endregion ID
def get_reference_node(path):
"""
Get the reference node when the path is found being used in a reference
Args:
path (str): the file path to check
Returns:
node (str): name of the reference node in question
"""
try:
node = cmds.file(path, query=True, referenceNode=True)
except RuntimeError:
log.debug('File is not referenced : "{}"'.format(path))
return
reference_path = cmds.referenceQuery(path, filename=True)
if os.path.normpath(path) == os.path.normpath(reference_path):
return node
def set_attribute(attribute, value, node):
"""Adjust attributes based on the value from the attribute data
@@ -3379,3 +3354,34 @@ def iter_visible_nodes_in_range(nodes, start, end):
def get_attribute_input(attr):
connections = cmds.listConnections(attr, plugs=True, destination=False)
return connections[0] if connections else None
def write_xgen_file(data, filepath):
"""Overwrites data in .xgen files.
Quite naive approach to mainly overwrite "xgDataPath" and "xgProjectPath".
Args:
data (dict): Dictionary of key, value. Key matches with xgen file.
For example:
{"xgDataPath": "some/path"}
filepath (string): Absolute path of .xgen file.
"""
# Generate a regex lookup from line to key; basically
# match any of the keys in the form `\t{key}\t\t`
keys = "|".join(re.escape(key) for key in data.keys())
re_keys = re.compile("^\t({})\t\t".format(keys))
lines = []
with open(filepath, "r") as f:
for line in f:
match = re_keys.match(line)
if match:
key = match.group(1)
value = data[key]
line = "\t{}\t\t{}\n".format(key, value)
lines.append(line)
with open(filepath, "w") as f:
f.writelines(lines)
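A hedged usage sketch for write_xgen_file above, assuming the tab-delimited `\t<key>\t\t<value>` line format its regex targets (the paths are illustrative):

sample = "\txgDataPath\t\t/old/data\n\txgProjectPath\t\t/old/project\n"
with open("/tmp/sample.xgen", "w") as stream:
    stream.write(sample)

write_xgen_file({"xgDataPath": "/new/data"}, "/tmp/sample.xgen")

with open("/tmp/sample.xgen") as stream:
    print(stream.read())  # the xgDataPath line now reads /new/data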


@@ -50,7 +50,6 @@ def install():
parent="MayaWindow"
)
renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower()
# Create context menu
context_label = "{}, {}".format(
legacy_io.Session["AVALON_ASSET"],


@@ -514,6 +514,9 @@ def check_lock_on_current_file():
# add the lock file when opening the file
filepath = current_file()
# Skip if current file is 'untitled'
if not filepath:
return
if is_workfile_locked(filepath):
# add lockfile dialog
@@ -680,10 +683,12 @@ def before_workfile_save(event):
def after_workfile_save(event):
workfile_name = event["filename"]
if handle_workfile_locks():
if workfile_name:
if not is_workfile_locked(workfile_name):
create_workfile_lock(workfile_name)
if (
handle_workfile_locks()
and workfile_name
and not is_workfile_locked(workfile_name)
):
create_workfile_lock(workfile_name)
class MayaDirmap(HostDirmap):


@@ -300,6 +300,39 @@ class ReferenceLoader(Loader):
str(representation["_id"]),
type="string")
# When an animation or pointcache gets connected to an Xgen container,
# the compound attribute "xgenContainers" gets created. When animation
# containers gets updated we also need to update the cacheFileName on
# the Xgen collection.
compound_name = "xgenContainers"
if cmds.objExists("{}.{}".format(node, compound_name)):
import xgenm
container_amount = cmds.getAttr(
"{}.{}".format(node, compound_name), size=True
)
# loop through all compound children
for i in range(container_amount):
attr = "{}.{}[{}].container".format(node, compound_name, i)
objectset = cmds.listConnections(attr)[0]
reference_node = cmds.sets(objectset, query=True)[0]
palettes = cmds.ls(
cmds.referenceQuery(reference_node, nodes=True),
type="xgmPalette"
)
for palette in palettes:
for description in xgenm.descriptions(palette):
xgenm.setAttr(
"cacheFileName",
path.replace("\\", "/"),
palette,
description,
"SplinePrimitive"
)
# Refresh UI and viewport.
de = xgenm.xgGlobal.DescriptionEditor
de.refresh("Full")
def remove(self, container):
"""Remove an existing `container` from Maya scene


@@ -54,6 +54,7 @@ class CreateRender(plugin.Creator):
tileRendering (bool): Instance is set to tile rendering mode. We
won't submit actual render, but we'll make publish job to wait
for Tile Assembly job done and then publish.
strict_error_checking (bool): Enable/disable strict error checking on Deadline
See Also:
https://pype.club/docs/artist_hosts_maya#creating-basic-render-setup
@@ -271,6 +272,9 @@ class CreateRender(plugin.Creator):
secondary_pool = pool_setting["secondary_pool"]
self.data["secondaryPool"] = self._set_default_pool(pool_names,
secondary_pool)
strict_error_checking = maya_submit_dl.get("strict_error_checking",
True)
self.data["strict_error_checking"] = strict_error_checking
if muster_enabled:
self.log.info(">>> Loading Muster credentials ...")


@@ -2,9 +2,9 @@ from openpype.hosts.maya.api import plugin
class CreateXgen(plugin.Creator):
"""Xgen interactive export"""
"""Xgen"""
name = "xgen"
label = "Xgen Interactive"
label = "Xgen"
family = "xgen"
icon = "pagelines"


@@ -0,0 +1,153 @@
from maya import cmds
from openpype.pipeline import InventoryAction, get_representation_context
from openpype.hosts.maya.api.lib import get_id
class ConnectGeometry(InventoryAction):
"""Connect geometries within containers.
The source container will connect to the target containers by searching
for matching geometry IDs (cbId).
Source containers are of the families "animation" and "pointcache".
The connection will be made with a live world-space blendshape.
"""
label = "Connect Geometry"
icon = "link"
color = "white"
def process(self, containers):
# Validate selection is more than 1.
message = (
"Only 1 container selected. 2+ containers needed for this action."
)
if len(containers) == 1:
self.display_warning(message)
return
# Categorize containers by family.
containers_by_family = {}
for container in containers:
family = get_representation_context(
container["representation"]
)["subset"]["data"]["family"]
try:
containers_by_family[family].append(container)
except KeyError:
containers_by_family[family] = [container]
# Validate to only 1 source container.
source_containers = containers_by_family.get("animation", [])
source_containers += containers_by_family.get("pointcache", [])
source_container_namespaces = [
x["namespace"] for x in source_containers
]
message = (
"{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
"\"animation\" or \"pointcache\".".format(
len(source_containers), source_container_namespaces
)
)
if len(source_containers) != 1:
self.display_warning(message)
return
source_object = source_containers[0]["objectName"]
# Collect matching geometry transforms based on the cbId attribute.
target_containers = []
for family, containers in containers_by_family.items():
if family in ["animation", "pointcache"]:
continue
target_containers.extend(containers)
source_data = self.get_container_data(source_object)
matches = []
node_types = set()
for target_container in target_containers:
target_data = self.get_container_data(
target_container["objectName"]
)
node_types.update(target_data["node_types"])
for id, transform in target_data["ids"].items():
source_match = source_data["ids"].get(id)
if source_match:
matches.append([source_match, transform])
# Message user about what is about to happen.
if not matches:
self.display_warning("No matching geometries found.")
return
message = "Connecting geometries:\n\n"
for match in matches:
message += "{} > {}\n".format(match[0], match[1])
choice = self.display_warning(message, show_cancel=True)
if choice is False:
return
# Setup live worldspace blendshape connection.
for source, target in matches:
blendshape = cmds.blendShape(source, target)[0]
cmds.setAttr(blendshape + ".origin", 0)
cmds.setAttr(blendshape + "." + target.split(":")[-1], 1)
# Update Xgen if in any of the containers.
if "xgmPalette" in node_types:
cmds.xgmPreview()
def get_container_data(self, container):
"""Collects data about the container nodes.
Args:
container (dict): Container instance.
Returns:
data (dict):
"node_types": All node types in container nodes.
"ids": If the node is a mesh, we collect its parent transform
id.
"""
data = {"node_types": set(), "ids": {}}
ref_node = cmds.sets(container, query=True, nodesOnly=True)[0]
for node in cmds.referenceQuery(ref_node, nodes=True):
node_type = cmds.nodeType(node)
data["node_types"].add(node_type)
# Only interested in mesh transforms for connecting geometry with
# blendshape.
if node_type != "mesh":
continue
transform = cmds.listRelatives(node, parent=True)[0]
data["ids"][get_id(transform)] = transform
return data
def display_warning(self, message, show_cancel=False):
"""Show feedback to user.
Returns:
bool
"""
from Qt import QtWidgets
accept = QtWidgets.QMessageBox.Ok
if show_cancel:
buttons = accept | QtWidgets.QMessageBox.Cancel
else:
buttons = accept
state = QtWidgets.QMessageBox.warning(
None,
"",
message,
buttons=buttons,
defaultButton=accept
)
return state == accept


@@ -0,0 +1,168 @@
from maya import cmds
import xgenm
from openpype.pipeline import (
InventoryAction, get_representation_context, get_representation_path
)
class ConnectXgen(InventoryAction):
"""Connect Xgen with an animation or pointcache.
"""
label = "Connect Xgen"
icon = "link"
color = "white"
def process(self, containers):
# Validate selection is more than 1.
message = (
"Only 1 container selected. 2+ containers needed for this action."
)
if len(containers) == 1:
self.display_warning(message)
return
# Categorize containers by family.
containers_by_family = {}
for container in containers:
family = get_representation_context(
container["representation"]
)["subset"]["data"]["family"]
try:
containers_by_family[family].append(container)
except KeyError:
containers_by_family[family] = [container]
# Validate to only 1 source container.
source_containers = containers_by_family.get("animation", [])
source_containers += containers_by_family.get("pointcache", [])
source_container_namespaces = [
x["namespace"] for x in source_containers
]
message = (
"{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
"\"animation\" or \"pointcache\".".format(
len(source_containers), source_container_namespaces
)
)
if len(source_containers) != 1:
self.display_warning(message)
return
source_container = source_containers[0]
source_object = source_container["objectName"]
# Validate source representation is an alembic.
source_path = get_representation_path(
get_representation_context(
source_container["representation"]
)["representation"]
).replace("\\", "/")
message = "Animation container \"{}\" is not an alembic:\n{}".format(
source_container["namespace"], source_path
)
if not source_path.endswith(".abc"):
self.display_warning(message)
return
# Target containers.
target_containers = []
for family, containers in containers_by_family.items():
if family in ["animation", "pointcache"]:
continue
target_containers.extend(containers)
# Inform user of connections from source representation to target
# descriptions.
descriptions_data = []
connections_msg = ""
for target_container in target_containers:
reference_node = cmds.sets(
target_container["objectName"], query=True
)[0]
palettes = cmds.ls(
cmds.referenceQuery(reference_node, nodes=True),
type="xgmPalette"
)
for palette in palettes:
for description in xgenm.descriptions(palette):
descriptions_data.append([palette, description])
connections_msg += "\n{}/{}".format(palette, description)
message = "Connecting \"{}\" to:\n".format(
source_container["namespace"]
)
message += connections_msg
choice = self.display_warning(message, show_cancel=True)
if choice is False:
return
# Recreate "xgenContainers" attribute to reset.
compound_name = "xgenContainers"
attr = "{}.{}".format(source_object, compound_name)
if cmds.objExists(attr):
cmds.deleteAttr(attr)
cmds.addAttr(
source_object,
longName=compound_name,
attributeType="compound",
numberOfChildren=1,
multi=True
)
# Connect target containers.
for target_container in target_containers:
cmds.addAttr(
source_object,
longName="container",
attributeType="message",
parent=compound_name
)
index = target_containers.index(target_container)
cmds.connectAttr(
target_container["objectName"] + ".message",
source_object + ".{}[{}].container".format(
compound_name, index
)
)
# Setup cache on Xgen
object = "SplinePrimitive"
for palette, description in descriptions_data:
xgenm.setAttr("useCache", "true", palette, description, object)
xgenm.setAttr("liveMode", "false", palette, description, object)
xgenm.setAttr(
"cacheFileName", source_path, palette, description, object
)
# Refresh UI and viewport.
de = xgenm.xgGlobal.DescriptionEditor
de.refresh("Full")
def display_warning(self, message, show_cancel=False):
"""Show feedback to user.
Returns:
bool
"""
from Qt import QtWidgets
accept = QtWidgets.QMessageBox.Ok
if show_cancel:
buttons = accept | QtWidgets.QMessageBox.Cancel
else:
buttons = accept
state = QtWidgets.QMessageBox.warning(
None,
"",
message,
buttons=buttons,
defaultButton=accept
)
return state == accept


@@ -93,7 +93,20 @@ class ImportMayaLoader(load.LoaderPlugin):
"""
representations = ["ma", "mb", "obj"]
families = ["*"]
families = [
"model",
"pointcache",
"proxyAbc",
"animation",
"mayaAscii",
"mayaScene",
"setdress",
"layout",
"camera",
"rig",
"camerarig",
"staticMesh"
]
label = "Import"
order = 10


@@ -25,9 +25,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"camera",
"rig",
"camerarig",
"xgen",
"staticMesh",
"mvLook"]
representations = ["ma", "abc", "fbx", "mb"]
label = "Reference"


@@ -81,10 +81,11 @@ class VRayProxyLoader(load.LoaderPlugin):
c = colors.get(family)
if c is not None:
cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
cmds.setAttr("{0}.outlinerColor".format(group_node),
(float(c[0])/255),
(float(c[1])/255),
(float(c[2])/255)
cmds.setAttr(
"{0}.outlinerColor".format(group_node),
(float(c[0]) / 255),
(float(c[1]) / 255),
(float(c[2]) / 255)
)
return containerise(
@@ -101,7 +102,7 @@
assert cmds.objExists(node), "Missing container"
members = cmds.sets(node, query=True) or []
vraymeshes = cmds.ls(members, type="VRayMesh")
vraymeshes = cmds.ls(members, type="VRayProxy")
assert vraymeshes, "Cannot find VRayMesh in container"
# get all representations for this version


@@ -0,0 +1,173 @@
import os
import maya.cmds as cmds
import xgenm
from Qt import QtWidgets
import openpype.hosts.maya.api.plugin
from openpype.hosts.maya.api.lib import (
maintained_selection,
get_container_members,
attribute_values,
write_xgen_file
)
from openpype.hosts.maya.api import current_file
from openpype.pipeline import get_representation_path
class XgenLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
"""Load Xgen as reference"""
families = ["xgen"]
representations = ["ma", "mb"]
label = "Reference Xgen"
icon = "code-fork"
color = "orange"
def get_xgen_xgd_paths(self, palette):
_, maya_extension = os.path.splitext(current_file())
xgen_file = current_file().replace(
maya_extension,
"__{}.xgen".format(palette.replace("|", "").replace(":", "__"))
)
xgd_file = xgen_file.replace(".xgen", ".xgd")
return xgen_file, xgd_file
def process_reference(self, context, name, namespace, options):
# Validate workfile has a path.
if current_file() is None:
QtWidgets.QMessageBox.warning(
None,
"",
"Current workfile has not been saved. Please save the workfile"
" before loading an Xgen."
)
return
maya_filepath = self.prepare_root_value(
self.fname, context["project"]["name"]
)
# Reference xgen. Xgen does not like being referenced under a group.
new_nodes = []
with maintained_selection():
nodes = cmds.file(
maya_filepath,
namespace=namespace,
sharedReferenceFile=False,
reference=True,
returnNewNodes=True
)
xgen_palette = cmds.ls(
nodes, type="xgmPalette", long=True
)[0].replace("|", "")
xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)
# Change the cache and disk values of xgDataPath and xgProjectPath
# to ensure paths are setup correctly.
project_path = os.path.dirname(current_file()).replace("\\", "/")
xgenm.setAttr("xgProjectPath", project_path, xgen_palette)
data_path = "${{PROJECT}}xgen/collections/{};{}".format(
xgen_palette.replace(":", "__ns__"),
xgenm.getAttr("xgDataPath", xgen_palette)
)
xgenm.setAttr("xgDataPath", data_path, xgen_palette)
data = {"xgProjectPath": project_path, "xgDataPath": data_path}
write_xgen_file(data, xgen_file)
# This creates an expression attribute of float. If we did not add
# any changes to the collection, Xgen would not create an xgd file
# on save. That gives errors when launching the workfile again,
# because it tries to find the xgd file.
name = "custom_float_ignore"
if name not in xgenm.customAttrs(xgen_palette):
xgenm.addCustomAttr(
"custom_float_ignore", xgen_palette
)
shapes = cmds.ls(nodes, shapes=True, long=True)
new_nodes = (list(set(nodes) - set(shapes)))
self[:] = new_nodes
return new_nodes
def set_palette_attributes(self, xgen_palette, xgen_file, xgd_file):
cmds.setAttr(
"{}.xgBaseFile".format(xgen_palette),
os.path.basename(xgen_file),
type="string"
)
cmds.setAttr(
"{}.xgFileName".format(xgen_palette),
os.path.basename(xgd_file),
type="string"
)
cmds.setAttr("{}.xgExportAsDelta".format(xgen_palette), True)
def update(self, container, representation):
"""Workflow for updating Xgen.
- Copy and potentially overwrite the workspace .xgen file.
- Export changes to delta file.
- Set collection attributes to not include delta files.
- Update xgen maya file reference.
- Apply the delta file changes.
- Reset collection attributes to include delta files.
We have to use this workflow because, when referencing the xgen
collection, Maya implicitly imports the Xgen data from the xgen file, so
we don't have any control over when the delta file changes are applied.
There is an implicit increment of the xgen and delta files, due to
using the workfile basename.
"""
container_node = container["objectName"]
members = get_container_members(container_node)
xgen_palette = cmds.ls(
members, type="xgmPalette", long=True
)[0].replace("|", "")
xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
# Export current changes to apply later.
xgenm.createDelta(xgen_palette.replace("|", ""), xgd_file)
self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)
maya_file = get_representation_path(representation)
_, extension = os.path.splitext(maya_file)
new_xgen_file = maya_file.replace(extension, ".xgen")
data_path = ""
with open(new_xgen_file, "r") as f:
for line in f:
if line.startswith("\txgDataPath"):
line = line.rstrip()
data_path = line.split("\t")[-1]
break
project_path = os.path.dirname(current_file()).replace("\\", "/")
data_path = "${{PROJECT}}xgen/collections/{};{}".format(
xgen_palette.replace(":", "__ns__"),
data_path
)
data = {"xgProjectPath": project_path, "xgDataPath": data_path}
write_xgen_file(data, xgen_file)
attribute_data = {
"{}.xgFileName".format(xgen_palette): os.path.basename(xgen_file),
"{}.xgBaseFile".format(xgen_palette): "",
"{}.xgExportAsDelta".format(xgen_palette): False
}
with attribute_values(attribute_data):
super().update(container, representation)
xgenm.applyDelta(xgen_palette.replace("|", ""), xgd_file)


@@ -12,7 +12,6 @@ class CollectMayaWorkspace(pyblish.api.ContextPlugin):
label = "Maya Workspace"
hosts = ['maya']
version = (0, 1, 0)
def process(self, context):
workspace = cmds.workspace(rootDirectory=True, query=True)


@@ -318,7 +318,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
"aovSeparator": layer_render_products.layer_data.aov_separator, # noqa: E501
"renderSetupIncludeLights": render_instance.data.get(
"renderSetupIncludeLights"
)
),
"strict_error_checking": render_instance.data.get(
"strict_error_checking")
}
# Collect Deadline url if Deadline module is enabled


@@ -0,0 +1,71 @@
import os
from maya import cmds
import pyblish.api
from openpype.hosts.maya.api.lib import get_attribute_input
class CollectXgen(pyblish.api.InstancePlugin):
"""Collect Xgen"""
order = pyblish.api.CollectorOrder + 0.499999
label = "Collect Xgen"
families = ["xgen"]
def process(self, instance):
data = {
"xgmPalettes": cmds.ls(instance, type="xgmPalette", long=True),
"xgmDescriptions": cmds.ls(
instance, type="xgmDescription", long=True
),
"xgmSubdPatches": cmds.ls(instance, type="xgmSubdPatch", long=True)
}
data["xgenNodes"] = (
data["xgmPalettes"] +
data["xgmDescriptions"] +
data["xgmSubdPatches"]
)
if data["xgmPalettes"]:
data["xgmPalette"] = data["xgmPalettes"][0]
data["xgenConnections"] = {}
for node in data["xgmSubdPatches"]:
data["xgenConnections"][node] = {}
for attr in ["transform", "geometry"]:
input = get_attribute_input("{}.{}".format(node, attr))
data["xgenConnections"][node][attr] = input
# Collect all files under palette root as resources.
import xgenm
data_path = xgenm.getAttr(
"xgDataPath", data["xgmPalette"].replace("|", "")
).split(os.pathsep)[0]
data_path = data_path.replace(
"${PROJECT}",
xgenm.getAttr("xgProjectPath", data["xgmPalette"].replace("|", ""))
)
transfers = []
# Since we are duplicating this palette when extracting we predict that
# the name will be the basename without namespaces.
predicted_palette_name = data["xgmPalette"].split(":")[-1]
predicted_palette_name = predicted_palette_name.replace("|", "")
for root, _, files in os.walk(data_path):
for file in files:
source = os.path.join(root, file).replace("\\", "/")
destination = os.path.join(
instance.data["resourcesDir"],
"collections",
predicted_palette_name,
source.replace(data_path, "")[1:]
)
transfers.append((source, destination.replace("\\", "/")))
data["transfers"] = transfers
self.log.info(data)
instance.data.update(data)
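To make the transfer mapping above concrete: each file found under the collection's data path is re-rooted under the instance's resources directory, keyed by the predicted palette name. A hypothetical example (all paths invented for illustration):

# Assumed example values:
data_path = "/proj/xgen/collections/char_hair"
source = "/proj/xgen/collections/char_hair/descr/region.ptx"
# source.replace(data_path, "")[1:] -> "descr/region.ptx"
# destination becomes:
#   <resourcesDir>/collections/char_hair/descr/region.ptx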

View file

@ -20,8 +20,7 @@ class ExtractMayaSceneRaw(publish.Extractor):
"mayaScene",
"setdress",
"layout",
"camerarig",
"xgen"]
"camerarig"]
scene_type = "ma"
def process(self, instance):

View file

@ -0,0 +1,250 @@
import os
import shutil
import copy
from maya import cmds
import pyblish.api
from openpype.hosts.maya.api.lib import extract_alembic
from openpype.pipeline import publish
from openpype.lib import StringTemplate
class ExtractWorkfileXgen(publish.Extractor):
"""Extract Workfile Xgen.
When submitting a render, we need to prep the Xgen sidecar files.
"""
# Offset to run before workfile scene save.
order = pyblish.api.ExtractorOrder - 0.499
label = "Extract Workfile Xgen"
families = ["workfile"]
hosts = ["maya"]
def get_render_max_frame_range(self, context):
"""Return start to end frame range including all renderlayers in
context.
This will return the full frame range which includes all frames of the
renderlayer instances to be published/submitted.
Args:
context (pyblish.api.Context): Current publishing context.
Returns:
tuple or None: Start frame, end frame tuple if any renderlayers
found. Otherwise None is returned.
"""
def _is_active_renderlayer(i):
"""Return whether instance is active renderlayer"""
if not i.data.get("publish", True):
return False
is_renderlayer = (
"renderlayer" in i.data.get("families", []) or
i.data["family"] == "renderlayer"
)
return is_renderlayer
start_frame = None
end_frame = None
for instance in context:
if not _is_active_renderlayer(instance):
# Only consider renderlayer instances
continue
render_start_frame = instance.data["frameStart"]
render_end_frame = instance.data["frameEnd"]
if start_frame is None:
start_frame = render_start_frame
else:
start_frame = min(start_frame, render_start_frame)
if end_frame is None:
end_frame = render_end_frame
else:
end_frame = max(end_frame, render_end_frame)
if start_frame is None or end_frame is None:
return
return start_frame, end_frame
def process(self, instance):
transfers = []
# Validate there are any palettes in the scene.
if not cmds.ls(type="xgmPalette"):
self.log.debug(
"No collections found in the scene. Skipping Xgen extraction."
)
return
import xgenm
# Validate to extract only when we are publishing a renderlayer as
# well.
render_range = self.get_render_max_frame_range(instance.context)
if not render_range:
self.log.debug(
"No publishable renderlayers found in context. Skipping Xgen"
" extraction."
)
return
start_frame, end_frame = render_range
# We decrement start frame and increment end frame so motion blur will
# render correctly.
start_frame -= 1
end_frame += 1
# Extract patches alembic.
path_no_ext, _ = os.path.splitext(instance.context.data["currentFile"])
kwargs = {"attrPrefix": ["xgen"], "stripNamespaces": True}
alembic_files = []
for palette in cmds.ls(type="xgmPalette"):
patch_names = []
for description in xgenm.descriptions(palette):
for name in xgenm.boundGeometry(palette, description):
patch_names.append(name)
alembic_file = "{}__{}.abc".format(
path_no_ext, palette.replace(":", "__ns__")
)
extract_alembic(
alembic_file,
root=patch_names,
selection=False,
startFrame=float(start_frame),
endFrame=float(end_frame),
verbose=True,
**kwargs
)
alembic_files.append(alembic_file)
template_data = copy.deepcopy(instance.data["anatomyData"])
published_maya_path = StringTemplate(
instance.context.data["anatomy"].templates["publish"]["file"]
).format(template_data)
published_basename, _ = os.path.splitext(published_maya_path)
for source in alembic_files:
destination = os.path.join(
os.path.dirname(instance.data["resourcesDir"]),
os.path.basename(
source.replace(path_no_ext, published_basename)
)
)
transfers.append((source, destination))
# Validate that we are using the published workfile.
deadline_settings = instance.context.get("deadline")
if deadline_settings:
publish_settings = deadline_settings["publish"]
if not publish_settings["MayaSubmitDeadline"]["use_published"]:
self.log.debug(
"Not using the published workfile. Abort Xgen extraction."
)
return
# Collect Xgen and Delta files.
xgen_files = []
sources = []
current_dir = os.path.dirname(instance.context.data["currentFile"])
attrs = ["xgFileName", "xgBaseFile"]
for palette in cmds.ls(type="xgmPalette"):
for attr in attrs:
source = os.path.join(
current_dir, cmds.getAttr(palette + "." + attr)
)
if not os.path.exists(source):
continue
ext = os.path.splitext(source)[1]
if ext == ".xgen":
xgen_files.append(source)
if ext == ".xgd":
sources.append(source)
# Copy .xgen file to temporary location and modify.
staging_dir = self.staging_dir(instance)
for source in xgen_files:
destination = os.path.join(staging_dir, os.path.basename(source))
shutil.copy(source, destination)
lines = []
with open(destination, "r") as f:
for line in [line.rstrip() for line in f]:
if line.startswith("\txgProjectPath"):
path = os.path.dirname(instance.data["resourcesDir"])
line = "\txgProjectPath\t\t{}/".format(
path.replace("\\", "/")
)
lines.append(line)
with open(destination, "w") as f:
f.write("\n".join(lines))
sources.append(destination)
# Add resource files to workfile instance.
for source in sources:
basename = os.path.basename(source)
destination = os.path.join(
os.path.dirname(instance.data["resourcesDir"]), basename
)
transfers.append((source, destination))
destination_dir = os.path.join(
instance.data["resourcesDir"], "collections"
)
for palette in cmds.ls(type="xgmPalette"):
project_path = xgenm.getAttr("xgProjectPath", palette)
data_path = xgenm.getAttr("xgDataPath", palette)
data_path = data_path.replace("${PROJECT}", project_path)
for path in data_path.split(";"):
for root, _, files in os.walk(path):
for f in files:
source = os.path.join(root, f)
destination = "{}/{}{}".format(
destination_dir,
palette.replace(":", "__ns__"),
source.replace(path, "")
)
transfers.append((source, destination))
for source, destination in transfers:
self.log.debug("Transfer: {} > {}".format(source, destination))
instance.data["transfers"] = transfers
# Set palette attributes in preparation for workfile publish.
attrs = {"xgFileName": None, "xgBaseFile": ""}
data = {}
for palette in cmds.ls(type="xgmPalette"):
attrs["xgFileName"] = "resources/{}.xgen".format(
palette.replace(":", "__ns__")
)
for attr, value in attrs.items():
node_attr = palette + "." + attr
old_value = cmds.getAttr(node_attr)
try:
data[palette][attr] = old_value
except KeyError:
data[palette] = {attr: old_value}
cmds.setAttr(node_attr, value, type="string")
self.log.info(
"Setting \"{}\" on \"{}\"".format(value, node_attr)
)
cmds.setAttr(palette + "." + "xgExportAsDelta", False)
instance.data["xgenAttributes"] = data

View file

@ -0,0 +1,142 @@
import os
import copy
import tempfile
from maya import cmds
import xgenm
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
maintained_selection, attribute_values, write_xgen_file, delete_after
)
from openpype.lib import StringTemplate
class ExtractXgen(publish.Extractor):
"""Extract Xgen
Workflow:
- Duplicate nodes used for patches.
- Export palette and import onto duplicate nodes.
- Export/Publish duplicate nodes and palette.
- Export duplicate palette to .xgen file and add to publish.
- Publish all xgen files as resources.
"""
label = "Extract Xgen"
hosts = ["maya"]
families = ["xgen"]
scene_type = "ma"
def process(self, instance):
if "representations" not in instance.data:
instance.data["representations"] = []
staging_dir = self.staging_dir(instance)
maya_filename = "{}.{}".format(instance.data["name"], self.scene_type)
maya_filepath = os.path.join(staging_dir, maya_filename)
# Get published xgen file name.
template_data = copy.deepcopy(instance.data["anatomyData"])
template_data.update({"ext": "xgen"})
templates = instance.context.data["anatomy"].templates["publish"]
xgen_filename = StringTemplate(templates["file"]).format(template_data)
xgen_path = os.path.join(
self.staging_dir(instance), xgen_filename
).replace("\\", "/")
type = "mayaAscii" if self.scene_type == "ma" else "mayaBinary"
# Duplicate xgen setup.
with delete_after() as delete_bin:
duplicate_nodes = []
# Collect nodes to export.
for _, connections in instance.data["xgenConnections"].items():
transform_name = connections["transform"].split(".")[0]
# Duplicate the subd patch geometry transform.
duplicate_transform = cmds.duplicate(transform_name)[0]
delete_bin.append(duplicate_transform)
# Discard all children except the shape nodes.
shapes = cmds.listRelatives(duplicate_transform, shapes=True)
children = cmds.listRelatives(
duplicate_transform, children=True
)
cmds.delete(set(children) - set(shapes))
duplicate_transform = cmds.parent(
duplicate_transform, world=True
)[0]
duplicate_nodes.append(duplicate_transform)
# Export temp xgen palette files.
temp_xgen_path = os.path.join(
tempfile.gettempdir(), "temp.xgen"
).replace("\\", "/")
xgenm.exportPalette(
instance.data["xgmPalette"].replace("|", ""), temp_xgen_path
)
self.log.info("Extracted to {}".format(temp_xgen_path))
# Import xgen onto the duplicate.
with maintained_selection():
cmds.select(duplicate_nodes)
palette = xgenm.importPalette(temp_xgen_path, [])
delete_bin.append(palette)
# Export duplicated palettes.
xgenm.exportPalette(palette, xgen_path)
# Export Maya file.
attribute_data = {"{}.xgFileName".format(palette): xgen_filename}
with attribute_values(attribute_data):
with maintained_selection():
cmds.select(duplicate_nodes + [palette])
cmds.file(
maya_filepath,
force=True,
type=type,
exportSelected=True,
preserveReferences=False,
constructionHistory=True,
shader=True,
constraints=True,
expressions=True
)
self.log.info("Extracted to {}".format(maya_filepath))
if os.path.exists(temp_xgen_path):
os.remove(temp_xgen_path)
data = {
"xgDataPath": os.path.join(
instance.data["resourcesDir"],
"collections",
palette.replace(":", "__ns__")
).replace("\\", "/"),
"xgProjectPath": os.path.dirname(
instance.data["resourcesDir"]
).replace("\\", "/")
}
write_xgen_file(data, xgen_path)
# Adding representations.
representation = {
"name": "xgen",
"ext": "xgen",
"files": xgen_filename,
"stagingDir": staging_dir,
}
instance.data["representations"].append(representation)
representation = {
"name": self.scene_type,
"ext": self.scene_type,
"files": maya_filename,
"stagingDir": staging_dir
}
instance.data["representations"].append(representation)

View file

@ -1,64 +0,0 @@
import os
from maya import cmds
from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
suspended_refresh,
maintained_selection
)
class ExtractXgenCache(publish.Extractor):
"""Produce an alembic of just xgen interactive groom
"""
label = "Extract Xgen ABC Cache"
hosts = ["maya"]
families = ["xgen"]
optional = True
def process(self, instance):
# Collect the out set nodes
out_descriptions = [node for node in instance
if cmds.nodeType(node) == "xgmSplineDescription"]
start = 1
end = 1
self.log.info("Extracting Xgen Cache..")
dirname = self.staging_dir(instance)
parent_dir = self.staging_dir(instance)
filename = "{name}.abc".format(**instance.data)
path = os.path.join(parent_dir, filename)
with suspended_refresh():
with maintained_selection():
command = (
'-file '
+ path
+ ' -df "ogawa" -fr '
+ str(start)
+ ' '
+ str(end)
+ ' -step 1 -mxf -wfw'
)
for desc in out_descriptions:
command += (" -obj " + desc)
cmds.xgmSplineCache(export=True, j=command)
if "representations" not in instance.data:
instance.data["representations"] = []
representation = {
'name': 'abc',
'ext': 'abc',
'files': filename,
"stagingDir": dirname,
}
instance.data["representations"].append(representation)
self.log.info("Extracted {} to {}".format(instance, dirname))

View file

@ -0,0 +1,36 @@
from maya import cmds
import pyblish.api
class ResetXgenAttributes(pyblish.api.InstancePlugin):
"""Reset Xgen attributes.
When the incremental save of the workfile triggers, the Xgen attributes
change, so this plugin changes them back to the values from before publishing.
"""
label = "Reset Xgen Attributes."
# Offset to run after workfile increment plugin.
order = pyblish.api.IntegratorOrder + 10.0
families = ["workfile"]
def process(self, instance):
xgen_attributes = instance.data.get("xgenAttributes", {})
if not xgen_attributes:
return
for palette, data in xgen_attributes.items():
for attr, value in data.items():
node_attr = "{}.{}".format(palette, attr)
self.log.info(
"Setting \"{}\" on \"{}\"".format(value, node_attr)
)
cmds.setAttr(node_attr, value, type="string")
cmds.setAttr(palette + ".xgExportAsDelta", True)
# Need to save the scene, because the attribute changes above do not
# mark the scene as modified, so the user could exit without committing
# the changes.
self.log.info("Saving changes.")
cmds.file(save=True)
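The `xgenAttributes` mapping consumed here is produced by the workfile Xgen extractor above, which stores the pre-publish value per palette attribute. Its shape, with invented values for illustration:

instance.data["xgenAttributes"] = {
    "collection1": {
        "xgFileName": "scene__collection1.xgen",
        "xgBaseFile": "",
    },
}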

View file

@ -58,23 +58,23 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
# Filter families.
families = [instance.data["family"]]
families += instance.data.get("families", [])
families = list(set(families) & set(self.attributes.keys()))
families = list(set(families) & set(cls.attributes.keys()))
if not families:
continue
# Get all attributes to validate.
attributes = {}
for family in families:
for preset in self.attributes[family]:
for preset in cls.attributes[family]:
[node_name, attribute_name] = preset.split(".")
try:
attributes[node_name].update(
{attribute_name: self.attributes[family][preset]}
{attribute_name: cls.attributes[family][preset]}
)
except KeyError:
attributes.update({
node_name: {
attribute_name: self.attributes[family][preset]
attribute_name: cls.attributes[family][preset]
}
})

View file

@ -19,7 +19,6 @@ class ValidateColorSets(pyblish.api.Validator):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
label = 'Mesh ColorSets'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -11,10 +11,6 @@ from openpype.pipeline.publish import (
)
def float_round(num, places=0, direction=ceil):
return direction(num * (10**places)) / float(10**places)
class ValidateMayaUnits(pyblish.api.ContextPlugin):
"""Check if the Maya units are set correct"""
@ -36,6 +32,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
# Collected units
linearunits = context.data.get('linearUnits')
angularunits = context.data.get('angularUnits')
# TODO(antirotor): This is hack as for framerates having multiple
# decimal places. FTrack is ceiling decimal values on
# fps to two decimal places but Maya 2019+ is reporting those fps
@ -43,7 +40,7 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
# rounding, we have to round those numbers coming from Maya.
# NOTE: this must be revisited yet again as it seems that Ftrack is
# now flooring the value?
fps = float_round(context.data.get('fps'), 2, ceil)
fps = mayalib.float_round(context.data.get('fps'), 2, ceil)
# TODO: replace query with 'context.data["assetEntity"]'
asset_doc = get_current_project_asset()

View file

@ -19,7 +19,6 @@ class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ["maya"]
families = ["model"]
category = "geometry"
label = "Mesh Arnold Attributes"
actions = [
openpype.hosts.maya.api.action.SelectInvalidAction,

View file

@ -48,7 +48,6 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
label = 'Mesh Has UVs'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
optional = True

View file

@ -15,8 +15,6 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
version = (0, 1, 0)
label = 'Mesh Lamina Faces'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

View file

@ -19,8 +19,6 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
families = ['model']
hosts = ['maya']
category = 'geometry'
version = (0, 1, 0)
label = 'Mesh Edge Length Non Zero'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
optional = True

View file

@ -20,8 +20,6 @@ class ValidateMeshNormalsUnlocked(pyblish.api.Validator):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
version = (0, 1, 0)
label = 'Mesh Normals Unlocked'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -235,7 +235,6 @@ class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
label = 'Mesh Has Overlapping UVs'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
optional = True

View file

@ -21,9 +21,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model', 'pointcache']
category = 'uv'
optional = True
version = (0, 1, 0)
label = "Mesh Single UV Set"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -63,7 +63,6 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ['maya']
families = ['model']
category = 'geometry'
label = 'Mesh Vertices Have Edges'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -16,7 +16,6 @@ class ValidateNoDefaultCameras(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['camera']
version = (0, 1, 0)
label = "No Default Cameras"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

View file

@ -23,8 +23,6 @@ class ValidateNoNamespace(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
category = 'cleanup'
version = (0, 1, 0)
label = 'No Namespaces'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -43,8 +43,6 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
category = 'cleanup'
version = (0, 1, 0)
label = 'No Empty/Null Transforms'
actions = [RepairAction,
openpype.hosts.maya.api.action.SelectInvalidAction]

View file

@ -24,7 +24,6 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['rig']
version = (0, 1, 0)
label = "Joints Hidden"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -31,8 +31,6 @@ class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin):
order = ValidatePipelineOrder
hosts = ['maya']
category = 'scene'
version = (0, 1, 0)
label = 'Maya Workspace Set'
def process(self, context):

View file

@ -38,9 +38,7 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
category = 'cleanup'
optional = True
version = (0, 1, 0)
label = "Shape Default Naming"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
RepairAction]

View file

@ -32,9 +32,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ['maya']
families = ['model']
category = 'cleanup'
optional = True
version = (0, 1, 0)
label = 'Suffix Naming Conventions'
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
SUFFIX_NAMING_TABLE = {"mesh": ["_GEO", "_GES", "_GEP", "_OSD"],

View file

@ -18,8 +18,6 @@ class ValidateTransformZero(pyblish.api.Validator):
order = ValidateContentsOrder
hosts = ["maya"]
families = ["model"]
category = "geometry"
version = (0, 1, 0)
label = "Transform Zero (Freeze)"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

View file

@ -13,7 +13,6 @@ class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin):
order = ValidateMeshOrder
hosts = ["maya"]
families = ["staticMesh"]
category = "geometry"
label = "Mesh is Triangulated"
actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
active = False

View file

@ -0,0 +1,18 @@
from maya import cmds
import pyblish.api
from openpype.pipeline import PublishValidationError
class ValidateVray(pyblish.api.InstancePlugin):
"""Validate general Vray setup."""
order = pyblish.api.ValidatorOrder
label = 'VRay'
hosts = ["maya"]
families = ["vrayproxy"]
def process(self, instance):
# Validate vray plugin is loaded.
if not cmds.pluginInfo("vrayformaya", query=True, loaded=True):
raise PublishValidationError("Vray plugin is not loaded.")

View file

@ -0,0 +1,59 @@
import json
import maya.cmds as cmds
import xgenm
import pyblish.api
from openpype.pipeline.publish import PublishValidationError
class ValidateXgen(pyblish.api.InstancePlugin):
"""Validate Xgen data."""
label = "Validate Xgen"
order = pyblish.api.ValidatorOrder
hosts = ["maya"]
families = ["xgen"]
def process(self, instance):
set_members = instance.data.get("setMembers")
# Only 1 collection/node per instance.
if len(set_members) != 1:
raise PublishValidationError(
"Only one collection per instance is allowed."
" Found:\n{}".format(set_members)
)
# Only xgen palette node is allowed.
node_type = cmds.nodeType(set_members[0])
if node_type != "xgmPalette":
raise PublishValidationError(
"Only node of type \"xgmPalette\" are allowed. Referred to as"
" \"collection\" in the Maya UI."
" Node type found: {}".format(node_type)
)
# Can't have inactive modifiers in the collection, because Xgen will
# try to look for them when loading.
palette = instance.data["xgmPalette"].replace("|", "")
inactive_modifiers = {}
for description in instance.data["xgmDescriptions"]:
description = description.split("|")[-2]
modifier_names = xgenm.fxModules(palette, description)
for name in modifier_names:
attr = xgenm.getAttr("active", palette, description, name)
# Attribute values are lowercase strings of false/true.
if attr == "false":
try:
inactive_modifiers[description].append(name)
except KeyError:
inactive_modifiers[description] = [name]
if inactive_modifiers:
raise PublishValidationError(
"There are inactive modifiers on the collection. "
"Please delete these:\n{}".format(
json.dumps(inactive_modifiers, indent=4, sort_keys=True)
)
)

View file

@ -1,16 +1,33 @@
import os
from functools import partial
from openpype.settings import get_project_settings
from openpype.pipeline import install_host
from openpype.hosts.maya.api import MayaHost
from maya import cmds
host = MayaHost()
install_host(host)
print("starting OpenPype usersetup")
print("Starting OpenPype usersetup...")
# build a shelf
# Open Workfile Post Initialization.
key = "OPENPYPE_OPEN_WORKFILE_POST_INITIALIZATION"
if bool(int(os.environ.get(key, "0"))):
cmds.evalDeferred(
partial(
cmds.file,
os.environ["AVALON_LAST_WORKFILE"],
open=True,
force=True
),
lowestPriority=True
)
# Build a shelf.
settings = get_project_settings(os.environ['AVALON_PROJECT'])
shelf_preset = settings['maya'].get('project_shelf')
@ -26,7 +43,10 @@ if shelf_preset:
print(import_string)
exec(import_string)
cmds.evalDeferred("mlib.shelf(name=shelf_preset['name'], iconPath=icon_path, preset=shelf_preset)")
cmds.evalDeferred(
"mlib.shelf(name=shelf_preset['name'], iconPath=icon_path,"
" preset=shelf_preset)"
)
print("finished OpenPype usersetup")
print("Finished OpenPype usersetup.")

View file

@ -53,12 +53,18 @@ class GizmoMenu():
item_type = item.get("sourcetype")
if item_type in ("python", "file"):
if item_type == "python":
parent.addCommand(
item["title"],
command=str(item["command"]),
icon=item.get("icon"),
shortcut=item.get("hotkey")
shortcut=item.get("shortcut")
)
elif item_type == "file":
parent.addCommand(
item['title'],
"nuke.createNode('{}')".format(item.get('file_name')),
shortcut=item.get('shortcut')
)
# add separator
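For reference, a hypothetical menu preset exercising both branches above could look like this (field names are the ones read by the code; values are invented):

gizmo_items = [
    {
        "sourcetype": "python",
        "title": "Set Shot Frame Range",
        "command": "print('setting range')",
        "icon": "range.png",
        "shortcut": "ctrl+alt+r",
    },
    {
        "sourcetype": "file",
        "title": "LensDistort",
        "file_name": "LensDistort",
        "shortcut": "",
    },
]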

View file

@ -1,7 +1,7 @@
import os
import nuke
import pyblish.api
import openpype.api as api
from openpype.lib import get_version_from_path
import openpype.hosts.nuke.api as napi
from openpype.pipeline import KnownPublishError
@ -57,7 +57,7 @@ class CollectContextData(pyblish.api.ContextPlugin):
"fps": root_node['fps'].value(),
"currentFile": current_file,
"version": int(api.get_version_from_path(current_file)),
"version": int(get_version_from_path(current_file)),
"host": pyblish.api.current_host(),
"hostVersion": nuke.NUKE_VERSION_STRING

View file

@ -5,7 +5,7 @@ from openpype.lib import BoolDef
from openpype.pipeline import (
Creator,
CreatedInstance,
legacy_io
CreatorError
)
from openpype.lib import prepare_template_data
from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS
@ -13,27 +13,16 @@ from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances
class ImageCreator(Creator):
"""Creates image instance for publishing."""
"""Creates image instance for publishing.
Result of 'image' instance is image of all visible layers, or image(s) of
selected layers.
"""
identifier = "image"
label = "Image"
family = "image"
description = "Image creator"
def collect_instances(self):
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='image'
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family"))
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
layer = api.stub().get_layer(instance_data["members"][0])
instance_data["layer"] = layer
instance = CreatedInstance.from_existing(
instance_data, self
)
self._add_instance_to_context(instance)
def create(self, subset_name_from_ui, data, pre_create_data):
groups_to_create = []
top_layers_to_wrap = []
@ -59,9 +48,10 @@ class ImageCreator(Creator):
try:
group = stub.group_selected_layers(subset_name_from_ui)
except:
raise ValueError("Cannot group locked Bakcground layer!")
raise CreatorError("Cannot group locked Background layer!")
groups_to_create.append(group)
# create empty group if nothing selected
if not groups_to_create and not top_layers_to_wrap:
group = stub.create_group(subset_name_from_ui)
groups_to_create.append(group)
@ -73,13 +63,16 @@ class ImageCreator(Creator):
groups_to_create.append(group)
layer_name = ''
creating_multiple_groups = len(groups_to_create) > 1
# Use the artist-chosen option OR force the layer name if more subsets
# are created, to differentiate them.
use_layer_name = (pre_create_data.get("use_layer_name") or
len(groups_to_create) > 1)
for group in groups_to_create:
subset_name = subset_name_from_ui # reset to name from creator UI
layer_names_in_hierarchy = []
created_group_name = self._clean_highlights(stub, group.name)
if creating_multiple_groups:
if use_layer_name:
layer_name = re.sub(
"[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
"",
@ -112,6 +105,21 @@ class ImageCreator(Creator):
stub.rename_layer(group.id,
stub.PUBLISH_ICON + created_group_name)
def collect_instances(self):
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='image'
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family"))
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
layer = api.stub().get_layer(instance_data["members"][0])
instance_data["layer"] = layer
instance = CreatedInstance.from_existing(
instance_data, self
)
self._add_instance_to_context(instance)
def update_instances(self, update_list):
self.log.debug("update_list:: {}".format(update_list))
for created_inst, _changes in update_list:
@ -137,12 +145,42 @@ class ImageCreator(Creator):
label="Create only for selected"),
BoolDef("create_multiple",
default=True,
label="Create separate instance for each selected")
label="Create separate instance for each selected"),
BoolDef("use_layer_name",
default=False,
label="Use layer name in subset")
]
return output
def get_detail_description(self):
return """Creator for Image instances"""
return """Creator for Image instances
The main publishable item in Photoshop will be of the `image` family.
The result of this item (instance) is a picture that can be loaded and
used in other DCCs (for example as a single layer in a composition in
AfterEffects, a reference in Maya etc.).
There are a couple of options for what to publish:
- separate image per selected layer (or group of layers)
- one image for all selected layers
- all visible layers (groups) flattened into a single image
In most cases you would like to keep `Create only for selected`
toggled on and select what you would like to publish.
Toggling this option off will allow you to create an instance for all
visible layers without a need to select them explicitly.
Use 'Create separate instance for each selected' to create separate
images per selected layer (group of layers).
'Use layer name in subset' will explicitly add the layer name to the
subset name. The position of this name is configurable in
`project_settings/global/tools/creator/subset_name_profiles`.
If the layer placeholder ({layer}) is not used in `subset_name_profiles`
but the layer name should be used (set explicitly in the UI, or
implicitly if multiple images should be created), it is added in
capitalized form as a suffix to the subset name.
"""
def _handle_legacy(self, instance_data):
"""Converts old instances to new format."""

View file

@ -37,7 +37,7 @@ class TrayPublisherHost(HostBase, IPublishHost):
return HostContext.get_context_data()
def update_context_data(self, data, changes):
HostContext.save_context_data(data, changes)
HostContext.save_context_data(data)
def set_project_name(self, project_name):
# TODO Deregister project specific plugins and register new project

View file

@ -33,6 +33,8 @@ class BatchMovieCreator(TrayPublishCreator):
create_allow_context_change = False
version_regex = re.compile(r"^(.+)_v([0-9]+)$")
# Position batch creator after simple creators
order = 110
def __init__(self, project_settings, *args, **kwargs):
super(BatchMovieCreator, self).__init__(project_settings,

View file

@ -30,7 +30,7 @@ from .vendor_bin_utils import (
)
from .attribute_definitions import (
AbtractAttrDef,
AbstractAttrDef,
UIDef,
UISeparatorDef,
@ -246,7 +246,7 @@ __all__ = [
"get_ffmpeg_tool_path",
"is_oiio_supported",
"AbtractAttrDef",
"AbstractAttrDef",
"UIDef",
"UISeparatorDef",

View file

@ -20,7 +20,7 @@ def register_attr_def_class(cls):
Registered definitions are currently used to deserialize data to objects.
Attrs:
cls (AbtractAttrDef): Non-abstract class to be registered with unique
cls (AbstractAttrDef): Non-abstract class to be registered with unique
'type' attribute.
Raises:
@ -36,7 +36,7 @@ def get_attributes_keys(attribute_definitions):
"""Collect keys from list of attribute definitions.
Args:
attribute_definitions (List[AbtractAttrDef]): Objects of attribute
attribute_definitions (List[AbstractAttrDef]): Objects of attribute
definitions.
Returns:
@ -57,8 +57,8 @@ def get_default_values(attribute_definitions):
"""Receive default values for attribute definitions.
Args:
attribute_definitions (List[AbtractAttrDef]): Attribute definitions for
which default values should be collected.
attribute_definitions (List[AbstractAttrDef]): Attribute definitions
for which default values should be collected.
Returns:
Dict[str, Any]: Default values for passed attribute definitions.
@ -76,15 +76,15 @@ def get_default_values(attribute_definitions):
class AbstractAttrDefMeta(ABCMeta):
"""Meta class to validate existence of 'key' attribute.
"""Metaclass to validate existence of 'key' attribute.
Each object of `AbtractAttrDef` mus have defined 'key' attribute.
Each object of `AbstractAttrDef` must have a defined 'key' attribute.
"""
def __call__(self, *args, **kwargs):
obj = super(AbstractAttrDefMeta, self).__call__(*args, **kwargs)
init_class = getattr(obj, "__init__class__", None)
if init_class is not AbtractAttrDef:
if init_class is not AbstractAttrDef:
raise TypeError("{} super was not called in __init__.".format(
type(obj)
))
@ -92,7 +92,7 @@ class AbstractAttrDefMeta(ABCMeta):
@six.add_metaclass(AbstractAttrDefMeta)
class AbtractAttrDef(object):
class AbstractAttrDef(object):
"""Abstraction of attribute definiton.
Each attribute definition must have implemented validation and
@ -145,7 +145,7 @@ class AbtractAttrDef(object):
self.disabled = disabled
self._id = uuid.uuid4().hex
self.__init__class__ = AbtractAttrDef
self.__init__class__ = AbstractAttrDef
@property
def id(self):
@ -154,7 +154,15 @@ class AbtractAttrDef(object):
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return self.key == other.key
return (
self.key == other.key
and self.hidden == other.hidden
and self.default == other.default
and self.disabled == other.disabled
)
def __ne__(self, other):
return not self.__eq__(other)
@abstractproperty
def type(self):
@ -212,7 +220,7 @@ class AbtractAttrDef(object):
# UI attribute definitions won't hold value
# -----------------------------------------
class UIDef(AbtractAttrDef):
class UIDef(AbstractAttrDef):
is_value_def = False
def __init__(self, key=None, default=None, *args, **kwargs):
@ -237,7 +245,7 @@ class UILabelDef(UIDef):
# Attribute definitions should hold value
# ---------------------------------------
class UnknownDef(AbtractAttrDef):
class UnknownDef(AbstractAttrDef):
"""Definition is not known because definition is not available.
This attribute can be used to keep existing data unchanged but does not
@ -254,7 +262,7 @@ class UnknownDef(AbtractAttrDef):
return value
class HiddenDef(AbtractAttrDef):
class HiddenDef(AbstractAttrDef):
"""Hidden value of Any type.
This attribute can be used for UI purposes to pass values related
@ -274,7 +282,7 @@ class HiddenDef(AbtractAttrDef):
return value
class NumberDef(AbtractAttrDef):
class NumberDef(AbstractAttrDef):
"""Number definition.
Number can have defined minimum/maximum value and decimal points. Value
@ -350,7 +358,7 @@ class NumberDef(AbtractAttrDef):
return round(float(value), self.decimals)
class TextDef(AbtractAttrDef):
class TextDef(AbstractAttrDef):
"""Text definition.
Text can have multiline option so endline characters are allowed regex
@ -415,7 +423,7 @@ class TextDef(AbtractAttrDef):
return data
class EnumDef(AbtractAttrDef):
class EnumDef(AbstractAttrDef):
"""Enumeration of single item from items.
Args:
@ -457,7 +465,7 @@ class EnumDef(AbtractAttrDef):
return self.default
def serialize(self):
data = super(TextDef, self).serialize()
data = super(EnumDef, self).serialize()
data["items"] = copy.deepcopy(self.items)
return data
@ -523,7 +531,8 @@ class EnumDef(AbtractAttrDef):
return output
class BoolDef(AbtractAttrDef):
class BoolDef(AbstractAttrDef):
"""Boolean representation.
Args:
@ -768,7 +777,7 @@ class FileDefItem(object):
return output
class FileDef(AbtractAttrDef):
class FileDef(AbstractAttrDef):
"""File definition.
It is possible to define filters of allowed file extensions and whether
it supports folders.
@ -886,7 +895,7 @@ def serialize_attr_def(attr_def):
"""Serialize attribute definition to data.
Args:
attr_def (AbtractAttrDef): Attribute definition to serialize.
attr_def (AbstractAttrDef): Attribute definition to serialize.
Returns:
Dict[str, Any]: Serialized data.
@ -899,7 +908,7 @@ def serialize_attr_defs(attr_defs):
"""Serialize attribute definitions to data.
Args:
attr_defs (List[AbtractAttrDef]): Attribute definitions to serialize.
attr_defs (List[AbstractAttrDef]): Attribute definitions to serialize.
Returns:
List[Dict[str, Any]]: Serialized data.

View file

@ -64,6 +64,7 @@ class MayaPluginInfo(object):
# Include all lights flag
RenderSetupIncludeLights = attr.ib(
default="1", validator=_validate_deadline_bool_value)
StrictErrorChecking = attr.ib(default=True)
@attr.s
@ -219,6 +220,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
"renderSetupIncludeLights", default_rs_include_lights)
if rs_include_lights not in {"1", "0", True, False}:
rs_include_lights = default_rs_include_lights
strict_error_checking = instance.data.get("strict_error_checking",
True)
plugin_info = MayaPluginInfo(
SceneFile=self.scene_path,
Version=cmds.about(version=True),
@ -227,6 +230,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
RenderSetupIncludeLights=rs_include_lights, # noqa
ProjectPath=context.data["workspaceDir"],
UsingRenderLayers=True,
StrictErrorChecking=strict_error_checking
)
plugin_payload = attr.asdict(plugin_info)

View file

@ -35,7 +35,7 @@ class OpenPypeVersion:
self.prerelease = prerelease
is_valid = True
if not major or not minor or not patch:
if major is None or minor is None or patch is None:
is_valid = False
self.is_valid = is_valid
@ -157,7 +157,7 @@ def get_openpype_version_from_path(path, build=True):
# fix path for application bundle on macos
if platform.system().lower() == "darwin":
path = os.path.join(path, "Contents", "MacOS", "lib", "Python")
path = os.path.join(path, "MacOS")
version_file = os.path.join(path, "openpype", "version.py")
if not os.path.isfile(version_file):
@ -189,6 +189,11 @@ def get_openpype_executable():
exe_list = config.GetConfigEntryWithDefault("OpenPypeExecutable", "")
dir_list = config.GetConfigEntryWithDefault(
"OpenPypeInstallationDirs", "")
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
dir_list = dir_list.replace("\\ ", " ")
return exe_list, dir_list
@ -218,8 +223,8 @@ def get_requested_openpype_executable(
requested_version_obj = OpenPypeVersion.from_string(requested_version)
if not requested_version_obj:
print((
">>> Requested version does not match version regex \"{}\""
).format(VERSION_REGEX))
">>> Requested version '{}' does not match version regex '{}'"
).format(requested_version, VERSION_REGEX))
return None
print((
@ -272,7 +277,8 @@ def get_requested_openpype_executable(
# Deadline decide.
exe_list = [
os.path.join(version_dir, "openpype_console.exe"),
os.path.join(version_dir, "openpype_console")
os.path.join(version_dir, "openpype_console"),
os.path.join(version_dir, "MacOS", "openpype_console")
]
return FileUtils.SearchFileList(";".join(exe_list))
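The switch from `not major` to `major is None` matters because 0 is a valid version component: under the old truthiness check a version such as 5.0.1 (minor == 0) was wrongly marked invalid.

minor = 0
not minor      # True  -> old check rejected a valid version
minor is None  # False -> new check accepts it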

View file

@ -73,7 +73,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
"""
# fix path for application bundle on macos
if platform.system().lower() == "darwin":
path = os.path.join(path, "Contents", "MacOS", "lib", "Python")
path = os.path.join(path, "MacOS")
version_file = os.path.join(path, "openpype", "version.py")
if not os.path.isfile(version_file):
@ -107,8 +107,11 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
"Scanning for compatible requested "
f"version {requested_version}"))
dir_list = self.GetConfigEntry("OpenPypeInstallationDirs")
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
dir_list = dir_list.replace("\\ ", " ")
install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
if dir:
if install_dir:
sub_dirs = [
f.path for f in os.scandir(install_dir)
if f.is_dir()
@ -120,6 +123,9 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
openpype_versions.append((version, subdir))
exe_list = self.GetConfigEntry("OpenPypeExecutable")
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
exe = FileUtils.SearchFileList(exe_list)
if openpype_versions:
# if looking for requested compatible version,
@ -161,7 +167,9 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
os.path.join(
compatible_versions[-1][1], "openpype_console.exe"),
os.path.join(
compatible_versions[-1][1], "openpype_console")
compatible_versions[-1][1], "openpype_console"),
os.path.join(
compatible_versions[-1][1], "MacOS", "openpype_console")
]
exe = FileUtils.SearchFileList(";".join(exe_list))
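The macOS-specific cleanup in both Deadline entry points undoes the shell-style escaping that appears when paths are pasted from Terminal:

exe_list = "/Applications/Open\\ Pype/openpype_console"
exe_list.replace("\\ ", " ")
# -> "/Applications/Open Pype/openpype_console"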

View file

@ -64,6 +64,16 @@ class FtrackModule(
self._timers_manager_module = None
def get_ftrack_url(self):
"""Resolved ftrack url.
Resolving tries to fill in missing information in the url and to
connect to the server.
Returns:
Union[str, None]: Final form of the url, or None if the url could
not be reached.
"""
if self._ftrack_url is _URL_NOT_SET:
self._ftrack_url = resolve_ftrack_url(
self._settings_ftrack_url,
@ -73,8 +83,19 @@ class FtrackModule(
ftrack_url = property(get_ftrack_url)
@property
def settings_ftrack_url(self):
"""Ftrack url from settings in a format as it is.
Returns:
str: Ftrack url from settings.
"""
return self._settings_ftrack_url
def get_global_environments(self):
"""Ftrack's global environments."""
return {
"FTRACK_SERVER": self.ftrack_url
}
@ -510,7 +531,10 @@ def resolve_ftrack_url(url, logger=None):
url = "https://" + url
ftrack_url = None
if not url.endswith("ftrackapp.com"):
if url and _check_ftrack_url(url):
ftrack_url = url
if not ftrack_url and not url.endswith("ftrackapp.com"):
ftrackapp_url = url + ".ftrackapp.com"
if _check_ftrack_url(ftrackapp_url):
ftrack_url = ftrackapp_url
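Condensed, the resolution order above now tries the configured url verbatim before falling back to the `.ftrackapp.com` suffix. A sketch (`_check_ftrack_url` is the module's internal reachability helper):

def resolve(url):
    if not url.startswith("http"):
        url = "https://" + url
    if _check_ftrack_url(url):
        return url
    if not url.endswith("ftrackapp.com"):
        candidate = url + ".ftrackapp.com"
        if _check_ftrack_url(candidate):
            return candidate
    return None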

View file

@ -316,7 +316,7 @@ def main_loop(ftrack_url):
statuser_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not statuser_thread.isAlive():
elif not statuser_thread.is_alive():
statuser_thread.join()
statuser_thread = None
ftrack_accessible = False
@ -359,7 +359,7 @@ def main_loop(ftrack_url):
storer_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not storer_thread.isAlive():
elif not storer_thread.is_alive():
if storer_thread.mongo_error:
raise MongoPermissionsError()
storer_thread.join()
@ -396,7 +396,7 @@ def main_loop(ftrack_url):
processor_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not processor_thread.isAlive():
elif not processor_thread.is_alive():
if processor_thread.mongo_error:
raise Exception(
"Exiting because have issue with acces to MongoDB"

View file

@ -139,8 +139,7 @@ class CredentialsDialog(QtWidgets.QDialog):
self.fill_ftrack_url()
def fill_ftrack_url(self):
url = os.getenv("FTRACK_SERVER")
checked_url = self.check_url(url)
checked_url = self.check_url()
if checked_url == self.ftsite_input.text():
return
@ -154,7 +153,7 @@ class CredentialsDialog(QtWidgets.QDialog):
self.api_input.setEnabled(enabled)
self.user_input.setEnabled(enabled)
if not url:
if not checked_url:
self.btn_advanced.hide()
self.btn_simple.hide()
self.btn_ftrack_login.hide()
@ -254,13 +253,13 @@ class CredentialsDialog(QtWidgets.QDialog):
)
def _on_ftrack_login_clicked(self):
url = self.check_url(self.ftsite_input.text())
url = self.check_url()
if not url:
return
# If there is an existing server thread running we need to stop it.
if self._login_server_thread:
if self._login_server_thread.isAlive():
if self._login_server_thread.is_alive():
self._login_server_thread.stop()
self._login_server_thread.join()
self._login_server_thread = None
@ -302,21 +301,21 @@ class CredentialsDialog(QtWidgets.QDialog):
if is_logged is not None:
self.set_is_logged(is_logged)
def check_url(self, url):
if url is not None:
url = url.strip("/ ")
if not url:
def check_url(self):
settings_url = self._module.settings_ftrack_url
url = self._module.ftrack_url
if not settings_url:
self.set_error(
"Ftrack URL is not defined in settings!"
)
return
if "http" not in url:
if url.endswith("ftrackapp.com"):
url = "https://" + url
else:
url = "https://{}.ftrackapp.com".format(url)
if url is None:
self.set_error(
"Specified URL does not lead to a valid Ftrack server."
)
return
try:
result = requests.get(
url,

View file

@ -19,6 +19,8 @@ oauth_config:
- chat:write.public
- files:write
- channels:read
- users:read
- usergroups:read
settings:
org_deploy_enabled: false
socket_mode_enabled: false

View file

@ -86,6 +86,12 @@ from .context_tools import (
registered_host,
deregister_host,
get_process_id,
get_current_context,
get_current_host_name,
get_current_project_name,
get_current_asset_name,
get_current_task_name,
)
install = install_host
uninstall = uninstall_host
@ -176,6 +182,13 @@ __all__ = (
"register_host",
"registered_host",
"deregister_host",
"get_process_id",
"get_current_context",
"get_current_host_name",
"get_current_project_name",
"get_current_asset_name",
"get_current_task_name",
# Backwards compatible function names
"install",

View file

@ -60,6 +60,7 @@ class BaseAnatomy(object):
def __init__(self, project_doc, local_settings, site_name):
project_name = project_doc["name"]
self.project_name = project_name
self.project_code = project_doc["data"]["code"]
if (site_name and
site_name not in ["studio", "local", get_local_site_id()]):

View file

@ -438,13 +438,14 @@ def get_imageio_file_rules(project_name, host_name, project_settings=None):
# get file rules from global and host_name
frules_global = imageio_global["file_rules"]
frules_host = imageio_host["file_rules"]
# host is optional, some might not have any settings
frules_host = imageio_host.get("file_rules", {})
# compile file rules dictionary
file_rules = {}
if frules_global["enabled"]:
file_rules.update(frules_global["rules"])
if frules_host["enabled"]:
if frules_host and frules_host["enabled"]:
file_rules.update(frules_host["rules"])
return file_rules
@ -455,7 +456,7 @@ def _get_imageio_settings(project_settings, host_name):
Args:
project_settings (dict): project settings.
Defaults to None.
Defaults to None.
host_name (str): host name
Returns:
@ -463,6 +464,7 @@ def _get_imageio_settings(project_settings, host_name):
"""
# get image io from global and host_name
imageio_global = project_settings["global"]["imageio"]
imageio_host = project_settings[host_name]["imageio"]
# host is optional, some might not have any settings
imageio_host = project_settings.get(host_name, {}).get("imageio", {})
return imageio_global, imageio_host
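The merge order above means host rules override global ones key by key. A self-contained illustration with a hypothetical settings shape:

frules_global = {"enabled": True, "rules": {"beauty": {"colorspace": "ACEScg"}}}
frules_host = {"enabled": True, "rules": {"beauty": {"colorspace": "sRGB"}}}

file_rules = {}
if frules_global["enabled"]:
    file_rules.update(frules_global["rules"])
if frules_host and frules_host["enabled"]:
    file_rules.update(frules_host["rules"])
# host wins: {"beauty": {"colorspace": "sRGB"}}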

View file

@ -11,6 +11,7 @@ import pyblish.api
from pyblish.lib import MessageHandler
import openpype
from openpype.host import HostBase
from openpype.client import (
get_project,
get_asset_by_id,
@ -306,6 +307,58 @@ def debug_host():
return host
def get_current_host_name():
"""Current host name.
Function is based on the currently registered host integration or the
environment variable 'AVALON_APP'.
Returns:
Union[str, None]: Name of host integration in current process or None.
"""
host = registered_host()
if isinstance(host, HostBase):
return host.name
return os.environ.get("AVALON_APP")
def get_global_context():
return {
"project_name": os.environ.get("AVALON_PROJECT"),
"asset_name": os.environ.get("AVALON_ASSET"),
"task_name": os.environ.get("AVALON_TASK"),
}
def get_current_context():
host = registered_host()
if isinstance(host, HostBase):
return host.get_current_context()
return get_global_context()
def get_current_project_name():
host = registered_host()
if isinstance(host, HostBase):
return host.get_current_project_name()
return get_global_context()["project_name"]
def get_current_asset_name():
host = registered_host()
if isinstance(host, HostBase):
return host.get_current_asset_name()
return get_global_context()["asset_name"]
def get_current_task_name():
host = registered_host()
if isinstance(host, HostBase):
return host.get_current_task_name()
return get_global_context()["task_name"]
def get_current_project(fields=None):
"""Helper function to get project document based on global Session.
@ -316,7 +369,7 @@ def get_current_project(fields=None):
None: Project is not set.
"""
project_name = legacy_io.active_project()
project_name = get_current_project_name()
return get_project(project_name, fields=fields)
@ -341,12 +394,12 @@ def get_current_project_asset(asset_name=None, asset_id=None, fields=None):
None: Asset is not set or not exist.
"""
project_name = legacy_io.active_project()
project_name = get_current_project_name()
if asset_id:
return get_asset_by_id(project_name, asset_id, fields=fields)
if not asset_name:
asset_name = legacy_io.Session.get("AVALON_ASSET")
asset_name = get_current_asset_name()
# Skip if is not set even on context
if not asset_name:
return None
@ -363,7 +416,7 @@ def is_representation_from_latest(representation):
bool: Whether the representation is of latest version.
"""
project_name = legacy_io.active_project()
project_name = get_current_project_name()
return version_is_latest(project_name, representation["parent"])
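The new helpers fall back to the `AVALON_*` environment variables whenever no `HostBase` integration is registered, so they are safe to call from plain scripts too. Typical usage (returned values depend on the running session):

from openpype.pipeline import get_current_context

context = get_current_context()
# e.g. {"project_name": "demo", "asset_name": "sh010", "task_name": "comp"}
print(context["project_name"])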

File diff suppressed because it is too large

View file

@ -107,7 +107,11 @@ class SubsetConvertorPlugin(object):
@property
def create_context(self):
"""Quick access to create context."""
"""Quick access to create context.
Returns:
CreateContext: Context which initialized the plugin.
"""
return self._create_context
@ -157,6 +161,10 @@ class BaseCreator:
# Cached group label after first call 'get_group_label'
_cached_group_label = None
# Order in which the plugin will be executed (collect & update instances)
# less == earlier -> order '90' will be processed before '100'
order = 100
# Variable to store logger
_log = None
@ -425,8 +433,8 @@ class BaseCreator:
keys/values when plugin attributes change.
Returns:
List[AbtractAttrDef]: Attribute definitions that can be tweaked for
created instance.
List[AbstractAttrDef]: Attribute definitions that can be tweaked
for created instance.
"""
return self.instance_attr_defs
@ -489,6 +497,17 @@ class Creator(BaseCreator):
# - similar to instance attribute definitions
pre_create_attr_defs = []
@property
def show_order(self):
"""Order in which is creator shown in UI.
Returns:
int: Order in which is creator shown (less == earlier). By default
is using Creator's 'order' or processing.
"""
return self.order
@abstractmethod
def create(self, subset_name, instance_data, pre_create_data):
"""Create new instance and store it.
@ -563,8 +582,8 @@ class Creator(BaseCreator):
updating keys/values when plugin attributes change.
Returns:
List[AbtractAttrDef]: Attribute definitions that can be tweaked for
created instance.
List[AbstractAttrDef]: Attribute definitions that can be tweaked
for created instance.
"""
return self.pre_create_attr_defs

View file

@ -2,7 +2,10 @@ import os
import logging
from openpype.settings import get_system_settings, get_project_settings
from openpype.pipeline import legacy_io
from openpype.pipeline import (
schema,
legacy_io,
)
from openpype.pipeline.plugin_discover import (
discover,
register_plugin,
@ -79,6 +82,45 @@ class LoaderPlugin(list):
print(" - setting `{}`: `{}`".format(option, value))
setattr(cls, option, value)
@classmethod
def is_compatible_loader(cls, context):
"""Return whether a loader is compatible with a context.
This checks the version's families and the representation for the given
Loader.
Returns:
bool
"""
plugin_repre_names = cls.get_representations()
plugin_families = cls.families
if not plugin_repre_names or not plugin_families:
return False
repre_doc = context.get("representation")
if not repre_doc:
return False
plugin_repre_names = set(plugin_repre_names)
if (
"*" not in plugin_repre_names
and repre_doc["name"] not in plugin_repre_names
):
return False
maj_version, _ = schema.get_schema_version(context["subset"]["schema"])
if maj_version < 3:
families = context["version"]["data"].get("families", [])
else:
families = context["subset"]["data"]["families"]
plugin_families = set(plugin_families)
return (
"*" in plugin_families
or any(family in plugin_families for family in families)
)
@classmethod
def get_representations(cls):
return cls.representations
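To see the compatibility rules in action, a toy loader and context (the documents are minimal stubs; real documents carry more fields):

class ObjLoader(LoaderPlugin):
    families = ["model"]
    representations = ["obj", "abc"]

context = {
    "representation": {"name": "obj"},
    "subset": {
        "schema": "openpype:subset-3.0",
        "data": {"families": ["model"]},
    },
    "version": {"data": {}},
}
ObjLoader.is_compatible_loader(context)  # True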

View file

@ -748,25 +748,9 @@ def is_compatible_loader(Loader, context):
Returns:
bool
"""
maj_version, _ = schema.get_schema_version(context["subset"]["schema"])
if maj_version < 3:
families = context["version"]["data"].get("families", [])
else:
families = context["subset"]["data"]["families"]
representation = context["representation"]
has_family = (
"*" in Loader.families or any(
family in Loader.families for family in families
)
)
representations = Loader.get_representations()
has_representation = (
"*" in representations or representation["name"] in representations
)
return has_family and has_representation
return Loader.is_compatible_loader(context)
def loaders_from_repre_context(loaders, repre_context):

View file

@ -118,7 +118,7 @@ class OpenPypePyblishPluginMixin:
Attributes available for all families in plugin's `families` attribute.
Returns:
list<AbtractAttrDef>: Attribute definitions for plugin.
list<AbstractAttrDef>: Attribute definitions for plugin.
"""
return []

View file

@ -842,7 +842,8 @@ class PlaceholderPlugin(object):
"""Placeholder options for data showed.
Returns:
List[AbtractAttrDef]: Attribute definitions of placeholder options.
List[AbstractAttrDef]: Attribute definitions of
placeholder options.
"""
return []
@ -1143,7 +1144,7 @@ class PlaceholderLoadMixin(object):
as defaults for attributes.
Returns:
List[AbtractAttrDef]: Attribute definitions common for load
List[AbstractAttrDef]: Attribute definitions common for load
plugins.
"""
@ -1513,7 +1514,7 @@ class PlaceholderCreateMixin(object):
as defaults for attributes.
Returns:
List[AbtractAttrDef]: Attribute definitions common for create
List[AbstractAttrDef]: Attribute definitions common for create
plugins.
"""

View file

@ -61,7 +61,8 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
"background",
"effect",
"staticMesh",
"skeletalMesh"
"skeletalMesh",
"xgen"
]
def process(self, instance):

Some files were not shown because too many files have changed in this diff