Mirror of https://github.com/ynput/ayon-core.git (synced 2026-01-02 00:44:52 +01:00)

Commit 7d1744dd93: resolve conflict
192 changed files with 5883 additions and 2827 deletions
.github/workflows/automate-projects.yml (vendored, 19 lines deleted)

@@ -1,19 +0,0 @@
-name: Automate Projects
-
-on:
-  issues:
-    types: [opened, labeled]
-env:
-  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-jobs:
-  assign_one_project:
-    runs-on: ubuntu-latest
-    name: Assign to One Project
-    steps:
-      - name: Assign NEW bugs to triage
-        uses: srggrs/assign-one-project-github-action@1.2.0
-        if: contains(github.event.issue.labels.*.name, 'bug')
-        with:
-          project: 'https://github.com/pypeclub/pype/projects/2'
-          column_name: 'Needs triage'
.github/workflows/milestone_assign.yml (vendored, 6 changes)

@@ -13,7 +13,7 @@ jobs:
       if: github.event.pull_request.milestone == null
       uses: zoispag/action-assign-milestone@v1
       with:
-        repo-token: "${{ secrets.GITHUB_TOKEN }}"
+        repo-token: "${{ secrets.YNPUT_BOT_TOKEN }}"
         milestone: 'next-minor'
 
   run_if_develop:

@@ -24,5 +24,5 @@ jobs:
       if: github.event.pull_request.milestone == null
       uses: zoispag/action-assign-milestone@v1
       with:
-        repo-token: "${{ secrets.GITHUB_TOKEN }}"
-        milestone: 'next-patch'
+        repo-token: "${{ secrets.YNPUT_BOT_TOKEN }}"
+        milestone: 'next-patch'
.github/workflows/milestone_create.yml (vendored, 8 changes)

@@ -12,7 +12,7 @@ jobs:
       uses: "WyriHaximus/github-action-get-milestones@master"
       id: milestones
       env:
-        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+        GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
 
     - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
       id: querymilestone

@@ -31,7 +31,7 @@ jobs:
       with:
         title: 'next-patch'
       env:
-        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+        GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
 
   generate-next-minor:
     runs-on: ubuntu-latest

@@ -40,7 +40,7 @@ jobs:
       uses: "WyriHaximus/github-action-get-milestones@master"
       id: milestones
       env:
-        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+        GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
 
     - run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
       id: querymilestone

@@ -59,4 +59,4 @@ jobs:
       with:
         title: 'next-minor'
       env:
-        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+        GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
.github/workflows/miletone_release_trigger.yml (vendored, new file, 47 lines)

@@ -0,0 +1,47 @@
+name: Milestone Release [trigger]
+
+on:
+  workflow_dispatch:
+    inputs:
+      milestone:
+        required: true
+      release-type:
+        type: choice
+        description: What release should be created
+        options:
+          - release
+          - pre-release
+  milestone:
+    types: closed
+
+
+jobs:
+  milestone-title:
+    runs-on: ubuntu-latest
+    outputs:
+      milestone: ${{ steps.milestoneTitle.outputs.value }}
+    steps:
+      - name: Switch input milestone
+        uses: haya14busa/action-cond@v1
+        id: milestoneTitle
+        with:
+          cond: ${{ inputs.milestone == '' }}
+          if_true: ${{ github.event.milestone.title }}
+          if_false: ${{ inputs.milestone }}
+      - name: Print resulted milestone
+        run: |
+          echo "${{ steps.milestoneTitle.outputs.value }}"
+
+  call-ci-tools-milestone-release:
+    needs: milestone-title
+    uses: ynput/ci-tools/.github/workflows/milestone_release_ref.yml@main
+    with:
+      milestone: ${{ needs.milestone-title.outputs.milestone }}
+      repo-owner: ${{ github.event.repository.owner.login }}
+      repo-name: ${{ github.event.repository.name }}
+      version-py-path: "./openpype/version.py"
+      pyproject-path: "./pyproject.toml"
+    secrets:
+      token: ${{ secrets.YNPUT_BOT_TOKEN }}
+      user_email: ${{ secrets.CI_EMAIL }}
+      user_name: ${{ secrets.CI_USER }}
.github/workflows/nightly_merge.yml (vendored, 29 lines deleted)

@@ -1,29 +0,0 @@
-name: Dev -> Main
-
-on:
-  schedule:
-    - cron: '21 3 * * 3,6'
-  workflow_dispatch:
-
-jobs:
-  develop-to-main:
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: 🚛 Checkout Code
-        uses: actions/checkout@v2
-
-      - name: 🔨 Merge develop to main
-        uses: everlytic/branch-merge@1.1.0
-        with:
-          github_token: ${{ secrets.ADMIN_TOKEN }}
-          source_ref: 'develop'
-          target_branch: 'main'
-          commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
-
-      - name: Invoke pre-release workflow
-        uses: benc-uk/workflow-dispatch@v1
-        with:
-          workflow: Nightly Prerelease
-          token: ${{ secrets.ADMIN_TOKEN }}
.github/workflows/prerelease.yml (vendored, 95 lines deleted)

@@ -1,95 +0,0 @@
-name: Nightly Prerelease
-
-on:
-  workflow_dispatch:
-
-
-jobs:
-  create_nightly:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: 🚛 Checkout Code
-        uses: actions/checkout@v2
-        with:
-          fetch-depth: 0
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.9
-
-      - name: Install Python requirements
-        run: pip install gitpython semver PyGithub
-
-      - name: 🔎 Determine next version type
-        id: version_type
-        run: |
-          TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.GITHUB_TOKEN }})
-
-          echo ::set-output name=type::$TYPE
-
-      - name: 💉 Inject new version into files
-        id: version
-        if: steps.version_type.outputs.type != 'skip'
-        run: |
-          RESULT=$(python ./tools/ci_tools.py --nightly --github_token ${{ secrets.GITHUB_TOKEN }})
-
-          echo ::set-output name=next_tag::$RESULT
-
-      # - name: "✏️ Generate full changelog"
-      #   if: steps.version_type.outputs.type != 'skip'
-      #   id: generate-full-changelog
-      #   uses: heinrichreimer/github-changelog-generator-action@v2.3
-      #   with:
-      #     token: ${{ secrets.ADMIN_TOKEN }}
-      #     addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
-      #     issues: false
-      #     issuesWoLabels: false
-      #     sinceTag: "3.12.0"
-      #     maxIssues: 100
-      #     pullRequests: true
-      #     prWoLabels: false
-      #     author: false
-      #     unreleased: true
-      #     compareLink: true
-      #     stripGeneratorNotice: true
-      #     verbose: true
-      #     unreleasedLabel: ${{ steps.version.outputs.next_tag }}
-      #     excludeTagsRegex: "CI/.+"
-      #     releaseBranch: "main"
-
-      - name: "🖨️ Print changelog to console"
-        if: steps.version_type.outputs.type != 'skip'
-        run: cat CHANGELOG.md
-
-      - name: 💾 Commit and Tag
-        id: git_commit
-        if: steps.version_type.outputs.type != 'skip'
-        run: |
-          git config user.email ${{ secrets.CI_EMAIL }}
-          git config user.name ${{ secrets.CI_USER }}
-          git checkout main
-          git pull
-          git add .
-          git commit -m "[Automated] Bump version"
-          tag_name="CI/${{ steps.version.outputs.next_tag }}"
-          echo $tag_name
-          git tag -a $tag_name -m "nightly build"
-
-      - name: Push to protected main branch
-        uses: CasperWA/push-protected@v2.10.0
-        with:
-          token: ${{ secrets.ADMIN_TOKEN }}
-          branch: main
-          tags: true
-          unprotect_reviews: true
-
-      - name: 🔨 Merge main back to develop
-        uses: everlytic/branch-merge@1.1.0
-        if: steps.version_type.outputs.type != 'skip'
-        with:
-          github_token: ${{ secrets.ADMIN_TOKEN }}
-          source_ref: 'main'
-          target_branch: 'develop'
-          commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
.github/workflows/release.yml (vendored, 124 lines deleted)

@@ -1,124 +0,0 @@
-name: Stable Release
-
-on:
-  release:
-    types:
-      - prereleased
-
-jobs:
-  create_release:
-    runs-on: ubuntu-latest
-    if: github.actor != 'pypebot'
-
-    steps:
-      - name: 🚛 Checkout Code
-        uses: actions/checkout@v2
-        with:
-          fetch-depth: 0
-
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.9
-      - name: Install Python requirements
-        run: pip install gitpython semver PyGithub
-
-      - name: 💉 Inject new version into files
-        id: version
-        run: |
-          echo ::set-output name=current_version::${GITHUB_REF#refs/*/}
-          RESULT=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/})
-          LASTRELEASE=$(python ./tools/ci_tools.py --lastversion release)
-
-          echo ::set-output name=last_release::$LASTRELEASE
-          echo ::set-output name=release_tag::$RESULT
-
-      # - name: "✏️ Generate full changelog"
-      #   if: steps.version.outputs.release_tag != 'skip'
-      #   id: generate-full-changelog
-      #   uses: heinrichreimer/github-changelog-generator-action@v2.3
-      #   with:
-      #     token: ${{ secrets.ADMIN_TOKEN }}
-      #     addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
-      #     issues: false
-      #     issuesWoLabels: false
-      #     sinceTag: "3.12.0"
-      #     maxIssues: 100
-      #     pullRequests: true
-      #     prWoLabels: false
-      #     author: false
-      #     unreleased: true
-      #     compareLink: true
-      #     stripGeneratorNotice: true
-      #     verbose: true
-      #     futureRelease: ${{ steps.version.outputs.release_tag }}
-      #     excludeTagsRegex: "CI/.+"
-      #     releaseBranch: "main"
-
-      - name: 💾 Commit and Tag
-        id: git_commit
-        if: steps.version.outputs.release_tag != 'skip'
-        run: |
-          git config user.email ${{ secrets.CI_EMAIL }}
-          git config user.name ${{ secrets.CI_USER }}
-          git add .
-          git commit -m "[Automated] Release"
-          tag_name="${{ steps.version.outputs.release_tag }}"
-          git tag -a $tag_name -m "stable release"
-
-      - name: 🔏 Push to protected main branch
-        if: steps.version.outputs.release_tag != 'skip'
-        uses: CasperWA/push-protected@v2.10.0
-        with:
-          token: ${{ secrets.ADMIN_TOKEN }}
-          branch: main
-          tags: true
-          unprotect_reviews: true
-
-      - name: "✏️ Generate last changelog"
-        if: steps.version.outputs.release_tag != 'skip'
-        id: generate-last-changelog
-        uses: heinrichreimer/github-changelog-generator-action@v2.2
-        with:
-          token: ${{ secrets.ADMIN_TOKEN }}
-          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
-          issues: false
-          issuesWoLabels: false
-          sinceTag: ${{ steps.version.outputs.last_release }}
-          maxIssues: 100
-          pullRequests: true
-          prWoLabels: false
-          author: false
-          unreleased: true
-          compareLink: true
-          stripGeneratorNotice: true
-          verbose: true
-          futureRelease: ${{ steps.version.outputs.release_tag }}
-          excludeTagsRegex: "CI/.+"
-          releaseBranch: "main"
-          stripHeaders: true
-          base: 'none'
-
-
-      - name: 🚀 Github Release
-        if: steps.version.outputs.release_tag != 'skip'
-        uses: ncipollo/release-action@v1
-        with:
-          body: ${{ steps.generate-last-changelog.outputs.changelog }}
-          tag: ${{ steps.version.outputs.release_tag }}
-          token: ${{ secrets.ADMIN_TOKEN }}
-
-      - name: ☠ Delete Pre-release
-        if: steps.version.outputs.release_tag != 'skip'
-        uses: cb80/delrel@latest
-        with:
-          tag: "${{ steps.version.outputs.current_version }}"
-
-      - name: 🔁 Merge main back to develop
-        if: steps.version.outputs.release_tag != 'skip'
-        uses: everlytic/branch-merge@1.1.0
-        with:
-          github_token: ${{ secrets.ADMIN_TOKEN }}
-          source_ref: 'main'
-          target_branch: 'develop'
-          commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'
.github/workflows/test_build.yml (vendored, 26 changes)

@@ -28,7 +28,7 @@ jobs:
         uses: actions/setup-python@v2
         with:
          python-version: ${{ matrix.python-version }}
 
-
       - name: 🧵 Install Requirements
+        shell: pwsh
         run: |

@@ -64,27 +64,3 @@ jobs:
         run: |
           export SKIP_THIRD_PARTY_VALIDATION="1"
           ./tools/build.sh
-
-  # MacOS-latest:
-
-  #   runs-on: macos-latest
-  #   strategy:
-  #     matrix:
-  #       python-version: [3.9]
-
-  #   steps:
-  #   - name: 🚛 Checkout Code
-  #     uses: actions/checkout@v2
-
-  #   - name: Set up Python
-  #     uses: actions/setup-python@v2
-  #     with:
-  #       python-version: ${{ matrix.python-version }}
-
-  #   - name: 🧵 Install Requirements
-  #     run: |
-  #       ./tools/create_env.sh
-
-  #   - name: 🔨 Build
-  #     run: |
-  #       ./tools/build.sh
@@ -9,4 +9,4 @@ repos:
       - id: check-yaml
       - id: check-added-large-files
       - id: no-commit-to-branch
-        args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-]+)$).*' ]
+        args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
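The only functional change in the pre-commit hook above is that the `no-commit-to-branch` pattern now accepts underscores in branch names. A quick sketch of the effect, using Python's `re` with the two patterns copied verbatim from the hook args (the hook blocks a commit when the pattern matches the branch name):

```python
import re

OLD = r"^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-]+)$).*"
NEW = r"^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*"

for branch in ("bugfix/my-fix", "bugfix/my_fix", "main"):
    # A match means the branch fails the naming rule and the commit is blocked.
    print(branch,
          "blocked by old:", bool(re.match(OLD, branch)),
          "blocked by new:", bool(re.match(NEW, branch)))

# bugfix/my_fix was blocked by the old pattern (underscore not in the
# character class) and is allowed by the new one; main stays blocked by both.
```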
CHANGELOG.md (1183 changes): file diff suppressed because it is too large.
@@ -8,7 +8,6 @@ OpenPype
 [](https://github.com/pypeclub/pype/actions/workflows/documentation.yml) 
 
 
-this
 Introduction
 ------------
 
openpype/api.py (112 lines deleted)

@@ -1,112 +0,0 @@
-from .settings import (
-    get_system_settings,
-    get_project_settings,
-    get_current_project_settings,
-    get_anatomy_settings,
-
-    SystemSettings,
-    ProjectSettings
-)
-from .lib import (
-    PypeLogger,
-    Logger,
-    Anatomy,
-    execute,
-    run_subprocess,
-    version_up,
-    get_asset,
-    get_workdir_data,
-    get_version_from_path,
-    get_last_version_from_path,
-    get_app_environments_for_context,
-    source_hash,
-    get_latest_version,
-    get_local_site_id,
-    change_openpype_mongo_url,
-    create_project_folders,
-    get_project_basic_paths
-)
-
-from .lib.mongo import (
-    get_default_components
-)
-
-from .lib.applications import (
-    ApplicationManager
-)
-
-from .lib.avalon_context import (
-    BuildWorkfile
-)
-
-from . import resources
-
-from .plugin import (
-    Extractor,
-
-    ValidatePipelineOrder,
-    ValidateContentsOrder,
-    ValidateSceneOrder,
-    ValidateMeshOrder,
-)
-
-# temporary fix, might
-from .action import (
-    get_errored_instances_from_context,
-    RepairAction,
-    RepairContextAction
-)
-
-
-__all__ = [
-    "get_system_settings",
-    "get_project_settings",
-    "get_current_project_settings",
-    "get_anatomy_settings",
-    "get_project_basic_paths",
-
-    "SystemSettings",
-    "ProjectSettings",
-
-    "PypeLogger",
-    "Logger",
-    "Anatomy",
-    "execute",
-    "get_default_components",
-    "ApplicationManager",
-    "BuildWorkfile",
-
-    # Resources
-    "resources",
-
-    # plugin classes
-    "Extractor",
-    # ordering
-    "ValidatePipelineOrder",
-    "ValidateContentsOrder",
-    "ValidateSceneOrder",
-    "ValidateMeshOrder",
-    # action
-    "get_errored_instances_from_context",
-    "RepairAction",
-    "RepairContextAction",
-
-    # get contextual data
-    "version_up",
-    "get_asset",
-    "get_workdir_data",
-    "get_version_from_path",
-    "get_last_version_from_path",
-    "get_app_environments_for_context",
-    "source_hash",
-
-    "run_subprocess",
-    "get_latest_version",
-
-    "get_local_site_id",
-    "change_openpype_mongo_url",
-
-    "get_project_basic_paths",
-    "create_project_folders"
-
-]
@@ -164,7 +164,6 @@ def get_linked_representation_id(
         # Recursive graph lookup for inputs
         {"$graphLookup": graph_lookup}
     ]
-
     conn = get_project_connection(project_name)
     result = conn.aggregate(query_pipeline)
     referenced_version_ids = _process_referenced_pipeline_result(

@@ -213,7 +212,7 @@ def _process_referenced_pipeline_result(result, link_type):
 
     for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
         output_links = output.get("data", {}).get("inputLinks")
-        if not output_links:
+        if not output_links and output["type"] != "hero_version":
             continue
 
         # Leaf

@@ -232,6 +231,9 @@ def _process_referenced_pipeline_result(result, link_type):
 
 
 def _filter_input_links(input_links, link_type, correctly_linked_ids):
+    if not input_links:  # to handle hero versions
+        return
+
     for input_link in input_links:
         if link_type and input_link["type"] != link_type:
             continue
@@ -1,4 +1,5 @@
+import os
 
 from openpype.lib import PreLaunchHook
 
 

@@ -40,5 +41,13 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
             self.log.info("Current context does not have any workfile yet.")
             return
 
+        # Determine whether to open workfile post initialization.
+        if self.host_name == "maya":
+            key = "open_workfile_post_initialization"
+            if self.data["project_settings"]["maya"][key]:
+                self.log.debug("Opening workfile post initialization.")
+                self.data["env"]["OPENPYPE_" + key.upper()] = "1"
+            return
+
         # Add path to workfile to arguments
         self.launch_context.launch_args.append(last_workfile)
@@ -8,6 +8,7 @@ exists is used.
 
 import os
 from abc import ABCMeta, abstractmethod
+import platform
 
 import six
 

@@ -187,11 +188,19 @@ class HostDirmap(object):
 
         self.log.debug("local overrides {}".format(active_overrides))
         self.log.debug("remote overrides {}".format(remote_overrides))
+        current_platform = platform.system().lower()
         for root_name, active_site_dir in active_overrides.items():
             remote_site_dir = (
                 remote_overrides.get(root_name)
                 or sync_settings["sites"][remote_site]["root"][root_name]
             )
+
+            if isinstance(remote_site_dir, dict):
+                remote_site_dir = remote_site_dir.get(current_platform)
+
+            if not remote_site_dir:
+                continue
+
             if os.path.isdir(active_site_dir):
                 if "destination-path" not in mapping:
                     mapping["destination-path"] = []
@@ -1,3 +1,4 @@
+import os
 import logging
 import contextlib
 from abc import ABCMeta, abstractproperty

@@ -100,6 +101,30 @@ class HostBase(object):
 
         pass
 
+    def get_current_project_name(self):
+        """
+        Returns:
+            Union[str, None]: Current project name.
+        """
+
+        return os.environ.get("AVALON_PROJECT")
+
+    def get_current_asset_name(self):
+        """
+        Returns:
+            Union[str, None]: Current asset name.
+        """
+
+        return os.environ.get("AVALON_ASSET")
+
+    def get_current_task_name(self):
+        """
+        Returns:
+            Union[str, None]: Current task name.
+        """
+
+        return os.environ.get("AVALON_TASK")
+
     def get_current_context(self):
         """Get current context information.
 

@@ -111,19 +136,14 @@ class HostBase(object):
         Default implementation returns values from 'legacy_io.Session'.
 
         Returns:
-            dict: Context with 3 keys 'project_name', 'asset_name' and
-                'task_name'. All of them can be 'None'.
+            Dict[str, Union[str, None]]: Context with 3 keys 'project_name',
+                'asset_name' and 'task_name'. All of them can be 'None'.
         """
 
-        from openpype.pipeline import legacy_io
-
-        if legacy_io.is_installed():
-            legacy_io.install()
-
         return {
-            "project_name": legacy_io.Session["AVALON_PROJECT"],
-            "asset_name": legacy_io.Session["AVALON_ASSET"],
-            "task_name": legacy_io.Session["AVALON_TASK"]
+            "project_name": self.get_current_project_name(),
+            "asset_name": self.get_current_asset_name(),
+            "task_name": self.get_current_task_name()
         }
 
     def get_context_title(self):
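Taken together, the hunks above split context access into three small getters and recompose `get_current_context()` from them, dropping the direct `legacy_io.Session` reads. A minimal stand-alone sketch of the resulting behavior (illustration only, not the full `HostBase` class):

```python
import os


class DemoHost:
    """Sketch of HostBase context access after this change."""

    def get_current_project_name(self):
        return os.environ.get("AVALON_PROJECT")

    def get_current_asset_name(self):
        return os.environ.get("AVALON_ASSET")

    def get_current_task_name(self):
        return os.environ.get("AVALON_TASK")

    def get_current_context(self):
        # Hosts that track context themselves can now override just the
        # three getters; the composed dict keeps the same keys as before.
        return {
            "project_name": self.get_current_project_name(),
            "asset_name": self.get_current_asset_name(),
            "task_name": self.get_current_task_name(),
        }


os.environ.update({"AVALON_PROJECT": "demo", "AVALON_ASSET": "sh010",
                   "AVALON_TASK": "animation"})
print(DemoHost().get_current_context())
# {'project_name': 'demo', 'asset_name': 'sh010', 'task_name': 'animation'}
```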
@@ -6,14 +6,19 @@ from openpype.hosts.aftereffects import api
 from openpype.pipeline import (
     Creator,
     CreatedInstance,
-    legacy_io,
-    CreatorError
+    CreatorError,
 )
 from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
 from openpype.lib import prepare_template_data
+from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS
 
 
 class RenderCreator(Creator):
+    """Creates 'render' instance for publishing.
+
+    Result of 'render' instance is video or sequence of images for particular
+    composition based of configuration in its RenderQueue.
+    """
     identifier = "render"
     label = "Render"
     family = "render"

@@ -28,45 +33,6 @@ class RenderCreator(Creator):
                       ["RenderCreator"]
                       ["defaults"])
 
-    def get_icon(self):
-        return resources.get_openpype_splash_filepath()
-
-    def collect_instances(self):
-        for instance_data in cache_and_get_instances(self):
-            # legacy instances have family=='render' or 'renderLocal', use them
-            creator_id = (instance_data.get("creator_identifier") or
-                          instance_data.get("family", '').replace("Local", ''))
-            if creator_id == self.identifier:
-                instance_data = self._handle_legacy(instance_data)
-                instance = CreatedInstance.from_existing(
-                    instance_data, self
-                )
-                self._add_instance_to_context(instance)
-
-    def update_instances(self, update_list):
-        for created_inst, _changes in update_list:
-            api.get_stub().imprint(created_inst.get("instance_id"),
-                                   created_inst.data_to_store())
-            subset_change = _changes.get("subset")
-            if subset_change:
-                api.get_stub().rename_item(created_inst.data["members"][0],
-                                           subset_change[1])
-
-    def remove_instances(self, instances):
-        for instance in instances:
-            self._remove_instance_from_context(instance)
-            self.host.remove_instance(instance)
-
-            subset = instance.data["subset"]
-            comp_id = instance.data["members"][0]
-            comp = api.get_stub().get_item(comp_id)
-            if comp:
-                new_comp_name = comp.name.replace(subset, '')
-                if not new_comp_name:
-                    new_comp_name = "dummyCompName"
-                api.get_stub().rename_item(comp_id,
-                                           new_comp_name)
-
     def create(self, subset_name_from_ui, data, pre_create_data):
         stub = api.get_stub()  # only after After Effects is up
         if pre_create_data.get("use_selection"):

@@ -82,10 +48,19 @@ class RenderCreator(Creator):
                     "if 'useSelection' or create at least "
                     "one composition."
                 )
 
+        use_composition_name = (pre_create_data.get("use_composition_name") or
+                                len(comps) > 1)
         for comp in comps:
-            if pre_create_data.get("use_composition_name"):
-                composition_name = comp.name
+            if use_composition_name:
+                if "{composition}" not in subset_name_from_ui.lower():
+                    subset_name_from_ui += "{Composition}"
+
+                composition_name = re.sub(
+                    "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
+                    "",
+                    comp.name
+                )
 
             dynamic_fill = prepare_template_data({"composition":
                                                   composition_name})
             subset_name = subset_name_from_ui.format(**dynamic_fill)

@@ -129,8 +104,72 @@ class RenderCreator(Creator):
         ]
         return output
 
+    def get_icon(self):
+        return resources.get_openpype_splash_filepath()
+
+    def collect_instances(self):
+        for instance_data in cache_and_get_instances(self):
+            # legacy instances have family=='render' or 'renderLocal', use them
+            creator_id = (instance_data.get("creator_identifier") or
+                          instance_data.get("family", '').replace("Local", ''))
+            if creator_id == self.identifier:
+                instance_data = self._handle_legacy(instance_data)
+                instance = CreatedInstance.from_existing(
+                    instance_data, self
+                )
+                self._add_instance_to_context(instance)
+
+    def update_instances(self, update_list):
+        for created_inst, _changes in update_list:
+            api.get_stub().imprint(created_inst.get("instance_id"),
+                                   created_inst.data_to_store())
+            subset_change = _changes.get("subset")
+            if subset_change:
+                api.get_stub().rename_item(created_inst.data["members"][0],
+                                           subset_change.new_value)
+
+    def remove_instances(self, instances):
+        for instance in instances:
+            self._remove_instance_from_context(instance)
+            self.host.remove_instance(instance)
+
+            subset = instance.data["subset"]
+            comp_id = instance.data["members"][0]
+            comp = api.get_stub().get_item(comp_id)
+            if comp:
+                new_comp_name = comp.name.replace(subset, '')
+                if not new_comp_name:
+                    new_comp_name = "dummyCompName"
+                api.get_stub().rename_item(comp_id,
+                                           new_comp_name)
+
     def get_detail_description(self):
-        return """Creator for Render instances"""
+        return """Creator for Render instances
+
+        Main publishable item in AfterEffects will be of `render` family.
+        Result of this item (instance) is picture sequence or video that could
+        be a final delivery product or loaded and used in another DCCs.
+
+        Select single composition and create instance of 'render' family or
+        turn off 'Use selection' to create instance for all compositions.
+
+        'Use composition name in subset' allows to explicitly add composition
+        name into created subset name.
+
+        Position of composition name could be set in
+        `project_settings/global/tools/creator/subset_name_profiles` with some
+        form of '{composition}' placeholder.
+
+        Composition name will be used implicitly if multiple composition should
+        be handled at same time.
+
+        If {composition} placeholder is not us 'subset_name_profiles'
+        composition name will be capitalized and set at the end of subset name
+        if necessary.
+
+        If composition name should be used, it will be cleaned up of characters
+        that would cause an issue in published file names.
+        """
 
     def get_dynamic_data(self, variant, task_name, asset_doc,
                          project_name, host_name, instance):

@@ -155,7 +194,7 @@ class RenderCreator(Creator):
         instance_data.pop("uuid")
 
         if not instance_data.get("task"):
-            instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
+            instance_data["task"] = self.create_context.get_current_task_name()
 
         if not instance_data.get("creator_attributes"):
             is_old_farm = instance_data["family"] != "renderLocal"
@@ -2,8 +2,7 @@ import openpype.hosts.aftereffects.api as api
 from openpype.client import get_asset_by_name
 from openpype.pipeline import (
     AutoCreator,
-    CreatedInstance,
-    legacy_io,
+    CreatedInstance
 )
 from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
 

@@ -38,10 +37,11 @@ class AEWorkfileCreator(AutoCreator):
                 existing_instance = instance
                 break
 
-        project_name = legacy_io.Session["AVALON_PROJECT"]
-        asset_name = legacy_io.Session["AVALON_ASSET"]
-        task_name = legacy_io.Session["AVALON_TASK"]
-        host_name = legacy_io.Session["AVALON_APP"]
+        context = self.create_context
+        project_name = context.get_current_project_name()
+        asset_name = context.get_current_asset_name()
+        task_name = context.get_current_task_name()
+        host_name = context.host_name
 
         if existing_instance is None:
             asset_doc = get_asset_by_name(project_name, asset_name)
@@ -44,7 +44,7 @@ class AppendBlendLoader(plugin.AssetLoader):
     """
 
     representations = ["blend"]
-    families = ["*"]
+    families = ["workfile"]
 
     label = "Append Workfile"
     order = 9

@@ -68,7 +68,7 @@ class ImportBlendLoader(plugin.AssetLoader):
     """
 
     representations = ["blend"]
-    families = ["*"]
+    families = ["workfile"]
 
     label = "Import Workfile"
     order = 9
@@ -19,7 +19,6 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
     order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["camera"]
-    version = (0, 1, 0)
     label = "Zero Keyframe"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
 

@@ -14,7 +14,6 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
     order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
-    category = "geometry"
     label = "Mesh Has UV's"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
     optional = True

@@ -14,7 +14,6 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator):
     order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
-    category = "geometry"
     label = "Mesh No Negative Scale"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
 

@@ -19,7 +19,6 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
     order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model", "rig"]
-    version = (0, 1, 0)
     label = "No Colons in names"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
 

@@ -21,7 +21,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
     order = ValidateContentsOrder
     hosts = ["blender"]
     families = ["model"]
-    version = (0, 1, 0)
     label = "Transform Zero"
     actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
 
@@ -143,6 +143,9 @@ class ExtractSubsetResources(publish.Extractor):
         # create staging dir path
         staging_dir = self.staging_dir(instance)
 
+        # append staging dir for later cleanup
+        instance.context.data["cleanupFullPaths"].append(staging_dir)
+
         # add default preset type for thumbnail and reviewable video
         # update them with settings and override in case the same
         # are found in there

@@ -548,30 +551,3 @@ class ExtractSubsetResources(publish.Extractor):
                 "Path `{}` is containing more that one clip".format(path)
             )
         return clips[0]
-
-    def staging_dir(self, instance):
-        """Provide a temporary directory in which to store extracted files
-
-        Upon calling this method the staging directory is stored inside
-        the instance.data['stagingDir']
-        """
-        staging_dir = instance.data.get('stagingDir', None)
-        openpype_temp_dir = os.getenv("OPENPYPE_TEMP_DIR")
-
-        if not staging_dir:
-            if openpype_temp_dir and os.path.exists(openpype_temp_dir):
-                staging_dir = os.path.normpath(
-                    tempfile.mkdtemp(
-                        prefix="pyblish_tmp_",
-                        dir=openpype_temp_dir
-                    )
-                )
-            else:
-                staging_dir = os.path.normpath(
-                    tempfile.mkdtemp(prefix="pyblish_tmp_")
-                )
-            instance.data['stagingDir'] = staging_dir
-
-        instance.context.data["cleanupFullPaths"].append(staging_dir)
-
-        return staging_dir
@@ -108,9 +108,9 @@ class ExtractRender(pyblish.api.InstancePlugin):
         output = process.communicate()[0]
 
         if process.returncode != 0:
-            raise ValueError(output.decode("utf-8"))
+            raise ValueError(output.decode("utf-8", errors="backslashreplace"))
 
-        self.log.debug(output.decode("utf-8"))
+        self.log.debug(output.decode("utf-8", errors="backslashreplace"))
 
         # Generate representations.
         extension = collection.tail[1:]
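Both decode calls now pass `errors="backslashreplace"`, so render output containing bytes that are not valid UTF-8 can no longer crash the extractor itself with a `UnicodeDecodeError`. A small illustration of the difference:

```python
raw = b"rendered frame 1 \xff\xfe"  # process output with non-UTF-8 bytes

try:
    raw.decode("utf-8")
except UnicodeDecodeError as exc:
    print("strict decode fails:", exc)

# backslashreplace keeps decoding and escapes the offending bytes instead
print(raw.decode("utf-8", errors="backslashreplace"))
# -> rendered frame 1 \xff\xfe
```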
@@ -113,7 +113,7 @@ class HoudiniCreatorBase(object):
             Dict[str, Any]: Shared data dictionary.
 
         """
-        if shared_data.get("houdini_cached_subsets") is not None:
+        if shared_data.get("houdini_cached_subsets") is None:
             cache = dict()
             cache_legacy = dict()
 
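The inverted condition is the whole fix: the per-publish cache was only (re)built when it already existed, so on fresh shared data it was never populated at all. The intended caching pattern, sketched independently of Houdini (the key name is taken from the diff; the rest is illustrative):

```python
def cached_subsets(shared_data):
    # Build the cache only on the first call; later calls reuse it.
    if shared_data.get("houdini_cached_subsets") is None:
        cache = dict()
        # ... collect instances into `cache` here ...
        shared_data["houdini_cached_subsets"] = cache
    return shared_data["houdini_cached_subsets"]


shared = {}
first = cached_subsets(shared)
assert cached_subsets(shared) is first  # second call hits the cache
```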
@@ -225,12 +225,12 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
             self._add_instance_to_context(created_instance)
 
     def update_instances(self, update_list):
-        for created_inst, _changes in update_list:
+        for created_inst, changes in update_list:
             instance_node = hou.node(created_inst.get("instance_node"))
 
             new_values = {
-                key: new_value
-                for key, (_old_value, new_value) in _changes.items()
+                key: changes[key].new_value
+                for key in changes.changed_keys
             }
             imprint(
                 instance_node,
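`update_instances` now reads changes through an object API (`changes.changed_keys` and `changes[key].new_value`) instead of unpacking `(old, new)` tuples from a plain dict. A stand-in sketch of the shape this comprehension relies on; only the two accessors used above are taken from the diff, and the class names here are invented for illustration:

```python
class FakeChange:
    def __init__(self, old_value, new_value):
        self.old_value = old_value
        self.new_value = new_value


class FakeChanges:
    """Minimal stand-in mimicking the accessors used by update_instances."""

    def __init__(self, data):
        self._data = {key: FakeChange(*pair) for key, pair in data.items()}

    @property
    def changed_keys(self):
        return self._data.keys()

    def __getitem__(self, key):
        return self._data[key]


changes = FakeChanges({"subset": ("renderMain", "renderComp")})
new_values = {key: changes[key].new_value for key in changes.changed_keys}
print(new_values)  # {'subset': 'renderComp'}
```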
@@ -1,4 +1,5 @@
 import os
+import re
 import logging
 import platform
 

@@ -66,7 +67,7 @@ def generate_shelves():
         )
         continue
 
-    mandatory_attributes = {'name', 'script'}
+    mandatory_attributes = {'label', 'script'}
     for tool_definition in shelf_definition.get('tools_list'):
         # We verify that the name and script attibutes of the tool
         # are set

@@ -152,31 +153,32 @@ def get_or_create_tool(tool_definition, shelf):
     Returns:
         hou.Tool: The tool updated or the new one
     """
-    existing_tools = shelf.tools()
-    tool_label = tool_definition.get('label')
-
+    tool_label = tool_definition.get("label")
     if not tool_label:
         log.warning("Skipped shelf without label")
         return
 
+    script_path = tool_definition["script"]
+    if not script_path or not os.path.exists(script_path):
+        log.warning("This path doesn't exist - {}".format(script_path))
+        return
+
+    existing_tools = shelf.tools()
     existing_tool = next(
         (tool for tool in existing_tools if tool.label() == tool_label),
         None
     )
 
+    with open(script_path) as stream:
+        script = stream.read()
+
+    tool_definition["script"] = script
+
     if existing_tool:
         tool_definition.pop('name', None)
-        tool_definition.pop('label', None)
+        tool_definition.pop("label", None)
         existing_tool.setData(**tool_definition)
         return existing_tool
 
-    tool_name = tool_label.replace(' ', '_').lower()
-
-    if not os.path.exists(tool_definition['script']):
-        log.warning(
-            "This path doesn't exist - {}".format(tool_definition['script'])
-        )
-        return
-
-    with open(tool_definition['script']) as f:
-        script = f.read()
-    tool_definition.update({'script': script})
-
-    new_tool = hou.shelves.newTool(name=tool_name, **tool_definition)
-
-    return new_tool
+    tool_name = re.sub(r"[^\w\d]+", "_", tool_label).lower()
+    return hou.shelves.newTool(name=tool_name, **tool_definition)
@@ -12,6 +12,11 @@ class MaxAddon(OpenPypeModule, IHostAddon):
     def initialize(self, module_settings):
         self.enabled = True
 
+    def add_implementation_envs(self, env, _app):
+        # Remove auto screen scale factor for Qt
+        # - let 3dsmax decide it's value
+        env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
+
     def get_workfile_extensions(self):
         return [".max"]
 
@@ -78,12 +78,12 @@ class MaxCreator(Creator, MaxCreatorBase):
             self._add_instance_to_context(created_instance)
 
     def update_instances(self, update_list):
-        for created_inst, _changes in update_list:
+        for created_inst, changes in update_list:
             instance_node = created_inst.get("instance_node")
 
             new_values = {
-                key: new_value
-                for key, (_old_value, new_value) in _changes.items()
+                key: changes[key].new_value
+                for key in changes.changed_keys
             }
             imprint(
                 instance_node,
openpype/hosts/max/plugins/create/create_camera.py (new file, 26 lines)

@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+"""Creator plugin for creating camera."""
+from openpype.hosts.max.api import plugin
+from openpype.pipeline import CreatedInstance
+
+
+class CreateCamera(plugin.MaxCreator):
+    identifier = "io.openpype.creators.max.camera"
+    label = "Camera"
+    family = "camera"
+    icon = "gear"
+
+    def create(self, subset_name, instance_data, pre_create_data):
+        from pymxs import runtime as rt
+        sel_obj = list(rt.selection)
+        instance = super(CreateCamera, self).create(
+            subset_name,
+            instance_data,
+            pre_create_data)  # type: CreatedInstance
+        container = rt.getNodeByName(instance.data.get("instance_node"))
+        # TODO: Disable "Add to Containers?" Panel
+        # parent the selected cameras into the container
+        for obj in sel_obj:
+            obj.parent = container
+        # for additional work on the node:
+        # instance_node = rt.getNodeByName(instance.get("instance_node"))
openpype/hosts/max/plugins/load/load_camera_fbx.py (new file, 49 lines)

@@ -0,0 +1,49 @@
+import os
+from openpype.pipeline import (
+    load
+)
+
+
+class FbxLoader(load.LoaderPlugin):
+    """Fbx Loader"""
+
+    families = ["camera"]
+    representations = ["fbx"]
+    order = -9
+    icon = "code-fork"
+    color = "white"
+
+    def load(self, context, name=None, namespace=None, data=None):
+        from pymxs import runtime as rt
+
+        filepath = os.path.normpath(self.fname)
+
+        fbx_import_cmd = (
+            f"""
+
+FBXImporterSetParam "Animation" true
+FBXImporterSetParam "Cameras" true
+FBXImporterSetParam "AxisConversionMethod" true
+FbxExporterSetParam "UpAxis" "Y"
+FbxExporterSetParam "Preserveinstances" true
+
+importFile @"{filepath}" #noPrompt using:FBXIMP
+""")
+
+        self.log.debug(f"Executing command: {fbx_import_cmd}")
+        rt.execute(fbx_import_cmd)
+
+        container_name = f"{name}_CON"
+
+        asset = rt.getNodeByName(f"{name}")
+        # rename the container with "_CON"
+        container = rt.container(name=container_name)
+        asset.Parent = container
+
+        return container
+
+    def remove(self, container):
+        from pymxs import runtime as rt
+
+        node = container["node"]
+        rt.delete(node)
openpype/hosts/max/plugins/load/load_max_scene.py (new file, 50 lines)

@@ -0,0 +1,50 @@
+import os
+from openpype.pipeline import (
+    load
+)
+
+
+class MaxSceneLoader(load.LoaderPlugin):
+    """Max Scene Loader"""
+
+    families = ["camera"]
+    representations = ["max"]
+    order = -8
+    icon = "code-fork"
+    color = "green"
+
+    def load(self, context, name=None, namespace=None, data=None):
+        from pymxs import runtime as rt
+        path = os.path.normpath(self.fname)
+        # import the max scene by using "merge file"
+        path = path.replace('\\', '/')
+
+        merge_before = {
+            c for c in rt.rootNode.Children
+            if rt.classOf(c) == rt.Container
+        }
+        rt.mergeMaxFile(path)
+
+        merge_after = {
+            c for c in rt.rootNode.Children
+            if rt.classOf(c) == rt.Container
+        }
+        max_containers = merge_after.difference(merge_before)
+
+        if len(max_containers) != 1:
+            self.log.error("Something failed when loading.")
+
+        max_container = max_containers.pop()
+        container_name = f"{name}_CON"
+        # rename the container with "_CON"
+        # get the original container
+        container = rt.container(name=container_name)
+        max_container.Parent = container
+
+        return container
+
+    def remove(self, container):
+        from pymxs import runtime as rt
+
+        node = container["node"]
+        rt.delete(node)
@@ -15,7 +15,10 @@ from openpype.hosts.max.api import lib
 class AbcLoader(load.LoaderPlugin):
     """Alembic loader."""
 
-    families = ["model", "animation", "pointcache"]
+    families = ["model",
+                "camera",
+                "animation",
+                "pointcache"]
     label = "Load Alembic"
     representations = ["abc"]
     order = -10
openpype/hosts/max/plugins/publish/extract_camera_abc.py (new file, 75 lines)

@@ -0,0 +1,75 @@
+import os
+import pyblish.api
+from openpype.pipeline import (
+    publish,
+    OptionalPyblishPluginMixin
+)
+from pymxs import runtime as rt
+from openpype.hosts.max.api import (
+    maintained_selection,
+    get_all_children
+)
+
+
+class ExtractCameraAlembic(publish.Extractor,
+                           OptionalPyblishPluginMixin):
+    """
+    Extract Camera with AlembicExport
+    """
+
+    order = pyblish.api.ExtractorOrder - 0.1
+    label = "Extract Alembic Camera"
+    hosts = ["max"]
+    families = ["camera"]
+    optional = True
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        start = float(instance.data.get("frameStartHandle", 1))
+        end = float(instance.data.get("frameEndHandle", 1))
+
+        container = instance.data["instance_node"]
+
+        self.log.info("Extracting Camera ...")
+
+        stagingdir = self.staging_dir(instance)
+        filename = "{name}.abc".format(**instance.data)
+        path = os.path.join(stagingdir, filename)
+
+        # We run the render
+        self.log.info("Writing alembic '%s' to '%s'" % (filename,
+                                                        stagingdir))
+
+        export_cmd = (
+            f"""
+AlembicExport.ArchiveType = #ogawa
+AlembicExport.CoordinateSystem = #maya
+AlembicExport.StartFrame = {start}
+AlembicExport.EndFrame = {end}
+AlembicExport.CustomAttributes = true
+
+exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport
+
+""")
+
+        self.log.debug(f"Executing command: {export_cmd}")
+
+        with maintained_selection():
+            # select and export
+            rt.select(get_all_children(rt.getNodeByName(container)))
+            rt.execute(export_cmd)
+
+        self.log.info("Performing Extraction ...")
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'abc',
+            'ext': 'abc',
+            'files': filename,
+            "stagingDir": stagingdir,
+        }
+        instance.data["representations"].append(representation)
+        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
+                                                          path))
openpype/hosts/max/plugins/publish/extract_camera_fbx.py (new file, 75 lines)

@@ -0,0 +1,75 @@
+import os
+import pyblish.api
+from openpype.pipeline import (
+    publish,
+    OptionalPyblishPluginMixin
+)
+from pymxs import runtime as rt
+from openpype.hosts.max.api import (
+    maintained_selection,
+    get_all_children
+)
+
+
+class ExtractCameraFbx(publish.Extractor,
+                       OptionalPyblishPluginMixin):
+    """
+    Extract Camera with FbxExporter
+    """
+
+    order = pyblish.api.ExtractorOrder - 0.2
+    label = "Extract Fbx Camera"
+    hosts = ["max"]
+    families = ["camera"]
+    optional = True
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        container = instance.data["instance_node"]
+
+        self.log.info("Extracting Camera ...")
+        stagingdir = self.staging_dir(instance)
+        filename = "{name}.fbx".format(**instance.data)
+
+        filepath = os.path.join(stagingdir, filename)
+        self.log.info("Writing fbx file '%s' to '%s'" % (filename,
+                                                         filepath))
+
+        # Need to export:
+        # Animation = True
+        # Cameras = True
+        # AxisConversionMethod
+        fbx_export_cmd = (
+            f"""
+
+FBXExporterSetParam "Animation" true
+FBXExporterSetParam "Cameras" true
+FBXExporterSetParam "AxisConversionMethod" "Animation"
+FbxExporterSetParam "UpAxis" "Y"
+FbxExporterSetParam "Preserveinstances" true
+
+exportFile @"{filepath}" #noPrompt selectedOnly:true using:FBXEXP
+
+""")
+
+        self.log.debug(f"Executing command: {fbx_export_cmd}")
+
+        with maintained_selection():
+            # select and export
+            rt.select(get_all_children(rt.getNodeByName(container)))
+            rt.execute(fbx_export_cmd)
+
+        self.log.info("Performing Extraction ...")
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        representation = {
+            'name': 'fbx',
+            'ext': 'fbx',
+            'files': filename,
+            "stagingDir": stagingdir,
+        }
+        instance.data["representations"].append(representation)
+        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
+                                                          filepath))
openpype/hosts/max/plugins/publish/extract_max_scene_raw.py (new file, 60 lines)

@@ -0,0 +1,60 @@
+import os
+import pyblish.api
+from openpype.pipeline import (
+    publish,
+    OptionalPyblishPluginMixin
+)
+from pymxs import runtime as rt
+from openpype.hosts.max.api import (
+    maintained_selection,
+    get_all_children
+)
+
+
+class ExtractMaxSceneRaw(publish.Extractor,
+                         OptionalPyblishPluginMixin):
+    """
+    Extract Raw Max Scene with SaveSelected
+    """
+
+    order = pyblish.api.ExtractorOrder - 0.2
+    label = "Extract Max Scene (Raw)"
+    hosts = ["max"]
+    families = ["camera"]
+    optional = True
+
+    def process(self, instance):
+        if not self.is_active(instance.data):
+            return
+        container = instance.data["instance_node"]
+
+        # publish the raw scene for camera
+        self.log.info("Extracting Raw Max Scene ...")
+
+        stagingdir = self.staging_dir(instance)
+        filename = "{name}.max".format(**instance.data)
+
+        max_path = os.path.join(stagingdir, filename)
+        self.log.info("Writing max file '%s' to '%s'" % (filename,
+                                                         max_path))
+
+        if "representations" not in instance.data:
+            instance.data["representations"] = []
+
+        # saving max scene
+        with maintained_selection():
+            # need to figure out how to select the camera
+            rt.select(get_all_children(rt.getNodeByName(container)))
+            rt.execute(f'saveNodes selection "{max_path}" quiet:true')
+
+        self.log.info("Performing Extraction ...")
+
+        representation = {
+            'name': 'max',
+            'ext': 'max',
+            'files': filename,
+            "stagingDir": stagingdir,
+        }
+        instance.data["representations"].append(representation)
+        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
+                                                          max_path))
@@ -51,7 +51,7 @@ class ExtractAlembic(publish.Extractor):
     order = pyblish.api.ExtractorOrder
     label = "Extract Pointcache"
     hosts = ["max"]
-    families = ["pointcache", "camera"]
+    families = ["pointcache"]
 
     def process(self, instance):
        start = float(instance.data.get("frameStartHandle", 1))
@@ -0,0 +1,48 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+from openpype.pipeline import PublishValidationError
+from pymxs import runtime as rt
+
+
+class ValidateCameraContent(pyblish.api.InstancePlugin):
+    """Validates Camera instance contents.
+
+    A Camera instance may only hold a SINGLE camera's transform
+    """
+
+    order = pyblish.api.ValidatorOrder
+    families = ["camera"]
+    hosts = ["max"]
+    label = "Camera Contents"
+    camera_type = ["$Free_Camera", "$Target_Camera",
+                   "$Physical_Camera", "$Target"]
+
+    def process(self, instance):
+        invalid = self.get_invalid(instance)
+        if invalid:
+            raise PublishValidationError("Camera instance must only include"
+                                         "camera (and camera target)")
+
+    def get_invalid(self, instance):
+        """
+        Get invalid nodes if the instance is not camera
+        """
+        invalid = list()
+        container = instance.data["instance_node"]
+        self.log.info("Validating look content for "
+                      "{}".format(container))
+
+        con = rt.getNodeByName(container)
+        selection_list = list(con.Children)
+        for sel in selection_list:
+            # to avoid Attribute Error from pymxs wrapper
+            sel_tmp = str(sel)
+            found = False
+            for cam in self.camera_type:
+                if sel_tmp.startswith(cam):
+                    found = True
+                    break
+            if not found:
+                self.log.error("Camera not found")
+                invalid.append(sel)
+        return invalid
@@ -1,4 +1,13 @@
 # -*- coding: utf-8 -*-
+import os
+import sys
+
+# this might happen in some 3dsmax version where PYTHONPATH isn't added
+# to sys.path automatically
+for path in os.environ["PYTHONPATH"].split(os.pathsep):
+    if path and path not in sys.path:
+        sys.path.append(path)
+
 from openpype.hosts.max.api import MaxHost
 from openpype.pipeline import install_host
 
@@ -4,6 +4,7 @@ from maya import cmds
 
 from openpype.client import get_asset_by_name, get_project
 from openpype.pipeline import legacy_io
+from . import lib
 
 
 class ToolWindows:

@@ -59,25 +60,11 @@ def edit_shader_definitions():
 
 def reset_frame_range():
     """Set frame range to current asset"""
     # Set FPS first
-    fps = {15: 'game',
-           24: 'film',
-           25: 'pal',
-           30: 'ntsc',
-           48: 'show',
-           50: 'palf',
-           60: 'ntscf',
-           23.98: '23.976fps',
-           23.976: '23.976fps',
-           29.97: '29.97fps',
-           47.952: '47.952fps',
-           47.95: '47.952fps',
-           59.94: '59.94fps',
-           44100: '44100fps',
-           48000: '48000fps'
-           }.get(float(legacy_io.Session.get("AVALON_FPS", 25)), "pal")
-
-    cmds.currentUnit(time=fps)
+    fps = lib.convert_to_maya_fps(
+        float(legacy_io.Session.get("AVALON_FPS", 25))
+    )
+    lib.set_scene_fps(fps)
 
     # Set frame start/end
     project_name = legacy_io.active_project()
@@ -5,6 +5,7 @@
 import platform
 import uuid
+import math
 import re
 
 import json
 import logging
@@ -255,11 +256,6 @@ def read(node):
     return data
 
 
-def _get_mel_global(name):
-    """Return the value of a mel global variable"""
-    return mel.eval("$%s = $%s;" % (name, name))
-
-
 def matrix_equals(a, b, tolerance=1e-10):
     """
     Compares two matrices with an imperfection tolerance
@@ -625,15 +621,15 @@
         cmds.delete(self._nodes)
 
 
-def get_current_renderlayer():
-    return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
-
-
 def get_renderer(layer):
     with renderlayer(layer):
         return cmds.getAttr("defaultRenderGlobals.currentRenderer")
 
 
+def get_current_renderlayer():
+    return cmds.editRenderLayerGlobals(query=True, currentRenderLayer=True)
+
+
 @contextlib.contextmanager
 def no_undo(flush=False):
     """Disable the undo queue during the context
@@ -1374,27 +1370,6 @@ def set_id(node, unique_id, overwrite=False):
         cmds.setAttr(attr, unique_id, type="string")
 
 
-# endregion ID
-def get_reference_node(path):
-    """
-    Get the reference node when the path is found being used in a reference
-    Args:
-        path (str): the file path to check
-
-    Returns:
-        node (str): name of the reference node in question
-    """
-    try:
-        node = cmds.file(path, query=True, referenceNode=True)
-    except RuntimeError:
-        log.debug('File is not referenced : "{}"'.format(path))
-        return
-
-    reference_path = cmds.referenceQuery(path, filename=True)
-    if os.path.normpath(path) == os.path.normpath(reference_path):
-        return node
-
-
 def set_attribute(attribute, value, node):
     """Adjust attributes based on the value from the attribute data
 
@@ -1996,8 +1971,6 @@ def get_id_from_sibling(node, history_only=True):
     return first_id
 
 
-
-
 # Project settings
 def set_scene_fps(fps, update=True):
     """Set FPS from project configuration
@@ -2010,28 +1983,21 @@ def set_scene_fps(fps, update=True):

    """

    fps_mapping = {'15': 'game',
                   '24': 'film',
                   '25': 'pal',
                   '30': 'ntsc',
                   '48': 'show',
                   '50': 'palf',
                   '60': 'ntscf',
                   '23.98': '23.976fps',
                   '23.976': '23.976fps',
                   '29.97': '29.97fps',
                   '47.952': '47.952fps',
                   '47.95': '47.952fps',
                   '59.94': '59.94fps',
                   '44100': '44100fps',
                   '48000': '48000fps'}

    # pull from mapping
    # this should convert a float string to float and an int to int,
    # so 25.0 is converted to 25, but 23.98 will still be a float.
    dec, ipart = math.modf(fps)
    if dec == 0.0:
        fps = int(ipart)
    fps_mapping = {
        '15': 'game',
        '24': 'film',
        '25': 'pal',
        '30': 'ntsc',
        '48': 'show',
        '50': 'palf',
        '60': 'ntscf',
        '23.976023976023978': '23.976fps',
        '29.97002997002997': '29.97fps',
        '47.952047952047955': '47.952fps',
        '59.94005994005994': '59.94fps',
        '44100': '44100fps',
        '48000': '48000fps'
    }

    unit = fps_mapping.get(str(fps), None)
    if unit is None:
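    # The math.modf() step above, in isolation (a hedged illustration):
    #
    #   >>> import math
    #   >>> math.modf(25.0)
    #   (0.0, 25.0)     # whole number: fps becomes int 25, key "25" -> 'pal'
    #   >>> math.modf(23.976023976023978)[0] > 0
    #   True            # fractional: fps stays a float and maps via str(fps)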
@@ -2151,7 +2117,9 @@ def set_context_settings():
    asset_data = asset_doc.get("data", {})

    # Set project fps
    fps = asset_data.get("fps", project_data.get("fps", 25))
    fps = convert_to_maya_fps(
        asset_data.get("fps", project_data.get("fps", 25))
    )
    legacy_io.Session["AVALON_FPS"] = str(fps)
    set_scene_fps(fps)
@@ -2173,15 +2141,12 @@ def validate_fps():

    """

    fps = get_current_project_asset(fields=["data.fps"])["data"]["fps"]
    # TODO(antirotor): This is a hack for framerates having multiple
    # decimal places. Ftrack is ceiling decimal values on
    # fps to two decimal places but Maya 2019+ is reporting those fps
    # with much higher resolution. As we currently cannot fix Ftrack
    # rounding, we have to round those numbers coming from Maya.
    current_fps = float_round(mel.eval('currentTimeUnitToFPS()'), 2)
    expected_fps = convert_to_maya_fps(
        get_current_project_asset(fields=["data.fps"])["data"]["fps"]
    )
    current_fps = mel.eval('currentTimeUnitToFPS()')

    fps_match = current_fps == fps
    fps_match = current_fps == expected_fps
    if not fps_match and not IS_HEADLESS:
        from openpype.widgets import popup
@@ -2190,14 +2155,19 @@ def validate_fps():
        dialog = popup.PopupUpdateKeys(parent=parent)
        dialog.setModal(True)
        dialog.setWindowTitle("Maya scene does not match project FPS")
        dialog.setMessage("Scene %i FPS does not match project %i FPS" %
                          (current_fps, fps))
        dialog.setMessage(
            "Scene {} FPS does not match project {} FPS".format(
                current_fps, expected_fps
            )
        )
        dialog.setButtonText("Fix")

        # Set new text for button (add optional argument for the popup?)
        toggle = dialog.widgets["toggle"]
        update = toggle.isChecked()
        dialog.on_clicked_state.connect(lambda: set_scene_fps(fps, update))
        dialog.on_clicked_state.connect(
            lambda: set_scene_fps(expected_fps, update)
        )

        dialog.show()
@@ -3382,6 +3352,119 @@ def get_attribute_input(attr):
    return connections[0] if connections else None


def convert_to_maya_fps(fps):
    """Convert any fps to supported Maya framerates."""
    float_framerates = [
        23.976023976023978,
        # WTF is 29.97 df vs fps?
        29.97002997002997,
        47.952047952047955,
        59.94005994005994
    ]
    # 44100 fps evaluates as 41000.0. Why? Omitting for now.
    int_framerates = [
        2, 3, 4, 5, 6, 8, 10, 12, 15, 16, 20, 24, 25, 30, 40, 48, 50,
        60, 75, 80, 90, 100, 120, 125, 150, 200, 240, 250, 300, 375,
        400, 500, 600, 750, 1200, 1500, 2000, 3000, 6000, 48000
    ]

    # If input fps is a whole number we'll return it as-is.
    if float(fps).is_integer():
        # Validate fps is part of Maya's fps selection.
        if fps not in int_framerates:
            raise ValueError(
                "Framerate \"{}\" is not supported in Maya".format(fps)
            )
        return fps
    else:
        # Differences to supported float framerates.
        differences = []
        for i in float_framerates:
            differences.append(abs(i - fps))

        # Validate the difference does not stray too far from the
        # supported framerates.
        min_difference = min(differences)
        min_index = differences.index(min_difference)
        supported_framerate = float_framerates[min_index]
        if min_difference > 0.1:
            raise ValueError(
                "Framerate \"{}\" strays too far from any supported framerate"
                " in Maya. Closest supported framerate is \"{}\"".format(
                    fps, supported_framerate
                )
            )

        return supported_framerate
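# Hedged usage sketch for convert_to_maya_fps() above; the values shown are
# illustrative but follow directly from the framerate lists:
#
#   >>> convert_to_maya_fps(25)       # supported integer rate, returned as-is
#   25
#   >>> convert_to_maya_fps(23.98)    # snapped to the nearest float rate
#   23.976023976023978
#   >>> convert_to_maya_fps(17)       # unsupported -> raises ValueError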
def write_xgen_file(data, filepath):
    """Overwrites data in .xgen files.

    Quite naive approach to mainly overwrite "xgDataPath" and "xgProjectPath".

    Args:
        data (dict): Dictionary of key, value. Key matches with xgen file.
            For example:
                {"xgDataPath": "some/path"}
        filepath (string): Absolute path of .xgen file.
    """
    # Generate a regex lookup per line; basically match any of the keys
    # in `\t{key}\t\t`.
    keys = "|".join(re.escape(key) for key in data.keys())
    re_keys = re.compile("^\t({})\t\t".format(keys))

    lines = []
    with open(filepath, "r") as f:
        for line in f:
            match = re_keys.match(line)
            if match:
                key = match.group(1)
                value = data[key]
                line = "\t{}\t\t{}\n".format(key, value)

            lines.append(line)

    with open(filepath, "w") as f:
        f.writelines(lines)
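# Hedged usage sketch for write_xgen_file() above, with hypothetical paths;
# only lines matching "\t<key>\t\t" are rewritten, everything else is kept:
#
#   write_xgen_file(
#       {
#           "xgProjectPath": "P:/projects/show/work/maya/",
#           "xgDataPath": "${PROJECT}xgen/collections/char__collection"
#       },
#       "P:/projects/show/work/maya/shot_010_v001__char__collection.xgen"
#   )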
def imageInfo(filepath):
    """Take reference from makeTx.py in Arnold
    ImageInfo(filename): Get Image Information for colorspace
@@ -50,7 +50,6 @@ def install():
        parent="MayaWindow"
    )

    renderer = cmds.getAttr('defaultRenderGlobals.currentRenderer').lower()
    # Create context menu
    context_label = "{}, {}".format(
        legacy_io.Session["AVALON_ASSET"],
@@ -514,6 +514,9 @@ def check_lock_on_current_file():

    # add the lock file when opening the file
    filepath = current_file()
    # Skip if current file is 'untitled'
    if not filepath:
        return

    if is_workfile_locked(filepath):
        # add lockfile dialog
@@ -680,10 +683,12 @@ def before_workfile_save(event):

def after_workfile_save(event):
    workfile_name = event["filename"]
    if handle_workfile_locks():
        if workfile_name:
            if not is_workfile_locked(workfile_name):
                create_workfile_lock(workfile_name)
    if (
        handle_workfile_locks()
        and workfile_name
        and not is_workfile_locked(workfile_name)
    ):
        create_workfile_lock(workfile_name)


class MayaDirmap(HostDirmap):
@@ -300,6 +300,39 @@ class ReferenceLoader(Loader):
            str(representation["_id"]),
            type="string")

        # When an animation or pointcache gets connected to an Xgen container,
        # the compound attribute "xgenContainers" gets created. When animation
        # containers get updated we also need to update the cacheFileName on
        # the Xgen collection.
        compound_name = "xgenContainers"
        if cmds.objExists("{}.{}".format(node, compound_name)):
            import xgenm
            container_amount = cmds.getAttr(
                "{}.{}".format(node, compound_name), size=True
            )
            # loop through all compound children
            for i in range(container_amount):
                attr = "{}.{}[{}].container".format(node, compound_name, i)
                objectset = cmds.listConnections(attr)[0]
                reference_node = cmds.sets(objectset, query=True)[0]
                palettes = cmds.ls(
                    cmds.referenceQuery(reference_node, nodes=True),
                    type="xgmPalette"
                )
                for palette in palettes:
                    for description in xgenm.descriptions(palette):
                        xgenm.setAttr(
                            "cacheFileName",
                            path.replace("\\", "/"),
                            palette,
                            description,
                            "SplinePrimitive"
                        )

            # Refresh UI and viewport.
            de = xgenm.xgGlobal.DescriptionEditor
            de.refresh("Full")

    def remove(self, container):
        """Remove an existing `container` from Maya scene
@@ -2,7 +2,7 @@ import json

from maya import cmds

from openpype.pipeline import registered_host
from openpype.pipeline import registered_host, get_current_asset_name
from openpype.pipeline.workfile.workfile_template_builder import (
    TemplateAlreadyImported,
    AbstractTemplateBuilder,
@@ -41,10 +41,27 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
        ))

        cmds.sets(name=PLACEHOLDER_SET, empty=True)
        cmds.file(path, i=True, returnNewNodes=True)
        new_nodes = cmds.file(path, i=True, returnNewNodes=True)

        cmds.setAttr(PLACEHOLDER_SET + ".hiddenInOutliner", True)

        imported_sets = cmds.ls(new_nodes, set=True)
        if not imported_sets:
            return True

        # update imported sets information
        asset_name = get_current_asset_name()
        for node in imported_sets:
            if not cmds.attributeQuery("id", node=node, exists=True):
                continue
            if cmds.getAttr("{}.id".format(node)) != "pyblish.avalon.instance":
                continue
            if not cmds.attributeQuery("asset", node=node, exists=True):
                continue

            cmds.setAttr(
                "{}.asset".format(node), asset_name, type="string")

        return True
@@ -6,7 +6,7 @@ from openpype.hosts.maya.api import (
from maya import cmds


class CreateAss(plugin.Creator):
class CreateArnoldSceneSource(plugin.Creator):
    """Arnold Scene Source"""

    name = "ass"
@@ -29,7 +29,7 @@ class CreateAss(plugin.Creator):
    maskOperator = False

    def __init__(self, *args, **kwargs):
        super(CreateAss, self).__init__(*args, **kwargs)
        super(CreateArnoldSceneSource, self).__init__(*args, **kwargs)

        # Add animation data
        self.data.update(lib.collect_animation_data())
@@ -52,7 +52,7 @@ class CreateAss(plugin.Creator):
        self.data["maskOperator"] = self.maskOperator

    def process(self):
        instance = super(CreateAss, self).process()
        instance = super(CreateArnoldSceneSource, self).process()

        nodes = []
@@ -61,6 +61,6 @@ class CreateAss(plugin.Creator):

        cmds.sets(nodes, rm=instance)

        assContent = cmds.sets(name="content_SET")
        assProxy = cmds.sets(name="proxy_SET", empty=True)
        assContent = cmds.sets(name=instance + "_content_SET")
        assProxy = cmds.sets(name=instance + "_proxy_SET", empty=True)
        cmds.sets([assContent, assProxy], forceElement=instance)
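# Note (illustrative, not from the diff): with an instance set named
# "assMain", the lines above now produce "assMain_content_SET" and
# "assMain_proxy_SET" instead of the generic "content_SET"/"proxy_SET",
# so multiple Arnold Scene Source instances no longer collide on set names.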
@@ -1,3 +1,5 @@
from maya import cmds

from openpype.hosts.maya.api import (
    lib,
    plugin
@@ -37,3 +39,9 @@ class CreatePointCache(plugin.Creator):
        # Default to not send to farm.
        self.data["farm"] = False
        self.data["priority"] = 50

    def process(self):
        instance = super(CreatePointCache, self).process()

        assProxy = cmds.sets(name=instance + "_proxy_SET", empty=True)
        cmds.sets(assProxy, forceElement=instance)
@@ -54,6 +54,7 @@ class CreateRender(plugin.Creator):
        tileRendering (bool): Instance is set to tile rendering mode. We
            won't submit actual render, but we'll make publish job to wait
            for Tile Assembly job done and then publish.
        strict_error_checking (bool): Enable/disable error checking on DL

    See Also:
        https://pype.club/docs/artist_hosts_maya#creating-basic-render-setup
@@ -271,6 +272,9 @@ class CreateRender(plugin.Creator):
            secondary_pool = pool_setting["secondary_pool"]
            self.data["secondaryPool"] = self._set_default_pool(pool_names,
                                                                secondary_pool)
            strict_error_checking = maya_submit_dl.get("strict_error_checking",
                                                       True)
            self.data["strict_error_checking"] = strict_error_checking

        if muster_enabled:
            self.log.info(">>> Loading Muster credentials ...")
@@ -2,9 +2,9 @@ from openpype.hosts.maya.api import plugin


class CreateXgen(plugin.Creator):
    """Xgen interactive export"""
    """Xgen"""

    name = "xgen"
    label = "Xgen Interactive"
    label = "Xgen"
    family = "xgen"
    icon = "pagelines"
153
openpype/hosts/maya/plugins/inventory/connect_geometry.py
Normal file

@@ -0,0 +1,153 @@
from maya import cmds

from openpype.pipeline import InventoryAction, get_representation_context
from openpype.hosts.maya.api.lib import get_id


class ConnectGeometry(InventoryAction):
    """Connect geometries within containers.

    The source container connects to the target containers by searching for
    matching geometry IDs (cbId).
    Source containers are of family "animation" or "pointcache".
    The connection will be done with a live world space blendshape.
    """

    label = "Connect Geometry"
    icon = "link"
    color = "white"

    def process(self, containers):
        # Validate selection is more than 1.
        message = (
            "Only 1 container selected. 2+ containers needed for this action."
        )
        if len(containers) == 1:
            self.display_warning(message)
            return

        # Categorize containers by family.
        containers_by_family = {}
        for container in containers:
            family = get_representation_context(
                container["representation"]
            )["subset"]["data"]["family"]
            try:
                containers_by_family[family].append(container)
            except KeyError:
                containers_by_family[family] = [container]

        # Validate to only 1 source container.
        source_containers = containers_by_family.get("animation", [])
        source_containers += containers_by_family.get("pointcache", [])
        source_container_namespaces = [
            x["namespace"] for x in source_containers
        ]
        message = (
            "{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
            "\"animation\" or \"pointcache\".".format(
                len(source_containers), source_container_namespaces
            )
        )
        if len(source_containers) != 1:
            self.display_warning(message)
            return

        source_object = source_containers[0]["objectName"]

        # Collect matching geometry transforms based on the cbId attribute.
        target_containers = []
        for family, containers in containers_by_family.items():
            if family in ["animation", "pointcache"]:
                continue

            target_containers.extend(containers)

        source_data = self.get_container_data(source_object)
        matches = []
        node_types = set()
        for target_container in target_containers:
            target_data = self.get_container_data(
                target_container["objectName"]
            )
            node_types.update(target_data["node_types"])
            for id, transform in target_data["ids"].items():
                source_match = source_data["ids"].get(id)
                if source_match:
                    matches.append([source_match, transform])

        # Message user about what is about to happen.
        if not matches:
            self.display_warning("No matching geometries found.")
            return

        message = "Connecting geometries:\n\n"
        for match in matches:
            message += "{} > {}\n".format(match[0], match[1])

        choice = self.display_warning(message, show_cancel=True)
        if choice is False:
            return

        # Setup live worldspace blendshape connection.
        for source, target in matches:
            blendshape = cmds.blendShape(source, target)[0]
            cmds.setAttr(blendshape + ".origin", 0)
            cmds.setAttr(blendshape + "." + target.split(":")[-1], 1)

        # Update Xgen if in any of the containers.
        if "xgmPalette" in node_types:
            cmds.xgmPreview()

    def get_container_data(self, container):
        """Collects data about the container nodes.

        Args:
            container (dict): Container instance.

        Returns:
            data (dict):
                "node_types": All node types in container nodes.
                "ids": If the node is a mesh, we collect its parent transform
                    id.
        """
        data = {"node_types": set(), "ids": {}}
        ref_node = cmds.sets(container, query=True, nodesOnly=True)[0]
        for node in cmds.referenceQuery(ref_node, nodes=True):
            node_type = cmds.nodeType(node)
            data["node_types"].add(node_type)

            # Only interested in mesh transforms for connecting geometry with
            # blendshape.
            if node_type != "mesh":
                continue

            transform = cmds.listRelatives(node, parent=True)[0]
            data["ids"][get_id(transform)] = transform

        return data

    def display_warning(self, message, show_cancel=False):
        """Show feedback to user.

        Returns:
            bool
        """

        from Qt import QtWidgets

        accept = QtWidgets.QMessageBox.Ok
        if show_cancel:
            buttons = accept | QtWidgets.QMessageBox.Cancel
        else:
            buttons = accept

        state = QtWidgets.QMessageBox.warning(
            None,
            "",
            message,
            buttons=buttons,
            defaultButton=accept
        )

        return state == accept
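# A hedged sketch of the matching above, with hypothetical ids and names:
# if the source container exposes {"5f3a9c...": "anim_01_:body_GEO"} and a
# target container holds the same cbId on "char_01_:body_GEO", the pair
# ["anim_01_:body_GEO", "char_01_:body_GEO"] becomes a match and a live
# world-space blendshape drives the target from the source.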
168
openpype/hosts/maya/plugins/inventory/connect_xgen.py
Normal file

@@ -0,0 +1,168 @@
from maya import cmds
import xgenm

from openpype.pipeline import (
    InventoryAction, get_representation_context, get_representation_path
)


class ConnectXgen(InventoryAction):
    """Connect Xgen with an animation or pointcache."""

    label = "Connect Xgen"
    icon = "link"
    color = "white"

    def process(self, containers):
        # Validate selection is more than 1.
        message = (
            "Only 1 container selected. 2+ containers needed for this action."
        )
        if len(containers) == 1:
            self.display_warning(message)
            return

        # Categorize containers by family.
        containers_by_family = {}
        for container in containers:
            family = get_representation_context(
                container["representation"]
            )["subset"]["data"]["family"]
            try:
                containers_by_family[family].append(container)
            except KeyError:
                containers_by_family[family] = [container]

        # Validate to only 1 source container.
        source_containers = containers_by_family.get("animation", [])
        source_containers += containers_by_family.get("pointcache", [])
        source_container_namespaces = [
            x["namespace"] for x in source_containers
        ]
        message = (
            "{} animation containers selected:\n\n{}\n\nOnly select 1 of type "
            "\"animation\" or \"pointcache\".".format(
                len(source_containers), source_container_namespaces
            )
        )
        if len(source_containers) != 1:
            self.display_warning(message)
            return

        source_container = source_containers[0]
        source_object = source_container["objectName"]

        # Validate source representation is an alembic.
        source_path = get_representation_path(
            get_representation_context(
                source_container["representation"]
            )["representation"]
        ).replace("\\", "/")
        message = "Animation container \"{}\" is not an alembic:\n{}".format(
            source_container["namespace"], source_path
        )
        if not source_path.endswith(".abc"):
            self.display_warning(message)
            return

        # Target containers.
        target_containers = []
        for family, containers in containers_by_family.items():
            if family in ["animation", "pointcache"]:
                continue

            target_containers.extend(containers)

        # Inform user of connections from source representation to target
        # descriptions.
        descriptions_data = []
        connections_msg = ""
        for target_container in target_containers:
            reference_node = cmds.sets(
                target_container["objectName"], query=True
            )[0]
            palettes = cmds.ls(
                cmds.referenceQuery(reference_node, nodes=True),
                type="xgmPalette"
            )
            for palette in palettes:
                for description in xgenm.descriptions(palette):
                    descriptions_data.append([palette, description])
                    connections_msg += "\n{}/{}".format(palette, description)

        message = "Connecting \"{}\" to:\n".format(
            source_container["namespace"]
        )
        message += connections_msg
        choice = self.display_warning(message, show_cancel=True)
        if choice is False:
            return

        # Recreate "xgenContainers" attribute to reset.
        compound_name = "xgenContainers"
        attr = "{}.{}".format(source_object, compound_name)
        if cmds.objExists(attr):
            cmds.deleteAttr(attr)

        cmds.addAttr(
            source_object,
            longName=compound_name,
            attributeType="compound",
            numberOfChildren=1,
            multi=True
        )

        # Connect target containers.
        for target_container in target_containers:
            cmds.addAttr(
                source_object,
                longName="container",
                attributeType="message",
                parent=compound_name
            )
            index = target_containers.index(target_container)
            cmds.connectAttr(
                target_container["objectName"] + ".message",
                source_object + ".{}[{}].container".format(
                    compound_name, index
                )
            )

        # Setup cache on Xgen
        object = "SplinePrimitive"
        for palette, description in descriptions_data:
            xgenm.setAttr("useCache", "true", palette, description, object)
            xgenm.setAttr("liveMode", "false", palette, description, object)
            xgenm.setAttr(
                "cacheFileName", source_path, palette, description, object
            )

        # Refresh UI and viewport.
        de = xgenm.xgGlobal.DescriptionEditor
        de.refresh("Full")

    def display_warning(self, message, show_cancel=False):
        """Show feedback to user.

        Returns:
            bool
        """

        from Qt import QtWidgets

        accept = QtWidgets.QMessageBox.Ok
        if show_cancel:
            buttons = accept | QtWidgets.QMessageBox.Cancel
        else:
            buttons = accept

        state = QtWidgets.QMessageBox.warning(
            None,
            "",
            message,
            buttons=buttons,
            defaultButton=accept
        )

        return state == accept
@@ -93,7 +93,20 @@ class ImportMayaLoader(load.LoaderPlugin):

    """
    representations = ["ma", "mb", "obj"]
    families = ["*"]
    families = [
        "model",
        "pointcache",
        "proxyAbc",
        "animation",
        "mayaAscii",
        "mayaScene",
        "setdress",
        "layout",
        "camera",
        "rig",
        "camerarig",
        "staticMesh"
    ]

    label = "Import"
    order = 10
@@ -1,132 +0,0 @@
import os

from openpype.pipeline import (
    legacy_io,
    load,
    get_representation_path
)
from openpype.settings import get_project_settings


class AlembicStandinLoader(load.LoaderPlugin):
    """Load Alembic as Arnold Standin"""

    families = ["animation", "model", "proxyAbc", "pointcache"]
    representations = ["abc"]

    label = "Import Alembic as Arnold Standin"
    order = -5
    icon = "code-fork"
    color = "orange"

    def load(self, context, name, namespace, options):

        import maya.cmds as cmds
        import mtoa.ui.arnoldmenu
        from openpype.hosts.maya.api.pipeline import containerise
        from openpype.hosts.maya.api.lib import unique_namespace

        version = context["version"]
        version_data = version.get("data", {})
        family = version["data"]["families"]
        self.log.info("version_data: {}\n".format(version_data))
        self.log.info("family: {}\n".format(family))
        frameStart = version_data.get("frameStart", None)

        asset = context["asset"]["name"]
        namespace = namespace or unique_namespace(
            asset + "_",
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        # Root group
        label = "{}:{}".format(namespace, name)
        root = cmds.group(name=label, empty=True)

        settings = get_project_settings(os.environ['AVALON_PROJECT'])
        colors = settings["maya"]["load"]["colors"]
        fps = legacy_io.Session["AVALON_FPS"]
        c = colors.get(family[0])
        if c is not None:
            r = (float(c[0]) / 255)
            g = (float(c[1]) / 255)
            b = (float(c[2]) / 255)
            cmds.setAttr(root + ".useOutlinerColor", 1)
            cmds.setAttr(root + ".outlinerColor",
                         r, g, b)

        transform_name = label + "_ABC"

        standinShape = cmds.ls(mtoa.ui.arnoldmenu.createStandIn())[0]
        standin = cmds.listRelatives(standinShape, parent=True,
                                     typ="transform")
        standin = cmds.rename(standin, transform_name)
        standinShape = cmds.listRelatives(standin, children=True)[0]

        cmds.parent(standin, root)

        # Set the standin filepath
        cmds.setAttr(standinShape + ".dso", self.fname, type="string")
        cmds.setAttr(standinShape + ".abcFPS", float(fps))

        if frameStart is None:
            cmds.setAttr(standinShape + ".useFrameExtension", 0)

        elif "model" in family:
            cmds.setAttr(standinShape + ".useFrameExtension", 0)

        else:
            cmds.setAttr(standinShape + ".useFrameExtension", 1)

        nodes = [root, standin]
        self[:] = nodes

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def update(self, container, representation):

        import pymel.core as pm

        path = get_representation_path(representation)
        fps = legacy_io.Session["AVALON_FPS"]
        # Update the standin
        standins = list()
        members = pm.sets(container['objectName'], query=True)
        self.log.info("container:{}".format(container))
        for member in members:
            shape = member.getShape()
            if (shape and shape.type() == "aiStandIn"):
                standins.append(shape)

        for standin in standins:
            standin.dso.set(path)
            standin.abcFPS.set(float(fps))
            if "modelMain" in container['objectName']:
                standin.useFrameExtension.set(0)
            else:
                standin.useFrameExtension.set(1)

        container = pm.PyNode(container["objectName"])
        container.representation.set(str(representation["_id"]))

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        import maya.cmds as cmds
        members = cmds.sets(container['objectName'], query=True)
        cmds.lockNode(members, lock=False)
        cmds.delete([container['objectName']] + members)

        # Clean up the namespace
        try:
            cmds.namespace(removeNamespace=container['namespace'],
                           deleteNamespaceContent=True)
        except RuntimeError:
            pass
218
openpype/hosts/maya/plugins/load/load_arnold_standin.py
Normal file

@@ -0,0 +1,218 @@
import os
import clique

import maya.cmds as cmds
import mtoa.ui.arnoldmenu

from openpype.settings import get_project_settings
from openpype.pipeline import (
    load,
    get_representation_path
)
from openpype.hosts.maya.api.lib import (
    unique_namespace, get_attribute_input, maintained_selection
)
from openpype.hosts.maya.api.pipeline import containerise


def is_sequence(files):
    sequence = False
    collections, remainder = clique.assemble(files)
    if collections:
        sequence = True

    return sequence
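# Hedged usage sketch for is_sequence() above; clique.assemble() groups
# numbered files into collections (single files fall into the remainder):
#
#   >>> is_sequence(["cache.0001.ass", "cache.0002.ass"])
#   True
#   >>> is_sequence(["cache.ass"])
#   False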
class ArnoldStandinLoader(load.LoaderPlugin):
    """Load as Arnold standin"""

    families = ["ass", "animation", "model", "proxyAbc", "pointcache"]
    representations = ["ass", "abc"]

    label = "Load as Arnold standin"
    order = -5
    icon = "code-fork"
    color = "orange"

    def load(self, context, name, namespace, options):
        version = context['version']
        version_data = version.get("data", {})

        self.log.info("version_data: {}\n".format(version_data))

        asset = context['asset']['name']
        namespace = namespace or unique_namespace(
            asset + "_",
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        # Root group
        label = "{}:{}".format(namespace, name)
        root = cmds.group(name=label, empty=True)

        # Set color.
        settings = get_project_settings(context["project"]["name"])
        color = settings['maya']['load']['colors'].get('ass')
        if color is not None:
            cmds.setAttr(root + ".useOutlinerColor", True)
            cmds.setAttr(
                root + ".outlinerColor", color[0], color[1], color[2]
            )

        with maintained_selection():
            # Create transform with shape
            transform_name = label + "_standin"

            standin_shape = mtoa.ui.arnoldmenu.createStandIn()
            standin = cmds.listRelatives(standin_shape, parent=True)[0]
            standin = cmds.rename(standin, transform_name)
            standin_shape = cmds.listRelatives(standin, shapes=True)[0]

            cmds.parent(standin, root)

            # Set the standin filepath
            path, operator = self._setup_proxy(
                standin_shape, self.fname, namespace
            )
            cmds.setAttr(standin_shape + ".dso", path, type="string")
            sequence = is_sequence(os.listdir(os.path.dirname(self.fname)))
            cmds.setAttr(standin_shape + ".useFrameExtension", sequence)

        nodes = [root, standin]
        if operator is not None:
            nodes.append(operator)
        self[:] = nodes

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)
    def get_next_free_multi_index(self, attr_name):
        """Find the next unconnected multi index at the input attribute."""
        for index in range(10000000):
            connection_info = cmds.connectionInfo(
                "{}[{}]".format(attr_name, index),
                sourceFromDestination=True
            )
            if len(connection_info or []) == 0:
                return index

    def _get_proxy_path(self, path):
        basename_split = os.path.basename(path).split(".")
        proxy_basename = (
            basename_split[0] + "_proxy." + ".".join(basename_split[1:])
        )
        proxy_path = "/".join([os.path.dirname(path), proxy_basename])
        return proxy_basename, proxy_path
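    # Illustrative result for _get_proxy_path() (hypothetical path): for
    # "publish/v001/model.0001.ass" it returns
    # ("model_proxy.0001.ass", "publish/v001/model_proxy.0001.ass").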
    def _setup_proxy(self, shape, path, namespace):
        proxy_basename, proxy_path = self._get_proxy_path(path)

        options_node = "defaultArnoldRenderOptions"
        merge_operator = get_attribute_input(options_node + ".operator")
        if merge_operator is None:
            merge_operator = cmds.createNode("aiMerge")
            cmds.connectAttr(
                merge_operator + ".message", options_node + ".operator"
            )

        merge_operator = merge_operator.split(".")[0]

        string_replace_operator = cmds.createNode(
            "aiStringReplace", name=namespace + ":string_replace_operator"
        )
        node_type = "alembic" if path.endswith(".abc") else "procedural"
        cmds.setAttr(
            string_replace_operator + ".selection",
            "*.(@node=='{}')".format(node_type),
            type="string"
        )
        cmds.setAttr(
            string_replace_operator + ".match",
            proxy_basename,
            type="string"
        )
        cmds.setAttr(
            string_replace_operator + ".replace",
            os.path.basename(path),
            type="string"
        )

        cmds.connectAttr(
            string_replace_operator + ".out",
            "{}.inputs[{}]".format(
                merge_operator,
                self.get_next_free_multi_index(merge_operator + ".inputs")
            )
        )

        # We setup the string operator no matter whether there is a proxy or
        # not. This makes it easier to update since the string operator will
        # always be created. Return original path to use for standin.
        if not os.path.exists(proxy_path):
            return path, string_replace_operator

        return proxy_path, string_replace_operator

    def update(self, container, representation):
        # Update the standin
        members = cmds.sets(container['objectName'], query=True)
        for member in members:
            if cmds.nodeType(member) == "aiStringReplace":
                string_replace_operator = member

            shapes = cmds.listRelatives(member, shapes=True)
            if not shapes:
                continue
            if cmds.nodeType(shapes[0]) == "aiStandIn":
                standin = shapes[0]

        path = get_representation_path(representation)
        proxy_basename, proxy_path = self._get_proxy_path(path)

        # Whether there is a proxy or not, we still update the string
        # operator. If no proxy exists, the string operator won't replace
        # anything.
        cmds.setAttr(
            string_replace_operator + ".match",
            "resources/" + proxy_basename,
            type="string"
        )
        cmds.setAttr(
            string_replace_operator + ".replace",
            os.path.basename(path),
            type="string"
        )

        dso_path = path
        if os.path.exists(proxy_path):
            dso_path = proxy_path
        cmds.setAttr(standin + ".dso", dso_path, type="string")

        sequence = is_sequence(os.listdir(os.path.dirname(path)))
        cmds.setAttr(standin + ".useFrameExtension", sequence)

        cmds.setAttr(
            container["objectName"] + ".representation",
            str(representation["_id"]),
            type="string"
        )

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        members = cmds.sets(container['objectName'], query=True)
        cmds.lockNode(members, lock=False)
        cmds.delete([container['objectName']] + members)

        # Clean up the namespace
        try:
            cmds.namespace(removeNamespace=container['namespace'],
                           deleteNamespaceContent=True)
        except RuntimeError:
            pass
@@ -1,290 +0,0 @@
import os
import clique

from openpype.settings import get_project_settings
from openpype.pipeline import (
    load,
    get_representation_path
)
import openpype.hosts.maya.api.plugin
from openpype.hosts.maya.api.plugin import get_reference_node
from openpype.hosts.maya.api.lib import (
    maintained_selection,
    unique_namespace
)
from openpype.hosts.maya.api.pipeline import containerise


class AssProxyLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
    """Load Arnold Proxy as reference"""

    families = ["ass"]
    representations = ["ass"]

    label = "Reference .ASS standin with Proxy"
    order = -10
    icon = "code-fork"
    color = "orange"

    def process_reference(self, context, name, namespace, options):

        import maya.cmds as cmds
        import pymel.core as pm

        version = context['version']
        version_data = version.get("data", {})

        self.log.info("version_data: {}\n".format(version_data))

        frameStart = version_data.get("frameStart", None)

        try:
            family = context["representation"]["context"]["family"]
        except ValueError:
            family = "ass"

        with maintained_selection():

            groupName = "{}:{}".format(namespace, name)
            path = self.fname
            proxyPath_base = os.path.splitext(path)[0]

            if frameStart is not None:
                proxyPath_base = os.path.splitext(proxyPath_base)[0]

                publish_folder = os.path.split(path)[0]
                files_in_folder = os.listdir(publish_folder)
                collections, remainder = clique.assemble(files_in_folder)

                if collections:
                    hashes = collections[0].padding * '#'
                    coll = collections[0].format('{head}[index]{tail}')
                    filename = coll.replace('[index]', hashes)

                    path = os.path.join(publish_folder, filename)

            proxyPath = proxyPath_base + ".ma"

            project_name = context["project"]["name"]
            file_url = self.prepare_root_value(proxyPath,
                                               project_name)

            nodes = cmds.file(file_url,
                              namespace=namespace,
                              reference=True,
                              returnNewNodes=True,
                              groupReference=True,
                              groupName=groupName)

            cmds.makeIdentity(groupName, apply=False, rotate=True,
                              translate=True, scale=True)

            # Set attributes
            proxyShape = pm.ls(nodes, type="mesh")[0]

            proxyShape.aiTranslator.set('procedural')
            proxyShape.dso.set(path)
            proxyShape.aiOverrideShaders.set(0)

            settings = get_project_settings(project_name)
            colors = settings['maya']['load']['colors']

            c = colors.get(family)
            if c is not None:
                cmds.setAttr(groupName + ".useOutlinerColor", 1)
                cmds.setAttr(groupName + ".outlinerColor",
                             (float(c[0])/255),
                             (float(c[1])/255),
                             (float(c[2])/255)
                             )

        self[:] = nodes

        return nodes

    def switch(self, container, representation):
        self.update(container, representation)

    def update(self, container, representation):
        from maya import cmds
        import pymel.core as pm

        node = container["objectName"]

        representation["context"].pop("frame", None)
        path = get_representation_path(representation)
        print(path)
        # path = self.fname
        print(self.fname)
        proxyPath = os.path.splitext(path)[0] + ".ma"
        print(proxyPath)

        # Get reference node from container members
        members = cmds.sets(node, query=True, nodesOnly=True)
        reference_node = get_reference_node(members)

        assert os.path.exists(proxyPath), "%s does not exist." % proxyPath

        try:
            file_url = self.prepare_root_value(proxyPath,
                                               representation["context"]
                                               ["project"]
                                               ["name"])
            content = cmds.file(file_url,
                                loadReference=reference_node,
                                type="mayaAscii",
                                returnNewNodes=True)

            # Set attributes
            proxyShape = pm.ls(content, type="mesh")[0]

            proxyShape.aiTranslator.set('procedural')
            proxyShape.dso.set(path)
            proxyShape.aiOverrideShaders.set(0)

        except RuntimeError as exc:
            # When changing a reference to a file that has load errors the
            # command will raise an error even if the file is still loaded
            # correctly (e.g. when raising errors on Arnold attributes)
            # When the file is loaded and has content, we consider it's fine.
            if not cmds.referenceQuery(reference_node, isLoaded=True):
                raise

            content = cmds.referenceQuery(reference_node,
                                          nodes=True,
                                          dagPath=True)
            if not content:
                raise

            self.log.warning("Ignoring file read error:\n%s", exc)

        # Add new nodes of the reference to the container
        cmds.sets(content, forceElement=node)

        # Remove any placeHolderList attribute entries from the set that
        # are remaining from nodes being removed from the referenced file.
        members = cmds.sets(node, query=True)
        invalid = [x for x in members if ".placeHolderList" in x]
        if invalid:
            cmds.sets(invalid, remove=node)

        # Update metadata
        cmds.setAttr("{}.representation".format(node),
                     str(representation["_id"]),
                     type="string")


class AssStandinLoader(load.LoaderPlugin):
    """Load .ASS file as standin"""

    families = ["ass"]
    representations = ["ass"]

    label = "Load .ASS file as standin"
    order = -5
    icon = "code-fork"
    color = "orange"

    def load(self, context, name, namespace, options):

        import maya.cmds as cmds
        import mtoa.ui.arnoldmenu
        import pymel.core as pm

        version = context['version']
        version_data = version.get("data", {})

        self.log.info("version_data: {}\n".format(version_data))

        frameStart = version_data.get("frameStart", None)

        asset = context['asset']['name']
        namespace = namespace or unique_namespace(
            asset + "_",
            prefix="_" if asset[0].isdigit() else "",
            suffix="_",
        )

        # cmds.loadPlugin("gpuCache", quiet=True)

        # Root group
        label = "{}:{}".format(namespace, name)
        root = pm.group(name=label, empty=True)

        settings = get_project_settings(os.environ['AVALON_PROJECT'])
        colors = settings['maya']['load']['colors']

        c = colors.get('ass')
        if c is not None:
            cmds.setAttr(root + ".useOutlinerColor", 1)
            cmds.setAttr(root + ".outlinerColor",
                         c[0], c[1], c[2])

        # Create transform with shape
        transform_name = label + "_ASS"
        # transform = pm.createNode("transform", name=transform_name,
        #                           parent=root)

        standinShape = pm.PyNode(mtoa.ui.arnoldmenu.createStandIn())
        standin = standinShape.getParent()
        standin.rename(transform_name)

        pm.parent(standin, root)

        # Set the standin filepath
        standinShape.dso.set(self.fname)
        if frameStart is not None:
            standinShape.useFrameExtension.set(1)

        nodes = [root, standin]
        self[:] = nodes

        return containerise(
            name=name,
            namespace=namespace,
            nodes=nodes,
            context=context,
            loader=self.__class__.__name__)

    def update(self, container, representation):

        import pymel.core as pm

        path = get_representation_path(representation)

        files_in_path = os.listdir(os.path.split(path)[0])
        sequence = 0
        collections, remainder = clique.assemble(files_in_path)
        if collections:
            sequence = 1

        # Update the standin
        standins = list()
        members = pm.sets(container['objectName'], query=True)
        for member in members:
            shape = member.getShape()
            if (shape and shape.type() == "aiStandIn"):
                standins.append(shape)

        for standin in standins:
            standin.dso.set(path)
            standin.useFrameExtension.set(sequence)

        container = pm.PyNode(container["objectName"])
        container.representation.set(str(representation["_id"]))

    def switch(self, container, representation):
        self.update(container, representation)

    def remove(self, container):
        import maya.cmds as cmds
        members = cmds.sets(container['objectName'], query=True)
        cmds.lockNode(members, lock=False)
        cmds.delete([container['objectName']] + members)

        # Clean up the namespace
        try:
            cmds.namespace(removeNamespace=container['namespace'],
                           deleteNamespaceContent=True)
        except RuntimeError:
            pass
@@ -25,9 +25,9 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
                "camera",
                "rig",
                "camerarig",
                "xgen",
                "staticMesh",
                "mvLook"]

    representations = ["ma", "abc", "fbx", "mb"]

    label = "Reference"
@@ -81,10 +81,11 @@ class VRayProxyLoader(load.LoaderPlugin):
        c = colors.get(family)
        if c is not None:
            cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
            cmds.setAttr("{0}.outlinerColor".format(group_node),
                         (float(c[0])/255),
                         (float(c[1])/255),
                         (float(c[2])/255)
            cmds.setAttr(
                "{0}.outlinerColor".format(group_node),
                (float(c[0]) / 255),
                (float(c[1]) / 255),
                (float(c[2]) / 255)
            )

        return containerise(

@@ -101,7 +102,7 @@ class VRayProxyLoader(load.LoaderPlugin):
        assert cmds.objExists(node), "Missing container"

        members = cmds.sets(node, query=True) or []
        vraymeshes = cmds.ls(members, type="VRayMesh")
        vraymeshes = cmds.ls(members, type="VRayProxy")
        assert vraymeshes, "Cannot find VRayMesh in container"

        # get all representations for this version
173
openpype/hosts/maya/plugins/load/load_xgen.py
Normal file

@@ -0,0 +1,173 @@
import os

import maya.cmds as cmds
import xgenm

from Qt import QtWidgets

import openpype.hosts.maya.api.plugin
from openpype.hosts.maya.api.lib import (
    maintained_selection,
    get_container_members,
    attribute_values,
    write_xgen_file
)
from openpype.hosts.maya.api import current_file
from openpype.pipeline import get_representation_path


class XgenLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
    """Load Xgen as reference"""

    families = ["xgen"]
    representations = ["ma", "mb"]

    label = "Reference Xgen"
    icon = "code-fork"
    color = "orange"

    def get_xgen_xgd_paths(self, palette):
        _, maya_extension = os.path.splitext(current_file())
        xgen_file = current_file().replace(
            maya_extension,
            "__{}.xgen".format(palette.replace("|", "").replace(":", "__"))
        )
        xgd_file = xgen_file.replace(".xgen", ".xgd")
        return xgen_file, xgd_file
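    # Illustrative result (hypothetical names): for a saved workfile
    # "shot_010_v001.ma" and a palette "char:collection", this resolves to
    # ("shot_010_v001__char__collection.xgen",
    #  "shot_010_v001__char__collection.xgd") next to the workfile.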
    def process_reference(self, context, name, namespace, options):
        # Validate workfile has a path.
        if current_file() is None:
            QtWidgets.QMessageBox.warning(
                None,
                "",
                "Current workfile has not been saved. Please save the workfile"
                " before loading an Xgen."
            )
            return

        maya_filepath = self.prepare_root_value(
            self.fname, context["project"]["name"]
        )

        # Reference xgen. Xgen does not like being referenced under a group.
        new_nodes = []

        with maintained_selection():
            nodes = cmds.file(
                maya_filepath,
                namespace=namespace,
                sharedReferenceFile=False,
                reference=True,
                returnNewNodes=True
            )

            xgen_palette = cmds.ls(
                nodes, type="xgmPalette", long=True
            )[0].replace("|", "")

            xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)
            self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)

            # Change the cache and disk values of xgDataPath and xgProjectPath
            # to ensure paths are setup correctly.
            project_path = os.path.dirname(current_file()).replace("\\", "/")
            xgenm.setAttr("xgProjectPath", project_path, xgen_palette)
            data_path = "${{PROJECT}}xgen/collections/{};{}".format(
                xgen_palette.replace(":", "__ns__"),
                xgenm.getAttr("xgDataPath", xgen_palette)
            )
            xgenm.setAttr("xgDataPath", data_path, xgen_palette)

            data = {"xgProjectPath": project_path, "xgDataPath": data_path}
            write_xgen_file(data, xgen_file)

            # This creates a float expression attribute. If we did not add
            # any changes to the collection, then Xgen does not create an
            # xgd file on save. This gives errors when launching the workfile
            # again due to trying to find the xgd file.
            name = "custom_float_ignore"
            if name not in xgenm.customAttrs(xgen_palette):
                xgenm.addCustomAttr(
                    "custom_float_ignore", xgen_palette
                )

            shapes = cmds.ls(nodes, shapes=True, long=True)

            new_nodes = (list(set(nodes) - set(shapes)))

            self[:] = new_nodes

        return new_nodes
    def set_palette_attributes(self, xgen_palette, xgen_file, xgd_file):
        cmds.setAttr(
            "{}.xgBaseFile".format(xgen_palette),
            os.path.basename(xgen_file),
            type="string"
        )
        cmds.setAttr(
            "{}.xgFileName".format(xgen_palette),
            os.path.basename(xgd_file),
            type="string"
        )
        cmds.setAttr("{}.xgExportAsDelta".format(xgen_palette), True)

    def update(self, container, representation):
        """Workflow for updating Xgen.

        - Copy and potentially overwrite the workspace .xgen file.
        - Export changes to delta file.
        - Set collection attributes to not include delta files.
        - Update xgen maya file reference.
        - Apply the delta file changes.
        - Reset collection attributes to include delta files.

        We have to do this workflow because when using referencing of the
        xgen collection, Maya implicitly imports the Xgen data from the xgen
        file, so we don't have any control over when the delta file changes
        are added.

        There is an implicit increment of the xgen and delta files, due to
        using the workfile basename.
        """

        container_node = container["objectName"]
        members = get_container_members(container_node)
        xgen_palette = cmds.ls(
            members, type="xgmPalette", long=True
        )[0].replace("|", "")
        xgen_file, xgd_file = self.get_xgen_xgd_paths(xgen_palette)

        # Export current changes to apply later.
        xgenm.createDelta(xgen_palette.replace("|", ""), xgd_file)

        self.set_palette_attributes(xgen_palette, xgen_file, xgd_file)

        maya_file = get_representation_path(representation)
        _, extension = os.path.splitext(maya_file)
        new_xgen_file = maya_file.replace(extension, ".xgen")
        data_path = ""
        with open(new_xgen_file, "r") as f:
            for line in f:
                if line.startswith("\txgDataPath"):
                    line = line.rstrip()
                    data_path = line.split("\t")[-1]
                    break

        project_path = os.path.dirname(current_file()).replace("\\", "/")
        data_path = "${{PROJECT}}xgen/collections/{};{}".format(
            xgen_palette.replace(":", "__ns__"),
            data_path
        )
        data = {"xgProjectPath": project_path, "xgDataPath": data_path}
        write_xgen_file(data, xgen_file)

        attribute_data = {
            "{}.xgFileName".format(xgen_palette): os.path.basename(xgen_file),
            "{}.xgBaseFile".format(xgen_palette): "",
            "{}.xgExportAsDelta".format(xgen_palette): False
        }
        with attribute_values(attribute_data):
            super().update(container, representation)

        xgenm.applyDelta(xgen_palette.replace("|", ""), xgd_file)
@@ -1,19 +1,18 @@
from maya import cmds
from openpype.pipeline.publish import KnownPublishError

import pyblish.api


class CollectAssData(pyblish.api.InstancePlugin):
    """Collect Ass data."""
class CollectArnoldSceneSource(pyblish.api.InstancePlugin):
    """Collect Arnold Scene Source data."""

    # Offset to be after renderable camera collection.
    order = pyblish.api.CollectorOrder + 0.2
    label = 'Collect Ass'
    label = "Collect Arnold Scene Source"
    families = ["ass"]

    def process(self, instance):
        objsets = instance.data['setMembers']
        objsets = instance.data["setMembers"]

        for objset in objsets:
            objset = str(objset)

@@ -21,15 +20,12 @@ class CollectAssData(pyblish.api.InstancePlugin):
            if members is None:
                self.log.warning("Skipped empty instance: \"%s\" " % objset)
                continue
            if "content_SET" in objset:
                instance.data['setMembers'] = members
                self.log.debug('content members: {}'.format(members))
            elif objset.startswith("proxy_SET"):
                if len(members) != 1:
                    msg = "You have multiple proxy meshes, please only use one"
                    raise KnownPublishError(msg)
                instance.data['proxy'] = members
                self.log.debug('proxy members: {}'.format(members))
            if objset.endswith("content_SET"):
                instance.data["setMembers"] = cmds.ls(members, long=True)
                self.log.debug("content members: {}".format(members))
            elif objset.endswith("proxy_SET"):
                instance.data["proxy"] = cmds.ls(members, long=True)
                self.log.debug("proxy members: {}".format(members))

        # Use camera in object set if present else default to render globals
        # camera.
@@ -12,7 +12,6 @@ class CollectMayaWorkspace(pyblish.api.ContextPlugin):
    label = "Maya Workspace"

    hosts = ['maya']
    version = (0, 1, 0)

    def process(self, context):
        workspace = cmds.workspace(rootDirectory=True, query=True)
@@ -1,3 +1,5 @@
from maya import cmds

import pyblish.api
@@ -12,3 +14,31 @@ class CollectPointcache(pyblish.api.InstancePlugin):
    def process(self, instance):
        if instance.data.get("farm"):
            instance.data["families"].append("publish.farm")

        proxy_set = None
        for node in instance.data["setMembers"]:
            if cmds.nodeType(node) != "objectSet":
                continue
            members = cmds.sets(node, query=True)
            if members is None:
                self.log.warning("Skipped empty objectset: \"%s\" " % node)
                continue
            if node.endswith("proxy_SET"):
                proxy_set = node
                instance.data["proxy"] = []
                instance.data["proxyRoots"] = []
                for member in members:
                    instance.data["proxy"].extend(cmds.ls(member, long=True))
                    instance.data["proxyRoots"].extend(
                        cmds.ls(member, long=True)
                    )
                    instance.data["proxy"].extend(
                        cmds.listRelatives(member, shapes=True, fullPath=True)
                    )
                self.log.debug(
                    "proxy members: {}".format(instance.data["proxy"])
                )

        if proxy_set:
            instance.remove(proxy_set)
            instance.data["setMembers"].remove(proxy_set)
@@ -42,7 +42,6 @@ Provides:
import re
import os
import platform
import json

from maya import cmds
import maya.app.renderSetup.model.renderSetup as renderSetup

@@ -318,6 +317,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
                "aovSeparator": layer_render_products.layer_data.aov_separator,  # noqa: E501
                "renderSetupIncludeLights": render_instance.data.get(
                    "renderSetupIncludeLights"
                ),
                "strict_error_checking": render_instance.data.get(
                    "strict_error_checking", True
                )
            }
71
openpype/hosts/maya/plugins/publish/collect_xgen.py
Normal file

@@ -0,0 +1,71 @@
import os
|
||||
|
||||
from maya import cmds
|
||||
|
||||
import pyblish.api
|
||||
from openpype.hosts.maya.api.lib import get_attribute_input
|
||||
|
||||
|
||||
class CollectXgen(pyblish.api.InstancePlugin):
|
||||
"""Collect Xgen"""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.499999
|
||||
label = "Collect Xgen"
|
||||
families = ["xgen"]
|
||||
|
||||
def process(self, instance):
|
||||
data = {
|
||||
"xgmPalettes": cmds.ls(instance, type="xgmPalette", long=True),
|
||||
"xgmDescriptions": cmds.ls(
|
||||
instance, type="xgmDescription", long=True
|
||||
),
|
||||
"xgmSubdPatches": cmds.ls(instance, type="xgmSubdPatch", long=True)
|
||||
}
|
||||
data["xgenNodes"] = (
|
||||
data["xgmPalettes"] +
|
||||
data["xgmDescriptions"] +
|
||||
data["xgmSubdPatches"]
|
||||
)
|
||||
|
||||
if data["xgmPalettes"]:
|
||||
data["xgmPalette"] = data["xgmPalettes"][0]
|
||||
|
||||
data["xgenConnections"] = {}
|
||||
for node in data["xgmSubdPatches"]:
|
||||
data["xgenConnections"][node] = {}
|
||||
for attr in ["transform", "geometry"]:
|
||||
input = get_attribute_input("{}.{}".format(node, attr))
|
||||
data["xgenConnections"][node][attr] = input
|
||||
|
||||
# Collect all files under palette root as resources.
|
||||
import xgenm
|
||||
|
||||
data_path = xgenm.getAttr(
|
||||
"xgDataPath", data["xgmPalette"].replace("|", "")
|
||||
).split(os.pathsep)[0]
|
||||
data_path = data_path.replace(
|
||||
"${PROJECT}",
|
||||
xgenm.getAttr("xgProjectPath", data["xgmPalette"].replace("|", ""))
|
||||
)
|
||||
transfers = []
|
||||
|
||||
# Since we are duplicating this palette when extracting we predict that
|
||||
# the name will be the basename without namespaces.
|
||||
predicted_palette_name = data["xgmPalette"].split(":")[-1]
|
||||
predicted_palette_name = predicted_palette_name.replace("|", "")
|
||||
|
||||
for root, _, files in os.walk(data_path):
|
||||
for file in files:
|
||||
source = os.path.join(root, file).replace("\\", "/")
|
||||
destination = os.path.join(
|
||||
instance.data["resourcesDir"],
|
||||
"collections",
|
||||
predicted_palette_name,
|
||||
source.replace(data_path, "")[1:]
|
||||
)
|
||||
transfers.append((source, destination.replace("\\", "/")))
|
||||
|
||||
data["transfers"] = transfers
|
||||
|
||||
self.log.info(data)
|
||||
instance.data.update(data)
|
||||
|
|
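As a quick illustration of the name prediction above (the palette's long name is invented), the namespace prefix and any pipe characters are stripped:

# Hypothetical palette node living in a namespace, as a long name.
palette = "|chars:collection1"

predicted = palette.split(":")[-1].replace("|", "")
print(predicted)  # -> "collection1"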
@@ -0,0 +1,160 @@
import os

from maya import cmds
import arnold

from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
    maintained_selection, attribute_values, delete_after
)


class ExtractArnoldSceneSource(publish.Extractor):
    """Extract the content of the instance to an Arnold Scene Source file."""

    label = "Extract Arnold Scene Source"
    hosts = ["maya"]
    families = ["ass"]
    asciiAss = False

    def process(self, instance):
        staging_dir = self.staging_dir(instance)
        filename = "{}.ass".format(instance.name)
        file_path = os.path.join(staging_dir, filename)

        # Mask
        mask = arnold.AI_NODE_ALL

        node_types = {
            "options": arnold.AI_NODE_OPTIONS,
            "camera": arnold.AI_NODE_CAMERA,
            "light": arnold.AI_NODE_LIGHT,
            "shape": arnold.AI_NODE_SHAPE,
            "shader": arnold.AI_NODE_SHADER,
            "override": arnold.AI_NODE_OVERRIDE,
            "driver": arnold.AI_NODE_DRIVER,
            "filter": arnold.AI_NODE_FILTER,
            "color_manager": arnold.AI_NODE_COLOR_MANAGER,
            "operator": arnold.AI_NODE_OPERATOR
        }

        for key in node_types.keys():
            if instance.data.get("mask" + key.title()):
                mask = mask ^ node_types[key]

        # Motion blur
        attribute_data = {
            "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get(
                "motionBlur", True
            ),
            "defaultArnoldRenderOptions.motion_steps": instance.data.get(
                "motionBlurKeys", 2
            ),
            "defaultArnoldRenderOptions.motion_frames": instance.data.get(
                "motionBlurLength", 0.5
            )
        }

        # Write out .ass file
        kwargs = {
            "filename": file_path,
            "startFrame": instance.data.get("frameStartHandle", 1),
            "endFrame": instance.data.get("frameEndHandle", 1),
            "frameStep": instance.data.get("step", 1),
            "selected": True,
            "asciiAss": self.asciiAss,
            "shadowLinks": True,
            "lightLinks": True,
            "boundingBox": True,
            "expandProcedurals": instance.data.get("expandProcedurals", False),
            "camera": instance.data["camera"],
            "mask": mask
        }

        filenames = self._extract(
            instance.data["setMembers"], attribute_data, kwargs
        )

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            "name": "ass",
            "ext": "ass",
            "files": filenames if len(filenames) > 1 else filenames[0],
            "stagingDir": staging_dir,
            "frameStart": kwargs["startFrame"]
        }

        instance.data["representations"].append(representation)

        self.log.info(
            "Extracted instance {} to: {}".format(instance.name, staging_dir)
        )

        # Extract proxy.
        if not instance.data.get("proxy", []):
            return

        kwargs["filename"] = file_path.replace(".ass", "_proxy.ass")
        filenames = self._extract(
            instance.data["proxy"], attribute_data, kwargs
        )

        representation = {
            "name": "proxy",
            "ext": "ass",
            "files": filenames if len(filenames) > 1 else filenames[0],
            "stagingDir": staging_dir,
            "frameStart": kwargs["startFrame"],
            "outputName": "proxy"
        }

        instance.data["representations"].append(representation)

    def _extract(self, nodes, attribute_data, kwargs):
        self.log.info("Writing: " + kwargs["filename"])
        filenames = []
        # Duplicating nodes so they are direct children of the world. This
        # makes the hierarchy of any exported ass file the same.
        with delete_after() as delete_bin:
            duplicate_nodes = []
            for node in nodes:
                duplicate_transform = cmds.duplicate(node)[0]

                # Discard the children.
                shapes = cmds.listRelatives(duplicate_transform, shapes=True)
                children = cmds.listRelatives(
                    duplicate_transform, children=True
                )
                cmds.delete(set(children) - set(shapes))

                duplicate_transform = cmds.parent(
                    duplicate_transform, world=True
                )[0]

                cmds.rename(duplicate_transform, node.split("|")[-1])
                duplicate_transform = "|" + node.split("|")[-1]

                duplicate_nodes.append(duplicate_transform)
                delete_bin.append(duplicate_transform)

            with attribute_values(attribute_data):
                with maintained_selection():
                    self.log.info(
                        "Writing: {}".format(duplicate_nodes)
                    )
                    cmds.select(duplicate_nodes, noExpand=True)

                    self.log.info(
                        "Extracting ass sequence with: {}".format(kwargs)
                    )

                    exported_files = cmds.arnoldExportAss(**kwargs)

                    for file in exported_files:
                        filenames.append(os.path.split(file)[1])

                    self.log.info("Exported: {}".format(filenames))

        return filenames

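The export mask above starts from AI_NODE_ALL and XOR-toggles off each node class the user flagged. A minimal standalone sketch of that bitmask logic, using plain integer stand-ins rather than the real arnold constants:

# Stand-in flags; the real values come from the arnold module.
AI_NODE_OPTIONS = 0x0001
AI_NODE_CAMERA = 0x0002
AI_NODE_LIGHT = 0x0004
AI_NODE_ALL = 0xFFFF

mask = AI_NODE_ALL
# Pretend the user enabled "maskLight" on the instance:
flags_to_strip = {"light": AI_NODE_LIGHT}
for key, flag in flags_to_strip.items():
    mask = mask ^ flag  # flips the bit: the light class is now excluded

assert not mask & AI_NODE_LIGHT  # lights excluded
assert mask & AI_NODE_CAMERA     # cameras still included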
@@ -1,106 +0,0 @@
import os

from maya import cmds
import arnold

from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import maintained_selection, attribute_values


class ExtractAssStandin(publish.Extractor):
    """Extract the content of the instance to a ass file"""

    label = "Arnold Scene Source (.ass)"
    hosts = ["maya"]
    families = ["ass"]
    asciiAss = False

    def process(self, instance):
        staging_dir = self.staging_dir(instance)
        filename = "{}.ass".format(instance.name)
        filenames = []
        file_path = os.path.join(staging_dir, filename)

        # Mask
        mask = arnold.AI_NODE_ALL

        node_types = {
            "options": arnold.AI_NODE_OPTIONS,
            "camera": arnold.AI_NODE_CAMERA,
            "light": arnold.AI_NODE_LIGHT,
            "shape": arnold.AI_NODE_SHAPE,
            "shader": arnold.AI_NODE_SHADER,
            "override": arnold.AI_NODE_OVERRIDE,
            "driver": arnold.AI_NODE_DRIVER,
            "filter": arnold.AI_NODE_FILTER,
            "color_manager": arnold.AI_NODE_COLOR_MANAGER,
            "operator": arnold.AI_NODE_OPERATOR
        }

        for key in node_types.keys():
            if instance.data.get("mask" + key.title()):
                mask = mask ^ node_types[key]

        # Motion blur
        values = {
            "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get(
                "motionBlur", True
            ),
            "defaultArnoldRenderOptions.motion_steps": instance.data.get(
                "motionBlurKeys", 2
            ),
            "defaultArnoldRenderOptions.motion_frames": instance.data.get(
                "motionBlurLength", 0.5
            )
        }

        # Write out .ass file
        kwargs = {
            "filename": file_path,
            "startFrame": instance.data.get("frameStartHandle", 1),
            "endFrame": instance.data.get("frameEndHandle", 1),
            "frameStep": instance.data.get("step", 1),
            "selected": True,
            "asciiAss": self.asciiAss,
            "shadowLinks": True,
            "lightLinks": True,
            "boundingBox": True,
            "expandProcedurals": instance.data.get("expandProcedurals", False),
            "camera": instance.data["camera"],
            "mask": mask
        }

        self.log.info("Writing: '%s'" % file_path)
        with attribute_values(values):
            with maintained_selection():
                self.log.info(
                    "Writing: {}".format(instance.data["setMembers"])
                )
                cmds.select(instance.data["setMembers"], noExpand=True)

                self.log.info(
                    "Extracting ass sequence with: {}".format(kwargs)
                )

                exported_files = cmds.arnoldExportAss(**kwargs)

                for file in exported_files:
                    filenames.append(os.path.split(file)[1])

                self.log.info("Exported: {}".format(filenames))

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'ass',
            'ext': 'ass',
            'files': filenames if len(filenames) > 1 else filenames[0],
            "stagingDir": staging_dir,
            'frameStart': kwargs["startFrame"]
        }

        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s"
                      % (instance.name, staging_dir))

@@ -1,81 +0,0 @@
import os
import contextlib

from maya import cmds

from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import maintained_selection


class ExtractAssProxy(publish.Extractor):
    """Extract proxy model as Maya Ascii to use as arnold standin"""

    order = publish.Extractor.order + 0.2
    label = "Ass Proxy (Maya ASCII)"
    hosts = ["maya"]
    families = ["ass"]

    def process(self, instance):

        @contextlib.contextmanager
        def unparent(root):
            """Temporarily unparent `root`"""
            parent = cmds.listRelatives(root, parent=True)
            if parent:
                cmds.parent(root, world=True)
                yield
                self.log.info("{} - {}".format(root, parent))
                cmds.parent(root, parent)
            else:
                yield

        # Define extract output file path
        stagingdir = self.staging_dir(instance)
        filename = "{0}.ma".format(instance.name)
        path = os.path.join(stagingdir, filename)

        # Perform extraction
        self.log.info("Performing extraction..")

        # Get only the shape contents we need in such a way that we avoid
        # taking along intermediateObjects
        proxy = instance.data.get('proxy', None)

        if not proxy:
            self.log.info("no proxy mesh")
            return

        members = cmds.ls(proxy,
                          dag=True,
                          transforms=True,
                          noIntermediate=True)
        self.log.info(members)

        with maintained_selection():
            with unparent(members[0]):
                cmds.select(members, noExpand=True)
                cmds.file(path,
                          force=True,
                          typ="mayaAscii",
                          exportSelected=True,
                          preserveReferences=False,
                          channels=False,
                          constraints=False,
                          expressions=False,
                          constructionHistory=False)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'ma',
            'ext': 'ma',
            'files': filename,
            "stagingDir": stagingdir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance '%s' to: %s" % (instance.name, path))

@@ -20,8 +20,7 @@ class ExtractMayaSceneRaw(publish.Extractor):
                "mayaScene",
                "setdress",
                "layout",
                "camerarig",
                "xgen"]
                "camerarig"]
    scene_type = "ma"

    def process(self, instance):

@@ -1,4 +1,5 @@
import os
import copy

from maya import cmds


@@ -9,6 +10,7 @@ from openpype.hosts.maya.api.lib import (
    maintained_selection,
    iter_visible_nodes_in_range
)
from openpype.lib import StringTemplate


class ExtractAlembic(publish.Extractor):

@@ -23,9 +25,7 @@ class ExtractAlembic(publish.Extractor):

    label = "Extract Pointcache (Alembic)"
    hosts = ["maya"]
    families = ["pointcache",
                "model",
                "vrayproxy"]
    families = ["pointcache", "model", "vrayproxy"]
    targets = ["local", "remote"]

    def process(self, instance):

@@ -87,6 +87,7 @@ class ExtractAlembic(publish.Extractor):
                                             end=end))

        suspend = not instance.data.get("refresh", False)
        self.log.info(nodes)
        with suspended_refresh(suspend=suspend):
            with maintained_selection():
                cmds.select(nodes, noExpand=True)

@@ -101,9 +102,9 @@ class ExtractAlembic(publish.Extractor):
            instance.data["representations"] = []

        representation = {
            'name': 'abc',
            'ext': 'abc',
            'files': filename,
            "name": "abc",
            "ext": "abc",
            "files": filename,
            "stagingDir": dirname
        }
        instance.data["representations"].append(representation)

@@ -112,6 +113,48 @@ class ExtractAlembic(publish.Extractor):

        self.log.info("Extracted {} to {}".format(instance, dirname))

        # Extract proxy.
        if not instance.data.get("proxy"):
            return

        path = path.replace(".abc", "_proxy.abc")
        if not instance.data.get("includeParentHierarchy", True):
            # Set the root nodes if we don't want to include parents
            # The roots are to be considered the ones that are the actual
            # direct members of the set
            options["root"] = instance.data["proxyRoots"]

        with suspended_refresh(suspend=suspend):
            with maintained_selection():
                cmds.select(instance.data["proxy"])
                extract_alembic(
                    file=path,
                    startFrame=start,
                    endFrame=end,
                    **options
                )

        template_data = copy.deepcopy(instance.data["anatomyData"])
        template_data.update({"ext": "abc"})
        templates = instance.context.data["anatomy"].templates["publish"]
        published_filename_without_extension = StringTemplate(
            templates["file"]
        ).format(template_data).replace(".abc", "_proxy")
        transfers = []
        destination = os.path.join(
            instance.data["resourcesDir"],
            filename.replace(
                filename.split(".")[0],
                published_filename_without_extension
            )
        )
        transfers.append((path, destination))

        for source, destination in transfers:
            self.log.debug("Transfer: {} > {}".format(source, destination))

        instance.data["transfers"] = transfers

    def get_members_and_roots(self, instance):
        return instance[:], instance.data.get("setMembers")

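To make the proxy renaming above concrete, a hedged sketch of the templating step, using plain str.format in place of openpype's StringTemplate; the template string and field values are invented, since real projects configure their own anatomy templates:

# Hypothetical publish file template and anatomy data.
template = "{asset}_{subset}_v{version:0>3}.{ext}"
template_data = {"asset": "hero", "subset": "pointcacheMain",
                 "version": 1, "ext": "abc"}

published = template.format(**template_data)       # "hero_pointcacheMain_v001.abc"
proxy_base = published.replace(".abc", "_proxy")   # "hero_pointcacheMain_v001_proxy"

The proxy alembic is then transferred into the resources directory under that published name.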
openpype/hosts/maya/plugins/publish/extract_workfile_xgen.py (new file, 250 lines)

@@ -0,0 +1,250 @@
import os
import shutil
import copy

from maya import cmds

import pyblish.api
from openpype.hosts.maya.api.lib import extract_alembic
from openpype.pipeline import publish
from openpype.lib import StringTemplate


class ExtractWorkfileXgen(publish.Extractor):
    """Extract Workfile Xgen.

    When submitting a render, we need to prep Xgen sidecar files.
    """

    # Offset to run before workfile scene save.
    order = pyblish.api.ExtractorOrder - 0.499
    label = "Extract Workfile Xgen"
    families = ["workfile"]
    hosts = ["maya"]

    def get_render_max_frame_range(self, context):
        """Return start to end frame range including all renderlayers in
        context.

        This will return the full frame range which includes all frames of
        the renderlayer instances to be published/submitted.

        Args:
            context (pyblish.api.Context): Current publishing context.

        Returns:
            tuple or None: Start frame, end frame tuple if any renderlayers
                found. Otherwise None is returned.
        """

        def _is_active_renderlayer(i):
            """Return whether instance is an active renderlayer."""
            if not i.data.get("publish", True):
                return False

            is_renderlayer = (
                "renderlayer" in i.data.get("families", []) or
                i.data["family"] == "renderlayer"
            )
            return is_renderlayer

        start_frame = None
        end_frame = None
        for instance in context:
            if not _is_active_renderlayer(instance):
                # Only consider renderlayer instances.
                continue

            render_start_frame = instance.data["frameStart"]
            render_end_frame = instance.data["frameEnd"]

            if start_frame is None:
                start_frame = render_start_frame
            else:
                start_frame = min(start_frame, render_start_frame)

            if end_frame is None:
                end_frame = render_end_frame
            else:
                end_frame = max(end_frame, render_end_frame)

        if start_frame is None or end_frame is None:
            return

        return start_frame, end_frame

    def process(self, instance):
        transfers = []

        # Validate there are any palettes in the scene.
        if not cmds.ls(type="xgmPalette"):
            self.log.debug(
                "No collections found in the scene. Skipping Xgen extraction."
            )
            return

        import xgenm

        # Validate to extract only when we are publishing a renderlayer as
        # well.
        render_range = self.get_render_max_frame_range(instance.context)
        if not render_range:
            self.log.debug(
                "No publishable renderlayers found in context. Skipping Xgen"
                " extraction."
            )
            return

        start_frame, end_frame = render_range

        # We decrement start frame and increment end frame so motion blur
        # will render correctly.
        start_frame -= 1
        end_frame += 1

        # Extract patches alembic.
        path_no_ext, _ = os.path.splitext(instance.context.data["currentFile"])
        kwargs = {"attrPrefix": ["xgen"], "stripNamespaces": True}
        alembic_files = []
        for palette in cmds.ls(type="xgmPalette"):
            patch_names = []
            for description in xgenm.descriptions(palette):
                for name in xgenm.boundGeometry(palette, description):
                    patch_names.append(name)

            alembic_file = "{}__{}.abc".format(
                path_no_ext, palette.replace(":", "__ns__")
            )
            extract_alembic(
                alembic_file,
                root=patch_names,
                selection=False,
                startFrame=float(start_frame),
                endFrame=float(end_frame),
                verbose=True,
                **kwargs
            )
            alembic_files.append(alembic_file)

        template_data = copy.deepcopy(instance.data["anatomyData"])
        published_maya_path = StringTemplate(
            instance.context.data["anatomy"].templates["publish"]["file"]
        ).format(template_data)
        published_basename, _ = os.path.splitext(published_maya_path)

        for source in alembic_files:
            destination = os.path.join(
                os.path.dirname(instance.data["resourcesDir"]),
                os.path.basename(
                    source.replace(path_no_ext, published_basename)
                )
            )
            transfers.append((source, destination))

        # Validate that we are using the published workfile.
        deadline_settings = instance.context.get("deadline")
        if deadline_settings:
            publish_settings = deadline_settings["publish"]
            if not publish_settings["MayaSubmitDeadline"]["use_published"]:
                self.log.debug(
                    "Not using the published workfile. Abort Xgen extraction."
                )
                return

        # Collect Xgen and Delta files.
        xgen_files = []
        sources = []
        current_dir = os.path.dirname(instance.context.data["currentFile"])
        attrs = ["xgFileName", "xgBaseFile"]
        for palette in cmds.ls(type="xgmPalette"):
            for attr in attrs:
                source = os.path.join(
                    current_dir, cmds.getAttr(palette + "." + attr)
                )
                if not os.path.exists(source):
                    continue

                ext = os.path.splitext(source)[1]
                if ext == ".xgen":
                    xgen_files.append(source)
                if ext == ".xgd":
                    sources.append(source)

        # Copy .xgen file to temporary location and modify.
        staging_dir = self.staging_dir(instance)
        for source in xgen_files:
            destination = os.path.join(staging_dir, os.path.basename(source))
            shutil.copy(source, destination)

            lines = []
            with open(destination, "r") as f:
                for line in [line.rstrip() for line in f]:
                    if line.startswith("\txgProjectPath"):
                        path = os.path.dirname(instance.data["resourcesDir"])
                        line = "\txgProjectPath\t\t{}/".format(
                            path.replace("\\", "/")
                        )

                    lines.append(line)

            with open(destination, "w") as f:
                f.write("\n".join(lines))

            sources.append(destination)

        # Add resource files to workfile instance.
        for source in sources:
            basename = os.path.basename(source)
            destination = os.path.join(
                os.path.dirname(instance.data["resourcesDir"]), basename
            )
            transfers.append((source, destination))

        destination_dir = os.path.join(
            instance.data["resourcesDir"], "collections"
        )
        for palette in cmds.ls(type="xgmPalette"):
            project_path = xgenm.getAttr("xgProjectPath", palette)
            data_path = xgenm.getAttr("xgDataPath", palette)
            data_path = data_path.replace("${PROJECT}", project_path)
            for path in data_path.split(";"):
                for root, _, files in os.walk(path):
                    for f in files:
                        source = os.path.join(root, f)
                        destination = "{}/{}{}".format(
                            destination_dir,
                            palette.replace(":", "__ns__"),
                            source.replace(path, "")
                        )
                        transfers.append((source, destination))

        for source, destination in transfers:
            self.log.debug("Transfer: {} > {}".format(source, destination))

        instance.data["transfers"] = transfers

        # Set palette attributes in preparation for workfile publish.
        attrs = {"xgFileName": None, "xgBaseFile": ""}
        data = {}
        for palette in cmds.ls(type="xgmPalette"):
            attrs["xgFileName"] = "resources/{}.xgen".format(
                palette.replace(":", "__ns__")
            )
            for attr, value in attrs.items():
                node_attr = palette + "." + attr

                old_value = cmds.getAttr(node_attr)
                try:
                    data[palette][attr] = old_value
                except KeyError:
                    data[palette] = {attr: old_value}

                cmds.setAttr(node_attr, value, type="string")
                self.log.info(
                    "Setting \"{}\" on \"{}\"".format(value, node_attr)
                )

            cmds.setAttr(palette + "." + "xgExportAsDelta", False)

        instance.data["xgenAttributes"] = data
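A minimal sketch of the frame-range aggregation performed by get_render_max_frame_range above, with made-up instance data:

# Hypothetical renderlayer instances and their frame data.
instances = [
    {"frameStart": 1001, "frameEnd": 1050},
    {"frameStart": 990, "frameEnd": 1020},
]

start = min(i["frameStart"] for i in instances)  # 990
end = max(i["frameEnd"] for i in instances)      # 1050

# The extractor then pads by one frame on each side for motion blur:
start -= 1  # 989
end += 1    # 1051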
openpype/hosts/maya/plugins/publish/extract_xgen.py (new file, 142 lines)

@@ -0,0 +1,142 @@
import os
import copy
import tempfile

from maya import cmds
import xgenm

from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
    maintained_selection, attribute_values, write_xgen_file, delete_after
)
from openpype.lib import StringTemplate


class ExtractXgen(publish.Extractor):
    """Extract Xgen

    Workflow:
    - Duplicate nodes used for patches.
    - Export palette and import onto duplicate nodes.
    - Export/Publish duplicate nodes and palette.
    - Export duplicate palette to .xgen file and add to publish.
    - Publish all xgen files as resources.
    """

    label = "Extract Xgen"
    hosts = ["maya"]
    families = ["xgen"]
    scene_type = "ma"

    def process(self, instance):
        if "representations" not in instance.data:
            instance.data["representations"] = []

        staging_dir = self.staging_dir(instance)
        maya_filename = "{}.{}".format(instance.data["name"], self.scene_type)
        maya_filepath = os.path.join(staging_dir, maya_filename)

        # Get published xgen file name.
        template_data = copy.deepcopy(instance.data["anatomyData"])
        template_data.update({"ext": "xgen"})
        templates = instance.context.data["anatomy"].templates["publish"]
        xgen_filename = StringTemplate(templates["file"]).format(template_data)

        xgen_path = os.path.join(
            self.staging_dir(instance), xgen_filename
        ).replace("\\", "/")
        type = "mayaAscii" if self.scene_type == "ma" else "mayaBinary"

        # Duplicate xgen setup.
        with delete_after() as delete_bin:
            duplicate_nodes = []
            # Collect nodes to export.
            for _, connections in instance.data["xgenConnections"].items():
                transform_name = connections["transform"].split(".")[0]

                # Duplicate the transform of the subd patch geometry.
                duplicate_transform = cmds.duplicate(transform_name)[0]
                delete_bin.append(duplicate_transform)

                # Discard the children.
                shapes = cmds.listRelatives(duplicate_transform, shapes=True)
                children = cmds.listRelatives(
                    duplicate_transform, children=True
                )
                cmds.delete(set(children) - set(shapes))

                duplicate_transform = cmds.parent(
                    duplicate_transform, world=True
                )[0]

                duplicate_nodes.append(duplicate_transform)

            # Export temp xgen palette files.
            temp_xgen_path = os.path.join(
                tempfile.gettempdir(), "temp.xgen"
            ).replace("\\", "/")
            xgenm.exportPalette(
                instance.data["xgmPalette"].replace("|", ""), temp_xgen_path
            )
            self.log.info("Extracted to {}".format(temp_xgen_path))

            # Import xgen onto the duplicate.
            with maintained_selection():
                cmds.select(duplicate_nodes)
                palette = xgenm.importPalette(temp_xgen_path, [])

            delete_bin.append(palette)

            # Export duplicated palettes.
            xgenm.exportPalette(palette, xgen_path)

            # Export Maya file.
            attribute_data = {"{}.xgFileName".format(palette): xgen_filename}
            with attribute_values(attribute_data):
                with maintained_selection():
                    cmds.select(duplicate_nodes + [palette])
                    cmds.file(
                        maya_filepath,
                        force=True,
                        type=type,
                        exportSelected=True,
                        preserveReferences=False,
                        constructionHistory=True,
                        shader=True,
                        constraints=True,
                        expressions=True
                    )

            self.log.info("Extracted to {}".format(maya_filepath))

        if os.path.exists(temp_xgen_path):
            os.remove(temp_xgen_path)

        data = {
            "xgDataPath": os.path.join(
                instance.data["resourcesDir"],
                "collections",
                palette.replace(":", "__ns__")
            ).replace("\\", "/"),
            "xgProjectPath": os.path.dirname(
                instance.data["resourcesDir"]
            ).replace("\\", "/")
        }
        write_xgen_file(data, xgen_path)

        # Adding representations.
        representation = {
            "name": "xgen",
            "ext": "xgen",
            "files": xgen_filename,
            "stagingDir": staging_dir,
        }
        instance.data["representations"].append(representation)

        representation = {
            "name": self.scene_type,
            "ext": self.scene_type,
            "files": maya_filename,
            "stagingDir": staging_dir
        }
        instance.data["representations"].append(representation)

@@ -1,64 +0,0 @@
import os

from maya import cmds

from openpype.pipeline import publish
from openpype.hosts.maya.api.lib import (
    suspended_refresh,
    maintained_selection
)


class ExtractXgenCache(publish.Extractor):
    """Produce an alembic of just xgen interactive groom"""

    label = "Extract Xgen ABC Cache"
    hosts = ["maya"]
    families = ["xgen"]
    optional = True

    def process(self, instance):

        # Collect the out set nodes
        out_descriptions = [node for node in instance
                            if cmds.nodeType(node) == "xgmSplineDescription"]

        start = 1
        end = 1

        self.log.info("Extracting Xgen Cache..")
        dirname = self.staging_dir(instance)

        parent_dir = self.staging_dir(instance)
        filename = "{name}.abc".format(**instance.data)
        path = os.path.join(parent_dir, filename)

        with suspended_refresh():
            with maintained_selection():
                command = (
                    '-file '
                    + path
                    + ' -df "ogawa" -fr '
                    + str(start)
                    + ' '
                    + str(end)
                    + ' -step 1 -mxf -wfw'
                )
                for desc in out_descriptions:
                    command += (" -obj " + desc)
                cmds.xgmSplineCache(export=True, j=command)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': 'abc',
            'ext': 'abc',
            'files': filename,
            "stagingDir": dirname,
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted {} to {}".format(instance, dirname))

openpype/hosts/maya/plugins/publish/reset_xgen_attributes.py (new file, 36 lines)

@@ -0,0 +1,36 @@
from maya import cmds

import pyblish.api


class ResetXgenAttributes(pyblish.api.InstancePlugin):
    """Reset Xgen attributes.

    When the incremental save of the workfile triggers, the Xgen attributes
    change, so this plugin changes them back to the values they had before
    publishing.
    """

    label = "Reset Xgen Attributes."
    # Offset to run after workfile increment plugin.
    order = pyblish.api.IntegratorOrder + 10.0
    families = ["workfile"]

    def process(self, instance):
        xgen_attributes = instance.data.get("xgenAttributes", {})
        if not xgen_attributes:
            return

        for palette, data in xgen_attributes.items():
            for attr, value in data.items():
                node_attr = "{}.{}".format(palette, attr)
                self.log.info(
                    "Setting \"{}\" on \"{}\"".format(value, node_attr)
                )
                cmds.setAttr(node_attr, value, type="string")
            cmds.setAttr(palette + ".xgExportAsDelta", True)

        # Need to save the scene because the attribute changes above do not
        # mark the scene as modified, so the user could otherwise exit
        # without committing the changes.
        self.log.info("Saving changes.")
        cmds.file(save=True)

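For reference, a hedged illustration of the xgenAttributes mapping this plugin consumes; the palette and file names are invented, and the structure (palette node, then attribute name, then the original value) mirrors what the workfile Xgen extractor stores:

# Hypothetical data stored by ExtractWorkfileXgen before publish:
xgen_attributes = {
    "collection1": {
        "xgFileName": "scene__collection1.xgen",  # original value
        "xgBaseFile": "",
    }
}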
@@ -0,0 +1,106 @@
import maya.cmds as cmds

import pyblish.api
from openpype.pipeline.publish import (
    ValidateContentsOrder, PublishValidationError
)


class ValidateArnoldSceneSource(pyblish.api.InstancePlugin):
    """Validate Arnold Scene Source.

    We require at least one root node/parent for the meshes. This is to
    ensure we can duplicate the nodes and preserve the names.

    If using proxies, the content and proxy nodes need to share the same
    names and must not be parented to the world. In practice this means at
    least two groups: one with the content nodes and one with the proxy
    nodes.
    """

    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["ass"]
    label = "Validate Arnold Scene Source"

    def _get_nodes_data(self, nodes):
        ungrouped_nodes = []
        nodes_by_name = {}
        parents = []
        for node in nodes:
            node_split = node.split("|")
            if len(node_split) == 2:
                ungrouped_nodes.append(node)

            parent = "|".join(node_split[:-1])
            if parent:
                parents.append(parent)

            nodes_by_name[node_split[-1]] = node
            for shape in cmds.listRelatives(node, shapes=True):
                nodes_by_name[shape.split("|")[-1]] = shape

        return ungrouped_nodes, nodes_by_name, parents

    def process(self, instance):
        ungrouped_nodes = []

        nodes, content_nodes_by_name, content_parents = self._get_nodes_data(
            instance.data["setMembers"]
        )
        ungrouped_nodes.extend(nodes)

        nodes, proxy_nodes_by_name, proxy_parents = self._get_nodes_data(
            instance.data.get("proxy", [])
        )
        ungrouped_nodes.extend(nodes)

        # Validate against nodes directly parented to world.
        if ungrouped_nodes:
            raise PublishValidationError(
                "Found nodes parented to the world: {}\n"
                "All nodes need to be grouped.".format(ungrouped_nodes)
            )

        # Proxy validation.
        if not instance.data.get("proxy", []):
            return

        # Validate that the amount of content and proxy nodes is the same.
        if len(instance.data["setMembers"]) != len(instance.data["proxy"]):
            raise PublishValidationError(
                "Amount of content nodes ({}) and proxy nodes ({}) needs to "
                "be the same.".format(
                    len(instance.data["setMembers"]),
                    len(instance.data["proxy"])
                )
            )

        # Validate against content and proxy nodes sharing the same parent.
        if list(set(content_parents) & set(proxy_parents)):
            raise PublishValidationError(
                "Content and proxy nodes cannot share the same parent."
            )

        # Validate that content and proxy nodes share the same names.
        sorted_content_names = sorted(content_nodes_by_name.keys())
        sorted_proxy_names = sorted(proxy_nodes_by_name.keys())
        odd_content_names = list(
            set(sorted_content_names) - set(sorted_proxy_names)
        )
        odd_content_nodes = [
            content_nodes_by_name[x] for x in odd_content_names
        ]
        odd_proxy_names = list(
            set(sorted_proxy_names) - set(sorted_content_names)
        )
        odd_proxy_nodes = [
            proxy_nodes_by_name[x] for x in odd_proxy_names
        ]
        if not sorted_content_names == sorted_proxy_names:
            raise PublishValidationError(
                "Content and proxy nodes need to share the same names.\n"
                "Content nodes not matching: {}\n"
                "Proxy nodes not matching: {}".format(
                    odd_content_nodes, odd_proxy_nodes
                )
            )

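As a hedged illustration of what the validator above accepts, with invented node paths: content and proxy live under separate groups with matching leaf names, while anything whose long name splits into only two parts is parented directly to the world and fails:

# Passes: grouped, separate parents, identical leaf names.
content = ["|content_GRP|tree", "|content_GRP|rock"]
proxy = ["|proxy_GRP|tree", "|proxy_GRP|rock"]

# Fails: "|tree".split("|") -> ["", "tree"] (length 2), i.e. the node
# is parented directly to the world.
bad = ["|tree"]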
@@ -2,11 +2,13 @@ import os
import types

import maya.cmds as cmds
from mtoa.core import createOptions

import pyblish.api
from openpype.pipeline.publish import (
    RepairAction,
    ValidateContentsOrder,
    PublishValidationError
)


@@ -34,8 +36,9 @@ class ValidateAssRelativePaths(pyblish.api.InstancePlugin):
                "defaultArnoldRenderOptions.pspath"
            )
        except ValueError:
            assert False, ("Can not validate, render setting were not opened "
                           "yet so Arnold setting cannot be validate")
            raise PublishValidationError(
                "Default Arnold options has not been created yet."
            )

        scene_dir, scene_basename = os.path.split(cmds.file(q=True, loc=True))
        scene_name, _ = os.path.splitext(scene_basename)

@@ -66,6 +69,8 @@ class ValidateAssRelativePaths(pyblish.api.InstancePlugin):

    @classmethod
    def repair(cls, instance):
        createOptions()

        texture_path = cmds.getAttr("defaultArnoldRenderOptions.tspath")
        procedural_path = cmds.getAttr("defaultArnoldRenderOptions.pspath")

@@ -58,23 +58,23 @@ class ValidateAttributes(pyblish.api.ContextPlugin):
            # Filter families.
            families = [instance.data["family"]]
            families += instance.data.get("families", [])
            families = list(set(families) & set(self.attributes.keys()))
            families = list(set(families) & set(cls.attributes.keys()))
            if not families:
                continue

            # Get all attributes to validate.
            attributes = {}
            for family in families:
                for preset in self.attributes[family]:
                for preset in cls.attributes[family]:
                    [node_name, attribute_name] = preset.split(".")
                    try:
                        attributes[node_name].update(
                            {attribute_name: self.attributes[family][preset]}
                            {attribute_name: cls.attributes[family][preset]}
                        )
                    except KeyError:
                        attributes.update({
                            node_name: {
                                attribute_name: self.attributes[family][preset]
                                attribute_name: cls.attributes[family][preset]
                            }
                        })

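The self-to-cls change above reads as though the surrounding method was turned into a classmethod; a minimal generic sketch of why that matters (the class and attribute names are invented, not taken from the plugin):

class Plugin(object):
    # Class-level settings, typically injected from project settings.
    attributes = {"model": {"node.attr": 1}}

    @classmethod
    def get_invalid(cls, context):
        # Inside a classmethod there is no instance, so class-level data
        # must be reached through cls rather than self.
        return cls.attributes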
@@ -19,7 +19,6 @@ class ValidateColorSets(pyblish.api.Validator):
    order = ValidateMeshOrder
    hosts = ['maya']
    families = ['model']
    category = 'geometry'
    label = 'Mesh ColorSets'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
               RepairAction]

@@ -11,10 +11,6 @@ from openpype.pipeline.publish import (
)


def float_round(num, places=0, direction=ceil):
    return direction(num * (10**places)) / float(10**places)


class ValidateMayaUnits(pyblish.api.ContextPlugin):
    """Check if the Maya units are set correct"""

@@ -36,18 +32,12 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
        # Collected units
        linearunits = context.data.get('linearUnits')
        angularunits = context.data.get('angularUnits')
        # TODO(antirotor): This is a hack for framerates having multiple
        # decimal places. FTrack is ceiling decimal values on
        # fps to two decimal places but Maya 2019+ is reporting those fps
        # with much higher resolution. As we currently cannot fix Ftrack
        # rounding, we have to round those numbers coming from Maya.
        # NOTE: this must be revisited yet again as it seems that Ftrack is
        # now flooring the value?
        fps = float_round(context.data.get('fps'), 2, ceil)

        fps = context.data.get('fps')

        # TODO replace query with using 'context.data["assetEntity"]'
        asset_doc = get_current_project_asset()
        asset_fps = asset_doc["data"]["fps"]
        asset_fps = mayalib.convert_to_maya_fps(asset_doc["data"]["fps"])

        self.log.info('Units (linear): {0}'.format(linearunits))
        self.log.info('Units (angular): {0}'.format(angularunits))

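For context on the rounding hack removed above, a quick worked example of float_round (definition taken from the removed code) with ceiling to two decimal places:

from math import ceil

def float_round(num, places=0, direction=ceil):
    return direction(num * (10 ** places)) / float(10 ** places)

# 23.976023976... (the NTSC film rate) ceiled to two decimals:
print(float_round(23.976023976, 2, ceil))  # 23.98

The diff replaces that rounding with mayalib.convert_to_maya_fps on the asset side instead.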
@@ -19,7 +19,6 @@ class ValidateMeshArnoldAttributes(pyblish.api.InstancePlugin):
    order = ValidateMeshOrder
    hosts = ["maya"]
    families = ["model"]
    category = "geometry"
    label = "Mesh Arnold Attributes"
    actions = [
        openpype.hosts.maya.api.action.SelectInvalidAction,

@@ -48,7 +48,6 @@ class ValidateMeshHasUVs(pyblish.api.InstancePlugin):
    order = ValidateMeshOrder
    hosts = ['maya']
    families = ['model']
    category = 'geometry'
    label = 'Mesh Has UVs'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
    optional = True

@@ -15,8 +15,6 @@ class ValidateMeshLaminaFaces(pyblish.api.InstancePlugin):
    order = ValidateMeshOrder
    hosts = ['maya']
    families = ['model']
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Lamina Faces'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

@@ -19,8 +19,6 @@ class ValidateMeshNonZeroEdgeLength(pyblish.api.InstancePlugin):
    order = ValidateMeshOrder
    families = ['model']
    hosts = ['maya']
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Edge Length Non Zero'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
    optional = True

@@ -20,8 +20,6 @@ class ValidateMeshNormalsUnlocked(pyblish.api.Validator):
    order = ValidateMeshOrder
    hosts = ['maya']
    families = ['model']
    category = 'geometry'
    version = (0, 1, 0)
    label = 'Mesh Normals Unlocked'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
               RepairAction]

@@ -235,7 +235,6 @@ class ValidateMeshHasOverlappingUVs(pyblish.api.InstancePlugin):
    order = ValidateMeshOrder
    hosts = ['maya']
    families = ['model']
    category = 'geometry'
    label = 'Mesh Has Overlapping UVs'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
    optional = True

@@ -21,9 +21,7 @@ class ValidateMeshSingleUVSet(pyblish.api.InstancePlugin):
    order = ValidateMeshOrder
    hosts = ['maya']
    families = ['model', 'pointcache']
    category = 'uv'
    optional = True
    version = (0, 1, 0)
    label = "Mesh Single UV Set"
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
               RepairAction]

@@ -63,7 +63,6 @@ class ValidateMeshVerticesHaveEdges(pyblish.api.InstancePlugin):
    order = ValidateMeshOrder
    hosts = ['maya']
    families = ['model']
    category = 'geometry'
    label = 'Mesh Vertices Have Edges'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
               RepairAction]

@@ -16,7 +16,6 @@ class ValidateNoDefaultCameras(pyblish.api.InstancePlugin):
    order = ValidateContentsOrder
    hosts = ['maya']
    families = ['camera']
    version = (0, 1, 0)
    label = "No Default Cameras"
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

@@ -23,8 +23,6 @@ class ValidateNoNamespace(pyblish.api.InstancePlugin):
    order = ValidateContentsOrder
    hosts = ['maya']
    families = ['model']
    category = 'cleanup'
    version = (0, 1, 0)
    label = 'No Namespaces'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
               RepairAction]

@@ -43,8 +43,6 @@ class ValidateNoNullTransforms(pyblish.api.InstancePlugin):
    order = ValidateContentsOrder
    hosts = ['maya']
    families = ['model']
    category = 'cleanup'
    version = (0, 1, 0)
    label = 'No Empty/Null Transforms'
    actions = [RepairAction,
               openpype.hosts.maya.api.action.SelectInvalidAction]

@@ -24,7 +24,6 @@ class ValidateRigJointsHidden(pyblish.api.InstancePlugin):
    order = ValidateContentsOrder
    hosts = ['maya']
    families = ['rig']
    version = (0, 1, 0)
    label = "Joints Hidden"
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
               RepairAction]

@@ -31,8 +31,6 @@ class ValidateSceneSetWorkspace(pyblish.api.ContextPlugin):

    order = ValidatePipelineOrder
    hosts = ['maya']
    category = 'scene'
    version = (0, 1, 0)
    label = 'Maya Workspace Set'

    def process(self, context):

@@ -38,9 +38,7 @@ class ValidateShapeDefaultNames(pyblish.api.InstancePlugin):
    order = ValidateContentsOrder
    hosts = ['maya']
    families = ['model']
    category = 'cleanup'
    optional = True
    version = (0, 1, 0)
    label = "Shape Default Naming"
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction,
               RepairAction]

@@ -32,9 +32,7 @@ class ValidateTransformNamingSuffix(pyblish.api.InstancePlugin):
    order = ValidateContentsOrder
    hosts = ['maya']
    families = ['model']
    category = 'cleanup'
    optional = True
    version = (0, 1, 0)
    label = 'Suffix Naming Conventions'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
    SUFFIX_NAMING_TABLE = {"mesh": ["_GEO", "_GES", "_GEP", "_OSD"],

@@ -18,8 +18,6 @@ class ValidateTransformZero(pyblish.api.Validator):
    order = ValidateContentsOrder
    hosts = ["maya"]
    families = ["model"]
    category = "geometry"
    version = (0, 1, 0)
    label = "Transform Zero (Freeze)"
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

@@ -13,7 +13,6 @@ class ValidateUnrealMeshTriangulated(pyblish.api.InstancePlugin):
    order = ValidateMeshOrder
    hosts = ["maya"]
    families = ["staticMesh"]
    category = "geometry"
    label = "Mesh is Triangulated"
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]
    active = False

openpype/hosts/maya/plugins/publish/validate_vray.py (new file, 18 lines)

@@ -0,0 +1,18 @@
from maya import cmds

import pyblish.api
from openpype.pipeline import PublishValidationError


class ValidateVray(pyblish.api.InstancePlugin):
    """Validate general V-Ray setup."""

    order = pyblish.api.ValidatorOrder
    label = 'VRay'
    hosts = ["maya"]
    families = ["vrayproxy"]

    def process(self, instance):
        # Validate the V-Ray plugin is loaded.
        if not cmds.pluginInfo("vrayformaya", query=True, loaded=True):
            raise PublishValidationError("Vray plugin is not loaded.")

openpype/hosts/maya/plugins/publish/validate_xgen.py (new file, 59 lines)

@@ -0,0 +1,59 @@
import json

import maya.cmds as cmds
import xgenm

import pyblish.api
from openpype.pipeline.publish import PublishValidationError


class ValidateXgen(pyblish.api.InstancePlugin):
    """Validate Xgen data."""

    label = "Validate Xgen"
    order = pyblish.api.ValidatorOrder
    host = ["maya"]
    families = ["xgen"]

    def process(self, instance):
        set_members = instance.data.get("setMembers")

        # Only one collection/node per instance.
        if len(set_members) != 1:
            raise PublishValidationError(
                "Only one collection per instance is allowed."
                " Found:\n{}".format(set_members)
            )

        # Only an xgen palette node is allowed.
        node_type = cmds.nodeType(set_members[0])
        if node_type != "xgmPalette":
            raise PublishValidationError(
                "Only nodes of type \"xgmPalette\" are allowed. Referred to"
                " as \"collection\" in the Maya UI."
                " Node type found: {}".format(node_type)
            )

        # The collection can't have inactive modifiers because Xgen will
        # look for them when loading.
        palette = instance.data["xgmPalette"].replace("|", "")
        inactive_modifiers = {}
        for description in instance.data["xgmDescriptions"]:
            description = description.split("|")[-2]
            modifier_names = xgenm.fxModules(palette, description)
            for name in modifier_names:
                attr = xgenm.getAttr("active", palette, description, name)
                # Attribute values are lowercase strings of false/true.
                if attr == "false":
                    try:
                        inactive_modifiers[description].append(name)
                    except KeyError:
                        inactive_modifiers[description] = [name]

        if inactive_modifiers:
            raise PublishValidationError(
                "There are inactive modifiers on the collection. "
                "Please delete these:\n{}".format(
                    json.dumps(inactive_modifiers, indent=4, sort_keys=True)
                )
            )

@@ -1,16 +1,33 @@
import os
from functools import partial

from openpype.settings import get_project_settings
from openpype.pipeline import install_host
from openpype.hosts.maya.api import MayaHost

from maya import cmds

host = MayaHost()
install_host(host)


print("starting OpenPype usersetup")
print("Starting OpenPype usersetup...")

# build a shelf
# Open Workfile Post Initialization.
key = "OPENPYPE_OPEN_WORKFILE_POST_INITIALIZATION"
if bool(int(os.environ.get(key, "0"))):
    cmds.evalDeferred(
        partial(
            cmds.file,
            os.environ["AVALON_LAST_WORKFILE"],
            open=True,
            force=True
        ),
        lowestPriority=True
    )


# Build a shelf.
settings = get_project_settings(os.environ['AVALON_PROJECT'])
shelf_preset = settings['maya'].get('project_shelf')


@@ -26,7 +43,10 @@ if shelf_preset:
    print(import_string)
    exec(import_string)

    cmds.evalDeferred("mlib.shelf(name=shelf_preset['name'], iconPath=icon_path, preset=shelf_preset)")
    cmds.evalDeferred(
        "mlib.shelf(name=shelf_preset['name'], iconPath=icon_path,"
        " preset=shelf_preset)"
    )


print("finished OpenPype usersetup")
print("Finished OpenPype usersetup.")

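A hedged usage note on the new open-workfile behavior above: setting the environment variable to a truthy integer before Maya launches makes userSetup queue the last workfile to open once the UI is idle. The variable names are taken from the script; the workfile path is invented:

import os

# Typically set by the launcher before Maya starts:
os.environ["OPENPYPE_OPEN_WORKFILE_POST_INITIALIZATION"] = "1"
os.environ["AVALON_LAST_WORKFILE"] = "/projects/demo/work/shot010_v001.ma"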
@@ -53,12 +53,18 @@ class GizmoMenu():

        item_type = item.get("sourcetype")

        if item_type == ("python" or "file"):
        if item_type == "python":
            parent.addCommand(
                item["title"],
                command=str(item["command"]),
                icon=item.get("icon"),
                shortcut=item.get("hotkey")
                shortcut=item.get("shortcut")
            )
        elif item_type == "file":
            parent.addCommand(
                item['title'],
                "nuke.createNode('{}')".format(item.get('file_name')),
                shortcut=item.get('shortcut')
            )

        # add separator

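The removed condition above is a classic Python pitfall: ("python" or "file") evaluates to just "python", so the file branch could never match. A quick demonstration:

print("python" or "file")               # -> "python"
print("file" == ("python" or "file"))   # -> False, the bug this diff fixes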
@@ -1,7 +1,7 @@
import os
import nuke
import pyblish.api
import openpype.api as api
from openpype.lib import get_version_from_path
import openpype.hosts.nuke.api as napi
from openpype.pipeline import KnownPublishError


@@ -57,7 +57,7 @@ class CollectContextData(pyblish.api.ContextPlugin):
        "fps": root_node['fps'].value(),

        "currentFile": current_file,
        "version": int(api.get_version_from_path(current_file)),
        "version": int(get_version_from_path(current_file)),

        "host": pyblish.api.current_host(),
        "hostVersion": nuke.NUKE_VERSION_STRING

Some files were not shown because too many files have changed in this diff.