Merge branch 'develop' into feature/blender-blend_file_family

Simone Barbieri, 2023-06-15 09:50:02 +01:00
commit 1fd81887ce
1369 changed files with 68689 additions and 27119 deletions


@ -1,6 +1,6 @@
{
"projectName": "OpenPype",
"projectOwner": "pypeclub",
"projectOwner": "ynput",
"repoType": "github",
"repoHost": "https://github.com",
"files": [
@ -319,8 +319,18 @@
"code",
"doc"
]
},
{
"login": "movalex",
"name": "Alexey Bogomolov",
"avatar_url": "https://avatars.githubusercontent.com/u/11698866?v=4",
"profile": "http://abogomolov.com",
"contributions": [
"code"
]
}
],
"contributorsPerLine": 7,
"skipCi": true
"skipCi": true,
"commitType": "docs"
}


@ -1,33 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
**Running version**
[ex. 3.14.1-nightly.2]
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. windows]
- Host: [e.g. Maya, Nuke, Houdini]
**Additional context**
Add any other context about the problem here.

.github/ISSUE_TEMPLATE/bug_report.yml (new file, 183 lines)

@ -0,0 +1,183 @@
name: Bug Report
description: File a bug report
title: 'Bug: '
labels:
- 'type: bug'
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this bug report!
- type: checkboxes
attributes:
label: Is there an existing issue for this?
description: >-
Please search to see if an issue already exists for the bug you
encountered.
options:
- label: I have searched the existing issues
required: true
- type: textarea
attributes:
label: 'Current Behavior:'
description: A concise description of what you're experiencing.
validations:
required: true
- type: textarea
attributes:
label: 'Expected Behavior:'
description: A concise description of what you expected to happen.
validations:
required: false
- type: dropdown
id: _version
attributes:
label: Version
description: What version are you running? You can find it in the OpenPype Tray.
options:
- 3.15.11-nightly.2
- 3.15.11-nightly.1
- 3.15.10
- 3.15.10-nightly.2
- 3.15.10-nightly.1
- 3.15.9
- 3.15.9-nightly.2
- 3.15.9-nightly.1
- 3.15.8
- 3.15.8-nightly.3
- 3.15.8-nightly.2
- 3.15.8-nightly.1
- 3.15.7
- 3.15.7-nightly.3
- 3.15.7-nightly.2
- 3.15.7-nightly.1
- 3.15.6
- 3.15.6-nightly.3
- 3.15.6-nightly.2
- 3.15.6-nightly.1
- 3.15.5
- 3.15.5-nightly.2
- 3.15.5-nightly.1
- 3.15.4
- 3.15.4-nightly.3
- 3.15.4-nightly.2
- 3.15.4-nightly.1
- 3.15.3
- 3.15.3-nightly.4
- 3.15.3-nightly.3
- 3.15.3-nightly.2
- 3.15.3-nightly.1
- 3.15.2
- 3.15.2-nightly.6
- 3.15.2-nightly.5
- 3.15.2-nightly.4
- 3.15.2-nightly.3
- 3.15.2-nightly.2
- 3.15.2-nightly.1
- 3.15.1
- 3.15.1-nightly.6
- 3.15.1-nightly.5
- 3.15.1-nightly.4
- 3.15.1-nightly.3
- 3.15.1-nightly.2
- 3.15.1-nightly.1
- 3.15.0
- 3.15.0-nightly.1
- 3.14.11-nightly.4
- 3.14.11-nightly.3
- 3.14.11-nightly.2
- 3.14.11-nightly.1
- 3.14.10
- 3.14.10-nightly.9
- 3.14.10-nightly.8
- 3.14.10-nightly.7
- 3.14.10-nightly.6
- 3.14.10-nightly.5
- 3.14.10-nightly.4
- 3.14.10-nightly.3
- 3.14.10-nightly.2
- 3.14.10-nightly.1
- 3.14.9
- 3.14.9-nightly.5
- 3.14.9-nightly.4
- 3.14.9-nightly.3
- 3.14.9-nightly.2
- 3.14.9-nightly.1
- 3.14.8
- 3.14.8-nightly.4
- 3.14.8-nightly.3
- 3.14.8-nightly.2
- 3.14.8-nightly.1
- 3.14.7
- 3.14.7-nightly.8
- 3.14.7-nightly.7
- 3.14.7-nightly.6
- 3.14.7-nightly.5
- 3.14.7-nightly.4
- 3.14.7-nightly.3
- 3.14.7-nightly.2
- 3.14.7-nightly.1
- 3.14.6
- 3.14.6-nightly.3
- 3.14.6-nightly.2
- 3.14.6-nightly.1
- 3.14.5
- 3.14.5-nightly.3
- 3.14.5-nightly.2
- 3.14.5-nightly.1
- 3.14.4
- 3.14.4-nightly.4
- 3.14.4-nightly.3
- 3.14.4-nightly.2
- 3.14.4-nightly.1
- 3.14.3
- 3.14.3-nightly.7
- 3.14.3-nightly.6
- 3.14.3-nightly.5
- 3.14.3-nightly.4
validations:
required: true
- type: dropdown
validations:
required: true
attributes:
label: What platform are you running OpenPype on?
description: |
Please specify the operating systems you are running OpenPype with.
multiple: true
options:
- Windows
- Linux / Centos
- Linux / Ubuntu
- Linux / RedHat
- MacOS
- type: textarea
id: to-reproduce
attributes:
label: 'Steps To Reproduce:'
description: Steps to reproduce the behavior.
placeholder: |
1. What did the configuration look like
2. What type of action was made
validations:
required: true
- type: checkboxes
attributes:
label: Are there any labels you wish to add?
description: Please search labels and identify those related to your bug.
options:
- label: I have added the relevant labels to the bug report.
required: true
- type: textarea
id: logs
attributes:
label: 'Relevant log output:'
description: >-
Please copy and paste any relevant log output. This will be
automatically formatted into code, so no need for backticks.
render: shell
- type: textarea
id: additional-context
attributes:
label: 'Additional context:'
description: Add any other context about the problem here.

.github/ISSUE_TEMPLATE/config.yml (new file, 8 lines)

@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: Ynput Community Discussions
url: https://community.ynput.io
about: Please ask and answer questions here.
- name: Ynput Discord Server
url: https://discord.gg/ynput
about: For community quick chats.


@ -0,0 +1,52 @@
name: Enhancement Request
description: Create a report to help us enhance a particular feature
title: "Enhancement: "
labels:
- "type: enhancement"
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to fill out this enhancement request report!
- type: checkboxes
attributes:
label: Is there an existing issue for this?
description: Please search to see if an issue already exists for the bug you encountered.
options:
- label: I have searched the existing issues.
required: true
- type: textarea
id: related-feature
attributes:
label: Please describe the feature you have in mind and explain what the current shortcomings are.
description: A clear and concise description of what the problem is.
validations:
required: true
- type: textarea
id: enhancement-proposal
attributes:
label: How would you imagine the implementation of the feature?
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: checkboxes
attributes:
label: Are there any labels you wish to add?
description: Please search labels and identify those related to your enhancement.
options:
- label: I have added the relevant labels to the enhancement request.
required: true
- type: textarea
id: alternatives
attributes:
label: "Describe alternatives you've considered:"
description: A clear and concise description of any alternative solutions or features you've considered.
validations:
required: false
- type: textarea
id: additional-context
attributes:
label: "Additional context:"
description: Add any other context or screenshots about the enhancement request here.
validations:
required: false


@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.

.github/pr-branch-labeler.yml (new file, 15 lines)

@ -0,0 +1,15 @@
# Apply label "feature" if head matches "feature/*"
'type: feature':
head: "feature/*"
# Apply label "enhancement" if head matches "enhancement/*"
'type: enhancement':
head: "enhancement/*"
# Apply label "bugfix" if head matches one of "bugfix/*" or "hotfix/*"
'type: bug':
head: ["bugfix/*", "hotfix/*"]
# Apply label "release" if base matches "release/*"
'Bump Minor':
base: "release/next-minor"
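
For orientation (not part of the diff): a minimal Python sketch of the mapping this config describes, assuming fnmatch-style pattern matching. The action itself uses its own glob matcher, so treat this as an approximation.

```python
# Rough, illustrative approximation of pr-branch-labeler's behaviour using
# fnmatch-style patterns; the real action's matcher may differ in edge cases.
from fnmatch import fnmatch

RULES = {
    "type: feature": {"head": ["feature/*"]},
    "type: enhancement": {"head": ["enhancement/*"]},
    "type: bug": {"head": ["bugfix/*", "hotfix/*"]},
    "Bump Minor": {"base": ["release/next-minor"]},
}

def labels_for(head_ref, base_ref):
    labels = []
    for label, rule in RULES.items():
        head_hit = any(fnmatch(head_ref, p) for p in rule.get("head", []))
        base_hit = any(fnmatch(base_ref, p) for p in rule.get("base", []))
        if head_hit or base_hit:
            labels.append(label)
    return labels

print(labels_for("feature/blender-blend_file_family", "develop"))
# ['type: feature']
```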

.github/pr-glob-labeler.yml (new file, 102 lines)

@ -0,0 +1,102 @@
# Add type: unittest label if any changes in tests folders
'type: unittest':
- '*/*tests*/**/*'
# any changes in documentation structure
'type: documentation':
- '*/**/*website*/**/*'
- '*/**/*docs*/**/*'
# hosts triage
'host: Nuke':
- '*/**/*nuke*'
- '*/**/*nuke*/**/*'
'host: Photoshop':
- '*/**/*photoshop*'
- '*/**/*photoshop*/**/*'
'host: Harmony':
- '*/**/*harmony*'
- '*/**/*harmony*/**/*'
'host: UE':
- '*/**/*unreal*'
- '*/**/*unreal*/**/*'
'host: Houdini':
- '*/**/*houdini*'
- '*/**/*houdini*/**/*'
'host: Maya':
- '*/**/*maya*'
- '*/**/*maya*/**/*'
'host: Resolve':
- '*/**/*resolve*'
- '*/**/*resolve*/**/*'
'host: Blender':
- '*/**/*blender*'
- '*/**/*blender*/**/*'
'host: Hiero':
- '*/**/*hiero*'
- '*/**/*hiero*/**/*'
'host: Fusion':
- '*/**/*fusion*'
- '*/**/*fusion*/**/*'
'host: Flame':
- '*/**/*flame*'
- '*/**/*flame*/**/*'
'host: TrayPublisher':
- '*/**/*traypublisher*'
- '*/**/*traypublisher*/**/*'
'host: 3dsmax':
- '*/**/*max*'
- '*/**/*max*/**/*'
'host: TV Paint':
- '*/**/*tvpaint*'
- '*/**/*tvpaint*/**/*'
'host: CelAction':
- '*/**/*celaction*'
- '*/**/*celaction*/**/*'
'host: After Effects':
- '*/**/*aftereffects*'
- '*/**/*aftereffects*/**/*'
'host: Substance Painter':
- '*/**/*substancepainter*'
- '*/**/*substancepainter*/**/*'
# modules triage
'module: Deadline':
- '*/**/*deadline*'
- '*/**/*deadline*/**/*'
'module: RoyalRender':
- '*/**/*royalrender*'
- '*/**/*royalrender*/**/*'
'module: Sitesync':
- '*/**/*sync_server*'
- '*/**/*sync_server*/**/*'
'module: Ftrack':
- '*/**/*ftrack*'
- '*/**/*ftrack*/**/*'
'module: Shotgrid':
- '*/**/*shotgrid*'
- '*/**/*shotgrid*/**/*'
'module: Kitsu':
- '*/**/*kitsu*'
- '*/**/*kitsu*/**/*'


@ -1,16 +1,9 @@
## Brief description
First sentence is brief description.
## Description
Next paragraf is more elaborate text with more info. This will be displayed for example in collapsed form under the first sentence in a changelog.
## Changelog Description
Paragraphs contain detailed information on the changes made to the product or service, providing an in-depth description of the updates and enhancements. They can be used to explain the reasoning behind the changes, or to highlight the importance of the new features. Paragraphs can often include links to further information or support documentation.
## Additional info
The rest will be ignored in changelog and should contain any additional
technical information.
## Documentation (add _"type: documentation"_ label)
[feature_documentation](future_url_after_it_will_be_merged)
Paragraphs of text giving context of additional technical information or code examples.
## Testing notes:
1. start with this step
2. follow this step
2. follow this step


@ -1,19 +0,0 @@
name: Automate Projects
on:
issues:
types: [opened, labeled]
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
jobs:
assign_one_project:
runs-on: ubuntu-latest
name: Assign to One Project
steps:
- name: Assign NEW bugs to triage
uses: srggrs/assign-one-project-github-action@1.2.0
if: contains(github.event.issue.labels.*.name, 'bug')
with:
project: 'https://github.com/pypeclub/pype/projects/2'
column_name: 'Needs triage'


@ -1,4 +1,4 @@
name: documentation
name: 📜 Documentation
on:
pull_request:


@ -1,4 +1,4 @@
name: Milestone - assign to PRs
name: 👉🏻 Milestone - assign to PRs
on:
pull_request_target:
@ -13,7 +13,7 @@ jobs:
if: github.event.pull_request.milestone == null
uses: zoispag/action-assign-milestone@v1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
repo-token: "${{ secrets.YNPUT_BOT_TOKEN }}"
milestone: 'next-minor'
run_if_develop:
@ -24,5 +24,5 @@ jobs:
if: github.event.pull_request.milestone == null
uses: zoispag/action-assign-milestone@v1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
milestone: 'next-patch'
repo-token: "${{ secrets.YNPUT_BOT_TOKEN }}"
milestone: 'next-patch'


@ -1,4 +1,4 @@
name: Milestone - create default
name: Milestone - create default
on:
milestone:
@ -12,7 +12,7 @@ jobs:
uses: "WyriHaximus/github-action-get-milestones@master"
id: milestones
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
- run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
id: querymilestone
@ -31,7 +31,7 @@ jobs:
with:
title: 'next-patch'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
generate-next-minor:
runs-on: ubuntu-latest
@ -40,7 +40,7 @@ jobs:
uses: "WyriHaximus/github-action-get-milestones@master"
id: milestones
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
- run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
id: querymilestone
@ -59,4 +59,4 @@ jobs:
with:
title: 'next-minor'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
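
As an aside (not part of the diff): the jq filter used in this workflow, `.[] | select(.title == $MILESTONE) | .number`, picks the number of the milestone whose title matches. A hedged Python equivalent on made-up sample data:

```python
# Illustrative only: Python equivalent of the jq filter
# '.[] | select(.title == $MILESTONE) | .number' on sample (made-up) data.
import json

milestones = json.loads(
    '[{"title": "next-minor", "number": 7}, {"title": "next-patch", "number": 8}]'
)
target = "next-patch"

numbers = [m["number"] for m in milestones if m["title"] == target]
print(numbers[0] if numbers else None)  # 8
```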


@ -0,0 +1,50 @@
name: 🚩 Milestone Release [trigger]
on:
workflow_dispatch:
inputs:
milestone:
required: true
release-type:
type: choice
description: What release should be created
options:
- release
- pre-release
milestone:
types: closed
jobs:
milestone-title:
runs-on: ubuntu-latest
outputs:
milestone: ${{ steps.milestoneTitle.outputs.value }}
steps:
- name: Switch input milestone
uses: haya14busa/action-cond@v1
id: milestoneTitle
with:
cond: ${{ inputs.milestone == '' }}
if_true: ${{ github.event.milestone.title }}
if_false: ${{ inputs.milestone }}
- name: Print resulted milestone
run: |
echo "${{ steps.milestoneTitle.outputs.value }}"
call-ci-tools-milestone-release:
needs: milestone-title
uses: ynput/ci-tools/.github/workflows/milestone_release_ref.yml@main
with:
milestone: ${{ needs.milestone-title.outputs.milestone }}
repo-owner: ${{ github.event.repository.owner.login }}
repo-name: ${{ github.event.repository.name }}
version-py-path: "./openpype/version.py"
pyproject-path: "./pyproject.toml"
secrets:
token: ${{ secrets.YNPUT_BOT_TOKEN }}
user_email: ${{ secrets.CI_EMAIL }}
user_name: ${{ secrets.CI_USER }}
cu_api_key: ${{ secrets.CLICKUP_API_KEY }}
cu_team_id: ${{ secrets.CLICKUP_TEAM_ID }}
cu_field_id: ${{ secrets.CLICKUP_RELEASE_FIELD_ID }}
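
As an aside (not part of the diff): the `haya14busa/action-cond` step above simply chooses between the manually dispatched input and the milestone title from the closing event. A hedged Python sketch of that choice, with made-up example values:

```python
# Illustrative only: the conditional resolved by the action-cond step above.
def resolve_milestone(input_milestone, event_milestone_title):
    # cond: inputs.milestone == '' -> fall back to the closed milestone's title
    return event_milestone_title if input_milestone == "" else input_milestone

print(resolve_milestone("", "3.15.12"))        # milestone-closed trigger
print(resolve_milestone("3.16.0", "3.15.12"))  # manual workflow_dispatch
```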


@ -1,4 +1,4 @@
name: Dev -> Main
name: 🔀 Dev -> Main
on:
schedule:
@ -14,10 +14,10 @@ jobs:
- name: 🚛 Checkout Code
uses: actions/checkout@v2
- name: 🔨 Merge develop to main
- name: 🔨 Merge develop to main
uses: everlytic/branch-merge@1.1.0
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
source_ref: 'develop'
target_branch: 'main'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
@ -25,5 +25,5 @@ jobs:
- name: Invoke pre-release workflow
uses: benc-uk/workflow-dispatch@v1
with:
workflow: Nightly Prerelease
token: ${{ secrets.ADMIN_TOKEN }}
workflow: prerelease.yml
token: ${{ secrets.YNPUT_BOT_TOKEN }}

.github/workflows/pr_labels.yml (new file, 49 lines)

@ -0,0 +1,49 @@
name: 🔖 PR labels
on:
pull_request_target:
types: [opened, assigned]
jobs:
size-label:
name: pr_size_label
runs-on: ubuntu-latest
if: github.event.action == 'assigned' || github.event.action == 'opened'
steps:
- name: Add size label
uses: "pascalgn/size-label-action@v0.4.3"
env:
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
IGNORED: ".gitignore\n*.md\n*.json"
with:
sizes: >
{
"0": "XS",
"100": "S",
"500": "M",
"1000": "L",
"1500": "XL",
"2500": "XXL"
}
label_prs_branch:
name: pr_branch_label
runs-on: ubuntu-latest
if: github.event.action == 'assigned' || github.event.action == 'opened'
steps:
- name: Label PRs - Branch name detection
uses: ffittschen/pr-branch-labeler@v1
with:
repo-token: ${{ secrets.YNPUT_BOT_TOKEN }}
label_prs_globe:
name: pr_globe_label
runs-on: ubuntu-latest
if: github.event.action == 'assigned' || github.event.action == 'opened'
steps:
- name: Label PRs - Globe detection
uses: actions/labeler@v4.0.3
with:
repo-token: ${{ secrets.YNPUT_BOT_TOKEN }}
configuration-path: ".github/pr-glob-labeler.yml"
sync-labels: false
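
For context (not part of the diff): a rough Python sketch of how the `sizes` thresholds above map a line count to a label, assuming the count is additions plus deletions; the action's exact counting, including the IGNORED patterns, may differ.

```python
# Illustrative only: map a changed-line count to the size labels configured above.
SIZES = {0: "XS", 100: "S", 500: "M", 1000: "L", 1500: "XL", 2500: "XXL"}

def size_label(changed_lines):
    label = None
    for threshold in sorted(SIZES):
        if changed_lines >= threshold:
            label = SIZES[threshold]
    return label

print(size_label(42))    # XS
print(size_label(320))   # S
print(size_label(4000))  # XXL
```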


@ -1,4 +1,4 @@
name: Nightly Prerelease
name: Nightly Prerelease
on:
workflow_dispatch:
@ -17,7 +17,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.7
python-version: 3.9
- name: Install Python requirements
run: pip install gitpython semver PyGithub
@ -25,43 +25,15 @@ jobs:
- name: 🔎 Determine next version type
id: version_type
run: |
TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.GITHUB_TOKEN }})
echo ::set-output name=type::$TYPE
TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.YNPUT_BOT_TOKEN }})
echo "type=${TYPE}" >> $GITHUB_OUTPUT
- name: 💉 Inject new version into files
id: version
if: steps.version_type.outputs.type != 'skip'
run: |
RESULT=$(python ./tools/ci_tools.py --nightly --github_token ${{ secrets.GITHUB_TOKEN }})
echo ::set-output name=next_tag::$RESULT
# - name: "✏️ Generate full changelog"
# if: steps.version_type.outputs.type != 'skip'
# id: generate-full-changelog
# uses: heinrichreimer/github-changelog-generator-action@v2.3
# with:
# token: ${{ secrets.ADMIN_TOKEN }}
# addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
# issues: false
# issuesWoLabels: false
# sinceTag: "3.12.0"
# maxIssues: 100
# pullRequests: true
# prWoLabels: false
# author: false
# unreleased: true
# compareLink: true
# stripGeneratorNotice: true
# verbose: true
# unreleasedLabel: ${{ steps.version.outputs.next_tag }}
# excludeTagsRegex: "CI/.+"
# releaseBranch: "main"
- name: "🖨️ Print changelog to console"
if: steps.version_type.outputs.type != 'skip'
run: cat CHANGELOG.md
NEW_VERSION_TAG=$(python ./tools/ci_tools.py --nightly --github_token ${{ secrets.YNPUT_BOT_TOKEN }})
echo "next_tag=${NEW_VERSION_TAG}" >> $GITHUB_OUTPUT
- name: 💾 Commit and Tag
id: git_commit
@ -80,7 +52,7 @@ jobs:
- name: Push to protected main branch
uses: CasperWA/push-protected@v2.10.0
with:
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}
branch: main
tags: true
unprotect_reviews: true
@ -89,7 +61,13 @@ jobs:
uses: everlytic/branch-merge@1.1.0
if: steps.version_type.outputs.type != 'skip'
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
- name: Invoke Update bug report workflow
uses: benc-uk/workflow-dispatch@v1
with:
workflow: update_bug_report.yml
token: ${{ secrets.YNPUT_BOT_TOKEN }}


@ -0,0 +1,70 @@
name: 📊 Project task statuses
on:
pull_request_review:
types: [submitted]
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
jobs:
pr_review_started:
name: pr_review_started
runs-on: ubuntu-latest
# -----------------------------
# conditions are:
# - PR issue comment which is not from Ynbot
# - PR review comment which is not Hound (or any other bot)
# - PR review submitted which is not from Hound (or any other bot) and is not 'Changes requested'
# - make sure it only runs if not forked repo
# -----------------------------
if: |
(github.event_name == 'issue_comment' && github.event.pull_request.head.repo.owner.login == 'ynput' && github.event.comment.user.id != 82967070) ||
(github.event_name == 'pull_request_review_comment' && github.event.pull_request.head.repo.owner.login == 'ynput' && github.event.comment.user.type != 'Bot') ||
(github.event_name == 'pull_request_review' &&
github.event.pull_request.head.repo.owner.login == 'ynput' &&
github.event.review.state != 'changes_requested' &&
github.event.review.state != 'approved' &&
github.event.review.user.type != 'Bot')
steps:
- name: Move PR to 'Review In Progress'
uses: leonsteinhaeuser/project-beta-automations@v2.1.0
with:
gh_token: ${{ secrets.YNPUT_BOT_TOKEN }}
organization: ynput
project_id: 11
resource_node_id: ${{ github.event.pull_request.node_id || github.event.issue.node_id }}
status_value: Review In Progress
pr_review_requested:
# -----------------------------
# Resets Clickup Task status to 'In Progress' after 'Changes Requested' were submitted to PR
# It only runs if custom clickup task id was found in ref branch of PR
# -----------------------------
name: pr_review_requested
runs-on: ubuntu-latest
if: github.event_name == 'pull_request_review' && github.event.pull_request.head.repo.owner.login == 'ynput' && github.event.review.state == 'changes_requested'
steps:
- name: Set branch env
run: echo "BRANCH_NAME=${{ github.event.pull_request.head.ref}}" >> $GITHUB_ENV
- name: Get ClickUp ID from ref head name
id: get_cuID
run: |
echo ${{ env.BRANCH_NAME }}
echo "cuID=$(echo $BRANCH_NAME | sed 's/.*\/\(OP\-[0-9]\{4\}\).*/\1/')" >> $GITHUB_OUTPUT
- name: Print ClickUp ID
run: echo ${{ steps.get_cuID.outputs.cuID }}
- name: Move found Clickup task to 'Review in Progress'
if: steps.get_cuID.outputs.cuID
run: |
curl -i -X PUT \
'https://api.clickup.com/api/v2/task/${{ steps.get_cuID.outputs.cuID }}?custom_task_ids=true&team_id=${{secrets.CLICKUP_TEAM_ID}}' \
-H 'Authorization: ${{secrets.CLICKUP_API_KEY}}' \
-H 'Content-Type: application/json' \
-d '{
"status": "in progress"
}'
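
For context (not part of the diff): the sed expression above extracts a ClickUp custom task id of the form `OP-1234` from the PR's branch name. A simplified Python equivalent; the branch name below is hypothetical:

```python
# Illustrative only: simplified equivalent of
# sed 's/.*\/\(OP\-[0-9]\{4\}\).*/\1/' for pulling a ClickUp id from a branch name.
import re

branch = "bugfix/OP-4321_fix-publish-crash"  # hypothetical example
match = re.search(r"OP-[0-9]{4}", branch)
print(match.group(0) if match else "")  # OP-4321
```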


@ -1,124 +0,0 @@
name: Stable Release
on:
release:
types:
- prereleased
jobs:
create_release:
runs-on: ubuntu-latest
if: github.actor != 'pypebot'
steps:
- name: 🚛 Checkout Code
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Python requirements
run: pip install gitpython semver PyGithub
- name: 💉 Inject new version into files
id: version
run: |
echo ::set-output name=current_version::${GITHUB_REF#refs/*/}
RESULT=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/})
LASTRELEASE=$(python ./tools/ci_tools.py --lastversion release)
echo ::set-output name=last_release::$LASTRELEASE
echo ::set-output name=release_tag::$RESULT
# - name: "✏️ Generate full changelog"
# if: steps.version.outputs.release_tag != 'skip'
# id: generate-full-changelog
# uses: heinrichreimer/github-changelog-generator-action@v2.3
# with:
# token: ${{ secrets.ADMIN_TOKEN }}
# addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
# issues: false
# issuesWoLabels: false
# sinceTag: "3.12.0"
# maxIssues: 100
# pullRequests: true
# prWoLabels: false
# author: false
# unreleased: true
# compareLink: true
# stripGeneratorNotice: true
# verbose: true
# futureRelease: ${{ steps.version.outputs.release_tag }}
# excludeTagsRegex: "CI/.+"
# releaseBranch: "main"
- name: 💾 Commit and Tag
id: git_commit
if: steps.version.outputs.release_tag != 'skip'
run: |
git config user.email ${{ secrets.CI_EMAIL }}
git config user.name ${{ secrets.CI_USER }}
git add .
git commit -m "[Automated] Release"
tag_name="${{ steps.version.outputs.release_tag }}"
git tag -a $tag_name -m "stable release"
- name: 🔏 Push to protected main branch
if: steps.version.outputs.release_tag != 'skip'
uses: CasperWA/push-protected@v2.10.0
with:
token: ${{ secrets.ADMIN_TOKEN }}
branch: main
tags: true
unprotect_reviews: true
- name: "✏️ Generate last changelog"
if: steps.version.outputs.release_tag != 'skip'
id: generate-last-changelog
uses: heinrichreimer/github-changelog-generator-action@v2.2
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: ${{ steps.version.outputs.last_release }}
maxIssues: 100
pullRequests: true
prWoLabels: false
author: false
unreleased: true
compareLink: true
stripGeneratorNotice: true
verbose: true
futureRelease: ${{ steps.version.outputs.release_tag }}
excludeTagsRegex: "CI/.+"
releaseBranch: "main"
stripHeaders: true
base: 'none'
- name: 🚀 Github Release
if: steps.version.outputs.release_tag != 'skip'
uses: ncipollo/release-action@v1
with:
body: ${{ steps.generate-last-changelog.outputs.changelog }}
tag: ${{ steps.version.outputs.release_tag }}
token: ${{ secrets.ADMIN_TOKEN }}
- name: ☠ Delete Pre-release
if: steps.version.outputs.release_tag != 'skip'
uses: cb80/delrel@latest
with:
tag: "${{ steps.version.outputs.current_version }}"
- name: 🔁 Merge main back to develop
if: steps.version.outputs.release_tag != 'skip'
uses: everlytic/branch-merge@1.1.0
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'


@ -1,7 +1,7 @@
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
name: Test Build
name: 🏗️ Test Build
on:
pull_request:
@ -18,7 +18,7 @@ jobs:
runs-on: windows-latest
strategy:
matrix:
python-version: [3.7]
python-version: [3.9]
steps:
- name: 🚛 Checkout Code
@ -28,7 +28,7 @@ jobs:
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: 🧵 Install Requirements
shell: pwsh
run: |
@ -45,7 +45,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.7]
python-version: [3.9]
steps:
- name: 🚛 Checkout Code
@ -64,27 +64,3 @@ jobs:
run: |
export SKIP_THIRD_PARTY_VALIDATION="1"
./tools/build.sh
# MacOS-latest:
# runs-on: macos-latest
# strategy:
# matrix:
# python-version: [3.7]
# steps:
# - name: 🚛 Checkout Code
# uses: actions/checkout@v2
# - name: Set up Python
# uses: actions/setup-python@v2
# with:
# python-version: ${{ matrix.python-version }}
# - name: 🧵 Install Requirements
# run: |
# ./tools/create_env.sh
# - name: 🔨 Build
# run: |
# ./tools/build.sh

.github/workflows/update_bug_report.yml (new file, 33 lines)

@ -0,0 +1,33 @@
name: 🐞 Update Bug Report
on:
workflow_dispatch:
release:
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#release
types: [published]
jobs:
update-bug-report:
runs-on: ubuntu-latest
name: Update bug report
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.release.target_commitish }}
- name: Update version
uses: ynput/gha-populate-form-version@main
with:
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
registry: github
dropdown: _version
limit_to: 100
form: .github/ISSUE_TEMPLATE/bug_report.yml
commit_message: 'chore(): update bug report / version'
dry_run: no-push
- name: Push to protected develop branch
uses: CasperWA/push-protected@v2.10.0
with:
token: ${{ secrets.YNPUT_BOT_TOKEN }}
branch: develop
unprotect_reviews: true

.gitignore (6 lines changed)

@ -112,3 +112,9 @@ tools/run_eventserver.*
tools/dev_*
.github_changelog_generator
# Addons
########
/openpype/addons/*
!/openpype/addons/README.md

.gitmodules (5 lines changed)

@ -4,4 +4,7 @@
[submodule "tools/modules/powershell/PSWriteColor"]
path = tools/modules/powershell/PSWriteColor
url = https://github.com/EvotecIT/PSWriteColor.git
url = https://github.com/EvotecIT/PSWriteColor.git
[submodule "openpype/hosts/unreal/integration"]
path = openpype/hosts/unreal/integration
url = https://github.com/ynput/ayon-unreal-plugin.git

.pre-commit-config.yaml (new file, 12 lines)

@ -0,0 +1,12 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: no-commit-to-branch
args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
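
For context (not part of the diff): the `--pattern` above is a negative lookahead, so `no-commit-to-branch` blocks commits on any branch whose name does not follow the `prefix/name` convention. A hedged Python sketch of the effect; the hook's exact matching may differ slightly.

```python
# Illustrative only: which branch names the pattern above would block.
import re

PATTERN = r"^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*"

for branch in ["feature/blender-blend_file_family", "develop", "main"]:
    blocked = re.match(PATTERN, branch) is not None
    print(f"{branch}: {'blocked' if blocked else 'allowed'}")
# feature/blender-blend_file_family: allowed
# develop: blocked
# main: blocked
```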

ARCHITECTURE.md (new file, 77 lines)

@ -0,0 +1,77 @@
# Architecture
OpenPype is a monolithic Python project that bundles several parts. This document tries to give a bird's-eye overview of the project and, to a certain degree, of each of the sub-projects.
The current file structure looks like this:
```
.
├── common - Code in this folder is the backend portion of the addon distribution logic for the v4 server.
├── docs - Documentation of the source code.
├── igniter - The OpenPype bootstrapper, deals with running version resolution and setting up the connection to the mongodb.
├── openpype - The actual OpenPype core package.
├── schema - Collection of JSON files describing schematics of objects. This follows Avalon's convention.
├── tests - Integration and unit tests.
├── tools - Convenience scripts to perform common actions (in both bash and ps1).
├── vendor - When using the igniter, it deploys third party tools in here, such as ffmpeg.
└── website - Source files for https://openpype.io/, which is built with Docusaurus (https://docusaurus.io/).
```
The core functionality of the pipeline lives in `igniter` and `openpype`, which in turn rely on the `schema` files. Whenever you build (or download a pre-built) version of OpenPype, these two are bundled in there, and `Igniter` is the entry point.
## Igniter
It's the setup and update tool for OpenPype. Unless you want to package `openpype` separately and deal with all the config manually, this will most likely be your entry point.
```
igniter/
├── bootstrap_repos.py - Module that will find or install OpenPype versions in the system.
├── __init__.py - Igniter entry point.
├── install_dialog.py - Show dialog for choosing central pype repository.
├── install_thread.py - Threading helpers for the install process.
├── __main__.py - Like `__init__.py` ?
├── message_dialog.py - Qt Dialog with a message and "Ok" button.
├── nice_progress_bar.py - Fancy Qt progress bar.
├── splash.txt - ASCII art for the terminal installer.
├── stylesheet.css - Installer Qt styles.
├── terminal_splash.py - Terminal installer animation, relies on `splash.txt`.
├── tools.py - Collection of methods that don't fit in other modules.
├── update_thread.py - Threading helper to update existing OpenPype installs.
├── update_window.py - Qt UI to update OpenPype installs.
├── user_settings.py - Interface for the OpenPype user settings.
└── version.py - Igniter's version number.
```
## OpenPype
This is the main package of the OpenPype logic. It could be loosely described as a combination of [Avalon](https://getavalon.github.io), [Pyblish](https://pyblish.com/) and glue around those with custom OpenPype-only elements. Things are in the process of being moved around to better prepare for V4, which will be released under a new name, AYON.
```
openpype/
├── client - Interface for the MongoDB.
├── hooks - Hooks to be executed on certain OpenPype Applications defined in `openpype.lib.applications`.
├── host - Base class for the different hosts.
├── hosts - Integration with the different DCCs (hosts) using the `host` base class.
├── lib - Libraries that stitch together the package, some have been moved into other parts.
├── modules - OpenPype modules should contain separated logic of specific kind of implementation, such as Ftrack connection and its python API.
├── pipeline - Core of the OpenPype pipeline, handles creation of data, publishing, etc.
├── plugins - Global/core plugins for loader and publisher tool.
├── resources - Icons, fonts, etc.
├── scripts - Loose scripts that get run by tools/publishers.
├── settings - OpenPype settings interface.
├── style - Qt styling.
├── tests - Unit tests.
├── tools - Core tools, check out https://openpype.io/docs/artist_tools.
├── vendor - Vendored copies of required Python packages.
├── widgets - Common re-usable Qt Widgets.
├── action.py - LEGACY: Lives now in `openpype.pipeline.publish.action` Pyblish actions.
├── cli.py - Command line interface, leverages `click`.
├── __init__.py - Sets two constants.
├── __main__.py - Entry point, calls the `cli.py`
├── plugin.py - Pyblish plugins.
├── pype_commands.py - Implementation of OpenPype commands.
└── version.py - Current version number.
```

File diff suppressed because it is too large.


@ -1,6 +1,6 @@
# Build Pype docker image
FROM ubuntu:focal AS builder
ARG OPENPYPE_PYTHON_VERSION=3.7.12
ARG OPENPYPE_PYTHON_VERSION=3.9.12
ARG BUILD_DATE
ARG VERSION


@ -1,6 +1,6 @@
# Build Pype docker image
FROM centos:7 AS builder
ARG OPENPYPE_PYTHON_VERSION=3.7.12
ARG OPENPYPE_PYTHON_VERSION=3.9.12
LABEL org.opencontainers.image.name="pypeclub/openpype"
LABEL org.opencontainers.image.title="OpenPype Docker Image"
@ -52,7 +52,7 @@ RUN yum -y install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.n
# we need to build our own patchelf
WORKDIR /temp-patchelf
RUN git clone https://github.com/NixOS/patchelf.git . \
RUN git clone -b 0.17.0 --single-branch https://github.com/NixOS/patchelf.git . \
&& source scl_source enable devtoolset-7 \
&& ./bootstrap.sh \
&& ./configure \
@ -96,11 +96,11 @@ RUN source $HOME/.bashrc \
RUN source $HOME/.bashrc \
&& bash ./tools/build.sh
RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.7/lib \
&& cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.7/lib \
&& cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.7/lib \
&& cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.7/lib \
&& cp /usr/lib64/libxcb* ./build/exe.linux-x86_64-3.7/vendor/python/PySide2/Qt/lib
RUN cp /usr/lib64/libffi* ./build/exe.linux-x86_64-3.9/lib \
&& cp /usr/lib64/libssl* ./build/exe.linux-x86_64-3.9/lib \
&& cp /usr/lib64/libcrypto* ./build/exe.linux-x86_64-3.9/lib \
&& cp /root/.pyenv/versions/${OPENPYPE_PYTHON_VERSION}/lib/libpython* ./build/exe.linux-x86_64-3.9/lib \
&& cp /usr/lib64/libxcb* ./build/exe.linux-x86_64-3.9/vendor/python/PySide2/Qt/lib
RUN cd /opt/openpype \
rm -rf ./vendor/bin

Dockerfile.debian (new file, 81 lines)

@ -0,0 +1,81 @@
# Build Pype docker image
FROM debian:bullseye AS builder
ARG OPENPYPE_PYTHON_VERSION=3.9.12
ARG BUILD_DATE
ARG VERSION
LABEL maintainer="info@openpype.io"
LABEL description="Docker Image to build and run OpenPype under Debian Bullseye"
LABEL org.opencontainers.image.name="pypeclub/openpype"
LABEL org.opencontainers.image.title="OpenPype Docker Image"
LABEL org.opencontainers.image.url="https://openpype.io/"
LABEL org.opencontainers.image.source="https://github.com/pypeclub/OpenPype"
LABEL org.opencontainers.image.documentation="https://openpype.io/docs/system_introduction"
LABEL org.opencontainers.image.created=$BUILD_DATE
LABEL org.opencontainers.image.version=$VERSION
USER root
ARG DEBIAN_FRONTEND=noninteractive
# update base
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
ca-certificates \
bash \
git \
cmake \
make \
curl \
wget \
build-essential \
libssl-dev \
zlib1g-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
llvm \
libncursesw5-dev \
xz-utils \
tk-dev \
libxml2-dev \
libxmlsec1-dev \
libffi-dev \
liblzma-dev \
patchelf
SHELL ["/bin/bash", "-c"]
RUN mkdir /opt/openpype
# download and install pyenv
RUN curl https://pyenv.run | bash \
&& echo 'export PATH="$HOME/.pyenv/bin:$PATH"'>> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv init -)"' >> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv virtualenv-init -)"' >> $HOME/init_pyenv.sh \
&& echo 'eval "$(pyenv init --path)"' >> $HOME/init_pyenv.sh
# install python with pyenv
RUN source $HOME/init_pyenv.sh \
&& pyenv install ${OPENPYPE_PYTHON_VERSION}
COPY . /opt/openpype/
RUN chmod +x /opt/openpype/tools/create_env.sh && chmod +x /opt/openpype/tools/build.sh
WORKDIR /opt/openpype
# set local python version
RUN cd /opt/openpype \
&& source $HOME/init_pyenv.sh \
&& pyenv local ${OPENPYPE_PYTHON_VERSION}
# fetch third party tools/libraries
RUN source $HOME/init_pyenv.sh \
&& ./tools/create_env.sh \
&& ./tools/fetch_thirdparty_libs.sh
# build openpype
RUN source $HOME/init_pyenv.sh \
&& bash ./tools/build.sh


@ -1,6 +1,84 @@
# Changelog
## [3.14.10](https://github.com/ynput/OpenPype/tree/HEAD)
## [3.15.0](https://github.com/ynput/OpenPype/tree/3.15.0)
[Full Changelog](https://github.com/ynput/OpenPype/compare/3.14.10...3.15.0)
**Deprecated:**
- General: Fill default values of new publish template profiles [\#4245](https://github.com/ynput/OpenPype/pull/4245)
### 📖 Documentation
- documentation: Split tools into separate entries [\#4342](https://github.com/ynput/OpenPype/pull/4342)
- Documentation: Fix harmony docs [\#4301](https://github.com/ynput/OpenPype/pull/4301)
- Remove staging logic set by OpenPype version [\#3979](https://github.com/ynput/OpenPype/pull/3979)
**🆕 New features**
- General: Push to studio library [\#4284](https://github.com/ynput/OpenPype/pull/4284)
- Colorspace Management and Distribution [\#4195](https://github.com/ynput/OpenPype/pull/4195)
- Nuke: refactor to latest publisher workfow [\#4006](https://github.com/ynput/OpenPype/pull/4006)
- Update to Python 3.9 [\#3546](https://github.com/ynput/OpenPype/pull/3546)
**🚀 Enhancements**
- Unreal: Don't use mongo queries in 'ExistingLayoutLoader' [\#4356](https://github.com/ynput/OpenPype/pull/4356)
- General: Loader and Creator plugins can be disabled [\#4310](https://github.com/ynput/OpenPype/pull/4310)
- General: Unbind poetry version [\#4306](https://github.com/ynput/OpenPype/pull/4306)
- General: Enhanced enum def items [\#4295](https://github.com/ynput/OpenPype/pull/4295)
- Git: add pre-commit hooks [\#4289](https://github.com/ynput/OpenPype/pull/4289)
- Tray Publisher: Improve Online family functionality [\#4263](https://github.com/ynput/OpenPype/pull/4263)
- General: Update MacOs to PySide6 [\#4255](https://github.com/ynput/OpenPype/pull/4255)
- Build: update to Gazu in toml [\#4208](https://github.com/ynput/OpenPype/pull/4208)
- Global: adding imageio to settings [\#4158](https://github.com/ynput/OpenPype/pull/4158)
- Blender: added project settings for validator no colons in name [\#4149](https://github.com/ynput/OpenPype/pull/4149)
- Dockerfile for Debian Bullseye [\#4108](https://github.com/ynput/OpenPype/pull/4108)
- AfterEffects: publish multiple compositions [\#4092](https://github.com/ynput/OpenPype/pull/4092)
- AfterEffects: make new publisher default [\#4056](https://github.com/ynput/OpenPype/pull/4056)
- Photoshop: make new publisher default [\#4051](https://github.com/ynput/OpenPype/pull/4051)
- Feature/multiverse [\#4046](https://github.com/ynput/OpenPype/pull/4046)
- Tests: add support for deadline for automatic tests [\#3989](https://github.com/ynput/OpenPype/pull/3989)
- Add version to shortcut name [\#3906](https://github.com/ynput/OpenPype/pull/3906)
- TrayPublisher: Removed from experimental tools [\#3667](https://github.com/ynput/OpenPype/pull/3667)
**🐛 Bug fixes**
- change 3.7 to 3.9 in folder name [\#4354](https://github.com/ynput/OpenPype/pull/4354)
- PushToProject: Fix hierarchy of project change [\#4350](https://github.com/ynput/OpenPype/pull/4350)
- Fix photoshop workfile save-as [\#4347](https://github.com/ynput/OpenPype/pull/4347)
- Nuke Input process node sourcing improvements [\#4341](https://github.com/ynput/OpenPype/pull/4341)
- New publisher: Some validation plugin tweaks [\#4339](https://github.com/ynput/OpenPype/pull/4339)
- Harmony: fix unable to change workfile on Mac [\#4334](https://github.com/ynput/OpenPype/pull/4334)
- Global: fixing in-place source publishing for editorial [\#4333](https://github.com/ynput/OpenPype/pull/4333)
- General: Use class constants of QMessageBox [\#4332](https://github.com/ynput/OpenPype/pull/4332)
- TVPaint: Fix plugin for TVPaint 11.7 [\#4328](https://github.com/ynput/OpenPype/pull/4328)
- Exctract OTIO review has improved quality [\#4325](https://github.com/ynput/OpenPype/pull/4325)
- Ftrack: fix typos causing bugs in sync [\#4322](https://github.com/ynput/OpenPype/pull/4322)
- General: Python 2 compatibility of instance collector [\#4320](https://github.com/ynput/OpenPype/pull/4320)
- Slack: user groups speedup [\#4318](https://github.com/ynput/OpenPype/pull/4318)
- Maya: Bug - Multiverse extractor executed on plain animation family [\#4315](https://github.com/ynput/OpenPype/pull/4315)
- Fix run\_documentation.ps1 [\#4312](https://github.com/ynput/OpenPype/pull/4312)
- Nuke: new creators fixes [\#4308](https://github.com/ynput/OpenPype/pull/4308)
- General: missing comment on standalone and tray publisher [\#4303](https://github.com/ynput/OpenPype/pull/4303)
- AfterEffects: Fix for audio from mp4 layer [\#4296](https://github.com/ynput/OpenPype/pull/4296)
- General: Update gazu in poetry lock [\#4247](https://github.com/ynput/OpenPype/pull/4247)
- Bug: Fixing version detection and filtering in Igniter [\#3914](https://github.com/ynput/OpenPype/pull/3914)
- Bug: Create missing version dir [\#3903](https://github.com/ynput/OpenPype/pull/3903)
**🔀 Refactored code**
- Remove redundant export\_alembic method. [\#4293](https://github.com/ynput/OpenPype/pull/4293)
- Igniter: Use qtpy modules instead of Qt [\#4237](https://github.com/ynput/OpenPype/pull/4237)
**Merged pull requests:**
- Sort families by alphabetical order in the Create plugin [\#4346](https://github.com/ynput/OpenPype/pull/4346)
- Global: Validate unique subsets [\#4336](https://github.com/ynput/OpenPype/pull/4336)
- Maya: Collect instances preserve handles even if frameStart + frameEnd matches context [\#3437](https://github.com/ynput/OpenPype/pull/3437)
## [3.14.10](https://github.com/ynput/OpenPype/tree/3.14.10)
[Full Changelog](https://github.com/ynput/OpenPype/compare/3.14.9...3.14.10)

README.md (107 lines changed)

@ -1,12 +1,11 @@
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
[![All Contributors](https://img.shields.io/badge/all_contributors-27-orange.svg?style=flat-square)](#contributors-)
[![All Contributors](https://img.shields.io/badge/all_contributors-28-orange.svg?style=flat-square)](#contributors-)
<!-- ALL-CONTRIBUTORS-BADGE:END -->
OpenPype
====
[![documentation](https://github.com/pypeclub/pype/actions/workflows/documentation.yml/badge.svg)](https://github.com/pypeclub/pype/actions/workflows/documentation.yml) ![GitHub VFX Platform](https://img.shields.io/badge/vfx%20platform-2021-lightgrey?labelColor=303846)
[![documentation](https://github.com/pypeclub/pype/actions/workflows/documentation.yml/badge.svg)](https://github.com/pypeclub/pype/actions/workflows/documentation.yml) ![GitHub VFX Platform](https://img.shields.io/badge/vfx%20platform-2022-lightgrey?labelColor=303846)
Introduction
@ -31,7 +30,7 @@ The main things you will need to run and build OpenPype are:
- **Terminal** in your OS
- PowerShell 5.0+ (Windows)
- Bash (Linux)
- [**Python 3.7.8**](#python) or higher
- [**Python 3.9.6**](#python) or higher
- [**MongoDB**](#database) (needed only for local development)
@ -50,13 +49,14 @@ For more details on requirements visit [requirements documentation](https://open
Building OpenPype
-------------
To build OpenPype you currently need [Python 3.7](https://www.python.org/downloads/) as we are following
To build OpenPype you currently need [Python 3.9](https://www.python.org/downloads/) as we are following
[vfx platform](https://vfxplatform.com). Because of some Linux distros comes with newer Python version
already, you need to install **3.7** version and make use of it. You can use perhaps [pyenv](https://github.com/pyenv/pyenv) for this on Linux.
already, you need to install **3.9** version and make use of it. You can use perhaps [pyenv](https://github.com/pyenv/pyenv) for this on Linux.
**Note**: We do not support 3.9.0 because of [this bug](https://github.com/python/cpython/pull/22670). Please, use higher versions of 3.9.x.
### Windows
You will need [Python 3.7](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads).
You will need [Python >= 3.9.1](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads).
More tools might be needed for installing dependencies (for example for **OpenTimelineIO**) - mostly
development tools like [CMake](https://cmake.org/) and [Visual Studio](https://visualstudio.microsoft.com/cs/downloads/)
@ -82,7 +82,7 @@ OpenPype is build using [CX_Freeze](https://cx-freeze.readthedocs.io/en/latest)
### macOS
You will need [Python 3.7](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll need also other tools to build
You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll need also other tools to build
some OpenPype dependencies like [CMake](https://cmake.org/) and **XCode Command Line Tools** (or some other build system).
Easy way of installing everything necessary is to use [Homebrew](https://brew.sh):
@ -106,19 +106,19 @@ exec "$SHELL"
PATH=$(pyenv root)/shims:$PATH
```
4) Pull in required Python version 3.7.x
4) Pull in required Python version 3.9.x
```sh
# install Python build dependences
brew install openssl readline sqlite3 xz zlib
# replace with up-to-date 3.7.x version
pyenv install 3.7.9
# replace with up-to-date 3.9.x version
pyenv install 3.9.6
```
5) Set local Python version
```sh
# switch to OpenPype source directory
pyenv local 3.7.9
pyenv local 3.9.6
```
#### To build OpenPype:
@ -145,7 +145,7 @@ sudo ./tools/docker_build.sh centos7
If all is successful, you'll find built OpenPype in `./build/` folder.
#### Manual build
You will need [Python 3.7](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled.
You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled.
To build Python related stuff, you need Python header files installed (`python3-dev` on Ubuntu for example).
@ -222,14 +222,14 @@ eval "$(pyenv virtualenv-init -)"
# reload shell
exec $SHELL
# install Python 3.7.9
pyenv install -v 3.7.9
# install Python 3.9.x
pyenv install -v 3.9.6
# change path to OpenPype 3
cd /path/to/openpype-3
# set local python version
pyenv local 3.7.9
pyenv local 3.9.6
```
</details>
@ -303,41 +303,44 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<table>
<tr>
<td align="center"><a href="http://pype.club/"><img src="https://avatars.githubusercontent.com/u/3333008?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Milan Kolar</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=mkolar" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=mkolar" title="Documentation">📖</a> <a href="#infra-mkolar" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#business-mkolar" title="Business development">💼</a> <a href="#content-mkolar" title="Content">🖋</a> <a href="#fundingFinding-mkolar" title="Funding Finding">🔍</a> <a href="#maintenance-mkolar" title="Maintenance">🚧</a> <a href="#projectManagement-mkolar" title="Project Management">📆</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Amkolar" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-mkolar" title="Mentoring">🧑‍🏫</a> <a href="#question-mkolar" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://www.linkedin.com/in/jakubjezek79"><img src="https://avatars.githubusercontent.com/u/40640033?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jakub Ježek</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=jakubjezek001" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=jakubjezek001" title="Documentation">📖</a> <a href="#infra-jakubjezek001" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#content-jakubjezek001" title="Content">🖋</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Ajakubjezek001" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-jakubjezek001" title="Maintenance">🚧</a> <a href="#mentoring-jakubjezek001" title="Mentoring">🧑‍🏫</a> <a href="#projectManagement-jakubjezek001" title="Project Management">📆</a> <a href="#question-jakubjezek001" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/antirotor"><img src="https://avatars.githubusercontent.com/u/33513211?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Ondřej Samohel</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=antirotor" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=antirotor" title="Documentation">📖</a> <a href="#infra-antirotor" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#content-antirotor" title="Content">🖋</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Aantirotor" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-antirotor" title="Maintenance">🚧</a> <a href="#mentoring-antirotor" title="Mentoring">🧑‍🏫</a> <a href="#projectManagement-antirotor" title="Project Management">📆</a> <a href="#question-antirotor" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/iLLiCiTiT"><img src="https://avatars.githubusercontent.com/u/43494761?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jakub Trllo</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=iLLiCiTiT" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=iLLiCiTiT" title="Documentation">📖</a> <a href="#infra-iLLiCiTiT" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3AiLLiCiTiT" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-iLLiCiTiT" title="Maintenance">🚧</a> <a href="#question-iLLiCiTiT" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/kalisp"><img src="https://avatars.githubusercontent.com/u/4457962?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Petr Kalis</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=kalisp" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=kalisp" title="Documentation">📖</a> <a href="#infra-kalisp" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Akalisp" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-kalisp" title="Maintenance">🚧</a> <a href="#question-kalisp" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/64qam"><img src="https://avatars.githubusercontent.com/u/26925793?v=4?s=100" width="100px;" alt=""/><br /><sub><b>64qam</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=64qam" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3A64qam" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=64qam" title="Documentation">📖</a> <a href="#infra-64qam" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#projectManagement-64qam" title="Project Management">📆</a> <a href="#maintenance-64qam" title="Maintenance">🚧</a> <a href="#content-64qam" title="Content">🖋</a> <a href="#userTesting-64qam" title="User Testing">📓</a></td>
<td align="center"><a href="http://www.colorbleed.nl/"><img src="https://avatars.githubusercontent.com/u/2439881?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Roy Nieterau</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=BigRoy" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=BigRoy" title="Documentation">📖</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3ABigRoy" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-BigRoy" title="Mentoring">🧑‍🏫</a> <a href="#question-BigRoy" title="Answering Questions">💬</a></td>
</tr>
<tr>
<td align="center"><a href="https://github.com/tokejepsen"><img src="https://avatars.githubusercontent.com/u/1860085?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Toke Jepsen</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=tokejepsen" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=tokejepsen" title="Documentation">📖</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Atokejepsen" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-tokejepsen" title="Mentoring">🧑‍🏫</a> <a href="#question-tokejepsen" title="Answering Questions">💬</a></td>
<td align="center"><a href="https://github.com/jrsndl"><img src="https://avatars.githubusercontent.com/u/45896205?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jiri Sindelar</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=jrsndl" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Ajrsndl" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=jrsndl" title="Documentation">📖</a> <a href="#content-jrsndl" title="Content">🖋</a> <a href="#tutorial-jrsndl" title="Tutorials"></a> <a href="#userTesting-jrsndl" title="User Testing">📓</a></td>
<td align="center"><a href="https://barbierisimone.com/"><img src="https://avatars.githubusercontent.com/u/1087869?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Simone Barbieri</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=simonebarbieri" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=simonebarbieri" title="Documentation">📖</a></td>
<td align="center"><a href="http://karimmozilla.xyz/"><img src="https://avatars.githubusercontent.com/u/82811760?v=4?s=100" width="100px;" alt=""/><br /><sub><b>karimmozilla</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=karimmozilla" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/Allan-I"><img src="https://avatars.githubusercontent.com/u/76656700?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Allan I. A.</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=Allan-I" title="Code">💻</a></td>
<td align="center"><a href="https://www.linkedin.com/in/mmuurrpphhyy/"><img src="https://avatars.githubusercontent.com/u/352795?v=4?s=100" width="100px;" alt=""/><br /><sub><b>murphy</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=m-u-r-p-h-y" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Am-u-r-p-h-y" title="Reviewed Pull Requests">👀</a> <a href="#userTesting-m-u-r-p-h-y" title="User Testing">📓</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=m-u-r-p-h-y" title="Documentation">📖</a> <a href="#projectManagement-m-u-r-p-h-y" title="Project Management">📆</a></td>
<td align="center"><a href="https://github.com/aardschok"><img src="https://avatars.githubusercontent.com/u/26920875?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Wijnand Koreman</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=aardschok" title="Code">💻</a></td>
</tr>
<tr>
<td align="center"><a href="http://jedimaster.cnblogs.com/"><img src="https://avatars.githubusercontent.com/u/1798206?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Bo Zhou</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=zhoub" title="Code">💻</a></td>
<td align="center"><a href="https://www.linkedin.com/in/clementhector/"><img src="https://avatars.githubusercontent.com/u/7068597?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Clément Hector</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=ClementHector" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3AClementHector" title="Reviewed Pull Requests">👀</a></td>
<td align="center"><a href="https://twitter.com/davidlatwe"><img src="https://avatars.githubusercontent.com/u/3357009?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Lai</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=davidlatwe" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/pulls?q=is%3Apr+reviewed-by%3Adavidlatwe" title="Reviewed Pull Requests">👀</a></td>
<td align="center"><a href="https://github.com/2-REC"><img src="https://avatars.githubusercontent.com/u/42170307?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Derek </b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=2-REC" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=2-REC" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/gabormarinov"><img src="https://avatars.githubusercontent.com/u/8620515?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Gábor Marinov</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=gabormarinov" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=gabormarinov" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/icyvapor"><img src="https://avatars.githubusercontent.com/u/1195278?v=4?s=100" width="100px;" alt=""/><br /><sub><b>icyvapor</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=icyvapor" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=icyvapor" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/jlorrain"><img src="https://avatars.githubusercontent.com/u/7955673?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Jérôme LORRAIN</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=jlorrain" title="Code">💻</a></td>
</tr>
<tr>
<td align="center"><a href="https://github.com/dmo-j-cube"><img src="https://avatars.githubusercontent.com/u/89823400?v=4?s=100" width="100px;" alt=""/><br /><sub><b>David Morris-Oliveros</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=dmo-j-cube" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/BenoitConnan"><img src="https://avatars.githubusercontent.com/u/82808268?v=4?s=100" width="100px;" alt=""/><br /><sub><b>BenoitConnan</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=BenoitConnan" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/Malthaldar"><img src="https://avatars.githubusercontent.com/u/33671694?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Malthaldar</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=Malthaldar" title="Code">💻</a></td>
<td align="center"><a href="http://www.svenneve.com/"><img src="https://avatars.githubusercontent.com/u/2472863?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Sven Neve</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=svenneve" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/zafrs"><img src="https://avatars.githubusercontent.com/u/26890002?v=4?s=100" width="100px;" alt=""/><br /><sub><b>zafrs</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=zafrs" title="Code">💻</a></td>
<td align="center"><a href="http://felixdavid.com/"><img src="https://avatars.githubusercontent.com/u/22875539?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Félix David</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=Tilix4" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=Tilix4" title="Documentation">📖</a></td>
</tr>
<tbody>
<tr>
<td align="center" valign="top" width="14.28%"><a href="http://pype.club/"><img src="https://avatars.githubusercontent.com/u/3333008?v=4?s=100" width="100px;" alt="Milan Kolar"/><br /><sub><b>Milan Kolar</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=mkolar" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=mkolar" title="Documentation">📖</a> <a href="#infra-mkolar" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#business-mkolar" title="Business development">💼</a> <a href="#content-mkolar" title="Content">🖋</a> <a href="#fundingFinding-mkolar" title="Funding Finding">🔍</a> <a href="#maintenance-mkolar" title="Maintenance">🚧</a> <a href="#projectManagement-mkolar" title="Project Management">📆</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3Amkolar" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-mkolar" title="Mentoring">🧑‍🏫</a> <a href="#question-mkolar" title="Answering Questions">💬</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://www.linkedin.com/in/jakubjezek79"><img src="https://avatars.githubusercontent.com/u/40640033?v=4?s=100" width="100px;" alt="Jakub Ježek"/><br /><sub><b>Jakub Ježek</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=jakubjezek001" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=jakubjezek001" title="Documentation">📖</a> <a href="#infra-jakubjezek001" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#content-jakubjezek001" title="Content">🖋</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3Ajakubjezek001" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-jakubjezek001" title="Maintenance">🚧</a> <a href="#mentoring-jakubjezek001" title="Mentoring">🧑‍🏫</a> <a href="#projectManagement-jakubjezek001" title="Project Management">📆</a> <a href="#question-jakubjezek001" title="Answering Questions">💬</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/antirotor"><img src="https://avatars.githubusercontent.com/u/33513211?v=4?s=100" width="100px;" alt="Ondřej Samohel"/><br /><sub><b>Ondřej Samohel</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=antirotor" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=antirotor" title="Documentation">📖</a> <a href="#infra-antirotor" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#content-antirotor" title="Content">🖋</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3Aantirotor" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-antirotor" title="Maintenance">🚧</a> <a href="#mentoring-antirotor" title="Mentoring">🧑‍🏫</a> <a href="#projectManagement-antirotor" title="Project Management">📆</a> <a href="#question-antirotor" title="Answering Questions">💬</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/iLLiCiTiT"><img src="https://avatars.githubusercontent.com/u/43494761?v=4?s=100" width="100px;" alt="Jakub Trllo"/><br /><sub><b>Jakub Trllo</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=iLLiCiTiT" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=iLLiCiTiT" title="Documentation">📖</a> <a href="#infra-iLLiCiTiT" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3AiLLiCiTiT" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-iLLiCiTiT" title="Maintenance">🚧</a> <a href="#question-iLLiCiTiT" title="Answering Questions">💬</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/kalisp"><img src="https://avatars.githubusercontent.com/u/4457962?v=4?s=100" width="100px;" alt="Petr Kalis"/><br /><sub><b>Petr Kalis</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=kalisp" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=kalisp" title="Documentation">📖</a> <a href="#infra-kalisp" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3Akalisp" title="Reviewed Pull Requests">👀</a> <a href="#maintenance-kalisp" title="Maintenance">🚧</a> <a href="#question-kalisp" title="Answering Questions">💬</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/64qam"><img src="https://avatars.githubusercontent.com/u/26925793?v=4?s=100" width="100px;" alt="64qam"/><br /><sub><b>64qam</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=64qam" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3A64qam" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/ynput/OpenPype/commits?author=64qam" title="Documentation">📖</a> <a href="#infra-64qam" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#projectManagement-64qam" title="Project Management">📆</a> <a href="#maintenance-64qam" title="Maintenance">🚧</a> <a href="#content-64qam" title="Content">🖋</a> <a href="#userTesting-64qam" title="User Testing">📓</a></td>
<td align="center" valign="top" width="14.28%"><a href="http://www.colorbleed.nl/"><img src="https://avatars.githubusercontent.com/u/2439881?v=4?s=100" width="100px;" alt="Roy Nieterau"/><br /><sub><b>Roy Nieterau</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=BigRoy" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=BigRoy" title="Documentation">📖</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3ABigRoy" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-BigRoy" title="Mentoring">🧑‍🏫</a> <a href="#question-BigRoy" title="Answering Questions">💬</a></td>
</tr>
<tr>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/tokejepsen"><img src="https://avatars.githubusercontent.com/u/1860085?v=4?s=100" width="100px;" alt="Toke Jepsen"/><br /><sub><b>Toke Jepsen</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=tokejepsen" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=tokejepsen" title="Documentation">📖</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3Atokejepsen" title="Reviewed Pull Requests">👀</a> <a href="#mentoring-tokejepsen" title="Mentoring">🧑‍🏫</a> <a href="#question-tokejepsen" title="Answering Questions">💬</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/jrsndl"><img src="https://avatars.githubusercontent.com/u/45896205?v=4?s=100" width="100px;" alt="Jiri Sindelar"/><br /><sub><b>Jiri Sindelar</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=jrsndl" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3Ajrsndl" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/ynput/OpenPype/commits?author=jrsndl" title="Documentation">📖</a> <a href="#content-jrsndl" title="Content">🖋</a> <a href="#tutorial-jrsndl" title="Tutorials"></a> <a href="#userTesting-jrsndl" title="User Testing">📓</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://barbierisimone.com/"><img src="https://avatars.githubusercontent.com/u/1087869?v=4?s=100" width="100px;" alt="Simone Barbieri"/><br /><sub><b>Simone Barbieri</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=simonebarbieri" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=simonebarbieri" title="Documentation">📖</a></td>
<td align="center" valign="top" width="14.28%"><a href="http://karimmozilla.xyz/"><img src="https://avatars.githubusercontent.com/u/82811760?v=4?s=100" width="100px;" alt="karimmozilla"/><br /><sub><b>karimmozilla</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=karimmozilla" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/Allan-I"><img src="https://avatars.githubusercontent.com/u/76656700?v=4?s=100" width="100px;" alt="Allan I. A."/><br /><sub><b>Allan I. A.</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=Allan-I" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://www.linkedin.com/in/mmuurrpphhyy/"><img src="https://avatars.githubusercontent.com/u/352795?v=4?s=100" width="100px;" alt="murphy"/><br /><sub><b>murphy</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=m-u-r-p-h-y" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3Am-u-r-p-h-y" title="Reviewed Pull Requests">👀</a> <a href="#userTesting-m-u-r-p-h-y" title="User Testing">📓</a> <a href="https://github.com/ynput/OpenPype/commits?author=m-u-r-p-h-y" title="Documentation">📖</a> <a href="#projectManagement-m-u-r-p-h-y" title="Project Management">📆</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/aardschok"><img src="https://avatars.githubusercontent.com/u/26920875?v=4?s=100" width="100px;" alt="Wijnand Koreman"/><br /><sub><b>Wijnand Koreman</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=aardschok" title="Code">💻</a></td>
</tr>
<tr>
<td align="center" valign="top" width="14.28%"><a href="http://jedimaster.cnblogs.com/"><img src="https://avatars.githubusercontent.com/u/1798206?v=4?s=100" width="100px;" alt="Bo Zhou"/><br /><sub><b>Bo Zhou</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=zhoub" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://www.linkedin.com/in/clementhector/"><img src="https://avatars.githubusercontent.com/u/7068597?v=4?s=100" width="100px;" alt="Clément Hector"/><br /><sub><b>Clément Hector</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=ClementHector" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3AClementHector" title="Reviewed Pull Requests">👀</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://twitter.com/davidlatwe"><img src="https://avatars.githubusercontent.com/u/3357009?v=4?s=100" width="100px;" alt="David Lai"/><br /><sub><b>David Lai</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=davidlatwe" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/pulls?q=is%3Apr+reviewed-by%3Adavidlatwe" title="Reviewed Pull Requests">👀</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/2-REC"><img src="https://avatars.githubusercontent.com/u/42170307?v=4?s=100" width="100px;" alt="Derek "/><br /><sub><b>Derek </b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=2-REC" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=2-REC" title="Documentation">📖</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/gabormarinov"><img src="https://avatars.githubusercontent.com/u/8620515?v=4?s=100" width="100px;" alt="Gábor Marinov"/><br /><sub><b>Gábor Marinov</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=gabormarinov" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=gabormarinov" title="Documentation">📖</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/icyvapor"><img src="https://avatars.githubusercontent.com/u/1195278?v=4?s=100" width="100px;" alt="icyvapor"/><br /><sub><b>icyvapor</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=icyvapor" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=icyvapor" title="Documentation">📖</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/jlorrain"><img src="https://avatars.githubusercontent.com/u/7955673?v=4?s=100" width="100px;" alt="Jérôme LORRAIN"/><br /><sub><b>Jérôme LORRAIN</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=jlorrain" title="Code">💻</a></td>
</tr>
<tr>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/dmo-j-cube"><img src="https://avatars.githubusercontent.com/u/89823400?v=4?s=100" width="100px;" alt="David Morris-Oliveros"/><br /><sub><b>David Morris-Oliveros</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=dmo-j-cube" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/BenoitConnan"><img src="https://avatars.githubusercontent.com/u/82808268?v=4?s=100" width="100px;" alt="BenoitConnan"/><br /><sub><b>BenoitConnan</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=BenoitConnan" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/Malthaldar"><img src="https://avatars.githubusercontent.com/u/33671694?v=4?s=100" width="100px;" alt="Malthaldar"/><br /><sub><b>Malthaldar</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=Malthaldar" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="http://www.svenneve.com/"><img src="https://avatars.githubusercontent.com/u/2472863?v=4?s=100" width="100px;" alt="Sven Neve"/><br /><sub><b>Sven Neve</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=svenneve" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="https://github.com/zafrs"><img src="https://avatars.githubusercontent.com/u/26890002?v=4?s=100" width="100px;" alt="zafrs"/><br /><sub><b>zafrs</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=zafrs" title="Code">💻</a></td>
<td align="center" valign="top" width="14.28%"><a href="http://felixdavid.com/"><img src="https://avatars.githubusercontent.com/u/22875539?v=4?s=100" width="100px;" alt="Félix David"/><br /><sub><b>Félix David</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=Tilix4" title="Code">💻</a> <a href="https://github.com/ynput/OpenPype/commits?author=Tilix4" title="Documentation">📖</a></td>
<td align="center" valign="top" width="14.28%"><a href="http://abogomolov.com"><img src="https://avatars.githubusercontent.com/u/11698866?v=4?s=100" width="100px;" alt="Alexey Bogomolov"/><br /><sub><b>Alexey Bogomolov</b></sub></a><br /><a href="https://github.com/ynput/OpenPype/commits?author=movalex" title="Code">💻</a></td>
</tr>
</tbody>
</table>
<!-- markdownlint-restore -->
@ -345,4 +348,4 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
<!-- ALL-CONTRIBUTORS-LIST:END -->
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!

View file

@ -1,4 +1,3 @@
import enlighten
import os
import re
import urllib
@ -252,6 +251,11 @@ class RemoteFileHandler:
if key.startswith('download_warning'):
return value
# handle antivirus warning for big zips
found = re.search("(confirm=)([^&.+])", response.text)
if found:
return found.groups()[1]
return None
@staticmethod
@ -259,15 +263,9 @@ class RemoteFileHandler:
response_gen, destination,
):
with open(destination, "wb") as f:
pbar = enlighten.Counter(
total=None, desc="Save content", units="%", color="green")
progress = 0
for chunk in response_gen:
if chunk: # filter out keep-alive new chunks
f.write(chunk)
progress += len(chunk)
pbar.close()
@staticmethod
def _quota_exceeded(first_chunk):
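For readers skimming the hunk above: the new antivirus-warning branch looks for a `confirm=` token in the body of Google Drive's interstitial page so the download can be retried with that token. A self-contained sketch of the idea (the sample HTML and the slightly broader regex are illustrative only, not the merged pattern):

```python
# Illustration only: pull a Google Drive style "confirm" token out of a
# warning page body. The regex is a broader variant than the merged one.
import re

sample_html = 'href="/uc?export=download&confirm=t&id=abc123"'
match = re.search(r"confirm=([^&\"]+)", sample_html)
if match:
    print(match.group(1))  # -> "t"
```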

View file

@ -24,7 +24,7 @@ def open_dialog():
if os.getenv("OPENPYPE_HEADLESS_MODE"):
print("!!! Can't open dialog in headless mode. Exiting.")
sys.exit(1)
from Qt import QtWidgets, QtCore
from qtpy import QtWidgets, QtCore
from .install_dialog import InstallDialog
scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None)
@ -47,7 +47,7 @@ def open_update_window(openpype_version):
if os.getenv("OPENPYPE_HEADLESS_MODE"):
print("!!! Can't open dialog in headless mode. Exiting.")
sys.exit(1)
from Qt import QtWidgets, QtCore
from qtpy import QtWidgets, QtCore
from .update_window import UpdateWindow
scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None)
@ -71,7 +71,7 @@ def show_message_dialog(title, message):
if os.getenv("OPENPYPE_HEADLESS_MODE"):
print("!!! Can't open dialog in headless mode. Exiting.")
sys.exit(1)
from Qt import QtWidgets, QtCore
from qtpy import QtWidgets, QtCore
from .message_dialog import MessageDialog
scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None)
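The three hunks above are part of the repo-wide move from the `Qt.py` shim to `qtpy`. A minimal sketch of the resulting pattern (assumes some Qt binding such as PySide2 is installed; `qtpy` only forwards to it):

```python
# Sketch of the qtpy import plus the optional high-DPI attribute guard used
# in the dialogs above. The attribute must be set before QApplication exists.
import sys

from qtpy import QtCore, QtWidgets

scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None)
if scale_attr is not None:
    QtWidgets.QApplication.setAttribute(scale_attr)

app = QtWidgets.QApplication(sys.argv)
```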

View file

@ -2,8 +2,7 @@
"""Open install dialog."""
import sys
from Qt import QtWidgets # noqa
from Qt.QtCore import Signal # noqa
from qtpy import QtWidgets
from .install_dialog import InstallDialog

View file

@ -57,11 +57,9 @@ class OpenPypeVersion(semver.VersionInfo):
"""Class for storing information about OpenPype version.
Attributes:
staging (bool): True if it is staging version
path (str): path to OpenPype
"""
staging = False
path = None
# this should match any string complying with https://semver.org/
_VERSION_REGEX = re.compile(r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>[a-zA-Z\d\-.]*))?(?:\+(?P<buildmetadata>[a-zA-Z\d\-.]*))?") # noqa: E501
@ -83,12 +81,10 @@ class OpenPypeVersion(semver.VersionInfo):
build (str): an optional build string
version (str): if set, it will be parsed and will override
parameters like `major`, `minor` and so on.
staging (bool): set to True if version is staging.
path (Path): path to version location.
"""
self.path = None
self.staging = False
if "version" in kwargs.keys():
if not kwargs.get("version"):
@ -113,29 +109,8 @@ class OpenPypeVersion(semver.VersionInfo):
if "path" in kwargs.keys():
kwargs.pop("path")
if kwargs.get("staging"):
self.staging = kwargs.get("staging", False)
kwargs.pop("staging")
if "staging" in kwargs.keys():
kwargs.pop("staging")
if self.staging:
if kwargs.get("build"):
if "staging" not in kwargs.get("build"):
kwargs["build"] = f"{kwargs.get('build')}-staging"
else:
kwargs["build"] = "staging"
if kwargs.get("build") and "staging" in kwargs.get("build", ""):
self.staging = True
super().__init__(*args, **kwargs)
def __eq__(self, other):
result = super().__eq__(other)
return bool(result and self.staging == other.staging)
def __repr__(self):
return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>"
@ -150,43 +125,11 @@ class OpenPypeVersion(semver.VersionInfo):
return True
if self.finalize_version() == other.finalize_version() and \
self.prerelease == other.prerelease and \
self.is_staging() and not other.is_staging():
self.prerelease == other.prerelease:
return True
return result
def set_staging(self) -> OpenPypeVersion:
"""Set version as staging and return it.
This will preserve current one.
Returns:
OpenPypeVersion: Set as staging.
"""
if self.staging:
return self
return self.replace(parts={"build": f"{self.build}-staging"})
def set_production(self) -> OpenPypeVersion:
"""Set version as production and return it.
This will preserve current one.
Returns:
OpenPypeVersion: Set as production.
"""
if not self.staging:
return self
return self.replace(
parts={"build": self.build.replace("-staging", "")})
def is_staging(self) -> bool:
"""Test if current version is staging one."""
return self.staging
def get_main_version(self) -> str:
"""Return main version component.
@ -218,21 +161,8 @@ class OpenPypeVersion(semver.VersionInfo):
if not m:
return None
version = OpenPypeVersion.parse(string[m.start():m.end()])
if "staging" in string[m.start():m.end()]:
version.staging = True
return version
@classmethod
def parse(cls, version):
"""Extends parse to handle ta handle staging variant."""
v = super().parse(version)
openpype_version = cls(major=v.major, minor=v.minor,
patch=v.patch, prerelease=v.prerelease,
build=v.build)
if v.build and "staging" in v.build:
openpype_version.staging = True
return openpype_version
def __hash__(self):
return hash(self.path) if self.path else hash(str(self))
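With the staging flag gone, an OpenPype version is now just the semver string itself. A quick illustration of how such strings decompose with the semver 2.x API this module builds on (the version string is an example only):

```python
# Example of the version strings OpenPypeVersion wraps after this change.
import semver

v = semver.VersionInfo.parse("3.15.11-nightly.2")
print(v.major, v.minor, v.patch)  # 3 15 11
print(v.prerelease)               # nightly.2
print(v.finalize_version())       # 3.15.11 (prerelease/build stripped)
```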
@ -382,80 +312,28 @@ class OpenPypeVersion(semver.VersionInfo):
return False
@classmethod
def get_local_versions(
cls, production: bool = None,
staging: bool = None
) -> List:
def get_local_versions(cls) -> List:
"""Get all versions available on this machine.
Arguments give ability to specify if filtering is needed. If both
arguments are set to None all found versions are returned.
Args:
production (bool): Return production versions.
staging (bool): Return staging versions.
Returns:
list: of compatible versions available on the machine.
"""
# Return all local versions if arguments are set to None
if production is None and staging is None:
production = True
staging = True
elif production is None and not staging:
production = True
elif staging is None and not production:
staging = True
# Just return empty output if both are disabled
if not production and not staging:
return []
# DEPRECATED: backwards compatible way to look for versions in root
dir_to_search = Path(user_data_dir("openpype", "pypeclub"))
versions = OpenPypeVersion.get_versions_from_directory(dir_to_search)
filtered_versions = []
for version in versions:
if version.is_staging():
if staging:
filtered_versions.append(version)
elif production:
filtered_versions.append(version)
return list(sorted(set(filtered_versions)))
return list(sorted(set(versions)))
@classmethod
def get_remote_versions(
cls, production: bool = None,
staging: bool = None
) -> List:
def get_remote_versions(cls) -> List:
"""Get all versions available in OpenPype Path.
Arguments give ability to specify if filtering is needed. If both
arguments are set to None all found versions are returned.
Args:
production (bool): Return production versions.
staging (bool): Return staging versions.
Returns:
list of OpenPypeVersions: Versions found in OpenPype path.
"""
# Return all local versions if arguments are set to None
if production is None and staging is None:
production = True
staging = True
elif production is None and not staging:
production = True
elif staging is None and not production:
staging = True
# Just return empty output if both are disabled
if not production and not staging:
return []
dir_to_search = None
if cls.openpype_path_is_accessible():
@ -476,14 +354,7 @@ class OpenPypeVersion(semver.VersionInfo):
versions = cls.get_versions_from_directory(dir_to_search)
filtered_versions = []
for version in versions:
if version.is_staging():
if staging:
filtered_versions.append(version)
elif production:
filtered_versions.append(version)
return list(sorted(set(filtered_versions)))
return list(sorted(set(versions)))
@staticmethod
def get_versions_from_directory(
@ -562,7 +433,6 @@ class OpenPypeVersion(semver.VersionInfo):
@staticmethod
def get_latest_version(
staging: bool = False,
local: bool = None,
remote: bool = None
) -> Union[OpenPypeVersion, None]:
@ -571,7 +441,6 @@ class OpenPypeVersion(semver.VersionInfo):
The version does not contain information about path and source.
This is utility version to get the latest version from all found.
Build version is not listed if staging is enabled.
Arguments 'local' and 'remote' define if local and remote repository
versions are used. All versions are used if both are not set (or set
@ -580,7 +449,6 @@ class OpenPypeVersion(semver.VersionInfo):
'False' in that case only build version can be used.
Args:
staging (bool, optional): List staging versions if True.
local (bool, optional): List local versions if True.
remote (bool, optional): List remote versions if True.
@ -599,22 +467,9 @@ class OpenPypeVersion(semver.VersionInfo):
remote = True
installed_version = OpenPypeVersion.get_installed_version()
local_versions = []
remote_versions = []
if local:
local_versions = OpenPypeVersion.get_local_versions(
staging=staging
)
if remote:
remote_versions = OpenPypeVersion.get_remote_versions(
staging=staging
)
all_versions = local_versions + remote_versions
if not staging:
all_versions.append(installed_version)
if not all_versions:
return None
local_versions = OpenPypeVersion.get_local_versions() if local else []
remote_versions = OpenPypeVersion.get_remote_versions() if remote else [] # noqa: E501
all_versions = local_versions + remote_versions + [installed_version]
all_versions.sort()
return all_versions[-1]
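The simplified lookup above boils down to: collect versions from whichever sources are enabled, append the installed build, sort, and take the last element. A standalone sketch with plain `semver` objects standing in for `OpenPypeVersion` (values are made up):

```python
# Minimal stand-in for the "pick the latest" logic shown above.
import semver

installed = semver.VersionInfo.parse("3.15.9")
local_versions = [semver.VersionInfo.parse(s) for s in ("3.15.8", "3.15.10")]
remote_versions = [semver.VersionInfo.parse(s) for s in ("3.15.11-nightly.1",)]

all_versions = local_versions + remote_versions + [installed]
all_versions.sort()
print(all_versions[-1])  # -> 3.15.11-nightly.1
```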
@ -705,7 +560,7 @@ class BootstrapRepos:
"""Get path for specific version in list of OpenPype versions.
Args:
version (str): Version string to look for (1.2.4+staging)
version (str): Version string to look for (1.2.4-nightly.1+test)
version_list (list of OpenPypeVersion): list of version to search.
Returns:
@ -807,6 +662,8 @@ class BootstrapRepos:
"""
version = OpenPypeVersion.version_in_str(zip_file.name)
destination_dir = self.data_dir / f"{version.major}.{version.minor}"
if not destination_dir.exists():
destination_dir.mkdir(parents=True)
destination = destination_dir / zip_file.name
if destination.exists():
@ -1131,14 +988,12 @@ class BootstrapRepos:
@staticmethod
def find_openpype_version(
version: Union[str, OpenPypeVersion],
staging: bool
version: Union[str, OpenPypeVersion]
) -> Union[OpenPypeVersion, None]:
"""Find location of specified OpenPype version.
Args:
version (Union[str, OpenPypeVersion): Version to find.
staging (bool): Filter staging versions.
Returns:
requested OpenPypeVersion.
@ -1151,9 +1006,7 @@ class BootstrapRepos:
if installed_version == version:
return installed_version
local_versions = OpenPypeVersion.get_local_versions(
staging=staging, production=not staging
)
local_versions = OpenPypeVersion.get_local_versions()
zip_version = None
for local_version in local_versions:
if local_version == version:
@ -1165,37 +1018,25 @@ class BootstrapRepos:
if zip_version is not None:
return zip_version
remote_versions = OpenPypeVersion.get_remote_versions(
staging=staging, production=not staging
)
for remote_version in remote_versions:
if remote_version == version:
return remote_version
return None
remote_versions = OpenPypeVersion.get_remote_versions()
return next(
(
remote_version for remote_version in remote_versions
if remote_version == version
), None)
@staticmethod
def find_latest_openpype_version(
staging: bool
) -> Union[OpenPypeVersion, None]:
def find_latest_openpype_version() -> Union[OpenPypeVersion, None]:
"""Find the latest available OpenPype version in all location.
Args:
staging (bool): True to look for staging versions.
Returns:
Latest OpenPype version on None if nothing was found.
"""
installed_version = OpenPypeVersion.get_installed_version()
local_versions = OpenPypeVersion.get_local_versions(
staging=staging
)
remote_versions = OpenPypeVersion.get_remote_versions(
staging=staging
)
all_versions = local_versions + remote_versions
if not staging:
all_versions.append(installed_version)
local_versions = OpenPypeVersion.get_local_versions()
remote_versions = OpenPypeVersion.get_remote_versions()
all_versions = local_versions + remote_versions + [installed_version]
if not all_versions:
return None
@ -1215,7 +1056,6 @@ class BootstrapRepos:
def find_openpype(
self,
openpype_path: Union[Path, str] = None,
staging: bool = False,
include_zips: bool = False
) -> Union[List[OpenPypeVersion], None]:
"""Get ordered dict of detected OpenPype version.
@ -1229,8 +1069,6 @@ class BootstrapRepos:
Args:
openpype_path (Path or str, optional): Try to find OpenPype on
the given path or url.
staging (bool, optional): Filter only staging version, skip them
otherwise.
include_zips (bool, optional): If set True it will try to find
OpenPype in zip files in given directory.
@ -1278,7 +1116,7 @@ class BootstrapRepos:
for dir_to_search in dirs_to_search:
try:
openpype_versions += self.get_openpype_versions(
dir_to_search, staging)
dir_to_search)
except ValueError:
# location is invalid, skip it
pass
@ -1643,15 +1481,11 @@ class BootstrapRepos:
return False
return True
def get_openpype_versions(
self,
openpype_dir: Path,
staging: bool = False) -> list:
def get_openpype_versions(self, openpype_dir: Path) -> list:
"""Get all detected OpenPype versions in directory.
Args:
openpype_dir (Path): Directory to scan.
staging (bool, optional): Find staging versions if True.
Returns:
list of OpenPypeVersion
@ -1669,8 +1503,7 @@ class BootstrapRepos:
for item in openpype_dir.iterdir():
# if the item is directory with major.minor version, dive deeper
if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
_versions = self.get_openpype_versions(
item, staging=staging)
_versions = self.get_openpype_versions(item)
if _versions:
openpype_versions += _versions
@ -1693,11 +1526,7 @@ class BootstrapRepos:
continue
detected_version.path = item
if staging and detected_version.is_staging():
openpype_versions.append(detected_version)
if not staging and not detected_version.is_staging():
openpype_versions.append(detected_version)
openpype_versions.append(detected_version)
return sorted(openpype_versions)
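The directory scan above recurses into `major.minor` folders and now collects every detected version, without the old staging filter. A rough, simplified sketch of that traversal (it returns plain names instead of `OpenPypeVersion` objects and skips the zip handling):

```python
# Simplified sketch of the recursive version-directory scan.
import re
from pathlib import Path

_MAJOR_MINOR = re.compile(r"^\d+\.\d+$")

def collect_version_names(root: Path) -> list:
    names = []
    if not root.is_dir():
        return names
    for item in root.iterdir():
        # dive into "3.15"-style folders, otherwise keep version-like names
        if item.is_dir() and _MAJOR_MINOR.match(item.name):
            names += collect_version_names(item)
        elif re.match(r"^\d+\.\d+\.\d+", item.name):
            names.append(item.name)
    return sorted(names)

print(collect_version_names(Path(".")))
```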

View file

@ -5,9 +5,7 @@ import sys
import re
import collections
from Qt import QtCore, QtGui, QtWidgets # noqa
from Qt.QtGui import QValidator # noqa
from Qt.QtCore import QTimer # noqa
from qtpy import QtCore, QtGui, QtWidgets
from .install_thread import InstallThread
from .tools import (

View file

@ -4,7 +4,7 @@ import os
import sys
from pathlib import Path
from Qt.QtCore import QThread, Signal, QObject # noqa
from qtpy import QtCore
from .bootstrap_repos import (
BootstrapRepos,
@ -17,7 +17,7 @@ from .bootstrap_repos import (
from .tools import validate_mongo_connection
class InstallThread(QThread):
class InstallThread(QtCore.QThread):
"""Install Worker thread.
This class takes care of finding OpenPype version on user entered path
@ -28,15 +28,14 @@ class InstallThread(QThread):
user data dir.
"""
progress = Signal(int)
message = Signal((str, bool))
progress = QtCore.Signal(int)
message = QtCore.Signal((str, bool))
def __init__(self, parent=None,):
self._mongo = None
self._path = None
self._result = None
QThread.__init__(self, parent)
super().__init__(parent)
def result(self):
"""Result of finished installation."""
@ -62,143 +61,117 @@ class InstallThread(QThread):
progress_callback=self.set_progress, message=self.message)
local_version = OpenPypeVersion.get_installed_version_str()
# if user did enter nothing, we install OpenPype from local version.
# zip content of `repos`, copy it to user data dir and append
# version to it.
if not self._path:
# user did not enter url
if not self._mongo:
# if it is not set in the environment
if not os.getenv("OPENPYPE_MONGO"):
# try to get it from settings registry
try:
self._mongo = bs.secure_registry.get_item(
"openPypeMongo")
except ValueError:
self.message.emit(
"!!! We need MongoDB URL to proceed.", True)
self._set_result(-1)
return
else:
self._mongo = os.getenv("OPENPYPE_MONGO")
else:
self.message.emit("Saving mongo connection string ...", False)
bs.secure_registry.set_item("openPypeMongo", self._mongo)
os.environ["OPENPYPE_MONGO"] = self._mongo
self.message.emit(
f"Detecting installed OpenPype versions in {bs.data_dir}",
False)
detected = bs.find_openpype(include_zips=True)
if detected:
if not OpenPypeVersion.get_installed_version().is_compatible(
detected[-1]):
self.message.emit((
f"Latest detected version {detected[-1]} "
"is not compatible with the currently running "
f"{local_version}"
), True)
self.message.emit((
"Filtering detected versions to compatible ones..."
), False)
detected = [
version for version in detected
if version.is_compatible(
OpenPypeVersion.get_installed_version())
]
if OpenPypeVersion(
version=local_version, path=Path()) < detected[-1]:
self.message.emit((
f"Latest installed version {detected[-1]} is newer "
f"then currently running {local_version}"
), False)
self.message.emit("Skipping OpenPype install ...", False)
if detected[-1].path.suffix.lower() == ".zip":
bs.extract_openpype(detected[-1])
self._set_result(0)
return
if OpenPypeVersion(version=local_version).get_main_version() == detected[-1].get_main_version(): # noqa
self.message.emit((
f"Latest installed version is the same as "
f"currently running {local_version}"
), False)
self.message.emit("Skipping OpenPype install ...", False)
self._set_result(0)
return
self.message.emit((
"All installed versions are older then "
f"currently running one {local_version}"
), False)
else:
if getattr(sys, 'frozen', False):
self.message.emit("None detected.", True)
self.message.emit(("We will use OpenPype coming with "
"installer."), False)
openpype_version = bs.create_version_from_frozen_code()
if not openpype_version:
self.message.emit(
f"!!! Install failed - {openpype_version}", True)
self._set_result(-1)
return
self.message.emit(f"Using: {openpype_version}", False)
bs.install_version(openpype_version)
self.message.emit(f"Installed as {openpype_version}", False)
self.progress.emit(100)
self._set_result(1)
return
else:
self.message.emit("None detected.", False)
self.message.emit(
f"We will use local OpenPype version {local_version}", False)
local_openpype = bs.create_version_from_live_code()
if not local_openpype:
self.message.emit(
f"!!! Install failed - {local_openpype}", True)
self._set_result(-1)
return
# user did not enter url
if self._mongo:
self.message.emit("Saving mongo connection string ...", False)
bs.secure_registry.set_item("openPypeMongo", self._mongo)
elif os.getenv("OPENPYPE_MONGO"):
self._mongo = os.getenv("OPENPYPE_MONGO")
else:
# try to get it from settings registry
try:
bs.install_version(local_openpype)
except (OpenPypeVersionExists,
OpenPypeVersionInvalid,
OpenPypeVersionIOError) as e:
self.message.emit(f"Installed failed: ", True)
self.message.emit(str(e), True)
self._mongo = bs.secure_registry.get_item(
"openPypeMongo")
except ValueError:
self.message.emit(
"!!! We need MongoDB URL to proceed.", True)
self._set_result(-1)
return
os.environ["OPENPYPE_MONGO"] = self._mongo
self.message.emit(f"Installed as {local_openpype}", False)
self.message.emit(
f"Detecting installed OpenPype versions in {bs.data_dir}",
False)
detected = bs.find_openpype(include_zips=True)
if not detected and getattr(sys, 'frozen', False):
self.message.emit("None detected.", True)
self.message.emit(("We will use OpenPype coming with "
"installer."), False)
openpype_version = bs.create_version_from_frozen_code()
if not openpype_version:
self.message.emit(
f"!!! Install failed - {openpype_version}", True)
self._set_result(-1)
return
self.message.emit(f"Using: {openpype_version}", False)
bs.install_version(openpype_version)
self.message.emit(f"Installed as {openpype_version}", False)
self.progress.emit(100)
self._set_result(1)
return
else:
# if we have mongo connection string, validate it, set it to
# user settings and get OPENPYPE_PATH from there.
if self._mongo:
if not validate_mongo_connection(self._mongo):
self.message.emit(
f"!!! invalid mongo url {self._mongo}", True)
self._set_result(-1)
return
bs.secure_registry.set_item("openPypeMongo", self._mongo)
os.environ["OPENPYPE_MONGO"] = self._mongo
self.message.emit(f"processing {self._path}", True)
repo_file = bs.process_entered_location(self._path)
if detected and not OpenPypeVersion.get_installed_version().is_compatible(detected[-1]): # noqa: E501
self.message.emit((
f"Latest detected version {detected[-1]} "
"is not compatible with the currently running "
f"{local_version}"
), True)
self.message.emit((
"Filtering detected versions to compatible ones..."
), False)
if not repo_file:
self.message.emit("!!! Cannot install", True)
self._set_result(-1)
# filter results to get only compatible versions
detected = [
version for version in detected
if version.is_compatible(
OpenPypeVersion.get_installed_version())
]
if detected:
if OpenPypeVersion(
version=local_version, path=Path()) < detected[-1]:
self.message.emit((
f"Latest installed version {detected[-1]} is newer "
f"then currently running {local_version}"
), False)
self.message.emit("Skipping OpenPype install ...", False)
if detected[-1].path.suffix.lower() == ".zip":
bs.extract_openpype(detected[-1])
self._set_result(0)
return
if OpenPypeVersion(version=local_version).get_main_version() == detected[-1].get_main_version(): # noqa: E501
self.message.emit((
f"Latest installed version is the same as "
f"currently running {local_version}"
), False)
self.message.emit("Skipping OpenPype install ...", False)
self._set_result(0)
return
self.message.emit((
"All installed versions are older then "
f"currently running one {local_version}"
), False)
self.message.emit("None detected.", False)
self.message.emit(
f"We will use local OpenPype version {local_version}", False)
local_openpype = bs.create_version_from_live_code()
if not local_openpype:
self.message.emit(
f"!!! Install failed - {local_openpype}", True)
self._set_result(-1)
return
try:
bs.install_version(local_openpype)
except (OpenPypeVersionExists,
OpenPypeVersionInvalid,
OpenPypeVersionIOError) as e:
self.message.emit(f"Installed failed: ", True)
self.message.emit(str(e), True)
self._set_result(-1)
return
self.message.emit(f"Installed as {local_openpype}", False)
self.progress.emit(100)
self._set_result(1)
return
self.progress.emit(100)
self._set_result(1)
return

View file

@ -1,4 +1,4 @@
from Qt import QtWidgets, QtGui
from qtpy import QtWidgets, QtGui
from .tools import (
load_stylesheet,

View file

@ -1,4 +1,4 @@
from Qt import QtCore, QtGui, QtWidgets # noqa
from qtpy import QtWidgets
class NiceProgressBar(QtWidgets.QProgressBar):

View file

@ -153,7 +153,8 @@ def get_openpype_global_settings(url: str) -> dict:
# Create mongo connection
client = MongoClient(url, **kwargs)
# Access settings collection
col = client["openpype"]["settings"]
openpype_db = os.environ.get("OPENPYPE_DATABASE_NAME") or "openpype"
col = client[openpype_db]["settings"]
# Query global settings
global_settings = col.find_one({"type": "global_settings"}) or {}
# Close Mongo connection
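The hunk above makes the settings database name overridable. A short sketch of the lookup it now performs (assumes `OPENPYPE_MONGO` is set; `OPENPYPE_DATABASE_NAME` is the new override, with "openpype" still the default):

```python
# Read global settings from an overridable database name.
import os

from pymongo import MongoClient

client = MongoClient(os.environ["OPENPYPE_MONGO"])
db_name = os.environ.get("OPENPYPE_DATABASE_NAME") or "openpype"
settings = client[db_name]["settings"].find_one({"type": "global_settings"}) or {}
client.close()
print(sorted(settings))  # top-level keys of the stored document
```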
@ -184,11 +185,7 @@ def get_openpype_path_from_settings(settings: dict) -> Union[str, None]:
if paths and isinstance(paths, str):
paths = [paths]
# Loop over paths and return only existing
for path in paths:
if os.path.exists(path):
return path
return None
return next((path for path in paths if os.path.exists(path)), None)
def get_expected_studio_version_str(
@ -206,10 +203,7 @@ def get_expected_studio_version_str(
mongo_url = os.environ.get("OPENPYPE_MONGO")
if global_settings is None:
global_settings = get_openpype_global_settings(mongo_url)
if staging:
key = "staging_version"
else:
key = "production_version"
key = "staging_version" if staging else "production_version"
return global_settings.get(key) or ""

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""Working thread for update."""
from Qt.QtCore import QThread, Signal, QObject # noqa
from qtpy import QtCore
from .bootstrap_repos import (
BootstrapRepos,
@ -8,7 +8,7 @@ from .bootstrap_repos import (
)
class UpdateThread(QThread):
class UpdateThread(QtCore.QThread):
"""Install Worker thread.
This class takes care of finding OpenPype version on user entered path
@ -19,13 +19,13 @@ class UpdateThread(QThread):
user data dir.
"""
progress = Signal(int)
message = Signal((str, bool))
progress = QtCore.Signal(int)
message = QtCore.Signal((str, bool))
def __init__(self, parent=None):
self._result = None
self._openpype_version = None
QThread.__init__(self, parent)
super().__init__(parent)
def set_version(self, openpype_version: OpenPypeVersion):
self._openpype_version = openpype_version

View file

@ -1,8 +1,10 @@
# -*- coding: utf-8 -*-
"""Progress window to show when OpenPype is updating/installing locally."""
import os
from qtpy import QtCore, QtGui, QtWidgets
from .update_thread import UpdateThread
from Qt import QtCore, QtGui, QtWidgets # noqa
from .bootstrap_repos import OpenPypeVersion
from .nice_progress_bar import NiceProgressBar
from .tools import load_stylesheet
@ -47,7 +49,6 @@ class UpdateWindow(QtWidgets.QDialog):
self._update_thread = None
self.resize(QtCore.QSize(self._width, self._height))
self._init_ui()
# Set stylesheet
@ -79,6 +80,16 @@ class UpdateWindow(QtWidgets.QDialog):
self._progress_bar = progress_bar
def showEvent(self, event):
super().showEvent(event)
current_size = self.size()
new_size = QtCore.QSize(
max(current_size.width(), self._width),
max(current_size.height(), self._height)
)
if current_size != new_size:
self.resize(new_size)
def _run_update(self):
"""Start install process.

View file

@ -14,10 +14,10 @@ AppId={{B9E9DF6A-5BDA-42DD-9F35-C09D564C4D93}
AppName={#MyAppName}
AppVersion={#AppVer}
AppVerName={#MyAppName} version {#AppVer}
AppPublisher=Orbi Tools s.r.o
AppPublisherURL=http://pype.club
AppSupportURL=http://pype.club
AppUpdatesURL=http://pype.club
AppPublisher=Ynput s.r.o
AppPublisherURL=https://ynput.io
AppSupportURL=https://ynput.io
AppUpdatesURL=https://ynput.io
DefaultDirName={autopf}\{#MyAppName}\{#AppVer}
UsePreviousAppDir=no
DisableProgramGroupPage=yes
@ -48,8 +48,8 @@ Source: "build\{#build}\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdir
; NOTE: Don't use "Flags: ignoreversion" on any shared system files
[Icons]
Name: "{autoprograms}\{#MyAppName}"; Filename: "{app}\openpype_gui.exe"
Name: "{autodesktop}\{#MyAppName}"; Filename: "{app}\openpype_gui.exe"; Tasks: desktopicon
Name: "{autoprograms}\{#MyAppName} {#AppVer}"; Filename: "{app}\openpype_gui.exe"
Name: "{autodesktop}\{#MyAppName} {#AppVer}"; Filename: "{app}\openpype_gui.exe"; Tasks: desktopicon
[Run]
Filename: "{app}\openpype_gui.exe"; Description: "{cm:LaunchProgram,OpenPype}"; Flags: nowait postinstall skipifsilent

View file

@ -0,0 +1,3 @@
This directory is for storing external addons that need to be included in the pipeline when distributed.
The directory is ignored by Git, but included in the zip and installation files.

View file

@ -1,112 +0,0 @@
from .settings import (
get_system_settings,
get_project_settings,
get_current_project_settings,
get_anatomy_settings,
SystemSettings,
ProjectSettings
)
from .lib import (
PypeLogger,
Logger,
Anatomy,
execute,
run_subprocess,
version_up,
get_asset,
get_workdir_data,
get_version_from_path,
get_last_version_from_path,
get_app_environments_for_context,
source_hash,
get_latest_version,
get_local_site_id,
change_openpype_mongo_url,
create_project_folders,
get_project_basic_paths
)
from .lib.mongo import (
get_default_components
)
from .lib.applications import (
ApplicationManager
)
from .lib.avalon_context import (
BuildWorkfile
)
from . import resources
from .plugin import (
Extractor,
ValidatePipelineOrder,
ValidateContentsOrder,
ValidateSceneOrder,
ValidateMeshOrder,
)
# temporary fix, might
from .action import (
get_errored_instances_from_context,
RepairAction,
RepairContextAction
)
__all__ = [
"get_system_settings",
"get_project_settings",
"get_current_project_settings",
"get_anatomy_settings",
"get_project_basic_paths",
"SystemSettings",
"ProjectSettings",
"PypeLogger",
"Logger",
"Anatomy",
"execute",
"get_default_components",
"ApplicationManager",
"BuildWorkfile",
# Resources
"resources",
# plugin classes
"Extractor",
# ordering
"ValidatePipelineOrder",
"ValidateContentsOrder",
"ValidateSceneOrder",
"ValidateMeshOrder",
# action
"get_errored_instances_from_context",
"RepairAction",
"RepairContextAction",
# get contextual data
"version_up",
"get_asset",
"get_workdir_data",
"get_version_from_path",
"get_last_version_from_path",
"get_app_environments_for_context",
"source_hash",
"run_subprocess",
"get_latest_version",
"get_local_site_id",
"change_openpype_mongo_url",
"get_project_basic_paths",
"create_project_folders"
]

View file

@ -16,14 +16,15 @@ from .pype_commands import PypeCommands
@click.option("--use-staging", is_flag=True,
expose_value=False, help="use staging variants")
@click.option("--list-versions", is_flag=True, expose_value=False,
help=("list all detected versions. Use With `--use-staging "
"to list staging versions."))
help="list all detected versions.")
@click.option("--validate-version", expose_value=False,
help="validate given version integrity")
@click.option("--debug", is_flag=True, expose_value=False,
help=("Enable debug"))
help="Enable debug")
@click.option("--verbose", expose_value=False,
help=("Change OpenPype log level (debug - critical or 0-50)"))
@click.option("--automatic-tests", is_flag=True, expose_value=False,
help=("Run in automatic tests mode"))
def main(ctx):
"""Pype is main command serving as entry point to pipeline system.
@ -366,11 +367,15 @@ def run(script):
"--timeout",
help="Provide specific timeout value for test case",
default=None)
@click.option("-so",
"--setup_only",
help="Only create dbs, do not run tests",
default=None)
def runtests(folder, mark, pyargs, test_data_folder, persist, app_variant,
timeout):
timeout, setup_only):
"""Run all automatic tests after proper initialization via start.py"""
PypeCommands().run_tests(folder, mark, pyargs, test_data_folder,
persist, app_variant, timeout)
persist, app_variant, timeout, setup_only)
@main.command()
@ -410,11 +415,12 @@ def repack_version(directory):
@main.command()
@click.option("--project", help="Project name")
@click.option(
"--dirpath", help="Directory where package is stored", default=None
)
def pack_project(project, dirpath):
"--dirpath", help="Directory where package is stored", default=None)
@click.option(
"--dbonly", help="Store only Database data", default=False, is_flag=True)
def pack_project(project, dirpath, dbonly):
"""Create a package of project with all files and database dump."""
PypeCommands().pack_project(project, dirpath)
PypeCommands().pack_project(project, dirpath, dbonly)
@main.command()
@ -422,27 +428,27 @@ def pack_project(project, dirpath):
@click.option(
"--root", help="Replace root which was stored in project", default=None
)
def unpack_project(zipfile, root):
@click.option(
"--dbonly", help="Store only Database data", default=False, is_flag=True)
def unpack_project(zipfile, root, dbonly):
"""Create a package of project with all files and database dump."""
PypeCommands().unpack_project(zipfile, root)
PypeCommands().unpack_project(zipfile, root, dbonly)
@main.command()
def interactive():
"""Interative (Python like) console.
"""Interactive (Python like) console.
Helpfull command not only for development to directly work with python
Helpful command not only for development to directly work with python
interpreter.
Warning:
Executable 'openpype_gui' on windows won't work.
Executable 'openpype_gui' on Windows won't work.
"""
from openpype.version import __version__
banner = "OpenPype {}\nPython {} on {}".format(
__version__, sys.version, sys.platform
)
banner = f"OpenPype {__version__}\nPython {sys.version} on {sys.platform}"
code.interact(banner)
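The new `--setup_only` and `--dbonly` switches follow click's boolean-flag idiom. A standalone sketch of that idiom (the command and option names here are illustrative, not part of the OpenPype CLI):

```python
# Boolean is_flag options default to False and become True when passed.
import click

@click.command()
@click.option("--project", help="Project name")
@click.option(
    "--dbonly", help="Store only Database data", default=False, is_flag=True)
def demo_pack(project, dbonly):
    click.echo(f"packing {project} (database only: {dbonly})")

if __name__ == "__main__":
    demo_pack()
```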

View file

@ -3,7 +3,7 @@
Goal is that most of functions here are called on (or with) an object
that has project name as a context (e.g. on 'ProjectEntity'?).
+ We will need more specific functions doing wery specific queires really fast.
+ We will need more specific functions doing very specific queries really fast.
"""
import re
@ -69,6 +69,19 @@ def convert_ids(in_ids):
def get_projects(active=True, inactive=False, fields=None):
"""Yield all project entity documents.
Args:
active (Optional[bool]): Include active projects. Defaults to True.
inactive (Optional[bool]): Include inactive projects.
Defaults to False.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Yields:
dict: Project entity data which can be reduced to specified 'fields'.
None is returned if project with specified filters was not found.
"""
mongodb = get_project_database()
for project_name in mongodb.collection_names():
if project_name in ("system.indexes",):
@ -81,6 +94,20 @@ def get_projects(active=True, inactive=False, fields=None):
def get_project(project_name, active=True, inactive=True, fields=None):
"""Return project entity document by project name.
Args:
project_name (str): Name of project.
active (Optional[bool]): Allow active project. Defaults to True.
inactive (Optional[bool]): Allow inactive project. Defaults to True.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Union[Dict, None]: Project entity data which can be reduced to
specified 'fields'. None is returned if project with specified
filters was not found.
"""
# Skip if both are disabled
if not active and not inactive:
return None
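The expanded docstrings above describe the optional `fields` projection. A usage sketch under the assumption that these helpers are exposed from `openpype.client` (the project name is illustrative):

```python
# Reduce returned documents to the listed fields; None returns everything.
from openpype.client import get_project, get_projects

project = get_project("demo_Commercial", fields=["name", "data.code"])
for doc in get_projects(active=True, inactive=False, fields=["name"]):
    print(doc["name"])
```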
@ -124,17 +151,18 @@ def get_whole_project(project_name):
def get_asset_by_id(project_name, asset_id, fields=None):
"""Receive asset data by it's id.
"""Receive asset data by its id.
Args:
project_name (str): Name of project where to look for queried entities.
asset_id (Union[str, ObjectId]): Asset's id.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
dict: Asset entity data.
None: Asset was not found by id.
Union[Dict, None]: Asset entity data which can be reduced to
specified 'fields'. None is returned if asset with specified
filters was not found.
"""
asset_id = convert_id(asset_id)
@ -147,17 +175,18 @@ def get_asset_by_id(project_name, asset_id, fields=None):
def get_asset_by_name(project_name, asset_name, fields=None):
"""Receive asset data by it's name.
"""Receive asset data by its name.
Args:
project_name (str): Name of project where to look for queried entities.
asset_name (str): Asset's name.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
dict: Asset entity data.
None: Asset was not found by name.
Union[Dict, None]: Asset entity data which can be reduced to
specified 'fields'. None is returned if asset with specified
filters was not found.
"""
if not asset_name:
@ -193,10 +222,10 @@ def _get_assets(
be found.
asset_names (Iterable[str]): Names of assets that should be found.
parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
standard (bool): Query standart assets (type 'asset').
standard (bool): Query standard assets (type 'asset').
archived (bool): Query archived assets (type 'archived_asset').
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Cursor: Query cursor as iterable which returns asset documents matching
@ -261,8 +290,8 @@ def get_assets(
asset_names (Iterable[str]): Names of assets that should be found.
parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
archived (bool): Add also archived assets.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Cursor: Query cursor as iterable which returns asset documents matching
@ -300,8 +329,8 @@ def get_archived_assets(
be found.
asset_names (Iterable[str]): Names of assets that should be found.
parent_ids (Iterable[Union[str, ObjectId]]): Parent asset ids.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Cursor: Query cursor as iterable which returns asset documents matching
@ -356,17 +385,18 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None):
def get_subset_by_id(project_name, subset_id, fields=None):
"""Single subset entity data by it's id.
"""Single subset entity data by its id.
Args:
project_name (str): Name of project where to look for queried entities.
subset_id (Union[str, ObjectId]): Id of subset which should be found.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If subset with specified filters was not found.
Dict: Subset document which can be reduced to specified 'fields'.
Union[Dict, None]: Subset entity data which can be reduced to
specified 'fields'. None is returned if subset with specified
filters was not found.
"""
subset_id = convert_id(subset_id)
@ -379,20 +409,19 @@ def get_subset_by_id(project_name, subset_id, fields=None):
def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
"""Single subset entity data by it's name and it's version id.
"""Single subset entity data by its name and its version id.
Args:
project_name (str): Name of project where to look for queried entities.
subset_name (str): Name of subset.
asset_id (Union[str, ObjectId]): Id of parent asset.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Union[None, Dict[str, Any]]: None if subset with specified filters was
not found or dict subset document which can be reduced to
specified 'fields'.
Union[Dict, None]: Subset entity data which can be reduced to
specified 'fields'. None is returned if subset with specified
filters was not found.
"""
if not subset_name:
return None
@ -434,8 +463,8 @@ def get_subsets(
names_by_asset_ids (dict[ObjectId, List[str]]): Complex filtering
using asset ids and list of subset names under the asset.
archived (bool): Look for archived subsets too.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Cursor: Iterable cursor yielding all matching subsets.
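A hedged usage sketch of the complex filter described above (assuming `get_subsets` is exposed from `openpype.client` like the other query helpers; the project name, asset id and subset names are placeholders):

```python
from bson.objectid import ObjectId
from openpype.client import get_subsets  # assumed public export

asset_id = ObjectId("0" * 24)  # placeholder id
names_by_asset_ids = {asset_id: ["modelMain", "rigMain"]}  # placeholder subset names

for subset in get_subsets(
    "myProject",
    names_by_asset_ids=names_by_asset_ids,
    fields=["name", "parent"],
):
    print(subset["name"])
```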
@ -520,17 +549,18 @@ def get_subset_families(project_name, subset_ids=None):
def get_version_by_id(project_name, version_id, fields=None):
"""Single version entity data by it's id.
"""Single version entity data by its id.
Args:
project_name (str): Name of project where to look for queried entities.
version_id (Union[str, ObjectId]): Id of version which should be found.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If version with specified filters was not found.
Dict: Version document which can be reduced to specified 'fields'.
Union[Dict, None]: Version entity data which can be reduced to
specified 'fields'. None is returned if version with specified
filters was not found.
"""
version_id = convert_id(version_id)
@ -546,18 +576,19 @@ def get_version_by_id(project_name, version_id, fields=None):
def get_version_by_name(project_name, version, subset_id, fields=None):
"""Single version entity data by it's name and subset id.
"""Single version entity data by its name and subset id.
Args:
project_name (str): Name of project where to look for queried entities.
version (int): name of version entity (it's version).
version (int): name of version entity (its version).
subset_id (Union[str, ObjectId]): Id of version which should be found.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If version with specified filters was not found.
Dict: Version document which can be reduced to specified 'fields'.
Union[Dict, None]: Version entity data which can be reduced to
specified 'fields'. None is returned if version with specified
filters was not found.
"""
subset_id = convert_id(subset_id)
@ -574,7 +605,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None):
def version_is_latest(project_name, version_id):
"""Is version the latest from it's subset.
"""Is version the latest from its subset.
Note:
Hero versions are considered as latest.
@ -680,8 +711,8 @@ def get_versions(
versions (Iterable[int]): Version names (as integers).
Filter ignored if 'None' is passed.
hero (bool): Look also for hero versions.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Cursor: Iterable cursor yielding all matching versions.
@ -705,12 +736,13 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None):
project_name (str): Name of project where to look for queried entities.
subset_id (Union[str, ObjectId]): Subset id under which
is hero version.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If hero version for passed subset id does not exists.
Dict: Hero version entity data.
Union[Dict, None]: Hero version entity data which can be reduced to
specified 'fields'. None is returned if hero version with specified
filters was not found.
"""
subset_id = convert_id(subset_id)
@ -730,17 +762,18 @@ def get_hero_version_by_subset_id(project_name, subset_id, fields=None):
def get_hero_version_by_id(project_name, version_id, fields=None):
"""Hero version by it's id.
"""Hero version by its id.
Args:
project_name (str): Name of project where to look for queried entities.
version_id (Union[str, ObjectId]): Hero version id.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If hero version with passed id was not found.
Dict: Hero version entity data.
Union[Dict, None]: Hero version entity data which can be reduced to
specified 'fields'. None is returned if hero version with specified
filters was not found.
"""
version_id = convert_id(version_id)
@ -773,8 +806,8 @@ def get_hero_versions(
should look for hero versions. Filter ignored if 'None' is passed.
version_ids (Iterable[Union[str, ObjectId]]): Hero version ids. Filter
ignored if 'None' is passed.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Cursor|list: Iterable yielding hero versions matching passed filters.
@ -801,8 +834,8 @@ def get_output_link_versions(project_name, version_id, fields=None):
project_name (str): Name of project where to look for queried entities.
version_id (Union[str, ObjectId]): Version id which can be used
as input link for other versions.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Iterable: Iterable cursor yielding versions that are used as input
@ -822,14 +855,15 @@ def get_output_link_versions(project_name, version_id, fields=None):
return conn.find(query_filter, _prepare_fields(fields))
def get_last_versions(project_name, subset_ids, fields=None):
def get_last_versions(project_name, subset_ids, active=None, fields=None):
"""Latest versions for entered subset_ids.
Args:
project_name (str): Name of project where to look for queried entities.
subset_ids (Iterable[Union[str, ObjectId]]): List of subset ids.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
active (Optional[bool]): If True only active versions are returned.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
dict[ObjectId, int]: Key is subset id and value is last version name.
@ -866,12 +900,21 @@ def get_last_versions(project_name, subset_ids, fields=None):
if name_needed:
group_item["name"] = {"$last": "$name"}
aggregate_filter = {
"type": "version",
"parent": {"$in": subset_ids}
}
if active is False:
aggregate_filter["data.active"] = active
elif active is True:
aggregate_filter["$or"] = [
{"data.active": {"$exists": 0}},
{"data.active": active},
]
aggregation_pipeline = [
# Find all versions of those subsets
{"$match": {
"type": "version",
"parent": {"$in": subset_ids}
}},
{"$match": aggregate_filter},
# Sorting versions all together
{"$sort": {"name": 1}},
# Group them by "parent", but only take the last
@ -913,12 +956,13 @@ def get_last_version_by_subset_id(project_name, subset_id, fields=None):
Args:
project_name (str): Name of project where to look for queried entities.
subset_id (Union[str, ObjectId]): Id of version which should be found.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If version with specified filters was not found.
Dict: Version document which can be reduced to specified 'fields'.
Union[Dict, None]: Version entity data which can be reduced to
specified 'fields'. None is returned if version with specified
filters was not found.
"""
subset_id = convert_id(subset_id)
@ -945,12 +989,13 @@ def get_last_version_by_subset_name(
asset_id (Union[str, ObjectId]): Asset id which is parent of passed
subset name.
asset_name (str): Asset name which is parent of passed subset name.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If version with specified filters was not found.
Dict: Version document which can be reduced to specified 'fields'.
Union[Dict, None]: Version entity data which can be reduced to
specified 'fields'. None is returned if version with specified
filters was not found.
"""
if not asset_id and not asset_name:
@ -972,18 +1017,18 @@ def get_last_version_by_subset_name(
def get_representation_by_id(project_name, representation_id, fields=None):
"""Representation entity data by it's id.
"""Representation entity data by its id.
Args:
project_name (str): Name of project where to look for queried entities.
representation_id (Union[str, ObjectId]): Representation id.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If representation with specified filters was not found.
Dict: Representation entity data which can be reduced
to specified 'fields'.
Union[Dict, None]: Representation entity data which can be reduced to
specified 'fields'. None is returned if representation with
specified filters was not found.
"""
if not representation_id:
@ -1004,19 +1049,19 @@ def get_representation_by_id(project_name, representation_id, fields=None):
def get_representation_by_name(
project_name, representation_name, version_id, fields=None
):
"""Representation entity data by it's name and it's version id.
"""Representation entity data by its name and its version id.
Args:
project_name (str): Name of project where to look for queried entities.
representation_name (str): Representation name.
version_id (Union[str, ObjectId]): Id of parent version entity.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If representation with specified filters was not found.
Dict: Representation entity data which can be reduced
to specified 'fields'.
Union[dict[str, Any], None]: Representation entity data which can be
reduced to specified 'fields'. None is returned if representation
with specified filters was not found.
"""
version_id = convert_id(version_id)
@ -1185,7 +1230,7 @@ def get_representations(
standard=True,
fields=None
):
"""Representaion entities data from one project filtered by filters.
"""Representation entities data from one project filtered by filters.
Filters are additive (all conditions must pass to return subset).
@ -1202,8 +1247,8 @@ def get_representations(
names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
using version ids and list of names under the version.
archived (bool): Output will also contain archived representations.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Cursor: Iterable cursor yielding all matching representations.
@ -1216,7 +1261,7 @@ def get_representations(
version_ids=version_ids,
context_filters=context_filters,
names_by_version_ids=names_by_version_ids,
standard=True,
standard=standard,
archived=archived,
fields=fields
)
@ -1231,7 +1276,7 @@ def get_archived_representations(
names_by_version_ids=None,
fields=None
):
"""Archived representaion entities data from project with applied filters.
"""Archived representation entities data from project with applied filters.
Filters are additive (all conditions must pass to return subset).
@ -1247,8 +1292,8 @@ def get_archived_representations(
representation context fields.
names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
using version ids and list of names under the version.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Cursor: Iterable cursor yielding all matching representations.
@ -1377,8 +1422,8 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id):
src_id (Union[str, ObjectId]): Id of source entity.
Returns:
ObjectId: Thumbnail id assigned to entity.
None: If Source entity does not have any thumbnail id assigned.
Union[ObjectId, None]: Thumbnail id assigned to entity. None is returned
if source entity does not have any thumbnail id assigned.
"""
if not src_type or not src_id:
@ -1397,14 +1442,14 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None):
"""Receive thumbnails entity data.
Thumbnail entity can be used to receive binary content of thumbnail based
on it's content and ThumbnailResolvers.
on its content and ThumbnailResolvers.
Args:
project_name (str): Name of project where to look for queried entities.
thumbnail_ids (Iterable[Union[str, ObjectId]]): Ids of thumbnail
entities.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
cursor: Cursor of queried documents.
@ -1429,12 +1474,13 @@ def get_thumbnail(project_name, thumbnail_id, fields=None):
Args:
project_name (str): Name of project where to look for queried entities.
thumbnail_id (Union[str, ObjectId]): Id of thumbnail entity.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
None: If thumbnail with specified id was not found.
Dict: Thumbnail entity data which can be reduced to specified 'fields'.
Union[Dict, None]: Thumbnail entity data which can be reduced to
specified 'fields'. None is returned if thumbnail with specified
filters was not found.
"""
if not thumbnail_id:
@ -1458,8 +1504,13 @@ def get_workfile_info(
project_name (str): Name of project where to look for queried entities.
asset_id (Union[str, ObjectId]): Id of asset entity.
task_name (str): Task name on asset.
fields (Iterable[str]): Fields that should be returned. All fields are
returned if 'None' is passed.
fields (Optional[Iterable[str]]): Fields that should be returned. All
fields are returned if 'None' is passed.
Returns:
Union[Dict, None]: Workfile entity data which can be reduced to
specified 'fields'. None is returned if workfile with specified
filters was not found.
"""
if not asset_id or not task_name or not filename:

View file

@ -164,7 +164,6 @@ def get_linked_representation_id(
# Recursive graph lookup for inputs
{"$graphLookup": graph_lookup}
]
conn = get_project_connection(project_name)
result = conn.aggregate(query_pipeline)
referenced_version_ids = _process_referenced_pipeline_result(
@ -213,7 +212,7 @@ def _process_referenced_pipeline_result(result, link_type):
for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
output_links = output.get("data", {}).get("inputLinks")
if not output_links:
if not output_links and output["type"] != "hero_version":
continue
# Leaf
@ -232,6 +231,9 @@ def _process_referenced_pipeline_result(result, link_type):
def _filter_input_links(input_links, link_type, correctly_linked_ids):
if not input_links: # to handle hero versions
return
for input_link in input_links:
if link_type and input_link["type"] != link_type:
continue

View file

@ -5,6 +5,12 @@ import logging
import pymongo
import certifi
from bson.json_util import (
loads,
dumps,
CANONICAL_JSON_OPTIONS
)
if sys.version_info[0] == 2:
from urlparse import urlparse, parse_qs
else:
@ -15,6 +21,49 @@ class MongoEnvNotSet(Exception):
pass
def documents_to_json(docs):
"""Convert documents to json string.
Args:
docs (Union[list[dict[str, Any]], dict[str, Any]]): Document/s to convert
to json string.
Returns:
str: Json string with mongo documents.
"""
return dumps(docs, json_options=CANONICAL_JSON_OPTIONS)
def load_json_file(filepath):
"""Load mongo documents from a json file.
Args:
filepath (str): Path to a json file.
Returns:
Union[dict[str, Any], list[dict[str, Any]]]: Loaded content from a
json file.
"""
if not os.path.exists(filepath):
raise ValueError("Path {} was not found".format(filepath))
with open(filepath, "r") as stream:
content = stream.read()
return loads("".join(content))
def get_project_database_name():
"""Name of database name where projects are available.
Returns:
str: Name of database where projects are.
"""
return os.environ.get("AVALON_DB") or "avalon"
def _decompose_url(url):
"""Decompose mongo url to basic components.
@ -210,12 +259,102 @@ class OpenPypeMongoConnection:
return mongo_client
def get_project_database():
db_name = os.environ.get("AVALON_DB") or "avalon"
return OpenPypeMongoConnection.get_mongo_client()[db_name]
# ------ Helper Mongo functions ------
# Functions can be helpful with custom tools to backup/restore mongo state.
# Not meant as API functionality that should be used in production codebase!
def get_collection_documents(database_name, collection_name, as_json=False):
"""Query all documents from a collection.
Args:
database_name (str): Name of database where to look for collection.
collection_name (str): Name of collection to query.
as_json (Optional[bool]): Output should be a json string.
Default: 'False'
Returns:
Union[list[dict[str, Any]], str]: Queried documents.
"""
client = OpenPypeMongoConnection.get_mongo_client()
output = list(client[database_name][collection_name].find({}))
if as_json:
output = documents_to_json(output)
return output
def get_project_connection(project_name):
def store_collection(filepath, database_name, collection_name):
"""Store collection documents to a json file.
Args:
filepath (str): Path to a json file where documents will be stored.
database_name (str): Name of database where to look for collection.
collection_name (str): Name of collection to store.
"""
# Make sure directory for output file exists
dirpath = os.path.dirname(filepath)
if not os.path.isdir(dirpath):
os.makedirs(dirpath)
content = get_collection_documents(database_name, collection_name, True)
with open(filepath, "w") as stream:
stream.write(content)
def replace_collection_documents(docs, database_name, collection_name):
"""Replace all documents in a collection with passed documents.
Warnings:
All existing documents in collection will be removed if there are any.
Args:
docs (list[dict[str, Any]]): New documents.
database_name (str): Name of database where to look for collection.
collection_name (str): Name of collection where new documents are
uploaded.
"""
client = OpenPypeMongoConnection.get_mongo_client()
database = client[database_name]
if collection_name in database.list_collection_names():
database.drop_collection(collection_name)
col = database[collection_name]
col.insert_many(docs)
def restore_collection(filepath, database_name, collection_name):
"""Restore/replace collection from a json filepath.
Warnings:
All existing documents in collection will be removed if there are any.
Args:
filepath (str): Path to a json with documents.
database_name (str): Name of database where to look for collection.
collection_name (str): Name of collection where new documents are
uploaded.
"""
docs = load_json_file(filepath)
replace_collection_documents(docs, database_name, collection_name)
def get_project_database(database_name=None):
"""Database object where project collections are.
Args:
database_name (Optional[str]): Custom name of database.
Returns:
pymongo.database.Database: Database where project collections are.
"""
if not database_name:
database_name = get_project_database_name()
return OpenPypeMongoConnection.get_mongo_client()[database_name]
def get_project_connection(project_name, database_name=None):
"""Direct access to mongo collection.
We're trying to avoid using direct access to mongo. This should be used
@ -223,13 +362,83 @@ def get_project_connection(project_name):
api calls for that.
Args:
project_name(str): Project name for which collection should be
project_name (str): Project name for which collection should be
returned.
database_name (Optional[str]): Custom name of database.
Returns:
pymongo.Collection: Collection realated to passed project.
pymongo.collection.Collection: Collection related to passed project.
"""
if not project_name:
raise ValueError("Invalid project name {}".format(str(project_name)))
return get_project_database()[project_name]
return get_project_database(database_name)[project_name]
def get_project_documents(project_name, database_name=None):
"""Query all documents from project collection.
Args:
project_name (str): Name of project.
database_name (Optional[str]): Name of mongo database where to look for
project.
Returns:
list[dict[str, Any]]: Documents in project collection.
"""
if not database_name:
database_name = get_project_database_name()
return get_collection_documents(database_name, project_name)
def store_project_documents(project_name, filepath, database_name=None):
"""Store project documents to a file as json string.
Args:
project_name (str): Name of project to store.
filepath (str): Path to a json file where output will be stored.
database_name (Optional[str]): Name of mongo database where to look for
project.
"""
if not database_name:
database_name = get_project_database_name()
store_collection(filepath, database_name, project_name)
def replace_project_documents(project_name, docs, database_name=None):
"""Replace documents in mongo with passed documents.
Warnings:
Existing project collection is removed if it exists in mongo.
Args:
project_name (str): Name of project.
docs (list[dict[str, Any]]): Documents to restore.
database_name (Optional[str]): Name of mongo database where project
collection will be created.
"""
if not database_name:
database_name = get_project_database_name()
replace_collection_documents(docs, database_name, project_name)
def restore_project_documents(project_name, filepath, database_name=None):
"""Replace documents in mongo with passed documents.
Warnings:
Existing project collection is removed if it exists in mongo.
Args:
project_name (str): Name of project.
filepath (str): Path to a json file with project documents.
database_name (Optional[str]): Name of mongo database where project
collection will be created.
"""
if not database_name:
database_name = get_project_database_name()
restore_collection(filepath, database_name, project_name)
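A hedged example tying the backup/restore helpers together (the module path, project name and file path are placeholders; as noted above, these helpers are meant for ad-hoc tooling, not for production code paths):

```python
# Assumed module path; adjust to wherever these helpers live in the codebase.
from openpype.client.mongo import (
    store_project_documents,
    restore_project_documents,
)

backup_path = "/tmp/backups/myProject.json"  # placeholder path

# Dump all documents of the project collection into a json file ...
store_project_documents("myProject", backup_path)

# ... and later drop the collection and re-create it from that file.
restore_project_documents("myProject", backup_path)
```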

View file

@ -2,7 +2,7 @@
## Reason
Preparation for OpenPype v4 server. The goal is to remove direct mongo calls in the code as preparation for a different source of data, and to start thinking about database calls less as mongo calls and more universally. To do so, a simple wrapper around database calls was implemented so that pymongo-specific code is not used directly.
Current goal is not to make universal database model which can be easily replaced with any different source of data but to make it close as possible. Current implementation of OpenPype is too tighly connected to pymongo and it's abilities so we're trying to get closer with long term changes that can be used even in current state.
Current goal is not to make a universal database model which can be easily replaced with any different source of data, but to get as close to that as possible. The current implementation of OpenPype is too tightly connected to pymongo and its abilities, so we're trying to get closer with long term changes that can be used even in the current state.
## Queries
Query functions don't use the full potential of mongo queries, like very specific queries based on subdictionaries or unknown structures. We try to avoid these calls as much as possible because they probably won't be available in the future. If it's really necessary, a new function can be added, but only if it's reasonable for the overall logic. All query functions were moved to `~/client/entities.py`. Each function has arguments with available filters and a possible reduction of returned keys for each entity.
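For illustration, a short sketch of how the query helpers are meant to be used (assuming they are exposed from `openpype.client`; project and asset names are placeholders), reducing returned keys with `fields` instead of post-filtering in Python:

```python
from openpype.client import get_asset_by_name, get_assets  # assumed public exports

# Single-entity helpers return a dict or None.
asset = get_asset_by_name("myProject", "sh010", fields=["_id", "name", "data.fps"])

# Multi-entity helpers return an iterable cursor.
for asset_doc in get_assets("myProject", fields=["name"]):
    print(asset_doc["name"])
```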
@ -14,7 +14,7 @@ Changes are a little bit complicated. Mongo has many options how update can happ
Create operations expect already prepared document data; for that purpose there are functions creating skeletal structures of documents (they do not fill all required data), and except for `_id` all data should be right. Existence of the entity is not validated, so if the same creation operation is sent n times it will create the entity n times, which can cause issues.
### Update
Update operation require entity id and keys that should be changed, update dictionary must have {"key": value}. If value should be set in nested dictionary the key must have also all subkeys joined with dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify update dictionaries were prepared functions which does that for you, their name has template `prepare_<entity type>_update_data` - they work on comparison of previous document and new document. If there is missing function for requested entity type it is because we didn't need it yet and require implementaion.
Update operations require an entity id and the keys that should be changed; the update dictionary must have the form {"key": value}. If a value should be set in a nested dictionary, the key must also contain all subkeys joined with a dot `.` (e.g. `{"data": {"fps": 25}}` -> `{"data.fps": 25}`). To simplify creation of update dictionaries there are prepared functions which do that for you; their names follow the template `prepare_<entity type>_update_data` - they work on comparison of a previous document and a new document. If a function for the requested entity type is missing, it is because we didn't need it yet and it requires implementation.
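The dot-joined key convention can be illustrated with a small standalone helper (a sketch only, independent of the `prepare_<entity type>_update_data` functions mentioned above):

```python
def flatten_update_data(changes, _prefix=""):
    """Convert nested dicts into mongo-style dot-joined update keys.

    {"data": {"fps": 25}} -> {"data.fps": 25}
    """
    output = {}
    for key, value in changes.items():
        full_key = "{}.{}".format(_prefix, key) if _prefix else key
        if isinstance(value, dict):
            output.update(flatten_update_data(value, full_key))
        else:
            output[full_key] = value
    return output

print(flatten_update_data({"data": {"fps": 25, "resolutionWidth": 1920}}))
# {'data.fps': 25, 'data.resolutionWidth': 1920}
```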
### Delete
Delete operations need an entity id. The entity will be deleted from mongo.

View file

@ -368,7 +368,7 @@ def prepare_workfile_info_update_data(old_doc, new_doc, replace=True):
class AbstractOperation(object):
"""Base operation class.
Opration represent a call into database. The call can create, change or
Operation represents a call into database. The call can create, change or
remove data.
Args:
@ -409,7 +409,7 @@ class AbstractOperation(object):
pass
def to_data(self):
"""Convert opration to data that can be converted to json or others.
"""Convert operation to data that can be converted to json or others.
Warning:
Current state returns ObjectId objects which cannot be parsed by
@ -428,7 +428,7 @@ class AbstractOperation(object):
class CreateOperation(AbstractOperation):
"""Opeartion to create an entity.
"""Operation to create an entity.
Args:
project_name (str): On which project operation will happen.
@ -485,7 +485,7 @@ class CreateOperation(AbstractOperation):
class UpdateOperation(AbstractOperation):
"""Opeartion to update an entity.
"""Operation to update an entity.
Args:
project_name (str): On which project operation will happen.
@ -552,7 +552,7 @@ class UpdateOperation(AbstractOperation):
class DeleteOperation(AbstractOperation):
"""Opeartion to delete an entity.
"""Operation to delete an entity.
Args:
project_name (str): On which project operation will happen.

View file

@ -1,4 +1,5 @@
import os
from openpype.lib import PreLaunchHook
@ -13,6 +14,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
# Execute after workfile template copy
order = 10
app_groups = [
"3dsmax",
"maya",
"nuke",
"nukex",
@ -23,6 +25,7 @@ class AddLastWorkfileToLaunchArgs(PreLaunchHook):
"blender",
"photoshop",
"tvpaint",
"substancepainter",
"aftereffects"
]

View file

@ -3,10 +3,13 @@ from openpype.lib import PreLaunchHook
from openpype.pipeline.workfile import create_workdir_extra_folders
class AddLastWorkfileToLaunchArgs(PreLaunchHook):
"""Add last workfile path to launch arguments.
class CreateWorkdirExtraFolders(PreLaunchHook):
"""Create extra folders for the work directory.
Based on the `project_settings/global/tools/Workfiles/extra_folders` setting,
profile filtering will decide whether extra folders need to be created in
the work directory.
It is not possible to do this the same way for all applications.
"""
# Execute after workfile template copy

View file

@ -7,18 +7,18 @@ class LaunchFoundryAppsWindows(PreLaunchHook):
Nuke is executed "like" python process so it is required to pass
`CREATE_NEW_CONSOLE` flag on windows to trigger creation of new console.
At the same time the newly created console won't create it's own stdout
At the same time the newly created console won't create its own stdout
and stderr handlers so they should not be redirected to DEVNULL.
"""
# Should be as last hook because must change launch arguments to string
order = 1000
app_groups = ["nuke", "nukex", "hiero", "nukestudio"]
app_groups = ["nuke", "nukeassist", "nukex", "hiero", "nukestudio"]
platforms = ["windows"]
def execute(self):
# Change `creationflags` to CREATE_NEW_CONSOLE
# - on Windows will nuke create new window using it's console
# - on Windows nuke will create new window using its console
# Set `stdout` and `stderr` to None so new created console does not
# have redirected output to DEVNULL in build
self.launch_context.kwargs.update({
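For context, a minimal sketch of the launch flags this hook describes (Windows only; the executable name is a placeholder): the process gets its own console via `CREATE_NEW_CONSOLE`, while `stdout`/`stderr` stay `None` so output is not redirected to DEVNULL.

```python
import subprocess

launch_kwargs = {
    "creationflags": subprocess.CREATE_NEW_CONSOLE,  # new console window (Windows)
    "stdout": None,  # keep output attached to the new console
    "stderr": None,
}

# subprocess.Popen(["Nuke13.2.exe", "--nukex"], **launch_kwargs)  # placeholder executable
```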

View file

@ -0,0 +1,53 @@
from openpype.lib import PreLaunchHook
from openpype.pipeline.colorspace import (
get_imageio_config
)
from openpype.pipeline.template_data import get_template_data_with_names
class OCIOEnvHook(PreLaunchHook):
"""Set OCIO environment variable for hosts that use OpenColorIO."""
order = 0
hosts = [
"substancepainter",
"fusion",
"blender",
"aftereffects",
"max",
"houdini",
"maya",
"nuke",
"hiero",
"resolve"
]
def execute(self):
"""Hook entry method."""
template_data = get_template_data_with_names(
project_name=self.data["project_name"],
asset_name=self.data["asset_name"],
task_name=self.data["task_name"],
host_name=self.host_name,
system_settings=self.data["system_settings"]
)
config_data = get_imageio_config(
project_name=self.data["project_name"],
host_name=self.host_name,
project_settings=self.data["project_settings"],
anatomy_data=template_data,
anatomy=self.data["anatomy"]
)
if config_data:
ocio_path = config_data["path"]
self.log.info(
f"Setting OCIO environment to config path: {ocio_path}")
self.launch_context.env["OCIO"] = ocio_path
else:
self.log.debug("OCIO not set or enabled")

View file

@ -2,12 +2,13 @@
Idea for current dirmap implementation was used from Maya where is possible to
enter source and destination roots and maya will try each found source
in referenced file replace with each destionation paths. First path which
in referenced file replace with each destination paths. First path which
exists is used.
"""
import os
from abc import ABCMeta, abstractmethod
import platform
import six
@ -38,7 +39,6 @@ class HostDirmap(object):
self._project_settings = project_settings
self._sync_module = sync_module # to limit reinit of Modules
self._log = None
self._mapping = None # cache mapping
@property
def sync_module(self):
@ -69,29 +69,28 @@ class HostDirmap(object):
"""Run host dependent remapping from source_path to destination_path"""
pass
def process_dirmap(self):
def process_dirmap(self, mapping=None):
# type: (dict) -> None
"""Go through all paths in Settings and set them using `dirmap`.
If the artist has Site Sync enabled, take the dirmap mapping directly from
Local Settings when the artist is syncing a workfile locally.
Args:
mapping (dict): Mapping with 'source-path' and 'destination-path' lists. Queried from settings and Local Settings if not passed.
"""
if not self._mapping:
self._mapping = self.get_mappings(self.project_settings)
if not self._mapping:
if not mapping:
mapping = self.get_mappings()
if not mapping:
return
self.log.info("Processing directory mapping ...")
self.on_enable_dirmap()
self.log.info("mapping:: {}".format(self._mapping))
for k, sp in enumerate(self._mapping["source-path"]):
dst = self._mapping["destination-path"][k]
for k, sp in enumerate(mapping["source-path"]):
dst = mapping["destination-path"][k]
try:
# add trailing slash if missing
sp = os.path.join(sp, '')
dst = os.path.join(dst, '')
print("{} -> {}".format(sp, dst))
self.dirmap_routine(sp, dst)
except IndexError:
@ -109,28 +108,24 @@ class HostDirmap(object):
)
continue
def get_mappings(self, project_settings):
def get_mappings(self):
"""Get translation from source-path to destination-path.
It checks if Site Sync is enabled and the user chose to use the local
site; in that case the configuration in Local Settings takes precedence.
"""
local_mapping = self._get_local_sync_dirmap(project_settings)
dirmap_label = "{}-dirmap".format(self.host_name)
if (
not self.project_settings[self.host_name].get(dirmap_label)
and not local_mapping
):
return {}
mapping_settings = self.project_settings[self.host_name][dirmap_label]
mapping_enabled = mapping_settings["enabled"] or bool(local_mapping)
mapping_sett = self.project_settings[self.host_name].get(dirmap_label,
{})
local_mapping = self._get_local_sync_dirmap()
mapping_enabled = mapping_sett.get("enabled") or bool(local_mapping)
if not mapping_enabled:
return {}
mapping = (
local_mapping
or mapping_settings["paths"]
or mapping_sett["paths"]
or {}
)
@ -140,28 +135,27 @@ class HostDirmap(object):
or not mapping.get("source-path")
):
return {}
self.log.info("Processing directory mapping ...")
self.log.info("mapping:: {}".format(mapping))
return mapping
def _get_local_sync_dirmap(self, project_settings):
def _get_local_sync_dirmap(self):
"""
Returns dirmap if sync to local project is enabled.
Only valid mapping is from roots of remote site to local site set
in Local Settings.
Args:
project_settings (dict)
Returns:
dict : { "source-path": [XXX], "destination-path": [YYYY]}
"""
project_name = os.getenv("AVALON_PROJECT")
mapping = {}
if not project_settings["global"]["sync_server"]["enabled"]:
if (not self.sync_module.enabled or
project_name not in self.sync_module.get_enabled_projects()):
return mapping
project_name = os.getenv("AVALON_PROJECT")
active_site = self.sync_module.get_local_normalized_site(
self.sync_module.get_active_site(project_name))
remote_site = self.sync_module.get_local_normalized_site(
@ -170,11 +164,7 @@ class HostDirmap(object):
"active {} - remote {}".format(active_site, remote_site)
)
if (
active_site == "local"
and project_name in self.sync_module.get_enabled_projects()
and active_site != remote_site
):
if active_site == "local" and active_site != remote_site:
sync_settings = self.sync_module.get_sync_project_setting(
project_name,
exclude_locals=False,
@ -187,11 +177,27 @@ class HostDirmap(object):
self.log.debug("local overrides {}".format(active_overrides))
self.log.debug("remote overrides {}".format(remote_overrides))
current_platform = platform.system().lower()
remote_provider = self.sync_module.get_provider_for_site(
project_name, remote_site
)
# dirmap makes sense only with a regular disk provider; in the workfile
# there won't be a root pointing to a cloud or sftp provider
if remote_provider != "local_drive":
remote_site = "studio"
for root_name, active_site_dir in active_overrides.items():
remote_site_dir = (
remote_overrides.get(root_name)
or sync_settings["sites"][remote_site]["root"][root_name]
)
if isinstance(remote_site_dir, dict):
remote_site_dir = remote_site_dir.get(current_platform)
if not remote_site_dir:
continue
if os.path.isdir(active_site_dir):
if "destination-path" not in mapping:
mapping["destination-path"] = []

View file

@ -1,3 +1,4 @@
import os
import logging
import contextlib
from abc import ABCMeta, abstractproperty
@ -17,7 +18,7 @@ class HostBase(object):
Compared to 'avalon' concept:
What was before considered as functions in host implementation folder. The
host implementation should primarily care about adding ability of creation
(mark subsets to be published) and optionaly about referencing published
(mark subsets to be published) and optionally about referencing published
representations as containers.
Host may need to extend some functionality like working with workfiles
@ -100,30 +101,49 @@ class HostBase(object):
pass
def get_current_project_name(self):
"""
Returns:
Union[str, None]: Current project name.
"""
return os.environ.get("AVALON_PROJECT")
def get_current_asset_name(self):
"""
Returns:
Union[str, None]: Current asset name.
"""
return os.environ.get("AVALON_ASSET")
def get_current_task_name(self):
"""
Returns:
Union[str, None]: Current task name.
"""
return os.environ.get("AVALON_TASK")
def get_current_context(self):
"""Get current context information.
This method should be used to get current context of host. Usage of
this method can be crutial for host implementations in DCCs where
this method can be crucial for host implementations in DCCs where
can be opened multiple workfiles at one moment and change of context
can't be catched properly.
can't be caught properly.
Default implementation returns values from 'legacy_io.Session'.
Returns:
dict: Context with 3 keys 'project_name', 'asset_name' and
'task_name'. All of them can be 'None'.
Dict[str, Union[str, None]]: Context with 3 keys 'project_name',
'asset_name' and 'task_name'. All of them can be 'None'.
"""
from openpype.pipeline import legacy_io
if legacy_io.is_installed():
legacy_io.install()
return {
"project_name": legacy_io.Session["AVALON_PROJECT"],
"asset_name": legacy_io.Session["AVALON_ASSET"],
"task_name": legacy_io.Session["AVALON_TASK"]
"project_name": self.get_current_project_name(),
"asset_name": self.get_current_asset_name(),
"task_name": self.get_current_task_name()
}
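Since these accessors are now the single source of context, a DCC integration that can hold several workfiles at once could override them rather than rely on environment variables; a hypothetical sketch (class name, import path and the `_workfile_context` attribute are illustrative only):

```python
from openpype.host import HostBase  # assumed import path


class MultiWorkfileHost(HostBase):
    """Hypothetical host that tracks context per opened workfile."""

    name = "multiworkfile"

    def __init__(self):
        super(MultiWorkfileHost, self).__init__()
        # Context stored per host instance instead of globally in environment.
        self._workfile_context = {}

    def get_current_project_name(self):
        return self._workfile_context.get("project_name")

    def get_current_asset_name(self):
        return self._workfile_context.get("asset_name")

    def get_current_task_name(self):
        return self._workfile_context.get("task_name")
```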
def get_context_title(self):

View file

@ -81,7 +81,7 @@ class ILoadHost:
@abstractmethod
def get_containers(self):
"""Retreive referenced containers from scene.
"""Retrieve referenced containers from scene.
This can be implemented in hosts where referencing can be used.
@ -191,7 +191,7 @@ class IWorkfileHost:
@abstractmethod
def get_current_workfile(self):
"""Retreive path to current opened file.
"""Retrieve path to current opened file.
Returns:
str: Path to file which is currently opened.
@ -220,8 +220,8 @@ class IWorkfileHost:
Default implementation keeps workdir untouched.
Warnings:
We must handle this modification with more sofisticated way because
this can't be called out of DCC so opening of last workfile
We must handle this modification with more sophisticated way
because this can't be called out of DCC so opening of last workfile
(calculated before DCC is launched) is complicated. Also breaking
defined work template is not a good idea.
Only place where it's really used and can make sense is Maya. There
@ -302,7 +302,7 @@ class IPublishHost:
required methods.
Returns:
list[str]: Missing method implementations for new publsher
list[str]: Missing method implementations for new publisher
workflow.
"""

View file

@ -4,36 +4,21 @@ Anything that isn't defined here is INTERNAL and unreliable for external use.
"""
from .launch_logic import (
from .ws_stub import (
get_stub,
stub,
)
from .pipeline import (
AfterEffectsHost,
ls,
get_asset_settings,
install,
uninstall,
list_instances,
remove_instance,
containerise,
get_context_data,
update_context_data,
get_context_title
)
from .workio import (
file_extensions,
has_unsaved_changes,
save_file,
open_file,
current_file,
work_root,
containerise
)
from .lib import (
maintained_selection,
get_extension_manifest_path
get_extension_manifest_path,
get_asset_settings,
set_settings
)
from .plugin import (
@ -42,32 +27,18 @@ from .plugin import (
__all__ = [
# launch_logic
# ws_stub
"get_stub",
"stub",
# pipeline
"ls",
"get_asset_settings",
"install",
"uninstall",
"list_instances",
"remove_instance",
"containerise",
"get_context_data",
"update_context_data",
"get_context_title",
"file_extensions",
"has_unsaved_changes",
"save_file",
"open_file",
"current_file",
"work_root",
# lib
"maintained_selection",
"get_extension_manifest_path",
"get_asset_settings",
"set_settings",
# plugin
"AfterEffectsLoader"

View file

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<ExtensionManifest Version="8.0" ExtensionBundleId="com.openpype.AE.panel" ExtensionBundleVersion="1.0.23"
ExtensionBundleName="openpype" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ExtensionManifest Version="8.0" ExtensionBundleId="com.openpype.AE.panel" ExtensionBundleVersion="1.0.25"
ExtensionBundleName="com.openpype.AE.panel" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<ExtensionList>
<Extension Id="com.openpype.AE.panel" Version="1.0" />
</ExtensionList>

View file

@ -2,7 +2,7 @@
<html>
<head>
<meta charset="utf-8">
<link rel="stylesheet" href="css/topcoat-desktop-dark.min.css"/>
<link id="hostStyle" rel="stylesheet" href="css/styles.css"/>
@ -25,11 +25,11 @@
<title></title>
<script src="js/libs/jquery-2.0.2.min.js"></script>
<script type=text/javascript>
$(function() {
$("a#workfiles-button").bind("click", function() {
RPC.call('AfterEffects.workfiles_route').then(function (data) {
}, function (error) {
alert(error);
@ -37,18 +37,7 @@
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#creator-button").bind("click", function() {
RPC.call('AfterEffects.creator_route').then(function (data) {
}, function (error) {
alert(error);
});
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#loader-button").bind("click", function() {
@ -59,7 +48,7 @@
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#publish-button").bind("click", function() {
@ -70,7 +59,7 @@
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#sceneinventory-button").bind("click", function() {
@ -81,18 +70,40 @@
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#subsetmanager-button").bind("click", function() {
RPC.call('AfterEffects.subsetmanager_route').then(function (data) {
}, function (error) {
alert(error);
});
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#setresolution-button").bind("click", function() {
RPC.call('AfterEffects.setresolution_route').then(function (data) {
}, function (error) {
alert(error);
});
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#setframes-button").bind("click", function() {
RPC.call('AfterEffects.setframes_route').then(function (data) {
}, function (error) {
alert(error);
});
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#setall-button").bind("click", function() {
RPC.call('AfterEffects.setall_route').then(function (data) {
}, function (error) {
alert(error);
});
});
});
</script>
<script type=text/javascript>
$(function() {
$("a#experimental-button").bind("click", function() {
@ -102,27 +113,28 @@
});
});
});
</script>
</script>
</head>
<body class="hostElt">
<div id="content">
<div>
<div id="content">
<div>
<div></div><a href=# id=workfiles-button><button class="hostFontSize">Workfiles...</button></a></div>
<div> <a href=# id=creator-button><button class="hostFontSize">Create...</button></a></div>
<div><a href=# id=loader-button><button class="hostFontSize">Load...</button></a></div>
<div><a href=# id=publish-button><button class="hostFontSize">Publish...</button></a></div>
<div><a href=# id=sceneinventory-button><button class="hostFontSize">Manage...</button></a></div>
<div><a href=# id=subsetmanager-button><button class="hostFontSize">Subset Manager...</button></a></div>
<div><a href=# id=setresolution-button><button class="hostFontSize">Set Resolution</button></a></div>
<div><a href=# id=setframes-button><button class="hostFontSize">Set Frame Range</button></a></div>
<div><a href=# id=setall-button><button class="hostFontSize">Apply All Settings</button></a></div>
<div><a href=# id=experimental-button><button class="hostFontSize">Experimental Tools...</button></a></div>
</div>
</div>
</div>
<!-- <script src="js/libs/PlayerDebugMode"></script> -->
<script src="js/libs/wsrpc.js"></script>
<script src="js/libs/loglevel.min.js"></script>
@ -131,6 +143,6 @@
<script src="js/themeManager.js"></script>
<script src="js/main.js"></script>
</body>
</html>
</html>

View file

@ -4,7 +4,7 @@ indent: 4, maxerr: 50 */
var csInterface = new CSInterface();
log.warn("script start");
WSRPC.DEBUG = false;
@ -14,7 +14,7 @@ WSRPC.TRACE = false;
async function startUp(url){
promis = runEvalScript("getEnv('" + url + "')");
var res = await promis;
var res = await promis;
log.warn("res: " + res);
promis = runEvalScript("getEnv('OPENPYPE_DEBUG')");
@ -56,7 +56,7 @@ function get_extension_version(){
}
function main(websocket_url){
// creates connection to 'websocket_url', registers routes
// creates connection to 'websocket_url', registers routes
var default_url = 'ws://localhost:8099/ws/';
if (websocket_url == ''){
@ -66,7 +66,7 @@ function main(websocket_url){
RPC.connect();
log.warn("connected");
log.warn("connected");
RPC.addRoute('AfterEffects.open', function (data) {
log.warn('Server called client route "open":', data);
@ -88,7 +88,7 @@ function main(websocket_url){
});
RPC.addRoute('AfterEffects.get_active_document_name', function (data) {
log.warn('Server called client route ' +
log.warn('Server called client route ' +
'"get_active_document_name":', data);
return runEvalScript("getActiveDocumentName()")
.then(function(result){
@ -98,7 +98,7 @@ function main(websocket_url){
});
RPC.addRoute('AfterEffects.get_active_document_full_name', function (data){
log.warn('Server called client route ' +
log.warn('Server called client route ' +
'"get_active_document_full_name":', data);
return runEvalScript("getActiveDocumentFullName()")
.then(function(result){
@ -118,7 +118,7 @@ function main(websocket_url){
});
});
RPC.addRoute('AfterEffects.get_selected_items', function (data) {
log.warn('Server called client route "get_selected_items":', data);
return runEvalScript("getSelectedItems(" + data.comps + "," +
@ -194,23 +194,25 @@ function main(websocket_url){
});
});
RPC.addRoute('AfterEffects.get_work_area', function (data) {
log.warn('Server called client route "get_work_area":', data);
return runEvalScript("getWorkArea(" + data.item_id + ")")
RPC.addRoute('AfterEffects.get_comp_properties', function (data) {
log.warn('Server called client route "get_comp_properties":', data);
return runEvalScript("getCompProperties(" + data.item_id + ")")
.then(function(result){
log.warn("getWorkArea: " + result);
log.warn("get_comp_properties: " + result);
return result;
});
});
RPC.addRoute('AfterEffects.set_work_area', function (data) {
RPC.addRoute('AfterEffects.set_comp_properties', function (data) {
log.warn('Server called client route "set_work_area":', data);
return runEvalScript("setWorkArea(" + data.item_id + ',' +
return runEvalScript("setCompProperties(" + data.item_id + ',' +
data.start + ',' +
data.duration + ',' +
data.frame_rate + ")")
data.frame_rate + ',' +
data.width + ',' +
data.height + ")")
.then(function(result){
log.warn("getWorkArea: " + result);
log.warn("set_comp_properties: " + result);
return result;
});
});
@ -237,7 +239,7 @@ function main(websocket_url){
RPC.addRoute('AfterEffects.get_render_info', function (data) {
log.warn('Server called client route "get_render_info":', data);
return runEvalScript("getRenderInfo()")
return runEvalScript("getRenderInfo(" + data.comp_id +")")
.then(function(result){
log.warn("get_render_info: " + result);
return result;
@ -255,7 +257,7 @@ function main(websocket_url){
RPC.addRoute('AfterEffects.import_background', function (data) {
log.warn('Server called client route "import_background":', data);
return runEvalScript("importBackground(" + data.comp_id + ", " +
return runEvalScript("importBackground(" + data.comp_id + ", " +
"'" + data.comp_name + "', " +
JSON.stringify(data.files) + ")")
.then(function(result){
@ -266,7 +268,7 @@ function main(websocket_url){
RPC.addRoute('AfterEffects.reload_background', function (data) {
log.warn('Server called client route "reload_background":', data);
return runEvalScript("reloadBackground(" + data.comp_id + ", " +
return runEvalScript("reloadBackground(" + data.comp_id + ", " +
"'" + data.comp_name + "', " +
JSON.stringify(data.files) + ")")
.then(function(result){
@ -289,7 +291,7 @@ function main(websocket_url){
RPC.addRoute('AfterEffects.render', function (data) {
log.warn('Server called client route "render":', data);
var escapedPath = EscapeStringForJSX(data.folder_url);
return runEvalScript("render('" + escapedPath +"')")
return runEvalScript("render('" + escapedPath +"', " + data.comp_id + ")")
.then(function(result){
log.warn("render: " + result);
return result;
@ -314,6 +316,16 @@ function main(websocket_url){
log.warn('Server called client route "close":', data);
return runEvalScript("close()");
});
RPC.addRoute('AfterEffects.print_msg', function (data) {
log.warn('Server called client route "print_msg":', data);
var escaped_msg = EscapeStringForJSX(data.msg);
return runEvalScript("printMsg('" + escaped_msg +"')")
.then(function(result){
log.warn("print_msg: " + result);
return result;
});
});
}
/** main entry point **/
@ -323,17 +335,17 @@ startUp("WEBSOCKET_URL");
'use strict';
var csInterface = new CSInterface();
function init() {
themeManager.init();
$("#btn_test").click(function () {
csInterface.evalScript('sayHello()');
});
}
init();
}());

View file

@ -1,7 +1,7 @@
/*jslint vars: true, plusplus: true, devel: true, nomen: true, regexp: true,
indent: 4, maxerr: 50 */
/*global $, Folder*/
#include "../js/libs/json.js";
//@include "../js/libs/json.js"
/* All public API function should return JSON! */
@ -29,13 +29,13 @@ function getEnv(variable){
function getMetadata(){
/**
* Returns payload in 'Label' field of project's metadata
*
*
**/
if (ExternalObject.AdobeXMPScript === undefined){
ExternalObject.AdobeXMPScript =
new ExternalObject('lib:AdobeXMPScript');
}
var proj = app.project;
var meta = new XMPMeta(app.project.xmpPacket);
var schemaNS = XMPMeta.getNamespaceURI("xmp");
@ -53,7 +53,7 @@ function getMetadata(){
function imprint(payload){
/**
* Stores payload in 'Label' field of project's metadata
*
*
* Args:
* payload (string): json content
*/
@ -61,14 +61,14 @@ function imprint(payload){
ExternalObject.AdobeXMPScript =
new ExternalObject('lib:AdobeXMPScript');
}
var proj = app.project;
var meta = new XMPMeta(app.project.xmpPacket);
var schemaNS = XMPMeta.getNamespaceURI("xmp");
var label = "xmp:Label";
meta.setProperty(schemaNS, label, payload);
app.project.xmpPacket = meta.serialize();
}
@ -116,14 +116,14 @@ function getItems(comps, folders, footages){
/**
* Returns JSON representation of compositions and
* if 'collectLayers' then layers in comps too.
*
*
* Args:
* comps (bool): return selected compositions
* folders (bool): return folders
* footages (bool): return FootageItem
* Returns:
* (list) of JSON items
*/
*/
var items = []
for (i = 1; i <= app.project.items.length; ++i){
var item = app.project.items[i];
@ -142,14 +142,14 @@ function getItems(comps, folders, footages){
function getSelectedItems(comps, folders, footages){
/**
* Returns list of selected items from Project menu
*
*
* Args:
* comps (bool): return selected compositions
* folders (bool): return folders
* footages (bool): return FootageItem
* Returns:
* (list) of JSON items
*/
*/
var items = []
for (i = 0; i < app.project.selection.length; ++i){
var item = app.project.selection[i];
@ -166,9 +166,9 @@ function getSelectedItems(comps, folders, footages){
function _getItem(item, comps, folders, footages){
/**
* Auxiliary function as project items and selections
* Auxiliary function as project items and selections
* are indexed in different way :/
* Refactor
* Refactor
*/
var item_type = '';
if (item instanceof FolderItem){
@ -189,7 +189,7 @@ function _getItem(item, comps, folders, footages){
return "{}";
}
}
var item = {"name": item.name,
"id": item.id,
"type": item_type};
@ -200,7 +200,7 @@ function importFile(path, item_name, import_options){
/**
* Imports file (image tested for now) as a FootageItem.
* Creates new composition
*
*
* Args:
* path (string): absolute path to image file
* item_name (string): label for composition
@ -218,7 +218,7 @@ function importFile(path, item_name, import_options){
app.beginUndoGroup("Import File");
fp = new File(path);
if (fp.exists){
try {
try {
im_opt = new ImportOptions(fp);
importAsType = import_options["ImportAsType"];
@ -234,18 +234,18 @@ function importFile(path, item_name, import_options){
}
if (importAsType.indexOf('PROJECT') > 0){
im_opt.importAs = ImportAsType.PROJECT;
}
}
}
if ('sequence' in import_options){
im_opt.sequence = true;
}
comp = app.project.importFile(im_opt);
if (app.project.selection.length == 2 &&
app.project.selection[0] instanceof FolderItem){
comp.parentFolder = app.project.selection[0]
comp.parentFolder = app.project.selection[0]
}
} catch (error) {
return _prepareError(error.toString() + importOptions.file.fsName);
@ -283,14 +283,14 @@ function setLabelColor(comp_id, color_idx){
function replaceItem(comp_id, path, item_name){
/**
* Replaces loaded file with new file and updates name
*
*
* Args:
* comp_id (int): id of composition, not a index!
* path (string): absolute path to new file
* item_name (string): new composition name
*/
app.beginUndoGroup("Replace File");
fp = new File(path);
if (!fp.exists){
return _prepareError("File " + path + " not found.");
@ -303,7 +303,7 @@ function replaceItem(comp_id, path, item_name){
}else{
item.replace(fp);
}
item.name = item_name;
} catch (error) {
return _prepareError(error.toString() + path);
@ -319,7 +319,7 @@ function replaceItem(comp_id, path, item_name){
function renameItem(item_id, new_name){
/**
* Renames item with 'item_id' to 'new_name'
*
*
* Args:
* item_id (int): id to search item
* new_name (str)
@ -335,7 +335,7 @@ function renameItem(item_id, new_name){
function deleteItem(item_id){
/**
* Delete any 'item_id'
*
*
* Not restricted only to comp, it could delete
* any item with 'id'
*/
@ -347,38 +347,76 @@ function deleteItem(item_id){
}
}
function getWorkArea(comp_id){
function getCompProperties(comp_id){
/**
* Returns information about workarea - are that will be
* rendered. All calculation will be done in OpenPype,
* easier to modify without redeploy of extension.
*
* Returns information about composition - area that will be
* rendered.
*
* Returns
* (dict)
*/
var item = app.project.itemByID(comp_id);
if (item){
return JSON.stringify({
"workAreaStart": item.displayStartFrame,
"workAreaDuration": item.duration,
"frameRate": item.frameRate});
}else{
var comp = app.project.itemByID(comp_id);
if (!comp){
return _prepareError("There is no composition with "+ comp_id);
}
return JSON.stringify({
"id": comp.id,
"name": comp.name,
"frameStart": comp.displayStartFrame,
"framesDuration": comp.duration * comp.frameRate,
"frameRate": comp.frameRate,
"width": comp.width,
"height": comp.height});
}
function setWorkArea(comp_id, workAreaStart, workAreaDuration, frameRate){
function setCompProperties(comp_id, frameStart, framesCount, frameRate,
width, height){
/**
* Sets work area info from outside (from Ftrack via OpenPype)
*/
var item = app.project.itemByID(comp_id);
if (item){
item.displayStartTime = workAreaStart;
item.duration = workAreaDuration;
item.frameRate = frameRate;
}else{
var comp = app.project.itemByID(comp_id);
if (!comp){
return _prepareError("There is no composition with "+ comp_id);
}
app.beginUndoGroup('change comp properties');
if (frameStart && framesCount && frameRate){
comp.displayStartFrame = frameStart;
comp.duration = framesCount / frameRate;
comp.frameRate = frameRate;
}
if (width && height){
var widthOld = comp.width;
var widthNew = width;
var widthDelta = widthNew - widthOld;
var heightOld = comp.height;
var heightNew = height;
var heightDelta = heightNew - heightOld;
var offset = [widthDelta / 2, heightDelta / 2];
comp.width = widthNew;
comp.height = heightNew;
for (var i = 1, il = comp.numLayers; i <= il; i++) {
var layer = comp.layer(i);
var positionProperty = layer.property('ADBE Transform Group').property('ADBE Position');
if (positionProperty.numKeys > 0) {
for (var j = 1, jl = positionProperty.numKeys; j <= jl; j++) {
var keyValue = positionProperty.keyValue(j);
positionProperty.setValueAtKey(j, keyValue + offset);
}
} else {
var positionValue = positionProperty.value;
positionProperty.setValue(positionValue + offset);
}
}
}
app.endUndoGroup();
}
function save(){
@ -395,41 +433,84 @@ function saveAs(path){
app.project.save(fp = new File(path));
}
function getRenderInfo(){
function getRenderInfo(comp_id){
/***
Get info from render queue.
Currently pulls only file name to parse extension and
Currently pulls only file name to parse extension and
if it is sequence in Python
Args:
comp_id (int): id of composition
Return:
(list) [{file_name:"xx.png", width:00, height:00}]
**/
var item = app.project.itemByID(comp_id);
if (!item){
return _prepareError("Composition with '" + comp_id + "' wasn't found! Recreate publishable instance(s)")
}
var comp_name = item.name;
var output_metadata = []
try{
var render_item = app.project.renderQueue.item(1);
if (render_item.status == RQItemStatus.DONE){
render_item.duplicate(); // create new, cannot change status if DONE
render_item.remove(); // remove existing to limit duplications
render_item = app.project.renderQueue.item(1);
// render_item.duplicate() should create a new item on the renderQueue,
// BUT it works only sometimes; there is some weird synchronization issue.
// This method will always be called before render, so prepare the items
// here to spare the hassle at render time.
for (i = 1; i <= app.project.renderQueue.numItems; ++i){
var render_item = app.project.renderQueue.item(i);
if (render_item.comp.id != comp_id){
continue;
}
if (render_item.status == RQItemStatus.DONE){
render_item.duplicate(); // create new, cannot change status if DONE
render_item.remove(); // remove existing to limit duplications
continue;
}
}
render_item.render = true; // always set render queue to render
var item = render_item.outputModule(1);
// properly validate as `numItems` won't change magically
var comp_id_count = 0;
for (i = 1; i <= app.project.renderQueue.numItems; ++i){
var render_item = app.project.renderQueue.item(i);
if (render_item.comp.id != comp_id){
continue;
}
comp_id_count += 1;
var item = render_item.outputModule(1);
for (j = 1; j<= render_item.numOutputModules; ++j){
var file_url = item.file.toString();
output_metadata.push(
JSON.stringify({
"file_name": file_url,
"width": render_item.comp.width,
"height": render_item.comp.height
})
);
}
}
} catch (error) {
return _prepareError("There is no render queue, create one");
}
var file_url = item.file.toString();
return JSON.stringify({
"file_name": file_url,
"width": render_item.comp.width,
"height": render_item.comp.height
})
if (comp_id_count > 1){
return _prepareError("There cannot be more items in Render Queue for '" + comp_name + "'!")
}
if (comp_id_count == 0){
return _prepareError("There is no item in Render Queue for '" + comp_name + "'! Add composition to Render Queue.")
}
return '[' + output_metadata.join() + ']';
}
function getAudioUrlForComp(comp_id){
/**
* Searches composition for audio layer
*
*
* Only single AVLayer is expected!
* Used for collecting Audio
*
*
* Args:
* comp_id (int): id of composition
* Return:
@ -457,7 +538,7 @@ function addItemAsLayerToComp(comp_id, item_id, found_comp){
/**
* Adds already imported FootageItem ('item_id') as a new
* layer to composition ('comp_id').
*
*
* Args:
* comp_id (int): id of target composition
* item_id (int): FootageItem.id
@ -480,17 +561,17 @@ function addItemAsLayerToComp(comp_id, item_id, found_comp){
function importBackground(comp_id, composition_name, files_to_import){
/**
* Imports backgrounds images to existing or new composition.
*
*
* If comp_id is not provided, new composition is created, basic
* values (width, heights, frameRatio) takes from first imported
* image.
*
*
* Args:
* comp_id (int): id of existing composition (null if new)
* composition_name (str): used when new composition
* composition_name (str): used when new composition
* files_to_import (list): list of absolute paths to import and
* add as layers
*
*
* Returns:
* (str): json representation (id, name, members)
*/
@ -512,7 +593,7 @@ function importBackground(comp_id, composition_name, files_to_import){
}
}
}
if (files_to_import){
for (i = 0; i < files_to_import.length; ++i){
item = _importItem(files_to_import[i]);
@ -524,8 +605,8 @@ function importBackground(comp_id, composition_name, files_to_import){
if (!comp){
folder = app.project.items.addFolder(composition_name);
imported_ids.push(folder.id);
comp = app.project.items.addComp(composition_name, item.width,
item.height, item.pixelAspect,
comp = app.project.items.addComp(composition_name, item.width,
item.height, item.pixelAspect,
1, 26.7); // hardcode defaults
imported_ids.push(comp.id);
comp.parentFolder = folder;
@ -534,7 +615,7 @@ function importBackground(comp_id, composition_name, files_to_import){
item.parentFolder = folder;
addItemAsLayerToComp(comp.id, item.id, comp);
}
}
}
var item = {"name": comp.name,
"id": folder.id,
@ -545,19 +626,19 @@ function importBackground(comp_id, composition_name, files_to_import){
function reloadBackground(comp_id, composition_name, files_to_import){
/**
* Reloads existing composition.
*
*
* It deletes complete composition with encompassing folder, recreates
* from scratch via 'importBackground' functionality.
*
*
* Args:
* comp_id (int): id of existing composition (null if new)
* composition_name (str): used when new composition
* composition_name (str): used when new composition
* files_to_import (list): list of absolute paths to import and
* add as layers
*
*
* Returns:
* (str): json representation (id, name, members)
*
*
*/
var imported_ids = []; // keep track of members of composition
comp = app.project.itemByID(comp_id);
@ -620,7 +701,7 @@ function reloadBackground(comp_id, composition_name, files_to_import){
function _get_file_name(file_url){
/**
* Returns file name without extension from 'file_url'
*
*
* Args:
* file_url (str): full absolute url
* Returns:
@ -635,7 +716,7 @@ function _delete_obsolete_items(folder, new_filenames){
/***
* Goes through 'folder' and removes layers not in new
* background
*
*
* Args:
* folder (FolderItem)
* new_filenames (array): list of layer names in new bg
@ -660,14 +741,14 @@ function _delete_obsolete_items(folder, new_filenames){
function _importItem(file_url){
/**
* Imports 'file_url' as new FootageItem
*
*
* Args:
* file_url (str): file url with content
* Returns:
* (FootageItem)
*/
file_name = _get_file_name(file_url);
//importFile prepared previously to return json
item_json = importFile(file_url, file_name, JSON.stringify({"ImportAsType":"FOOTAGE"}));
item_json = JSON.parse(item_json);
@ -689,30 +770,42 @@ function isFileSequence (item){
return false;
}
function render(target_folder){
function render(target_folder, comp_id){
var out_dir = new Folder(target_folder);
var out_dir = out_dir.fsName;
for (i = 1; i <= app.project.renderQueue.numItems; ++i){
var render_item = app.project.renderQueue.item(i);
var om1 = app.project.renderQueue.item(i).outputModule(1);
var file_name = File.decode( om1.file.name ).replace('℗', ''); // Name contains special character, space?
var omItem1_settable_str = app.project.renderQueue.item(i).outputModule(1).getSettings( GetSettingsFormat.STRING_SETTABLE );
var composition = render_item.comp;
if (composition.id == comp_id){
if (render_item.status == RQItemStatus.DONE){
var new_item = render_item.duplicate();
render_item.remove();
render_item = new_item;
}
if (render_item.status == RQItemStatus.DONE){
render_item.duplicate();
render_item.remove();
continue;
render_item.render = true;
var om1 = app.project.renderQueue.item(i).outputModule(1);
var file_name = File.decode( om1.file.name ).replace('℗', ''); // Name contains special character, space?
var omItem1_settable_str = app.project.renderQueue.item(i).outputModule(1).getSettings( GetSettingsFormat.STRING_SETTABLE );
var targetFolder = new Folder(target_folder);
if (!targetFolder.exists) {
targetFolder.create();
}
om1.file = new File(targetFolder.fsName + '/' + file_name);
}else{
if (render_item.status != RQItemStatus.DONE){
render_item.render = false;
}
}
var targetFolder = new Folder(target_folder);
if (!targetFolder.exists) {
targetFolder.create();
}
om1.file = new File(targetFolder.fsName + '/' + file_name);
}
app.beginSuppressDialogs();
app.project.renderQueue.render();
app.endSuppressDialogs(false);
}
function close(){
@ -724,6 +817,10 @@ function getAppVersion(){
return _prepareSingleValue(app.version);
}
function printMsg(msg){
alert(msg);
}
function _prepareSingleValue(value){
return JSON.stringify({"result": value})
}

View file

@ -1,49 +1,77 @@
import os
import sys
import subprocess
import collections
import logging
import asyncio
import functools
import traceback
from wsrpc_aiohttp import (
WebSocketRoute,
WebSocketAsync
)
from qtpy import QtCore
from qtpy import QtCore, QtWidgets
from openpype.lib import Logger
from openpype.pipeline import legacy_io
from openpype.tools.utils import host_tools
from openpype.tests.lib import is_in_tests
from openpype.pipeline import install_host, legacy_io
from openpype.modules import ModulesManager
from openpype.tools.adobe_webserver.app import WebServerTool
from .ws_stub import AfterEffectsServerStub
from .ws_stub import get_stub
from .lib import set_settings
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
class ConnectionNotEstablishedYet(Exception):
pass
def safe_excepthook(*args):
traceback.print_exception(*args)
def get_stub():
"""
Convenience function to get server RPC stub to call methods directed
for host (Photoshop).
It expects already created connection, started from client.
Currently created when panel is opened (PS: Window>Extensions>Avalon)
:return: <PhotoshopClientStub> where functions could be called from
"""
ae_stub = AfterEffectsServerStub()
if not ae_stub.client:
raise ConnectionNotEstablishedYet("Connection is not created yet")
def main(*subprocess_args):
"""Main entrypoint to AE launching, called from pre hook."""
sys.excepthook = safe_excepthook
return ae_stub
from openpype.hosts.aftereffects.api import AfterEffectsHost
host = AfterEffectsHost()
install_host(host)
def stub():
return get_stub()
os.environ["OPENPYPE_LOG_NO_COLORS"] = "False"
app = QtWidgets.QApplication([])
app.setQuitOnLastWindowClosed(False)
launcher = ProcessLauncher(subprocess_args)
launcher.start()
if os.environ.get("HEADLESS_PUBLISH"):
manager = ModulesManager()
webpublisher_addon = manager["webpublisher"]
launcher.execute_in_main_thread(
functools.partial(
webpublisher_addon.headless_publish,
log,
"CloseAE",
is_in_tests()
)
)
elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
save = False
if os.getenv("WORKFILES_SAVE_AS"):
save = True
launcher.execute_in_main_thread(
lambda: host_tools.show_tool_by_name("workfiles", save=save)
)
sys.exit(app.exec_())
def show_tool_by_name(tool_name):
@ -55,6 +83,7 @@ def show_tool_by_name(tool_name):
class ProcessLauncher(QtCore.QObject):
"""Launches webserver, connects to it, runs main thread."""
route_name = "AfterEffects"
_main_thread_callbacks = collections.deque()
@ -284,9 +313,6 @@ class AfterEffectsRoute(WebSocketRoute):
return await self.socket.call('aftereffects.read')
# panel routes for tools
async def creator_route(self):
self._tool_route("creator")
async def workfiles_route(self):
self._tool_route("workfiles")
@ -294,13 +320,19 @@ class AfterEffectsRoute(WebSocketRoute):
self._tool_route("loader")
async def publish_route(self):
self._tool_route("publish")
self._tool_route("publisher")
async def sceneinventory_route(self):
self._tool_route("sceneinventory")
async def subsetmanager_route(self):
self._tool_route("subsetmanager")
async def setresolution_route(self):
self._settings_route(False, True)
async def setframes_route(self):
self._settings_route(True, False)
async def setall_route(self):
self._settings_route(True, True)
async def experimental_tools_route(self):
self._tool_route("experimental_tools")
@ -315,3 +347,13 @@ class AfterEffectsRoute(WebSocketRoute):
# Required return statement.
return "nothing"
def _settings_route(self, frames, resolution):
partial_method = functools.partial(set_settings,
frames,
resolution)
ProcessLauncher.execute_in_main_thread(partial_method)
# Required return statement.
return "nothing"

View file

@ -1,67 +1,17 @@
import os
import sys
import re
import json
import contextlib
import traceback
import logging
from functools import partial
from qtpy import QtWidgets
from openpype.pipeline import install_host
from openpype.modules import ModulesManager
from openpype.tools.utils import host_tools
from .launch_logic import ProcessLauncher, get_stub
from openpype.pipeline.context_tools import get_current_context
from openpype.client import get_asset_by_name
from .ws_stub import get_stub
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
def safe_excepthook(*args):
traceback.print_exception(*args)
def main(*subprocess_args):
sys.excepthook = safe_excepthook
from openpype.hosts.aftereffects import api
install_host(api)
os.environ["OPENPYPE_LOG_NO_COLORS"] = "False"
app = QtWidgets.QApplication([])
app.setQuitOnLastWindowClosed(False)
launcher = ProcessLauncher(subprocess_args)
launcher.start()
if os.environ.get("HEADLESS_PUBLISH"):
manager = ModulesManager()
webpublisher_addon = manager["webpublisher"]
launcher.execute_in_main_thread(
partial(
webpublisher_addon.headless_publish,
log,
"CloseAE",
os.environ.get("IS_TEST")
)
)
elif os.environ.get("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", True):
save = False
if os.getenv("WORKFILES_SAVE_AS"):
save = True
launcher.execute_in_main_thread(
lambda: host_tools.show_tool_by_name("workfiles", save=save)
)
sys.exit(app.exec_())
@contextlib.contextmanager
def maintained_selection():
"""Maintain selection during context."""
@ -133,3 +83,78 @@ def get_background_layers(file_url):
layer.get("filename")).
replace("\\", "/"))
return layers
def get_asset_settings(asset_doc):
"""Get settings on current asset from database.
Returns:
dict: Scene data.
"""
asset_data = asset_doc["data"]
fps = asset_data.get("fps", 0)
frame_start = asset_data.get("frameStart", 0)
frame_end = asset_data.get("frameEnd", 0)
handle_start = asset_data.get("handleStart", 0)
handle_end = asset_data.get("handleEnd", 0)
resolution_width = asset_data.get("resolutionWidth", 0)
resolution_height = asset_data.get("resolutionHeight", 0)
duration = (frame_end - frame_start + 1) + handle_start + handle_end
return {
"fps": fps,
"frameStart": frame_start,
"frameEnd": frame_end,
"handleStart": handle_start,
"handleEnd": handle_end,
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height,
"duration": duration
}
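A minimal usage sketch (editorial illustration, not part of the change; the asset_doc values are hypothetical) showing how the returned duration combines the frame range with handles:

    example_doc = {"data": {
        "fps": 25.0, "frameStart": 1001, "frameEnd": 1100,
        "handleStart": 10, "handleEnd": 10,
        "resolutionWidth": 1920, "resolutionHeight": 1080,
    }}
    settings = get_asset_settings(example_doc)
    # (1100 - 1001 + 1) + 10 + 10 == 120 frames
    assert settings["duration"] == 120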
def set_settings(frames, resolution, comp_ids=None, print_msg=True):
"""Sets number of frames and resolution to selected comps.
Args:
frames (bool): True if set frame info
resolution (bool): True if set resolution
comp_ids (list): specific composition ids, if empty
it tries to look for currently selected
print_msg (bool): True throw JS alert with msg
"""
frame_start = frames_duration = fps = width = height = None
current_context = get_current_context()
asset_doc = get_asset_by_name(current_context["project_name"],
current_context["asset_name"])
settings = get_asset_settings(asset_doc)
msg = ''
if frames:
frame_start = settings["frameStart"] - settings["handleStart"]
frames_duration = settings["duration"]
fps = settings["fps"]
msg += f"frame start:{frame_start}, duration:{frames_duration}, "\
f"fps:{fps}"
if resolution:
width = settings["resolutionWidth"]
height = settings["resolutionHeight"]
msg += f"width:{width} and height:{height}"
stub = get_stub()
if not comp_ids:
comps = stub.get_selected_items(True, False, False)
comp_ids = [comp.id for comp in comps]
if not comp_ids:
stub.print_msg("Select at least one composition to apply settings.")
return
for comp_id in comp_ids:
msg = f"Setting for comp {comp_id} " + msg
log.debug(msg)
stub.set_comp_properties(comp_id, frame_start, frames_duration,
fps, width, height)
if print_msg:
stub.print_msg(msg)
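For orientation, an editorial sketch of how the panel routes in launch_logic.py map onto this helper (the comp id 123 is made up):

    set_settings(True, False)   # "setframes" route - frame start/duration/fps only
    set_settings(False, True)   # "setresolution" route - width/height only
    set_settings(True, True, [123], print_msg=False)  # explicit comp id, no JS alert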

View file

@ -8,15 +8,20 @@ from openpype.lib import Logger, register_event_callback
from openpype.pipeline import (
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,
deregister_creator_plugin_path,
AVALON_CONTAINER_ID,
legacy_io,
)
from openpype.pipeline.load import any_outdated_containers
import openpype.hosts.aftereffects
from .launch_logic import get_stub, ConnectionNotEstablishedYet
from openpype.host import (
HostBase,
IWorkfileHost,
ILoadHost,
IPublishHost
)
from .launch_logic import get_stub
from .ws_stub import ConnectionNotEstablishedYet
log = Logger.get_logger(__name__)
@ -30,27 +35,139 @@ LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
def install():
print("Installing Pype config...")
class AfterEffectsHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost):
name = "aftereffects"
pyblish.api.register_host("aftereffects")
pyblish.api.register_plugin_path(PUBLISH_PATH)
def __init__(self):
self._stub = None
super(AfterEffectsHost, self).__init__()
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
log.info(PUBLISH_PATH)
@property
def stub(self):
"""
Handle pulling stub from AE to run operations on host
Returns:
(AEServerStub) or None
"""
if self._stub:
return self._stub
pyblish.api.register_callback(
"instanceToggled", on_pyblish_instance_toggled
)
try:
stub = get_stub()  # only after After Effects is up
except ConnectionNotEstablishedYet:
print("Not connected yet, ignoring")
return
register_event_callback("application.launched", application_launch)
self._stub = stub
return self._stub
def install(self):
print("Installing Pype config...")
def uninstall():
pyblish.api.deregister_plugin_path(PUBLISH_PATH)
deregister_loader_plugin_path(LOAD_PATH)
deregister_creator_plugin_path(CREATE_PATH)
pyblish.api.register_host("aftereffects")
pyblish.api.register_plugin_path(PUBLISH_PATH)
register_loader_plugin_path(LOAD_PATH)
register_creator_plugin_path(CREATE_PATH)
log.info(PUBLISH_PATH)
pyblish.api.register_callback(
"instanceToggled", on_pyblish_instance_toggled
)
register_event_callback("application.launched", application_launch)
def get_workfile_extensions(self):
return [".aep"]
def save_workfile(self, dst_path=None):
self.stub.saveAs(dst_path, True)
def open_workfile(self, filepath):
self.stub.open(filepath)
return True
def get_current_workfile(self):
try:
full_name = get_stub().get_active_document_full_name()
if full_name and full_name != "null":
return os.path.normpath(full_name).replace("\\", "/")
except ValueError:
print("Nothing opened")
pass
return None
def get_containers(self):
return ls()
def get_context_data(self):
meta = self.stub.get_metadata()
for item in meta:
if item.get("id") == "publish_context":
item.pop("id")
return item
return {}
def update_context_data(self, data, changes):
item = data
item["id"] = "publish_context"
self.stub.imprint(item["id"], item)
# created instances section
def list_instances(self):
"""List all created instances from current workfile which
will be published.
Pulls from File > File Info
For SubsetManager
Returns:
(list) of dictionaries matching instances format
"""
stub = self.stub
if not stub:
return []
instances = []
layers_meta = stub.get_metadata()
for instance in layers_meta:
if instance.get("id") == "pyblish.avalon.instance":
instances.append(instance)
return instances
def remove_instance(self, instance):
"""Remove instance from current workfile metadata.
Updates metadata of current file in File > File Info and removes
icon highlight on group layer.
For SubsetManager
Args:
instance (dict): instance representation from subsetmanager model
"""
stub = self.stub
if not stub:
return
inst_id = instance.get("instance_id") or instance.get("uuid") # legacy
if not inst_id:
log.warning("No instance identifier for {}".format(instance))
return
stub.remove_instance(inst_id)
if instance.get("members"):
item = stub.get_item(instance["members"][0])
if item:
stub.rename_item(item.id,
item.name.replace(stub.PUBLISH_ICON, ''))
def application_launch():
@ -63,35 +180,6 @@ def on_pyblish_instance_toggled(instance, old_value, new_value):
instance[0].Visible = new_value
def get_asset_settings(asset_doc):
"""Get settings on current asset from database.
Returns:
dict: Scene data.
"""
asset_data = asset_doc["data"]
fps = asset_data.get("fps")
frame_start = asset_data.get("frameStart")
frame_end = asset_data.get("frameEnd")
handle_start = asset_data.get("handleStart")
handle_end = asset_data.get("handleEnd")
resolution_width = asset_data.get("resolutionWidth")
resolution_height = asset_data.get("resolutionHeight")
duration = (frame_end - frame_start + 1) + handle_start + handle_end
return {
"fps": fps,
"frameStart": frame_start,
"frameEnd": frame_end,
"handleStart": handle_start,
"handleEnd": handle_end,
"resolutionWidth": resolution_width,
"resolutionHeight": resolution_height,
"duration": duration
}
def ls():
"""Yields containers from active AfterEffects document.
@ -191,102 +279,17 @@ def containerise(name,
return comp
# created instances section
def list_instances():
"""
List all created instances from current workfile which
will be published.
def cache_and_get_instances(creator):
"""Cache instances in shared data.
Pulls from File > File Info
For SubsetManager
Returns:
(list) of dictionaries matching instances format
"""
stub = _get_stub()
if not stub:
return []
instances = []
layers_meta = stub.get_metadata()
for instance in layers_meta:
if instance.get("id") == "pyblish.avalon.instance":
instances.append(instance)
return instances
def remove_instance(instance):
"""
Remove instance from current workfile metadata.
Updates metadata of current file in File > File Info and removes
icon highlight on group layer.
For SubsetManager
Args:
instance (dict): instance representation from subsetmanager model
"""
stub = _get_stub()
if not stub:
return
inst_id = instance.get("instance_id") or instance.get("uuid") # legacy
if not inst_id:
log.warning("No instance identifier for {}".format(instance))
return
stub.remove_instance(inst_id)
if instance.get("members"):
item = stub.get_item(instance["members"][0])
if item:
stub.rename_item(item.id,
item.name.replace(stub.PUBLISH_ICON, ''))
# new publisher section
def get_context_data():
meta = _get_stub().get_metadata()
for item in meta:
if item.get("id") == "publish_context":
item.pop("id")
return item
return {}
def update_context_data(data, changes):
item = data
item["id"] = "publish_context"
_get_stub().imprint(item["id"], item)
def get_context_title():
"""Returns title for Creator window"""
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
return "{}/{}/{}".format(project_name, asset_name, task_name)
def _get_stub():
"""
Handle pulling stub from PS to run operations on host
Storing all instances as a list as legacy instances might still be present.
Args:
creator (Creator): Plugin which would like to get instances from host.
Returns:
(AEServerStub) or None
List[]: list of all instances stored in metadata
"""
try:
stub = get_stub() # only after Photoshop is up
except ConnectionNotEstablishedYet:
print("Not connected yet, ignoring")
return
if not stub.get_active_document_name():
return
return stub
shared_key = "openpype.photoshop.instances"
if shared_key not in creator.collection_shared_data:
creator.collection_shared_data[shared_key] = \
creator.host.list_instances()
return creator.collection_shared_data[shared_key]

View file

@ -1,53 +0,0 @@
"""Host API required Work Files tool"""
import os
from .launch_logic import get_stub
def file_extensions():
return [".aep"]
def has_unsaved_changes():
if _active_document():
return not get_stub().is_saved()
return False
def save_file(filepath):
get_stub().saveAs(filepath, True)
def open_file(filepath):
get_stub().open(filepath)
return True
def current_file():
try:
full_name = get_stub().get_active_document_full_name()
if full_name and full_name != "null":
return os.path.normpath(full_name).replace("\\", "/")
except ValueError:
print("Nothing opened")
pass
return None
def work_root(session):
return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")
def _active_document():
# TODO merge with current_file - even in extension
document_name = None
try:
document_name = get_stub().get_active_document_name()
except ValueError:
print("Nothing opened")
pass
return document_name

View file

@ -11,6 +11,10 @@ from wsrpc_aiohttp import WebSocketAsync
from openpype.tools.adobe_webserver.app import WebServerTool
class ConnectionNotEstablishedYet(Exception):
pass
@attr.s
class AEItem(object):
"""
@ -24,8 +28,8 @@ class AEItem(object):
# all imported elements, single for
# regular image, array for Backgrounds
members = attr.ib(factory=list)
workAreaStart = attr.ib(default=None)
workAreaDuration = attr.ib(default=None)
frameStart = attr.ib(default=None)
framesDuration = attr.ib(default=None)
frameRate = attr.ib(default=None)
file_name = attr.ib(default=None)
instance_id = attr.ib(default=None) # New Publisher
@ -80,7 +84,7 @@ class AfterEffectsServerStub():
Get complete stored JSON with metadata from AE.Metadata.Label
field.
It contains containers loaded by any Loader OR instances creted
It contains containers loaded by any Loader OR instances created
by Creator.
Returns:
@ -355,42 +359,50 @@ class AfterEffectsServerStub():
return self._handle_return(res)
def get_work_area(self, item_id):
""" Get work are information for render purposes
def get_comp_properties(self, comp_id):
""" Get composition information for render purposes
Returns startFrame, frameDuration, fps, width, height.
Args:
item_id (int):
comp_id (int):
Returns:
(AEItem)
"""
res = self.websocketserver.call(self.client.call
('AfterEffects.get_work_area',
item_id=item_id
('AfterEffects.get_comp_properties',
item_id=comp_id
))
records = self._to_records(self._handle_return(res))
if records:
return records.pop()
def set_work_area(self, item, start, duration, frame_rate):
def set_comp_properties(self, comp_id, start, duration, frame_rate,
width, height):
"""
Set work area to predefined values (from Ftrack).
Work area directs what gets rendered.
Beware of rounding, AE expects seconds, not frames directly.
Args:
item (dict):
start (float): workAreaStart in seconds
duration (float): in seconds
comp_id (int):
start (int): workAreaStart in frames
duration (int): in frames
frame_rate (float): frames per second
width (int): resolution width
height (int): resolution height
"""
res = self.websocketserver.call(self.client.call
('AfterEffects.set_work_area',
item_id=item.id,
('AfterEffects.set_comp_properties',
item_id=comp_id,
start=start,
duration=duration,
frame_rate=frame_rate))
frame_rate=frame_rate,
width=width,
height=height))
return self._handle_return(res)
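An editorial illustration of the rounding note above (not part of the diff): the stub passes frames, and the ExtendScript side converts them to the seconds AE stores internally (framesCount / frameRate).

    # Hypothetical values only - 120 frames at 25 fps.
    frame_start, frames_count, fps = 1001, 120, 25.0
    duration_seconds = frames_count / fps  # 4.8 s ends up on comp.duration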
def save(self):
@ -418,18 +430,18 @@ class AfterEffectsServerStub():
return self._handle_return(res)
def get_render_info(self):
def get_render_info(self, comp_id):
""" Get render queue info for render purposes
Returns:
(AEItem): with 'file_name' field
(list) of (AEItem): with 'file_name' field
"""
res = self.websocketserver.call(self.client.call
('AfterEffects.get_render_info'))
('AfterEffects.get_render_info',
comp_id=comp_id))
records = self._to_records(self._handle_return(res))
if records:
return records.pop()
return records
def get_audio_url(self, item_id):
""" Get audio layer absolute url for comp
@ -522,7 +534,7 @@ class AfterEffectsServerStub():
if records:
return records.pop()
def render(self, folder_url):
def render(self, folder_url, comp_id):
"""
Render all renderqueueitem to 'folder_url'
Args:
@ -531,7 +543,8 @@ class AfterEffectsServerStub():
"""
res = self.websocketserver.call(self.client.call
('AfterEffects.render',
folder_url=folder_url))
folder_url=folder_url,
comp_id=comp_id))
return self._handle_return(res)
def get_extension_version(self):
@ -553,6 +566,12 @@ class AfterEffectsServerStub():
return self._handle_return(res)
def print_msg(self, msg):
"""Triggers Javascript alert dialog."""
self.websocketserver.call(self.client.call
('AfterEffects.print_msg',
msg=msg))
def _handle_return(self, res):
"""Wraps return, throws ValueError if 'error' key is present."""
if res and isinstance(res, str) and res != "undefined":
@ -607,8 +626,8 @@ class AfterEffectsServerStub():
d.get('name'),
d.get('type'),
d.get('members'),
d.get('workAreaStart'),
d.get('workAreaDuration'),
d.get('frameStart'),
d.get('framesDuration'),
d.get('frameRate'),
d.get('file_name'),
d.get("instance_id"),
@ -617,3 +636,18 @@ class AfterEffectsServerStub():
ret.append(item)
return ret
def get_stub():
"""
Convenience function to get server RPC stub to call methods directed
for host (After Effects).
It expects an already created connection, started from the client.
Currently created when panel is opened (AE: Window>Extensions>Avalon)
:return: <AfterEffectsServerStub> where functions could be called from
"""
ae_stub = AfterEffectsServerStub()
if not ae_stub.client:
raise ConnectionNotEstablishedYet("Connection is not created yet")
return ae_stub
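A small hedged usage sketch of the relocated helper (illustration only), mirroring the guard used in AfterEffectsHost.stub: callers should expect the CEP panel may not have opened a connection yet.

    try:
        stub = get_stub()
        stub.print_msg("Connected to After Effects extension")
    except ConnectionNotEstablishedYet:
        # The panel has not created a websocket connection yet; try again later.
        pass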

View file

@ -1,13 +0,0 @@
from openpype.hosts.aftereffects.plugins.create import create_legacy_render
class CreateLocalRender(create_legacy_render.CreateRender):
""" Creator to render locally.
Created only after default render on farm. So family 'render.local' is
used for backward compatibility.
"""
name = "renderDefault"
label = "Render Locally"
family = "renderLocal"

View file

@ -1,62 +0,0 @@
from openpype.pipeline import create
from openpype.pipeline import CreatorError
from openpype.hosts.aftereffects.api import (
get_stub,
list_instances
)
class CreateRender(create.LegacyCreator):
"""Render folder for publish.
Creates subsets in format 'familyTaskSubsetname',
eg 'renderCompositingMain'.
Create only single instance from composition at a time.
"""
name = "renderDefault"
label = "Render on Farm"
family = "render"
defaults = ["Main"]
def process(self):
stub = get_stub() # only after After Effects is up
items = []
if (self.options or {}).get("useSelection"):
items = stub.get_selected_items(
comps=True, folders=False, footages=False
)
if len(items) > 1:
raise CreatorError(
"Please select only single composition at time."
)
if not items:
raise CreatorError((
"Nothing to create. Select composition "
"if 'useSelection' or create at least "
"one composition."
))
existing_subsets = [
instance['subset'].lower()
for instance in list_instances()
]
item = items.pop()
if self.name.lower() in existing_subsets:
txt = "Instance with name \"{}\" already exists.".format(self.name)
raise CreatorError(txt)
self.data["members"] = [item.id]
self.data["uuid"] = item.id # for SubsetManager
self.data["subset"] = (
self.data["subset"]
.replace(stub.PUBLISH_ICON, '')
.replace(stub.LOADED_ICON, '')
)
stub.imprint(item, self.data)
stub.set_label_color(item.id, 14) # Cyan options 0 - 16
stub.rename_item(item.id, stub.PUBLISH_ICON + self.data["subset"])

View file

@ -1,15 +1,25 @@
import re
from openpype import resources
from openpype.lib import BoolDef, UISeparatorDef
from openpype.hosts.aftereffects import api
from openpype.pipeline import (
Creator,
CreatedInstance,
CreatorError,
legacy_io,
CreatorError
)
from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
from openpype.hosts.aftereffects.api.lib import set_settings
from openpype.lib import prepare_template_data
from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS
class RenderCreator(Creator):
"""Creates 'render' instance for publishing.
Result of 'render' instance is video or sequence of images for a
particular composition, based on the configuration in its RenderQueue.
"""
identifier = "render"
label = "Render"
family = "render"
@ -17,18 +27,110 @@ class RenderCreator(Creator):
create_allow_context_change = True
def __init__(self, project_settings, *args, **kwargs):
super(RenderCreator, self).__init__(project_settings, *args, **kwargs)
self._default_variants = (project_settings["aftereffects"]
["create"]
["RenderCreator"]
["defaults"])
# Settings
default_variants = []
mark_for_review = True
def create(self, subset_name_from_ui, data, pre_create_data):
stub = api.get_stub() # only after After Effects is up
try:
_ = stub.get_active_document_full_name()
except ValueError:
raise CreatorError(
"Please save workfile via Workfile app first!"
)
if pre_create_data.get("use_selection"):
comps = stub.get_selected_items(
comps=True, folders=False, footages=False
)
else:
comps = stub.get_items(comps=True, folders=False, footages=False)
if not comps:
raise CreatorError(
"Nothing to create. Select composition in Project Bin if "
"'Use selection' is toggled or create at least "
"one composition."
)
use_composition_name = (pre_create_data.get("use_composition_name") or
len(comps) > 1)
for comp in comps:
if use_composition_name:
if "{composition}" not in subset_name_from_ui.lower():
subset_name_from_ui += "{Composition}"
composition_name = re.sub(
"[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
"",
comp.name
)
dynamic_fill = prepare_template_data({"composition":
composition_name})
subset_name = subset_name_from_ui.format(**dynamic_fill)
data["composition_name"] = composition_name
else:
subset_name = subset_name_from_ui
subset_name = re.sub(r"\{composition\}", '', subset_name,
flags=re.IGNORECASE)
for inst in self.create_context.instances:
if subset_name == inst.subset_name:
raise CreatorError("{} already exists".format(
inst.subset_name))
data["members"] = [comp.id]
new_instance = CreatedInstance(self.family, subset_name, data,
self)
if "farm" in pre_create_data:
use_farm = pre_create_data["farm"]
new_instance.creator_attributes["farm"] = use_farm
review = pre_create_data["mark_for_review"]
new_instance.creator_attributes["mark_for_review"] = review
api.get_stub().imprint(new_instance.id,
new_instance.data_to_store())
self._add_instance_to_context(new_instance)
stub.rename_item(comp.id, subset_name)
set_settings(True, True, [comp.id], print_msg=False)
def get_pre_create_attr_defs(self):
output = [
BoolDef("use_selection",
tooltip="Composition for publishable instance should be "
"selected by default.",
default=True, label="Use selection"),
BoolDef("use_composition_name",
label="Use composition name in subset"),
UISeparatorDef(),
BoolDef("farm", label="Render on farm"),
BoolDef(
"mark_for_review",
label="Review",
default=self.mark_for_review
)
]
return output
def get_instance_attr_defs(self):
return [
BoolDef("farm", label="Render on farm"),
BoolDef(
"mark_for_review",
label="Review",
default=False
)
]
def get_icon(self):
return resources.get_openpype_splash_filepath()
def collect_instances(self):
for instance_data in api.list_instances():
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='render' or 'renderLocal', use them
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family", '').replace("Local", ''))
@ -43,63 +145,72 @@ class RenderCreator(Creator):
for created_inst, _changes in update_list:
api.get_stub().imprint(created_inst.get("instance_id"),
created_inst.data_to_store())
subset_change = _changes.get("subset")
if subset_change:
api.get_stub().rename_item(created_inst.data["members"][0],
subset_change.new_value)
def remove_instances(self, instances):
for instance in instances:
api.remove_instance(instance)
self._remove_instance_from_context(instance)
self.host.remove_instance(instance)
def create(self, subset_name, data, pre_create_data):
stub = api.get_stub() # only after After Effects is up
if pre_create_data.get("use_selection"):
items = stub.get_selected_items(
comps=True, folders=False, footages=False
)
else:
items = stub.get_items(comps=True, folders=False, footages=False)
subset = instance.data["subset"]
comp_id = instance.data["members"][0]
comp = api.get_stub().get_item(comp_id)
if comp:
new_comp_name = comp.name.replace(subset, '')
if not new_comp_name:
new_comp_name = "dummyCompName"
api.get_stub().rename_item(comp_id,
new_comp_name)
if len(items) > 1:
raise CreatorError(
"Please select only single composition at time."
)
if not items:
raise CreatorError((
"Nothing to create. Select composition "
"if 'useSelection' or create at least "
"one composition."
))
def apply_settings(self, project_settings, system_settings):
plugin_settings = (
project_settings["aftereffects"]["create"]["RenderCreator"]
)
for inst in self.create_context.instances:
if subset_name == inst.subset_name:
raise CreatorError("{} already exists".format(
inst.subset_name))
data["members"] = [items[0].id]
new_instance = CreatedInstance(self.family, subset_name, data, self)
if "farm" in pre_create_data:
use_farm = pre_create_data["farm"]
new_instance.creator_attributes["farm"] = use_farm
api.get_stub().imprint(new_instance.id,
new_instance.data_to_store())
self._add_instance_to_context(new_instance)
def get_default_variants(self):
return self._default_variants
def get_instance_attr_defs(self):
return [BoolDef("farm", label="Render on farm")]
def get_pre_create_attr_defs(self):
output = [
BoolDef("use_selection", default=True, label="Use selection"),
UISeparatorDef(),
BoolDef("farm", label="Render on farm")
]
return output
self.mark_for_review = plugin_settings["mark_for_review"]
def get_detail_description(self):
return """Creator for Render instances"""
return """Creator for Render instances
Main publishable item in AfterEffects will be of `render` family.
Result of this item (instance) is picture sequence or video that could
be a final delivery product or loaded and used in other DCCs.
Select single composition and create instance of 'render' family or
turn off 'Use selection' to create instance for all compositions.
'Use composition name in subset' allows explicitly adding the composition
name into the created subset name.
Position of composition name could be set in
`project_settings/global/tools/creator/subset_name_profiles` with some
form of '{composition}' placeholder.
Composition name will be used implicitly if multiple compositions should
be handled at the same time.
If the {composition} placeholder is not used in 'subset_name_profiles',
the composition name will be capitalized and appended to the end of the
subset name if necessary.
If composition name should be used, it will be cleaned up of characters
that would cause an issue in published file names.
"""
def get_dynamic_data(self, variant, task_name, asset_doc,
project_name, host_name, instance):
dynamic_data = {}
if instance is not None:
composition_name = instance.get("composition_name")
if composition_name:
dynamic_data["composition"] = composition_name
else:
dynamic_data["composition"] = "{composition}"
return dynamic_data
def _handle_legacy(self, instance_data):
"""Converts old instances to new format."""
@ -112,11 +223,14 @@ class RenderCreator(Creator):
instance_data.pop("uuid")
if not instance_data.get("task"):
instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
instance_data["task"] = self.create_context.get_current_task_name()
if not instance_data.get("creator_attributes"):
is_old_farm = instance_data["family"] != "renderLocal"
instance_data["creator_attributes"] = {"farm": is_old_farm}
instance_data["family"] = self.family
if instance_data["creator_attributes"].get("mark_for_review") is None:
instance_data["creator_attributes"]["mark_for_review"] = True
return instance_data

View file

@ -2,9 +2,9 @@ import openpype.hosts.aftereffects.api as api
from openpype.client import get_asset_by_name
from openpype.pipeline import (
AutoCreator,
CreatedInstance,
legacy_io,
CreatedInstance
)
from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
class AEWorkfileCreator(AutoCreator):
@ -17,7 +17,7 @@ class AEWorkfileCreator(AutoCreator):
return []
def collect_instances(self):
for instance_data in api.list_instances():
for instance_data in cache_and_get_instances(self):
creator_id = instance_data.get("creator_identifier")
if creator_id == self.identifier:
subset_name = instance_data["subset"]
@ -37,10 +37,11 @@ class AEWorkfileCreator(AutoCreator):
existing_instance = instance
break
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]
host_name = legacy_io.Session["AVALON_APP"]
context = self.create_context
project_name = context.get_current_project_name()
asset_name = context.get_current_asset_name()
task_name = context.get_current_task_name()
host_name = context.host_name
if existing_instance is None:
asset_doc = get_asset_by_name(project_name, asset_name)
@ -55,7 +56,7 @@ class AEWorkfileCreator(AutoCreator):
}
data.update(self.get_dynamic_data(
self.default_variant, task_name, asset_doc,
project_name, host_name
project_name, host_name, None
))
new_instance = CreatedInstance(

View file

@ -22,7 +22,7 @@ class AERenderInstance(RenderInstance):
stagingDir = attr.ib(default=None)
app_version = attr.ib(default=None)
publish_attributes = attr.ib(default={})
file_name = attr.ib(default=None)
file_names = attr.ib(default=[])
class CollectAERender(publish.AbstractCollectRender):
@ -64,34 +64,35 @@ class CollectAERender(publish.AbstractCollectRender):
if family not in ["render", "renderLocal"]: # legacy
continue
item_id = inst.data["members"][0]
comp_id = int(inst.data["members"][0])
work_area_info = CollectAERender.get_stub().get_work_area(
int(item_id))
comp_info = CollectAERender.get_stub().get_comp_properties(
comp_id)
if not work_area_info:
if not comp_info:
self.log.warning("Orphaned instance, deleting metadata")
inst_id = inst.get("instance_id") or item_id
inst_id = inst.data.get("instance_id") or str(comp_id)
CollectAERender.get_stub().remove_instance(inst_id)
continue
frame_start = work_area_info.workAreaStart
frame_end = round(work_area_info.workAreaStart +
float(work_area_info.workAreaDuration) *
float(work_area_info.frameRate)) - 1
fps = work_area_info.frameRate
frame_start = comp_info.frameStart
frame_end = round(comp_info.frameStart +
comp_info.framesDuration) - 1
fps = comp_info.frameRate
# TODO add resolution when supported by extension
task_name = inst.data.get("task") # legacy
render_q = CollectAERender.get_stub().get_render_info()
render_q = CollectAERender.get_stub().get_render_info(comp_id)
if not render_q:
raise ValueError("No file extension set in Render Queue")
render_item = render_q[0]
instance_families = inst.data.get("families", [])
subset_name = inst.data["subset"]
instance = AERenderInstance(
family="render",
families=inst.data.get("families", []),
families=instance_families,
version=version,
time="",
source=current_file,
@ -103,28 +104,29 @@ class CollectAERender(publish.AbstractCollectRender):
setMembers='',
publish=True,
name=subset_name,
resolutionWidth=render_q.width,
resolutionHeight=render_q.height,
resolutionWidth=render_item.width,
resolutionHeight=render_item.height,
pixelAspect=1,
tileRendering=False,
tilesX=0,
tilesY=0,
review="review" in instance_families,
frameStart=frame_start,
frameEnd=frame_end,
frameStep=1,
fps=fps,
app_version=app_version,
publish_attributes=inst.data.get("publish_attributes", {}),
file_name=render_q.file_name
file_names=[item.file_name for item in render_q]
)
comp = compositions_by_id.get(int(item_id))
comp = compositions_by_id.get(comp_id)
if not comp:
raise ValueError("There is no composition for item {}".
format(item_id))
format(comp_id))
instance.outputDir = self._get_output_dir(instance)
instance.comp_name = comp.name
instance.comp_id = item_id
instance.comp_id = comp_id
is_local = "renderLocal" in inst.data["family"] # legacy
if inst.data.get("creator_attributes"):
@ -139,6 +141,9 @@ class CollectAERender(publish.AbstractCollectRender):
instance.toBeRenderedOn = "deadline"
instance.renderer = "aerender"
instance.farm = True # to skip integrate
if "review" in instance.families:
# to skip ExtractReview locally
instance.families.remove("review")
instances.append(instance)
instances_to_remove.append(inst)
@ -163,28 +168,30 @@ class CollectAERender(publish.AbstractCollectRender):
start = render_instance.frameStart
end = render_instance.frameEnd
_, ext = os.path.splitext(os.path.basename(render_instance.file_name))
base_dir = self._get_output_dir(render_instance)
expected_files = []
if "#" not in render_instance.file_name: # single frame (mov)W
path = os.path.join(base_dir, "{}_{}_{}.{}".format(
render_instance.asset,
render_instance.subset,
"v{:03d}".format(render_instance.version),
ext.replace('.', '')
))
expected_files.append(path)
else:
for frame in range(start, end + 1):
path = os.path.join(base_dir, "{}_{}_{}.{}.{}".format(
for file_name in render_instance.file_names:
_, ext = os.path.splitext(os.path.basename(file_name))
ext = ext.replace('.', '')
version_str = "v{:03d}".format(render_instance.version)
if "#" not in file_name: # single frame (mov)W
path = os.path.join(base_dir, "{}_{}_{}.{}".format(
render_instance.asset,
render_instance.subset,
"v{:03d}".format(render_instance.version),
str(frame).zfill(self.padding_width),
ext.replace('.', '')
version_str,
ext
))
expected_files.append(path)
else:
for frame in range(start, end + 1):
path = os.path.join(base_dir, "{}_{}_{}.{}.{}".format(
render_instance.asset,
render_instance.subset,
version_str,
str(frame).zfill(self.padding_width),
ext
))
expected_files.append(path)
return expected_files
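An editorial sketch reproducing the naming scheme above with hypothetical values (asset, subset, version and a padding_width of 4 are all assumed here):

    asset, subset, version_str, padding_width = "sh010", "renderMain", "v001", 4
    sequence = [
        "{}_{}_{}.{}.{}".format(asset, subset, version_str,
                                str(f).zfill(padding_width), "png")
        for f in range(1001, 1004)
    ]
    # -> ['sh010_renderMain_v001.1001.png', ..., 'sh010_renderMain_v001.1003.png']
    single = "{}_{}_{}.{}".format(asset, subset, version_str, "mov")
    # -> 'sh010_renderMain_v001.mov'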
def _get_output_dir(self, render_instance):
@ -216,15 +223,4 @@ class CollectAERender(publish.AbstractCollectRender):
if fam not in instance.families:
instance.families.append(fam)
settings = get_project_settings(os.getenv("AVALON_PROJECT"))
reviewable_subset_filter = (settings["deadline"]
["publish"]
["ProcessSubmittedJobOnFarm"]
["aov_filter"].get(self.hosts[0]))
for aov_pattern in reviewable_subset_filter:
if re.match(aov_pattern, instance.subset):
instance.families.append("review")
instance.review = True
break
return instance

View file

@ -0,0 +1,25 @@
"""
Requires:
None
Provides:
instance -> family ("review")
"""
import pyblish.api
class CollectReview(pyblish.api.ContextPlugin):
"""Add review to families if instance created with 'mark_for_review' flag
"""
label = "Collect Review"
hosts = ["aftereffects"]
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
for instance in context:
creator_attributes = instance.data.get("creator_attributes") or {}
if (
creator_attributes.get("mark_for_review")
and "review" not in instance.data["families"]
):
instance.data["families"].append("review")

View file

@ -53,10 +53,10 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
"active": True,
"asset": asset_entity["name"],
"task": task,
"frameStart": asset_entity["data"]["frameStart"],
"frameEnd": asset_entity["data"]["frameEnd"],
"handleStart": asset_entity["data"]["handleStart"],
"handleEnd": asset_entity["data"]["handleEnd"],
"frameStart": context.data['frameStart'],
"frameEnd": context.data['frameEnd'],
"handleStart": context.data['handleStart'],
"handleEnd": context.data['handleEnd'],
"fps": asset_entity["data"]["fps"],
"resolutionWidth": asset_entity["data"].get(
"resolutionWidth",

View file

@ -21,64 +21,54 @@ class ExtractLocalRender(publish.Extractor):
def process(self, instance):
stub = get_stub()
staging_dir = instance.data["stagingDir"]
self.log.info("staging_dir::{}".format(staging_dir))
self.log.debug("staging_dir::{}".format(staging_dir))
# pull file name from Render Queue Output module
render_q = stub.get_render_info()
stub.render(staging_dir)
if not render_q:
# use file names previously collected from the Render Queue Output module
if not instance.data["file_names"]:
raise ValueError("No file extension set in Render Queue")
_, ext = os.path.splitext(os.path.basename(render_q.file_name))
ext = ext[1:]
first_file_path = None
files = []
self.log.info("files::{}".format(os.listdir(staging_dir)))
for file_name in os.listdir(staging_dir):
files.append(file_name)
if first_file_path is None:
first_file_path = os.path.join(staging_dir,
file_name)
comp_id = instance.data['comp_id']
stub.render(staging_dir, comp_id)
resulting_files = files
if len(files) == 1:
resulting_files = files[0]
representations = []
for file_name in instance.data["file_names"]:
_, ext = os.path.splitext(os.path.basename(file_name))
ext = ext[1:]
repre_data = {
"frameStart": instance.data["frameStart"],
"frameEnd": instance.data["frameEnd"],
"name": ext,
"ext": ext,
"files": resulting_files,
"stagingDir": staging_dir
}
if instance.data["review"]:
repre_data["tags"] = ["review"]
first_file_path = None
files = []
for found_file_name in os.listdir(staging_dir):
if not found_file_name.endswith(ext):
continue
instance.data["representations"] = [repre_data]
files.append(found_file_name)
if first_file_path is None:
first_file_path = os.path.join(staging_dir,
found_file_name)
ffmpeg_path = get_ffmpeg_tool_path("ffmpeg")
# Generate thumbnail.
thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
if not files:
self.log.info("no files")
return
args = [
ffmpeg_path, "-y",
"-i", first_file_path,
"-vf", "scale=300:-1",
"-vframes", "1",
thumbnail_path
]
self.log.debug("Thumbnail args:: {}".format(args))
try:
output = run_subprocess(args)
except TypeError:
self.log.warning("Error in creating thumbnail")
six.reraise(*sys.exc_info())
# single file cannot be wrapped in array
resulting_files = files
if len(files) == 1:
resulting_files = files[0]
instance.data["representations"].append({
"name": "thumbnail",
"ext": "jpg",
"files": os.path.basename(thumbnail_path),
"stagingDir": staging_dir,
"tags": ["thumbnail"]
})
repre_data = {
"frameStart": instance.data["frameStart"],
"frameEnd": instance.data["frameEnd"],
"name": ext,
"ext": ext,
"files": resulting_files,
"stagingDir": staging_dir
}
first_repre = not representations
if instance.data["review"] and first_repre:
repre_data["tags"] = ["review"]
thumbnail_path = os.path.join(staging_dir, files[0])
instance.data["thumbnailSource"] = thumbnail_path
representations.append(repre_data)
instance.data["representations"] = representations
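For orientation, an editorial example (not part of the change) of the shape of a single representation entry; frame values and file names are made up:

    example_repre = {
        "frameStart": 1001,
        "frameEnd": 1100,
        "name": "png",
        "ext": "png",
        "files": ["sh010_renderMain_v001.1001.png",
                  "sh010_renderMain_v001.1002.png"],
        "stagingDir": "/tmp/staging",
        "tags": ["review"],  # only the first representation gets the review tag
    }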

View file

@ -9,7 +9,7 @@ Context of the given subset doesn't match your current scene.
### How to repair?
You can fix this with "repair" button on the right.
You can fix this with "repair" button on the right and refresh Publish at the bottom right.
</description>
<detail>
### __Detailed Info__ (optional)

View file

@ -1,6 +1,6 @@
import json
import pyblish.api
from openpype.hosts.aftereffects.api import list_instances
from openpype.hosts.aftereffects.api import AfterEffectsHost
class PreCollectRender(pyblish.api.ContextPlugin):
@ -25,7 +25,7 @@ class PreCollectRender(pyblish.api.ContextPlugin):
self.log.debug("Not applicable for New Publisher, skip")
return
for inst in list_instances():
for inst in AfterEffectsHost().list_instances():
if inst.get("creator_attributes"):
raise ValueError("Instance created in New publisher, "
"cannot be published in Pyblish.\n"

View file

@ -31,10 +31,13 @@ from .lib import (
lsattrs,
read,
maintained_selection,
maintained_time,
get_selection,
# unique_name,
)
from .capture import capture
__all__ = [
"install",
@ -56,9 +59,11 @@ __all__ = [
# Utility functions
"maintained_selection",
"maintained_time",
"lsattr",
"lsattrs",
"read",
"get_selection",
"capture",
# "unique_name",
]

View file

@ -0,0 +1,278 @@
"""Blender Capture
Playblasting with independent viewport, camera and display options
"""
import contextlib
import bpy
from .lib import maintained_time
from .plugin import deselect_all, create_blender_context
def capture(
camera=None,
width=None,
height=None,
filename=None,
start_frame=None,
end_frame=None,
step_frame=None,
sound=None,
isolate=None,
maintain_aspect_ratio=True,
overwrite=False,
image_settings=None,
display_options=None
):
"""Playblast in an independent windows
Arguments:
camera (str, optional): Name of camera, defaults to "Camera"
width (int, optional): Width of output in pixels
height (int, optional): Height of output in pixels
filename (str, optional): Name of output file path. Defaults to current
render output path.
start_frame (int, optional): Defaults to current start frame.
end_frame (int, optional): Defaults to current end frame.
step_frame (int, optional): Defaults to 1.
sound (str, optional): Specify the sound node to be used during
playblast. When None (default) no sound will be used.
isolate (list): List of nodes to isolate upon capturing
maintain_aspect_ratio (bool, optional): Modify height in order to
maintain aspect ratio.
overwrite (bool, optional): Whether or not to overwrite if file
already exists. If disabled and the file exists, an error will be
raised.
image_settings (dict, optional): Supplied image settings for render,
using `ImageSettings`
display_options (dict, optional): Supplied display options for render
"""
scene = bpy.context.scene
camera = camera or "Camera"
# Ensure camera exists.
if camera not in scene.objects and camera != "AUTO":
raise RuntimeError("Camera does not exist: {0}".format(camera))
# Ensure resolution.
if width and height:
maintain_aspect_ratio = False
width = width or scene.render.resolution_x
height = height or scene.render.resolution_y
if maintain_aspect_ratio:
ratio = scene.render.resolution_x / scene.render.resolution_y
height = round(width / ratio)
# Get frame range.
if start_frame is None:
start_frame = scene.frame_start
if end_frame is None:
end_frame = scene.frame_end
if step_frame is None:
step_frame = 1
frame_range = (start_frame, end_frame, step_frame)
if filename is None:
filename = scene.render.filepath
render_options = {
"filepath": "{}.".format(filename.rstrip(".")),
"resolution_x": width,
"resolution_y": height,
"use_overwrite": overwrite,
}
with _independent_window() as window:
applied_view(window, camera, isolate, options=display_options)
with contextlib.ExitStack() as stack:
stack.enter_context(maintain_camera(window, camera))
stack.enter_context(applied_frame_range(window, *frame_range))
stack.enter_context(applied_render_options(window, render_options))
stack.enter_context(applied_image_settings(window, image_settings))
stack.enter_context(maintained_time())
bpy.ops.render.opengl(
animation=True,
render_keyed_only=False,
sequencer=False,
write_still=False,
view_context=True
)
return filename
ImageSettings = {
"file_format": "FFMPEG",
"color_mode": "RGB",
"ffmpeg": {
"format": "QUICKTIME",
"use_autosplit": False,
"codec": "H264",
"constant_rate_factor": "MEDIUM",
"gopsize": 18,
"use_max_b_frames": False,
},
}
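A hedged usage sketch of the capture helper (illustration only; it must run inside Blender where bpy is available, and the camera name and output path are hypothetical). It reuses the default QuickTIME/H264 ImageSettings defined above:

    output = capture(
        camera="Camera",
        width=1920,
        height=1080,
        filename="/tmp/review/playblast",
        start_frame=1001,
        end_frame=1100,
        image_settings=ImageSettings,
        overwrite=True,
    )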
def isolate_objects(window, objects):
"""Isolate selection"""
deselect_all()
for obj in objects:
obj.select_set(True)
context = create_blender_context(selected=objects, window=window)
bpy.ops.view3d.view_axis(context, type="FRONT")
bpy.ops.view3d.localview(context)
deselect_all()
def _apply_options(entity, options):
for option, value in options.items():
if isinstance(value, dict):
_apply_options(getattr(entity, option), value)
else:
setattr(entity, option, value)
def applied_view(window, camera, isolate=None, options=None):
"""Apply view options to window."""
area = window.screen.areas[0]
space = area.spaces[0]
area.ui_type = "VIEW_3D"
meshes = [obj for obj in window.scene.objects if obj.type == "MESH"]
if camera == "AUTO":
space.region_3d.view_perspective = "ORTHO"
isolate_objects(window, isolate or meshes)
else:
isolate_objects(window, isolate or meshes)
space.camera = window.scene.objects.get(camera)
space.region_3d.view_perspective = "CAMERA"
if isinstance(options, dict):
_apply_options(space, options)
else:
space.shading.type = "SOLID"
space.shading.color_type = "MATERIAL"
space.show_gizmo = False
space.overlay.show_overlays = False
@contextlib.contextmanager
def applied_frame_range(window, start, end, step):
"""Context manager for setting frame range."""
# Store current frame range
current_frame_start = window.scene.frame_start
current_frame_end = window.scene.frame_end
current_frame_step = window.scene.frame_step
# Apply frame range
window.scene.frame_start = start
window.scene.frame_end = end
window.scene.frame_step = step
try:
yield
finally:
# Restore frame range
window.scene.frame_start = current_frame_start
window.scene.frame_end = current_frame_end
window.scene.frame_step = current_frame_step
@contextlib.contextmanager
def applied_render_options(window, options):
"""Context manager for setting render options."""
render = window.scene.render
# Store current settings
original = {}
for opt in options.copy():
try:
original[opt] = getattr(render, opt)
except ValueError:
options.pop(opt)
# Apply settings
_apply_options(render, options)
try:
yield
finally:
# Restore previous settings
_apply_options(render, original)
@contextlib.contextmanager
def applied_image_settings(window, options):
"""Context manager to override image settings."""
options = options or ImageSettings.copy()
ffmpeg = options.pop("ffmpeg", {})
render = window.scene.render
# Store current image settings
original = {}
for opt in options.copy():
try:
original[opt] = getattr(render.image_settings, opt)
except ValueError:
options.pop(opt)
# Store current ffmpeg settings
original_ffmpeg = {}
for opt in ffmpeg.copy():
try:
original_ffmpeg[opt] = getattr(render.ffmpeg, opt)
except ValueError:
ffmpeg.pop(opt)
# Apply image settings
for opt, value in options.items():
setattr(render.image_settings, opt, value)
# Apply ffmpeg settings
for opt, value in ffmpeg.items():
setattr(render.ffmpeg, opt, value)
try:
yield
finally:
# Restore previous settings
for opt, value in original.items():
setattr(render.image_settings, opt, value)
for opt, value in original_ffmpeg.items():
setattr(render.ffmpeg, opt, value)
@contextlib.contextmanager
def maintain_camera(window, camera):
"""Context manager to override camera."""
current_camera = window.scene.camera
if camera in window.scene.objects:
window.scene.camera = window.scene.objects.get(camera)
try:
yield
finally:
window.scene.camera = current_camera
@contextlib.contextmanager
def _independent_window():
"""Create capture-window context."""
context = create_blender_context()
current_windows = set(bpy.context.window_manager.windows)
bpy.ops.wm.window_new(context)
window = list(set(bpy.context.window_manager.windows) - current_windows)[0]
context["window"] = window
try:
yield window
finally:
bpy.ops.wm.window_close(context)
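For orientation, a minimal usage sketch of the capture() helper defined above, mirroring how the review extractors later in this diff call it. The camera name and output path are placeholders, and only keyword names confirmed by those extractors' preset dictionaries are used.
# Hypothetical call, to be run inside a Blender session with OpenPype installed.
from openpype.hosts.blender.api import capture

output_base = capture(
    camera="CameraMain",               # placeholder camera object name
    start_frame=1001,                  # defaults to the scene frame range when omitted
    end_frame=1100,
    filename="/tmp/review/playblast",  # placeholder output base path
    overwrite=True,
    image_settings={                   # overrides the FFMPEG-based ImageSettings default
        "file_format": "PNG",
        "color_mode": "RGB",
        "color_depth": "8",
        "compression": 15,
    },
)
# capture() returns the base filename; frames land next to it as
# "<filename>.<frame>.<ext>" because render.filepath is set to "<filename>.".
print(output_base)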

View file

@ -284,3 +284,13 @@ def maintained_selection():
# This could happen if the active node was deleted during the
# context.
log.exception("Failed to set active object.")
@contextlib.contextmanager
def maintained_time():
"""Maintain current frame during context."""
current_time = bpy.context.scene.frame_current
try:
yield
finally:
bpy.context.scene.frame_current = current_time

View file

@ -24,7 +24,7 @@ from .workio import OpenFileCacher
PREVIEW_COLLECTIONS: Dict = dict()
# This seems like a good value to keep the Qt app responsive and doesn't slow
# down Blender. At least on macOS I the interace of Blender gets very laggy if
# down Blender. At least on macOS, the interface of Blender gets very laggy if
# you make it smaller.
TIMER_INTERVAL: float = 0.01 if platform.system() == "Windows" else 0.1
@ -84,11 +84,11 @@ class MainThreadItem:
self.kwargs = kwargs
def execute(self):
"""Execute callback and store it's result.
"""Execute callback and store its result.
Method must be called from main thread. Item is marked as `done`
when callback execution finishes. Store output of the callback or exception
information when callback raise one.
information when callback raises one.
"""
print("Executing process in main thread")
if self.done:
@ -382,8 +382,8 @@ class TOPBAR_MT_avalon(bpy.types.Menu):
layout.operator(LaunchLibrary.bl_idname, text="Library...")
layout.separator()
layout.operator(LaunchWorkFiles.bl_idname, text="Work Files...")
# TODO (jasper): maybe add 'Reload Pipeline', 'Reset Frame Range' and
# 'Reset Resolution'?
# TODO (jasper): maybe add 'Reload Pipeline', 'Set Frame Range' and
# 'Set Resolution'?
def draw_avalon_menu(self, context):

View file

@ -26,6 +26,8 @@ from openpype.lib import (
emit_event
)
import openpype.hosts.blender
from openpype.settings import get_project_settings
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.blender.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
@ -83,6 +85,31 @@ def uninstall():
ops.unregister()
def show_message(title, message):
from openpype.widgets.message_window import Window
from .ops import BlenderApplication
BlenderApplication.get_app()
Window(
parent=None,
title=title,
message=message,
level="warning")
def message_window(title, message):
from .ops import (
MainThreadItem,
execute_in_main_thread,
_process_app_events
)
mti = MainThreadItem(show_message, title, message)
execute_in_main_thread(mti)
_process_app_events()
def set_start_end_frames():
project_name = legacy_io.active_project()
asset_name = legacy_io.Session["AVALON_ASSET"]
@ -125,10 +152,36 @@ def set_start_end_frames():
def on_new():
set_start_end_frames()
project = os.environ.get("AVALON_PROJECT")
settings = get_project_settings(project)
unit_scale_settings = settings.get("blender").get("unit_scale_settings")
unit_scale_enabled = unit_scale_settings.get("enabled")
if unit_scale_enabled:
unit_scale = unit_scale_settings.get("base_file_unit_scale")
bpy.context.scene.unit_settings.scale_length = unit_scale
def on_open():
set_start_end_frames()
project = os.environ.get("AVALON_PROJECT")
settings = get_project_settings(project)
unit_scale_settings = settings.get("blender").get("unit_scale_settings")
unit_scale_enabled = unit_scale_settings.get("enabled")
apply_on_opening = unit_scale_settings.get("apply_on_opening")
if unit_scale_enabled and apply_on_opening:
unit_scale = unit_scale_settings.get("base_file_unit_scale")
prev_unit_scale = bpy.context.scene.unit_settings.scale_length
if unit_scale != prev_unit_scale:
bpy.context.scene.unit_settings.scale_length = unit_scale
message_window(
"Base file unit scale changed",
"Base file unit scale changed to match the project settings.")
@bpy.app.handlers.persistent
def _on_save_pre(*args):

View file

@ -62,7 +62,8 @@ def prepare_data(data, container_name=None):
def create_blender_context(active: Optional[bpy.types.Object] = None,
selected: Optional[bpy.types.Object] = None,):
selected: Optional[bpy.types.Object] = None,
window: Optional[bpy.types.Window] = None):
"""Create a new Blender context. If an object is passed as
parameter, it is set as selected and active.
"""
@ -72,7 +73,9 @@ def create_blender_context(active: Optional[bpy.types.Object] = None,
override_context = bpy.context.copy()
for win in bpy.context.window_manager.windows:
windows = [window] if window else bpy.context.window_manager.windows
for win in windows:
for area in win.screen.areas:
if area.type == 'VIEW_3D':
for region in area.regions:

View file

@ -0,0 +1,55 @@
from pathlib import Path
from openpype.lib import PreLaunchHook
class AddPythonScriptToLaunchArgs(PreLaunchHook):
"""Add python script to be executed before Blender launch."""
# Append after file argument
order = 15
app_groups = [
"blender",
]
def execute(self):
if not self.launch_context.data.get("python_scripts"):
return
# Add each python script path to the launch arguments
for python_script_path in self.launch_context.data["python_scripts"]:
self.log.info(
f"Adding python script {python_script_path} to launch"
)
# Test script path exists
python_script_path = Path(python_script_path)
if not python_script_path.exists():
self.log.warning(
f"Python script {python_script_path} doesn't exist. "
"Skipped..."
)
continue
if "--" in self.launch_context.launch_args:
# Insert before separator
separator_index = self.launch_context.launch_args.index("--")
self.launch_context.launch_args.insert(
separator_index,
"-P",
)
self.launch_context.launch_args.insert(
separator_index + 1,
python_script_path.as_posix(),
)
else:
self.launch_context.launch_args.extend(
["-P", python_script_path.as_posix()]
)
# Ensure separator
if "--" not in self.launch_context.launch_args:
self.launch_context.launch_args.append("--")
self.launch_context.launch_args.extend(
[*self.launch_context.data.get("script_args", [])]
)
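As a rough illustration of the argument shaping above, assuming launch_context.data carries one python script and one script argument; all paths below are invented, not values from the repository.
# Hypothetical before/after of launch_context.launch_args for
# data = {"python_scripts": ["/pipeline/startup.py"], "script_args": ["--studio"]}.
before = ["blender", "/projects/shot010/work.blend"]
after = [
    "blender", "/projects/shot010/work.blend",
    "-P", "/pipeline/startup.py",  # inserted by the hook (before "--" if present)
    "--",                          # separator so trailing args reach the script
    "--studio",                    # forwarded from data["script_args"]
]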

View file

@ -0,0 +1,47 @@
"""Create review."""
import bpy
from openpype.pipeline import legacy_io
from openpype.hosts.blender.api import plugin, lib, ops
from openpype.hosts.blender.api.pipeline import AVALON_INSTANCES
class CreateReview(plugin.Creator):
"""Single baked camera"""
name = "reviewDefault"
label = "Review"
family = "review"
icon = "video-camera"
def process(self):
""" Run the creator on Blender main thread"""
mti = ops.MainThreadItem(self._process)
ops.execute_in_main_thread(mti)
def _process(self):
# Get Instance Container or create it if it does not exist
instances = bpy.data.collections.get(AVALON_INSTANCES)
if not instances:
instances = bpy.data.collections.new(name=AVALON_INSTANCES)
bpy.context.scene.collection.children.link(instances)
# Create instance object
asset = self.data["asset"]
subset = self.data["subset"]
name = plugin.asset_name(asset, subset)
asset_group = bpy.data.collections.new(name=name)
instances.children.link(asset_group)
self.data['task'] = legacy_io.Session.get('AVALON_TASK')
lib.imprint(asset_group, self.data)
if (self.options or {}).get("useSelection"):
selected = lib.get_selection()
for obj in selected:
asset_group.objects.link(obj)
elif (self.options or {}).get("asset_group"):
obj = (self.options or {}).get("asset_group")
asset_group.objects.link(obj)
return asset_group

View file

@ -44,7 +44,7 @@ class AppendBlendLoader(plugin.AssetLoader):
"""
representations = ["blend"]
families = ["*"]
families = ["workfile"]
label = "Append Workfile"
order = 9
@ -68,7 +68,7 @@ class ImportBlendLoader(plugin.AssetLoader):
"""
representations = ["blend"]
families = ["*"]
families = ["workfile"]
label = "Import Workfile"
order = 9

View file

@ -65,37 +65,19 @@ class CacheModelLoader(plugin.AssetLoader):
imported = lib.get_selection()
empties = [obj for obj in imported if obj.type == 'EMPTY']
container = None
for empty in empties:
if not empty.parent:
container = empty
break
assert container, "No asset group found"
# Children must be linked before parents,
# otherwise the hierarchy will break
objects = []
nodes = list(container.children)
for obj in nodes:
for obj in imported:
obj.parent = asset_group
bpy.data.objects.remove(container)
for obj in nodes:
for obj in imported:
objects.append(obj)
nodes.extend(list(obj.children))
imported.extend(list(obj.children))
objects.reverse()
for obj in objects:
parent.objects.link(obj)
collection.objects.unlink(obj)
for obj in objects:
name = obj.name
obj.name = f"{group_name}:{name}"
@ -138,13 +120,14 @@ class CacheModelLoader(plugin.AssetLoader):
group_name = plugin.asset_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
if not avalon_container:
avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
bpy.context.scene.collection.children.link(avalon_container)
avalon_containers = bpy.data.collections.get(AVALON_CONTAINERS)
if not avalon_containers:
avalon_containers = bpy.data.collections.new(
name=AVALON_CONTAINERS)
bpy.context.scene.collection.children.link(avalon_containers)
asset_group = bpy.data.objects.new(group_name, object_data=None)
avalon_container.objects.link(asset_group)
avalon_containers.objects.link(asset_group)
objects = self._process(libpath, asset_group, group_name)

View file

@ -0,0 +1,209 @@
"""Load an asset in Blender from an Alembic file."""
from pathlib import Path
from pprint import pformat
from typing import Dict, List, Optional
import bpy
from openpype.pipeline import (
get_representation_path,
AVALON_CONTAINER_ID,
)
from openpype.hosts.blender.api import plugin, lib
from openpype.hosts.blender.api.pipeline import (
AVALON_CONTAINERS,
AVALON_PROPERTY,
)
class AbcCameraLoader(plugin.AssetLoader):
"""Load a camera from Alembic file.
Stores the imported asset in an empty named after the asset.
"""
families = ["camera"]
representations = ["abc"]
label = "Load Camera (ABC)"
icon = "code-fork"
color = "orange"
def _remove(self, asset_group):
objects = list(asset_group.children)
for obj in objects:
if obj.type == "CAMERA":
bpy.data.cameras.remove(obj.data)
elif obj.type == "EMPTY":
objects.extend(obj.children)
bpy.data.objects.remove(obj)
def _process(self, libpath, asset_group, group_name):
plugin.deselect_all()
bpy.ops.wm.alembic_import(filepath=libpath)
objects = lib.get_selection()
for obj in objects:
obj.parent = asset_group
for obj in objects:
name = obj.name
obj.name = f"{group_name}:{name}"
if obj.type != "EMPTY":
name_data = obj.data.name
obj.data.name = f"{group_name}:{name_data}"
if not obj.get(AVALON_PROPERTY):
obj[AVALON_PROPERTY] = dict()
avalon_info = obj[AVALON_PROPERTY]
avalon_info.update({"container_name": group_name})
plugin.deselect_all()
return objects
def process_asset(
self,
context: dict,
name: str,
namespace: Optional[str] = None,
options: Optional[Dict] = None,
) -> Optional[List]:
"""
Arguments:
name: Use pre-defined name
namespace: Use pre-defined namespace
context: Full parenthood of representation to load
options: Additional settings dictionary
"""
libpath = self.fname
asset = context["asset"]["name"]
subset = context["subset"]["name"]
asset_name = plugin.asset_name(asset, subset)
unique_number = plugin.get_unique_number(asset, subset)
group_name = plugin.asset_name(asset, subset, unique_number)
namespace = namespace or f"{asset}_{unique_number}"
avalon_container = bpy.data.collections.get(AVALON_CONTAINERS)
if not avalon_container:
avalon_container = bpy.data.collections.new(name=AVALON_CONTAINERS)
bpy.context.scene.collection.children.link(avalon_container)
asset_group = bpy.data.objects.new(group_name, object_data=None)
avalon_container.objects.link(asset_group)
objects = self._process(libpath, asset_group, group_name)
objects = []
nodes = list(asset_group.children)
for obj in nodes:
objects.append(obj)
nodes.extend(list(obj.children))
bpy.context.scene.collection.objects.link(asset_group)
asset_group[AVALON_PROPERTY] = {
"schema": "openpype:container-2.0",
"id": AVALON_CONTAINER_ID,
"name": name,
"namespace": namespace or "",
"loader": str(self.__class__.__name__),
"representation": str(context["representation"]["_id"]),
"libpath": libpath,
"asset_name": asset_name,
"parent": str(context["representation"]["parent"]),
"family": context["representation"]["context"]["family"],
"objectName": group_name,
}
self[:] = objects
return objects
def exec_update(self, container: Dict, representation: Dict):
"""Update the loaded asset.
This will remove all objects of the current collection, load the new
ones and add them to the collection.
If the objects of the collection are used in another collection they
will not be removed, only unlinked. Normally this should not be the
case though.
Warning:
No nested collections are supported at the moment!
"""
object_name = container["objectName"]
asset_group = bpy.data.objects.get(object_name)
libpath = Path(get_representation_path(representation))
extension = libpath.suffix.lower()
self.log.info(
"Container: %s\nRepresentation: %s",
pformat(container, indent=2),
pformat(representation, indent=2),
)
assert asset_group, (
f"The asset is not loaded: {container['objectName']}")
assert libpath, (
f"No existing library file found for {container['objectName']}")
assert libpath.is_file(), f"The file doesn't exist: {libpath}"
assert extension in plugin.VALID_EXTENSIONS, (
f"Unsupported file: {libpath}")
metadata = asset_group.get(AVALON_PROPERTY)
group_libpath = metadata["libpath"]
normalized_group_libpath = str(
Path(bpy.path.abspath(group_libpath)).resolve())
normalized_libpath = str(
Path(bpy.path.abspath(str(libpath))).resolve())
self.log.debug(
"normalized_group_libpath:\n %s\nnormalized_libpath:\n %s",
normalized_group_libpath,
normalized_libpath,
)
if normalized_group_libpath == normalized_libpath:
self.log.info("Library already loaded, not updating...")
return
mat = asset_group.matrix_basis.copy()
self._remove(asset_group)
self._process(str(libpath), asset_group, object_name)
asset_group.matrix_basis = mat
metadata["libpath"] = str(libpath)
metadata["representation"] = str(representation["_id"])
def exec_remove(self, container: Dict) -> bool:
"""Remove an existing container from a Blender scene.
Arguments:
container (openpype:container-1.0): Container to remove,
from `host.ls()`.
Returns:
bool: Whether the container was deleted.
Warning:
No nested collections are supported at the moment!
"""
object_name = container["objectName"]
asset_group = bpy.data.objects.get(object_name)
if not asset_group:
return False
self._remove(asset_group)
bpy.data.objects.remove(asset_group)
return True

View file

@ -0,0 +1,64 @@
import bpy
import pyblish.api
from openpype.pipeline import legacy_io
class CollectReview(pyblish.api.InstancePlugin):
"""Collect Review data
"""
order = pyblish.api.CollectorOrder + 0.3
label = "Collect Review Data"
families = ["review"]
def process(self, instance):
self.log.debug(f"instance: {instance}")
# get cameras
cameras = [
obj
for obj in instance
if isinstance(obj, bpy.types.Object) and obj.type == "CAMERA"
]
assert len(cameras) == 1, (
f"Not a single camera found in extraction: {cameras}"
)
camera = cameras[0].name
self.log.debug(f"camera: {camera}")
# get isolate objects list from the instance's mesh members
isolate_objects = [
obj
for obj in instance
if isinstance(obj, bpy.types.Object) and obj.type == "MESH"
]
if not instance.data.get("remove"):
task = legacy_io.Session.get("AVALON_TASK")
instance.data.update({
"subset": f"{task}Review",
"review_camera": camera,
"frameStart": instance.context.data["frameStart"],
"frameEnd": instance.context.data["frameEnd"],
"fps": instance.context.data["fps"],
"isolate": isolate_objects,
})
self.log.debug(f"instance data: {instance.data}")
# TODO : Collect audio
audio_tracks = []
instance.data["audio"] = []
for track in audio_tracks:
instance.data["audio"].append(
{
"offset": track.offset.get(),
"filename": track.filename.get(),
}
)
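For orientation, an illustrative snapshot of what instance.data could hold after this collector runs, assuming a task named "animation" and a 1001-1100 frame range. In the real collector the "isolate" entries are bpy.types.Object references; they are shown here by name only.
# Example only; values are not captured from a real publish.
collected_data = {
    "subset": "animationReview",    # f"{task}Review" with task == "animation"
    "review_camera": "CameraMain",  # name of the single CAMERA member
    "frameStart": 1001,
    "frameEnd": 1100,
    "fps": 25,
    "isolate": ["charHero_GEO", "setKitchen_GEO"],  # MESH members of the instance
    "audio": [],                    # audio collection is still a TODO above
}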

View file

@ -0,0 +1,122 @@
import os
import clique
import bpy
import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.blender.api import capture
from openpype.hosts.blender.api.lib import maintained_time
class ExtractPlayblast(publish.Extractor):
"""
Extract viewport playblast.
Takes the review camera and creates a review image sequence from a viewport
capture.
"""
label = "Extract Playblast"
hosts = ["blender"]
families = ["review"]
optional = True
order = pyblish.api.ExtractorOrder + 0.01
def process(self, instance):
self.log.info("Extracting capture..")
self.log.info(instance.data)
# get scene fps
fps = instance.data.get("fps")
if fps is None:
fps = bpy.context.scene.render.fps
instance.data["fps"] = fps
self.log.info(f"fps: {fps}")
# If start and end frames cannot be determined,
# get them from Blender timeline.
start = instance.data.get("frameStart", bpy.context.scene.frame_start)
end = instance.data.get("frameEnd", bpy.context.scene.frame_end)
self.log.info(f"start: {start}, end: {end}")
assert end > start, "Invalid time range!"
# get cameras
camera = instance.data("review_camera", None)
# get isolate objects list
isolate = instance.data("isolate", None)
# get output path
stagingdir = self.staging_dir(instance)
filename = instance.name
path = os.path.join(stagingdir, filename)
self.log.info(f"Outputting images to {path}")
project_settings = instance.context.data["project_settings"]["blender"]
presets = project_settings["publish"]["ExtractPlayblast"]["presets"]
preset = presets.get("default")
preset.update({
"camera": camera,
"start_frame": start,
"end_frame": end,
"filename": path,
"overwrite": True,
"isolate": isolate,
})
preset.setdefault(
"image_settings",
{
"file_format": "PNG",
"color_mode": "RGB",
"color_depth": "8",
"compression": 15,
},
)
with maintained_time():
path = capture(**preset)
self.log.debug(f"playblast path {path}")
collected_files = os.listdir(stagingdir)
collections, remainder = clique.assemble(
collected_files,
patterns=[f"{filename}\\.{clique.DIGITS_PATTERN}\\.png$"],
)
if len(collections) > 1:
raise RuntimeError(
f"More than one collection found in stagingdir: {stagingdir}"
)
elif len(collections) == 0:
raise RuntimeError(
f"No collection found in stagingdir: {stagingdir}"
)
frame_collection = collections[0]
self.log.info(f"We found collection of interest {frame_collection}")
instance.data.setdefault("representations", [])
tags = ["review"]
if not instance.data.get("keepImages"):
tags.append("delete")
representation = {
"name": "png",
"ext": "png",
"files": list(frame_collection),
"stagingDir": stagingdir,
"frameStart": start,
"frameEnd": end,
"fps": fps,
"tags": tags,
"camera_name": camera
}
instance.data["representations"].append(representation)

View file

@ -0,0 +1,99 @@
import os
import glob
import pyblish.api
from openpype.pipeline import publish
from openpype.hosts.blender.api import capture
from openpype.hosts.blender.api.lib import maintained_time
import bpy
class ExtractThumbnail(publish.Extractor):
"""Extract viewport thumbnail.
Takes review camera and creates a thumbnail based on viewport
capture.
"""
label = "Extract Thumbnail"
hosts = ["blender"]
families = ["review"]
order = pyblish.api.ExtractorOrder + 0.01
presets = {}
def process(self, instance):
self.log.info("Extracting capture..")
stagingdir = self.staging_dir(instance)
filename = instance.name
path = os.path.join(stagingdir, filename)
self.log.info(f"Outputting images to {path}")
camera = instance.data.get("review_camera", "AUTO")
start = instance.data.get("frameStart", bpy.context.scene.frame_start)
family = instance.data.get("family")
isolate = instance.data("isolate", None)
preset = self.presets.get(family, {})
preset.update({
"camera": camera,
"start_frame": start,
"end_frame": start,
"filename": path,
"overwrite": True,
"isolate": isolate,
})
preset.setdefault(
"image_settings",
{
"file_format": "JPEG",
"color_mode": "RGB",
"quality": 100,
},
)
with maintained_time():
path = capture(**preset)
thumbnail = os.path.basename(self._fix_output_path(path))
self.log.info(f"thumbnail: {thumbnail}")
instance.data.setdefault("representations", [])
representation = {
"name": "thumbnail",
"ext": "jpg",
"files": thumbnail,
"stagingDir": stagingdir,
"thumbnail": True
}
instance.data["representations"].append(representation)
def _fix_output_path(self, filepath):
""""Workaround to return correct filepath.
To workaround this we just glob.glob() for any file extensions and
assume the latest modified file is the correct file and return it.
"""
# Catch cancelled playblast
if filepath is None:
self.log.warning(
"Playblast did not result in output path. "
"Playblast is probably interrupted."
)
return None
if not os.path.exists(filepath):
files = glob.glob(f"{filepath}.*.jpg")
if not files:
raise RuntimeError(f"Couldn't find playblast from: {filepath}")
filepath = max(files, key=os.path.getmtime)
return filepath

View file

@ -19,7 +19,6 @@ class ValidateCameraZeroKeyframe(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["camera"]
version = (0, 1, 0)
label = "Zero Keyframe"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

View file

@ -14,7 +14,6 @@ class ValidateMeshHasUvs(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
label = "Mesh Has UV's"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]
optional = True

View file

@ -14,7 +14,6 @@ class ValidateMeshNoNegativeScale(pyblish.api.Validator):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
category = "geometry"
label = "Mesh No Negative Scale"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

View file

@ -19,7 +19,6 @@ class ValidateNoColonsInName(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model", "rig"]
version = (0, 1, 0)
label = "No Colons in names"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

View file

@ -21,7 +21,6 @@ class ValidateTransformZero(pyblish.api.InstancePlugin):
order = ValidateContentsOrder
hosts = ["blender"]
families = ["model"]
version = (0, 1, 0)
label = "Transform Zero"
actions = [openpype.hosts.blender.api.action.SelectInvalidAction]

View file

@ -38,8 +38,9 @@ class CelactionPrelaunchHook(PreLaunchHook):
)
path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py")
subproces_args = get_openpype_execute_args("run", path_to_cli)
openpype_executable = subproces_args.pop(0)
subprocess_args = get_openpype_execute_args("run", path_to_cli)
openpype_executable = subprocess_args.pop(0)
workfile_settings = self.get_workfile_settings()
winreg.SetValueEx(
hKey,
@ -49,20 +50,34 @@ class CelactionPrelaunchHook(PreLaunchHook):
openpype_executable
)
parameters = subproces_args + [
"--currentFile", "*SCENE*",
"--chunk", "*CHUNK*",
"--frameStart", "*START*",
"--frameEnd", "*END*",
"--resolutionWidth", "*X*",
"--resolutionHeight", "*Y*"
# add required arguments for workfile path
parameters = subprocess_args + [
"--currentFile", "*SCENE*"
]
# Add custom parameters from workfile settings
if "render_chunk" in workfile_settings["submission_overrides"]:
parameters += [
"--chunk", "*CHUNK*"
]
if "resolution" in workfile_settings["submission_overrides"]:
parameters += [
"--resolutionWidth", "*X*",
"--resolutionHeight", "*Y*"
]
if "frame_range" in workfile_settings["submission_overrides"]:
parameters += [
"--frameStart", "*START*",
"--frameEnd", "*END*"
]
winreg.SetValueEx(
hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
subprocess.list2cmdline(parameters)
)
self.log.debug(f"__ parameters: \"{parameters}\"")
# setting resolution parameters
path_submit = "\\".join([
path_user_settings, "Dialogs", "SubmitOutput"
@ -135,3 +150,6 @@ class CelactionPrelaunchHook(PreLaunchHook):
self.log.info(f"Workfile to open: \"{workfile_path}\"")
return workfile_path
def get_workfile_settings(self):
return self.data["project_settings"]["celaction"]["workfile"]

View file

@ -1,5 +1,4 @@
import pyblish.api
import argparse
import sys
from pprint import pformat
@ -11,20 +10,40 @@ class CollectCelactionCliKwargs(pyblish.api.Collector):
order = pyblish.api.Collector.order - 0.1
def process(self, context):
parser = argparse.ArgumentParser(prog="celaction")
parser.add_argument("--currentFile",
help="Pass file to Context as `currentFile`")
parser.add_argument("--chunk",
help=("Render chanks on farm"))
parser.add_argument("--frameStart",
help=("Start of frame range"))
parser.add_argument("--frameEnd",
help=("End of frame range"))
parser.add_argument("--resolutionWidth",
help=("Width of resolution"))
parser.add_argument("--resolutionHeight",
help=("Height of resolution"))
passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__
args = list(sys.argv[1:])
self.log.info(str(args))
missing_kwargs = []
passing_kwargs = {}
for key in (
"chunk",
"frameStart",
"frameEnd",
"resolutionWidth",
"resolutionHeight",
"currentFile",
):
arg_key = f"--{key}"
if arg_key not in args:
missing_kwargs.append(key)
continue
arg_idx = args.index(arg_key)
args.pop(arg_idx)
if key != "currentFile":
value = args.pop(arg_idx)
else:
path_parts = []
while arg_idx < len(args):
path_parts.append(args.pop(arg_idx))
value = " ".join(path_parts).strip('"')
passing_kwargs[key] = value
if missing_kwargs:
self.log.debug("Missing arguments {}".format(
", ".join(
[f'"{key}"' for key in missing_kwargs]
)
))
self.log.info("Storing kwargs ...")
self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs)))

Some files were not shown because too many files have changed in this diff.