Merge remote-tracking branch 'upstream/develop' into develop

Author: Alexey Bogomolov, 2023-02-13 00:46:34 +03:00
Commit: 85aa0d2d21
59 changed files with 1050 additions and 643 deletions


@ -1,19 +0,0 @@
name: Automate Projects
on:
issues:
types: [opened, labeled]
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
jobs:
assign_one_project:
runs-on: ubuntu-latest
name: Assign to One Project
steps:
- name: Assign NEW bugs to triage
uses: srggrs/assign-one-project-github-action@1.2.0
if: contains(github.event.issue.labels.*.name, 'bug')
with:
project: 'https://github.com/pypeclub/pype/projects/2'
column_name: 'Needs triage'


@ -13,7 +13,7 @@ jobs:
if: github.event.pull_request.milestone == null
uses: zoispag/action-assign-milestone@v1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
repo-token: "${{ secrets.YNPUT_BOT_TOKEN }}"
milestone: 'next-minor'
run_if_develop:
@ -24,5 +24,5 @@ jobs:
if: github.event.pull_request.milestone == null
uses: zoispag/action-assign-milestone@v1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
milestone: 'next-patch'
repo-token: "${{ secrets.YNPUT_BOT_TOKEN }}"
milestone: 'next-patch'


@ -12,7 +12,7 @@ jobs:
uses: "WyriHaximus/github-action-get-milestones@master"
id: milestones
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
- run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
id: querymilestone
@ -31,7 +31,7 @@ jobs:
with:
title: 'next-patch'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
generate-next-minor:
runs-on: ubuntu-latest
@ -40,7 +40,7 @@ jobs:
uses: "WyriHaximus/github-action-get-milestones@master"
id: milestones
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"
- run: printf "name=number::%s" $(printenv MILESTONES | jq --arg MILESTONE $(printenv MILESTONE) '.[] | select(.title == $MILESTONE) | .number')
id: querymilestone
@ -59,4 +59,4 @@ jobs:
with:
title: 'next-minor'
env:
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
GITHUB_TOKEN: "${{ secrets.YNPUT_BOT_TOKEN }}"


@ -14,10 +14,10 @@ jobs:
- name: 🚛 Checkout Code
uses: actions/checkout@v2
- name: 🔨 Merge develop to main
uses: everlytic/branch-merge@1.1.0
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
source_ref: 'develop'
target_branch: 'main'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
@ -26,4 +26,4 @@ jobs:
uses: benc-uk/workflow-dispatch@v1
with:
workflow: Nightly Prerelease
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}


@ -25,43 +25,15 @@ jobs:
- name: 🔎 Determine next version type
id: version_type
run: |
TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.GITHUB_TOKEN }})
echo ::set-output name=type::$TYPE
TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.YNPUT_BOT_TOKEN }})
echo "type=${TYPE}" >> $GITHUB_OUTPUT
- name: 💉 Inject new version into files
id: version
if: steps.version_type.outputs.type != 'skip'
run: |
RESULT=$(python ./tools/ci_tools.py --nightly --github_token ${{ secrets.GITHUB_TOKEN }})
echo ::set-output name=next_tag::$RESULT
# - name: "✏️ Generate full changelog"
# if: steps.version_type.outputs.type != 'skip'
# id: generate-full-changelog
# uses: heinrichreimer/github-changelog-generator-action@v2.3
# with:
# token: ${{ secrets.ADMIN_TOKEN }}
# addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
# issues: false
# issuesWoLabels: false
# sinceTag: "3.12.0"
# maxIssues: 100
# pullRequests: true
# prWoLabels: false
# author: false
# unreleased: true
# compareLink: true
# stripGeneratorNotice: true
# verbose: true
# unreleasedLabel: ${{ steps.version.outputs.next_tag }}
# excludeTagsRegex: "CI/.+"
# releaseBranch: "main"
- name: "🖨️ Print changelog to console"
if: steps.version_type.outputs.type != 'skip'
run: cat CHANGELOG.md
NEW_VERSION_TAG=$(python ./tools/ci_tools.py --nightly --github_token ${{ secrets.YNPUT_BOT_TOKEN }})
echo "next_tag=${NEW_VERSION_TAG}" >> $GITHUB_OUTPUT
- name: 💾 Commit and Tag
id: git_commit
@ -80,7 +52,7 @@ jobs:
- name: Push to protected main branch
uses: CasperWA/push-protected@v2.10.0
with:
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}
branch: main
tags: true
unprotect_reviews: true
@ -89,7 +61,7 @@ jobs:
uses: everlytic/branch-merge@1.1.0
if: steps.version_type.outputs.type != 'skip'
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
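The workflow hunks above also migrate from the deprecated `::set-output` command to the `$GITHUB_OUTPUT` file. A minimal sketch of the new mechanism, written in Python (the `type` key is taken from the workflow; the value and the local fallback path are hypothetical):

```
import os

# GitHub Actions exposes the output file path in GITHUB_OUTPUT; the fallback
# is only here so this sketch also runs outside of a workflow step.
output_file = os.environ.get("GITHUB_OUTPUT", "github_output.txt")

# Equivalent of the old `echo ::set-output name=type::patch`.
with open(output_file, "a") as handle:
    handle.write("type=patch\n")
```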


@ -26,34 +26,12 @@ jobs:
- name: 💉 Inject new version into files
id: version
run: |
echo ::set-output name=current_version::${GITHUB_REF#refs/*/}
RESULT=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/})
LASTRELEASE=$(python ./tools/ci_tools.py --lastversion release)
NEW_VERSION=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/})
LAST_VERSION=$(python ./tools/ci_tools.py --lastversion release)
echo ::set-output name=last_release::$LASTRELEASE
echo ::set-output name=release_tag::$RESULT
# - name: "✏️ Generate full changelog"
# if: steps.version.outputs.release_tag != 'skip'
# id: generate-full-changelog
# uses: heinrichreimer/github-changelog-generator-action@v2.3
# with:
# token: ${{ secrets.ADMIN_TOKEN }}
# addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
# issues: false
# issuesWoLabels: false
# sinceTag: "3.12.0"
# maxIssues: 100
# pullRequests: true
# prWoLabels: false
# author: false
# unreleased: true
# compareLink: true
# stripGeneratorNotice: true
# verbose: true
# futureRelease: ${{ steps.version.outputs.release_tag }}
# excludeTagsRegex: "CI/.+"
# releaseBranch: "main"
echo "current_version=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
echo "last_release=${LAST_VERSION}" >> $GITHUB_OUTPUT
echo "release_tag=${NEW_VERSION}" >> $GITHUB_OUTPUT
- name: 💾 Commit and Tag
id: git_commit
@ -70,43 +48,17 @@ jobs:
if: steps.version.outputs.release_tag != 'skip'
uses: CasperWA/push-protected@v2.10.0
with:
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}
branch: main
tags: true
unprotect_reviews: true
- name: "✏️ Generate last changelog"
if: steps.version.outputs.release_tag != 'skip'
id: generate-last-changelog
uses: heinrichreimer/github-changelog-generator-action@v2.2
with:
token: ${{ secrets.ADMIN_TOKEN }}
addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}, "refactor":{"prefix":"**🔀 Refactored code**", "labels":["refactor"]}}'
issues: false
issuesWoLabels: false
sinceTag: ${{ steps.version.outputs.last_release }}
maxIssues: 100
pullRequests: true
prWoLabels: false
author: false
unreleased: true
compareLink: true
stripGeneratorNotice: true
verbose: true
futureRelease: ${{ steps.version.outputs.release_tag }}
excludeTagsRegex: "CI/.+"
releaseBranch: "main"
stripHeaders: true
base: 'none'
- name: 🚀 Github Release
if: steps.version.outputs.release_tag != 'skip'
uses: ncipollo/release-action@v1
with:
body: ${{ steps.generate-last-changelog.outputs.changelog }}
tag: ${{ steps.version.outputs.release_tag }}
token: ${{ secrets.ADMIN_TOKEN }}
token: ${{ secrets.YNPUT_BOT_TOKEN }}
- name: ☠ Delete Pre-release
if: steps.version.outputs.release_tag != 'skip'
@ -118,7 +70,7 @@ jobs:
if: steps.version.outputs.release_tag != 'skip'
uses: everlytic/branch-merge@1.1.0
with:
github_token: ${{ secrets.ADMIN_TOKEN }}
github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
source_ref: 'main'
target_branch: 'develop'
commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'


@ -28,7 +28,7 @@ jobs:
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: 🧵 Install Requirements
shell: pwsh
run: |
@ -64,27 +64,3 @@ jobs:
run: |
export SKIP_THIRD_PARTY_VALIDATION="1"
./tools/build.sh
# MacOS-latest:
# runs-on: macos-latest
# strategy:
# matrix:
# python-version: [3.9]
# steps:
# - name: 🚛 Checkout Code
# uses: actions/checkout@v2
# - name: Set up Python
# uses: actions/setup-python@v2
# with:
# python-version: ${{ matrix.python-version }}
# - name: 🧵 Install Requirements
# run: |
# ./tools/create_env.sh
# - name: 🔨 Build
# run: |
# ./tools/build.sh


@ -9,4 +9,4 @@ repos:
- id: check-yaml
- id: check-added-large-files
- id: no-commit-to-branch
args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-]+)$).*' ]
args: [ '--pattern', '^(?!((release|enhancement|feature|bugfix|documentation|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*' ]
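The widened `no-commit-to-branch` pattern now also allows underscores in branch names. A minimal sketch of the effect, assuming Python's `re` module and hypothetical branch names (the hook blocks branches that match the pattern):

```
import re

# Pattern from the updated hook: block any branch that is NOT of the form
# <type>/<name>, where <name> may now contain underscores as well.
PATTERN = (
    r"^(?!((release|enhancement|feature|bugfix|documentation"
    r"|tests|local|chore)\/[a-zA-Z0-9\-_]+)$).*"
)

for branch in ("feature/new_publisher", "bugfix/slack-scopes", "develop"):
    blocked = re.match(PATTERN, branch) is not None
    print(branch, "blocked" if blocked else "allowed")
# feature/new_publisher and bugfix/slack-scopes are allowed; develop is blocked.
```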


@ -1,112 +0,0 @@
from .settings import (
get_system_settings,
get_project_settings,
get_current_project_settings,
get_anatomy_settings,
SystemSettings,
ProjectSettings
)
from .lib import (
PypeLogger,
Logger,
Anatomy,
execute,
run_subprocess,
version_up,
get_asset,
get_workdir_data,
get_version_from_path,
get_last_version_from_path,
get_app_environments_for_context,
source_hash,
get_latest_version,
get_local_site_id,
change_openpype_mongo_url,
create_project_folders,
get_project_basic_paths
)
from .lib.mongo import (
get_default_components
)
from .lib.applications import (
ApplicationManager
)
from .lib.avalon_context import (
BuildWorkfile
)
from . import resources
from .plugin import (
Extractor,
ValidatePipelineOrder,
ValidateContentsOrder,
ValidateSceneOrder,
ValidateMeshOrder,
)
# temporary fix, might
from .action import (
get_errored_instances_from_context,
RepairAction,
RepairContextAction
)
__all__ = [
"get_system_settings",
"get_project_settings",
"get_current_project_settings",
"get_anatomy_settings",
"get_project_basic_paths",
"SystemSettings",
"ProjectSettings",
"PypeLogger",
"Logger",
"Anatomy",
"execute",
"get_default_components",
"ApplicationManager",
"BuildWorkfile",
# Resources
"resources",
# plugin classes
"Extractor",
# ordering
"ValidatePipelineOrder",
"ValidateContentsOrder",
"ValidateSceneOrder",
"ValidateMeshOrder",
# action
"get_errored_instances_from_context",
"RepairAction",
"RepairContextAction",
# get contextual data
"version_up",
"get_asset",
"get_workdir_data",
"get_version_from_path",
"get_last_version_from_path",
"get_app_environments_for_context",
"source_hash",
"run_subprocess",
"get_latest_version",
"get_local_site_id",
"change_openpype_mongo_url",
"get_project_basic_paths",
"create_project_folders"
]


@ -164,7 +164,6 @@ def get_linked_representation_id(
# Recursive graph lookup for inputs
{"$graphLookup": graph_lookup}
]
conn = get_project_connection(project_name)
result = conn.aggregate(query_pipeline)
referenced_version_ids = _process_referenced_pipeline_result(
@ -213,7 +212,7 @@ def _process_referenced_pipeline_result(result, link_type):
for output in sorted(outputs_recursive, key=lambda o: o["depth"]):
output_links = output.get("data", {}).get("inputLinks")
if not output_links:
if not output_links and output["type"] != "hero_version":
continue
# Leaf
@ -232,6 +231,9 @@ def _process_referenced_pipeline_result(result, link_type):
def _filter_input_links(input_links, link_type, correctly_linked_ids):
if not input_links: # to handle hero versions
return
for input_link in input_links:
if link_type and input_link["type"] != link_type:
continue


@ -8,6 +8,7 @@ exists is used.
import os
from abc import ABCMeta, abstractmethod
import platform
import six
@ -187,11 +188,19 @@ class HostDirmap(object):
self.log.debug("local overrides {}".format(active_overrides))
self.log.debug("remote overrides {}".format(remote_overrides))
current_platform = platform.system().lower()
for root_name, active_site_dir in active_overrides.items():
remote_site_dir = (
remote_overrides.get(root_name)
or sync_settings["sites"][remote_site]["root"][root_name]
)
if isinstance(remote_site_dir, dict):
remote_site_dir = remote_site_dir.get(current_platform)
if not remote_site_dir:
continue
if os.path.isdir(active_site_dir):
if "destination-path" not in mapping:
mapping["destination-path"] = []


@ -11,9 +11,15 @@ from openpype.pipeline import (
)
from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
from openpype.lib import prepare_template_data
from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS
class RenderCreator(Creator):
"""Creates 'render' instance for publishing.
Result of a 'render' instance is a video or an image sequence for a particular
composition, based on the configuration in its RenderQueue.
"""
identifier = "render"
label = "Render"
family = "render"
@ -28,45 +34,6 @@ class RenderCreator(Creator):
["RenderCreator"]
["defaults"])
def get_icon(self):
return resources.get_openpype_splash_filepath()
def collect_instances(self):
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='render' or 'renderLocal', use them
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family", '').replace("Local", ''))
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
instance = CreatedInstance.from_existing(
instance_data, self
)
self._add_instance_to_context(instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
api.get_stub().imprint(created_inst.get("instance_id"),
created_inst.data_to_store())
subset_change = _changes.get("subset")
if subset_change:
api.get_stub().rename_item(created_inst.data["members"][0],
subset_change[1])
def remove_instances(self, instances):
for instance in instances:
self._remove_instance_from_context(instance)
self.host.remove_instance(instance)
subset = instance.data["subset"]
comp_id = instance.data["members"][0]
comp = api.get_stub().get_item(comp_id)
if comp:
new_comp_name = comp.name.replace(subset, '')
if not new_comp_name:
new_comp_name = "dummyCompName"
api.get_stub().rename_item(comp_id,
new_comp_name)
def create(self, subset_name_from_ui, data, pre_create_data):
stub = api.get_stub() # only after After Effects is up
if pre_create_data.get("use_selection"):
@ -82,10 +49,19 @@ class RenderCreator(Creator):
"if 'useSelection' or create at least "
"one composition."
)
use_composition_name = (pre_create_data.get("use_composition_name") or
len(comps) > 1)
for comp in comps:
if pre_create_data.get("use_composition_name"):
composition_name = comp.name
if use_composition_name:
if "{composition}" not in subset_name_from_ui.lower():
subset_name_from_ui += "{Composition}"
composition_name = re.sub(
"[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
"",
comp.name
)
dynamic_fill = prepare_template_data({"composition":
composition_name})
subset_name = subset_name_from_ui.format(**dynamic_fill)
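For illustration, a minimal sketch of the composition-name cleanup used above; the value of `SUBSET_NAME_ALLOWED_SYMBOLS` below is a hypothetical stand-in for the constant imported from `openpype.pipeline.create`:

```
import re

# Hypothetical stand-in for SUBSET_NAME_ALLOWED_SYMBOLS: a regex character
# set of characters that are allowed in subset names.
SUBSET_NAME_ALLOWED_SYMBOLS = "a-zA-Z0-9_."

comp_name = "Main Comp (v02)!"
composition_name = re.sub(
    "[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS), "", comp_name
)
print(composition_name)  # 'MainCompv02' - disallowed characters are stripped
```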
@ -129,8 +105,72 @@ class RenderCreator(Creator):
]
return output
def get_icon(self):
return resources.get_openpype_splash_filepath()
def collect_instances(self):
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='render' or 'renderLocal', use them
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family", '').replace("Local", ''))
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
instance = CreatedInstance.from_existing(
instance_data, self
)
self._add_instance_to_context(instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
api.get_stub().imprint(created_inst.get("instance_id"),
created_inst.data_to_store())
subset_change = _changes.get("subset")
if subset_change:
api.get_stub().rename_item(created_inst.data["members"][0],
subset_change[1])
def remove_instances(self, instances):
for instance in instances:
self._remove_instance_from_context(instance)
self.host.remove_instance(instance)
subset = instance.data["subset"]
comp_id = instance.data["members"][0]
comp = api.get_stub().get_item(comp_id)
if comp:
new_comp_name = comp.name.replace(subset, '')
if not new_comp_name:
new_comp_name = "dummyCompName"
api.get_stub().rename_item(comp_id,
new_comp_name)
def get_detail_description(self):
return """Creator for Render instances"""
return """Creator for Render instances
The main publishable item in AfterEffects will be of the `render` family.
The result of this item (instance) is an image sequence or video that can
be a final delivery product, or be loaded and used in other DCCs.
Select a single composition and create an instance of the 'render' family,
or turn off 'Use selection' to create instances for all compositions.
'Use composition name in subset' allows you to explicitly add the
composition name into the created subset name.
The position of the composition name can be set in
`project_settings/global/tools/creator/subset_name_profiles` with some
form of the '{composition}' placeholder.
The composition name will be used implicitly if multiple compositions
should be handled at the same time.
If the '{composition}' placeholder is not used in 'subset_name_profiles',
the composition name will be capitalized and appended to the end of the
subset name if necessary.
If the composition name is used, it is cleaned of characters
that would cause issues in published file names.
"""
def get_dynamic_data(self, variant, task_name, asset_doc,
project_name, host_name, instance):


@ -44,7 +44,7 @@ class AppendBlendLoader(plugin.AssetLoader):
"""
representations = ["blend"]
families = ["*"]
families = ["workfile"]
label = "Append Workfile"
order = 9
@ -68,7 +68,7 @@ class ImportBlendLoader(plugin.AssetLoader):
"""
representations = ["blend"]
families = ["*"]
families = ["workfile"]
label = "Import Workfile"
order = 9


@ -108,9 +108,9 @@ class ExtractRender(pyblish.api.InstancePlugin):
output = process.communicate()[0]
if process.returncode != 0:
raise ValueError(output.decode("utf-8"))
raise ValueError(output.decode("utf-8", errors="backslashreplace"))
self.log.debug(output.decode("utf-8"))
self.log.debug(output.decode("utf-8", errors="backslashreplace"))
# Generate representations.
extension = collection.tail[1:]


@ -225,12 +225,12 @@ class HoudiniCreator(NewCreator, HoudiniCreatorBase):
self._add_instance_to_context(created_instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
for created_inst, changes in update_list:
instance_node = hou.node(created_inst.get("instance_node"))
new_values = {
key: new_value
for key, (_old_value, new_value) in _changes.items()
key: changes[key].new_value
for key in changes.changed_keys
}
imprint(
instance_node,
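The loop above consumes the new change-tracking API (`TrackChangesItem`) that is introduced later in this diff. A minimal usage sketch with hypothetical instance data; the import path is an assumption:

```
from openpype.pipeline.create import TrackChangesItem  # import path assumed

changes = TrackChangesItem(
    {"subset": "renderMain", "active": True},  # original instance data
    {"subset": "renderHero", "active": True},  # current instance data
)
new_values = {
    key: changes[key].new_value
    for key in changes.changed_keys
}
print(new_values)  # {'subset': 'renderHero'}
```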


@ -1,4 +1,5 @@
import os
import re
import logging
import platform
@ -66,7 +67,7 @@ def generate_shelves():
)
continue
mandatory_attributes = {'name', 'script'}
mandatory_attributes = {'label', 'script'}
for tool_definition in shelf_definition.get('tools_list'):
# We verify that the label and script attributes of the tool
# are set
@ -152,31 +153,32 @@ def get_or_create_tool(tool_definition, shelf):
Returns:
hou.Tool: The tool updated or the new one
"""
existing_tools = shelf.tools()
tool_label = tool_definition.get('label')
tool_label = tool_definition.get("label")
if not tool_label:
log.warning("Skipped shelf without label")
return
script_path = tool_definition["script"]
if not script_path or not os.path.exists(script_path):
log.warning("This path doesn't exist - {}".format(script_path))
return
existing_tools = shelf.tools()
existing_tool = next(
(tool for tool in existing_tools if tool.label() == tool_label),
None
)
with open(script_path) as stream:
script = stream.read()
tool_definition["script"] = script
if existing_tool:
tool_definition.pop('name', None)
tool_definition.pop('label', None)
tool_definition.pop("label", None)
existing_tool.setData(**tool_definition)
return existing_tool
tool_name = tool_label.replace(' ', '_').lower()
if not os.path.exists(tool_definition['script']):
log.warning(
"This path doesn't exist - {}".format(tool_definition['script'])
)
return
with open(tool_definition['script']) as f:
script = f.read()
tool_definition.update({'script': script})
new_tool = hou.shelves.newTool(name=tool_name, **tool_definition)
return new_tool
tool_name = re.sub(r"[^\w\d]+", "_", tool_label).lower()
return hou.shelves.newTool(name=tool_name, **tool_definition)


@ -78,12 +78,12 @@ class MaxCreator(Creator, MaxCreatorBase):
self._add_instance_to_context(created_instance)
def update_instances(self, update_list):
for created_inst, _changes in update_list:
for created_inst, changes in update_list:
instance_node = created_inst.get("instance_node")
new_values = {
key: new_value
for key, (_old_value, new_value) in _changes.items()
key: changes[key].new_value
for key in changes.changed_keys
}
imprint(
instance_node,


@ -54,6 +54,7 @@ class CreateRender(plugin.Creator):
tileRendering (bool): Instance is set to tile rendering mode. We
won't submit actual render, but we'll make publish job to wait
for Tile Assembly job done and then publish.
strict_error_checking (bool): Enable/disable strict error checking on Deadline.
See Also:
https://pype.club/docs/artist_hosts_maya#creating-basic-render-setup
@ -271,6 +272,9 @@ class CreateRender(plugin.Creator):
secondary_pool = pool_setting["secondary_pool"]
self.data["secondaryPool"] = self._set_default_pool(pool_names,
secondary_pool)
strict_error_checking = maya_submit_dl.get("strict_error_checking",
True)
self.data["strict_error_checking"] = strict_error_checking
if muster_enabled:
self.log.info(">>> Loading Muster credentials ...")


@ -81,10 +81,11 @@ class VRayProxyLoader(load.LoaderPlugin):
c = colors.get(family)
if c is not None:
cmds.setAttr("{0}.useOutlinerColor".format(group_node), 1)
cmds.setAttr("{0}.outlinerColor".format(group_node),
(float(c[0])/255),
(float(c[1])/255),
(float(c[2])/255)
cmds.setAttr(
"{0}.outlinerColor".format(group_node),
(float(c[0]) / 255),
(float(c[1]) / 255),
(float(c[2]) / 255)
)
return containerise(
@ -101,7 +102,7 @@ class VRayProxyLoader(load.LoaderPlugin):
assert cmds.objExists(node), "Missing container"
members = cmds.sets(node, query=True) or []
vraymeshes = cmds.ls(members, type="VRayMesh")
vraymeshes = cmds.ls(members, type="VRayProxy")
assert vraymeshes, "Cannot find VRayMesh in container"
# get all representations for this version


@ -318,7 +318,9 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
"aovSeparator": layer_render_products.layer_data.aov_separator, # noqa: E501
"renderSetupIncludeLights": render_instance.data.get(
"renderSetupIncludeLights"
)
),
"strict_error_checking": render_instance.data.get(
"strict_error_checking")
}
# Collect Deadline url if Deadline module is enabled


@ -2,11 +2,13 @@ import os
import types
import maya.cmds as cmds
from mtoa.core import createOptions
import pyblish.api
from openpype.pipeline.publish import (
RepairAction,
ValidateContentsOrder,
PublishValidationError
)
@ -34,8 +36,9 @@ class ValidateAssRelativePaths(pyblish.api.InstancePlugin):
"defaultArnoldRenderOptions.pspath"
)
except ValueError:
assert False, ("Can not validate, render setting were not opened "
"yet so Arnold setting cannot be validate")
raise PublishValidationError(
"Default Arnold options has not been created yet."
)
scene_dir, scene_basename = os.path.split(cmds.file(q=True, loc=True))
scene_name, _ = os.path.splitext(scene_basename)
@ -66,6 +69,8 @@ class ValidateAssRelativePaths(pyblish.api.InstancePlugin):
@classmethod
def repair(cls, instance):
createOptions()
texture_path = cmds.getAttr("defaultArnoldRenderOptions.tspath")
procedural_path = cmds.getAttr("defaultArnoldRenderOptions.pspath")


@ -0,0 +1,18 @@
from maya import cmds
import pyblish.api
from openpype.pipeline import PublishValidationError
class ValidateVray(pyblish.api.InstancePlugin):
"""Validate general Vray setup."""
order = pyblish.api.ValidatorOrder
label = 'VRay'
hosts = ["maya"]
families = ["vrayproxy"]
def process(self, instance):
# Validate vray plugin is loaded.
if not cmds.pluginInfo("vrayformaya", query=True, loaded=True):
raise PublishValidationError("Vray plugin is not loaded.")


@ -53,12 +53,18 @@ class GizmoMenu():
item_type = item.get("sourcetype")
if item_type == ("python" or "file"):
if item_type == "python":
parent.addCommand(
item["title"],
command=str(item["command"]),
icon=item.get("icon"),
shortcut=item.get("hotkey")
shortcut=item.get("shortcut")
)
elif item_type == "file":
parent.addCommand(
item['title'],
"nuke.createNode('{}')".format(item.get('file_name')),
shortcut=item.get('shortcut')
)
# add separator


@ -1,7 +1,7 @@
import os
import nuke
import pyblish.api
import openpype.api as api
from openpype.lib import get_version_from_path
import openpype.hosts.nuke.api as napi
from openpype.pipeline import KnownPublishError
@ -57,7 +57,7 @@ class CollectContextData(pyblish.api.ContextPlugin):
"fps": root_node['fps'].value(),
"currentFile": current_file,
"version": int(api.get_version_from_path(current_file)),
"version": int(get_version_from_path(current_file)),
"host": pyblish.api.current_host(),
"hostVersion": nuke.NUKE_VERSION_STRING


@ -5,7 +5,7 @@ from openpype.lib import BoolDef
from openpype.pipeline import (
Creator,
CreatedInstance,
legacy_io
CreatorError
)
from openpype.lib import prepare_template_data
from openpype.pipeline.create import SUBSET_NAME_ALLOWED_SYMBOLS
@ -13,27 +13,16 @@ from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances
class ImageCreator(Creator):
"""Creates image instance for publishing."""
"""Creates image instance for publishing.
Result of an 'image' instance is an image of all visible layers, or image(s)
of selected layers.
"""
identifier = "image"
label = "Image"
family = "image"
description = "Image creator"
def collect_instances(self):
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='image'
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family"))
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
layer = api.stub().get_layer(instance_data["members"][0])
instance_data["layer"] = layer
instance = CreatedInstance.from_existing(
instance_data, self
)
self._add_instance_to_context(instance)
def create(self, subset_name_from_ui, data, pre_create_data):
groups_to_create = []
top_layers_to_wrap = []
@ -59,9 +48,10 @@ class ImageCreator(Creator):
try:
group = stub.group_selected_layers(subset_name_from_ui)
except:
raise ValueError("Cannot group locked Bakcground layer!")
raise CreatorError("Cannot group locked Background layer!")
groups_to_create.append(group)
# create empty group if nothing selected
if not groups_to_create and not top_layers_to_wrap:
group = stub.create_group(subset_name_from_ui)
groups_to_create.append(group)
@ -73,13 +63,16 @@ class ImageCreator(Creator):
groups_to_create.append(group)
layer_name = ''
creating_multiple_groups = len(groups_to_create) > 1
# use artist chosen option OR force layer if more subsets are created
# to differentiate them
use_layer_name = (pre_create_data.get("use_layer_name") or
len(groups_to_create) > 1)
for group in groups_to_create:
subset_name = subset_name_from_ui # reset to name from creator UI
layer_names_in_hierarchy = []
created_group_name = self._clean_highlights(stub, group.name)
if creating_multiple_groups:
if use_layer_name:
layer_name = re.sub(
"[^{}]+".format(SUBSET_NAME_ALLOWED_SYMBOLS),
"",
@ -112,6 +105,21 @@ class ImageCreator(Creator):
stub.rename_layer(group.id,
stub.PUBLISH_ICON + created_group_name)
def collect_instances(self):
for instance_data in cache_and_get_instances(self):
# legacy instances have family=='image'
creator_id = (instance_data.get("creator_identifier") or
instance_data.get("family"))
if creator_id == self.identifier:
instance_data = self._handle_legacy(instance_data)
layer = api.stub().get_layer(instance_data["members"][0])
instance_data["layer"] = layer
instance = CreatedInstance.from_existing(
instance_data, self
)
self._add_instance_to_context(instance)
def update_instances(self, update_list):
self.log.debug("update_list:: {}".format(update_list))
for created_inst, _changes in update_list:
@ -137,12 +145,42 @@ class ImageCreator(Creator):
label="Create only for selected"),
BoolDef("create_multiple",
default=True,
label="Create separate instance for each selected")
label="Create separate instance for each selected"),
BoolDef("use_layer_name",
default=False,
label="Use layer name in subset")
]
return output
def get_detail_description(self):
return """Creator for Image instances"""
return """Creator for Image instances
The main publishable item in Photoshop will be of the `image` family. The
result of this item (instance) is a picture that can be loaded and used
in other DCCs (for example as a single layer in an AfterEffects
composition, a reference in Maya, etc.).
There are a couple of options for what to publish:
- separate image per selected layer (or group of layers)
- one image for all selected layers
- all visible layers (groups) flattened into single image
In most cases you will want to keep `Create only for selected`
toggled on and select what you would like to publish.
Toggling this option off allows you to create an instance for all
visible layers without needing to select them explicitly.
Use 'Create separate instance for each selected' to create separate
images per selected layer (group of layers).
'Use layer name in subset' will explicitly add the layer name into the
subset name. The position of this name is configurable in
`project_settings/global/tools/creator/subset_name_profiles`.
If the layer placeholder ({layer}) is not used in `subset_name_profiles`
but the layer name should be used (set explicitly in the UI, or implicitly
when multiple images are created), it is added in capitalized form
as a suffix to the subset name.
"""
def _handle_legacy(self, instance_data):
"""Converts old instances to new format."""


@ -37,7 +37,7 @@ class TrayPublisherHost(HostBase, IPublishHost):
return HostContext.get_context_data()
def update_context_data(self, data, changes):
HostContext.save_context_data(data, changes)
HostContext.save_context_data(data)
def set_project_name(self, project_name):
# TODO Deregister project specific plugins and register new project


@ -33,6 +33,8 @@ class BatchMovieCreator(TrayPublishCreator):
create_allow_context_change = False
version_regex = re.compile(r"^(.+)_v([0-9]+)$")
# Position batch creator after simple creators
order = 110
def __init__(self, project_settings, *args, **kwargs):
super(BatchMovieCreator, self).__init__(project_settings,


@ -117,12 +117,12 @@ def run_subprocess(*args, **kwargs):
full_output = ""
_stdout, _stderr = proc.communicate()
if _stdout:
_stdout = _stdout.decode("utf-8")
_stdout = _stdout.decode("utf-8", errors="backslashreplace")
full_output += _stdout
logger.debug(_stdout)
if _stderr:
_stderr = _stderr.decode("utf-8")
_stderr = _stderr.decode("utf-8", errors="backslashreplace")
# Add additional line break if output already contains stdout
if full_output:
full_output += "\n"


@ -64,6 +64,7 @@ class MayaPluginInfo(object):
# Include all lights flag
RenderSetupIncludeLights = attr.ib(
default="1", validator=_validate_deadline_bool_value)
StrictErrorChecking = attr.ib(default=True)
@attr.s
@ -219,6 +220,8 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
"renderSetupIncludeLights", default_rs_include_lights)
if rs_include_lights not in {"1", "0", True, False}:
rs_include_lights = default_rs_include_lights
strict_error_checking = instance.data.get("strict_error_checking",
True)
plugin_info = MayaPluginInfo(
SceneFile=self.scene_path,
Version=cmds.about(version=True),
@ -227,6 +230,7 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
RenderSetupIncludeLights=rs_include_lights, # noqa
ProjectPath=context.data["workspaceDir"],
UsingRenderLayers=True,
StrictErrorChecking=strict_error_checking
)
plugin_payload = attr.asdict(plugin_info)
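The new `StrictErrorChecking` field ends up in the Deadline plugin payload through `attr.asdict`. A minimal sketch with a cut-down, hypothetical plugin-info class (assuming the `attrs` package, which the plugin already uses):

```
import attr


@attr.s
class PluginInfoSketch(object):
    # Cut-down, hypothetical stand-in for MayaPluginInfo.
    SceneFile = attr.ib(default=None)
    StrictErrorChecking = attr.ib(default=True)


info = PluginInfoSketch(SceneFile="shot010.ma", StrictErrorChecking=False)
print(attr.asdict(info))
# {'SceneFile': 'shot010.ma', 'StrictErrorChecking': False}
```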


@ -35,7 +35,7 @@ class OpenPypeVersion:
self.prerelease = prerelease
is_valid = True
if not major or not minor or not patch:
if major is None or minor is None or patch is None:
is_valid = False
self.is_valid = is_valid
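The switch to `is None` above fixes a falsy-zero pitfall: a version component of 0 is falsy, so the old check flagged perfectly valid versions as invalid. A minimal sketch with a hypothetical version 3.0.1:

```
major, minor, patch = 3, 0, 1  # hypothetical version 3.0.1; minor == 0 is falsy

old_is_invalid = not major or not minor or not patch              # True  (wrong)
new_is_invalid = major is None or minor is None or patch is None  # False (correct)

print(old_is_invalid, new_is_invalid)
```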
@ -157,7 +157,7 @@ def get_openpype_version_from_path(path, build=True):
# fix path for application bundle on macos
if platform.system().lower() == "darwin":
path = os.path.join(path, "Contents", "MacOS", "lib", "Python")
path = os.path.join(path, "MacOS")
version_file = os.path.join(path, "openpype", "version.py")
if not os.path.isfile(version_file):
@ -189,6 +189,11 @@ def get_openpype_executable():
exe_list = config.GetConfigEntryWithDefault("OpenPypeExecutable", "")
dir_list = config.GetConfigEntryWithDefault(
"OpenPypeInstallationDirs", "")
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
dir_list = dir_list.replace("\\ ", " ")
return exe_list, dir_list
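The cleanup above targets paths pasted on macOS, where spaces are usually escaped with a backslash. A minimal sketch with a hypothetical path:

```
# Hypothetical executable path pasted from a macOS terminal.
pasted = "/Applications/Open\\ Pype\\ 3.15/Contents/MacOS/openpype_console"

cleaned = pasted.replace("\\ ", " ")
print(cleaned)  # /Applications/Open Pype 3.15/Contents/MacOS/openpype_console
```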
@ -218,8 +223,8 @@ def get_requested_openpype_executable(
requested_version_obj = OpenPypeVersion.from_string(requested_version)
if not requested_version_obj:
print((
">>> Requested version does not match version regex \"{}\""
).format(VERSION_REGEX))
">>> Requested version '{}' does not match version regex '{}'"
).format(requested_version, VERSION_REGEX))
return None
print((
@ -272,7 +277,8 @@ def get_requested_openpype_executable(
# Deadline decide.
exe_list = [
os.path.join(version_dir, "openpype_console.exe"),
os.path.join(version_dir, "openpype_console")
os.path.join(version_dir, "openpype_console"),
os.path.join(version_dir, "MacOS", "openpype_console")
]
return FileUtils.SearchFileList(";".join(exe_list))


@ -73,7 +73,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
"""
# fix path for application bundle on macos
if platform.system().lower() == "darwin":
path = os.path.join(path, "Contents", "MacOS", "lib", "Python")
path = os.path.join(path, "MacOS")
version_file = os.path.join(path, "openpype", "version.py")
if not os.path.isfile(version_file):
@ -107,8 +107,11 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
"Scanning for compatible requested "
f"version {requested_version}"))
dir_list = self.GetConfigEntry("OpenPypeInstallationDirs")
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
dir_list = dir_list.replace("\\ ", " ")
install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
if dir:
if install_dir:
sub_dirs = [
f.path for f in os.scandir(install_dir)
if f.is_dir()
@ -120,6 +123,9 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
openpype_versions.append((version, subdir))
exe_list = self.GetConfigEntry("OpenPypeExecutable")
# clean '\ ' for MacOS pasting
if platform.system().lower() == "darwin":
exe_list = exe_list.replace("\\ ", " ")
exe = FileUtils.SearchFileList(exe_list)
if openpype_versions:
# if looking for requested compatible version,
@ -161,7 +167,9 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
os.path.join(
compatible_versions[-1][1], "openpype_console.exe"),
os.path.join(
compatible_versions[-1][1], "openpype_console")
compatible_versions[-1][1], "openpype_console"),
os.path.join(
compatible_versions[-1][1], "MacOS", "openpype_console")
]
exe = FileUtils.SearchFileList(";".join(exe_list))


@ -204,10 +204,10 @@ def info_about_input(oiiotool_path, filepath):
_stdout, _stderr = popen.communicate()
output = ""
if _stdout:
output += _stdout.decode("utf-8")
output += _stdout.decode("utf-8", errors="backslashreplace")
if _stderr:
output += _stderr.decode("utf-8")
output += _stderr.decode("utf-8", errors="backslashreplace")
output = output.replace("\r\n", "\n")
xml_started = False


@ -316,7 +316,7 @@ def main_loop(ftrack_url):
statuser_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not statuser_thread.isAlive():
elif not statuser_thread.is_alive():
statuser_thread.join()
statuser_thread = None
ftrack_accessible = False
@ -359,7 +359,7 @@ def main_loop(ftrack_url):
storer_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not storer_thread.isAlive():
elif not storer_thread.is_alive():
if storer_thread.mongo_error:
raise MongoPermissionsError()
storer_thread.join()
@ -396,7 +396,7 @@ def main_loop(ftrack_url):
processor_failed_count = 0
# If thread failed test Ftrack and Mongo connection
elif not processor_thread.isAlive():
elif not processor_thread.is_alive():
if processor_thread.mongo_error:
raise Exception(
"Exiting because have issue with acces to MongoDB"


@ -259,7 +259,7 @@ class CredentialsDialog(QtWidgets.QDialog):
# If there is an existing server thread running we need to stop it.
if self._login_server_thread:
if self._login_server_thread.isAlive():
if self._login_server_thread.is_alive():
self._login_server_thread.stop()
self._login_server_thread.join()
self._login_server_thread = None
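`isAlive()` was removed from `threading.Thread` in Python 3.9; `is_alive()` is the supported spelling that the ftrack hunks above switch to. A minimal sketch:

```
import threading
import time

thread = threading.Thread(target=time.sleep, args=(0.1,))
thread.start()
print(thread.is_alive())  # True while the target is still running
thread.join()
print(thread.is_alive())  # False once it has finished
```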


@ -19,6 +19,8 @@ oauth_config:
- chat:write.public
- files:write
- channels:read
- users:read
- usergroups:read
settings:
org_deploy_enabled: false
socket_mode_enabled: false


@ -60,6 +60,7 @@ class BaseAnatomy(object):
def __init__(self, project_doc, local_settings, site_name):
project_name = project_doc["name"]
self.project_name = project_name
self.project_code = project_doc["data"]["code"]
if (site_name and
site_name not in ["studio", "local", get_local_site_id()]):


@ -183,6 +183,319 @@ def prepare_failed_creator_operation_info(
}
_EMPTY_VALUE = object()
class TrackChangesItem(object):
"""Helper object to track changes in data.
Has access to the full old and new data and creates a deep copy of them,
so there is no need to copy the values before passing them in.
Can work as a dictionary if the old or new value is a dictionary. In
that case the received object is another 'TrackChangesItem'.
The goal is to be able to get the old or new value as it was, or only the
changed values, or information about removed/changed keys, and all of
that on any "dictionary level".
```
# Example of possible usages
>>> old_value = {
... "key_1": "value_1",
... "key_2": {
... "key_sub_1": 1,
... "key_sub_2": {
... "enabled": True
... }
... },
... "key_3": "value_2"
... }
>>> new_value = {
... "key_1": "value_1",
... "key_2": {
... "key_sub_2": {
... "enabled": False
... },
... "key_sub_3": 3
... },
... "key_3": "value_3"
... }
>>> changes = TrackChangesItem(old_value, new_value)
>>> changes.changed
True
>>> changes["key_2"]["key_sub_1"].new_value is None
True
>>> list(sorted(changes.changed_keys))
['key_2', 'key_3']
>>> changes["key_2"]["key_sub_2"]["enabled"].changed
True
>>> changes["key_2"].removed_keys
{'key_sub_1'}
>>> list(sorted(changes["key_2"].available_keys))
['key_sub_1', 'key_sub_2', 'key_sub_3']
>>> changes.new_value == new_value
True
# Get only changed values
only_changed_new_values = {
key: changes[key].new_value
for key in changes.changed_keys
}
```
Args:
old_value (Any): Old value.
new_value (Any): New value.
"""
def __init__(self, old_value, new_value):
self._changed = old_value != new_value
# Resolve if value is '_EMPTY_VALUE' after comparison of the values
if old_value is _EMPTY_VALUE:
old_value = None
if new_value is _EMPTY_VALUE:
new_value = None
self._old_value = copy.deepcopy(old_value)
self._new_value = copy.deepcopy(new_value)
self._old_is_dict = isinstance(old_value, dict)
self._new_is_dict = isinstance(new_value, dict)
self._old_keys = None
self._new_keys = None
self._available_keys = None
self._removed_keys = None
self._changed_keys = None
self._sub_items = None
def __getitem__(self, key):
"""Getter looks into subitems if object is dictionary."""
if self._sub_items is None:
self._prepare_sub_items()
return self._sub_items[key]
def __bool__(self):
"""Boolean of object is if old and new value are the same."""
return self._changed
def get(self, key, default=None):
"""Try to get sub item."""
if self._sub_items is None:
self._prepare_sub_items()
return self._sub_items.get(key, default)
@property
def old_value(self):
"""Get copy of old value.
Returns:
Any: Whatever old value was.
"""
return copy.deepcopy(self._old_value)
@property
def new_value(self):
"""Get copy of new value.
Returns:
Any: Whatever new value was.
"""
return copy.deepcopy(self._new_value)
@property
def changed(self):
"""Value changed.
Returns:
bool: If data changed.
"""
return self._changed
@property
def is_dict(self):
"""Object can be used as dictionary.
Returns:
bool: When can be used that way.
"""
return self._old_is_dict or self._new_is_dict
@property
def changes(self):
"""Get changes in raw data.
This method should be used only if 'is_dict' value is 'True'.
Returns:
Dict[str, Tuple[Any, Any]]: Changes are by key in tuple
(<old value>, <new value>). If 'is_dict' is 'False' then
output is always empty dictionary.
"""
output = {}
if not self.is_dict:
return output
old_value = self.old_value
new_value = self.new_value
for key in self.changed_keys:
_old = None
_new = None
if self._old_is_dict:
_old = old_value.get(key)
if self._new_is_dict:
_new = new_value.get(key)
output[key] = (_old, _new)
return output
# Methods/properties that can be used when 'is_dict' is 'True'
@property
def old_keys(self):
"""Keys from old value.
Empty set is returned if old value is not a dict.
Returns:
Set[str]: Keys from old value.
"""
if self._old_keys is None:
self._prepare_keys()
return set(self._old_keys)
@property
def new_keys(self):
"""Keys from new value.
Empty set is returned if new value is not a dict.
Returns:
Set[str]: Keys from new value.
"""
if self._new_keys is None:
self._prepare_keys()
return set(self._new_keys)
@property
def changed_keys(self):
"""Keys that has changed from old to new value.
Empty set is returned if both old and new value are not a dict.
Returns:
Set[str]: Changed keys.
"""
if self._changed_keys is None:
self._prepare_sub_items()
return set(self._changed_keys)
@property
def available_keys(self):
"""All keys that are available in old and new value.
Empty set is returned if both old and new value are not a dict.
Output is Union of 'old_keys' and 'new_keys'.
Returns:
Set[str]: All keys from old and new value.
"""
if self._available_keys is None:
self._prepare_keys()
return set(self._available_keys)
@property
def removed_keys(self):
"""Key that are not available in new value but were in old value.
Returns:
Set[str]: All removed keys.
"""
if self._removed_keys is None:
self._prepare_sub_items()
return set(self._removed_keys)
def _prepare_keys(self):
old_keys = set()
new_keys = set()
if self._old_is_dict and self._new_is_dict:
old_keys = set(self._old_value.keys())
new_keys = set(self._new_value.keys())
elif self._old_is_dict:
old_keys = set(self._old_value.keys())
elif self._new_is_dict:
new_keys = set(self._new_value.keys())
self._old_keys = old_keys
self._new_keys = new_keys
self._available_keys = old_keys | new_keys
self._removed_keys = old_keys - new_keys
def _prepare_sub_items(self):
sub_items = {}
changed_keys = set()
old_keys = self.old_keys
new_keys = self.new_keys
new_value = self.new_value
old_value = self.old_value
if self._old_is_dict and self._new_is_dict:
for key in self.available_keys:
item = TrackChangesItem(
old_value.get(key), new_value.get(key)
)
sub_items[key] = item
if item.changed or key not in old_keys or key not in new_keys:
changed_keys.add(key)
elif self._old_is_dict:
old_keys = set(old_value.keys())
available_keys = set(old_keys)
changed_keys = set(available_keys)
for key in available_keys:
# NOTE Use '_EMPTY_VALUE' because old value could be 'None'
# which would result in "unchanged" item
sub_items[key] = TrackChangesItem(
old_value.get(key), _EMPTY_VALUE
)
elif self._new_is_dict:
new_keys = set(new_value.keys())
available_keys = set(new_keys)
changed_keys = set(available_keys)
for key in available_keys:
# NOTE Use '_EMPTY_VALUE' because new value could be 'None'
# which would result in "unchanged" item
sub_items[key] = TrackChangesItem(
_EMPTY_VALUE, new_value.get(key)
)
self._sub_items = sub_items
self._changed_keys = changed_keys
class InstanceMember:
"""Representation of instance member.
@ -300,6 +613,10 @@ class AttributeValues(object):
return list(self._attr_defs)
@property
def origin_data(self):
return copy.deepcopy(self._origin_data)
def data_to_store(self):
"""Create new dictionary with data to store.
@ -316,30 +633,6 @@ class AttributeValues(object):
output[key] = attr_def.default
return output
@staticmethod
def calculate_changes(new_data, old_data):
"""Calculate changes of 2 dictionary objects."""
changes = {}
for key, new_value in new_data.items():
old_value = old_data.get(key)
if old_value != new_value:
changes[key] = (old_value, new_value)
return changes
def changes(self):
return self.calculate_changes(self._data, self._origin_data)
def apply_changes(self, changes):
for key, item in changes.items():
old_value, new_value = item
if new_value is None:
if key in self:
self.pop(key)
elif self.get(key) != new_value:
self[key] = new_value
def get_serialized_attr_defs(self):
"""Serialize attribute definitions to json serializable types.
@ -467,36 +760,9 @@ class PublishAttributes:
output[key] = attr_value.data_to_store()
return output
def changes(self):
"""Return changes per each key."""
changes = {}
for key, attr_val in self._data.items():
attr_changes = attr_val.changes()
if attr_changes:
if key not in changes:
changes[key] = {}
changes[key].update(attr_val)
for key, value in self._origin_data.items():
if key not in self._data:
changes[key] = (value, None)
return changes
def apply_changes(self, changes):
for key, item in changes.items():
if isinstance(item, dict):
self._data[key].apply_changes(item)
continue
old_value, new_value = item
if new_value is not None:
raise ValueError(
"Unexpected type \"{}\" expected None".format(
str(type(new_value))
)
)
self.pop(key)
@property
def origin_data(self):
return copy.deepcopy(self._origin_data)
def set_publish_plugins(self, attr_plugins):
"""Set publish plugins attribute definitions."""
@ -763,6 +1029,10 @@ class CreatedInstance:
return label
return self._group_label
@property
def origin_data(self):
return copy.deepcopy(self._orig_data)
@property
def creator_identifier(self):
return self._data["creator_identifier"]
@ -817,29 +1087,7 @@ class CreatedInstance:
def changes(self):
"""Calculate and return changes."""
changes = {}
new_keys = set()
for key, new_value in self._data.items():
new_keys.add(key)
if key in ("creator_attributes", "publish_attributes"):
continue
old_value = self._orig_data.get(key)
if old_value != new_value:
changes[key] = (old_value, new_value)
creator_attr_changes = self.creator_attributes.changes()
if creator_attr_changes:
changes["creator_attributes"] = creator_attr_changes
publish_attr_changes = self.publish_attributes.changes()
if publish_attr_changes:
changes["publish_attributes"] = publish_attr_changes
for key, old_value in self._orig_data.items():
if key not in new_keys:
changes[key] = (old_value, None)
return changes
return TrackChangesItem(self._orig_data, self.data_to_store())
def mark_as_stored(self):
"""Should be called when instance data are stored.
@ -1002,59 +1250,6 @@ class CreatedInstance:
return obj
def remote_changes(self):
"""Prepare serializable changes on remote side.
Returns:
Dict[str, Any]: Prepared changes that can be send to client side.
"""
return {
"changes": self.changes(),
"asset_is_valid": self._asset_is_valid,
"task_is_valid": self._task_is_valid,
}
def update_from_remote(self, remote_changes):
"""Apply changes from remote side on client side.
Args:
remote_changes (Dict[str, Any]): Changes created on remote side.
"""
self._asset_is_valid = remote_changes["asset_is_valid"]
self._task_is_valid = remote_changes["task_is_valid"]
changes = remote_changes["changes"]
creator_attributes = changes.pop("creator_attributes", None) or {}
publish_attributes = changes.pop("publish_attributes", None) or {}
if changes:
self.apply_changes(changes)
if creator_attributes:
self.creator_attributes.apply_changes(creator_attributes)
if publish_attributes:
self.publish_attributes.apply_changes(publish_attributes)
def apply_changes(self, changes):
"""Apply changes created via 'changes'.
Args:
Dict[str, Tuple[Any, Any]]: Instance changes to apply. Same values
are kept untouched.
"""
for key, item in changes.items():
old_value, new_value = item
if new_value is None:
if key in self:
self.pop(key)
else:
current_value = self.get(key)
if current_value != new_value:
self[key] = new_value
# Context validation related methods/properties
@property
def has_set_asset(self):
@ -1237,6 +1432,53 @@ class CreateContext:
"""Access to global publish attributes."""
return self._publish_attributes
def get_sorted_creators(self, identifiers=None):
"""Sorted creators by 'order' attribute.
Args:
identifiers (Iterable[str]): Filter creators by identifiers. All
creators are returned if 'None' is passed.
Returns:
List[BaseCreator]: Sorted creator plugins by 'order' value.
"""
if identifiers is not None:
identifiers = set(identifiers)
creators = [
creator
for identifier, creator in self.creators.items()
if identifier in identifiers
]
else:
creators = self.creators.values()
return sorted(
creators, key=lambda creator: creator.order
)
@property
def sorted_creators(self):
"""Sorted creators by 'order' attribute.
Returns:
List[BaseCreator]: Sorted creator plugins by 'order' value.
"""
return self.get_sorted_creators()
@property
def sorted_autocreators(self):
"""Sorted auto-creators by 'order' attribute.
Returns:
List[AutoCreator]: Sorted plugins by 'order' value.
"""
return sorted(
self.autocreators.values(), key=lambda creator: creator.order
)
@classmethod
def get_host_misssing_methods(cls, host):
"""Collect missing methods from host.
@ -1515,11 +1757,10 @@ class CreateContext:
def context_data_changes(self):
"""Changes of attributes."""
changes = {}
publish_attribute_changes = self._publish_attributes.changes()
if publish_attribute_changes:
changes["publish_attributes"] = publish_attribute_changes
return changes
return TrackChangesItem(
self._original_context_data, self.context_data_to_store()
)
def creator_adds_instance(self, instance):
"""Creator adds new instance to context.
@ -1599,6 +1840,9 @@ class CreateContext:
)
])
def _remove_instance(self, instance):
self._instances_by_id.pop(instance.id, None)
def creator_removed_instance(self, instance):
"""When creator removes instance context should be acknowledged.
@ -1610,7 +1854,7 @@ class CreateContext:
from scene metadata.
"""
self._instances_by_id.pop(instance.id, None)
self._remove_instance(instance)
def add_convertor_item(self, convertor_identifier, label):
self.convertor_items_by_id[convertor_identifier] = ConvertorItem(
@ -1654,7 +1898,7 @@ class CreateContext:
# Collect instances
error_message = "Collection of instances for creator {} failed. {}"
failed_info = []
for creator in self.creators.values():
for creator in self.sorted_creators:
label = creator.label
identifier = creator.identifier
failed = False
@ -1726,7 +1970,8 @@ class CreateContext:
error_message = "Failed to run AutoCreator with identifier \"{}\". {}"
failed_info = []
for identifier, creator in self.autocreators.items():
for creator in self.sorted_autocreators:
identifier = creator.identifier
label = creator.label
failed = False
add_traceback = False
@ -1831,19 +2076,26 @@ class CreateContext:
"""Save instance specific values."""
instances_by_identifier = collections.defaultdict(list)
for instance in self._instances_by_id.values():
instance_changes = instance.changes()
if not instance_changes:
continue
identifier = instance.creator_identifier
instances_by_identifier[identifier].append(instance)
instances_by_identifier[identifier].append(
UpdateData(instance, instance_changes)
)
if not instances_by_identifier:
return
error_message = "Instances update of creator \"{}\" failed. {}"
failed_info = []
for identifier, creator_instances in instances_by_identifier.items():
update_list = []
for instance in creator_instances:
instance_changes = instance.changes()
if instance_changes:
update_list.append(UpdateData(instance, instance_changes))
creator = self.creators[identifier]
for creator in self.get_sorted_creators(
instances_by_identifier.keys()
):
identifier = creator.identifier
update_list = instances_by_identifier[identifier]
if not update_list:
continue
@ -1879,9 +2131,13 @@ class CreateContext:
def remove_instances(self, instances):
"""Remove instances from context.
All instances whose creator identifier does not lead to an existing
creator are simply removed from the context.
Args:
instances(list<CreatedInstance>): Instances that should be removed
from context.
instances(List[CreatedInstance]): Instances that should be removed.
Removal logic is done using the creator, which may require other
cleanup than just removing the instance from the context.
"""
instances_by_identifier = collections.defaultdict(list)
@ -1889,10 +2145,21 @@ class CreateContext:
identifier = instance.creator_identifier
instances_by_identifier[identifier].append(instance)
# Just remove instances from context if creator is not available
missing_creators = set(instances_by_identifier) - set(self.creators)
for identifier in missing_creators:
for instance in instances_by_identifier[identifier]:
self._remove_instance(instance)
error_message = "Instances removement of creator \"{}\" failed. {}"
failed_info = []
for identifier, creator_instances in instances_by_identifier.items():
creator = self.creators.get(identifier)
# Remove instances by creator plugin order
for creator in self.get_sorted_creators(
instances_by_identifier.keys()
):
identifier = creator.identifier
creator_instances = instances_by_identifier[identifier]
label = creator.label
failed = False
add_traceback = False
@ -1935,6 +2202,7 @@ class CreateContext:
family(str): Instance family for which should be attribute
definitions returned.
"""
if family not in self._attr_plugins_by_family:
import pyblish.logic
@ -1950,7 +2218,13 @@ class CreateContext:
return self._attr_plugins_by_family[family]
def _get_publish_plugins_with_attr_for_context(self):
"""Publish plugins attributes for Context plugins."""
"""Publish plugins attributes for Context plugins.
Returns:
List[pyblish.api.Plugin]: Publish plugins that have attribute
definitions for context.
"""
plugins = []
for plugin in self.plugins_with_defs:
if not plugin.__instanceEnabled__:
@ -1975,7 +2249,7 @@ class CreateContext:
return self._collection_shared_data
def run_convertor(self, convertor_identifier):
"""Run convertor plugin by it's idenfitifier.
"""Run convertor plugin by identifier.
Conversion is skipped if convertor is not available.
@ -1988,7 +2262,7 @@ class CreateContext:
convertor.convert()
def run_convertors(self, convertor_identifiers):
"""Run convertor plugins by idenfitifiers.
"""Run convertor plugins by identifiers.
Conversion is skipped if convertor is not available. It is recommended
to trigger reset after conversion to reload instances.


@ -107,7 +107,11 @@ class SubsetConvertorPlugin(object):
@property
def create_context(self):
"""Quick access to create context."""
"""Quick access to create context.
Returns:
CreateContext: Context which initialized the plugin.
"""
return self._create_context
@ -157,6 +161,10 @@ class BaseCreator:
# Cached group label after first call 'get_group_label'
_cached_group_label = None
# Order in which the plugin will be executed (collect & update instances)
# less == earlier -> order '90' will be processed before '100'
order = 100
# Variable to store logger
_log = None
@ -489,6 +497,17 @@ class Creator(BaseCreator):
# - similar to instance attribute definitions
pre_create_attr_defs = []
@property
def show_order(self):
"""Order in which is creator shown in UI.
Returns:
int: Order in which is creator shown (less == earlier). By default
is using Creator's 'order' or processing.
"""
return self.order
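    # A minimal sketch (hypothetical creator, for illustration only): UI order
    # can be changed independently of the processing order by overriding the
    # 'show_order' property.
    #
    #   class ExampleRenderCreator(Creator):
    #       order = 100  # processing order stays default
    #
    #       @property
    #       def show_order(self):
    #           return 10  # shown near the top of the creators list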
@abstractmethod
def create(self, subset_name, instance_data, pre_create_data):
"""Create new instance and store it.

View file

@ -2,7 +2,10 @@ import os
import logging
from openpype.settings import get_system_settings, get_project_settings
from openpype.pipeline import legacy_io
from openpype.pipeline import (
schema,
legacy_io,
)
from openpype.pipeline.plugin_discover import (
discover,
register_plugin,
@ -79,6 +82,45 @@ class LoaderPlugin(list):
print(" - setting `{}`: `{}`".format(option, value))
setattr(cls, option, value)
@classmethod
def is_compatible_loader(cls, context):
"""Return whether a loader is compatible with a context.
This checks the version's families and the representation for the given
Loader.
Returns:
bool
"""
plugin_repre_names = cls.get_representations()
plugin_families = cls.families
if not plugin_repre_names or not plugin_families:
return False
repre_doc = context.get("representation")
if not repre_doc:
return False
plugin_repre_names = set(plugin_repre_names)
if (
"*" not in plugin_repre_names
and repre_doc["name"] not in plugin_repre_names
):
return False
maj_version, _ = schema.get_schema_version(context["subset"]["schema"])
if maj_version < 3:
families = context["version"]["data"].get("families", [])
else:
families = context["subset"]["data"]["families"]
plugin_families = set(plugin_families)
return (
"*" in plugin_families
or any(family in plugin_families for family in families)
)
@classmethod
def get_representations(cls):
return cls.representations
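    # A minimal compatibility sketch (hypothetical loader and context values,
    # for illustration only): the loader matches when the representation name
    # is listed (or "*") and at least one family is shared (or "*").
    #
    #   class ExampleAbcLoader(LoaderPlugin):
    #       families = ["model", "pointcache"]
    #       representations = ["abc"]
    #
    #   context = {
    #       "subset": {"schema": "openpype:subset-3.0",
    #                  "data": {"families": ["model"]}},
    #       "version": {"data": {}},
    #       "representation": {"name": "abc"},
    #   }
    #   assert ExampleAbcLoader.is_compatible_loader(context) is True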

View file

@ -748,25 +748,9 @@ def is_compatible_loader(Loader, context):
Returns:
bool
"""
maj_version, _ = schema.get_schema_version(context["subset"]["schema"])
if maj_version < 3:
families = context["version"]["data"].get("families", [])
else:
families = context["subset"]["data"]["families"]
representation = context["representation"]
has_family = (
"*" in Loader.families or any(
family in Loader.families for family in families
)
)
representations = Loader.get_representations()
has_representation = (
"*" in representations or representation["name"] in representations
)
return has_family and has_representation
return Loader.is_compatible_loader(context)
def loaders_from_repre_context(loaders, repre_context):

View file

@ -34,12 +34,24 @@ class AddSyncSite(load.LoaderPlugin):
return self._sync_server
def load(self, context, name=None, namespace=None, data=None):
self.log.info("Adding {} to representation: {}".format(
data["site_name"], data["_id"]))
family = context["representation"]["context"]["family"]
project_name = data["project_name"]
repre_id = data["_id"]
""""Adds site skeleton information on representation_id
Looks for loaded containers for workfile, adds them site skeleton too
(eg. they should be downloaded too).
Args:
context (dict):
name (str):
namespace (str):
data (dict): expects {"site_name": SITE_NAME_TO_ADD}
"""
# self.log won't propagate
project_name = context["project"]["name"]
repre_doc = context["representation"]
family = repre_doc["context"]["family"]
repre_id = repre_doc["_id"]
site_name = data["site_name"]
print("Adding {} to representation: {}".format(
data["site_name"], repre_id))
self.sync_server.add_site(project_name, repre_id, site_name,
force=True)
@ -52,6 +64,8 @@ class AddSyncSite(load.LoaderPlugin):
)
for link_repre_id in links:
try:
print("Adding {} to linked representation: {}".format(
data["site_name"], link_repre_id))
self.sync_server.add_site(project_name, link_repre_id,
site_name,
force=False)

View file

@ -3,7 +3,10 @@ from openpype.pipeline import load
class RemoveSyncSite(load.LoaderPlugin):
"""Remove sync site and its files on representation"""
"""Remove sync site and its files on representation.
Removes files only on local site!
"""
representations = ["*"]
families = ["*"]
@ -24,13 +27,18 @@ class RemoveSyncSite(load.LoaderPlugin):
return self._sync_server
def load(self, context, name=None, namespace=None, data=None):
self.log.info("Removing {} on representation: {}".format(
data["site_name"], data["_id"]))
self.sync_server.remove_site(data["project_name"],
data["_id"],
data["site_name"],
project_name = context["project"]["name"]
repre_doc = context["representation"]
repre_id = repre_doc["_id"]
site_name = data["site_name"]
print("Removing {} on representation: {}".format(site_name, repre_id))
self.sync_server.remove_site(project_name,
repre_id,
site_name,
True)
self.log.debug("Site added.")
self.log.debug("Site removed.")
def filepath_from_context(self, context):
"""No real file loading"""

View file

@ -506,6 +506,43 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
return version_doc
def _validate_repre_files(self, files, is_sequence_representation):
"""Validate representation files before transfer preparation.
Check that files contain only filenames instead of full paths and that
a sequence does not contain more than one collection or any remainders.
Args:
files (Union[str, List[str]]): Files from representation.
is_sequence_representation (bool): Files are for sequence.
Raises:
KnownPublishError: If validations don't pass.
"""
if not files:
return
if not is_sequence_representation:
files = [files]
if any(os.path.isabs(fname) for fname in files):
raise KnownPublishError("Given file names contain full paths")
if not is_sequence_representation:
return
src_collections, remainders = clique.assemble(files)
if len(files) < 2 or len(src_collections) != 1 or remainders:
raise KnownPublishError((
"Files of representation does not contain proper"
" sequence files.\nCollected collections: {}"
"\nCollected remainders: {}"
).format(
", ".join([str(col) for col in src_collections]),
", ".join([str(rem) for rem in remainders])
))
def prepare_representation(self, repre,
template_name,
existing_repres_by_name,
@ -587,7 +624,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
is_udim = bool(repre.get("udim"))
# handle publish in place
if "originalDirname" in template:
if "{originalDirname}" in template:
# store as originalDirname only original value without project root
# if instance collected originalDirname is present, it should be
# used for all represe
@ -606,24 +643,64 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
template_data["originalDirname"] = without_root
is_sequence_representation = isinstance(files, (list, tuple))
if is_sequence_representation:
# Collection of files (sequence)
if any(os.path.isabs(fname) for fname in files):
raise KnownPublishError("Given file names contain full paths")
self._validate_repre_files(files, is_sequence_representation)
# Output variables of conditions below:
# - transfers (List[Tuple[str, str]]): src -> dst filepaths to copy
# - repre_context (Dict[str, Any]): context data used to fill template
# - template_data (Dict[str, Any]): source data used to fill template
# - also used to add required data to 'repre_context' that is
# not used for formatting
# - anatomy_filled (Dict[str, Any]): filled anatomy of last file
# - to fill 'publishDir' on instance.data -> not ideal
# Treat template with 'originalBasename' in a special way
if "{originalBasename}" in template:
# Remove 'frame' from template data
template_data.pop("frame", None)
# Find out first frame string value
first_index_padded = None
if not is_udim and is_sequence_representation:
col = clique.assemble(files)[0][0]
sorted_frames = tuple(sorted(col.indexes))
# First frame is used for the 'frame' value
first_frame = sorted_frames[0]
# Get last frame for padding
last_frame = sorted_frames[-1]
# Use the larger of the collection padding and the string length of last frame
padding = max(col.padding, len(str(last_frame)))
first_index_padded = get_frame_padded(
frame=first_frame,
padding=padding
)
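            # Worked example (illustrative values): frames 998-1002 collected
            # with collection padding 3 give max(3, len("1002")) == 4, so the
            # first frame is filled into the context as "0998".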
# Convert a single file to a list as the remaining part only creates
# transfers (iteration over files)
if not is_sequence_representation:
files = [files]
repre_context = None
transfers = []
for src_file_name in files:
template_data["originalBasename"], _ = os.path.splitext(
src_file_name)
anatomy_filled = anatomy.format(template_data)
dst = anatomy_filled[template_name]["path"]
src = os.path.join(stagingdir, src_file_name)
transfers.append((src, dst))
if repre_context is None:
repre_context = dst.used_values
if not is_udim and first_index_padded is not None:
repre_context["frame"] = first_index_padded
elif is_sequence_representation:
# Collection of files (sequence)
src_collections, remainders = clique.assemble(files)
if len(files) < 2 or len(src_collections) != 1 or remainders:
raise KnownPublishError((
"Files of representation does not contain proper"
" sequence files.\nCollected collections: {}"
"\nCollected remainders: {}"
).format(
", ".join([str(col) for col in src_collections]),
", ".join([str(rem) for rem in remainders])
))
src_collection = src_collections[0]
template_data["originalBasename"] = src_collection.head[:-1]
destination_indexes = list(src_collection.indexes)
# Use last frame for minimum padding
# - that should cover both 'udim' and 'frame' minimum padding
@ -645,11 +722,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# In case source are published in place we need to
# skip renumbering
repre_frame_start = repre.get("frameStart")
if (
"originalBasename" not in template
and repre_frame_start is not None
):
index_frame_start = int(repre["frameStart"])
if repre_frame_start is not None:
index_frame_start = int(repre_frame_start)
# Shift destination sequence to the start frame
destination_indexes = [
index_frame_start + idx
@ -705,15 +779,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
else:
# Single file
fname = files
if os.path.isabs(fname):
self.log.error(
"Filename in representation is filepath {}".format(fname)
)
raise KnownPublishError(
"This is a bug. Representation file name is full path"
)
template_data["originalBasename"], _ = os.path.splitext(fname)
# Manage anatomy template data
template_data.pop("frame", None)
if is_udim:
@ -725,7 +790,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
dst = os.path.normpath(template_filled)
# Single file transfer
src = os.path.join(stagingdir, fname)
src = os.path.join(stagingdir, files)
transfers = [(src, dst)]
# todo: Are we sure the assumption each representation

View file

@ -386,6 +386,25 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
repre["_id"] = old_repre["_id"]
update_data = prepare_representation_update_data(
old_repre, repre)
# Keep previously synchronized sites up-to-date
# by comparing old and new sites and adding old sites
# if missing in new ones
old_repre_files_sites = [
f.get("sites", []) for f in old_repre.get("files", [])
]
for i, file in enumerate(repre.get("files", [])):
repre_sites_names = {
s["name"] for s in file.get("sites", [])
}
for site in old_repre_files_sites[i]:
if site["name"] not in repre_sites_names:
# Pop the date to tag for sync
site.pop("created_dt", None)
file["sites"].append(site)
update_data["files"][i] = file
op_session.update_entity(
project_name,
old_repre["type"],

View file

@ -340,13 +340,11 @@ class ModifiedBurnins(ffmpeg_burnins.Burnins):
_stdout, _stderr = proc.communicate()
if _stdout:
for line in _stdout.split(b"\r\n"):
print(line.decode("utf-8"))
print(_stdout.decode("utf-8", errors="backslashreplace"))
# This will probably never happen as ffmpeg uses stdout
if _stderr:
for line in _stderr.split(b"\r\n"):
print(line.decode("utf-8"))
print(_stderr.decode("utf-8", errors="backslashreplace"))
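    # 'backslashreplace' escapes any non UTF-8 bytes in the ffmpeg output
    # instead of raising UnicodeDecodeError and losing the log entirely.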
if proc.returncode != 0:
raise RuntimeError(

View file

@ -33,7 +33,8 @@
"limit": [],
"jobInfo": {},
"pluginInfo": {},
"scene_patches": []
"scene_patches": [],
"strict_error_checking": true
},
"NukeSubmitDeadline": {
"enabled": true,

View file

@ -195,6 +195,12 @@
]
}
},
{
"type": "boolean",
"key": "strict_error_checking",
"label": "Strict Error Checking",
"default": true
}
]
},

View file

@ -72,6 +72,11 @@
"key": "command",
"label": "Python command"
},
{
"type": "text",
"key": "icon",
"label": "Icon Path"
},
{
"type": "text",
"key": "shortcut",

View file

@ -16,7 +16,11 @@ from openpype.lib.attribute_definitions import (
UISeparatorDef,
UILabelDef
)
from openpype.tools.utils import CustomTextComboBox
from openpype.tools.utils import (
CustomTextComboBox,
FocusSpinBox,
FocusDoubleSpinBox,
)
from openpype.widgets.nice_checkbox import NiceCheckbox
from .files_widget import FilesWidget
@ -142,6 +146,9 @@ class AttributeDefinitionsWidget(QtWidgets.QWidget):
if attr_def.label:
label_widget = QtWidgets.QLabel(attr_def.label, self)
tooltip = attr_def.tooltip
if tooltip:
label_widget.setToolTip(tooltip)
layout.addWidget(
label_widget, row, 0, 1, expand_cols
)
@ -243,10 +250,10 @@ class NumberAttrWidget(_BaseAttrDefWidget):
def _ui_init(self):
decimals = self.attr_def.decimals
if decimals > 0:
input_widget = QtWidgets.QDoubleSpinBox(self)
input_widget = FocusDoubleSpinBox(self)
input_widget.setDecimals(decimals)
else:
input_widget = QtWidgets.QSpinBox(self)
input_widget = FocusSpinBox(self)
if self.attr_def.tooltip:
input_widget.setToolTip(self.attr_def.tooltip)

View file

@ -1480,23 +1480,21 @@ class RepresentationWidget(QtWidgets.QWidget):
repre_ids = []
data_by_repre_id = {}
selected_side = action_representation.get("selected_side")
site_name = "{}_site_name".format(selected_side)
is_sync_loader = tools_lib.is_sync_loader(loader)
for item in items:
item_id = item.get("_id")
repre_ids.append(item_id)
repre_id = item["_id"]
repre_ids.append(repre_id)
if not is_sync_loader:
continue
site_name = "{}_site_name".format(selected_side)
data_site_name = item.get(site_name)
if not data_site_name:
continue
data_by_repre_id[item_id] = {
"_id": item_id,
"site_name": data_site_name,
"project_name": self.dbcon.active_project()
data_by_repre_id[repre_id] = {
"site_name": data_site_name
}
repre_contexts = get_repres_contexts(repre_ids, self.dbcon)
@ -1586,8 +1584,8 @@ def _load_representations_by_loader(loader, repre_contexts,
version_name = version_doc.get("name")
try:
if data_by_repre_id:
_id = repre_context["representation"]["_id"]
data = data_by_repre_id.get(_id)
repre_id = repre_context["representation"]["_id"]
data = data_by_repre_id.get(repre_id)
options.update(data)
load_with_repre_context(
loader,

View file

@ -24,6 +24,7 @@ CREATOR_THUMBNAIL_ENABLED_ROLE = QtCore.Qt.UserRole + 5
FAMILY_ROLE = QtCore.Qt.UserRole + 6
GROUP_ROLE = QtCore.Qt.UserRole + 7
CONVERTER_IDENTIFIER_ROLE = QtCore.Qt.UserRole + 8
CREATOR_SORT_ROLE = QtCore.Qt.UserRole + 9
__all__ = (
@ -36,6 +37,7 @@ __all__ = (
"IS_GROUP_ROLE",
"CREATOR_IDENTIFIER_ROLE",
"CREATOR_THUMBNAIL_ENABLED_ROLE",
"CREATOR_SORT_ROLE",
"FAMILY_ROLE",
"GROUP_ROLE",
"CONVERTER_IDENTIFIER_ROLE",

View file

@ -832,7 +832,8 @@ class CreatorItem:
default_variants,
create_allow_context_change,
create_allow_thumbnail,
pre_create_attributes_defs
show_order,
pre_create_attributes_defs,
):
self.identifier = identifier
self.creator_type = creator_type
@ -846,6 +847,7 @@ class CreatorItem:
self.default_variants = default_variants
self.create_allow_context_change = create_allow_context_change
self.create_allow_thumbnail = create_allow_thumbnail
self.show_order = show_order
self.pre_create_attributes_defs = pre_create_attributes_defs
def get_group_label(self):
@ -869,6 +871,7 @@ class CreatorItem:
pre_create_attr_defs = None
create_allow_context_change = None
create_allow_thumbnail = None
show_order = creator.order
if creator_type is CreatorTypes.artist:
description = creator.get_description()
detail_description = creator.get_detail_description()
@ -877,6 +880,7 @@ class CreatorItem:
pre_create_attr_defs = creator.get_pre_create_attr_defs()
create_allow_context_change = creator.create_allow_context_change
create_allow_thumbnail = creator.create_allow_thumbnail
show_order = creator.show_order
identifier = creator.identifier
return cls(
@ -892,7 +896,8 @@ class CreatorItem:
default_variants,
create_allow_context_change,
create_allow_thumbnail,
pre_create_attr_defs
show_order,
pre_create_attr_defs,
)
def to_data(self):
@ -915,6 +920,7 @@ class CreatorItem:
"default_variants": self.default_variants,
"create_allow_context_change": self.create_allow_context_change,
"create_allow_thumbnail": self.create_allow_thumbnail,
"show_order": self.show_order,
"pre_create_attributes_defs": pre_create_attributes_defs,
}
@ -1502,9 +1508,6 @@ class BasePublisherController(AbstractPublisherController):
def _reset_attributes(self):
"""Reset most of attributes that can be reset."""
# Reset creator items
self._creator_items = None
self.publish_is_running = False
self.publish_has_validated = False
self.publish_has_crashed = False
@ -1760,6 +1763,8 @@ class PublisherController(BasePublisherController):
self._resetting_plugins = True
self._create_context.reset_plugins()
# Reset creator items
self._creator_items = None
self._resetting_plugins = False

View file

@ -18,9 +18,10 @@ from .tasks_widget import CreateWidgetTasksWidget
from .precreate_widget import PreCreateWidget
from ..constants import (
VARIANT_TOOLTIP,
CREATOR_IDENTIFIER_ROLE,
FAMILY_ROLE,
CREATOR_IDENTIFIER_ROLE,
CREATOR_THUMBNAIL_ENABLED_ROLE,
CREATOR_SORT_ROLE,
)
SEPARATORS = ("---separator---", "---")
@ -90,12 +91,19 @@ class CreatorShortDescWidget(QtWidgets.QWidget):
self._description_label.setText(description)
class CreatorsProxyModel(QtCore.QSortFilterProxyModel):
def lessThan(self, left, right):
l_show_order = left.data(CREATOR_SORT_ROLE)
r_show_order = right.data(CREATOR_SORT_ROLE)
if l_show_order == r_show_order:
return super(CreatorsProxyModel, self).lessThan(left, right)
return l_show_order < r_show_order
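    # Illustrative ordering (values are examples): creators with show_order 90
    # sort above those with 100; equal show_order values fall back to the
    # default comparison, i.e. the label text under the sort role.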
class CreateWidget(QtWidgets.QWidget):
def __init__(self, controller, parent=None):
super(CreateWidget, self).__init__(parent)
self.setWindowTitle("Create new instance")
self._controller = controller
self._asset_name = None
@ -141,7 +149,7 @@ class CreateWidget(QtWidgets.QWidget):
creators_view = QtWidgets.QListView(creators_view_widget)
creators_model = QtGui.QStandardItemModel()
creators_sort_model = QtCore.QSortFilterProxyModel()
creators_sort_model = CreatorsProxyModel()
creators_sort_model.setSourceModel(creators_model)
creators_view.setModel(creators_sort_model)
@ -441,7 +449,8 @@ class CreateWidget(QtWidgets.QWidget):
# Add new families
new_creators = set()
for identifier, creator_item in self._controller.creator_items.items():
creator_items_by_identifier = self._controller.creator_items
for identifier, creator_item in creator_items_by_identifier.items():
if creator_item.creator_type != "artist":
continue
@ -457,6 +466,7 @@ class CreateWidget(QtWidgets.QWidget):
self._creators_model.appendRow(item)
item.setData(creator_item.label, QtCore.Qt.DisplayRole)
item.setData(creator_item.show_order, CREATOR_SORT_ROLE)
item.setData(identifier, CREATOR_IDENTIFIER_ROLE)
item.setData(
creator_item.create_allow_thumbnail,
@ -482,8 +492,9 @@ class CreateWidget(QtWidgets.QWidget):
index = indexes[0]
identifier = index.data(CREATOR_IDENTIFIER_ROLE)
create_item = creator_items_by_identifier.get(identifier)
self._set_creator_by_identifier(identifier)
self._set_creator(create_item)
def _on_plugins_refresh(self):
# Trigger refresh only if is visible

View file

@ -1,4 +1,6 @@
from .widgets import (
FocusSpinBox,
FocusDoubleSpinBox,
CustomTextComboBox,
PlaceholderLineEdit,
BaseClickableFrame,
@ -34,6 +36,8 @@ from .overlay_messages import (
__all__ = (
"FocusSpinBox",
"FocusDoubleSpinBox",
"CustomTextComboBox",
"PlaceholderLineEdit",
"BaseClickableFrame",

View file

@ -13,6 +13,34 @@ from openpype.lib.attribute_definitions import AbstractAttrDef
log = logging.getLogger(__name__)
class FocusSpinBox(QtWidgets.QSpinBox):
"""QSpinBox which allow scroll wheel changes only in active state."""
def __init__(self, *args, **kwargs):
super(FocusSpinBox, self).__init__(*args, **kwargs)
self.setFocusPolicy(QtCore.Qt.StrongFocus)
def wheelEvent(self, event):
if not self.hasFocus():
event.ignore()
else:
super(FocusSpinBox, self).wheelEvent(event)
class FocusDoubleSpinBox(QtWidgets.QDoubleSpinBox):
"""QDoubleSpinBox which allow scroll wheel changes only in active state."""
def __init__(self, *args, **kwargs):
super(FocusDoubleSpinBox, self).__init__(*args, **kwargs)
self.setFocusPolicy(QtCore.Qt.StrongFocus)
def wheelEvent(self, event):
if not self.hasFocus():
event.ignore()
else:
super(FocusDoubleSpinBox, self).wheelEvent(event)
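# A minimal usage sketch (widget names are illustrative): both spin boxes take
# StrongFocus and ignore wheel events until focused, so scrolling a long
# attributes form does not change values accidentally.
#
#   spin = FocusDoubleSpinBox()
#   spin.setDecimals(3)
#   spin.setRange(0.0, 100.0)
#   form_layout.addRow("Opacity", spin)  # 'form_layout' assumed to exist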
class CustomTextComboBox(QtWidgets.QComboBox):
"""Combobox which can have different text showed."""

View file

@ -1,3 +1,3 @@
# -*- coding: utf-8 -*-
"""Package declaring Pype version."""
__version__ = "3.15.1-nightly.2"
__version__ = "3.15.1-nightly.5"

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "OpenPype"
version = "3.15.1" # OpenPype
version = "3.15.0" # OpenPype
description = "Open VFX and Animation pipeline with support."
authors = ["OpenPype Team <info@openpype.io>"]
license = "MIT License"

View file

@ -7,16 +7,10 @@ from github import Github
import os
def get_release_type_github(Log, github_token):
# print(Log)
minor_labels = ["Bump Minor"]
# patch_labels = [
# "type: enhancement",
# "type: bug",
# "type: deprecated",
# "type: Feature"]
g = Github(github_token)
repo = g.get_repo("pypeclub/OpenPype")
repo = g.get_repo("ynput/OpenPype")
labels = set()
for line in Log.splitlines():
@ -35,12 +29,12 @@ def get_release_type_github(Log, github_token):
else:
return "patch"
# TODO: if all is working fine, this part can be cleaned up eventually
# TODO: if all is working fine, this part can be cleaned up eventually
# if any(label in labels for label in patch_labels):
# return "patch"
return None
def remove_prefix(text, prefix):
return text[text.startswith(prefix) and len(prefix):]
@ -93,12 +87,16 @@ def file_regex_replace(filename, regex, version):
f.truncate()
def bump_file_versions(version):
def bump_file_versions(version, nightly=False):
filename = "./openpype/version.py"
regex = "(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(-((0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?"
file_regex_replace(filename, regex, version)
if nightly:
# skip the nightly version bump in pyproject.toml
return
# bump pyproject.toml
filename = "pyproject.toml"
regex = "version = \"(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(\+((0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?\" # OpenPype"
@ -196,7 +194,7 @@ def main():
if options.nightly:
next_tag_v = calculate_next_nightly(github_token=options.github_token)
print(next_tag_v)
bump_file_versions(next_tag_v)
bump_file_versions(next_tag_v, True)
if options.finalize:
new_release = finalize_prerelease(options.finalize)
@ -222,7 +220,7 @@ def main():
new_prerelease = current_prerelease.bump_prerelease().__str__()
print(new_prerelease)
bump_file_versions(new_prerelease)
if options.version:
bump_file_versions(options.version)
print(f"Injected version {options.version} into the release")

View file

@ -4273,9 +4273,9 @@ htmlparser2@^6.1.0:
entities "^2.0.0"
http-cache-semantics@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390"
integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==
version "4.1.1"
resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a"
integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==
http-deceiver@^1.2.7:
version "1.2.7"