Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

Commit f3bc1e2e32: [Automated] Merged develop into main

37 changed files with 574 additions and 1114 deletions
.github/workflows/miletone_release_trigger.yml (vendored, new file): 47 lines added
@@ -0,0 +1,47 @@
name: Milestone Release [trigger]

on:
  workflow_dispatch:
    inputs:
      milestone:
        required: true
      release-type:
        type: choice
        description: What release should be created
        options:
          - release
          - pre-release
  milestone:
    types: closed

jobs:
  milestone-title:
    runs-on: ubuntu-latest
    outputs:
      milestone: ${{ steps.milestoneTitle.outputs.value }}
    steps:
      - name: Switch input milestone
        uses: haya14busa/action-cond@v1
        id: milestoneTitle
        with:
          cond: ${{ inputs.milestone == '' }}
          if_true: ${{ github.event.milestone.title }}
          if_false: ${{ inputs.milestone }}
      - name: Print resulted milestone
        run: |
          echo "${{ steps.milestoneTitle.outputs.value }}"

  call-ci-tools-milestone-release:
    needs: milestone-title
    uses: ynput/ci-tools/.github/workflows/milestone_release_ref.yml@main
    with:
      milestone: ${{ needs.milestone-title.outputs.milestone }}
      repo-owner: ${{ github.event.repository.owner.login }}
      repo-name: ${{ github.event.repository.name }}
      version-py-path: "./openpype/version.py"
      pyproject-path: "./pyproject.toml"
    secrets:
      token: ${{ secrets.YNPUT_BOT_TOKEN }}
      user_email: ${{ secrets.CI_EMAIL }}
      user_name: ${{ secrets.CI_USER }}
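The action-cond step above is a simple either/or: a manually dispatched run supplies the milestone input, while a milestone-closed event supplies the milestone title. A minimal Python sketch of the same decision (names are illustrative only, not part of the workflow):

    def resolve_milestone(input_milestone, event_milestone_title):
        # cond: ${{ inputs.milestone == '' }} -> take the event title
        if input_milestone == "":
            return event_milestone_title
        return input_milestone

    assert resolve_milestone("", "3.15.2") == "3.15.2"
    assert resolve_milestone("3.15.3", "3.15.2") == "3.15.3"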
.github/workflows/nightly_merge.yml (vendored, deleted): 29 lines removed
@@ -1,29 +0,0 @@
name: Dev -> Main

on:
  schedule:
    - cron: '21 3 * * 3,6'
  workflow_dispatch:

jobs:
  develop-to-main:

    runs-on: ubuntu-latest

    steps:
      - name: 🚛 Checkout Code
        uses: actions/checkout@v2

      - name: 🔨 Merge develop to main
        uses: everlytic/branch-merge@1.1.0
        with:
          github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
          source_ref: 'develop'
          target_branch: 'main'
          commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'

      - name: Invoke pre-release workflow
        uses: benc-uk/workflow-dispatch@v1
        with:
          workflow: Nightly Prerelease
          token: ${{ secrets.YNPUT_BOT_TOKEN }}
.github/workflows/prerelease.yml (vendored, deleted): 67 lines removed
@@ -1,67 +0,0 @@
name: Nightly Prerelease

on:
  workflow_dispatch:


jobs:
  create_nightly:
    runs-on: ubuntu-latest

    steps:
      - name: 🚛 Checkout Code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - name: Install Python requirements
        run: pip install gitpython semver PyGithub

      - name: 🔎 Determine next version type
        id: version_type
        run: |
          TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.YNPUT_BOT_TOKEN }})
          echo "type=${TYPE}" >> $GITHUB_OUTPUT

      - name: 💉 Inject new version into files
        id: version
        if: steps.version_type.outputs.type != 'skip'
        run: |
          NEW_VERSION_TAG=$(python ./tools/ci_tools.py --nightly --github_token ${{ secrets.YNPUT_BOT_TOKEN }})
          echo "next_tag=${NEW_VERSION_TAG}" >> $GITHUB_OUTPUT

      - name: 💾 Commit and Tag
        id: git_commit
        if: steps.version_type.outputs.type != 'skip'
        run: |
          git config user.email ${{ secrets.CI_EMAIL }}
          git config user.name ${{ secrets.CI_USER }}
          git checkout main
          git pull
          git add .
          git commit -m "[Automated] Bump version"
          tag_name="CI/${{ steps.version.outputs.next_tag }}"
          echo $tag_name
          git tag -a $tag_name -m "nightly build"

      - name: Push to protected main branch
        uses: CasperWA/push-protected@v2.10.0
        with:
          token: ${{ secrets.YNPUT_BOT_TOKEN }}
          branch: main
          tags: true
          unprotect_reviews: true

      - name: 🔨 Merge main back to develop
        uses: everlytic/branch-merge@1.1.0
        if: steps.version_type.outputs.type != 'skip'
        with:
          github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
          source_ref: 'main'
          target_branch: 'develop'
          commit_message_template: '[Automated] Merged {source_ref} into {target_branch}'
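The deleted Nightly Prerelease workflow above delegates the version arithmetic to tools/ci_tools.py, which is not part of this diff, so its exact behavior is an assumption. Conceptually, --bump answers whether a nightly is warranted at all ('skip' otherwise) and --nightly produces the next prerelease tag. A hedged sketch of that logic using the semver package the step installs (the function and its inputs are hypothetical):

    import semver
    from typing import Optional

    def next_nightly_tag(last_release, last_nightly, has_new_commits):
        # type: (str, Optional[str], bool) -> str
        """Illustrative only: what ci_tools.py --nightly might decide."""
        if not has_new_commits:
            return "skip"
        if last_nightly:
            # e.g. 3.14.9-nightly.2 -> 3.14.9-nightly.3
            parsed = semver.VersionInfo.parse(last_nightly)
            return str(parsed.bump_prerelease("nightly"))
        # First nightly after a stable release bumps the patch version.
        bumped = semver.VersionInfo.parse(last_release).bump_patch()
        return str(bumped.replace(prerelease="nightly.1"))

    print(next_nightly_tag("3.14.8", "3.14.9-nightly.2", True))  # 3.14.9-nightly.3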
.github/workflows/release.yml (vendored, deleted): 76 lines removed
@@ -1,76 +0,0 @@
name: Stable Release

on:
  release:
    types:
      - prereleased

jobs:
  create_release:
    runs-on: ubuntu-latest
    if: github.actor != 'pypebot'

    steps:
      - name: 🚛 Checkout Code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.9

      - name: Install Python requirements
        run: pip install gitpython semver PyGithub

      - name: 💉 Inject new version into files
        id: version
        run: |
          NEW_VERSION=$(python ./tools/ci_tools.py --finalize ${GITHUB_REF#refs/*/})
          LAST_VERSION=$(python ./tools/ci_tools.py --lastversion release)

          echo "current_version=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT
          echo "last_release=${LAST_VERSION}" >> $GITHUB_OUTPUT
          echo "release_tag=${NEW_VERSION}" >> $GITHUB_OUTPUT

      - name: 💾 Commit and Tag
        id: git_commit
        if: steps.version.outputs.release_tag != 'skip'
        run: |
          git config user.email ${{ secrets.CI_EMAIL }}
          git config user.name ${{ secrets.CI_USER }}
          git add .
          git commit -m "[Automated] Release"
          tag_name="${{ steps.version.outputs.release_tag }}"
          git tag -a $tag_name -m "stable release"

      - name: 🔏 Push to protected main branch
        if: steps.version.outputs.release_tag != 'skip'
        uses: CasperWA/push-protected@v2.10.0
        with:
          token: ${{ secrets.YNPUT_BOT_TOKEN }}
          branch: main
          tags: true
          unprotect_reviews: true

      - name: 🚀 Github Release
        if: steps.version.outputs.release_tag != 'skip'
        uses: ncipollo/release-action@v1
        with:
          tag: ${{ steps.version.outputs.release_tag }}
          token: ${{ secrets.YNPUT_BOT_TOKEN }}

      - name: ☠ Delete Pre-release
        if: steps.version.outputs.release_tag != 'skip'
        uses: cb80/delrel@latest
        with:
          tag: "${{ steps.version.outputs.current_version }}"

      - name: 🔁 Merge main back to develop
        if: steps.version.outputs.release_tag != 'skip'
        uses: everlytic/branch-merge@1.1.0
        with:
          github_token: ${{ secrets.YNPUT_BOT_TOKEN }}
          source_ref: 'main'
          target_branch: 'develop'
          commit_message_template: '[Automated] Merged release {source_ref} into {target_branch}'
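The release workflows above read the triggering tag out of $GITHUB_REF with the shell expansion ${GITHUB_REF#refs/*/}, which strips the shortest leading refs/<something>/ prefix. The same transformation in Python, for clarity (example values only):

    def strip_ref_prefix(github_ref):
        """Mimic bash ${GITHUB_REF#refs/*/}: drop the shortest refs/<x>/ prefix."""
        parts = github_ref.split("/", 2)
        if len(parts) == 3 and parts[0] == "refs":
            return parts[2]
        return github_ref

    assert strip_ref_prefix("refs/tags/CI/3.14.9-nightly.3") == "CI/3.14.9-nightly.3"
    assert strip_ref_prefix("refs/heads/main") == "main"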
@@ -8,7 +8,6 @@ OpenPype
 [![documentation](https://github.com/pypeclub/pype/actions/workflows/documentation.yml/badge.svg)](https://github.com/pypeclub/pype/actions/workflows/documentation.yml)
 
-this
 Introduction
 ------------
@@ -6,8 +6,7 @@ from openpype.hosts.aftereffects import api
 from openpype.pipeline import (
     Creator,
     CreatedInstance,
-    CreatorError,
-    legacy_io,
+    CreatorError
 )
 from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
 from openpype.lib import prepare_template_data

@@ -127,7 +126,7 @@ class RenderCreator(Creator):
             subset_change = _changes.get("subset")
             if subset_change:
                 api.get_stub().rename_item(created_inst.data["members"][0],
-                                           subset_change[1])
+                                           subset_change.new_value)
 
     def remove_instances(self, instances):
         for instance in instances:

@@ -195,7 +194,7 @@ class RenderCreator(Creator):
         instance_data.pop("uuid")
 
         if not instance_data.get("task"):
-            instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
+            instance_data["task"] = self.create_context.get_current_task_name()
 
         if not instance_data.get("creator_attributes"):
             is_old_farm = instance_data["family"] != "renderLocal"
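The subset_change[1] to subset_change.new_value edit reflects that instance-change objects in the update API now expose named attributes rather than acting as (old, new) pairs. A minimal sketch of such a change item (this class is illustrative, not the actual OpenPype implementation):

    class TrackChangesItem(object):
        """Illustrative old/new value pair with named access."""

        def __init__(self, old_value, new_value):
            self.old_value = old_value
            self.new_value = new_value

    change = TrackChangesItem("renderMain", "renderHero")
    # The old code indexed into the pair; the new code reads the attribute.
    assert change.new_value == "renderHero"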
@@ -2,8 +2,7 @@ import openpype.hosts.aftereffects.api as api
 from openpype.client import get_asset_by_name
 from openpype.pipeline import (
     AutoCreator,
-    CreatedInstance,
-    legacy_io,
+    CreatedInstance
 )
 from openpype.hosts.aftereffects.api.pipeline import cache_and_get_instances
 

@@ -38,10 +37,11 @@ class AEWorkfileCreator(AutoCreator):
                 existing_instance = instance
                 break
 
-        project_name = legacy_io.Session["AVALON_PROJECT"]
-        asset_name = legacy_io.Session["AVALON_ASSET"]
-        task_name = legacy_io.Session["AVALON_TASK"]
-        host_name = legacy_io.Session["AVALON_APP"]
+        context = self.create_context
+        project_name = context.get_current_project_name()
+        asset_name = context.get_current_asset_name()
+        task_name = context.get_current_task_name()
+        host_name = context.host_name
 
         if existing_instance is None:
             asset_doc = get_asset_by_name(project_name, asset_name)
@@ -4,6 +4,7 @@ from maya import cmds
 
 from openpype.client import get_asset_by_name, get_project
 from openpype.pipeline import legacy_io
+from . import lib
 
 
 class ToolWindows:

@@ -59,25 +60,11 @@ def edit_shader_definitions():
 
 def reset_frame_range():
     """Set frame range to current asset"""
-    # Set FPS first
-    fps = {15: 'game',
-           24: 'film',
-           25: 'pal',
-           30: 'ntsc',
-           48: 'show',
-           50: 'palf',
-           60: 'ntscf',
-           23.98: '23.976fps',
-           23.976: '23.976fps',
-           29.97: '29.97fps',
-           47.952: '47.952fps',
-           47.95: '47.952fps',
-           59.94: '59.94fps',
-           44100: '44100fps',
-           48000: '48000fps'
-           }.get(float(legacy_io.Session.get("AVALON_FPS", 25)), "pal")
-
-    cmds.currentUnit(time=fps)
+    fps = lib.convert_to_maya_fps(
+        float(legacy_io.Session.get("AVALON_FPS", 25))
+    )
+    lib.set_scene_fps(fps)
 
     # Set frame start/end
     project_name = legacy_io.active_project()
@@ -1970,8 +1970,6 @@ def get_id_from_sibling(node, history_only=True):
     return first_id
 
 
-
-
 # Project settings
 def set_scene_fps(fps, update=True):
     """Set FPS from project configuration

@@ -1984,28 +1982,21 @@ def set_scene_fps(fps, update=True):
 
     """
 
-    fps_mapping = {'15': 'game',
-                   '24': 'film',
-                   '25': 'pal',
-                   '30': 'ntsc',
-                   '48': 'show',
-                   '50': 'palf',
-                   '60': 'ntscf',
-                   '23.98': '23.976fps',
-                   '23.976': '23.976fps',
-                   '29.97': '29.97fps',
-                   '47.952': '47.952fps',
-                   '47.95': '47.952fps',
-                   '59.94': '59.94fps',
-                   '44100': '44100fps',
-                   '48000': '48000fps'}
-
     # pull from mapping
+    # this should convert float string to float and int to int
+    # so 25.0 is converted to 25, but 23.98 will be still float.
+    dec, ipart = math.modf(fps)
+    if dec == 0.0:
+        fps = int(ipart)
+    fps_mapping = {
+        '15': 'game',
+        '24': 'film',
+        '25': 'pal',
+        '30': 'ntsc',
+        '48': 'show',
+        '50': 'palf',
+        '60': 'ntscf',
+        '23.976023976023978': '23.976fps',
+        '29.97002997002997': '29.97fps',
+        '47.952047952047955': '47.952fps',
+        '59.94005994005994': '59.94fps',
+        '44100': '44100fps',
+        '48000': '48000fps'
+    }
 
     unit = fps_mapping.get(str(fps), None)
     if unit is None:

@@ -2125,7 +2116,9 @@ def set_context_settings():
     asset_data = asset_doc.get("data", {})
 
     # Set project fps
-    fps = asset_data.get("fps", project_data.get("fps", 25))
+    fps = convert_to_maya_fps(
+        asset_data.get("fps", project_data.get("fps", 25))
+    )
     legacy_io.Session["AVALON_FPS"] = str(fps)
     set_scene_fps(fps)

@@ -2147,15 +2140,12 @@ def validate_fps():
 
     """
 
-    fps = get_current_project_asset(fields=["data.fps"])["data"]["fps"]
-    # TODO(antirotor): This is hack as for framerates having multiple
-    # decimal places. FTrack is ceiling decimal values on
-    # fps to two decimal places but Maya 2019+ is reporting those fps
-    # with much higher resolution. As we currently cannot fix Ftrack
-    # rounding, we have to round those numbers coming from Maya.
-    current_fps = float_round(mel.eval('currentTimeUnitToFPS()'), 2)
+    expected_fps = convert_to_maya_fps(
+        get_current_project_asset(fields=["data.fps"])["data"]["fps"]
+    )
+    current_fps = mel.eval('currentTimeUnitToFPS()')
 
-    fps_match = current_fps == fps
+    fps_match = current_fps == expected_fps
     if not fps_match and not IS_HEADLESS:
         from openpype.widgets import popup

@@ -2164,14 +2154,19 @@ def validate_fps():
         dialog = popup.PopupUpdateKeys(parent=parent)
         dialog.setModal(True)
         dialog.setWindowTitle("Maya scene does not match project FPS")
-        dialog.setMessage("Scene %i FPS does not match project %i FPS" %
-                          (current_fps, fps))
+        dialog.setMessage(
+            "Scene {} FPS does not match project {} FPS".format(
+                current_fps, expected_fps
+            )
+        )
         dialog.setButtonText("Fix")
 
         # Set new text for button (add optional argument for the popup?)
         toggle = dialog.widgets["toggle"]
         update = toggle.isChecked()
-        dialog.on_clicked_state.connect(lambda: set_scene_fps(fps, update))
+        dialog.on_clicked_state.connect(
+            lambda: set_scene_fps(expected_fps, update)
+        )
 
         dialog.show()

@@ -3356,6 +3351,88 @@ def get_attribute_input(attr):
     return connections[0] if connections else None
 
 
+def convert_to_maya_fps(fps):
+    """Convert any fps to supported Maya framerates."""
+    float_framerates = [
+        23.976023976023978,
+        # WTF is 29.97 df vs fps?
+        29.97002997002997,
+        47.952047952047955,
+        59.94005994005994
+    ]
+    # 44100 fps evaluates as 41000.0. Why? Omitting for now.
+    int_framerates = [
+        2,
+        3,
+        4,
+        5,
+        6,
+        8,
+        10,
+        12,
+        15,
+        16,
+        20,
+        24,
+        25,
+        30,
+        40,
+        48,
+        50,
+        60,
+        75,
+        80,
+        90,
+        100,
+        120,
+        125,
+        150,
+        200,
+        240,
+        250,
+        300,
+        375,
+        400,
+        500,
+        600,
+        750,
+        1200,
+        1500,
+        2000,
+        3000,
+        6000,
+        48000
+    ]
+
+    # If input fps is a whole number we'll return.
+    if float(fps).is_integer():
+        # Validate fps is part of Maya's fps selection.
+        if fps not in int_framerates:
+            raise ValueError(
+                "Framerate \"{}\" is not supported in Maya".format(fps)
+            )
+        return fps
+    else:
+        # Differences to supported float frame rates.
+        differences = []
+        for i in float_framerates:
+            differences.append(abs(i - fps))
+
+        # Validate difference does not stray too far from supported framerates.
+        min_difference = min(differences)
+        min_index = differences.index(min_difference)
+        supported_framerate = float_framerates[min_index]
+        if min_difference > 0.1:
+            raise ValueError(
+                "Framerate \"{}\" strays too far from any supported framerate"
+                " in Maya. Closest supported framerate is \"{}\"".format(
+                    fps, supported_framerate
+                )
+            )
+
+        return supported_framerate
+
+
 def write_xgen_file(data, filepath):
     """Overwrites data in .xgen files.
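Why snap instead of comparing raw values? Maya accepts only a fixed set of time units, while trackers commonly store rounded rates such as 23.98 or 29.97; convert_to_maya_fps maps any value within 0.1 of a supported float rate onto the exact rate, which makes the equality check in validate_fps reliable. Behavior of the code added above, illustrated:

    import math

    # set_scene_fps: whole-number floats collapse to int keys, others stay float
    dec, ipart = math.modf(25.0)
    assert dec == 0.0 and int(ipart) == 25        # looked up as '25'

    # convert_to_maya_fps snaps near-misses onto Maya-supported rates
    assert convert_to_maya_fps(25) == 25
    assert convert_to_maya_fps(23.98) == 23.976023976023978
    try:
        convert_to_maya_fps(31.5)                 # no supported rate within 0.1
    except ValueError:
        pass                                      # raised as expected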
@@ -2,7 +2,7 @@ import json
 
 from maya import cmds
 
-from openpype.pipeline import registered_host
+from openpype.pipeline import registered_host, get_current_asset_name
 from openpype.pipeline.workfile.workfile_template_builder import (
     TemplateAlreadyImported,
     AbstractTemplateBuilder,

@@ -41,10 +41,27 @@ class MayaTemplateBuilder(AbstractTemplateBuilder):
             ))
 
         cmds.sets(name=PLACEHOLDER_SET, empty=True)
-        cmds.file(path, i=True, returnNewNodes=True)
+        new_nodes = cmds.file(path, i=True, returnNewNodes=True)
 
         cmds.setAttr(PLACEHOLDER_SET + ".hiddenInOutliner", True)
 
+        imported_sets = cmds.ls(new_nodes, set=True)
+        if not imported_sets:
+            return True
+
+        # update imported sets information
+        asset_name = get_current_asset_name()
+        for node in imported_sets:
+            if not cmds.attributeQuery("id", node=node, exists=True):
+                continue
+            if cmds.getAttr("{}.id".format(node)) != "pyblish.avalon.instance":
+                continue
+            if not cmds.attributeQuery("asset", node=node, exists=True):
+                continue
+
+            cmds.setAttr(
+                "{}.asset".format(node), asset_name, type="string")
+
         return True
@@ -33,18 +33,11 @@ class ValidateMayaUnits(pyblish.api.ContextPlugin):
         linearunits = context.data.get('linearUnits')
         angularunits = context.data.get('angularUnits')
 
-        # TODO(antirotor): This is hack as for framerates having multiple
-        # decimal places. FTrack is ceiling decimal values on
-        # fps to two decimal places but Maya 2019+ is reporting those fps
-        # with much higher resolution. As we currently cannot fix Ftrack
-        # rounding, we have to round those numbers coming from Maya.
-        # NOTE: this must be revisited yet again as it seems that Ftrack is
-        # now flooring the value?
-        fps = mayalib.float_round(context.data.get('fps'), 2, ceil)
+        fps = context.data.get('fps')
 
         # TODO repace query with using 'context.data["assetEntity"]'
         asset_doc = get_current_project_asset()
-        asset_fps = asset_doc["data"]["fps"]
+        asset_fps = mayalib.convert_to_maya_fps(asset_doc["data"]["fps"])
 
         self.log.info('Units (linear): {0}'.format(linearunits))
         self.log.info('Units (angular): {0}'.format(angularunits))
@@ -193,7 +193,7 @@ class ImageCreator(Creator):
         instance_data.pop("uuid")
 
         if not instance_data.get("task"):
-            instance_data["task"] = legacy_io.Session.get("AVALON_TASK")
+            instance_data["task"] = self.create_context.get_current_task_name()
 
         if not instance_data.get("variant"):
             instance_data["variant"] = ''
@@ -2,8 +2,7 @@ import openpype.hosts.photoshop.api as api
 from openpype.client import get_asset_by_name
 from openpype.pipeline import (
     AutoCreator,
-    CreatedInstance,
-    legacy_io
+    CreatedInstance
 )
 from openpype.hosts.photoshop.api.pipeline import cache_and_get_instances
 

@@ -38,10 +37,11 @@ class PSWorkfileCreator(AutoCreator):
                 existing_instance = instance
                 break
 
-        project_name = legacy_io.Session["AVALON_PROJECT"]
-        asset_name = legacy_io.Session["AVALON_ASSET"]
-        task_name = legacy_io.Session["AVALON_TASK"]
-        host_name = legacy_io.Session["AVALON_APP"]
+        context = self.create_context
+        project_name = context.get_current_project_name()
+        asset_name = context.get_current_asset_name()
+        task_name = context.get_current_task_name()
+        host_name = context.host_name
         if existing_instance is None:
             asset_doc = get_asset_by_name(project_name, asset_name)
             subset_name = self.get_subset_name(
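Both workfile auto-creators above stop reading legacy_io.Session keys and instead ask the CreateContext for the current context. Condensed, the new pattern inside a creator looks like this (assuming self.create_context is set, as it is for the plugins in this diff):

    def get_context_info(creator):
        """Collect the current context the way the updated creators do."""
        context = creator.create_context
        return {
            "project": context.get_current_project_name(),
            "asset": context.get_current_asset_name(),
            "task": context.get_current_task_name(),
            "host": context.host_name,
        }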
@@ -82,9 +82,6 @@ from .mongo import (
     validate_mongo_connection,
     OpenPypeMongoConnection
 )
-from .anatomy import (
-    Anatomy
-)
 
 from .dateutils import (
     get_datetime_data,

@@ -119,36 +116,19 @@ from .transcoding import (
 )
 from .avalon_context import (
-    CURRENT_DOC_SCHEMAS,
-    PROJECT_NAME_ALLOWED_SYMBOLS,
-    PROJECT_NAME_REGEX,
-    create_project,
-    is_latest,
-    any_outdated,
-    get_asset,
-    get_linked_assets,
-    get_latest_version,
-    get_system_general_anatomy_data,
-
     get_workfile_template_key,
     get_workfile_template_key_from_context,
-    get_workdir_data,
-    get_workdir,
-    get_workdir_with_workdir_data,
     get_last_workfile_with_version,
     get_last_workfile,
 
-    create_workfile_doc,
-    save_workfile_data_to_doc,
-    get_workfile_doc,
-
-    BuildWorkfile,
-
-    get_creator_by_name,
-
-    get_custom_workfile_template,
-
-    change_timer_to_current_context,
-
     get_custom_workfile_template_by_context,
     get_custom_workfile_template_by_string_context,
+    get_custom_workfile_template

@@ -186,8 +166,6 @@ from .plugin_tools import (
     get_subset_name,
     get_subset_name_with_asset_doc,
     prepare_template_data,
-    filter_pyblish_plugins,
-    set_plugin_attributes_from_settings,
     source_hash,
 )
 

@@ -278,34 +256,17 @@ __all__ = [
     "convert_ffprobe_fps_to_float",
 
-    "CURRENT_DOC_SCHEMAS",
-    "PROJECT_NAME_ALLOWED_SYMBOLS",
-    "PROJECT_NAME_REGEX",
-    "create_project",
-    "is_latest",
-    "any_outdated",
-    "get_asset",
-    "get_linked_assets",
-    "get_latest_version",
-    "get_system_general_anatomy_data",
-
     "get_workfile_template_key",
     "get_workfile_template_key_from_context",
-    "get_workdir_data",
-    "get_workdir",
-    "get_workdir_with_workdir_data",
     "get_last_workfile_with_version",
     "get_last_workfile",
 
-    "create_workfile_doc",
-    "save_workfile_data_to_doc",
-    "get_workfile_doc",
-
-    "BuildWorkfile",
-
-    "get_creator_by_name",
-
-    "change_timer_to_current_context",
-
     "get_custom_workfile_template_by_context",
     "get_custom_workfile_template_by_string_context",
     "get_custom_workfile_template",

@@ -338,8 +299,6 @@ __all__ = [
     "TaskNotSetError",
     "get_subset_name",
     "get_subset_name_with_asset_doc",
-    "filter_pyblish_plugins",
-    "set_plugin_attributes_from_settings",
     "source_hash",
 
     "format_file_size",

@@ -358,8 +317,6 @@ __all__ = [
 
     "terminal",
 
-    "Anatomy",
-
     "get_datetime_data",
     "get_formatted_current_time",
This hunk deletes an entire module, the deprecation shim that re-exported Anatomy from its new home in 'openpype.pipeline.anatomy':

@@ -1,38 +0,0 @@
"""Code related to project Anatomy was moved
to 'openpype.pipeline.anatomy' please change your imports as soon as
possible. File will be probably removed in OpenPype 3.14.*
"""

import warnings
import functools


class AnatomyDeprecatedWarning(DeprecationWarning):
    pass


def anatomy_deprecated(func):
    """Mark functions as deprecated.

    It will result in a warning being emitted when the function is used.
    """

    @functools.wraps(func)
    def new_func(*args, **kwargs):
        warnings.simplefilter("always", AnatomyDeprecatedWarning)
        warnings.warn(
            (
                "Deprecated import of 'Anatomy'."
                " Class was moved to 'openpype.pipeline.anatomy'."
                " Please change your imports of Anatomy in codebase."
            ),
            category=AnatomyDeprecatedWarning
        )
        return func(*args, **kwargs)
    return new_func


@anatomy_deprecated
def Anatomy(*args, **kwargs):
    from openpype.pipeline.anatomy import Anatomy
    return Anatomy(*args, **kwargs)
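The deleted module was a pure compatibility layer: importing Anatomy from openpype.lib kept working but warned about the new location; with the shim gone such imports now fail outright. The warning mechanism it used, demonstrated on a dummy function (the decorator is the one defined in the removed code above):

    import warnings

    @anatomy_deprecated
    def old_entry_point():
        return "ok"

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        assert old_entry_point() == "ok"

    # AnatomyDeprecatedWarning subclasses DeprecationWarning.
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)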
@@ -1,6 +1,5 @@
 """Should be used only inside of hosts."""
 import os
 import copy
-import platform
 import logging
 import functools

@@ -10,17 +9,12 @@ import six
 
 from openpype.client import (
     get_project,
     get_assets,
     get_asset_by_name,
     get_last_version_by_subset_name,
     get_workfile_info,
 )
-from openpype.client.operations import (
-    CURRENT_ASSET_DOC_SCHEMA,
-    CURRENT_PROJECT_SCHEMA,
-    CURRENT_PROJECT_CONFIG_SCHEMA,
-    PROJECT_NAME_ALLOWED_SYMBOLS,
-    PROJECT_NAME_REGEX,
-)
 from .profiles_filtering import filter_profiles
 from .path_templates import StringTemplate

@@ -128,70 +122,6 @@ def with_pipeline_io(func):
     return wrapped
 
 
-@deprecated("openpype.pipeline.context_tools.is_representation_from_latest")
-def is_latest(representation):
-    """Return whether the representation is from latest version
-
-    Args:
-        representation (dict): The representation document from the database.
-
-    Returns:
-        bool: Whether the representation is of latest version.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    from openpype.pipeline.context_tools import is_representation_from_latest
-
-    return is_representation_from_latest(representation)
-
-
-@deprecated("openpype.pipeline.load.any_outdated_containers")
-def any_outdated():
-    """Return whether the current scene has any outdated content.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    from openpype.pipeline.load import any_outdated_containers
-
-    return any_outdated_containers()
-
-
-@deprecated("openpype.pipeline.context_tools.get_current_project_asset")
-def get_asset(asset_name=None):
-    """ Returning asset document from database by its name.
-
-    Doesn't count with duplicities on asset names!
-
-    Args:
-        asset_name (str)
-
-    Returns:
-        (MongoDB document)
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    from openpype.pipeline.context_tools import get_current_project_asset
-
-    return get_current_project_asset(asset_name=asset_name)
-
-
-@deprecated("openpype.pipeline.template_data.get_general_template_data")
-def get_system_general_anatomy_data(system_settings=None):
-    """
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-    from openpype.pipeline.template_data import get_general_template_data
-
-    return get_general_template_data(system_settings)
-
-
 @deprecated("openpype.client.get_linked_asset_ids")
 def get_linked_asset_ids(asset_doc):
     """Return linked asset ids for `asset_doc` from DB

@@ -214,66 +144,6 @@ def get_linked_asset_ids(asset_doc):
     return get_linked_asset_ids(project_name, asset_doc=asset_doc)
 
 
-@deprecated("openpype.client.get_linked_assets")
-def get_linked_assets(asset_doc):
-    """Return linked assets for `asset_doc` from DB
-
-    Args:
-        asset_doc (dict): Asset document from DB
-
-    Returns:
-        (list) Asset documents of input links for passed asset doc.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    from openpype.pipeline import legacy_io
-    from openpype.client import get_linked_assets
-
-    project_name = legacy_io.active_project()
-
-    return get_linked_assets(project_name, asset_doc=asset_doc)
-
-
-@deprecated("openpype.client.get_last_version_by_subset_name")
-def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None):
-    """Retrieve latest version from `asset_name`, and `subset_name`.
-
-    Do not use if you want to query more than 5 latest versions as this method
-    query 3 times to mongo for each call. For those cases is better to use
-    more efficient way, e.g. with help of aggregations.
-
-    Args:
-        asset_name (str): Name of asset.
-        subset_name (str): Name of subset.
-        dbcon (AvalonMongoDB, optional): Avalon Mongo connection with Session.
-        project_name (str, optional): Find latest version in specific project.
-
-    Returns:
-        None: If asset, subset or version were not found.
-        dict: Last version document for entered.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    if not project_name:
-        if not dbcon:
-            from openpype.pipeline import legacy_io
-
-            log.debug("Using `legacy_io` for query.")
-            dbcon = legacy_io
-        # Make sure is installed
-        dbcon.install()
-
-        project_name = dbcon.active_project()
-
-    return get_last_version_by_subset_name(
-        project_name, subset_name, asset_name=asset_name
-    )
-
-
 @deprecated(
     "openpype.pipeline.workfile.get_workfile_template_key_from_context")
 def get_workfile_template_key_from_context(

@@ -361,142 +231,6 @@ def get_workfile_template_key(
     )
 
 
-@deprecated("openpype.pipeline.template_data.get_template_data")
-def get_workdir_data(project_doc, asset_doc, task_name, host_name):
-    """Prepare data for workdir template filling from entered information.
-
-    Args:
-        project_doc (dict): Mongo document of project from MongoDB.
-        asset_doc (dict): Mongo document of asset from MongoDB.
-        task_name (str): Task name for which are workdir data preapred.
-        host_name (str): Host which is used to workdir. This is required
-            because workdir template may contain `{app}` key.
-
-    Returns:
-        dict: Data prepared for filling workdir template.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    from openpype.pipeline.template_data import get_template_data
-
-    return get_template_data(
-        project_doc, asset_doc, task_name, host_name
-    )
-
-
-@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data")
-def get_workdir_with_workdir_data(
-    workdir_data, anatomy=None, project_name=None, template_key=None
-):
-    """Fill workdir path from entered data and project's anatomy.
-
-    It is possible to pass only project's name instead of project's anatomy but
-    one of them **must** be entered. It is preferred to enter anatomy if is
-    available as initialization of a new Anatomy object may be time consuming.
-
-    Args:
-        workdir_data (dict): Data to fill workdir template.
-        anatomy (Anatomy): Anatomy object for specific project. Optional if
-            `project_name` is entered.
-        project_name (str): Project's name. Optional if `anatomy` is entered
-            otherwise Anatomy object is created with using the project name.
-        template_key (str): Key of work templates in anatomy templates. If not
-            passed `get_workfile_template_key_from_context` is used to get it.
-        dbcon(AvalonMongoDB): Mongo connection. Required only if 'template_key'
-            and 'project_name' are not passed.
-
-    Returns:
-        TemplateResult: Workdir path.
-
-    Raises:
-        ValueError: When both `anatomy` and `project_name` are set to None.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    if not anatomy and not project_name:
-        raise ValueError((
-            "Missing required arguments one of `project_name` or `anatomy`"
-            " must be entered."
-        ))
-
-    if not project_name:
-        project_name = anatomy.project_name
-
-    from openpype.pipeline.workfile import get_workdir_with_workdir_data
-
-    return get_workdir_with_workdir_data(
-        workdir_data, project_name, anatomy, template_key
-    )
-
-
-@deprecated("openpype.pipeline.workfile.get_workdir_with_workdir_data")
-def get_workdir(
-    project_doc,
-    asset_doc,
-    task_name,
-    host_name,
-    anatomy=None,
-    template_key=None
-):
-    """Fill workdir path from entered data and project's anatomy.
-
-    Args:
-        project_doc (dict): Mongo document of project from MongoDB.
-        asset_doc (dict): Mongo document of asset from MongoDB.
-        task_name (str): Task name for which are workdir data preapred.
-        host_name (str): Host which is used to workdir. This is required
-            because workdir template may contain `{app}` key. In `Session`
-            is stored under `AVALON_APP` key.
-        anatomy (Anatomy): Optional argument. Anatomy object is created using
-            project name from `project_doc`. It is preferred to pass this
-            argument as initialization of a new Anatomy object may be time
-            consuming.
-        template_key (str): Key of work templates in anatomy templates. Default
-            value is defined in `get_workdir_with_workdir_data`.
-
-    Returns:
-        TemplateResult: Workdir path.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    from openpype.pipeline.workfile import get_workdir
-    # Output is TemplateResult object which contain useful data
-    return get_workdir(
-        project_doc,
-        asset_doc,
-        task_name,
-        host_name,
-        anatomy,
-        template_key
-    )
-
-
-@deprecated("openpype.pipeline.context_tools.get_template_data_from_session")
-def template_data_from_session(session=None):
-    """ Return dictionary with template from session keys.
-
-    Args:
-        session (dict, Optional): The Session to use. If not provided use the
-            currently active global Session.
-
-    Returns:
-        dict: All available data from session.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    from openpype.pipeline.context_tools import get_template_data_from_session
-
-    return get_template_data_from_session(session)
-
-
 @deprecated("openpype.pipeline.context_tools.compute_session_changes")
 def compute_session_changes(
         session, task=None, asset=None, app=None, template_key=None

@@ -588,133 +322,6 @@ def update_current_task(task=None, asset=None, app=None, template_key=None):
     return change_current_context(asset, task, template_key)
 
 
-@deprecated("openpype.client.get_workfile_info")
-def get_workfile_doc(asset_id, task_name, filename, dbcon=None):
-    """Return workfile document for entered context.
-
-    Do not use this method to get more than one document. In that cases use
-    custom query as this will return documents from database one by one.
-
-    Args:
-        asset_id (ObjectId): Mongo ID of an asset under which workfile belongs.
-        task_name (str): Name of task under which the workfile belongs.
-        filename (str): Name of a workfile.
-        dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and
-            `legacy_io` is used if not entered.
-
-    Returns:
-        dict: Workfile document or None.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    # Use legacy_io if dbcon is not entered
-    if not dbcon:
-        from openpype.pipeline import legacy_io
-        dbcon = legacy_io
-
-    project_name = dbcon.active_project()
-    return get_workfile_info(project_name, asset_id, task_name, filename)
-
-
-@deprecated
-def create_workfile_doc(asset_doc, task_name, filename, workdir, dbcon=None):
-    """Creates or replace workfile document in mongo.
-
-    Do not use this method to update data. This method will remove all
-    additional data from existing document.
-
-    Args:
-        asset_doc (dict): Document of asset under which workfile belongs.
-        task_name (str): Name of task for which is workfile related to.
-        filename (str): Filename of workfile.
-        workdir (str): Path to directory where `filename` is located.
-        dbcon (AvalonMongoDB): Optionally enter avalon AvalonMongoDB object and
-            `legacy_io` is used if not entered.
-    """
-
-    from openpype.pipeline import Anatomy
-    from openpype.pipeline.template_data import get_template_data
-
-    # Use legacy_io if dbcon is not entered
-    if not dbcon:
-        from openpype.pipeline import legacy_io
-        dbcon = legacy_io
-
-    # Filter of workfile document
-    doc_filter = {
-        "type": "workfile",
-        "parent": asset_doc["_id"],
-        "task_name": task_name,
-        "filename": filename
-    }
-    # Document data are copy of filter
-    doc_data = copy.deepcopy(doc_filter)
-
-    # Prepare project for workdir data
-    project_name = dbcon.active_project()
-    project_doc = get_project(project_name)
-    workdir_data = get_template_data(
-        project_doc, asset_doc, task_name, dbcon.Session["AVALON_APP"]
-    )
-    # Prepare anatomy
-    anatomy = Anatomy(project_name)
-    # Get workdir path (result is anatomy.TemplateResult)
-    template_workdir = get_workdir_with_workdir_data(
-        workdir_data, anatomy
-    )
-    template_workdir_path = str(template_workdir).replace("\\", "/")
-
-    # Replace slashses in workdir path where workfile is located
-    mod_workdir = workdir.replace("\\", "/")
-
-    # Replace workdir from templates with rootless workdir
-    rootles_workdir = mod_workdir.replace(
-        template_workdir_path,
-        template_workdir.rootless.replace("\\", "/")
-    )
-
-    doc_data["schema"] = "pype:workfile-1.0"
-    doc_data["files"] = ["/".join([rootles_workdir, filename])]
-    doc_data["data"] = {}
-
-    dbcon.replace_one(
-        doc_filter,
-        doc_data,
-        upsert=True
-    )
-
-
-@deprecated
-def save_workfile_data_to_doc(workfile_doc, data, dbcon=None):
-    if not workfile_doc:
-        # TODO add log message
-        return
-
-    if not data:
-        return
-
-    # Use legacy_io if dbcon is not entered
-    if not dbcon:
-        from openpype.pipeline import legacy_io
-        dbcon = legacy_io
-
-    # Convert data to mongo modification keys/values
-    # - this is naive implementation which does not expect nested
-    #   dictionaries
-    set_data = {}
-    for key, value in data.items():
-        new_key = "data.{}".format(key)
-        set_data[new_key] = value
-
-    # Update workfile document with data
-    dbcon.update_one(
-        {"_id": workfile_doc["_id"]},
-        {"$set": set_data}
-    )
-
-
 @deprecated("openpype.pipeline.workfile.BuildWorkfile")
 def BuildWorkfile():
     """Build workfile class was moved to workfile pipeline.

@@ -747,38 +354,6 @@ def get_creator_by_name(creator_name, case_sensitive=False):
     return get_legacy_creator_by_name(creator_name, case_sensitive)
 
 
-@deprecated
-def change_timer_to_current_context():
-    """Called after context change to change timers.
-
-    Deprecated:
-        This method is specific for TimersManager module so please use the
-        functionality from there. Function will be removed after release
-        version 3.15.*
-    """
-
-    from openpype.pipeline import legacy_io
-
-    webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL")
-    if not webserver_url:
-        log.warning("Couldn't find webserver url")
-        return
-
-    rest_api_url = "{}/timers_manager/start_timer".format(webserver_url)
-    try:
-        import requests
-    except Exception:
-        log.warning("Couldn't start timer")
-        return
-    data = {
-        "project_name": legacy_io.Session["AVALON_PROJECT"],
-        "asset_name": legacy_io.Session["AVALON_ASSET"],
-        "task_name": legacy_io.Session["AVALON_TASK"]
-    }
-
-    requests.post(rest_api_url, json=data)
-
-
 def _get_task_context_data_for_anatomy(
     project_doc, asset_doc, task_name, anatomy=None
 ):

@@ -800,6 +375,8 @@ def _get_task_context_data_for_anatomy(
         dict: With Anatomy context data.
     """
 
+    from openpype.pipeline.template_data import get_general_template_data
+
     if anatomy is None:
         from openpype.pipeline import Anatomy
         anatomy = Anatomy(project_doc["name"])

@@ -840,7 +417,7 @@ def _get_task_context_data_for_anatomy(
         }
     }
 
-    system_general_data = get_system_general_anatomy_data()
+    system_general_data = get_general_template_data()
     data.update(system_general_data)
 
     return data
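Every wrapper deleted from avalon_context names its replacement in its @deprecated(...) argument, so migration is mechanical. Collected from the hunks above, the new imports are:

    from openpype.pipeline.context_tools import (
        is_representation_from_latest,   # replaces is_latest
        get_current_project_asset,       # replaces get_asset
        get_template_data_from_session,  # replaces template_data_from_session
    )
    from openpype.pipeline.load import any_outdated_containers  # replaces any_outdated
    from openpype.pipeline.template_data import (
        get_template_data,          # replaces get_workdir_data
        get_general_template_data,  # replaces get_system_general_anatomy_data
    )
    from openpype.pipeline.workfile import (
        get_workdir,                    # same name, new home
        get_workdir_with_workdir_data,  # same name, new home
    )
    from openpype.client import (
        get_linked_assets,                # replaces lib.get_linked_assets
        get_last_version_by_subset_name,  # replaces get_latest_version
        get_workfile_info,                # replaces get_workfile_doc
    )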
@@ -8,7 +8,6 @@ import warnings
 import functools
 
-from openpype.client import get_asset_by_id
 from openpype.settings import get_project_settings
 
 log = logging.getLogger(__name__)
 

@@ -101,8 +100,6 @@ def get_subset_name_with_asset_doc(
             is not passed.
         dynamic_data (dict): Dynamic data specific for a creator which creates
             instance.
-        dbcon (AvalonMongoDB): Mongo connection to be able query asset document
-            if 'asset_doc' is not passed.
     """
 
     from openpype.pipeline.create import get_subset_name

@@ -202,122 +199,6 @@ def prepare_template_data(fill_pairs):
     return fill_data
 
 
-@deprecated("openpype.pipeline.publish.lib.filter_pyblish_plugins")
-def filter_pyblish_plugins(plugins):
-    """Filter pyblish plugins by presets.
-
-    This servers as plugin filter / modifier for pyblish. It will load plugin
-    definitions from presets and filter those needed to be excluded.
-
-    Args:
-        plugins (dict): Dictionary of plugins produced by :mod:`pyblish-base`
-            `discover()` method.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    from openpype.pipeline.publish.lib import filter_pyblish_plugins
-
-    filter_pyblish_plugins(plugins)
-
-
-@deprecated
-def set_plugin_attributes_from_settings(
-    plugins, superclass, host_name=None, project_name=None
-):
-    """Change attribute values on Avalon plugins by project settings.
-
-    This function should be used only in host context. Modify
-    behavior of plugins.
-
-    Args:
-        plugins (list): Plugins discovered by origin avalon discover method.
-        superclass (object): Superclass of plugin type (e.g. Cretor, Loader).
-        host_name (str): Name of host for which plugins are loaded and from.
-            Value from environment `AVALON_APP` is used if not entered.
-        project_name (str): Name of project for which settings will be loaded.
-            Value from environment `AVALON_PROJECT` is used if not entered.
-
-    Deprecated:
-        Function will be removed after release version 3.15.*
-    """
-
-    # Function is not used anymore
-    from openpype.pipeline import LegacyCreator, LoaderPlugin
-
-    # determine host application to use for finding presets
-    if host_name is None:
-        host_name = os.environ.get("AVALON_APP")
-
-    if project_name is None:
-        project_name = os.environ.get("AVALON_PROJECT")
-
-    # map plugin superclass to preset json. Currently supported is load and
-    # create (LoaderPlugin and LegacyCreator)
-    plugin_type = None
-    if superclass is LoaderPlugin or issubclass(superclass, LoaderPlugin):
-        plugin_type = "load"
-    elif superclass is LegacyCreator or issubclass(superclass, LegacyCreator):
-        plugin_type = "create"
-
-    if not host_name or not project_name or plugin_type is None:
-        msg = "Skipped attributes override from settings."
-        if not host_name:
-            msg += " Host name is not defined."
-
-        if not project_name:
-            msg += " Project name is not defined."
-
-        if plugin_type is None:
-            msg += " Plugin type is unsupported for class {}.".format(
-                superclass.__name__
-            )
-
-        print(msg)
-        return
-
-    print(">>> Finding presets for {}:{} ...".format(host_name, plugin_type))
-
-    project_settings = get_project_settings(project_name)
-    plugin_type_settings = (
-        project_settings
-        .get(host_name, {})
-        .get(plugin_type, {})
-    )
-    global_type_settings = (
-        project_settings
-        .get("global", {})
-        .get(plugin_type, {})
-    )
-    if not global_type_settings and not plugin_type_settings:
-        return
-
-    for plugin in plugins:
-        plugin_name = plugin.__name__
-
-        plugin_settings = None
-        # Look for plugin settings in host specific settings
-        if plugin_name in plugin_type_settings:
-            plugin_settings = plugin_type_settings[plugin_name]
-
-        # Look for plugin settings in global settings
-        elif plugin_name in global_type_settings:
-            plugin_settings = global_type_settings[plugin_name]
-
-        if not plugin_settings:
-            continue
-
-        print(">>> We have preset for {}".format(plugin_name))
-        for option, value in plugin_settings.items():
-            if option == "enabled" and value is False:
-                setattr(plugin, "active", False)
-                print("  - is disabled by preset")
-            else:
-                setattr(plugin, option, value)
-                print("  - setting `{}`: `{}`".format(option, value))
-
-
 def source_hash(filepath, *args):
     """Generate simple identifier for a source file.
     This is used to identify whether a source file has previously been
@@ -3,6 +3,7 @@ import json
 import copy
 import pyblish.api
 
+from openpype.pipeline.publish import get_publish_repre_path
 from openpype.lib.openpype_version import get_openpype_version
 from openpype.lib.transcoding import (
     get_ffprobe_streams,

@@ -153,7 +154,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
 
         if not review_representations or has_movie_review:
             for repre in thumbnail_representations:
-                repre_path = self._get_repre_path(instance, repre, False)
+                repre_path = get_publish_repre_path(instance, repre, False)
                 if not repre_path:
                     self.log.warning(
                         "Published path is not set and source was removed."

@@ -210,7 +211,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
                         "from {}".format(repre))
                     continue
 
-                repre_path = self._get_repre_path(instance, repre, False)
+                repre_path = get_publish_repre_path(instance, repre, False)
                 if not repre_path:
                     self.log.warning(
                         "Published path is not set and source was removed."

@@ -324,7 +325,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
 
         # Add others representations as component
         for repre in other_representations:
-            published_path = self._get_repre_path(instance, repre, True)
+            published_path = get_publish_repre_path(instance, repre, True)
            if not published_path:
                 continue
             # Create copy of base comp item and append it

@@ -364,51 +365,6 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
     def _collect_additional_metadata(self, streams):
         pass
 
-    def _get_repre_path(self, instance, repre, only_published):
-        """Get representation path that can be used for integration.
-
-        When 'only_published' is set to true the validation of path is not
-        relevant. In that case we just need what is set in 'published_path'
-        as "reference". The reference is not used to get or upload the file but
-        for reference where the file was published.
-
-        Args:
-            instance (pyblish.Instance): Processed instance object. Used
-                for source of staging dir if representation does not have
-                filled it.
-            repre (dict): Representation on instance which could be and
-                could not be integrated with main integrator.
-            only_published (bool): Care only about published paths and
-                ignore if filepath is not existing anymore.
-
-        Returns:
-            str: Path to representation file.
-            None: Path is not filled or does not exists.
-        """
-
-        published_path = repre.get("published_path")
-        if published_path:
-            published_path = os.path.normpath(published_path)
-            if os.path.exists(published_path):
-                return published_path
-
-        if only_published:
-            return published_path
-
-        comp_files = repre["files"]
-        if isinstance(comp_files, (tuple, list, set)):
-            filename = comp_files[0]
-        else:
-            filename = comp_files
-
-        staging_dir = repre.get("stagingDir")
-        if not staging_dir:
-            staging_dir = instance.data["stagingDir"]
-        src_path = os.path.normpath(os.path.join(staging_dir, filename))
-        if os.path.exists(src_path):
-            return src_path
-        return None
-
     def _get_asset_version_status_name(self, instance):
         if not self.asset_versions_status_profiles:
             return None
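Several publish integrators previously carried private copies of the same path resolution; this diff points them all at the shared openpype.pipeline.publish.get_publish_repre_path instead. Judging from the removed _get_repre_path method above, the helper's contract is: prefer the representation's published_path, and unless only_published is requested, fall back to the file in the staging directory. A condensed sketch of that contract (the authoritative implementation lives in openpype.pipeline.publish):

    import os

    def publish_repre_path_sketch(instance, repre, only_published):
        """Condensed restatement of the removed _get_repre_path logic."""
        published_path = repre.get("published_path")
        if published_path:
            published_path = os.path.normpath(published_path)
            if os.path.exists(published_path):
                return published_path
        if only_published:
            # Reference-only usage: return whatever was recorded.
            return published_path

        files = repre["files"]
        # Sequences store a collection of files; take one representative name.
        filename = next(iter(files)) if isinstance(files, (tuple, list, set)) else files
        staging_dir = repre.get("stagingDir") or instance.data["stagingDir"]
        src_path = os.path.normpath(os.path.join(staging_dir, filename))
        return src_path if os.path.exists(src_path) else None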
@@ -1,6 +1,8 @@
 import os
 import pyblish.api
 
+from openpype.pipeline.publish import get_publish_repre_path
+
 
 class IntegrateShotgridPublish(pyblish.api.InstancePlugin):
     """

@@ -22,7 +24,9 @@ class IntegrateShotgridPublish(pyblish.api.InstancePlugin):
 
         for representation in instance.data.get("representations", []):
 
-            local_path = representation.get("published_path")
+            local_path = get_publish_repre_path(
+                instance, representation, False
+            )
             code = os.path.basename(local_path)
 
             if representation.get("tags", []):
@@ -1,6 +1,7 @@
 import os
 import pyblish.api
 
+from openpype.pipeline.publish import get_publish_repre_path
 
 
 class IntegrateShotgridVersion(pyblish.api.InstancePlugin):
     """Integrate Shotgrid Version"""

@@ -41,8 +42,9 @@ class IntegrateShotgridVersion(pyblish.api.InstancePlugin):
             data_to_update["sg_status_list"] = status
 
         for representation in instance.data.get("representations", []):
-            local_path = representation.get("published_path")
-            code = os.path.basename(local_path)
+            local_path = get_publish_repre_path(
+                instance, representation, False
+            )
 
             if "shotgridreview" in representation.get("tags", []):
@@ -8,6 +8,7 @@ from abc import ABCMeta, abstractmethod
 import time
 
 from openpype.client import OpenPypeMongoConnection
+from openpype.pipeline.publish import get_publish_repre_path
 from openpype.lib.plugin_tools import prepare_template_data
 
 

@@ -167,9 +168,8 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
         thumbnail_path = None
         for repre in instance.data.get("representations", []):
             if repre.get('thumbnail') or "thumbnail" in repre.get('tags', []):
-                repre_thumbnail_path = (
-                    repre.get("published_path") or
-                    os.path.join(repre["stagingDir"], repre["files"])
+                repre_thumbnail_path = get_publish_repre_path(
+                    instance, repre, False
                 )
                 if os.path.exists(repre_thumbnail_path):
                     thumbnail_path = repre_thumbnail_path

@@ -184,9 +184,8 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
             if (repre.get("review")
                     or "review" in tags
                     or "burnin" in tags):
-                repre_review_path = (
-                    repre.get("published_path") or
-                    os.path.join(repre["stagingDir"], repre["files"])
+                repre_review_path = get_publish_repre_path(
+                    instance, repre, False
                 )
                 if os.path.exists(repre_review_path):
                     review_path = repre_review_path
@ -8,7 +8,10 @@ import inspect
|
|||
from uuid import uuid4
|
||||
from contextlib import contextmanager
|
||||
|
||||
from openpype.client import get_assets
|
||||
import pyblish.logic
|
||||
import pyblish.api
|
||||
|
||||
from openpype.client import get_assets, get_asset_by_name
|
||||
from openpype.settings import (
|
||||
get_system_settings,
|
||||
get_project_settings
|
||||
|
|
@ -17,13 +20,11 @@ from openpype.lib.attribute_definitions import (
|
|||
UnknownDef,
|
||||
serialize_attr_defs,
|
||||
deserialize_attr_defs,
|
||||
get_default_values,
|
||||
)
|
||||
from openpype.host import IPublishHost
|
||||
from openpype.pipeline import legacy_io
|
||||
from openpype.pipeline.mongodb import (
|
||||
AvalonMongoDB,
|
||||
session_data_from_environment,
|
||||
)
|
||||
from openpype.pipeline.plugin_discover import DiscoverResult
|
||||
|
||||
from .creator_plugins import (
|
||||
Creator,
|
||||
|
|
@ -1338,8 +1339,6 @@ class CreateContext:
|
|||
Args:
|
||||
host(ModuleType): Host implementation which handles implementation and
|
||||
global metadata.
|
||||
dbcon(AvalonMongoDB): Connection to mongo with context (at least
|
||||
project).
|
||||
headless(bool): Context is created out of UI (Current not used).
|
||||
reset(bool): Reset context on initialization.
|
||||
discover_publish_plugins(bool): Discover publish plugins during reset
|
||||
|
|
@ -1347,16 +1346,8 @@ class CreateContext:
|
|||
"""
|
||||
|
||||
def __init__(
|
||||
self, host, dbcon=None, headless=False, reset=True,
|
||||
discover_publish_plugins=True
|
||||
self, host, headless=False, reset=True, discover_publish_plugins=True
|
||||
):
|
||||
# Create conncetion if is not passed
|
||||
if dbcon is None:
|
||||
session = session_data_from_environment(True)
|
||||
dbcon = AvalonMongoDB(session)
|
||||
dbcon.install()
|
||||
|
||||
self.dbcon = dbcon
|
||||
self.host = host
|
||||
|
||||
# Prepare attribute for logger (Created on demand in `log` property)
|
||||
|
|
@ -1380,6 +1371,10 @@ class CreateContext:
|
|||
" Missing methods: {}"
|
||||
).format(joined_methods))
|
||||
|
||||
self._current_project_name = None
|
||||
self._current_asset_name = None
|
||||
self._current_task_name = None
|
||||
|
||||
self._host_is_valid = host_is_valid
|
||||
# Currently unused variable
|
||||
self.headless = headless
|
||||
|
|
@ -1387,6 +1382,8 @@ class CreateContext:
|
|||
# Instances by their ID
|
||||
self._instances_by_id = {}
|
||||
|
||||
self.creator_discover_result = None
|
||||
self.convertor_discover_result = None
|
||||
# Discovered creators
|
||||
self.creators = {}
|
||||
# Prepare categories of creators
|
||||
|
|
@ -1499,11 +1496,20 @@ class CreateContext:
|
|||
|
||||
@property
|
||||
def host_name(self):
|
||||
if hasattr(self.host, "name"):
|
||||
return self.host.name
|
||||
return os.environ["AVALON_APP"]
|
||||
|
||||
@property
|
||||
def project_name(self):
|
||||
return self.dbcon.active_project()
|
||||
def get_current_project_name(self):
|
||||
return self._current_project_name
|
||||
|
||||
def get_current_asset_name(self):
|
||||
return self._current_asset_name
|
||||
|
||||
def get_current_task_name(self):
|
||||
return self._current_task_name
|
||||
|
||||
project_name = property(get_current_project_name)
|
||||
|
||||
@property
|
||||
def log(self):
|
||||
|
|
@ -1520,7 +1526,7 @@ class CreateContext:
|
|||
|
||||
self.reset_preparation()
|
||||
|
||||
self.reset_avalon_context()
|
||||
self.reset_current_context()
|
||||
self.reset_plugins(discover_publish_plugins)
|
||||
self.reset_context_data()
|
||||
|
||||
|
|
@@ -1567,14 +1573,22 @@ class CreateContext:
         self._collection_shared_data = None
         self.refresh_thumbnails()

-    def reset_avalon_context(self):
-        """Give ability to reset avalon context.
+    def reset_current_context(self):
+        """Refresh current context.
+
+        Reset is based on optional host implementation of `get_current_context`
+        function or using `legacy_io.Session`.

         Some hosts have ability to change context file without using workfiles
-        tool but that change is not propagated to
+        tool but that change is not propagated to 'legacy_io.Session'
+        nor 'os.environ'.
+
+        Todos:
+            UI: Current context should be also checked on save - compare
+                initial values vs. current values.
+            Related to UI checks: Current workfile can be also considered
+                as current context information as that's where the metadata
+                are stored. We should store the workfile (if is available) too.
         """

         project_name = asset_name = task_name = None
@@ -1592,12 +1606,9 @@ class CreateContext:
         if not task_name:
             task_name = legacy_io.Session.get("AVALON_TASK")

-        if project_name:
-            self.dbcon.Session["AVALON_PROJECT"] = project_name
-        if asset_name:
-            self.dbcon.Session["AVALON_ASSET"] = asset_name
-        if task_name:
-            self.dbcon.Session["AVALON_TASK"] = task_name
+        self._current_project_name = project_name
+        self._current_asset_name = asset_name
+        self._current_task_name = task_name

     def reset_plugins(self, discover_publish_plugins=True):
         """Reload plugins.
@@ -1611,18 +1622,15 @@ class CreateContext:
         self._reset_convertor_plugins()

     def _reset_publish_plugins(self, discover_publish_plugins):
         import pyblish.logic

         from openpype.pipeline import OpenPypePyblishPluginMixin
         from openpype.pipeline.publish import (
-            publish_plugins_discover,
-            DiscoverResult
+            publish_plugins_discover
         )

         # Reset publish plugins
         self._attr_plugins_by_family = {}

-        discover_result = DiscoverResult()
+        discover_result = DiscoverResult(pyblish.api.Plugin)
         plugins_with_defs = []
         plugins_by_targets = []
         plugins_mismatch_targets = []
@@ -1661,7 +1669,9 @@ class CreateContext:
         creators = {}
         autocreators = {}
         manual_creators = {}
-        for creator_class in discover_creator_plugins():
+        report = discover_creator_plugins(return_report=True)
+        self.creator_discover_result = report
+        for creator_class in report.plugins:
             if inspect.isabstract(creator_class):
                 self.log.info(
                     "Skipping abstract Creator {}".format(str(creator_class))
@@ -1706,7 +1716,9 @@ class CreateContext:

     def _reset_convertor_plugins(self):
         convertors_plugins = {}
-        for convertor_class in discover_convertor_plugins():
+        report = discover_convertor_plugins(return_report=True)
+        self.convertor_discover_result = report
+        for convertor_class in report.plugins:
             if inspect.isabstract(convertor_class):
                 self.log.info(
                     "Skipping abstract Creator {}".format(str(convertor_class))
@@ -1792,40 +1804,128 @@ class CreateContext:
         with self.bulk_instances_collection():
             self._bulk_instances_to_process.append(instance)

-    def create(self, identifier, *args, **kwargs):
-        """Wrapper for creators to trigger created.
+    def _get_creator_in_create(self, identifier):
+        """Creator by identifier with unified error.

-        Different types of creators may expect different arguments thus the
-        hints for args are blind.
+        Helper method to get creator by identifier with same error when
+        creator is not available.

         Args:
-            identifier (str): Creator's identifier.
-            *args (Tuple[Any]): Arguments for create method.
-            **kwargs (Dict[Any, Any]): Keyword argument for create method.
+            identifier (str): Identifier of creator plugin.
+
+        Returns:
+            BaseCreator: Creator found by identifier.
+
+        Raises:
+            CreatorError: When identifier is not known.
         """

-        error_message = "Failed to run Creator with identifier \"{}\". {}"
         creator = self.creators.get(identifier)
-        label = getattr(creator, "label", None)
-        failed = False
-        add_traceback = False
-        exc_info = None
-        try:
-            # Fake CreatorError (Could be maybe specific exception?)
-            if creator is None:
+        # Fake CreatorError (Could be maybe specific exception?)
+        if creator is None:
+            raise CreatorError(
+                "Creator {} was not found".format(identifier)
+            )
+        return creator
+
+    def create(
+        self,
+        creator_identifier,
+        variant,
+        asset_doc=None,
+        task_name=None,
+        pre_create_data=None
+    ):
+        """Trigger create of plugins with standardized arguments.
+
+        Arguments 'asset_doc' and 'task_name' use current context as default
+        values. If only 'task_name' is provided it will be overridden by
+        task name from current context. If 'task_name' is not provided
+        when 'asset_doc' is, it is considered that task name is not specified,
+        which can lead to error if subset name template requires task name.
+
+        Args:
+            creator_identifier (str): Identifier of creator plugin.
+            variant (str): Variant used for subset name.
+            asset_doc (Dict[str, Any]): Asset document which define context of
+                creation (possible context of created instance/s).
+            task_name (str): Name of task to which is context related.
+            pre_create_data (Dict[str, Any]): Pre-create attribute values.
+
+        Returns:
+            Any: Output of triggered creator's 'create' method.
+
+        Raises:
+            CreatorError: If creator was not found or asset is empty.
+        """
+
+        creator = self._get_creator_in_create(creator_identifier)
+
+        project_name = self.project_name
+        if asset_doc is None:
+            asset_name = self.get_current_asset_name()
+            asset_doc = get_asset_by_name(project_name, asset_name)
+            task_name = self.get_current_task_name()
+            if asset_doc is None:
                 raise CreatorError(
-                    "Creator {} was not found".format(identifier)
+                    "Asset with name {} was not found".format(asset_name)
                 )

-            creator.create(*args, **kwargs)
+        if pre_create_data is None:
+            pre_create_data = {}
+
+        precreate_attr_defs = creator.get_pre_create_attr_defs() or []
+        # Create default values of precreate data
+        _pre_create_data = get_default_values(precreate_attr_defs)
+        # Update passed precreate data to default values
+        # TODO validate types
+        _pre_create_data.update(pre_create_data)
+
+        subset_name = creator.get_subset_name(
+            variant,
+            task_name,
+            asset_doc,
+            project_name,
+            self.host_name
+        )
+        instance_data = {
+            "asset": asset_doc["name"],
+            "task": task_name,
+            "family": creator.family,
+            "variant": variant
+        }
+        return creator.create(
+            subset_name,
+            instance_data,
+            _pre_create_data
+        )
+
+    def _create_with_unified_error(
+        self, identifier, creator, *args, **kwargs
+    ):
+        error_message = "Failed to run Creator with identifier \"{}\". {}"
+
+        label = None
+        add_traceback = False
+        result = None
+        fail_info = None
+        success = False
+
+        try:
+            # Try to get creator and his label
+            if creator is None:
+                creator = self._get_creator_in_create(identifier)
+            label = getattr(creator, "label", label)
+
+            # Run create
+            result = creator.create(*args, **kwargs)
+            success = True

         except CreatorError:
-            failed = True
             exc_info = sys.exc_info()
             self.log.warning(error_message.format(identifier, exc_info[1]))

         except:
-            failed = True
             add_traceback = True
             exc_info = sys.exc_info()
             self.log.warning(
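For illustration, a hedged usage sketch of the new standardized 'create' call; the creator identifier, variant and pre-create key below are hypothetical, and 'create_context' is a reset CreateContext instance as in the earlier sketch:

    # Hypothetical identifier/variant; asset_doc and task_name fall back
    # to the current context when omitted.
    result = create_context.create(
        "io.openpype.creators.example",
        "Main",
        pre_create_data={"use_selection": True},
    )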
@@ -1833,12 +1933,35 @@ class CreateContext:
                 exc_info=True
             )

-        if failed:
-            raise CreatorsCreateFailed([
-                prepare_failed_creator_operation_info(
-                    identifier, label, exc_info, add_traceback
-                )
-            ])
+        if not success:
+            fail_info = prepare_failed_creator_operation_info(
+                identifier, label, exc_info, add_traceback
+            )
+        return result, fail_info
+
+    def create_with_unified_error(self, identifier, *args, **kwargs):
+        """Trigger create but raise only one error if anything fails.
+
+        Added to raise unified exception. Capture any possible issues and
+        reraise it with unified information.
+
+        Args:
+            identifier (str): Identifier of creator.
+            *args (Tuple[Any]): Arguments for create method.
+            **kwargs (Dict[Any, Any]): Keyword argument for create method.
+
+        Raises:
+            CreatorsCreateFailed: When creation fails due to any possible
+                reason. If anything goes wrong this is only possible exception
+                the method should raise.
+        """
+
+        result, fail_info = self._create_with_unified_error(
+            identifier, None, *args, **kwargs
+        )
+        if fail_info is not None:
+            raise CreatorsCreateFailed([fail_info])
+        return result

     def _remove_instance(self, instance):
         self._instances_by_id.pop(instance.id, None)
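A sketch of how a caller might consume the unified failure; the import path is assumed, and the 'failed_info' attribute is inferred from the constructor call above rather than a verified public API:

    # Assumed import path and attribute name; sketch only.
    from openpype.pipeline.create import CreatorsCreateFailed

    try:
        create_context.create_with_unified_error(
            "io.openpype.creators.example", "Main"
        )
    except CreatorsCreateFailed as exc:
        for fail_info in exc.failed_info:
            print(fail_info)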
@@ -1968,38 +2091,12 @@ class CreateContext:
         Reset instances if any autocreator executed properly.
         """

-        error_message = "Failed to run AutoCreator with identifier \"{}\". {}"
         failed_info = []
         for creator in self.sorted_autocreators:
             identifier = creator.identifier
-            label = creator.label
-            failed = False
-            add_traceback = False
-            try:
-                creator.create()
-
-            except CreatorError:
-                failed = True
-                exc_info = sys.exc_info()
-                self.log.warning(error_message.format(identifier, exc_info[1]))
-
-            # Use bare except because some hosts raise their exceptions that
-            # do not inherit from python's `BaseException`
-            except:
-                failed = True
-                add_traceback = True
-                exc_info = sys.exc_info()
-                self.log.warning(
-                    error_message.format(identifier, ""),
-                    exc_info=True
-                )
-
-            if failed:
-                failed_info.append(
-                    prepare_failed_creator_operation_info(
-                        identifier, label, exc_info, add_traceback
-                    )
-                )
+            _, fail_info = self._create_with_unified_error(identifier, creator)
+            if fail_info is not None:
+                failed_info.append(fail_info)

         if failed_info:
             raise CreatorsCreateFailed(failed_info)
@@ -153,6 +153,12 @@ class BaseCreator:
     Single object should be used for multiple instances instead of single
     instance per one creator object. Do not store temp data or mid-process data
     to `self` if it's not Plugin specific.
+
+    Args:
+        project_settings (Dict[str, Any]): Project settings.
+        system_settings (Dict[str, Any]): System settings.
+        create_context (CreateContext): Context which initialized creator.
+        headless (bool): Running in headless mode.
     """

     # Label shown in UI
@@ -605,12 +611,12 @@ class AutoCreator(BaseCreator):
         pass


-def discover_creator_plugins():
-    return discover(BaseCreator)
+def discover_creator_plugins(*args, **kwargs):
+    return discover(BaseCreator, *args, **kwargs)


-def discover_convertor_plugins():
-    return discover(SubsetConvertorPlugin)
+def discover_convertor_plugins(*args, **kwargs):
+    return discover(SubsetConvertorPlugin, *args, **kwargs)


 def discover_legacy_creator_plugins():
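For illustration, a hedged sketch of the pass-through in use: 'return_report=True' makes discovery return the full DiscoverResult instead of a plain plugin list. The import path is assumed, and the attributes follow the DiscoverResult usage visible elsewhere in this diff ('plugins', 'crashed_file_paths'):

    # Sketch only; assumed import path.
    from openpype.pipeline.create import discover_creator_plugins

    report = discover_creator_plugins(return_report=True)
    for creator_class in report.plugins:
        print("Discovered:", creator_class.__name__)
    for filepath, exc_info in report.crashed_file_paths.items():
        print("Crashed on import:", filepath)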
@@ -135,11 +135,12 @@ class PluginDiscoverContext(object):
             allow_duplicates (bool): Validate class name duplications.
             ignore_classes (list): List of classes that will be ignored
                 and not added to result.
+            return_report (bool): Output will be full report if set to 'True'.

         Returns:
-            DiscoverResult: Object holding succesfully discovered plugins,
-                ignored plugins, plugins with missing abstract implementation
-                and duplicated plugin.
+            Union[DiscoverResult, list[Any]]: Object holding successfully
+                discovered plugins, ignored plugins, plugins with missing
+                abstract implementation and duplicated plugin.
         """

         if not ignore_classes:
@@ -268,9 +269,34 @@ class _GlobalDiscover:
         return cls._context


-def discover(superclass, allow_duplicates=True):
+def discover(
+    superclass,
+    allow_duplicates=True,
+    ignore_classes=None,
+    return_report=False
+):
+    """Find and return subclasses of `superclass`
+
+    Args:
+        superclass (type): Class which determines discovered subclasses.
+        allow_duplicates (bool): Validate class name duplications.
+        ignore_classes (list): List of classes that will be ignored
+            and not added to result.
+        return_report (bool): Output will be full report if set to 'True'.
+
+    Returns:
+        Union[DiscoverResult, list[Any]]: Object holding successfully
+            discovered plugins, ignored plugins, plugins with missing
+            abstract implementation and duplicated plugin.
+    """
+
     context = _GlobalDiscover.get_context()
-    return context.discover(superclass, allow_duplicates)
+    return context.discover(
+        superclass,
+        allow_duplicates,
+        ignore_classes,
+        return_report
+    )


 def get_last_discovered_plugins(superclass):
@@ -25,7 +25,6 @@ from .publish_plugins import (
 from .lib import (
     get_publish_template_name,

-    DiscoverResult,
     publish_plugins_discover,
     load_help_content_from_plugin,
     load_help_content_from_filepath,

@@ -36,6 +35,7 @@ from .lib import (
     filter_instances_for_context_plugin,
     context_plugin_should_run,
     get_instance_staging_dir,
+    get_publish_repre_path,
 )

 from .abstract_expected_files import ExpectedFiles

@@ -68,7 +68,6 @@ __all__ = (

     "get_publish_template_name",

-    "DiscoverResult",
     "publish_plugins_discover",
     "load_help_content_from_plugin",
     "load_help_content_from_filepath",

@@ -79,6 +78,7 @@ __all__ = (
     "filter_instances_for_context_plugin",
     "context_plugin_should_run",
     "get_instance_staging_dir",
+    "get_publish_repre_path",

     "ExpectedFiles",
@@ -21,6 +21,7 @@ from openpype.settings import (
 from openpype.pipeline import (
     tempdir
 )
+from openpype.pipeline.plugin_discover import DiscoverResult

 from .contants import (
     DEFAULT_PUBLISH_TEMPLATE,

@@ -202,28 +203,6 @@ def get_publish_template_name(
     return template or default_template


-class DiscoverResult:
-    """Hold result of publish plugins discovery.
-
-    Stores discovered plugins duplicated plugins and file paths which
-    crashed on execution of file.
-    """
-    def __init__(self):
-        self.plugins = []
-        self.crashed_file_paths = {}
-        self.duplicated_plugins = []
-
-    def __iter__(self):
-        for plugin in self.plugins:
-            yield plugin
-
-    def __getitem__(self, item):
-        return self.plugins[item]
-
-    def __setitem__(self, item, value):
-        self.plugins[item] = value
-
-
 class HelpContent:
     def __init__(self, title, description, detail=None):
         self.title = title
@@ -291,7 +270,7 @@ def publish_plugins_discover(paths=None):
     """

     # The only difference with `pyblish.api.discover`
-    result = DiscoverResult()
+    result = DiscoverResult(pyblish.api.Plugin)

     plugins = dict()
     plugin_names = []
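A hedged usage sketch: 'publish_plugins_discover' now fills the shared DiscoverResult (constructed with 'pyblish.api.Plugin' as its superclass) rather than the removed local class. Attribute names follow the usage visible in this diff:

    # Sketch only.
    from openpype.pipeline.publish import publish_plugins_discover

    result = publish_plugins_discover()
    print(len(result.plugins), "publish plugins discovered")
    for filepath in result.crashed_file_paths:
        print("Plugin file crashed on import:", filepath)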
@@ -662,3 +641,49 @@ def get_instance_staging_dir(instance):
     instance.data['stagingDir'] = staging_dir

     return staging_dir
+
+
+def get_publish_repre_path(instance, repre, only_published=False):
+    """Get representation path that can be used for integration.
+
+    When 'only_published' is set to true the validation of path is not
+    relevant. In that case we just need what is set in 'published_path'
+    as "reference". The reference is not used to get or upload the file but
+    for reference where the file was published.
+
+    Args:
+        instance (pyblish.Instance): Processed instance object. Used
+            for source of staging dir if representation does not have
+            filled it.
+        repre (dict): Representation on instance which could be and
+            could not be integrated with main integrator.
+        only_published (bool): Care only about published paths and
+            ignore if filepath is not existing anymore.
+
+    Returns:
+        str: Path to representation file.
+        None: Path is not filled or does not exist.
+    """
+
+    published_path = repre.get("published_path")
+    if published_path:
+        published_path = os.path.normpath(published_path)
+        if os.path.exists(published_path):
+            return published_path
+
+    if only_published:
+        return published_path
+
+    comp_files = repre["files"]
+    if isinstance(comp_files, (tuple, list, set)):
+        filename = comp_files[0]
+    else:
+        filename = comp_files
+
+    staging_dir = repre.get("stagingDir")
+    if not staging_dir:
+        staging_dir = get_instance_staging_dir(instance)
+    src_path = os.path.normpath(os.path.join(staging_dir, filename))
+    if os.path.exists(src_path):
+        return src_path
+    return None
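For context, a hedged sketch of calling the new helper from a publish plugin; the representation keys follow the docstring above and the helper function name below is a hypothetical example:

    # Sketch: resolve usable file paths for an instance's representations.
    from openpype.pipeline.publish import get_publish_repre_path

    def print_representation_paths(instance):
        for repre in instance.data.get("representations", []):
            path = get_publish_repre_path(instance, repre)
            if path is None:
                print("No existing file for representation:", repre.get("name"))
            else:
                print(path)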
@@ -54,6 +54,6 @@ def create_custom_tempdir(project_name, anatomy=None):
             os.makedirs(custom_tempdir)
         except IOError as error:
             raise IOError(
-                "Path couldn't be created: {}".format(error)) from error
+                "Path couldn't be created: {}".format(error))

     return custom_tempdir
@@ -32,7 +32,7 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
                 thumbnail_paths_by_instance_id.get(None)
             )

-        project_name = create_context.project_name
+        project_name = create_context.get_current_project_name()
         if project_name:
             context.data["projectName"] = project_name

@@ -53,11 +53,15 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin):
         context.data.update(create_context.context_data_to_store())
         context.data["newPublishing"] = True
         # Update context data
-        for key in ("AVALON_PROJECT", "AVALON_ASSET", "AVALON_TASK"):
-            value = create_context.dbcon.Session.get(key)
-            if value is not None:
-                legacy_io.Session[key] = value
-                os.environ[key] = value
+        asset_name = create_context.get_current_asset_name()
+        task_name = create_context.get_current_task_name()
+        for key, value in (
+            ("AVALON_PROJECT", project_name),
+            ("AVALON_ASSET", asset_name),
+            ("AVALON_TASK", task_name)
+        ):
+            legacy_io.Session[key] = value
+            os.environ[key] = value

     def create_instance(
         self,
@@ -246,6 +246,7 @@
                 "sourcetype": "python",
                 "title": "Gizmo Note",
+                "command": "nuke.nodes.StickyNote(label='You can create your own toolbar menu in the Nuke GizmoMenu of OpenPype')",
                 "icon": "",
                 "shortcut": ""
             }
         ]
@@ -17,6 +17,11 @@
         "key": "menu",
         "label": "OpenPype Menu shortcuts",
         "children": [
+            {
+                "type": "text",
+                "key": "create",
+                "label": "Create..."
+            },
             {
                 "type": "text",
                 "key": "publish",

@@ -288,4 +293,4 @@
                 "name": "schema_publish_gui_filter"
             }
         ]
     }
 }
@@ -5,11 +5,9 @@
 def test_backward_compatibility(printer):
     printer("Test if imports still work")
     try:
         from openpype.lib import filter_pyblish_plugins
-        from openpype.lib import execute_hook
-        from openpype.lib import PypeHook

         from openpype.lib import get_latest_version
         from openpype.lib import ApplicationLaunchFailed

         from openpype.lib import get_ffmpeg_tool_path

@@ -18,10 +16,6 @@ def test_backward_compatibility(printer):
         from openpype.lib import get_version_from_path
         from openpype.lib import version_up

-        from openpype.lib import is_latest
-        from openpype.lib import any_outdated
-        from openpype.lib import get_asset
-        from openpype.lib import get_linked_assets
         from openpype.lib import get_ffprobe_streams

         from openpype.hosts.fusion.lib import switch_item
@@ -1,9 +1,9 @@
-from . import lib
+import os
 import pyblish.api
 import pyblish.util
 import pyblish.plugin
-from openpype.lib import filter_pyblish_plugins
-import os
+from openpype.pipeline.publish.lib import filter_pyblish_plugins
+from . import lib


 def test_pyblish_plugin_filter_modifier(printer, monkeypatch):
@@ -169,6 +169,8 @@ class PublishReport:

     def __init__(self, controller):
         self.controller = controller
+        self._create_discover_result = None
+        self._convert_discover_result = None
         self._publish_discover_result = None
         self._plugin_data = []
         self._plugin_data_with_plugin = []

@@ -181,6 +183,10 @@ class PublishReport:
     def reset(self, context, create_context):
         """Reset report and clear all data."""

+        self._create_discover_result = create_context.creator_discover_result
+        self._convert_discover_result = (
+            create_context.convertor_discover_result
+        )
         self._publish_discover_result = create_context.publish_discover_result
         self._plugin_data = []
         self._plugin_data_with_plugin = []
@@ -293,9 +299,19 @@ class PublishReport:
             if plugin not in self._stored_plugins:
                 plugins_data.append(self._create_plugin_data_item(plugin))

-        crashed_file_paths = {}
+        reports = []
+        if self._create_discover_result is not None:
+            reports.append(self._create_discover_result)
+
+        if self._convert_discover_result is not None:
+            reports.append(self._convert_discover_result)
+
         if self._publish_discover_result is not None:
-            items = self._publish_discover_result.crashed_file_paths.items()
+            reports.append(self._publish_discover_result)
+
+        crashed_file_paths = {}
+        for report in reports:
+            items = report.crashed_file_paths.items()
             for filepath, exc_info in items:
                 crashed_file_paths[filepath] = "".join(
                     traceback.format_exception(*exc_info)
@@ -1573,20 +1589,19 @@ class PublisherController(BasePublisherController):
     Handle both creation and publishing parts.

     Args:
-        dbcon (AvalonMongoDB): Connection to mongo with context.
         headless (bool): Headless publishing. ATM not implemented or used.
     """

     _log = None

-    def __init__(self, dbcon=None, headless=False):
+    def __init__(self, headless=False):
         super(PublisherController, self).__init__()

         self._host = registered_host()
         self._headless = headless

         self._create_context = CreateContext(
-            self._host, dbcon, headless=headless, reset=False
+            self._host, headless=headless, reset=False
         )

         self._publish_plugins_proxy = None
@@ -1740,7 +1755,7 @@ class PublisherController(BasePublisherController):
         self._create_context.reset_preparation()

         # Reset avalon context
-        self._create_context.reset_avalon_context()
+        self._create_context.reset_current_context()

         self._asset_docs_cache.reset()

@@ -2004,9 +2019,10 @@ class PublisherController(BasePublisherController):

         success = True
         try:
-            self._create_context.create(
+            self._create_context.create_with_unified_error(
                 creator_identifier, subset_name, instance_data, options
             )

         except CreatorsOperationFailed as exc:
             success = False
             self._emit_event(
@@ -457,13 +457,14 @@ class CreateWidget(QtWidgets.QWidget):
             # TODO add details about creator
             new_creators.add(identifier)
             if identifier in existing_items:
+                is_new = False
                 item = existing_items[identifier]
             else:
+                is_new = True
                 item = QtGui.QStandardItem()
                 item.setFlags(
                     QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
                 )
-                self._creators_model.appendRow(item)

             item.setData(creator_item.label, QtCore.Qt.DisplayRole)
             item.setData(creator_item.show_order, CREATOR_SORT_ROLE)

@@ -473,6 +474,8 @@ class CreateWidget(QtWidgets.QWidget):
                 CREATOR_THUMBNAIL_ENABLED_ROLE
             )
             item.setData(creator_item.family, FAMILY_ROLE)
+            if is_new:
+                self._creators_model.appendRow(item)

         # Remove families that are no more available
         for identifier in (old_creators - new_creators):
@@ -250,21 +250,25 @@ class PublishReportBtn(PublishIconBtn):
         self._actions = []

     def add_action(self, label, identifier):
-        action = QtWidgets.QAction(label)
-        action.setData(identifier)
-        action.triggered.connect(
-            functools.partial(self._on_action_trigger, action)
+        self._actions.append(
+            (label, identifier)
         )
-        self._actions.append(action)

-    def _on_action_trigger(self, action):
-        identifier = action.data()
+    def _on_action_trigger(self, identifier):
         self.triggered.emit(identifier)

     def mouseReleaseEvent(self, event):
         super(PublishReportBtn, self).mouseReleaseEvent(event)
         menu = QtWidgets.QMenu(self)
-        menu.addActions(self._actions)
+        actions = []
+        for item in self._actions:
+            label, identifier = item
+            action = QtWidgets.QAction(label, menu)
+            action.triggered.connect(
+                functools.partial(self._on_action_trigger, identifier)
+            )
+            actions.append(action)
+        menu.addActions(actions)
         menu.exec_(event.globalPos())

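The rework stores plain (label, identifier) tuples and builds parented QAction objects only when the menu opens, which avoids keeping parentless actions alive for the widget's lifetime. A standalone sketch of the same pattern, assuming a Qt5-style binding where QAction lives in QtWidgets:

    # Standalone sketch, not the actual widget code.
    import functools
    from qtpy import QtWidgets

    def build_menu(parent, items, callback):
        # 'items' is an iterable of (label, identifier) tuples.
        menu = QtWidgets.QMenu(parent)
        for label, identifier in items:
            action = QtWidgets.QAction(label, menu)
            action.triggered.connect(functools.partial(callback, identifier))
            menu.addAction(action)
        return menu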
@@ -366,7 +366,7 @@ class PublisherWindow(QtWidgets.QDialog):

     def make_sure_is_visible(self):
         if self._window_is_visible:
-            self.setWindowState(QtCore.Qt.ActiveWindow)
+            self.setWindowState(QtCore.Qt.WindowActive)

         else:
             self.show()
@@ -566,24 +566,24 @@ class PublisherWindow(QtWidgets.QDialog):
     def _go_to_publish_tab(self):
         self._set_current_tab("publish")

-    def _go_to_details_tab(self):
-        self._set_current_tab("details")
-
     def _go_to_report_tab(self):
         self._set_current_tab("report")

+    def _go_to_details_tab(self):
+        self._set_current_tab("details")
+
     def _is_on_create_tab(self):
         return self._is_current_tab("create")

     def _is_on_publish_tab(self):
         return self._is_current_tab("publish")

-    def _is_on_details_tab(self):
-        return self._is_current_tab("details")
-
     def _is_on_report_tab(self):
         return self._is_current_tab("report")

+    def _is_on_details_tab(self):
+        return self._is_current_tab("details")
+
     def _set_publish_overlay_visibility(self, visible):
         if visible:
             widget = self._publish_overlay
@@ -647,16 +647,10 @@ class PublisherWindow(QtWidgets.QDialog):
         # otherwise 'create' is used
         # - this happens only on first show
         if first_reset:
-            if self._overview_widget.has_items():
-                self._go_to_publish_tab()
-            else:
-                self._go_to_create_tab()
+            self._go_to_create_tab()

-        elif (
-            not self._is_on_create_tab()
-            and not self._is_on_publish_tab()
-        ):
-            # If current tab is not 'Create' or 'Publish' go to 'Publish'
+        elif self._is_on_report_tab():
+            # Go to 'Publish' tab if is on 'Details' tab
             # - this can happen when publishing started and was reset
             # at that moment it doesn't make sense to stay at publish
             # specific tabs.
@@ -621,7 +621,7 @@ class FilesWidget(QtWidgets.QWidget):
             "caption": "Work Files",
             "filter": ext_filter
         }
-        if qtpy.API in ("pyside", "pyside2"):
+        if qtpy.API in ("pyside", "pyside2", "pyside6"):
             kwargs["dir"] = self._workfiles_root
         else:
             kwargs["directory"] = self._workfiles_root
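For context, a hedged sketch of why the binding check matters: PySide variants name QFileDialog's initial-directory keyword 'dir' while PyQt variants use 'directory'. The path and filter below are hypothetical:

    # Sketch only; '/tmp/workfiles' and the filter are hypothetical.
    import qtpy
    from qtpy import QtWidgets

    kwargs = {"caption": "Work Files", "filter": "Maya (*.ma *.mb)"}
    if qtpy.API in ("pyside", "pyside2", "pyside6"):
        kwargs["dir"] = "/tmp/workfiles"
    else:
        kwargs["directory"] = "/tmp/workfiles"
    filepath, _selected_filter = QtWidgets.QFileDialog.getSaveFileName(**kwargs)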