[Automated] Merged develop into main

pypebot 2022-09-10 06:08:00 +02:00 committed by GitHub
commit bef1ef3dd3
49 changed files with 749 additions and 474 deletions

.gitignore
View file

@@ -107,3 +107,6 @@ website/.docusaurus
mypy.ini
tools/run_eventserver.*
# Developer tools
tools/dev_*

View file

@@ -41,7 +41,7 @@ It can be built and run on all common platforms. We develop and test on the following:
- **Linux**
- **Ubuntu** 20.04 LTS
- **Centos** 7
- **Mac OSX**
- **10.15** Catalina
- **11.1** Big Sur (using Rosetta2)
@@ -287,6 +287,14 @@ To run tests, execute `.\tools\run_tests(.ps1|.sh)`.
**Note that it requires an existing virtual environment.**
Developer tools
-------------
If you wish to add your own tools to the `.\tools` folder without git tracking, you can do so by naming them with the `dev_*` prefix (example: `dev_clear_pyc(.ps1|.sh)`).
## Contributors ✨
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):

View file

@@ -388,8 +388,11 @@ class InstallDialog(QtWidgets.QDialog):
install_thread.start()
def _installation_finished(self):
# TODO find out why status can be set to 'None'
# - 'InstallThread.run' should handle all cases, so it is unclear where
#   that comes from
status = self._install_thread.result()
if status >= 0:
if status is not None and status >= 0:
self._update_progress(100)
QtWidgets.QApplication.processEvents()
self.done(3)
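For context on the added guard: in Python 3, ordering comparisons between `None` and an `int` raise `TypeError`, so the old check would crash instead of reporting a failed install. A minimal sketch of the difference:

```python
status = None  # e.g. the install thread never produced a result

# Old check: comparing None with an int raises in Python 3.
try:
    status >= 0
except TypeError as exc:
    print(exc)  # '>=' not supported between instances of 'NoneType' and 'int'

# New check: short-circuits before the comparison is attempted.
if status is not None and status >= 0:
    print("installation finished successfully")
```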

View file

@@ -14,6 +14,8 @@ from bson.objectid import ObjectId
from .mongo import get_project_database, get_project_connection
PatternType = type(re.compile(""))
def _prepare_fields(fields, required_fields=None):
if not fields:
@@ -1054,11 +1056,11 @@ def _regex_filters(filters):
for key, value in filters.items():
regexes = []
a_values = []
if isinstance(value, re.Pattern):
if isinstance(value, PatternType):
regexes.append(value)
elif isinstance(value, (list, tuple, set)):
for item in value:
if isinstance(item, re.Pattern):
if isinstance(item, PatternType):
regexes.append(item)
else:
a_values.append(item)
@@ -1194,7 +1196,7 @@ def get_representations(
as filter. Filter ignored if 'None' is passed.
version_ids (Iterable[str]): Subset ids used as parent filter. Filter
ignored if 'None' is passed.
context_filters (Dict[str, List[str, re.Pattern]]): Filter by
context_filters (Dict[str, List[str, PatternType]]): Filter by
representation context fields.
names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
using version ids and list of names under the version.
@@ -1240,7 +1242,7 @@ def get_archived_representations(
as filter. Filter ignored if 'None' is passed.
version_ids (Iterable[str]): Subset ids used as parent filter. Filter
ignored if 'None' is passed.
context_filters (Dict[str, List[str, re.Pattern]]): Filter by
context_filters (Dict[str, List[str, PatternType]]): Filter by
representation context fields.
names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
using version ids and list of names under the version.
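A note on the `re.Pattern` → `PatternType` swap above: `re.Pattern` only became a public name in Python 3.8, so deriving the type from a compiled pattern keeps these `isinstance` checks working on older interpreters. A minimal sketch:

```python
import re

# The type of a compiled pattern object, available on any Python version.
PatternType = type(re.compile(""))

assert isinstance(re.compile("a.*b"), PatternType)
assert not isinstance("a.*b", PatternType)
```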

View file

@@ -1,8 +1,6 @@
import os
from openpype.lib import (
PreLaunchHook,
create_workdir_extra_folders
)
from openpype.lib import PreLaunchHook
from openpype.pipeline.workfile import create_workdir_extra_folders
class AddLastWorkfileToLaunchArgs(PreLaunchHook):

View file

@@ -1,113 +0,0 @@
import os
import collections
from pprint import pformat
import pyblish.api
from openpype.client import (
get_subsets,
get_last_versions,
get_representations
)
from openpype.pipeline import legacy_io
class AppendCelactionAudio(pyblish.api.ContextPlugin):
label = "Colect Audio for publishing"
order = pyblish.api.CollectorOrder + 0.1
def process(self, context):
self.log.info('Collecting Audio Data')
asset_doc = context.data["assetEntity"]
# get all available representations
subsets = self.get_subsets(
asset_doc,
representations=["audio", "wav"]
)
self.log.info(f"subsets is: {pformat(subsets)}")
if not subsets.get("audioMain"):
raise AttributeError("`audioMain` subset does not exist")
reprs = subsets.get("audioMain", {}).get("representations", [])
self.log.info(f"reprs is: {pformat(reprs)}")
repr = next((r for r in reprs), None)
if not repr:
raise "Missing `audioMain` representation"
self.log.info(f"representation is: {repr}")
audio_file = repr.get('data', {}).get('path', "")
if os.path.exists(audio_file):
context.data["audioFile"] = audio_file
self.log.info(
'audio_file: {}, has been added to context'.format(audio_file))
else:
self.log.warning("Couldn't find any audio file on Ftrack.")
def get_subsets(self, asset_doc, representations):
"""
Query subsets with filter on name.
The method returns all found subsets with their latest version
and representations. Representations can be filtered by name.
Arguments:
asset_doc (dict): Asset (shot) mongo document
representations (list): List of representation names to filter by
Returns:
dict: subsets with version and representations in keys
"""
# Query all subsets for asset
project_name = legacy_io.active_project()
subset_docs = get_subsets(
project_name, asset_ids=[asset_doc["_id"]], fields=["_id"]
)
# Collect all subset ids
subset_ids = [
subset_doc["_id"]
for subset_doc in subset_docs
]
# Check if we found anything
assert subset_ids, (
"No subsets found. Check correct filter. "
"Try this for start `r'.*'`: asset: `{}`"
).format(asset_doc["name"])
last_versions_by_subset_id = get_last_versions(
project_name, subset_ids, fields=["_id", "parent"]
)
version_docs_by_id = {}
for version_doc in last_versions_by_subset_id.values():
version_docs_by_id[version_doc["_id"]] = version_doc
repre_docs = get_representations(
project_name,
version_ids=version_docs_by_id.keys(),
representation_names=representations
)
repre_docs_by_version_id = collections.defaultdict(list)
for repre_doc in repre_docs:
version_id = repre_doc["parent"]
repre_docs_by_version_id[version_id].append(repre_doc)
output_dict = {}
for version_id, repre_docs in repre_docs_by_version_id.items():
version_doc = version_docs_by_id[version_id]
subset_id = version_doc["parent"]
subset_doc = last_versions_by_subset_id[subset_id]
# Store queried docs by subset name
output_dict[subset_doc["name"]] = {
"representations": repre_docs,
"version": version_doc
}
return output_dict

View file

@@ -51,7 +51,8 @@ from .pipeline import (
)
from .menu import (
FlameMenuProjectConnect,
FlameMenuTimeline
FlameMenuTimeline,
FlameMenuUniversal
)
from .plugin import (
Creator,
@@ -131,6 +132,7 @@ __all__ = [
# menu
"FlameMenuProjectConnect",
"FlameMenuTimeline",
"FlameMenuUniversal",
# plugin
"Creator",

View file

@@ -201,3 +201,53 @@ class FlameMenuTimeline(_FlameMenuApp):
if self.flame:
self.flame.execute_shortcut('Rescan Python Hooks')
self.log.info('Rescan Python Hooks')
class FlameMenuUniversal(_FlameMenuApp):
# flameMenuProjectconnect app takes care of the preferences dialog as well
def __init__(self, framework):
_FlameMenuApp.__init__(self, framework)
def __getattr__(self, name):
def method(*args, **kwargs):
project = self.dynamic_menu_data.get(name)
if project:
self.link_project(project)
return method
def build_menu(self):
if not self.flame:
return []
menu = deepcopy(self.menu)
menu['actions'].append({
"name": "Load...",
"execute": lambda x: self.tools_helper.show_loader()
})
menu['actions'].append({
"name": "Manage...",
"execute": lambda x: self.tools_helper.show_scene_inventory()
})
menu['actions'].append({
"name": "Library...",
"execute": lambda x: self.tools_helper.show_library_loader()
})
return menu
def refresh(self, *args, **kwargs):
self.rescan()
def rescan(self, *args, **kwargs):
if not self.flame:
try:
import flame
self.flame = flame
except ImportError:
self.flame = None
if self.flame:
self.flame.execute_shortcut('Rescan Python Hooks')
self.log.info('Rescan Python Hooks')

View file

@@ -361,6 +361,8 @@ class PublishableClip:
index_from_segment_default = False
use_shot_name_default = False
include_handles_default = False
retimed_handles_default = True
retimed_framerange_default = True
def __init__(self, segment, **kwargs):
self.rename_index = kwargs["rename_index"]
@@ -496,6 +498,14 @@
"audio", {}).get("value") or False
self.include_handles = self.ui_inputs.get(
"includeHandles", {}).get("value") or self.include_handles_default
self.retimed_handles = (
self.ui_inputs.get("retimedHandles", {}).get("value")
or self.retimed_handles_default
)
self.retimed_framerange = (
self.ui_inputs.get("retimedFramerange", {}).get("value")
or self.retimed_framerange_default
)
# build subset name from layer name
if self.subset_name == "[ track name ]":

View file

@@ -276,6 +276,22 @@ class CreateShotClip(opfapi.Creator):
"target": "tag",
"toolTip": "By default handles are excluded", # noqa
"order": 3
},
"retimedHandles": {
"value": True,
"type": "QCheckBox",
"label": "Retimed handles",
"target": "tag",
"toolTip": "By default handles are retimed.", # noqa
"order": 4
},
"retimedFramerange": {
"value": True,
"type": "QCheckBox",
"label": "Retimed framerange",
"target": "tag",
"toolTip": "By default framerange is retimed.", # noqa
"order": 5
}
}
}

View file

@@ -131,6 +131,10 @@ class CollectTimelineInstances(pyblish.api.ContextPlugin):
"fps": self.fps,
"workfileFrameStart": workfile_start,
"sourceFirstFrame": int(first_frame),
"notRetimedHandles": (
not marker_data.get("retimedHandles")),
"notRetimedFramerange": (
not marker_data.get("retimedFramerange")),
"path": file_path,
"flameAddTasks": self.add_tasks,
"tasks": {

View file

@@ -90,26 +90,38 @@ class ExtractSubsetResources(openpype.api.Extractor):
handle_end = instance.data["handleEnd"]
handles = max(handle_start, handle_end)
include_handles = instance.data.get("includeHandles")
retimed_handles = instance.data.get("retimedHandles")
# get media source range with handles
source_start_handles = instance.data["sourceStartH"]
source_end_handles = instance.data["sourceEndH"]
# retime if needed
if r_speed != 1.0:
source_start_handles = (
instance.data["sourceStart"] - r_handle_start)
source_end_handles = (
source_start_handles
+ (r_source_dur - 1)
+ r_handle_start
+ r_handle_end
)
if retimed_handles:
# handles are retimed
source_start_handles = (
instance.data["sourceStart"] - r_handle_start)
source_end_handles = (
source_start_handles
+ (r_source_dur - 1)
+ r_handle_start
+ r_handle_end
)
else:
# handles are not retimed
source_end_handles = (
source_start_handles
+ (r_source_dur - 1)
+ handle_start
+ handle_end
)
# get frame range with handles for representation range
frame_start_handle = frame_start - handle_start
repre_frame_start = frame_start_handle
if include_handles:
if r_speed == 1.0:
if r_speed == 1.0 or not retimed_handles:
frame_start_handle = frame_start
else:
frame_start_handle = (

View file

@@ -73,6 +73,8 @@ def load_apps():
opfapi.FlameMenuProjectConnect(opfapi.CTX.app_framework))
opfapi.CTX.flame_apps.append(
opfapi.FlameMenuTimeline(opfapi.CTX.app_framework))
opfapi.CTX.flame_apps.append(
opfapi.FlameMenuUniversal(opfapi.CTX.app_framework))
opfapi.CTX.app_framework.log.info("Apps are loaded")
@@ -191,3 +193,27 @@ def get_timeline_custom_ui_actions():
openpype_install()
return _build_app_menu("FlameMenuTimeline")
def get_batch_custom_ui_actions():
"""Hook to create submenu in batch
Returns:
list: menu object
"""
# install openpype and the host
openpype_install()
return _build_app_menu("FlameMenuUniversal")
def get_media_panel_custom_ui_actions():
"""Hook to create submenu in desktop
Returns:
list: menu object
"""
# install openpype and the host
openpype_install()
return _build_app_menu("FlameMenuUniversal")

View file

@@ -0,0 +1,10 @@
from .addon import (
FusionAddon,
FUSION_HOST_DIR,
)
__all__ = (
"FusionAddon",
"FUSION_HOST_DIR",
)

View file

@@ -0,0 +1,23 @@
import os
from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IHostAddon
FUSION_HOST_DIR = os.path.dirname(os.path.abspath(__file__))
class FusionAddon(OpenPypeModule, IHostAddon):
name = "fusion"
host_name = "fusion"
def initialize(self, module_settings):
self.enabled = True
def get_launch_hook_paths(self, app):
if app.host_name != self.host_name:
return []
return [
os.path.join(FUSION_HOST_DIR, "hooks")
]
def get_workfile_extensions(self):
return [".comp"]

View file

@@ -18,12 +18,11 @@ from openpype.pipeline import (
deregister_inventory_action_path,
AVALON_CONTAINER_ID,
)
import openpype.hosts.fusion
from openpype.hosts.fusion import FUSION_HOST_DIR
log = Logger.get_logger(__name__)
HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.fusion.__file__))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PLUGINS_DIR = os.path.join(FUSION_HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")

View file

@@ -2,13 +2,11 @@
import sys
import os
from openpype.pipeline import HOST_WORKFILE_EXTENSIONS
from .pipeline import get_current_comp
def file_extensions():
return HOST_WORKFILE_EXTENSIONS["fusion"]
return [".comp"]
def has_unsaved_changes():

View file

@@ -318,10 +318,9 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
@staticmethod
def create_otio_time_range_from_timeline_item_data(track_item):
speed = track_item.playbackSpeed()
timeline = phiero.get_current_sequence()
frame_start = int(track_item.timelineIn())
frame_duration = int((track_item.duration() - 1) / speed)
frame_duration = int(track_item.duration())
fps = timeline.framerate().toFloat()
return hiero_export.create_otio_time_range(

View file

@@ -70,7 +70,7 @@ class CollectAssembly(pyblish.api.InstancePlugin):
data[representation_id].append(instance_data)
instance.data["scenedata"] = dict(data)
instance.data["hierarchy"] = list(set(hierarchy_nodes))
instance.data["nodesHierarchy"] = list(set(hierarchy_nodes))
def get_file_rule(self, rule):
return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))

View file

@@ -33,7 +33,7 @@ class ExtractAssembly(openpype.api.Extractor):
json.dump(instance.data["scenedata"], filepath, ensure_ascii=False)
self.log.info("Extracting point cache ..")
cmds.select(instance.data["hierarchy"])
cmds.select(instance.data["nodesHierarchy"])
# Run basic alembic exporter
extract_alembic(file=hierarchy_path,

View file

@@ -48,7 +48,7 @@ class ValidateAssemblyModelTransforms(pyblish.api.InstancePlugin):
from openpype.hosts.maya.api import lib
# Get all transforms in the loaded containers
container_roots = cmds.listRelatives(instance.data["hierarchy"],
container_roots = cmds.listRelatives(instance.data["nodesHierarchy"],
children=True,
type="transform",
fullPath=True)

View file

@@ -201,34 +201,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
if not instance.data["review"]:
instance.data["useSequenceForReview"] = False
project_name = legacy_io.active_project()
asset_name = instance.data["asset"]
# * Add audio to instance if exists.
# Find the latest version document
last_version_doc = get_last_version_by_subset_name(
project_name, "audioMain", asset_name=asset_name, fields=["_id"]
)
repre_doc = None
if last_version_doc:
# Try to find its representation (Expected there is only one)
repre_docs = list(get_representations(
project_name, version_ids=[last_version_doc["_id"]]
))
if not repre_docs:
self.log.warning(
"Version document does not contain any representations"
)
else:
repre_doc = repre_docs[0]
# Add audio to instance if representation was found
if repre_doc:
instance.data["audio"] = [{
"offset": 0,
"filename": get_representation_path(repre_doc)
}]
self.log.debug("instance.data: {}".format(pformat(instance.data)))
def is_prerender(self, families):

View file

@@ -1,21 +1,60 @@
import os
import re
import abc
import json
import logging
import six
import platform
import functools
import warnings
import clique
from openpype.client import get_project
from openpype.settings import get_project_settings
from .profiles_filtering import filter_profiles
log = logging.getLogger(__name__)
class PathToolsDeprecatedWarning(DeprecationWarning):
pass
def deprecated(new_destination):
"""Mark functions as deprecated.
It will result in a warning being emitted when the function is used.
"""
func = None
if callable(new_destination):
func = new_destination
new_destination = None
def _decorator(decorated_func):
if new_destination is None:
warning_message = (
" Please check content of deprecated function to figure out"
" possible replacement."
)
else:
warning_message = " Please replace your usage with '{}'.".format(
new_destination
)
@functools.wraps(decorated_func)
def wrapper(*args, **kwargs):
warnings.simplefilter("always", PathToolsDeprecatedWarning)
warnings.warn(
(
"Call to deprecated function '{}'"
"\nFunction was moved or removed.{}"
).format(decorated_func.__name__, warning_message),
category=PathToolsDeprecatedWarning,
stacklevel=4
)
return decorated_func(*args, **kwargs)
return wrapper
if func is None:
return _decorator
return _decorator(func)
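The decorator supports both a bare and a parametrized form (hence the `callable(new_destination)` check above). A short usage sketch reusing names from this diff:

```python
@deprecated("openpype.pipeline.project_folders.create_project_folders")
def create_project_folders(basic_paths, project_name):
    ...

@deprecated  # bare form: the warning only suggests checking the function body
def old_helper():
    ...

old_helper()  # emits PathToolsDeprecatedWarning on every call
```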
def format_file_size(file_size, suffix=None):
"""Returns formatted string with size in appropriate unit.
@@ -232,107 +271,69 @@ def get_last_version_from_path(path_dir, filter):
return None
@deprecated("openpype.pipeline.project_folders.concatenate_splitted_paths")
def concatenate_splitted_paths(split_paths, anatomy):
pattern_array = re.compile(r"\[.*\]")
output = []
for path_items in split_paths:
clean_items = []
if isinstance(path_items, str):
path_items = [path_items]
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
for path_item in path_items:
if not re.match(r"{.+}", path_item):
path_item = re.sub(pattern_array, "", path_item)
clean_items.append(path_item)
from openpype.pipeline.project_folders import concatenate_splitted_paths
# backward compatibility
if "__project_root__" in path_items:
for root, root_path in anatomy.roots.items():
if not os.path.exists(str(root_path)):
log.debug("Root {} path path {} not exist on \
computer!".format(root, root_path))
continue
clean_items = ["{{root[{}]}}".format(root),
r"{project[name]}"] + clean_items[1:]
output.append(os.path.normpath(os.path.sep.join(clean_items)))
continue
output.append(os.path.normpath(os.path.sep.join(clean_items)))
return output
return concatenate_splitted_paths(split_paths, anatomy)
@deprecated
def get_format_data(anatomy):
project_doc = get_project(anatomy.project_name, fields=["data.code"])
project_code = project_doc["data"]["code"]
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
return {
"root": anatomy.roots,
"project": {
"name": anatomy.project_name,
"code": project_code
},
}
from openpype.pipeline.template_data import get_project_template_data
data = get_project_template_data(project_name=anatomy.project_name)
data["root"] = anatomy.roots
return data
@deprecated("openpype.pipeline.project_folders.fill_paths")
def fill_paths(path_list, anatomy):
format_data = get_format_data(anatomy)
filled_paths = []
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
for path in path_list:
new_path = path.format(**format_data)
filled_paths.append(new_path)
from openpype.pipeline.project_folders import fill_paths
return filled_paths
return fill_paths(path_list, anatomy)
@deprecated("openpype.pipeline.project_folders.create_project_folders")
def create_project_folders(basic_paths, project_name):
from openpype.pipeline import Anatomy
anatomy = Anatomy(project_name)
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
concat_paths = concatenate_splitted_paths(basic_paths, anatomy)
filled_paths = fill_paths(concat_paths, anatomy)
from openpype.pipeline.project_folders import create_project_folders
# Create folders
for path in filled_paths:
if os.path.exists(path):
log.debug("Folder already exists: {}".format(path))
else:
log.debug("Creating folder: {}".format(path))
os.makedirs(path)
def _list_path_items(folder_structure):
output = []
for key, value in folder_structure.items():
if not value:
output.append(key)
else:
paths = _list_path_items(value)
for path in paths:
if not isinstance(path, (list, tuple)):
path = [path]
item = [key]
item.extend(path)
output.append(item)
return output
return create_project_folders(project_name, basic_paths)
@deprecated("openpype.pipeline.project_folders.get_project_basic_paths")
def get_project_basic_paths(project_name):
project_settings = get_project_settings(project_name)
folder_structure = (
project_settings["global"]["project_folder_structure"]
)
if not folder_structure:
return []
"""
Deprecated:
Function will be removed after release version 3.16.*
"""
if isinstance(folder_structure, str):
folder_structure = json.loads(folder_structure)
return _list_path_items(folder_structure)
from openpype.pipeline.project_folders import get_project_basic_paths
return get_project_basic_paths(project_name)
@deprecated("openpype.pipeline.workfile.create_workdir_extra_folders")
def create_workdir_extra_folders(
workdir, host_name, task_type, task_name, project_name,
project_settings=None
@@ -349,37 +350,18 @@ create_workdir_extra_folders(
project_name (str): Name of project on which task is.
project_settings (dict): Prepared project settings. Are loaded if not
passed.
Deprecated:
Function will be removed after release version 3.16.*
"""
# Load project settings if not set
if not project_settings:
project_settings = get_project_settings(project_name)
# Load extra folders profiles
extra_folders_profiles = (
project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
from openpype.pipeline.project_folders import create_workdir_extra_folders
return create_workdir_extra_folders(
workdir,
host_name,
task_type,
task_name,
project_name,
project_settings
)
# Skip if are empty
if not extra_folders_profiles:
return
# Prepare profiles filters
filter_data = {
"task_types": task_type,
"task_names": task_name,
"hosts": host_name
}
profile = filter_profiles(extra_folders_profiles, filter_data)
if profile is None:
return
for subfolder in profile["folders"]:
# Make sure backslashes are converted to forwards slashes
# and does not start with slash
subfolder = subfolder.replace("\\", "/").lstrip("/")
# Skip empty strings
if not subfolder:
continue
fullpath = os.path.join(workdir, subfolder)
if not os.path.exists(fullpath):
os.makedirs(fullpath)

View file

@@ -3,7 +3,6 @@
import os
import logging
import re
import json
import warnings
import functools

View file

@@ -1,7 +1,10 @@
import re
from openpype.pipeline.project_folders import (
get_project_basic_paths,
create_project_folders,
)
from openpype_modules.ftrack.lib import BaseAction, statics_icon
from openpype.api import get_project_basic_paths, create_project_folders
class CreateProjectFolders(BaseAction):
@@ -81,7 +84,7 @@ class CreateProjectFolders(BaseAction):
}
# Invoking OpenPype API to create the project folders
create_project_folders(basic_paths, project_name)
create_project_folders(project_name, basic_paths)
self.create_ftrack_entities(basic_paths, project_entity)
self.trigger_event(

View file

@@ -1,5 +1,8 @@
"""Loads publishing context from json and continues in publish process.
Should run before 'CollectAnatomyContextData' so the user on the context is
changed before it is stored to context or instance anatomy data.
Requires:
anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
@@ -13,7 +16,7 @@ import os
import pyblish.api
class CollectUsername(pyblish.api.ContextPlugin):
class CollectUsernameForWebpublish(pyblish.api.ContextPlugin):
"""
Translates user email to Ftrack username.
@@ -32,10 +35,8 @@ class CollectUsername(pyblish.api.ContextPlugin):
hosts = ["webpublisher", "photoshop"]
targets = ["remotepublish", "filespublish", "tvpaint_worker"]
_context = None
def process(self, context):
self.log.info("CollectUsername")
self.log.info("{}".format(self.__class__.__name__))
os.environ["FTRACK_API_USER"] = os.environ["FTRACK_BOT_API_USER"]
os.environ["FTRACK_API_KEY"] = os.environ["FTRACK_BOT_API_KEY"]
@@ -54,12 +55,14 @@ class CollectUsername(pyblish.api.ContextPlugin):
return
session = ftrack_api.Session(auto_connect_event_hub=False)
user = session.query("User where email like '{}'".format(user_email))
user = session.query(
"User where email like '{}'".format(user_email)
).first()
if not user:
raise ValueError(
"Couldn't find user with {} email".format(user_email))
user = user[0]
username = user.get("username")
self.log.debug("Resolved ftrack username:: {}".format(username))
os.environ["FTRACK_API_USER"] = username
@@ -67,5 +70,4 @@ class CollectUsername(pyblish.api.ContextPlugin):
burnin_name = username
if '@' in burnin_name:
burnin_name = burnin_name[:burnin_name.index('@')]
os.environ["WEBPUBLISH_OPENPYPE_USERNAME"] = burnin_name
context.data["user"] = burnin_name

View file

@@ -166,50 +166,21 @@ def update_op_assets(
# Substitute item type for general classification (assets or shots)
if item_type in ["Asset", "AssetType"]:
substitute_item_type = "assets"
entity_root_asset_name = "Assets"
elif item_type in ["Episode", "Sequence"]:
substitute_item_type = "shots"
else:
substitute_item_type = f"{item_type.lower()}s"
entity_parent_folders = [
f
for f in project_module_settings["entities_root"]
.get(substitute_item_type)
.split("/")
if f
]
entity_root_asset_name = "Shots"
# Root parent folder if exist
visual_parent_doc_id = (
asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None
)
if visual_parent_doc_id is None:
# Find root folder docs
root_folder_docs = get_assets(
# Find root folder doc ("Assets" or "Shots")
root_folder_doc = get_asset_by_name(
project_name,
asset_names=[entity_parent_folders[-1]],
asset_name=entity_root_asset_name,
fields=["_id", "data.root_of"],
)
# NOTE: Not sure why it's checking for entity type?
# OP3 does not support multiple assets with same names so type
# filtering is irrelevant.
# This way mimics previous implementation:
# ```
# root_folder_doc = dbcon.find_one(
# {
# "type": "asset",
# "name": entity_parent_folders[-1],
# "data.root_of": substitute_item_type,
# },
# ["_id"],
# )
# ```
root_folder_doc = None
for folder_doc in root_folder_docs:
root_of = folder_doc.get("data", {}).get("root_of")
if root_of == substitute_item_type:
root_folder_doc = folder_doc
break
if root_folder_doc:
visual_parent_doc_id = root_folder_doc["_id"]
@@ -240,7 +211,7 @@
item_name = item["name"]
# Set root folders parents
item_data["parents"] = entity_parent_folders + item_data["parents"]
item_data["parents"] = [entity_root_asset_name] + item_data["parents"]
# Update 'data' different in zou DB
updated_data = {
@@ -318,13 +289,13 @@ def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
)
def sync_all_projects(login: str, password: str):
def sync_all_projects(login: str, password: str, ignore_projects: list = None):
"""Update all OP projects in DB with Zou data.
Args:
login (str): Kitsu user login
password (str): Kitsu user password
ignore_projects (list): Names of projects to skip during sync
Raises:
gazu.exception.AuthFailedException: Wrong user login and/or password
"""
@@ -340,6 +311,8 @@ def sync_all_projects(login: str, password: str):
dbcon.install()
all_projects = gazu.project.all_open_projects()
for project in all_projects:
if ignore_projects and project["name"] in ignore_projects:
continue
sync_project_from_kitsu(dbcon, project)
@@ -396,54 +369,30 @@ def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
zou_ids_and_asset_docs[project["id"]] = project_doc
# Create entities root folders
project_module_settings = get_project_settings(project_name)["kitsu"]
for entity_type, root in project_module_settings["entities_root"].items():
parent_folders = root.split("/")
direct_parent_doc = None
for i, folder in enumerate(parent_folders, 1):
parent_doc = get_asset_by_name(
project_name, folder, fields=["_id", "data.root_of"]
)
# NOTE: Not sure why it's checking for entity type?
# OP3 does not support multiple assets with same names so type
# filtering is irrelevant.
# Also all of the entities could be queried at once using
# 'get_assets'.
# This way mimics previous implementation:
# ```
# parent_doc = dbcon.find_one(
# {"type": "asset", "name": folder, "data.root_of": entity_type}
# )
# ```
if (
parent_doc
and parent_doc.get("data", {}).get("root_of") != entity_type
):
parent_doc = None
if not parent_doc:
direct_parent_doc = dbcon.insert_one(
{
"name": folder,
"type": "asset",
"schema": "openpype:asset-3.0",
"data": {
"root_of": entity_type,
"parents": parent_folders[:i],
"visualParent": direct_parent_doc.inserted_id
if direct_parent_doc
else None,
"tasks": {},
},
}
)
to_insert = [
{
"name": r,
"type": "asset",
"schema": "openpype:asset-3.0",
"data": {
"root_of": r,
"tasks": {},
},
}
for r in ["Assets", "Shots"]
if not get_asset_by_name(
project_name, r, fields=["_id", "data.root_of"]
)
]
# Create
to_insert = [
create_op_asset(item)
for item in all_entities
if item["id"] not in zou_ids_and_asset_docs.keys()
]
to_insert.extend(
[
create_op_asset(item)
for item in all_entities
if item["id"] not in zou_ids_and_asset_docs.keys()
]
)
if to_insert:
# Insert doc in DB
dbcon.insert_many(to_insert)

View file

@@ -95,13 +95,15 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
Reviews might be large, so allow adding only a link to the message
instead of uploading the file.
"""
fill_data = copy.deepcopy(instance.context.data["anatomyData"])
username = fill_data.get("user")
fill_pairs = [
("asset", instance.data.get("asset", fill_data.get("asset"))),
("subset", instance.data.get("subset", fill_data.get("subset"))),
("username", instance.data.get("username",
fill_data.get("username"))),
("user", username),
("username", username),
("app", instance.data.get("app", fill_data.get("app"))),
("family", instance.data.get("family", fill_data.get("family"))),
("version", str(instance.data.get("version",
@@ -110,13 +112,19 @@ class IntegrateSlackAPI(pyblish.api.InstancePlugin):
if review_path:
fill_pairs.append(("review_filepath", review_path))
task_data = instance.data.get("task")
if not task_data:
task_data = fill_data.get("task")
for key, value in task_data.items():
fill_key = "task[{}]".format(key)
fill_pairs.append((fill_key, value))
fill_pairs.append(("task", task_data["name"]))
task_data = fill_data.get("task")
if task_data:
if (
"{task}" in message_templ
or "{Task}" in message_templ
or "{TASK}" in message_templ
):
fill_pairs.append(("task", task_data["name"]))
else:
for key, value in task_data.items():
fill_key = "task[{}]".format(key)
fill_pairs.append((fill_key, value))
self.log.debug("fill_pairs ::{}".format(fill_pairs))
multiple_case_variants = prepare_template_data(fill_pairs)
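The branch above fills a flat `{task}` key only when the template references it; otherwise it expands the `task[...]` sub-keys. A minimal sketch of the two fill modes (template strings are hypothetical):

```python
task_data = {"name": "compositing", "type": "Compositing"}

# Flat key, chosen when the template contains "{task}":
print("Published for task {task}".format(task=task_data["name"]))

# Sub-keys, chosen otherwise:
print("Published for {task[name]} ({task[type]})".format(task=task_data))
```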

View file

@@ -0,0 +1,107 @@
import os
import re
import json
import six
from openpype.settings import get_project_settings
from openpype.lib import Logger
from .anatomy import Anatomy
from .template_data import get_project_template_data
def concatenate_splitted_paths(split_paths, anatomy):
log = Logger.get_logger("concatenate_splitted_paths")
pattern_array = re.compile(r"\[.*\]")
output = []
for path_items in split_paths:
clean_items = []
if isinstance(path_items, str):
path_items = [path_items]
for path_item in path_items:
if not re.match(r"{.+}", path_item):
path_item = re.sub(pattern_array, "", path_item)
clean_items.append(path_item)
# backward compatibility
if "__project_root__" in path_items:
for root, root_path in anatomy.roots.items():
if not os.path.exists(str(root_path)):
log.debug("Root {} path path {} not exist on \
computer!".format(root, root_path))
continue
clean_items = ["{{root[{}]}}".format(root),
r"{project[name]}"] + clean_items[1:]
output.append(os.path.normpath(os.path.sep.join(clean_items)))
continue
output.append(os.path.normpath(os.path.sep.join(clean_items)))
return output
def fill_paths(path_list, anatomy):
format_data = get_project_template_data(project_name=anatomy.project_name)
format_data["root"] = anatomy.roots
filled_paths = []
for path in path_list:
new_path = path.format(**format_data)
filled_paths.append(new_path)
return filled_paths
def create_project_folders(project_name, basic_paths=None):
log = Logger.get_logger("create_project_folders")
anatomy = Anatomy(project_name)
if basic_paths is None:
basic_paths = get_project_basic_paths(project_name)
if not basic_paths:
return
concat_paths = concatenate_splitted_paths(basic_paths, anatomy)
filled_paths = fill_paths(concat_paths, anatomy)
# Create folders
for path in filled_paths:
if os.path.exists(path):
log.debug("Folder already exists: {}".format(path))
else:
log.debug("Creating folder: {}".format(path))
os.makedirs(path)
def _list_path_items(folder_structure):
output = []
for key, value in folder_structure.items():
if not value:
output.append(key)
continue
paths = _list_path_items(value)
for path in paths:
if not isinstance(path, (list, tuple)):
path = [path]
item = [key]
item.extend(path)
output.append(item)
return output
def get_project_basic_paths(project_name):
project_settings = get_project_settings(project_name)
folder_structure = (
project_settings["global"]["project_folder_structure"]
)
if not folder_structure:
return []
if isinstance(folder_structure, six.string_types):
folder_structure = json.loads(folder_structure)
return _list_path_items(folder_structure)
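A short usage sketch of the relocated helpers (project name and folder structure are hypothetical):

```python
# _list_path_items flattens the settings dict into path items:
structure = {"Assets": {"Characters": {}, "Props": {}}, "Editorial": {}}
# -> [["Assets", "Characters"], ["Assets", "Props"], "Editorial"]

# Typical entry point; roots and templates come from the project's Anatomy:
create_project_folders("my_project")
```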

View file

@@ -53,7 +53,7 @@ def get_project_template_data(project_doc=None, project_name=None):
project_name = project_doc["name"]
if not project_doc:
project_code = get_project(project_name, fields=["data.code"])
project_doc = get_project(project_name, fields=["data.code"])
project_code = project_doc.get("data", {}).get("code")
return {

View file

@@ -9,6 +9,8 @@ from .path_resolving import (
get_custom_workfile_template,
get_custom_workfile_template_by_string_context,
create_workdir_extra_folders,
)
from .build_workfile import BuildWorkfile
@@ -26,5 +28,7 @@ __all__ = (
"get_custom_workfile_template",
"get_custom_workfile_template_by_string_context",
"create_workdir_extra_folders",
"BuildWorkfile",
)

View file

@@ -467,3 +467,60 @@ def get_custom_workfile_template_by_string_context(
return get_custom_workfile_template(
project_doc, asset_doc, task_name, host_name, anatomy, project_settings
)
def create_workdir_extra_folders(
workdir,
host_name,
task_type,
task_name,
project_name,
project_settings=None
):
"""Create extra folders in work directory based on context.
Args:
workdir (str): Path to workdir where workfiles is stored.
host_name (str): Name of host implementation.
task_type (str): Type of task for which extra folders should be
created.
task_name (str): Name of task for which extra folders should be
created.
project_name (str): Name of project on which task is.
project_settings (dict): Prepared project settings. Are loaded if not
passed.
"""
# Load project settings if not set
if not project_settings:
project_settings = get_project_settings(project_name)
# Load extra folders profiles
extra_folders_profiles = (
project_settings["global"]["tools"]["Workfiles"]["extra_folders"]
)
# Skip if are empty
if not extra_folders_profiles:
return
# Prepare profiles filters
filter_data = {
"task_types": task_type,
"task_names": task_name,
"hosts": host_name
}
profile = filter_profiles(extra_folders_profiles, filter_data)
if profile is None:
return
for subfolder in profile["folders"]:
# Make sure backslashes are converted to forwards slashes
# and does not start with slash
subfolder = subfolder.replace("\\", "/").lstrip("/")
# Skip empty strings
if not subfolder:
continue
fullpath = os.path.join(workdir, subfolder)
if not os.path.exists(fullpath):
os.makedirs(fullpath)
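A hypothetical call, e.g. from a workfiles tool right after the workdir is created (all values are examples):

```python
create_workdir_extra_folders(
    workdir="/projects/demo/shots/sh010/work/compositing",
    host_name="nuke",
    task_type="Compositing",
    task_name="compositing",
    project_name="demo",
)
```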

View file

@@ -0,0 +1,105 @@
import pyblish.api
from openpype.client import (
get_last_version_by_subset_name,
get_representations,
)
from openpype.pipeline import (
legacy_io,
get_representation_path,
)
class CollectAudio(pyblish.api.InstancePlugin):
"""Collect asset's last published audio.
The audio subset name searched for is defined in:
project settings > Collect Audio
"""
label = "Collect Asset Audio"
order = pyblish.api.CollectorOrder + 0.1
families = ["review"]
hosts = [
"nuke",
"maya",
"shell",
"hiero",
"premiere",
"harmony",
"traypublisher",
"standalonepublisher",
"fusion",
"tvpaint",
"resolve",
"webpublisher",
"aftereffects",
"flame",
"unreal"
]
audio_subset_name = "audioMain"
def process(self, instance):
if instance.data.get("audio"):
self.log.info(
"Skipping Audio collecion. It is already collected"
)
return
# Add audio to instance if exists.
self.log.info((
"Searching for audio subset '{subset}'"
" in asset '{asset}'"
).format(
subset=self.audio_subset_name,
asset=instance.data["asset"]
))
repre_doc = self._get_repre_doc(instance)
# Add audio to instance if representation was found
if repre_doc:
instance.data["audio"] = [{
"offset": 0,
"filename": get_representation_path(repre_doc)
}]
self.log.info("Audio Data added to instance ...")
def _get_repre_doc(self, instance):
cache = instance.context.data.get("__cache_asset_audio")
if cache is None:
cache = {}
instance.context.data["__cache_asset_audio"] = cache
asset_name = instance.data["asset"]
# first try to get it from cache
if asset_name in cache:
return cache[asset_name]
project_name = legacy_io.active_project()
# Find the latest version document
last_version_doc = get_last_version_by_subset_name(
project_name,
self.audio_subset_name,
asset_name=asset_name,
fields=["_id"]
)
repre_doc = None
if last_version_doc:
# Try to find its representation (Expected there is only one)
repre_docs = list(get_representations(
project_name, version_ids=[last_version_doc["_id"]]
))
if not repre_docs:
self.log.warning(
"Version document does not contain any representations"
)
else:
repre_doc = repre_docs[0]
# update cache
cache[asset_name] = repre_doc
return repre_doc
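The `__cache_asset_audio` dict lives on the pyblish context, so when several instances share one asset only the first pays for the database round-trip. The same pattern condensed (with a hypothetical lookup helper):

```python
cache = context.data.setdefault("__cache_asset_audio", {})
if asset_name not in cache:
    cache[asset_name] = expensive_lookup(asset_name)  # hypothetical helper
repre_doc = cache[asset_name]
```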

View file

@@ -29,6 +29,7 @@ class CollectOtioFrameRanges(pyblish.api.InstancePlugin):
# get basic variables
otio_clip = instance.data["otioClip"]
workfile_start = instance.data["workfileFrameStart"]
workfile_source_duration = instance.data.get("notRetimedFramerange")
# get ranges
otio_tl_range = otio_clip.range_in_parent()
@@ -54,6 +55,11 @@
frame_end = frame_start + otio.opentime.to_frames(
otio_tl_range.duration, otio_tl_range.duration.rate) - 1
# in case of a retimed clip whose frame range should not be retimed
if workfile_source_duration:
frame_end = frame_start + otio.opentime.to_frames(
otio_src_range.duration, otio_src_range.duration.rate) - 1
data = {
"frameStart": frame_start,
"frameEnd": frame_end,

View file

@@ -488,12 +488,6 @@ class ExtractBurnin(publish.Extractor):
"frame_end_handle": frame_end_handle
}
# use explicit username for webpublishes as rewriting
# OPENPYPE_USERNAME might have side effects
webpublish_user_name = os.environ.get("WEBPUBLISH_OPENPYPE_USERNAME")
if webpublish_user_name:
burnin_data["username"] = webpublish_user_name
self.log.debug(
"Basic burnin_data: {}".format(json.dumps(burnin_data, indent=4))
)

View file

@@ -135,7 +135,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
# the database even if not used by the destination template
db_representation_context_keys = [
"project", "asset", "task", "subset", "version", "representation",
"family", "hierarchy", "username", "output"
"family", "hierarchy", "username", "user", "output"
]
skip_host_families = []

View file

@@ -46,7 +46,7 @@ class IntegrateHeroVersion(pyblish.api.InstancePlugin):
ignored_representation_names = []
db_representation_context_keys = [
"project", "asset", "task", "subset", "representation",
"family", "hierarchy", "task", "username"
"family", "hierarchy", "task", "username", "user"
]
# QUESTION/TODO this process should happen on server if crashed due to
# permissions error on files (files were used or user didn't have perms)

View file

@@ -127,7 +127,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
exclude_families = ["render.farm"]
db_representation_context_keys = [
"project", "asset", "task", "subset", "version", "representation",
"family", "hierarchy", "task", "username"
"family", "hierarchy", "task", "username", "user"
]
default_template_name = "publish"

View file

@@ -3,6 +3,10 @@
"CollectAnatomyInstanceData": {
"follow_workfile_version": false
},
"CollectAudio": {
"enabled": false,
"audio_subset_name": "audioMain"
},
"CollectSceneVersion": {
"hosts": [
"aftereffects",

View file

@@ -1,8 +1,4 @@
{
"entities_root": {
"assets": "Assets",
"shots": "Shots"
},
"entities_naming_pattern": {
"episode": "E##",
"sequence": "SQ##",

View file

@@ -5,23 +5,6 @@
"collapsible": true,
"is_file": true,
"children": [
{
"type": "dict",
"key": "entities_root",
"label": "Entities root folder",
"children": [
{
"type": "text",
"key": "assets",
"label": "Assets:"
},
{
"type": "text",
"key": "shots",
"label": "Shots (includes Episodes & Sequences if any):"
}
]
},
{
"type": "dict",
"key": "entities_naming_pattern",

View file

@@ -18,6 +18,27 @@
}
]
},
{
"type": "dict",
"collapsible": true,
"checkbox_key": "enabled",
"key": "CollectAudio",
"label": "Collect Audio",
"is_group": true,
"children": [
{
"type": "boolean",
"key": "enabled",
"label": "Enabled"
},
{
"key": "audio_subset_name",
"label": "Name of audio variant",
"type": "text",
"placeholder": "audioMain"
}
]
},
{
"type": "dict",
"collapsible": true,

View file

@@ -281,18 +281,25 @@ class ActionModel(QtGui.QStandardItemModel):
if not action_item:
return
action = action_item.data(ACTION_ROLE)
actual_data = self._prepare_compare_data(action)
actions = action_item.data(ACTION_ROLE)
if not isinstance(actions, list):
actions = [actions]
action_actions_data = [
self._prepare_compare_data(action)
for action in actions
]
stored = self.launcher_registry.get_item("force_not_open_workfile")
if is_checked:
stored.append(actual_data)
else:
final_values = []
for config in stored:
if config != actual_data:
final_values.append(config)
stored = final_values
for actual_data in action_actions_data:
if is_checked:
stored.append(actual_data)
else:
final_values = []
for config in stored:
if config != actual_data:
final_values.append(config)
stored = final_values
self.launcher_registry.set_item("force_not_open_workfile", stored)
self.launcher_registry._get_item.cache_clear()
@@ -329,21 +336,24 @@
item (QStandardItem)
stored (list) of dict
"""
action = item.data(ACTION_ROLE)
if not self.is_application_action(action):
actions = item.data(ACTION_ROLE)
if not isinstance(actions, list):
actions = [actions]
if not self.is_application_action(actions[0]):
return False
actual_data = self._prepare_compare_data(action)
action_actions_data = [
self._prepare_compare_data(action)
for action in actions
]
for config in stored:
if config == actual_data:
if config in action_actions_data:
return True
return False
def _prepare_compare_data(self, action):
if isinstance(action, list) and action:
action = action[0]
compare_data = {}
if action and action.label:
compare_data = {

View file

@@ -312,11 +312,12 @@ class ActionBar(QtWidgets.QWidget):
is_group = index.data(GROUP_ROLE)
is_variant_group = index.data(VARIANT_GROUP_ROLE)
force_not_open_workfile = index.data(FORCE_NOT_OPEN_WORKFILE_ROLE)
if not is_group and not is_variant_group:
action = index.data(ACTION_ROLE)
# Change data of application action
if issubclass(action, ApplicationAction):
if index.data(FORCE_NOT_OPEN_WORKFILE_ROLE):
if force_not_open_workfile:
action.data["start_last_workfile"] = False
else:
action.data.pop("start_last_workfile", None)
@@ -385,10 +386,18 @@
menu.addMenu(sub_menu)
result = menu.exec_(QtGui.QCursor.pos())
if result:
action = actions_mapping[result]
self._start_animation(index)
self.action_clicked.emit(action)
if not result:
return
action = actions_mapping[result]
if issubclass(action, ApplicationAction):
if force_not_open_workfile:
action.data["start_last_workfile"] = False
else:
action.data.pop("start_last_workfile", None)
self._start_animation(index)
self.action_clicked.emit(action)
class ActionHistory(QtWidgets.QPushButton):

View file

@@ -1,5 +1,12 @@
from Qt import QtWidgets, QtCore, QtGui
from openpype import resources
from openpype.style import load_stylesheet
from openpype.widgets import PasswordDialog
from openpype.lib import is_admin_password_required, Logger
from openpype.pipeline import AvalonMongoDB
from openpype.pipeline.project_folders import create_project_folders
from . import (
ProjectModel,
ProjectProxyFilter,
@@ -13,17 +20,6 @@
)
from .widgets import ConfirmProjectDeletion
from .style import ResourceCache
from openpype.style import load_stylesheet
from openpype.lib import is_admin_password_required
from openpype.widgets import PasswordDialog
from openpype.pipeline import AvalonMongoDB
from openpype import resources
from openpype.api import (
get_project_basic_paths,
create_project_folders,
Logger
)
class ProjectManagerWindow(QtWidgets.QWidget):
@@ -259,12 +255,8 @@
qm.Yes | qm.No)
if ans == qm.Yes:
try:
# Get paths based on presets
basic_paths = get_project_basic_paths(project_name)
if not basic_paths:
pass
# Invoking OpenPype API to create the project folders
create_project_folders(basic_paths, project_name)
create_project_folders(project_name)
except Exception as exc:
self.log.warning(
"Cannot create starting folders: {}".format(exc),

View file

@@ -34,7 +34,8 @@ from .lib import (
class InventoryModel(TreeModel):
"""The model for the inventory"""
Columns = ["Name", "version", "count", "family", "loader", "objectName"]
Columns = ["Name", "version", "count", "family",
"group", "loader", "objectName"]
OUTDATED_COLOR = QtGui.QColor(235, 30, 30)
CHILD_OUTDATED_COLOR = QtGui.QColor(200, 160, 30)
@@ -157,8 +158,13 @@
# Family icon
return item.get("familyIcon", None)
column_name = self.Columns[index.column()]
if column_name == "group" and item.get("group"):
return qtawesome.icon("fa.object-group",
color=get_default_entity_icon_color())
if item.get("isGroupNode"):
column_name = self.Columns[index.column()]
if column_name == "active_site":
provider = item.get("active_site_provider")
return self._site_icons.get(provider)
@@ -423,6 +429,7 @@
group_node["familyIcon"] = family_icon
group_node["count"] = len(group_items)
group_node["isGroupNode"] = True
group_node["group"] = subset["data"].get("subsetGroup")
if self.sync_enabled:
progress = get_progress_for_repre(

View file

@@ -89,7 +89,8 @@ class SceneInventoryWindow(QtWidgets.QDialog):
view.setColumnWidth(1, 55) # version
view.setColumnWidth(2, 55) # count
view.setColumnWidth(3, 150) # family
view.setColumnWidth(4, 100) # namespace
view.setColumnWidth(4, 120) # group
view.setColumnWidth(5, 150) # loader
# apply delegates
version_delegate = VersionDelegate(legacy_io, self)

View file

@@ -9,11 +9,11 @@ import platform
from Qt import QtCore, QtGui, QtWidgets
import openpype.version
from openpype.api import (
resources,
get_system_settings
from openpype import resources, style
from openpype.lib import (
get_openpype_execute_args,
Logger,
)
from openpype.lib import get_openpype_execute_args, Logger
from openpype.lib.openpype_version import (
op_version_control_available,
get_expected_version,
@@ -25,8 +25,8 @@ from openpype.lib.openpype_version import (
get_openpype_version,
)
from openpype.modules import TrayModulesManager
from openpype import style
from openpype.settings import (
get_system_settings,
SystemSettings,
ProjectSettings,
DefaultsNotDefined
@@ -774,10 +774,24 @@ class PypeTrayStarter(QtCore.QObject):
def main():
log = Logger.get_logger(__name__)
app = QtWidgets.QApplication.instance()
if not app:
app = QtWidgets.QApplication([])
for attr_name in (
"AA_EnableHighDpiScaling",
"AA_UseHighDpiPixmaps"
):
attr = getattr(QtCore.Qt, attr_name, None)
if attr is None:
log.debug((
"Missing QtCore.Qt attribute \"{}\"."
" UI quality may be affected."
).format(attr_name))
else:
app.setAttribute(attr)
starter = PypeTrayStarter(app)
# TODO remove when pype.exe will have an icon

View file

@@ -10,10 +10,7 @@ from openpype.host import IWorkfileHost
from openpype.client import get_asset_by_id
from openpype.tools.utils import PlaceholderLineEdit
from openpype.tools.utils.delegates import PrettyTimeDelegate
from openpype.lib import (
emit_event,
create_workdir_extra_folders,
)
from openpype.lib import emit_event
from openpype.pipeline import (
registered_host,
legacy_io,
@@ -23,7 +20,10 @@ from openpype.pipeline.context_tools import (
compute_session_changes,
change_current_context
)
from openpype.pipeline.workfile import get_workfile_template_key
from openpype.pipeline.workfile import (
get_workfile_template_key,
create_workdir_extra_folders,
)
from .model import (
WorkAreaFilesModel,