[Automated] Merged develop into main

Commit 7aaa9c1c14 by pypebot, 2022-07-27 05:56:07 +02:00 (committed by GitHub)
42 changed files with 778 additions and 456 deletions

View file

@@ -1,3 +1,7 @@
+from .mongo import (
+OpenPypeMongoConnection,
+)
from .entities import (
get_projects,
get_project,
@@ -25,6 +29,8 @@ from .entities import (
get_last_version_by_subset_name,
get_output_link_versions,
+version_is_latest,
get_representation_by_id,
get_representation_by_name,
get_representations,
@@ -40,6 +46,8 @@ from .entities import (
)
__all__ = (
+"OpenPypeMongoConnection",
"get_projects",
"get_project",
"get_whole_project",
@@ -66,6 +74,8 @@ __all__ = (
"get_last_version_by_subset_name",
"get_output_link_versions",
+"version_is_latest",
"get_representation_by_id",
"get_representation_by_name",
"get_representations",

View file

@@ -12,7 +12,7 @@ import collections
import six
from bson.objectid import ObjectId
-from openpype.lib.mongo import OpenPypeMongoConnection
+from .mongo import OpenPypeMongoConnection
def _get_project_database():
@@ -20,7 +20,21 @@ def _get_project_database():
return OpenPypeMongoConnection.get_mongo_client()[db_name]
-def _get_project_connection(project_name):
+def get_project_connection(project_name):
"""Direct access to mongo collection.
We're trying to avoid using direct access to mongo. This should be used
only for Create, Update and Remove operations until API calls for those
are implemented.
Args:
project_name(str): Project name for which collection should be
returned.
Returns:
pymongo.Collection: Collection related to passed project.
"""
if not project_name:
raise ValueError("Invalid project name {}".format(str(project_name)))
return _get_project_database()[project_name]
@@ -93,7 +107,7 @@ def get_project(project_name, active=True, inactive=False, fields=None):
{"data.active": False},
]
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filter, _prepare_fields(fields))
@@ -108,7 +122,7 @@ def get_whole_project(project_name):
project collection.
"""
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find({})
@@ -131,7 +145,7 @@ def get_asset_by_id(project_name, asset_id, fields=None):
return None
query_filter = {"type": "asset", "_id": asset_id}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filter, _prepare_fields(fields))
@@ -153,7 +167,7 @@ def get_asset_by_name(project_name, asset_name, fields=None):
return None
query_filter = {"type": "asset", "name": asset_name}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filter, _prepare_fields(fields))
@@ -223,7 +237,7 @@ def _get_assets(
return []
query_filter["data.visualParent"] = {"$in": parent_ids}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find(query_filter, _prepare_fields(fields))
@@ -323,7 +337,7 @@ def get_asset_ids_with_subsets(project_name, asset_ids=None):
return []
subset_query["parent"] = {"$in": asset_ids}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
result = conn.aggregate([
{
"$match": subset_query
@@ -363,7 +377,7 @@ def get_subset_by_id(project_name, subset_id, fields=None):
return None
query_filters = {"type": "subset", "_id": subset_id}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filters, _prepare_fields(fields))
@@ -394,7 +408,7 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None):
"name": subset_name,
"parent": asset_id
}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filters, _prepare_fields(fields))
@@ -467,7 +481,7 @@ def get_subsets(
return []
query_filter["$or"] = or_query
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find(query_filter, _prepare_fields(fields))
@@ -491,7 +505,7 @@ def get_subset_families(project_name, subset_ids=None):
return set()
subset_filter["_id"] = {"$in": list(subset_ids)}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
result = list(conn.aggregate([
{"$match": subset_filter},
{"$project": {
@@ -529,7 +543,7 @@ def get_version_by_id(project_name, version_id, fields=None):
"type": {"$in": ["version", "hero_version"]},
"_id": version_id
}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filter, _prepare_fields(fields))
@@ -552,7 +566,7 @@ def get_version_by_name(project_name, version, subset_id, fields=None):
if not subset_id:
return None
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
query_filter = {
"type": "version",
"parent": subset_id,
@@ -561,6 +575,42 @@ def get_version_by_name(project_name, version, subset_id, fields=None):
return conn.find_one(query_filter, _prepare_fields(fields))
def version_is_latest(project_name, version_id):
"""Is version the latest from it's subset.
Note:
Hero versions are considered as latest.
Todo:
Maybe raise exception when version was not found?
Args:
project_name (str): Name of project where to look for queried entities.
version_id (Union[str, ObjectId]): Version id which is checked.
Returns:
bool: True if the version is the latest of its subset, else False.
"""
version_id = _convert_id(version_id)
if not version_id:
return False
version_doc = get_version_by_id(
project_name, version_id, fields=["_id", "type", "parent"]
)
# What to do when version is not found?
if not version_doc:
return False
if version_doc["type"] == "hero_version":
return True
last_version = get_last_version_by_subset_id(
project_name, version_doc["parent"], fields=["_id"]
)
return last_version["_id"] == version_id
def _get_versions(
project_name,
subset_ids=None,
@@ -606,7 +656,7 @@ def _get_versions(
else:
query_filter["name"] = {"$in": versions}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find(query_filter, _prepare_fields(fields))
@@ -765,7 +815,7 @@ def get_output_link_versions(project_name, version_id, fields=None):
if not version_id:
return []
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
# Does make sense to look for hero versions?
query_filter = {
"type": "version",
@@ -830,7 +880,7 @@ def get_last_versions(project_name, subset_ids, fields=None):
{"$group": group_item}
]
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
aggregate_result = conn.aggregate(aggregation_pipeline)
if limit_query:
output = {}
@@ -948,7 +998,7 @@ def get_representation_by_id(project_name, representation_id, fields=None):
if representation_id is not None:
query_filter["_id"] = _convert_id(representation_id)
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filter, _prepare_fields(fields))
@@ -981,7 +1031,7 @@ def get_representation_by_name(
"parent": version_id
}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filter, _prepare_fields(fields))
@@ -1044,7 +1094,7 @@ def _get_representations(
return []
query_filter["$or"] = or_query
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find(query_filter, _prepare_fields(fields))
@@ -1255,7 +1305,7 @@ def get_thumbnail_id_from_source(project_name, src_type, src_id):
query_filter = {"_id": _convert_id(src_id)}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
src_doc = conn.find_one(query_filter, {"data.thumbnail_id"})
if src_doc:
return src_doc.get("data", {}).get("thumbnail_id")
@@ -1288,7 +1338,7 @@ def get_thumbnails(project_name, thumbnail_ids, fields=None):
"type": "thumbnail",
"_id": {"$in": thumbnail_ids}
}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find(query_filter, _prepare_fields(fields))
@@ -1309,7 +1359,7 @@ def get_thumbnail(project_name, thumbnail_id, fields=None):
if not thumbnail_id:
return None
query_filter = {"type": "thumbnail", "_id": _convert_id(thumbnail_id)}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filter, _prepare_fields(fields))
@@ -1340,7 +1390,7 @@ def get_workfile_info(
"task_name": task_name,
"filename": filename
}
-conn = _get_project_connection(project_name)
+conn = get_project_connection(project_name)
return conn.find_one(query_filter, _prepare_fields(fields))
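A minimal usage sketch of the client functions added in this file, assuming `OPENPYPE_MONGO` points at a reachable Mongo instance; the project, subset and asset names below are placeholders, not values from this commit:

```python
from openpype.client import get_last_version_by_subset_name, version_is_latest

project_name = "demo_project"  # placeholder project
version_doc = get_last_version_by_subset_name(
    project_name, subset_name="modelMain", asset_name="characterA"
)
if version_doc:
    # The last version of a subset is, by definition, reported as latest.
    print(version_is_latest(project_name, version_doc["_id"]))  # True
```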

openpype/client/mongo.py (new file, 210 lines)
View file

@@ -0,0 +1,210 @@
import os
import sys
import time
import logging
import pymongo
import certifi
if sys.version_info[0] == 2:
from urlparse import urlparse, parse_qs
else:
from urllib.parse import urlparse, parse_qs
class MongoEnvNotSet(Exception):
pass
def _decompose_url(url):
"""Decompose mongo url to basic components.
Used for creation of MongoHandler which expect mongo url components as
separated kwargs. Components are at the end not used as we're setting
connection directly this is just a dumb components for MongoHandler
validation pass.
"""
# Use first url from passed url
# - this is because it is possible to pass multiple urls for multiple
# replica sets which would crash on urlparse otherwise
# - please don't use a comma in the username or password
url = url.split(",")[0]
components = {
"scheme": None,
"host": None,
"port": None,
"username": None,
"password": None,
"auth_db": None
}
result = urlparse(url)
if result.scheme is None:
_url = "mongodb://{}".format(url)
result = urlparse(_url)
components["scheme"] = result.scheme
components["host"] = result.hostname
try:
components["port"] = result.port
except ValueError:
raise RuntimeError("invalid port specified")
components["username"] = result.username
components["password"] = result.password
try:
components["auth_db"] = parse_qs(result.query)['authSource'][0]
except KeyError:
# no auth db provided, mongo will use the one we are connecting to
pass
return components
def get_default_components():
mongo_url = os.environ.get("OPENPYPE_MONGO")
if mongo_url is None:
raise MongoEnvNotSet(
"URL for Mongo logging connection is not set."
)
return _decompose_url(mongo_url)
def should_add_certificate_path_to_mongo_url(mongo_url):
"""Check if should add ca certificate to mongo url.
Since 30.9.2021 cloud mongo requires newer certificates that are not
available on most workstations. This adds the path to a certifi certificate
which is valid for it. To add the certificate path, the url must have the
scheme 'mongodb+srv' or have 'ssl=true' or 'tls=true' in its query.
"""
parsed = urlparse(mongo_url)
query = parse_qs(parsed.query)
lowered_query_keys = set(key.lower() for key in query.keys())
add_certificate = False
# Check if url 'ssl' or 'tls' are set to 'true'
for key in ("ssl", "tls"):
if key in query and "true" in query[key]:
add_certificate = True
break
# Check if url contains 'mongodb+srv'
if not add_certificate and parsed.scheme == "mongodb+srv":
add_certificate = True
# Check if url does already contain certificate path
if add_certificate and "tlscafile" in lowered_query_keys:
add_certificate = False
return add_certificate
def validate_mongo_connection(mongo_uri):
"""Check if provided mongodb URL is valid.
Args:
mongo_uri (str): URL to validate.
Raises:
ValueError: When port in mongo uri is not valid.
pymongo.errors.InvalidURI: If passed mongo is invalid.
pymongo.errors.ServerSelectionTimeoutError: If the connection timed
out, meaning the mongo server probably could not be reached.
"""
client = OpenPypeMongoConnection.create_connection(
mongo_uri, retry_attempts=1
)
client.close()
class OpenPypeMongoConnection:
"""Singleton MongoDB connection.
Keeps MongoDB connections by url.
"""
mongo_clients = {}
log = logging.getLogger("OpenPypeMongoConnection")
@staticmethod
def get_default_mongo_url():
return os.environ["OPENPYPE_MONGO"]
@classmethod
def get_mongo_client(cls, mongo_url=None):
if mongo_url is None:
mongo_url = cls.get_default_mongo_url()
connection = cls.mongo_clients.get(mongo_url)
if connection:
# Naive validation of existing connection
try:
connection.server_info()
with connection.start_session():
pass
except Exception:
connection = None
if not connection:
cls.log.debug("Creating mongo connection to {}".format(mongo_url))
connection = cls.create_connection(mongo_url)
cls.mongo_clients[mongo_url] = connection
return connection
@classmethod
def create_connection(cls, mongo_url, timeout=None, retry_attempts=None):
parsed = urlparse(mongo_url)
# Force validation of scheme
if parsed.scheme not in ["mongodb", "mongodb+srv"]:
raise pymongo.errors.InvalidURI((
"Invalid URI scheme:"
" URI must begin with 'mongodb://' or 'mongodb+srv://'"
))
if timeout is None:
timeout = int(os.environ.get("AVALON_TIMEOUT") or 1000)
kwargs = {
"serverSelectionTimeoutMS": timeout
}
if should_add_certificate_path_to_mongo_url(mongo_url):
kwargs["ssl_ca_certs"] = certifi.where()
mongo_client = pymongo.MongoClient(mongo_url, **kwargs)
if retry_attempts is None:
retry_attempts = 3
elif not retry_attempts:
retry_attempts = 1
last_exc = None
valid = False
t1 = time.time()
for attempt in range(1, retry_attempts + 1):
try:
mongo_client.server_info()
with mongo_client.start_session():
pass
valid = True
break
except Exception as exc:
last_exc = exc
if attempt < retry_attempts:
cls.log.warning(
"Attempt {} failed. Retrying... ".format(attempt)
)
time.sleep(1)
if not valid:
raise last_exc
cls.log.info("Connected to {}, delay {:.3f}s".format(
mongo_url, time.time() - t1
))
return mongo_client
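A short usage sketch for the relocated connection helpers, assuming `OPENPYPE_MONGO` is set and the server is reachable; the localhost URL is only an illustrative fallback:

```python
import os
from openpype.client.mongo import (
    OpenPypeMongoConnection,
    get_default_components,
    should_add_certificate_path_to_mongo_url,
)

# Placeholder URL; in production this is already set by the launcher.
os.environ.setdefault("OPENPYPE_MONGO", "mongodb://localhost:27017")

client = OpenPypeMongoConnection.get_mongo_client()  # cached per URL
print(client.server_info()["version"])
print(get_default_components()["host"])
# False for a plain mongodb:// URL without ssl/tls query flags.
print(should_add_certificate_path_to_mongo_url(os.environ["OPENPYPE_MONGO"]))
```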

View file

@@ -1,5 +1,4 @@
import os
-import sys
from Qt import QtWidgets
@@ -15,6 +14,7 @@ from openpype.pipeline import (
AVALON_CONTAINER_ID,
legacy_io,
)
+from openpype.pipeline.load import any_outdated_containers
import openpype.hosts.aftereffects
from openpype.lib import register_event_callback
@@ -136,7 +136,7 @@ def ls():
def check_inventory():
"""Checks loaded containers if they are of highest version"""
-if not lib.any_outdated():
+if not any_outdated_containers():
return
# Warn about outdated containers.

View file

@@ -4,17 +4,15 @@ import logging
import pyblish.api
-from openpype import lib
-from openpype.client import get_representation_by_id
from openpype.lib import register_event_callback
from openpype.pipeline import (
-legacy_io,
register_loader_plugin_path,
register_creator_plugin_path,
deregister_loader_plugin_path,
deregister_creator_plugin_path,
AVALON_CONTAINER_ID,
)
+from openpype.pipeline.load import get_outdated_containers
from openpype.pipeline.context_tools import get_current_project_asset
import openpype.hosts.harmony
import openpype.hosts.harmony.api as harmony
@@ -108,16 +106,7 @@ def check_inventory():
in Harmony.
"""
-project_name = legacy_io.active_project()
-outdated_containers = []
-for container in ls():
-representation_id = container['representation']
-representation_doc = get_representation_by_id(
-project_name, representation_id, fields=["parent"]
-)
-if representation_doc and not lib.is_latest(representation_doc):
-outdated_containers.append(container)
+outdated_containers = get_outdated_containers()
if not outdated_containers:
return

View file

@@ -5,8 +5,8 @@ from openpype.pipeline import (
load,
get_representation_path,
)
+from openpype.pipeline.context_tools import is_representation_from_latest
import openpype.hosts.harmony.api as harmony
-import openpype.lib
copy_files = """function copyFile(srcFilename, dstFilename)
@@ -280,9 +280,7 @@ class BackgroundLoader(load.LoaderPlugin):
)
def update(self, container, representation):
path = get_representation_path(representation)
with open(path) as json_file:
data = json.load(json_file)
@@ -300,10 +298,9 @@ class BackgroundLoader(load.LoaderPlugin):
bg_folder = os.path.dirname(path)
-path = get_representation_path(representation)
print(container)
+is_latest = is_representation_from_latest(representation)
for layer in sorted(layers):
file_to_import = [
os.path.join(bg_folder, layer).replace("\\", "/")
@@ -347,7 +344,7 @@
}
%s
""" % (sig, sig)
-if openpype.lib.is_latest(representation):
+if is_latest:
harmony.send({"function": func, "args": [node, "green"]})
else:
harmony.send({"function": func, "args": [node, "red"]})

View file

@@ -10,8 +10,8 @@ from openpype.pipeline import (
load,
get_representation_path,
)
+from openpype.pipeline.context_tools import is_representation_from_latest
import openpype.hosts.harmony.api as harmony
-import openpype.lib
class ImageSequenceLoader(load.LoaderPlugin):
@@ -109,7 +109,7 @@
)
# Colour node.
-if openpype.lib.is_latest(representation):
+if is_representation_from_latest(representation):
harmony.send(
{
"function": "PypeHarmony.setColor",

View file

@@ -10,8 +10,8 @@ from openpype.pipeline import (
load,
get_representation_path,
)
+from openpype.pipeline.context_tools import is_representation_from_latest
import openpype.hosts.harmony.api as harmony
-import openpype.lib
class TemplateLoader(load.LoaderPlugin):
@@ -83,7 +83,7 @@ class TemplateLoader(load.LoaderPlugin):
self_name = self.__class__.__name__
update_and_replace = False
-if openpype.lib.is_latest(representation):
+if is_representation_from_latest(representation):
self._set_green(node)
else:
self._set_red(node)

View file

@@ -12,13 +12,13 @@ from openpype.pipeline import (
register_loader_plugin_path,
AVALON_CONTAINER_ID,
)
+from openpype.pipeline.load import any_outdated_containers
import openpype.hosts.houdini
from openpype.hosts.houdini.api import lib
from openpype.lib import (
register_event_callback,
emit_event,
-any_outdated,
)
from .lib import get_asset_fps
@@ -245,7 +245,7 @@ def on_open():
# ensure it is using correct FPS for the asset
lib.validate_fps()
-if any_outdated():
+if any_outdated_containers():
from openpype.widgets import popup
log.warning("Scene has outdated content.")

View file

@@ -13,7 +13,6 @@ from openpype.host import HostBase, IWorkfileHost, ILoadHost
import openpype.hosts.maya
from openpype.tools.utils import host_tools
from openpype.lib import (
-any_outdated,
register_event_callback,
emit_event
)
@@ -28,6 +27,7 @@ from openpype.pipeline import (
deregister_creator_plugin_path,
AVALON_CONTAINER_ID,
)
+from openpype.pipeline.load import any_outdated_containers
from openpype.hosts.maya.lib import copy_workspace_mel
from . import menu, lib
from .workio import (
@@ -470,7 +470,7 @@ def on_open():
lib.validate_fps()
lib.fix_incompatible_containers()
-if any_outdated():
+if any_outdated_containers():
log.warning("Scene has outdated content.")
# Find maya main window

View file

@@ -6,7 +6,7 @@ Shader names are stored as simple text file over GridFS in mongodb.
"""
import os
from Qt import QtWidgets, QtCore, QtGui
-from openpype.lib.mongo import OpenPypeMongoConnection
+from openpype.client.mongo import OpenPypeMongoConnection
from openpype import resources
import gridfs

View file

@@ -128,8 +128,10 @@ class ExtractPlayblast(openpype.api.Extractor):
# Update preset with current panel setting
# if override_viewport_options is turned off
if not override_viewport_options:
+panel = cmds.getPanel(with_focus=True)
panel_preset = capture.parse_active_view()
preset.update(panel_preset)
+cmds.setFocus(panel)
path = capture.capture(**preset)

View file

@@ -101,6 +101,13 @@ class ExtractThumbnail(openpype.api.Extractor):
if preset.pop("isolate_view", False) and instance.data.get("isolate"):
preset["isolate"] = instance.data["setMembers"]
+# Show or Hide Image Plane
+image_plane = instance.data.get("imagePlane", True)
+if "viewport_options" in preset:
+preset["viewport_options"]["imagePlane"] = image_plane
+else:
+preset["viewport_options"] = {"imagePlane": image_plane}
with lib.maintained_time():
# Force viewer to False in call to capture because we have our own
# viewer opening call to allow a signal to trigger between
@@ -110,14 +117,17 @@
# Update preset with current panel setting
# if override_viewport_options is turned off
if not override_viewport_options:
+panel = cmds.getPanel(with_focus=True)
panel_preset = capture.parse_active_view()
preset.update(panel_preset)
+cmds.setFocus(panel)
path = capture.capture(**preset)
playblast = self._fix_playblast_output_path(path)
_, thumbnail = os.path.split(playblast)
self.log.info("file list {}".format(thumbnail))
if "representations" not in instance.data:

View file

@@ -10,7 +10,7 @@ from openpype.pipeline import legacy_io
import openpype.hosts.maya.api.action
from openpype.hosts.maya.api.shader_definition_editor import (
DEFINITION_FILENAME)
-from openpype.lib.mongo import OpenPypeMongoConnection
+from openpype.client.mongo import OpenPypeMongoConnection
import gridfs

View file

@@ -94,6 +94,7 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
# Farm rendering
self.log.info("flagged for farm render")
instance.data["transfer"] = False
+instance.data["farm"] = True
families.append("{}.farm".format(family))
family = families_ak.lower()

View file

@@ -1,6 +1,5 @@
import os
from Qt import QtWidgets
-from bson.objectid import ObjectId
import pyblish.api
@@ -13,8 +12,8 @@ from openpype.pipeline import (
deregister_loader_plugin_path,
deregister_creator_plugin_path,
AVALON_CONTAINER_ID,
-registered_host,
)
+from openpype.pipeline.load import any_outdated_containers
import openpype.hosts.photoshop
from . import lib
@@ -30,7 +29,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")
def check_inventory():
-if not lib.any_outdated():
+if not any_outdated_containers():
return
# Warn about outdated containers.

View file

@@ -10,9 +10,9 @@ from aiohttp.web_response import Response
from openpype.client import (
get_projects,
get_assets,
+OpenPypeMongoConnection,
)
from openpype.lib import (
-OpenPypeMongoConnection,
PypeLogger,
)
from openpype.lib.remote_publish import (

View file

@@ -6,6 +6,7 @@ import requests
import json
import subprocess
+from openpype.client import OpenPypeMongoConnection
from openpype.lib import PypeLogger
from .webpublish_routes import (
@@ -121,8 +122,6 @@ def run_webserver(*args, **kwargs):
def reprocess_failed(upload_dir, webserver_url):
# log.info("check_reprocesable_records")
-from openpype.lib import OpenPypeMongoConnection
mongo_client = OpenPypeMongoConnection.get_mongo_client()
database_name = os.environ["OPENPYPE_DATABASE_NAME"]
dbcon = mongo_client[database_name]["webpublishes"]

View file

@@ -15,11 +15,10 @@ from openpype.client import (
get_asset_by_name,
get_subset_by_name,
get_subsets,
-get_version_by_id,
get_last_versions,
get_last_version_by_subset_id,
+get_last_version_by_subset_name,
get_representations,
-get_representation_by_id,
get_workfile_info,
)
from openpype.settings import (
@@ -180,7 +179,7 @@ def with_pipeline_io(func):
return wrapped
-@with_pipeline_io
+@deprecated("openpype.pipeline.context_tools.is_representation_from_latest")
def is_latest(representation):
"""Return whether the representation is from latest version
@@ -191,49 +190,18 @@ def is_latest(representation):
bool: Whether the representation is of latest version.
"""
-project_name = legacy_io.active_project()
-version = get_version_by_id(
-project_name,
-representation["parent"],
-fields=["_id", "type", "parent"]
-)
-if version["type"] == "hero_version":
-return True
-# Get highest version under the parent
-last_version = get_last_version_by_subset_id(
-project_name, version["parent"], fields=["_id"]
-)
-return version["_id"] == last_version["_id"]
+from openpype.pipeline.context_tools import is_representation_from_latest
+return is_representation_from_latest(representation)
-@with_pipeline_io
+@deprecated("openpype.pipeline.load.any_outdated_containers")
def any_outdated():
"""Return whether the current scene has any outdated content"""
-from openpype.pipeline import registered_host
-project_name = legacy_io.active_project()
-checked = set()
-host = registered_host()
-for container in host.ls():
-representation = container['representation']
-if representation in checked:
-continue
-representation_doc = get_representation_by_id(
-project_name, representation, fields=["parent"]
-)
-if representation_doc and not is_latest(representation_doc):
-return True
-elif not representation_doc:
-log.debug("Container '{objectName}' has an invalid "
-"representation, it is missing in the "
-"database".format(**container))
-checked.add(representation)
-return False
+from openpype.pipeline.load import any_outdated_containers
+return any_outdated_containers()
@deprecated("openpype.pipeline.context_tools.get_current_project_asset")
@@ -313,7 +281,7 @@ def get_linked_assets(asset_doc):
return list(get_assets(project_name, link_ids))
-@with_pipeline_io
+@deprecated("openpype.client.get_last_version_by_subset_name")
def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None):
"""Retrieve latest version from `asset_name`, and `subset_name`.
@@ -334,6 +302,8 @@ def get_latest_version(asset_name, subset_name, dbcon=None, project_name=None):
if not project_name:
if not dbcon:
+from openpype.pipeline import legacy_io
log.debug("Using `legacy_io` for query.")
dbcon = legacy_io
# Make sure is installed
@@ -341,37 +311,9 @@
project_name = dbcon.active_project()
-log.debug((
-"Getting latest version for Project: \"{}\" Asset: \"{}\""
-" and Subset: \"{}\""
-).format(project_name, asset_name, subset_name))
-# Query asset document id by asset name
-asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"])
-if not asset_doc:
-log.info(
-"Asset \"{}\" was not found in Database.".format(asset_name)
-)
-return None
-subset_doc = get_subset_by_name(
-project_name, subset_name, asset_doc["_id"]
-)
-if not subset_doc:
-log.info(
-"Subset \"{}\" was not found in Database.".format(subset_name)
-)
-return None
-version_doc = get_last_version_by_subset_id(
-project_name, subset_doc["_id"]
-)
-if not version_doc:
-log.info(
-"Subset \"{}\" does not have any version yet.".format(subset_name)
-)
-return None
-return version_doc
+return get_last_version_by_subset_name(
+project_name, subset_name, asset_name=asset_name
+)
def get_workfile_template_key_from_context(
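The deprecated helper above now simply forwards to the client function. The equivalent direct call looks roughly like this; project, asset and subset names are placeholders:

```python
from openpype.client import get_last_version_by_subset_name

# Placeholder names; in a host session these usually come from the context.
version_doc = get_last_version_by_subset_name(
    "demo_project", "renderMain", asset_name="shot010"
)
if version_doc is None:
    print("Subset has no published version yet.")
else:
    print("Latest version:", version_doc["name"])
```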

View file

@@ -34,7 +34,7 @@ from openpype.settings import (
get_system_settings
)
-from . import validate_mongo_connection
+from openpype.client.mongo import validate_mongo_connection
_PLACEHOLDER = object()

View file

@@ -24,12 +24,13 @@ import traceback
import threading
import copy
-from . import Terminal
-from .mongo import (
+from openpype.client.mongo import (
MongoEnvNotSet,
get_default_components,
-OpenPypeMongoConnection
+OpenPypeMongoConnection,
)
+from . import Terminal
try:
import log4mongo
from log4mongo.handlers import MongoHandler

View file

@@ -1,206 +1,61 @@
+import warnings
+import functools
+from openpype.client.mongo import (
+MongoEnvNotSet,
+OpenPypeMongoConnection,
+)
+class MongoDeprecatedWarning(DeprecationWarning):
+pass
+def mongo_deprecated(func):
+"""Mark functions as deprecated.
+It will result in a warning being emitted when the function is used.
+"""
+@functools.wraps(func)
+def new_func(*args, **kwargs):
+warnings.simplefilter("always", MongoDeprecatedWarning)
+warnings.warn(
+(
+"Call to deprecated function '{}'."
+" Function was moved to 'openpype.client.mongo'."
+).format(func.__name__),
+category=MongoDeprecatedWarning,
+stacklevel=2
+)
+return func(*args, **kwargs)
+return new_func
+@mongo_deprecated
+def get_default_components():
+from openpype.client.mongo import get_default_components
+return get_default_components()
+@mongo_deprecated
+def should_add_certificate_path_to_mongo_url(mongo_url):
+from openpype.client.mongo import should_add_certificate_path_to_mongo_url
+return should_add_certificate_path_to_mongo_url(mongo_url)
+@mongo_deprecated
+def validate_mongo_connection(mongo_uri):
+from openpype.client.mongo import validate_mongo_connection
+return validate_mongo_connection(mongo_uri)
+__all__ = (
+"MongoEnvNotSet",
+"OpenPypeMongoConnection",
+"get_default_components",
+"should_add_certificate_path_to_mongo_url",
+"validate_mongo_connection",
+)
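A small sketch of what the shim above does for callers that still import from `openpype.lib.mongo`, assuming `OPENPYPE_MONGO` is set; the warning capture is only for illustration:

```python
import warnings
from openpype.lib.mongo import get_default_components  # old import path still works

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    components = get_default_components()  # forwards to openpype.client.mongo

# The shim emits a MongoDeprecatedWarning pointing to the new module.
print([str(w.message) for w in caught])
print(components["scheme"])
```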

View file

@@ -7,7 +7,7 @@ from bson.objectid import ObjectId
import pyblish.util
import pyblish.api
-from openpype.lib.mongo import OpenPypeMongoConnection
+from openpype.client.mongo import OpenPypeMongoConnection
from openpype.lib.plugin_tools import parse_json
ERROR_STATUS = "error"

View file

@@ -10,8 +10,10 @@ import clique
import pyblish.api
-import openpype.api
-from openpype.client import get_representations
+from openpype.client import (
+get_last_version_by_subset_name,
+get_representations,
+)
from openpype.pipeline import (
get_representation_path,
legacy_io,
@@ -343,8 +345,13 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# get latest version of subset
# this will stop if subset wasn't published yet
-version = openpype.api.get_latest_version(instance.data.get("asset"),
-instance.data.get("subset"))
+project_name = legacy_io.active_project()
+version = get_last_version_by_subset_name(
+project_name,
+instance.data.get("subset"),
+asset_name=instance.data.get("asset")
+)
# get its files based on extension
subset_resources = get_resources(
project_name, version, representation.get("ext")
@@ -1025,9 +1032,12 @@
prev_start = None
prev_end = None
-version = openpype.api.get_latest_version(asset_name=asset,
-subset_name=subset
-)
+project_name = legacy_io.active_project()
+version = get_last_version_by_subset_name(
+project_name,
+subset,
+asset_name=asset
+)
# Set prev start / end frames for comparison
if not prev_start and not prev_end:
@@ -1072,7 +1082,12 @@
based on 'publish' template
"""
if not version:
-version = openpype.api.get_latest_version(asset, subset)
+project_name = legacy_io.active_project()
+version = get_last_version_by_subset_name(
+project_name,
+subset,
+asset_name=asset
+)
if version:
version = int(version["name"]) + 1
else:

View file

@@ -1,11 +1,9 @@
import os
-import sys
import signal
import datetime
import subprocess
import socket
import json
-import platform
import getpass
import atexit
import time
@@ -13,12 +11,14 @@ import uuid
import ftrack_api
import pymongo
+from openpype.client.mongo import (
+OpenPypeMongoConnection,
+validate_mongo_connection,
+)
from openpype.lib import (
get_openpype_execute_args,
-OpenPypeMongoConnection,
get_openpype_version,
get_build_version,
-validate_mongo_connection
)
from openpype_modules.ftrack import FTRACK_MODULE_DIR
from openpype_modules.ftrack.lib import credentials
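For reference, a tiny sketch of how the relocated `validate_mongo_connection` behaves; the URL is a placeholder:

```python
import pymongo
from openpype.client.mongo import validate_mongo_connection

try:
    validate_mongo_connection("mongodb://localhost:27017")
except pymongo.errors.InvalidURI:
    print("URL is not a valid mongodb:// or mongodb+srv:// URI.")
except pymongo.errors.ServerSelectionTimeoutError:
    print("Mongo server could not be reached within the timeout.")
else:
    print("Connection is valid.")
```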

View file

@@ -24,7 +24,7 @@ except ImportError:
from ftrack_api._weakref import WeakMethod
from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info
-from openpype.lib import OpenPypeMongoConnection
+from openpype.client import OpenPypeMongoConnection
from openpype.api import Logger
TOPIC_STATUS_SERVER = "openpype.event.server.status"

View file

@@ -6,6 +6,8 @@ import socket
import pymongo
import ftrack_api
+from openpype.client import OpenPypeMongoConnection
from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer
from openpype_modules.ftrack.ftrack_server.lib import (
SocketSession,
@@ -15,7 +17,6 @@ from openpype_modules.ftrack.ftrack_server.lib import (
)
from openpype_modules.ftrack.lib import get_ftrack_event_mongo_info
from openpype.lib import (
-OpenPypeMongoConnection,
get_openpype_version,
get_build_version
)

View file

@@ -4,8 +4,8 @@ import pyblish.api
import copy
from datetime import datetime
+from openpype.client import OpenPypeMongoConnection
from openpype.lib.plugin_tools import prepare_template_data
-from openpype.lib import OpenPypeMongoConnection
class IntegrateSlackAPI(pyblish.api.InstancePlugin):

View file

@@ -14,6 +14,7 @@ from openpype.client import (
get_project,
get_asset_by_id,
get_asset_by_name,
+version_is_latest,
)
from openpype.modules import load_modules, ModulesManager
from openpype.settings import get_project_settings
@@ -334,3 +335,16 @@ def get_current_project_asset(asset_name=None, asset_id=None, fields=None):
if not asset_name:
return None
return get_asset_by_name(project_name, asset_name, fields=fields)
def is_representation_from_latest(representation):
"""Return whether the representation is from latest version
Args:
representation (dict): The representation document from the database.
Returns:
bool: Whether the representation is of latest version.
"""
project_name = legacy_io.active_project()
return version_is_latest(project_name, representation["parent"])
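A minimal usage sketch for the new helper, assuming a host session where `legacy_io` has an active project and the representation id belongs to that project; the id value below is a placeholder:

```python
from openpype.client import get_representation_by_id
from openpype.pipeline.context_tools import is_representation_from_latest

project_name = "demo_project"  # must match legacy_io's active project
repre_id = "6123456789abcdef01234567"  # placeholder representation id

repre_doc = get_representation_by_id(project_name, repre_id, fields=["_id", "parent"])
if repre_doc and not is_representation_from_latest(repre_doc):
    print("Container points to an outdated version.")
```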

View file

@@ -24,6 +24,10 @@ from .utils import (
loaders_from_repre_context,
loaders_from_representation,
+any_outdated_containers,
+get_outdated_containers,
+filter_containers,
)
from .plugins import (
@@ -66,6 +70,10 @@ __all__ = (
"loaders_from_repre_context",
"loaders_from_representation",
+"any_outdated_containers",
+"get_outdated_containers",
+"filter_containers",
# plugins.py
"LoaderPlugin",
"SubsetLoaderPlugin",

View file

@@ -4,8 +4,10 @@ import copy
import getpass
import logging
import inspect
+import collections
import numbers
+from openpype.host import ILoadHost
from openpype.client import (
get_project,
get_assets,
@@ -15,6 +17,7 @@ from openpype.client import (
get_last_version_by_subset_id,
get_hero_version_by_subset_id,
get_version_by_name,
+get_last_versions,
get_representations,
get_representation_by_id,
get_representation_by_name,
@@ -28,6 +31,11 @@ from openpype.pipeline import (
log = logging.getLogger(__name__)
+ContainersFilterResult = collections.namedtuple(
+"ContainersFilterResult",
+["latest", "outdated", "not_foud", "invalid"]
+)
class HeroVersionType(object):
def __init__(self, version):
@@ -685,3 +693,164 @@ def loaders_from_representation(loaders, representation):
context = get_representation_context(representation)
return loaders_from_repre_context(loaders, context)
def any_outdated_containers(host=None, project_name=None):
"""Check if there are any outdated containers in scene."""
if get_outdated_containers(host, project_name):
return True
return False
def get_outdated_containers(host=None, project_name=None):
"""Collect outdated containers from host scene.
Currently registered host and project in global session are used if
arguments are not passed.
Args:
host (ModuleType): Host implementation with 'ls' function available.
project_name (str): Name of project in which context we are.
"""
if host is None:
from openpype.pipeline import registered_host
host = registered_host()
if project_name is None:
project_name = legacy_io.active_project()
if isinstance(host, ILoadHost):
containers = host.get_containers()
else:
containers = host.ls()
return filter_containers(containers, project_name).outdated
def filter_containers(containers, project_name):
"""Filter containers and split them into 4 categories.
Categories are 'latest', 'outdated', 'invalid' and 'not_found'.
The 'latest' containers are from the last version, 'outdated' are not,
'invalid' are containers with invalid content and 'not_found' are missing
some entity in the database.
Args:
containers (Iterable[dict]): List of containers referenced into scene.
project_name (str): Name of project in which context should look for
versions.
Returns:
ContainersFilterResult: Named tuple with 'latest', 'outdated',
'invalid' and 'not_found' containers.
"""
# Make sure containers is list that won't change
containers = list(containers)
outdated_containers = []
uptodate_containers = []
not_found_containers = []
invalid_containers = []
output = ContainersFilterResult(
uptodate_containers,
outdated_containers,
not_found_containers,
invalid_containers
)
# Query representation docs to get their version ids
repre_ids = {
container["representation"]
for container in containers
if container["representation"]
}
if not repre_ids:
if containers:
invalid_containers.extend(containers)
return output
repre_docs = get_representations(
project_name,
representation_ids=repre_ids,
fields=["_id", "parent"]
)
# Store representations by stringified representation id
repre_docs_by_str_id = {}
repre_docs_by_version_id = collections.defaultdict(list)
for repre_doc in repre_docs:
repre_id = str(repre_doc["_id"])
version_id = repre_doc["parent"]
repre_docs_by_str_id[repre_id] = repre_doc
repre_docs_by_version_id[version_id].append(repre_doc)
# Query version docs to get their subset ids
# - also query hero versions to be able to identify if a representation
# belongs to an existing version
version_docs = get_versions(
project_name,
version_ids=repre_docs_by_version_id.keys(),
hero=True,
fields=["_id", "parent", "type"]
)
verisons_by_id = {}
versions_by_subset_id = collections.defaultdict(list)
hero_version_ids = set()
for version_doc in version_docs:
version_id = version_doc["_id"]
# Store versions by their ids
verisons_by_id[version_id] = version_doc
# There's no need to query subsets for hero versions
# - they are considered as latest?
if version_doc["type"] == "hero_version":
hero_version_ids.add(version_id)
continue
subset_id = version_doc["parent"]
versions_by_subset_id[subset_id].append(version_doc)
last_versions = get_last_versions(
project_name,
subset_ids=versions_by_subset_id.keys(),
fields=["_id"]
)
# Figure out which versions are outdated
outdated_version_ids = set()
for subset_id, last_version_doc in last_versions.items():
for version_doc in versions_by_subset_id[subset_id]:
version_id = version_doc["_id"]
if version_id != last_version_doc["_id"]:
outdated_version_ids.add(version_id)
# Based on all collected data figure out which containers are outdated
# - log out if there are missing representation or version documents
for container in containers:
container_name = container["objectName"]
repre_id = container["representation"]
if not repre_id:
invalid_containers.append(container)
continue
repre_doc = repre_docs_by_str_id.get(repre_id)
if not repre_doc:
log.debug((
"Container '{}' has an invalid representation."
" It is missing in the database."
).format(container_name))
not_found_containers.append(container)
continue
version_id = repre_doc["parent"]
if version_id in outdated_version_ids:
outdated_containers.append(container)
elif version_id not in verisons_by_id:
log.debug((
"Representation on container '{}' has an invalid version."
" It is missing in the database."
).format(container_name))
not_found_containers.append(container)
else:
uptodate_containers.append(container)
return output
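A rough usage sketch of the helpers above, assuming a registered host whose `ls()` yields container dicts with 'objectName' and 'representation' keys and an active project in `legacy_io`:

```python
from openpype.pipeline import registered_host, legacy_io
from openpype.pipeline.load import filter_containers, get_outdated_containers

host = registered_host()
project_name = legacy_io.active_project()

# One query pass splits containers into latest / outdated / not found / invalid.
result = filter_containers(host.ls(), project_name)
print("outdated:", [c["objectName"] for c in result.outdated])

# Or, when only the outdated containers are needed:
for container in get_outdated_containers(host, project_name):
    print("needs update:", container["objectName"])
```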

View file

@@ -5,6 +5,8 @@ import logging
import pymongo
from uuid import uuid4
+from openpype.client import OpenPypeMongoConnection
from . import schema
@@ -156,8 +158,6 @@ class AvalonMongoDB:
@property
def mongo_client(self):
-from openpype.lib import OpenPypeMongoConnection
return OpenPypeMongoConnection.get_mongo_client()
@property

View file

@ -1,4 +1,5 @@
import os import os
import tempfile
import pyblish.api import pyblish.api
from openpype.lib import ( from openpype.lib import (
@ -8,8 +9,6 @@ from openpype.lib import (
run_subprocess, run_subprocess,
path_to_subprocess_arg, path_to_subprocess_arg,
execute,
) )
@ -29,7 +28,27 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
ffmpeg_args = None ffmpeg_args = None
def process(self, instance): def process(self, instance):
self.log.info("subset {}".format(instance.data['subset'])) subset_name = instance.data["subset"]
instance_repres = instance.data.get("representations")
if not instance_repres:
self.log.debug((
"Instance {} does not have representations. Skipping"
).format(subset_name))
return
self.log.info(
"Processing instance with subset name {}".format(subset_name)
)
# Skip if instance have 'review' key in data set to 'False'
if not self._is_review_instance(instance):
self.log.info("Skipping - no review set on instance.")
return
# Check if already has thumbnail created
if self._already_has_thumbnail(instance_repres):
self.log.info("Thumbnail representation already present.")
return
# skip crypto passes. # skip crypto passes.
# TODO: This is just a quick fix and has its own side-effects - it is # TODO: This is just a quick fix and has its own side-effects - it is
@ -37,20 +56,29 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
# This must be solved properly, maybe using tags on # This must be solved properly, maybe using tags on
# representation that can be determined much earlier and # representation that can be determined much earlier and
# with better precision. # with better precision.
if 'crypto' in instance.data['subset'].lower(): if "crypto" in subset_name.lower():
self.log.info("Skipping crypto passes.") self.log.info("Skipping crypto passes.")
return return
# Skip if review not set.
if not instance.data.get("review", True):
self.log.info("Skipping - no review set on instance.")
return
if self._already_has_thumbnail(instance):
self.log.info("Thumbnail representation already present.")
return
filtered_repres = self._get_filtered_repres(instance) filtered_repres = self._get_filtered_repres(instance)
if not filtered_repres:
self.log.info((
"Instance don't have representations"
" that can be used as source for thumbnail. Skipping"
))
return
# Create temp directory for thumbnail
        # - this is to avoid overwriting the source file
dst_staging = tempfile.mkdtemp(prefix="pyblish_tmp_")
self.log.debug(
"Create temp directory {} for thumbnail".format(dst_staging)
)
# Store new staging to cleanup paths
instance.context.data["cleanupFullPaths"].append(dst_staging)
thumbnail_created = False
oiio_supported = is_oiio_supported()
for repre in filtered_repres: for repre in filtered_repres:
repre_files = repre["files"] repre_files = repre["files"]
if not isinstance(repre_files, (list, tuple)): if not isinstance(repre_files, (list, tuple)):
@@ -59,41 +87,43 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
file_index = int(float(len(repre_files)) * 0.5) file_index = int(float(len(repre_files)) * 0.5)
input_file = repre_files[file_index] input_file = repre_files[file_index]
stagingdir = os.path.normpath(repre["stagingDir"]) src_staging = os.path.normpath(repre["stagingDir"])
full_input_path = os.path.join(src_staging, input_file)
full_input_path = os.path.join(stagingdir, input_file)
self.log.info("input {}".format(full_input_path)) self.log.info("input {}".format(full_input_path))
filename = os.path.splitext(input_file)[0] filename = os.path.splitext(input_file)[0]
if not filename.endswith('.'): jpeg_file = filename + ".jpg"
filename += "." full_output_path = os.path.join(dst_staging, jpeg_file)
jpeg_file = filename + "jpg"
full_output_path = os.path.join(stagingdir, jpeg_file)
thumbnail_created = False if oiio_supported:
# Try to use FFMPEG if OIIO is not supported (for cases when self.log.info("Trying to convert with OIIO")
# oiiotool isn't available)
if not is_oiio_supported():
thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path) # noqa
else:
# If the input can read by OIIO then use OIIO method for # If the input can read by OIIO then use OIIO method for
# conversion otherwise use ffmpeg # conversion otherwise use ffmpeg
self.log.info("Trying to convert with OIIO") # noqa thumbnail_created = self.create_thumbnail_oiio(
thumbnail_created = self.create_thumbnail_oiio(full_input_path, full_output_path) # noqa full_input_path, full_output_path
)
if not thumbnail_created: # Try to use FFMPEG if OIIO is not supported or for cases when
self.log.info("Converting with FFMPEG because input can't be read by OIIO.") # noqa # oiiotool isn't available
thumbnail_created = self.create_thumbnail_ffmpeg(full_input_path, full_output_path) # noqa
# Skip the rest of the process if the thumbnail wasn't created
if not thumbnail_created: if not thumbnail_created:
self.log.warning("Thumbanil has not been created.") if oiio_supported:
return self.log.info((
"Converting with FFMPEG because input"
" can't be read by OIIO."
))
thumbnail_created = self.create_thumbnail_ffmpeg(
full_input_path, full_output_path
)
            # Skip representation and try next one if thumbnail wasn't created
if not thumbnail_created:
continue
new_repre = { new_repre = {
"name": "thumbnail", "name": "thumbnail",
"ext": "jpg", "ext": "jpg",
"files": jpeg_file, "files": jpeg_file,
"stagingDir": stagingdir, "stagingDir": dst_staging,
"thumbnail": True, "thumbnail": True,
"tags": ["thumbnail"] "tags": ["thumbnail"]
} }
@@ -106,12 +136,21 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
            # There is no need to create more than one thumbnail # There is no need to create more than one thumbnail
break break
def _already_has_thumbnail(self, instance): if not thumbnail_created:
        for repre in instance.data.get("representations", []): self.log.warning("Thumbnail has not been created.")
def _is_review_instance(self, instance):
# TODO: We should probably handle "not creating" of thumbnail
        #   some other way than checking for "review" key on instance data?
if instance.data.get("review", True):
return True
return False
def _already_has_thumbnail(self, repres):
for repre in repres:
self.log.info("repre {}".format(repre)) self.log.info("repre {}".format(repre))
if repre["name"] == "thumbnail": if repre["name"] == "thumbnail":
return True return True
return False return False
def _get_filtered_repres(self, instance): def _get_filtered_repres(self, instance):
@@ -136,12 +175,12 @@ class ExtractThumbnail(pyblish.api.InstancePlugin):
def create_thumbnail_oiio(self, src_path, dst_path): def create_thumbnail_oiio(self, src_path, dst_path):
self.log.info("outputting {}".format(dst_path)) self.log.info("outputting {}".format(dst_path))
oiio_tool_path = get_oiio_tools_path() oiio_tool_path = get_oiio_tools_path()
oiio_cmd = [oiio_tool_path, "-a", oiio_cmd = [
src_path, "-o", oiio_tool_path,
dst_path "-a", src_path,
] "-o", dst_path
subprocess_exr = " ".join(oiio_cmd) ]
self.log.info(f"running: {subprocess_exr}") self.log.info("running: {}".format(" ".join(oiio_cmd)))
try: try:
run_subprocess(oiio_cmd, logger=self.log) run_subprocess(oiio_cmd, logger=self.log)
return True return True
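A condensed sketch of the conversion fallback the reworked loop above implements. The helper name _convert_with_fallback is hypothetical; is_oiio_supported, create_thumbnail_oiio and create_thumbnail_ffmpeg are the names used by the plugin in this diff.

    def _convert_with_fallback(self, full_input_path, full_output_path):
        # Hypothetical helper summarizing the loop body above.
        thumbnail_created = False
        if is_oiio_supported():
            # Prefer oiiotool when the build ships with it
            thumbnail_created = self.create_thumbnail_oiio(
                full_input_path, full_output_path
            )
        if not thumbnail_created:
            # FFmpeg covers missing OIIO or inputs oiiotool can't read
            thumbnail_created = self.create_thumbnail_ffmpeg(
                full_input_path, full_output_path
            )
        return thumbnail_created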

View file

@@ -10,6 +10,11 @@ from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne
import pyblish.api import pyblish.api
import openpype.api import openpype.api
from openpype.client import (
get_representations,
get_subset_by_name,
get_version_by_name,
)
from openpype.lib.profiles_filtering import filter_profiles from openpype.lib.profiles_filtering import filter_profiles
from openpype.lib.file_transaction import FileTransaction from openpype.lib.file_transaction import FileTransaction
from openpype.pipeline import legacy_io from openpype.pipeline import legacy_io
@@ -156,7 +161,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
"mvUsdOverride", "mvUsdOverride",
"simpleUnrealTexture" "simpleUnrealTexture"
] ]
exclude_families = ["clip", "render.farm"]
default_template_name = "publish" default_template_name = "publish"
# Representation context keys that should always be written to # Representation context keys that should always be written to
@@ -190,14 +195,6 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
).format(instance.data["family"])) ).format(instance.data["family"]))
return return
# Exclude instances that also contain families from exclude families
families = set(get_instance_families(instance))
exclude = families & set(self.exclude_families)
if exclude:
self.log.debug("Instance not integrated due to exclude "
"families found: {}".format(", ".join(exclude)))
return
file_transactions = FileTransaction(log=self.log) file_transactions = FileTransaction(log=self.log)
try: try:
self.register(instance, file_transactions, filtered_repres) self.register(instance, file_transactions, filtered_repres)
@@ -274,6 +271,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
return filtered_repres return filtered_repres
def register(self, instance, file_transactions, filtered_repres): def register(self, instance, file_transactions, filtered_repres):
project_name = legacy_io.active_project()
instance_stagingdir = instance.data.get("stagingDir") instance_stagingdir = instance.data.get("stagingDir")
if not instance_stagingdir: if not instance_stagingdir:
self.log.info(( self.log.info((
@@ -289,19 +288,19 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
template_name = self.get_template_name(instance) template_name = self.get_template_name(instance)
subset, subset_writes = self.prepare_subset(instance) subset, subset_writes = self.prepare_subset(instance, project_name)
version, version_writes = self.prepare_version(instance, subset) version, version_writes = self.prepare_version(
instance, subset, project_name
)
instance.data["versionEntity"] = version instance.data["versionEntity"] = version
# Get existing representations (if any) # Get existing representations (if any)
existing_repres_by_name = { existing_repres_by_name = {
repres["name"].lower(): repres for repres in legacy_io.find( repre_doc["name"].lower(): repre_doc
{ for repre_doc in get_representations(
"parent": version["_id"], project_name,
"type": "representation" version_ids=[version["_id"]],
}, fields=["_id", "name"]
# Only care about id and name of existing representations
projection={"_id": True, "name": True}
) )
} }
@@ -426,17 +425,15 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
self.log.info("Registered {} representations" self.log.info("Registered {} representations"
"".format(len(prepared_representations))) "".format(len(prepared_representations)))
def prepare_subset(self, instance): def prepare_subset(self, instance, project_name):
asset = instance.data.get("assetEntity") asset_doc = instance.data.get("assetEntity")
subset_name = instance.data["subset"] subset_name = instance.data["subset"]
self.log.debug("Subset: {}".format(subset_name)) self.log.debug("Subset: {}".format(subset_name))
# Get existing subset if it exists # Get existing subset if it exists
subset = legacy_io.find_one({ subset_doc = get_subset_by_name(
"type": "subset", project_name, subset_name, asset_doc["_id"]
"parent": asset["_id"], )
"name": subset_name
})
# Define subset data # Define subset data
data = { data = {
@@ -448,68 +445,68 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
data["subsetGroup"] = subset_group data["subsetGroup"] = subset_group
bulk_writes = [] bulk_writes = []
if subset is None: if subset_doc is None:
# Create a new subset # Create a new subset
self.log.info("Subset '%s' not found, creating ..." % subset_name) self.log.info("Subset '%s' not found, creating ..." % subset_name)
subset = { subset_doc = {
"_id": ObjectId(), "_id": ObjectId(),
"schema": "openpype:subset-3.0", "schema": "openpype:subset-3.0",
"type": "subset", "type": "subset",
"name": subset_name, "name": subset_name,
"data": data, "data": data,
"parent": asset["_id"] "parent": asset_doc["_id"]
} }
bulk_writes.append(InsertOne(subset)) bulk_writes.append(InsertOne(subset_doc))
else: else:
# Update existing subset data with new data and set in database. # Update existing subset data with new data and set in database.
# We also change the found subset in-place so we don't need to # We also change the found subset in-place so we don't need to
# re-query the subset afterwards # re-query the subset afterwards
subset["data"].update(data) subset_doc["data"].update(data)
bulk_writes.append(UpdateOne( bulk_writes.append(UpdateOne(
{"type": "subset", "_id": subset["_id"]}, {"type": "subset", "_id": subset_doc["_id"]},
{"$set": { {"$set": {
"data": subset["data"] "data": subset_doc["data"]
}} }}
)) ))
self.log.info("Prepared subset: {}".format(subset_name)) self.log.info("Prepared subset: {}".format(subset_name))
return subset, bulk_writes return subset_doc, bulk_writes
def prepare_version(self, instance, subset):
def prepare_version(self, instance, subset_doc, project_name):
version_number = instance.data["version"] version_number = instance.data["version"]
version = { version_doc = {
"schema": "openpype:version-3.0", "schema": "openpype:version-3.0",
"type": "version", "type": "version",
"parent": subset["_id"], "parent": subset_doc["_id"],
"name": version_number, "name": version_number,
"data": self.create_version_data(instance) "data": self.create_version_data(instance)
} }
existing_version = legacy_io.find_one({ existing_version = get_version_by_name(
'type': 'version', project_name,
'parent': subset["_id"], version_number,
'name': version_number subset_doc["_id"],
}, projection={"_id": True}) fields=["_id"]
)
if existing_version: if existing_version:
self.log.debug("Updating existing version ...") self.log.debug("Updating existing version ...")
version["_id"] = existing_version["_id"] version_doc["_id"] = existing_version["_id"]
else: else:
self.log.debug("Creating new version ...") self.log.debug("Creating new version ...")
version["_id"] = ObjectId() version_doc["_id"] = ObjectId()
bulk_writes = [ReplaceOne( bulk_writes = [ReplaceOne(
filter={"_id": version["_id"]}, filter={"_id": version_doc["_id"]},
replacement=version, replacement=version_doc,
upsert=True upsert=True
)] )]
self.log.info("Prepared version: v{0:03d}".format(version["name"])) self.log.info("Prepared version: v{0:03d}".format(version_doc["name"]))
return version, bulk_writes return version_doc, bulk_writes
def prepare_representation(self, repre, def prepare_representation(self, repre,
template_name, template_name,
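A hedged sketch of how the subset and version bulk writes prepared above could be flushed with pymongo. The helper name is hypothetical, and get_project_connection is assumed here as the way to reach the project collection; the plugin itself is not shown doing this in the diff.

from openpype.client.entities import get_project_connection

def _flush_bulk_writes(project_name, subset_writes, version_writes):
    # Hypothetical helper: combine prepared InsertOne/UpdateOne/ReplaceOne
    # operations and send them in a single bulk_write call.
    operations = list(subset_writes) + list(version_writes)
    if not operations:
        return None
    collection = get_project_connection(project_name)
    return collection.bulk_write(operations)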

View file

@@ -1,5 +1,5 @@
import pyblish.api import pyblish.api
import openpype.lib from openpype.pipeline.load import any_outdated_containers
class ShowInventory(pyblish.api.Action): class ShowInventory(pyblish.api.Action):
@@ -19,10 +19,10 @@ class ValidateContainers(pyblish.api.ContextPlugin):
label = "Validate Containers" label = "Validate Containers"
order = pyblish.api.ValidatorOrder order = pyblish.api.ValidatorOrder
hosts = ["maya", "houdini", "nuke", "harmony", "photoshop"] hosts = ["maya", "houdini", "nuke", "harmony", "photoshop", "aftereffects"]
optional = True optional = True
actions = [ShowInventory] actions = [ShowInventory]
def process(self, context): def process(self, context):
if openpype.lib.any_outdated(): if any_outdated_containers():
raise ValueError("There are outdated containers in the scene.") raise ValueError("There are outdated containers in the scene.")

View file

@@ -7,6 +7,8 @@ from abc import ABCMeta, abstractmethod
import six import six
import openpype.version import openpype.version
from openpype.client.mongo import OpenPypeMongoConnection
from openpype.client.entities import get_project_connection, get_project
from .constants import ( from .constants import (
GLOBAL_SETTINGS_KEY, GLOBAL_SETTINGS_KEY,
@@ -337,9 +339,6 @@ class MongoSettingsHandler(SettingsHandler):
def __init__(self): def __init__(self):
# Get mongo connection # Get mongo connection
from openpype.lib import OpenPypeMongoConnection
from openpype.pipeline import AvalonMongoDB
settings_collection = OpenPypeMongoConnection.get_mongo_client() settings_collection = OpenPypeMongoConnection.get_mongo_client()
self._anatomy_keys = None self._anatomy_keys = None
@@ -362,7 +361,6 @@ class MongoSettingsHandler(SettingsHandler):
self.collection_name = collection_name self.collection_name = collection_name
self.collection = settings_collection[database_name][collection_name] self.collection = settings_collection[database_name][collection_name]
self.avalon_db = AvalonMongoDB()
self.system_settings_cache = CacheValues() self.system_settings_cache = CacheValues()
self.project_settings_cache = collections.defaultdict(CacheValues) self.project_settings_cache = collections.defaultdict(CacheValues)
@@ -607,16 +605,14 @@ class MongoSettingsHandler(SettingsHandler):
new_data = data_cache.data_copy() new_data = data_cache.data_copy()
# Prepare avalon project document # Prepare avalon project document
collection = self.avalon_db.database[project_name] project_doc = get_project(project_name)
project_doc = collection.find_one({
"type": "project"
})
if not project_doc: if not project_doc:
raise ValueError(( raise ValueError((
"Project document of project \"{}\" does not exists." "Project document of project \"{}\" does not exists."
" Create project first." " Create project first."
).format(project_name)) ).format(project_name))
collection = get_project_connection(project_name)
# Project's data # Project's data
update_dict_data = {} update_dict_data = {}
project_doc_data = project_doc.get("data") or {} project_doc_data = project_doc.get("data") or {}
@@ -1145,8 +1141,7 @@ class MongoSettingsHandler(SettingsHandler):
document, version document, version
) )
else: else:
collection = self.avalon_db.database[project_name] project_doc = get_project(project_name)
project_doc = collection.find_one({"type": "project"})
self.project_anatomy_cache[project_name].update_data( self.project_anatomy_cache[project_name].update_data(
self.project_doc_to_anatomy_data(project_doc), self.project_doc_to_anatomy_data(project_doc),
self._current_version self._current_version
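A brief sketch of the read-then-write pattern the handler switches to above: read the project document through the client API and keep the raw collection only for the update itself. The helper name and the update payload are placeholders for illustration.

from openpype.client.entities import get_project, get_project_connection

def _set_project_data_key(project_name, key, value):
    # Placeholder helper; the real handler builds its payload from
    # anatomy/settings caches.
    project_doc = get_project(project_name)
    if not project_doc:
        raise ValueError(
            "Project document of project \"{}\" does not exist."
            " Create project first.".format(project_name)
        )
    collection = get_project_connection(project_name)
    collection.update_one(
        {"_id": project_doc["_id"]},
        {"$set": {"data.{}".format(key): value}}
    )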

View file

@@ -22,7 +22,6 @@ def test_backward_compatibility(printer):
from openpype.lib import any_outdated from openpype.lib import any_outdated
from openpype.lib import get_asset from openpype.lib import get_asset
from openpype.lib import get_linked_assets from openpype.lib import get_linked_assets
from openpype.lib import get_latest_version
from openpype.lib import get_ffprobe_streams from openpype.lib import get_ffprobe_streams
from openpype.hosts.fusion.lib import switch_item from openpype.hosts.fusion.lib import switch_item

View file

@@ -1,9 +1,9 @@
import uuid import uuid
import html
from Qt import QtCore, QtGui from Qt import QtCore, QtGui
import pyblish.api import pyblish.api
from openpype.tools.utils.lib import html_escape
from .constants import ( from .constants import (
ITEM_ID_ROLE, ITEM_ID_ROLE,
ITEM_IS_GROUP_ROLE, ITEM_IS_GROUP_ROLE,
@@ -46,7 +46,7 @@ class InstancesModel(QtGui.QStandardItemModel):
all_removed = True all_removed = True
for instance_item in instance_items: for instance_item in instance_items:
item = QtGui.QStandardItem(instance_item.label) item = QtGui.QStandardItem(instance_item.label)
instance_label = html.escape(instance_item.label) instance_label = html_escape(instance_item.label)
item.setData(instance_label, ITEM_LABEL_ROLE) item.setData(instance_label, ITEM_LABEL_ROLE)
item.setData(instance_item.errored, ITEM_ERRORED_ROLE) item.setData(instance_item.errored, ITEM_ERRORED_ROLE)
item.setData(instance_item.id, ITEM_ID_ROLE) item.setData(instance_item.id, ITEM_ID_ROLE)

View file

@@ -22,13 +22,13 @@ Only one item can be selected at a time.
import re import re
import collections import collections
import html
from Qt import QtWidgets, QtCore from Qt import QtWidgets, QtCore
from openpype.widgets.nice_checkbox import NiceCheckbox from openpype.widgets.nice_checkbox import NiceCheckbox
from openpype.tools.utils import BaseClickableFrame from openpype.tools.utils import BaseClickableFrame
from openpype.tools.utils.lib import html_escape
from .widgets import ( from .widgets import (
AbstractInstanceView, AbstractInstanceView,
ContextWarningLabel, ContextWarningLabel,
@@ -308,7 +308,7 @@ class InstanceCardWidget(CardWidget):
self._last_variant = variant self._last_variant = variant
self._last_subset_name = subset_name self._last_subset_name = subset_name
# Make `variant` bold # Make `variant` bold
label = html.escape(self.instance.label) label = html_escape(self.instance.label)
found_parts = set(re.findall(variant, label, re.IGNORECASE)) found_parts = set(re.findall(variant, label, re.IGNORECASE))
if found_parts: if found_parts:
for part in found_parts: for part in found_parts:

View file

@@ -23,12 +23,12 @@ selection can be enabled disabled using checkbox or keyboard key presses:
``` ```
""" """
import collections import collections
import html
from Qt import QtWidgets, QtCore, QtGui from Qt import QtWidgets, QtCore, QtGui
from openpype.style import get_objected_colors from openpype.style import get_objected_colors
from openpype.widgets.nice_checkbox import NiceCheckbox from openpype.widgets.nice_checkbox import NiceCheckbox
from openpype.tools.utils.lib import html_escape
from .widgets import AbstractInstanceView from .widgets import AbstractInstanceView
from ..constants import ( from ..constants import (
INSTANCE_ID_ROLE, INSTANCE_ID_ROLE,
@@ -114,7 +114,7 @@ class InstanceListItemWidget(QtWidgets.QWidget):
self.instance = instance self.instance = instance
instance_label = html.escape(instance.label) instance_label = html_escape(instance.label)
subset_name_label = QtWidgets.QLabel(instance_label, self) subset_name_label = QtWidgets.QLabel(instance_label, self)
subset_name_label.setObjectName("ListViewSubsetName") subset_name_label.setObjectName("ListViewSubsetName")
@@ -181,7 +181,7 @@ class InstanceListItemWidget(QtWidgets.QWidget):
# Check subset name # Check subset name
label = self.instance.label label = self.instance.label
if label != self._instance_label_widget.text(): if label != self._instance_label_widget.text():
self._instance_label_widget.setText(html.escape(label)) self._instance_label_widget.setText(html_escape(label))
# Check active state # Check active state
self.set_active(self.instance["active"]) self.set_active(self.instance["active"])
# Check valid states # Check valid states

View file

@@ -37,6 +37,19 @@ def center_window(window):
window.move(geo.topLeft()) window.move(geo.topLeft())
def html_escape(text):
"""Basic escape of html syntax symbols in text."""
return (
text
.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace('"', "&quot;")
.replace("'", "&#x27;")
)
def set_style_property(widget, property_name, property_value): def set_style_property(widget, property_name, property_value):
"""Set widget's property that may affect style. """Set widget's property that may affect style.

View file

@@ -665,7 +665,10 @@ def _applied_camera_options(options, panel):
_iteritems = getattr(options, "iteritems", options.items) _iteritems = getattr(options, "iteritems", options.items)
for opt, value in _iteritems(): for opt, value in _iteritems():
_safe_setAttr(camera + "." + opt, value) if cmds.getAttr(camera + "." + opt, lock=True):
continue
else:
_safe_setAttr(camera + "." + opt, value)
try: try:
yield yield
@@ -673,7 +676,11 @@ def _applied_camera_options(options, panel):
if old_options: if old_options:
_iteritems = getattr(old_options, "iteritems", old_options.items) _iteritems = getattr(old_options, "iteritems", old_options.items)
for opt, value in _iteritems(): for opt, value in _iteritems():
_safe_setAttr(camera + "." + opt, value) #
if cmds.getAttr(camera + "." + opt, lock=True):
continue
else:
_safe_setAttr(camera + "." + opt, value)
@contextlib.contextmanager @contextlib.contextmanager