Mirror of https://github.com/ynput/ayon-core.git (synced 2025-12-24 21:04:40 +01:00)

[Automated] Merged develop into main

This commit is contained in: commit beadcabd5e

56 changed files with 2697 additions and 365 deletions
.gitignore (vendored): 3 changes
@@ -102,5 +102,8 @@ website/.docusaurus
.poetry/
.python-version
.editorconfig
.pre-commit-config.yaml
mypy.ini

tools/run_eventserver.*
@@ -122,7 +122,7 @@ class OpenPypeVersion(semver.VersionInfo):
         if self.staging:
             if kwargs.get("build"):
                 if "staging" not in kwargs.get("build"):
-                    kwargs["build"] = "{}-staging".format(kwargs.get("build"))
+                    kwargs["build"] = f"{kwargs.get('build')}-staging"
             else:
                 kwargs["build"] = "staging"
@@ -136,8 +136,7 @@ class OpenPypeVersion(semver.VersionInfo):
         return bool(result and self.staging == other.staging)

     def __repr__(self):
-        return "<{}: {} - path={}>".format(
-            self.__class__.__name__, str(self), self.path)
+        return f"<{self.__class__.__name__}: {str(self)} - path={self.path}>"

     def __lt__(self, other: OpenPypeVersion):
         result = super().__lt__(other)
@@ -232,10 +231,7 @@ class OpenPypeVersion(semver.VersionInfo):
         return openpype_version

     def __hash__(self):
-        if self.path:
-            return hash(self.path)
-        else:
-            return hash(str(self))
+        return hash(self.path) if self.path else hash(str(self))

     @staticmethod
     def is_version_in_dir(
@@ -384,7 +380,8 @@ class OpenPypeVersion(semver.VersionInfo):

     @classmethod
     def get_local_versions(
-        cls, production: bool = None, staging: bool = None
+        cls, production: bool = None,
+        staging: bool = None, compatible_with: OpenPypeVersion = None
     ) -> List:
         """Get all versions available on this machine.
@@ -394,6 +391,8 @@ class OpenPypeVersion(semver.VersionInfo):
         Args:
             production (bool): Return production versions.
             staging (bool): Return staging versions.
+            compatible_with (OpenPypeVersion): Return only those compatible
+                with specified version.
         """
         # Return all local versions if arguments are set to None
         if production is None and staging is None:
@@ -410,10 +409,19 @@ class OpenPypeVersion(semver.VersionInfo):
         if not production and not staging:
             return []

+        # DEPRECATED: backwards compatible way to look for versions in root
         dir_to_search = Path(user_data_dir("openpype", "pypeclub"))
         versions = OpenPypeVersion.get_versions_from_directory(
-            dir_to_search
+            dir_to_search, compatible_with=compatible_with
         )
+        if compatible_with:
+            dir_to_search = Path(
+                user_data_dir("openpype", "pypeclub")) / f"{compatible_with.major}.{compatible_with.minor}"  # noqa
+            versions += OpenPypeVersion.get_versions_from_directory(
+                dir_to_search, compatible_with=compatible_with
+            )

         filtered_versions = []
         for version in versions:
             if version.is_staging():
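In practice this means local versions are now resolved from two locations: the root of the user data directory (kept only for backward compatibility) and a sub-folder named after the compatible major.minor pair. On Linux, for example, user_data_dir("openpype", "pypeclub") typically resolves to ~/.local/share/openpype, so versions compatible with a 3.9 build would also be searched in ~/.local/share/openpype/3.9.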
@@ -425,7 +433,8 @@ class OpenPypeVersion(semver.VersionInfo):

     @classmethod
     def get_remote_versions(
-        cls, production: bool = None, staging: bool = None
+        cls, production: bool = None,
+        staging: bool = None, compatible_with: OpenPypeVersion = None
     ) -> List:
         """Get all versions available in OpenPype Path.
@@ -435,6 +444,8 @@ class OpenPypeVersion(semver.VersionInfo):
         Args:
             production (bool): Return production versions.
             staging (bool): Return staging versions.
+            compatible_with (OpenPypeVersion): Return only those compatible
+                with specified version.
         """
         # Return all local versions if arguments are set to None
         if production is None and staging is None:
@@ -468,7 +479,14 @@ class OpenPypeVersion(semver.VersionInfo):
         if not dir_to_search:
             return []

-        versions = cls.get_versions_from_directory(dir_to_search)
+        # DEPRECATED: look for version in root directory
+        versions = cls.get_versions_from_directory(
+            dir_to_search, compatible_with=compatible_with)
+        if compatible_with:
+            dir_to_search = dir_to_search / f"{compatible_with.major}.{compatible_with.minor}"  # noqa
+            versions += cls.get_versions_from_directory(
+                dir_to_search, compatible_with=compatible_with)

         filtered_versions = []
         for version in versions:
             if version.is_staging():
@@ -479,11 +497,15 @@ class OpenPypeVersion(semver.VersionInfo):
         return list(sorted(set(filtered_versions)))

     @staticmethod
-    def get_versions_from_directory(openpype_dir: Path) -> List:
+    def get_versions_from_directory(
+            openpype_dir: Path,
+            compatible_with: OpenPypeVersion = None) -> List:
         """Get all detected OpenPype versions in directory.

         Args:
             openpype_dir (Path): Directory to scan.
+            compatible_with (OpenPypeVersion): Return only versions compatible
+                with build version specified as OpenPypeVersion.

         Returns:
             list of OpenPypeVersion
@@ -492,10 +514,10 @@ class OpenPypeVersion(semver.VersionInfo):
             ValueError: if invalid path is specified.

         """
-        if not openpype_dir.exists() and not openpype_dir.is_dir():
-            raise ValueError("specified directory is invalid")
-
         _openpype_versions = []
+        if not openpype_dir.exists() and not openpype_dir.is_dir():
+            return _openpype_versions
+
         # iterate over directory in first level and find all that might
         # contain OpenPype.
         for item in openpype_dir.iterdir():
@@ -518,6 +540,10 @@ class OpenPypeVersion(semver.VersionInfo):
                 )[0]:
                     continue

+                if compatible_with and not detected_version.is_compatible(
+                        compatible_with):
+                    continue
+
                 detected_version.path = item
                 _openpype_versions.append(detected_version)
@@ -549,8 +575,9 @@ class OpenPypeVersion(semver.VersionInfo):
     def get_latest_version(
         staging: bool = False,
         local: bool = None,
-        remote: bool = None
-    ) -> OpenPypeVersion:
+        remote: bool = None,
+        compatible_with: OpenPypeVersion = None
+    ) -> Union[OpenPypeVersion, None]:
         """Get latest available version.

         The version does not contain information about path and source.
@@ -568,6 +595,9 @@ class OpenPypeVersion(semver.VersionInfo):
             staging (bool, optional): List staging versions if True.
             local (bool, optional): List local versions if True.
             remote (bool, optional): List remote versions if True.
+            compatible_with (OpenPypeVersion, optional): Return only a version
+                compatible with compatible_with.

         """
         if local is None and remote is None:
             local = True
@@ -598,7 +628,12 @@ class OpenPypeVersion(semver.VersionInfo):
             return None

         all_versions.sort()
-        return all_versions[-1]
+        latest_version: OpenPypeVersion
+        latest_version = all_versions[-1]
+        if compatible_with and not latest_version.is_compatible(
+                compatible_with):
+            return None
+        return latest_version

     @classmethod
     def get_expected_studio_version(cls, staging=False, global_settings=None):
@@ -621,6 +656,21 @@ class OpenPypeVersion(semver.VersionInfo):
             return None
         return OpenPypeVersion(version=result)

+    def is_compatible(self, version: OpenPypeVersion):
+        """Test build compatibility.
+
+        This will simply compare major and minor versions (ignoring patch
+        and the rest).
+
+        Args:
+            version (OpenPypeVersion): Version to check compatibility with.
+
+        Returns:
+            bool: if the version is compatible
+
+        """
+        return self.major == version.major and self.minor == version.minor
+

 class BootstrapRepos:
     """Class for bootstrapping local OpenPype installation.
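The new is_compatible() check reduces compatibility to a major.minor comparison. A minimal sketch of its behavior, assuming the OpenPypeVersion(version=...) constructor used elsewhere in this diff:

    build = OpenPypeVersion(version="3.9.1")
    OpenPypeVersion(version="3.9.7").is_compatible(build)   # True: same major.minor
    OpenPypeVersion(version="3.10.0").is_compatible(build)  # False: minor differs
    OpenPypeVersion(version="4.0.0").is_compatible(build)   # False: major differs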
@@ -741,8 +791,9 @@ class BootstrapRepos:
             return

         # create destination directory
-        if not self.data_dir.exists():
-            self.data_dir.mkdir(parents=True)
+        destination = self.data_dir / f"{installed_version.major}.{installed_version.minor}"  # noqa
+        if not destination.exists():
+            destination.mkdir(parents=True)

         # create zip inside temporary directory.
         with tempfile.TemporaryDirectory() as temp_dir:
@@ -770,7 +821,9 @@ class BootstrapRepos:
             Path to moved zip on success.

         """
-        destination = self.data_dir / zip_file.name
+        version = OpenPypeVersion.version_in_str(zip_file.name)
+        destination_dir = self.data_dir / f"{version.major}.{version.minor}"
+        destination = destination_dir / zip_file.name

         if destination.exists():
             self._print(
@@ -782,7 +835,7 @@ class BootstrapRepos:
                 self._print(str(e), LOG_ERROR, exc_info=True)
                 return None
         try:
-            shutil.move(zip_file.as_posix(), self.data_dir.as_posix())
+            shutil.move(zip_file.as_posix(), destination_dir.as_posix())
         except shutil.Error as e:
             self._print(str(e), LOG_ERROR, exc_info=True)
             return None
@@ -995,6 +1048,16 @@ class BootstrapRepos:

     @staticmethod
     def _validate_dir(path: Path) -> tuple:
+        """Validate checksums in a given path.
+
+        Args:
+            path (Path): path to folder to validate.
+
+        Returns:
+            tuple(bool, str): returns status and reason as a bool
+                and str in a tuple.
+
+        """
         checksums_file = Path(path / "checksums")
         if not checksums_file.exists():
             # FIXME: This should be set to False sometimes in the future
@@ -1076,7 +1139,20 @@ class BootstrapRepos:
         sys.path.insert(0, directory.as_posix())

     @staticmethod
-    def find_openpype_version(version, staging):
+    def find_openpype_version(
+            version: Union[str, OpenPypeVersion],
+            staging: bool,
+            compatible_with: OpenPypeVersion = None
+    ) -> Union[OpenPypeVersion, None]:
+        """Find location of specified OpenPype version.
+
+        Args:
+            version (Union[str, OpenPypeVersion]): Version to find.
+            staging (bool): Filter staging versions.
+            compatible_with (OpenPypeVersion, optional): Find only
+                versions compatible with specified one.
+
+        """
         if isinstance(version, str):
             version = OpenPypeVersion(version=version)
@@ -1085,7 +1161,8 @@ class BootstrapRepos:
             return installed_version

         local_versions = OpenPypeVersion.get_local_versions(
-            staging=staging, production=not staging
+            staging=staging, production=not staging,
+            compatible_with=compatible_with
         )
         zip_version = None
         for local_version in local_versions:
@@ -1099,7 +1176,8 @@ class BootstrapRepos:
             return zip_version

         remote_versions = OpenPypeVersion.get_remote_versions(
-            staging=staging, production=not staging
+            staging=staging, production=not staging,
+            compatible_with=compatible_with
         )
         for remote_version in remote_versions:
             if remote_version == version:
@@ -1107,13 +1185,14 @@ class BootstrapRepos:
         return None

     @staticmethod
-    def find_latest_openpype_version(staging):
+    def find_latest_openpype_version(
+            staging, compatible_with: OpenPypeVersion = None):
         installed_version = OpenPypeVersion.get_installed_version()
         local_versions = OpenPypeVersion.get_local_versions(
-            staging=staging
+            staging=staging, compatible_with=compatible_with
         )
         remote_versions = OpenPypeVersion.get_remote_versions(
-            staging=staging
+            staging=staging, compatible_with=compatible_with
         )
         all_versions = local_versions + remote_versions
         if not staging:
@@ -1138,7 +1217,9 @@ class BootstrapRepos:
         self,
         openpype_path: Union[Path, str] = None,
         staging: bool = False,
-        include_zips: bool = False) -> Union[List[OpenPypeVersion], None]:
+        include_zips: bool = False,
+        compatible_with: OpenPypeVersion = None
+    ) -> Union[List[OpenPypeVersion], None]:
         """Get ordered dict of detected OpenPype version.

         Resolution order for OpenPype is following:
@@ -1154,6 +1235,8 @@ class BootstrapRepos:
                 otherwise.
             include_zips (bool, optional): If set True it will try to find
                 OpenPype in zip files in given directory.
+            compatible_with (OpenPypeVersion, optional): Find only those
+                versions compatible with the one specified.

         Returns:
             dict of Path: Dictionary of detected OpenPype version.
@@ -1172,30 +1255,56 @@ class BootstrapRepos:
             ("Finding OpenPype in non-filesystem locations is"
              " not implemented yet."))

-        dir_to_search = self.data_dir
-        user_versions = self.get_openpype_versions(self.data_dir, staging)
-        # if we have openpype_path specified, search only there.
+        version_dir = ""
+        if compatible_with:
+            version_dir = f"{compatible_with.major}.{compatible_with.minor}"
+
+        # if checks below for OPENPYPE_PATH and registry fail, use data_dir
+        # DEPRECATED: lookup in root of this folder is deprecated in favour
+        # of major.minor sub-folders.
+        dirs_to_search = [
+            self.data_dir
+        ]
+        if compatible_with:
+            dirs_to_search.append(self.data_dir / version_dir)
+
         if openpype_path:
-            dir_to_search = openpype_path
+            dirs_to_search = [openpype_path]
+            if compatible_with:
+                dirs_to_search.append(openpype_path / version_dir)
         else:
-            if os.getenv("OPENPYPE_PATH"):
-                if Path(os.getenv("OPENPYPE_PATH")).exists():
-                    dir_to_search = Path(os.getenv("OPENPYPE_PATH"))
+            # first try OPENPYPE_PATH and if that is not available,
+            # try registry.
+            if os.getenv("OPENPYPE_PATH") \
+                    and Path(os.getenv("OPENPYPE_PATH")).exists():
+                dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))]
+                if compatible_with:
+                    dirs_to_search.append(
+                        Path(os.getenv("OPENPYPE_PATH")) / version_dir)
             else:
                 try:
                     registry_dir = Path(
                         str(self.registry.get_item("openPypePath")))
                     if registry_dir.exists():
-                        dir_to_search = registry_dir
+                        dirs_to_search = [registry_dir]
+                        if compatible_with:
+                            dirs_to_search.append(registry_dir / version_dir)

                 except ValueError:
                     # nothing found in registry, we'll use data dir
                     pass

-        openpype_versions = self.get_openpype_versions(dir_to_search, staging)
-        openpype_versions += user_versions
+        openpype_versions = []
+        for dir_to_search in dirs_to_search:
+            try:
+                openpype_versions += self.get_openpype_versions(
+                    dir_to_search, staging, compatible_with=compatible_with)
+            except ValueError:
+                # location is invalid, skip it
+                pass

         # remove zip file version if needed.
         if not include_zips:
             openpype_versions = [
                 v for v in openpype_versions if v.path.suffix != ".zip"
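The net effect of this rework: instead of a single search directory, a list of candidates is built. An explicit openpype_path argument wins, otherwise the OPENPYPE_PATH environment variable is tried, then the openPypePath registry item, with the local data directory as the fallback. When compatible_with is set, each candidate also contributes its major.minor sub-folder, and directories that fail validation are now skipped instead of aborting the whole lookup.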
@@ -1308,9 +1417,8 @@ class BootstrapRepos:
             raise ValueError(
                 f"version {version} is not associated with any file")

-        destination = self.data_dir / version.path.stem
-        if destination.exists():
-            assert destination.is_dir()
+        destination = self.data_dir / f"{version.major}.{version.minor}" / version.path.stem  # noqa
+        if destination.exists() and destination.is_dir():
             try:
                 shutil.rmtree(destination)
             except OSError as e:
@@ -1379,7 +1487,7 @@ class BootstrapRepos:
         else:
             dir_name = openpype_version.path.stem

-        destination = self.data_dir / dir_name
+        destination = self.data_dir / f"{openpype_version.major}.{openpype_version.minor}" / dir_name  # noqa

         # test if destination directory already exists, if so let's delete it.
         if destination.exists() and force:
@@ -1557,14 +1665,18 @@ class BootstrapRepos:
             return False
         return True

-    def get_openpype_versions(self,
-                              openpype_dir: Path,
-                              staging: bool = False) -> list:
+    def get_openpype_versions(
+            self,
+            openpype_dir: Path,
+            staging: bool = False,
+            compatible_with: OpenPypeVersion = None) -> list:
         """Get all detected OpenPype versions in directory.

         Args:
             openpype_dir (Path): Directory to scan.
             staging (bool, optional): Find staging versions if True.
+            compatible_with (OpenPypeVersion, optional): Get only versions
+                compatible with the one specified.

         Returns:
             list of OpenPypeVersion
@@ -1574,7 +1686,7 @@ class BootstrapRepos:

         """
         if not openpype_dir.exists() and not openpype_dir.is_dir():
-            raise ValueError("specified directory is invalid")
+            raise ValueError(f"specified directory {openpype_dir} is invalid")

         _openpype_versions = []
         # iterate over directory in first level and find all that might
@@ -1599,6 +1711,10 @@ class BootstrapRepos:
                 ):
                     continue

+                if compatible_with and \
+                        not detected_version.is_compatible(compatible_with):
+                    continue
+
                 detected_version.path = item
                 if staging and detected_version.is_staging():
                     _openpype_versions.append(detected_version)
@@ -21,6 +21,11 @@ class OpenPypeVersionNotFound(Exception):
     pass


+class OpenPypeVersionIncompatible(Exception):
+    """OpenPype version is not compatible with the installed one (build)."""
+    pass
+
+
 def should_add_certificate_path_to_mongo_url(mongo_url):
     """Check if should add ca certificate to mongo url.
@@ -443,3 +443,26 @@ def interactive():
         __version__, sys.version, sys.platform
     )
     code.interact(banner)
+
+
+@main.command()
+@click.option("--build", help="Print only build version",
+              is_flag=True, default=False)
+def version(build):
+    """Print OpenPype version."""
+
+    from openpype.version import __version__
+    from igniter.bootstrap_repos import BootstrapRepos, OpenPypeVersion
+    from pathlib import Path
+    import os
+
+    if getattr(sys, 'frozen', False):
+        local_version = BootstrapRepos.get_version(
+            Path(os.getenv("OPENPYPE_ROOT")))
+    else:
+        local_version = OpenPypeVersion.get_installed_version_str()
+
+    if build:
+        print(local_version)
+        return
+    print(f"{__version__} (booted: {local_version})")
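Assuming the openpype_console executable name used elsewhere in this diff, the new command can be invoked as "openpype_console version" to print both the package version and the booted build, or with --build to print only the latter.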
@@ -6,38 +6,12 @@ that has project name as a context (e.g. on 'ProjectEntity'?).
 + We will need more specific functions doing very specific queries really fast.
 """

-import os
 import collections

 import six
 from bson.objectid import ObjectId

-from .mongo import OpenPypeMongoConnection
-
-
-def _get_project_database():
-    db_name = os.environ.get("AVALON_DB") or "avalon"
-    return OpenPypeMongoConnection.get_mongo_client()[db_name]
-
-
-def get_project_connection(project_name):
-    """Direct access to mongo collection.
-
-    We're trying to avoid using direct access to mongo. This should be used
-    only for Create, Update and Remove operations until API calls for that
-    are implemented.
-
-    Args:
-        project_name (str): Project name for which collection should be
-            returned.
-
-    Returns:
-        pymongo.Collection: Collection related to passed project.
-    """
-
-    if not project_name:
-        raise ValueError("Invalid project name {}".format(str(project_name)))
-    return _get_project_database()[project_name]
+from .mongo import get_project_database, get_project_connection


 def _prepare_fields(fields, required_fields=None):
@@ -72,7 +46,7 @@ def _convert_ids(in_ids):


 def get_projects(active=True, inactive=False, fields=None):
-    mongodb = _get_project_database()
+    mongodb = get_project_database()
     for project_name in mongodb.collection_names():
         if project_name in ("system.indexes",):
             continue
@@ -208,3 +208,28 @@ class OpenPypeMongoConnection:
                 mongo_url, time.time() - t1
             ))
         return mongo_client
+
+
+def get_project_database():
+    db_name = os.environ.get("AVALON_DB") or "avalon"
+    return OpenPypeMongoConnection.get_mongo_client()[db_name]
+
+
+def get_project_connection(project_name):
+    """Direct access to mongo collection.
+
+    We're trying to avoid using direct access to mongo. This should be used
+    only for Create, Update and Remove operations until API calls for that
+    are implemented.
+
+    Args:
+        project_name (str): Project name for which collection should be
+            returned.
+
+    Returns:
+        pymongo.Collection: Collection related to passed project.
+    """
+
+    if not project_name:
+        raise ValueError("Invalid project name {}".format(str(project_name)))
+    return get_project_database()[project_name]
openpype/client/operations.py (new file, 587 lines)
@@ -0,0 +1,587 @@
import uuid
import copy
import collections
from abc import ABCMeta, abstractmethod, abstractproperty

import six
from bson.objectid import ObjectId
from pymongo import DeleteOne, InsertOne, UpdateOne

from .mongo import get_project_connection

REMOVED_VALUE = object()

CURRENT_PROJECT_SCHEMA = "openpype:project-3.0"
CURRENT_PROJECT_CONFIG_SCHEMA = "openpype:config-2.0"
CURRENT_ASSET_DOC_SCHEMA = "openpype:asset-3.0"
CURRENT_SUBSET_SCHEMA = "openpype:subset-3.0"
CURRENT_VERSION_SCHEMA = "openpype:version-3.0"
CURRENT_REPRESENTATION_SCHEMA = "openpype:representation-2.0"


def _create_or_convert_to_mongo_id(mongo_id):
    if mongo_id is None:
        return ObjectId()
    return ObjectId(mongo_id)


def new_project_document(
    project_name, project_code, config, data=None, entity_id=None
):
    """Create skeleton data of project document.

    Args:
        project_name (str): Name of project. Used as identifier of a project.
        project_code (str): Shorter version of project without spaces and
            special characters (in most cases). Should also be considered
            a unique name across projects.
        config (Dict[str, Any]): Project config consisting of roots, templates,
            applications and other project Anatomy related data.
        data (Dict[str, Any]): Project data with information about its
            attributes (e.g. 'fps' etc.) or integration specific keys.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of project document.
    """

    if data is None:
        data = {}

    data["code"] = project_code

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "name": project_name,
        "type": CURRENT_PROJECT_SCHEMA,
        "entity_data": data,
        "config": config
    }
def new_asset_document(
    name, project_id, parent_id, parents, data=None, entity_id=None
):
    """Create skeleton data of asset document.

    Args:
        name (str): Is considered as unique identifier of asset in project.
        project_id (Union[str, ObjectId]): Id of project document.
        parent_id (Union[str, ObjectId]): Id of parent asset.
        parents (List[str]): List of parent assets names.
        data (Dict[str, Any]): Asset document data. Empty dictionary is used
            if not passed. Value of 'parent_id' is used to fill 'visualParent'.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of asset document.
    """

    if data is None:
        data = {}
    if parent_id is not None:
        parent_id = ObjectId(parent_id)
    data["visualParent"] = parent_id
    data["parents"] = parents

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "type": "asset",
        "name": name,
        "parent": ObjectId(project_id),
        "data": data,
        "schema": CURRENT_ASSET_DOC_SCHEMA
    }


def new_subset_document(name, family, asset_id, data=None, entity_id=None):
    """Create skeleton data of subset document.

    Args:
        name (str): Is considered as unique identifier of subset under asset.
        family (str): Subset's family.
        asset_id (Union[str, ObjectId]): Id of parent asset.
        data (Dict[str, Any]): Subset document data. Empty dictionary is used
            if not passed. Value of 'family' is used to fill 'family'.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of subset document.
    """

    if data is None:
        data = {}
    data["family"] = family
    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "schema": CURRENT_SUBSET_SCHEMA,
        "type": "subset",
        "name": name,
        "data": data,
        "parent": asset_id
    }
def new_version_doc(version, subset_id, data=None, entity_id=None):
    """Create skeleton data of version document.

    Args:
        version (int): Is considered as unique identifier of version
            under subset.
        subset_id (Union[str, ObjectId]): Id of parent subset.
        data (Dict[str, Any]): Version document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of version document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "schema": CURRENT_VERSION_SCHEMA,
        "type": "version",
        "name": int(version),
        "parent": subset_id,
        "data": data
    }


def new_representation_doc(
    name, version_id, context, data=None, entity_id=None
):
    """Create skeleton data of representation document.

    Args:
        name (str): Is considered as unique identifier of representation
            under version.
        version_id (Union[str, ObjectId]): Id of parent version.
        context (Dict[str, Any]): Representation context used to fill
            templates or to query.
        data (Dict[str, Any]): Representation document data.
        entity_id (Union[str, ObjectId]): Predefined id of document. New id is
            created if not passed.

    Returns:
        Dict[str, Any]: Skeleton of representation document.
    """

    if data is None:
        data = {}

    return {
        "_id": _create_or_convert_to_mongo_id(entity_id),
        "schema": CURRENT_REPRESENTATION_SCHEMA,
        "type": "representation",
        "parent": version_id,
        "name": name,
        "data": data,

        # Imprint shortcut to context for performance reasons.
        "context": context
    }
def _prepare_update_data(old_doc, new_doc, replace):
    changes = {}
    for key, value in new_doc.items():
        if key not in old_doc or value != old_doc[key]:
            changes[key] = value

    if replace:
        for key in old_doc.keys():
            if key not in new_doc:
                changes[key] = REMOVED_VALUE
    return changes


def prepare_subset_update_data(old_doc, new_doc, replace=True):
    """Compare two subset documents and prepare update data.

    Based on compared values will create update data for 'UpdateOperation'.

    Empty output means that documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


def prepare_version_update_data(old_doc, new_doc, replace=True):
    """Compare two version documents and prepare update data.

    Based on compared values will create update data for 'UpdateOperation'.

    Empty output means that documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)


def prepare_representation_update_data(old_doc, new_doc, replace=True):
    """Compare two representation documents and prepare update data.

    Based on compared values will create update data for 'UpdateOperation'.

    Empty output means that documents are identical.

    Returns:
        Dict[str, Any]: Changes between old and new document.
    """

    return _prepare_update_data(old_doc, new_doc, replace)
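A small illustration of how these helpers behave (the documents are hypothetical): keys whose values differ are kept, and with replace=True keys missing from the new document are marked with the REMOVED_VALUE sentinel, which UpdateOperation later translates into a Mongo $unset:

    from openpype.client.operations import (
        REMOVED_VALUE, prepare_subset_update_data
    )

    old_doc = {"name": "modelMain", "data": {"frames": 10}, "stale_key": 1}
    new_doc = {"name": "modelMain", "data": {"frames": 24}}

    changes = prepare_subset_update_data(old_doc, new_doc)
    assert changes == {"data": {"frames": 24}, "stale_key": REMOVED_VALUE}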
@six.add_metaclass(ABCMeta)
class AbstractOperation(object):
    """Base operation class.

    Operation represents a call into database. The call can create, change or
    remove data.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
    """

    def __init__(self, project_name, entity_type):
        self._project_name = project_name
        self._entity_type = entity_type
        self._id = str(uuid.uuid4())

    @property
    def project_name(self):
        return self._project_name

    @property
    def id(self):
        """Identifier of operation."""

        return self._id

    @property
    def entity_type(self):
        return self._entity_type

    @abstractproperty
    def operation_name(self):
        """Stringified type of operation."""

        pass

    @abstractmethod
    def to_mongo_operation(self):
        """Convert operation to Mongo batch operation."""

        pass

    def to_data(self):
        """Convert operation to data that can be converted to json or others.

        Warning:
            Current state returns ObjectId objects which cannot be parsed by
            json.

        Returns:
            Dict[str, Any]: Description of operation.
        """

        return {
            "id": self._id,
            "entity_type": self.entity_type,
            "project_name": self.project_name,
            "operation": self.operation_name
        }
class CreateOperation(AbstractOperation):
    """Operation to create an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        data (Dict[str, Any]): Data of entity that will be created.
    """

    operation_name = "create"

    def __init__(self, project_name, entity_type, data):
        super(CreateOperation, self).__init__(project_name, entity_type)

        if not data:
            data = {}
        else:
            data = copy.deepcopy(dict(data))

        if "_id" not in data:
            data["_id"] = ObjectId()
        else:
            data["_id"] = ObjectId(data["_id"])

        self._entity_id = data["_id"]
        self._data = data

    def __setitem__(self, key, value):
        self.set_value(key, value)

    def __getitem__(self, key):
        return self.data[key]

    def set_value(self, key, value):
        self.data[key] = value

    def get(self, key, *args, **kwargs):
        return self.data.get(key, *args, **kwargs)

    @property
    def entity_id(self):
        return self._entity_id

    @property
    def data(self):
        return self._data

    def to_mongo_operation(self):
        return InsertOne(copy.deepcopy(self._data))

    def to_data(self):
        output = super(CreateOperation, self).to_data()
        output["data"] = copy.deepcopy(self.data)
        return output
class UpdateOperation(AbstractOperation):
    """Operation to update an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Identifier of an entity.
        update_data (Dict[str, Any]): Key -> value changes that will be set in
            database. If value is set to 'REMOVED_VALUE' the key will be
            removed. Only first level of dictionary is checked (on purpose).
    """

    operation_name = "update"

    def __init__(self, project_name, entity_type, entity_id, update_data):
        super(UpdateOperation, self).__init__(project_name, entity_type)

        self._entity_id = ObjectId(entity_id)
        self._update_data = update_data

    @property
    def entity_id(self):
        return self._entity_id

    @property
    def update_data(self):
        return self._update_data

    def to_mongo_operation(self):
        unset_data = {}
        set_data = {}
        for key, value in self._update_data.items():
            if value is REMOVED_VALUE:
                unset_data[key] = value
            else:
                set_data[key] = value

        op_data = {}
        if unset_data:
            op_data["$unset"] = unset_data
        if set_data:
            op_data["$set"] = set_data

        if not op_data:
            return None

        return UpdateOne(
            {"_id": self.entity_id},
            op_data
        )

    def to_data(self):
        changes = {}
        for key, value in self._update_data.items():
            if value is REMOVED_VALUE:
                value = None
            changes[key] = value

        output = super(UpdateOperation, self).to_data()
        output.update({
            "entity_id": self.entity_id,
            "changes": changes
        })
        return output
class DeleteOperation(AbstractOperation):
    """Operation to delete an entity.

    Args:
        project_name (str): On which project operation will happen.
        entity_type (str): Type of entity on which change happens.
            e.g. 'asset', 'representation' etc.
        entity_id (Union[str, ObjectId]): Entity id that will be removed.
    """

    operation_name = "delete"

    def __init__(self, project_name, entity_type, entity_id):
        super(DeleteOperation, self).__init__(project_name, entity_type)

        self._entity_id = ObjectId(entity_id)

    @property
    def entity_id(self):
        return self._entity_id

    def to_mongo_operation(self):
        return DeleteOne({"_id": self.entity_id})

    def to_data(self):
        output = super(DeleteOperation, self).to_data()
        output["entity_id"] = self.entity_id
        return output
class OperationsSession(object):
    """Session storing operations that should happen in an order.

    At this moment it does not handle anything special and can be considered
    a simple list of operations that will happen one after another. If
    creation of the same entity is present multiple times it is not handled
    in any way and document values are not validated.

    All operations must be related to a single project.

    Args:
        project_name (str): Project name to which operations are related.
    """

    def __init__(self):
        self._operations = []

    def add(self, operation):
        """Add operation to be processed.

        Args:
            operation (BaseOperation): Operation that should be processed.
        """
        if not isinstance(
            operation,
            (CreateOperation, UpdateOperation, DeleteOperation)
        ):
            raise TypeError("Expected Operation object got {}".format(
                str(type(operation))
            ))

        self._operations.append(operation)

    def append(self, operation):
        """Add operation to be processed.

        Args:
            operation (BaseOperation): Operation that should be processed.
        """

        self.add(operation)

    def extend(self, operations):
        """Add operations to be processed.

        Args:
            operations (List[BaseOperation]): Operations that should be
                processed.
        """

        for operation in operations:
            self.add(operation)

    def remove(self, operation):
        """Remove operation."""

        self._operations.remove(operation)

    def clear(self):
        """Clear all registered operations."""

        self._operations = []

    def to_data(self):
        return [
            operation.to_data()
            for operation in self._operations
        ]

    def commit(self):
        """Commit session operations."""

        operations, self._operations = self._operations, []
        if not operations:
            return

        operations_by_project = collections.defaultdict(list)
        for operation in operations:
            operations_by_project[operation.project_name].append(operation)

        for project_name, operations in operations_by_project.items():
            bulk_writes = []
            for operation in operations:
                mongo_op = operation.to_mongo_operation()
                if mongo_op is not None:
                    bulk_writes.append(mongo_op)

            if bulk_writes:
                collection = get_project_connection(project_name)
                collection.bulk_write(bulk_writes)

    def create_entity(self, project_name, entity_type, data):
        """Fast access to 'CreateOperation'.

        Returns:
            CreateOperation: Object of create operation.
        """

        operation = CreateOperation(project_name, entity_type, data)
        self.add(operation)
        return operation

    def update_entity(self, project_name, entity_type, entity_id, update_data):
        """Fast access to 'UpdateOperation'.

        Returns:
            UpdateOperation: Object of update operation.
        """

        operation = UpdateOperation(
            project_name, entity_type, entity_id, update_data
        )
        self.add(operation)
        return operation

    def delete_entity(self, project_name, entity_type, entity_id):
        """Fast access to 'DeleteOperation'.

        Returns:
            DeleteOperation: Object of delete operation.
        """

        operation = DeleteOperation(project_name, entity_type, entity_id)
        self.add(operation)
        return operation
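A minimal usage sketch of the session API above; the project name and ids are hypothetical:

    from bson.objectid import ObjectId
    from openpype.client.operations import (
        OperationsSession, new_asset_document
    )

    project_id = ObjectId()  # hypothetical project document id
    asset_doc = new_asset_document(
        "shot010", project_id, parent_id=None, parents=[])

    session = OperationsSession()
    session.create_entity("demo_project", "asset", asset_doc)
    session.update_entity(
        "demo_project", "asset", asset_doc["_id"], {"data.fps": 25})
    # nothing touches the database until commit(); operations are
    # grouped per project and sent as a single bulk_write each
    session.commit()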
@@ -309,6 +309,42 @@ class ARenderProducts:

         return lib.get_attr_in_layer(plug, layer=self.layer)

+    @staticmethod
+    def extract_separator(file_prefix):
+        """Extract AOV separator character from the prefix.
+
+        Default behavior extracts the part between
+        last occurrences of <RenderLayer> and <RenderPass>
+
+        Todo:
+            This code also triggers for V-Ray which overrides it explicitly
+            so this code will invalidly debug log it couldn't extract the
+            AOV separator even though it does set it in RenderProductsVray.
+
+        Args:
+            file_prefix (str): File prefix with tokens.
+
+        Returns:
+            str or None: separator if it can be extracted.
+
+        """
+        layer_tokens = ["<renderlayer>", "<layer>"]
+        aov_tokens = ["<aov>", "<renderpass>"]
+
+        def match_last(tokens, text):
+            """Regex match the last occurrence from a list of tokens."""
+            pattern = "(?:.*)({})".format("|".join(tokens))
+            return re.search(pattern, text, re.IGNORECASE)
+
+        layer_match = match_last(layer_tokens, file_prefix)
+        aov_match = match_last(aov_tokens, file_prefix)
+        separator = None
+        if layer_match and aov_match:
+            matches = sorted((layer_match, aov_match),
+                             key=lambda match: match.end(1))
+            separator = file_prefix[matches[0].end(1):matches[1].start(1)]
+        return separator
+
     def _get_layer_data(self):
         # type: () -> LayerMetadata
         # ______________________________________________
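The separator extraction relies on a greedy regex so the capture group lands on the last occurrence of each token. A standalone sketch with a hypothetical prefix:

    import re

    layer_tokens = ["<renderlayer>", "<layer>"]
    aov_tokens = ["<aov>", "<renderpass>"]

    def match_last(tokens, text):
        # the greedy ".*" pushes the capture group to the last occurrence
        pattern = "(?:.*)({})".format("|".join(tokens))
        return re.search(pattern, text, re.IGNORECASE)

    prefix = "maya/<Scene>/<RenderLayer>/<RenderLayer>_<RenderPass>"
    layer_match = match_last(layer_tokens, prefix)
    aov_match = match_last(aov_tokens, prefix)
    matches = sorted((layer_match, aov_match), key=lambda m: m.end(1))
    print(prefix[matches[0].end(1):matches[1].start(1)])  # "_"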
@@ -317,7 +353,7 @@ class ARenderProducts:
         # ____________________/
         _, scene_basename = os.path.split(cmds.file(q=True, loc=True))
         scene_name, _ = os.path.splitext(scene_basename)
-
+        kwargs = {}
         file_prefix = self.get_renderer_prefix()

         # If the Render Layer belongs to a Render Setup layer then the
@@ -332,26 +368,8 @@ class ARenderProducts:
             # defaultRenderLayer renders as masterLayer
             layer_name = "masterLayer"

-        # AOV separator - default behavior extracts the part between
-        # last occurrences of <RenderLayer> and <RenderPass>
-        # todo: This code also triggers for V-Ray which overrides it explicitly
-        #       so this code will invalidly debug log it couldn't extract the
-        #       aov separator even though it does set it in RenderProductsVray
-        layer_tokens = ["<renderlayer>", "<layer>"]
-        aov_tokens = ["<aov>", "<renderpass>"]
-
-        def match_last(tokens, text):
-            """Regex match the last occurrence from a list of tokens."""
-            pattern = "(?:.*)({})".format("|".join(tokens))
-            return re.search(pattern, text, re.IGNORECASE)
-
-        layer_match = match_last(layer_tokens, file_prefix)
-        aov_match = match_last(aov_tokens, file_prefix)
-        kwargs = {}
-        if layer_match and aov_match:
-            matches = sorted((layer_match, aov_match),
-                             key=lambda match: match.end(1))
-            separator = file_prefix[matches[0].end(1):matches[1].start(1)]
+        separator = self.extract_separator(file_prefix)
         if separator:
             kwargs["aov_separator"] = separator
         else:
             log.debug("Couldn't extract aov separator from "
@@ -962,8 +980,9 @@ class RenderProductsRedshift(ARenderProducts):
             :func:`ARenderProducts.get_renderer_prefix()`

         """
-        prefix = super(RenderProductsRedshift, self).get_renderer_prefix()
-        prefix = "{}{}<aov>".format(prefix, self.aov_separator)
+        file_prefix = super(RenderProductsRedshift, self).get_renderer_prefix()
+        separator = self.extract_separator(file_prefix)
+        prefix = "{}{}<aov>".format(file_prefix, separator or "_")
         return prefix

     def get_render_products(self):
@@ -205,7 +205,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
             .get('maya')\
             .get('create')\
             .get('CreateRender')\
-            .get('default_render_image_folder')
+            .get('default_render_image_folder') or ""
         # replace relative paths with absolute. Render products are
         # returned as list of dictionaries.
         publish_meta_path = None
@@ -318,7 +318,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
             "useReferencedAovs": render_instance.data.get(
                 "useReferencedAovs") or render_instance.data.get(
                     "vrayUseReferencedAovs") or False,
-            "aovSeparator": aov_separator
+            "aovSeparator": layer_render_products.layer_data.aov_separator  # noqa: E501
         }

         # Collect Deadline url if Deadline module is enabled
@@ -80,7 +80,8 @@ class AfterEffectsSubmitDeadline(
             "AVALON_TASK",
             "AVALON_APP_NAME",
             "OPENPYPE_DEV",
-            "OPENPYPE_LOG_NO_COLORS"
+            "OPENPYPE_LOG_NO_COLORS",
+            "OPENPYPE_VERSION"
         ]
         # Add mongo url if it's enabled
         if self._instance.context.data.get("deadlinePassMongoUrl"):
@@ -274,7 +274,8 @@ class HarmonySubmitDeadline(
             "AVALON_TASK",
             "AVALON_APP_NAME",
             "OPENPYPE_DEV",
-            "OPENPYPE_LOG_NO_COLORS"
+            "OPENPYPE_LOG_NO_COLORS",
+            "OPENPYPE_VERSION"
         ]
         # Add mongo url if it's enabled
         if self._instance.context.data.get("deadlinePassMongoUrl"):
@@ -130,6 +130,7 @@ class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin):
             # this application with so the Render Slave can build its own
             # similar environment using it, e.g. "houdini17.5;pluginx2.3"
             "AVALON_TOOLS",
+            "OPENPYPE_VERSION"
         ]
         # Add mongo url if it's enabled
         if context.data.get("deadlinePassMongoUrl"):
@@ -101,6 +101,7 @@ class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin):
             # this application with so the Render Slave can build its own
             # similar environment using it, e.g. "maya2018;vray4.x;yeti3.1.9"
             "AVALON_TOOLS",
+            "OPENPYPE_VERSION"
         ]
         # Add mongo url if it's enabled
         if context.data.get("deadlinePassMongoUrl"):
@@ -519,12 +519,14 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
             "FTRACK_API_KEY",
             "FTRACK_API_USER",
             "FTRACK_SERVER",
             "OPENPYPE_SG_USER",
             "AVALON_PROJECT",
             "AVALON_ASSET",
             "AVALON_TASK",
             "AVALON_APP_NAME",
             "OPENPYPE_DEV",
-            "OPENPYPE_LOG_NO_COLORS"
+            "OPENPYPE_LOG_NO_COLORS",
+            "OPENPYPE_VERSION"
         ]
         # Add mongo url if it's enabled
         if instance.context.data.get("deadlinePassMongoUrl"):
@@ -100,7 +100,8 @@ class MayaSubmitRemotePublishDeadline(pyblish.api.InstancePlugin):
         keys = [
             "FTRACK_API_USER",
             "FTRACK_API_KEY",
-            "FTRACK_SERVER"
+            "FTRACK_SERVER",
+            "OPENPYPE_VERSION"
         ]
         environment = dict({key: os.environ[key] for key in keys
                             if key in os.environ}, **legacy_io.Session)
@@ -253,7 +253,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
             "PYBLISHPLUGINPATH",
             "NUKE_PATH",
             "TOOL_ENV",
-            "FOUNDRY_LICENSE"
+            "FOUNDRY_LICENSE",
+            "OPENPYPE_VERSION"
         ]
         # Add mongo url if it's enabled
         if instance.context.data.get("deadlinePassMongoUrl"):
@@ -141,7 +141,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
             "OPENPYPE_USERNAME",
             "OPENPYPE_RENDER_JOB",
             "OPENPYPE_PUBLISH_JOB",
-            "OPENPYPE_MONGO"
+            "OPENPYPE_MONGO",
+            "OPENPYPE_VERSION"
         ]

         # custom deadline attributes
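All of the Deadline submitters above now forward OPENPYPE_VERSION into the job environment. The GlobalJobPreLoad script and the Deadline OpenPype plugin in the following hunks read that key back on the worker to locate an installed build whose major.minor matches the version that submitted the job.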
@@ -6,13 +6,52 @@ import subprocess
 import json
 import platform
 import uuid
-from Deadline.Scripting import RepositoryUtils, FileUtils
+import re
+from Deadline.Scripting import RepositoryUtils, FileUtils, DirectoryUtils
+
+
+def get_openpype_version_from_path(path, build=True):
+    """Get OpenPype version from provided path.
+
+    Args:
+        path (str): Path to scan.
+        build (bool, optional): Get only builds, not sources.
+
+    Returns:
+        str or None: version of OpenPype if found.
+
+    """
+    # fix path for application bundle on macos
+    if platform.system().lower() == "darwin":
+        path = os.path.join(path, "Contents", "MacOS", "lib", "Python")
+
+    version_file = os.path.join(path, "openpype", "version.py")
+    if not os.path.isfile(version_file):
+        return None
+
+    # skip if the version is not a build
+    exe = os.path.join(path, "openpype_console.exe")
+    if platform.system().lower() in ["linux", "darwin"]:
+        exe = os.path.join(path, "openpype_console")
+
+    # if only builds are requested
+    if build and not os.path.isfile(exe):  # noqa: E501
+        print(f"  ! path is not a build: {path}")
+        return None
+
+    version = {}
+    with open(version_file, "r") as vf:
+        exec(vf.read(), version)
+
+    version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"])
+    return version_match[1]
+
+
 def get_openpype_executable():
     """Return OpenPype Executable from Event Plug-in Settings"""
     config = RepositoryUtils.GetPluginConfig("OpenPype")
-    return config.GetConfigEntryWithDefault("OpenPypeExecutable", "")
+    exe_list = config.GetConfigEntryWithDefault("OpenPypeExecutable", "")
+    dir_list = config.GetConfigEntryWithDefault(
+        "OpenPypeInstallationDirs", "")
+    return exe_list, dir_list


 def inject_openpype_environment(deadlinePlugin):
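The regular expression above keeps only the major.minor.patch core of whatever __version__ string the build ships. A quick illustration (the input strings are hypothetical):

    import re

    for raw in ("3.9.4", "3.9.4-nightly.2", "3.10.0+staging"):
        print(re.search(r"(\d+\.\d+.\d+).*", raw)[1])
    # 3.9.4
    # 3.9.4
    # 3.10.0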
@@ -25,16 +64,89 @@ def inject_openpype_environment(deadlinePlugin):
     print(">>> Injecting OpenPype environments ...")
     try:
         print(">>> Getting OpenPype executable ...")
-        exe_list = get_openpype_executable()
-        openpype_app = FileUtils.SearchFileList(exe_list)
-        if openpype_app == "":
+        exe_list, dir_list = get_openpype_executable()
+        openpype_versions = []
+        # if the job requires specific OpenPype version,
+        # let's go over all available and find a compatible build.
+        requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
+        if requested_version:
+            print((">>> Scanning for compatible requested "
+                   f"version {requested_version}"))
+            install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
+            if install_dir:
+                print(f"--- Looking for OpenPype at: {install_dir}")
+                sub_dirs = [
+                    f.path for f in os.scandir(install_dir)
+                    if f.is_dir()
+                ]
+                for subdir in sub_dirs:
+                    version = get_openpype_version_from_path(subdir)
+                    if not version:
+                        continue
+                    print(f"  - found: {version} - {subdir}")
+                    openpype_versions.append((version, subdir))
+
+        exe = FileUtils.SearchFileList(exe_list)
+        if openpype_versions:
+            # if looking for requested compatible version,
+            # add the implicitly specified to the list too.
+            print(f"Looking for OpenPype at: {os.path.dirname(exe)}")
+            version = get_openpype_version_from_path(
+                os.path.dirname(exe))
+            if version:
+                print(f"  - found: {version} - {os.path.dirname(exe)}")
+                openpype_versions.append((version, os.path.dirname(exe)))
+
+        if requested_version:
+            # sort detected versions
+            if openpype_versions:
+                # use natural sorting
+                openpype_versions.sort(
+                    key=lambda ver: [
+                        int(t) if t.isdigit() else t.lower()
+                        for t in re.split(r"(\d+)", ver[0])
+                    ])
+                print(("*** Latest available version found is "
+                       f"{openpype_versions[-1][0]}"))
+            requested_major, requested_minor, _ = requested_version.split(".")[:3]  # noqa: E501
+            compatible_versions = []
+            for version in openpype_versions:
+                v = version[0].split(".")[:3]
+                if v[0] == requested_major and v[1] == requested_minor:
+                    compatible_versions.append(version)
+            if not compatible_versions:
+                raise RuntimeError(
+                    ("Cannot find compatible version available "
+                     "for version {} requested by the job. "
+                     "Please add it through plugin configuration "
+                     "in Deadline or install it to configured "
+                     "directory.").format(requested_version))
+            # sort compatible versions and pick the last one
+            compatible_versions.sort(
+                key=lambda ver: [
+                    int(t) if t.isdigit() else t.lower()
+                    for t in re.split(r"(\d+)", ver[0])
+                ])
+            print(("*** Latest compatible version found is "
+                   f"{compatible_versions[-1][0]}"))
+            # create list of executables for different platform and let
+            # Deadline decide.
+            exe_list = [
+                os.path.join(
+                    compatible_versions[-1][1], "openpype_console.exe"),
+                os.path.join(
+                    compatible_versions[-1][1], "openpype_console")
+            ]
+            exe = FileUtils.SearchFileList(";".join(exe_list))
+        if exe == "":
             raise RuntimeError(
                 "OpenPype executable was not found " +
-                "in the semicolon separated list \"" + exe_list + "\". " +
+                "in the semicolon separated list " +
+                "\"" + ";".join(exe_list) + "\". " +
                 "The path to the render executable can be configured " +
                 "from the Plugin Configuration in the Deadline Monitor.")

-        print("--- OpenPype executable: {}".format(openpype_app))
+        print("--- OpenPype executable: {}".format(exe))

         # tempfile.TemporaryFile cannot be used because of locking
         temp_file_name = "{}_{}.json".format(
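The sort key used twice above implements a natural sort, so that for example 3.9.10 orders after 3.9.2, which a plain string sort would get wrong. A standalone sketch:

    import re

    def natural_key(version_str):
        # split digit runs out of the string so they compare as integers
        return [int(t) if t.isdigit() else t.lower()
                for t in re.split(r"(\d+)", version_str)]

    versions = ["3.10.0", "3.9.2", "3.9.10"]
    print(sorted(versions, key=natural_key))
    # ['3.9.2', '3.9.10', '3.10.0']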
@@ -45,7 +157,7 @@ def inject_openpype_environment(deadlinePlugin):
     print(">>> Temporary path: {}".format(export_url))

     args = [
-        openpype_app,
+        exe,
         "--headless",
         'extractenvironments',
         export_url
@@ -75,9 +187,9 @@ def inject_openpype_environment(deadlinePlugin):
     env["OPENPYPE_HEADLESS_MODE"] = "1"
     env["AVALON_TIMEOUT"] = "5000"

-    print(">>> Executing: {}".format(args))
+    print(">>> Executing: {}".format(" ".join(args)))
     std_output = subprocess.check_output(args,
-                                         cwd=os.path.dirname(openpype_app),
+                                         cwd=os.path.dirname(exe),
                                          env=env)
     print(">>> Process result {}".format(std_output))
@@ -7,11 +7,20 @@ Index=0
 Default=OpenPype Plugin for Deadline
 Description=Not configurable

+[OpenPypeInstallationDirs]
+Type=multilinemultifolder
+Label=Directories where OpenPype versions are installed
+Category=OpenPype Installation Directories
+CategoryOrder=0
+Index=0
+Default=C:\Program Files (x86)\OpenPype
+Description=Path or paths to directories where multiple versions of OpenPype might be installed. Enter every such path on separate lines.
+
 [OpenPypeExecutable]
 Type=multilinemultifilename
 Label=OpenPype Executable
 Category=OpenPype Executables
-CategoryOrder=0
+CategoryOrder=1
 Index=0
 Default=
 Description=The path to the OpenPype executable. Enter alternative paths on separate lines.
@@ -1,10 +1,19 @@
 #!/usr/bin/env python3

 from System.IO import Path
 from System.Text.RegularExpressions import Regex

 from Deadline.Plugins import PluginType, DeadlinePlugin
-from Deadline.Scripting import StringUtils, FileUtils, RepositoryUtils
+from Deadline.Scripting import (
+    StringUtils,
+    FileUtils,
+    DirectoryUtils,
+    RepositoryUtils
+)
+
+import re
+import os
+import platform

 ######################################################################
@@ -52,13 +61,115 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
        self.AddStdoutHandlerCallback(
            ".*Progress: (\d+)%.*").HandleCallback += self.HandleProgress

    @staticmethod
    def get_openpype_version_from_path(path, build=True):
        """Get OpenPype version from provided path.

        Args:
            path (str): Path to scan.
            build (bool, optional): Get only builds, not sources.

        Returns:
            str or None: Version of OpenPype if found.

        """
        # fix path for application bundle on macos
        if platform.system().lower() == "darwin":
            path = os.path.join(path, "Contents", "MacOS", "lib", "Python")

        version_file = os.path.join(path, "openpype", "version.py")
        if not os.path.isfile(version_file):
            return None

        # skip if the version is not a build
        exe = os.path.join(path, "openpype_console.exe")
        if platform.system().lower() in ["linux", "darwin"]:
            exe = os.path.join(path, "openpype_console")

        # if only builds are requested
        if build and not os.path.isfile(exe):  # noqa: E501
            print(f"  ! path is not a build: {path}")
            return None

        version = {}
        with open(version_file, "r") as vf:
            exec(vf.read(), version)

        version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"])
        return version_match[1]

    def RenderExecutable(self):
        exeList = self.GetConfigEntry("OpenPypeExecutable")
        exe = FileUtils.SearchFileList(exeList)
        job = self.GetJob()
        openpype_versions = []
        # if the job requires a specific OpenPype version,
        # go over all available ones and find a compatible build.
        requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
        if requested_version:
            self.LogInfo((
                "Scanning for compatible requested "
                f"version {requested_version}"))
            dir_list = self.GetConfigEntry("OpenPypeInstallationDirs")
            install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
            if install_dir:
                sub_dirs = [
                    f.path for f in os.scandir(install_dir)
                    if f.is_dir()
                ]
                for subdir in sub_dirs:
                    version = self.get_openpype_version_from_path(subdir)
                    if not version:
                        continue
                    openpype_versions.append((version, subdir))

        exe_list = self.GetConfigEntry("OpenPypeExecutable")
        exe = FileUtils.SearchFileList(exe_list)
        if openpype_versions:
            # if looking for a requested compatible version,
            # add the implicitly specified one to the list too.
            version = self.get_openpype_version_from_path(
                os.path.dirname(exe))
            if version:
                openpype_versions.append((version, os.path.dirname(exe)))

        if requested_version:
            # sort detected versions
            if openpype_versions:
                openpype_versions.sort(
                    key=lambda ver: [
                        int(t) if t.isdigit() else t.lower()
                        for t in re.split(r"(\d+)", ver[0])
                    ])
            requested_major, requested_minor, _ = requested_version.split(".")[:3]  # noqa: E501
            compatible_versions = []
            for version in openpype_versions:
                v = version[0].split(".")[:3]
                if v[0] == requested_major and v[1] == requested_minor:
                    compatible_versions.append(version)
            if not compatible_versions:
                self.FailRender(("Cannot find a compatible version available "
                                 "for version {} requested by the job. "
                                 "Please add it through the plugin "
                                 "configuration in Deadline or install it "
                                 "to the configured "
                                 "directory.").format(requested_version))
            # sort compatible versions and pick the last one
            compatible_versions.sort(
                key=lambda ver: [
                    int(t) if t.isdigit() else t.lower()
                    for t in re.split(r"(\d+)", ver[0])
                ])
            # create a list of executables for different platforms and let
            # Deadline decide.
            exe_list = [
                os.path.join(
                    compatible_versions[-1][1], "openpype_console.exe"),
                os.path.join(
                    compatible_versions[-1][1], "openpype_console")
            ]
            exe = FileUtils.SearchFileList(";".join(exe_list))

        if exe == "":
            self.FailRender(
                "OpenPype executable was not found " +
                "in the semicolon separated list \"" + exeList + "\". " +
                "in the semicolon separated list " +
                "\"" + ";".join(exe_list) + "\". " +
                "The path to the render executable can be configured " +
                "from the Plugin Configuration in the Deadline Monitor.")
        return exe
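A minimal sketch of the natural-sort key used in RenderExecutable() above, assuming plain "major.minor.patch" strings; digit runs compare numerically, so "3.10.0" correctly sorts after "3.9.5":

import re

def natural_key(version_string):
    # re.split(r"(\d+)", ...) keeps the digit runs as separate tokens
    return [
        int(token) if token.isdigit() else token.lower()
        for token in re.split(r"(\d+)", version_string)
    ]

print(sorted(["3.9.5", "3.10.0", "3.9.10"], key=natural_key))
# -> ['3.9.5', '3.9.10', '3.10.0']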
@@ -7,6 +7,7 @@ import threading
import datetime
import time
import queue
import collections
import appdirs
import pymongo
@@ -309,7 +310,20 @@ class CustomEventHubSession(ftrack_api.session.Session):

        # Currently pending operations.
        self.recorded_operations = ftrack_api.operation.Operations()
        self.record_operations = True

        # OpenPype change - in the new API, operations are properties
        new_api = hasattr(self.__class__, "record_operations")

        if new_api:
            self._record_operations = collections.defaultdict(
                lambda: True
            )
            self._auto_populate = collections.defaultdict(
                lambda: auto_populate
            )
        else:
            self.record_operations = True
            self.auto_populate = auto_populate

        self.cache_key_maker = cache_key_maker
        if self.cache_key_maker is None:

@@ -328,6 +342,9 @@ class CustomEventHubSession(ftrack_api.session.Session):
        if cache is not None:
            self.cache.caches.append(cache)

        if new_api:
            self.merge_lock = threading.RLock()

        self._managed_request = None
        self._request = requests.Session()
        self._request.auth = ftrack_api.session.SessionAuthentication(

@@ -335,8 +352,6 @@ class CustomEventHubSession(ftrack_api.session.Session):
        )
        self.request_timeout = timeout

        self.auto_populate = auto_populate

        # Fetch server information and in doing so also check credentials.
        self._server_information = self._fetch_server_information()
openpype/modules/shotgrid/README.md (new file, 19 lines)

@@ -0,0 +1,19 @@
## Shotgrid Module

### Pre-requisites

Install and launch a [shotgrid leecher](https://github.com/Ellipsanime/shotgrid-leecher) server.

### Quickstart

The goal of this tutorial is to synchronize an existing Shotgrid project with OpenPype.

- Activate the shotgrid module in the **system settings** and fill in the shotgrid leecher server API URL

- Create a new OpenPype project with the **project manager**

- Fill in the Shotgrid authentication info (URL, script name, API key) and the Shotgrid project ID related to this OpenPype project in the **project settings**

- Use the batch interface (Tray > shotgrid > Launch batch), select your project and click "batch"

- You can now access your Shotgrid entities within the **avalon launcher** and publish information to Shotgrid with **pyblish**
openpype/modules/shotgrid/__init__.py (new file, 5 lines)

@@ -0,0 +1,5 @@
from .shotgrid_module import (
    ShotgridModule,
)

__all__ = ("ShotgridModule",)
openpype/modules/shotgrid/lib/__init__.py (new empty file)

openpype/modules/shotgrid/lib/const.py (new file, 1 line)

@@ -0,0 +1 @@
MODULE_NAME = "shotgrid"
openpype/modules/shotgrid/lib/credentials.py (new file, 125 lines)

@@ -0,0 +1,125 @@
from urllib.parse import urlparse

import shotgun_api3
from shotgun_api3.shotgun import AuthenticationFault

from openpype.lib import OpenPypeSecureRegistry, OpenPypeSettingsRegistry
from openpype.modules.shotgrid.lib.record import Credentials


def _get_shotgrid_secure_key(hostname, key):
    """Secure item key for entered hostname."""
    return f"shotgrid/{hostname}/{key}"


def _get_secure_value_and_registry(
    hostname,
    name,
):
    key = _get_shotgrid_secure_key(hostname, name)
    registry = OpenPypeSecureRegistry(key)
    return registry.get_item(name, None), registry


def get_shotgrid_hostname(shotgrid_url):
    if not shotgrid_url:
        raise Exception("Shotgrid URL cannot be null")
    valid_shotgrid_url = (
        f"//{shotgrid_url}" if "//" not in shotgrid_url else shotgrid_url
    )
    return urlparse(valid_shotgrid_url).hostname


# Credentials storing functions (using keyring)


def get_credentials(shotgrid_url):
    hostname = get_shotgrid_hostname(shotgrid_url)
    if not hostname:
        return None
    login_value, _ = _get_secure_value_and_registry(
        hostname,
        Credentials.login_key_prefix(),
    )
    password_value, _ = _get_secure_value_and_registry(
        hostname,
        Credentials.password_key_prefix(),
    )
    return Credentials(login_value, password_value)


def save_credentials(login, password, shotgrid_url):
    hostname = get_shotgrid_hostname(shotgrid_url)
    _, login_registry = _get_secure_value_and_registry(
        hostname,
        Credentials.login_key_prefix(),
    )
    _, password_registry = _get_secure_value_and_registry(
        hostname,
        Credentials.password_key_prefix(),
    )
    clear_credentials(shotgrid_url)
    login_registry.set_item(Credentials.login_key_prefix(), login)
    password_registry.set_item(Credentials.password_key_prefix(), password)


def clear_credentials(shotgrid_url):
    hostname = get_shotgrid_hostname(shotgrid_url)
    login_value, login_registry = _get_secure_value_and_registry(
        hostname,
        Credentials.login_key_prefix(),
    )
    password_value, password_registry = _get_secure_value_and_registry(
        hostname,
        Credentials.password_key_prefix(),
    )

    if login_value is not None:
        login_registry.delete_item(Credentials.login_key_prefix())

    if password_value is not None:
        password_registry.delete_item(Credentials.password_key_prefix())


# Login storing functions (using json)


def get_local_login():
    reg = OpenPypeSettingsRegistry()
    try:
        return str(reg.get_item("shotgrid_login"))
    except Exception:
        return None


def save_local_login(login):
    reg = OpenPypeSettingsRegistry()
    reg.set_item("shotgrid_login", login)


def clear_local_login():
    reg = OpenPypeSettingsRegistry()
    reg.delete_item("shotgrid_login")


def check_credentials(
    login,
    password,
    shotgrid_url,
):
    if not shotgrid_url or not login or not password:
        return False
    try:
        session = shotgun_api3.Shotgun(
            shotgrid_url,
            login=login,
            password=password,
        )
        session.preferences_read()
        session.close()
    except AuthenticationFault:
        return False
    return True
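A minimal usage sketch of the helpers above; the URL and account values are invented for illustration:

from openpype.modules.shotgrid.lib import credentials

url = "https://mystudio.shotgunstudio.com"  # hypothetical server
credentials.save_credentials("artist", "hunter2", url)

creds = credentials.get_credentials(url)
if creds and not creds.is_empty():
    print("stored login for {}: {}".format(url, creds.login))

credentials.clear_credentials(url)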
openpype/modules/shotgrid/lib/record.py (new file, 20 lines)

@@ -0,0 +1,20 @@
class Credentials:
    login = None
    password = None

    def __init__(self, login, password) -> None:
        super().__init__()
        self.login = login
        self.password = password

    def is_empty(self):
        return not (self.login and self.password)

    @staticmethod
    def login_key_prefix():
        return "login"

    @staticmethod
    def password_key_prefix():
        return "password"
openpype/modules/shotgrid/lib/settings.py (new file, 18 lines)

@@ -0,0 +1,18 @@
from openpype.api import get_system_settings, get_project_settings
from openpype.modules.shotgrid.lib.const import MODULE_NAME


def get_shotgrid_project_settings(project):
    return get_project_settings(project).get(MODULE_NAME, {})


def get_shotgrid_settings():
    return get_system_settings().get("modules", {}).get(MODULE_NAME, {})


def get_shotgrid_servers():
    return get_shotgrid_settings().get("shotgrid_settings", {})


def get_leecher_backend_url():
    return get_shotgrid_settings().get("leecher_backend_url")
@@ -0,0 +1,100 @@
import os

import pyblish.api
from openpype.lib.mongo import OpenPypeMongoConnection


class CollectShotgridEntities(pyblish.api.ContextPlugin):
    """Collect Shotgrid entities according to the current context."""

    order = pyblish.api.CollectorOrder + 0.499
    label = "Shotgrid entities"

    def process(self, context):

        avalon_project = context.data.get("projectEntity")
        avalon_asset = context.data.get("assetEntity")
        avalon_task_name = os.getenv("AVALON_TASK")

        self.log.info(avalon_project)
        self.log.info(avalon_asset)

        sg_project = _get_shotgrid_project(context)
        sg_task = _get_shotgrid_task(
            avalon_project,
            avalon_asset,
            avalon_task_name
        )
        sg_entity = _get_shotgrid_entity(avalon_project, avalon_asset)

        if sg_project:
            context.data["shotgridProject"] = sg_project
            self.log.info(
                "Collected corresponding shotgrid project: {}".format(
                    sg_project
                )
            )

        if sg_task:
            context.data["shotgridTask"] = sg_task
            self.log.info(
                "Collected corresponding shotgrid task: {}".format(sg_task)
            )

        if sg_entity:
            context.data["shotgridEntity"] = sg_entity
            self.log.info(
                "Collected corresponding shotgrid entity: {}".format(sg_entity)
            )

    def _find_existing_version(self, code, context):

        filters = [
            ["project", "is", context.data.get("shotgridProject")],
            ["sg_task", "is", context.data.get("shotgridTask")],
            ["entity", "is", context.data.get("shotgridEntity")],
            ["code", "is", code],
        ]

        sg = context.data.get("shotgridSession")
        return sg.find_one("Version", filters, [])


def _get_shotgrid_collection(project):
    client = OpenPypeMongoConnection.get_mongo_client()
    return client.get_database("shotgrid_openpype").get_collection(project)


def _get_shotgrid_project(context):
    shotgrid_project_id = context.data["project_settings"].get(
        "shotgrid_project_id")
    if shotgrid_project_id:
        return {"type": "Project", "id": shotgrid_project_id}
    return {}


def _get_shotgrid_task(avalon_project, avalon_asset, avalon_task):
    sg_col = _get_shotgrid_collection(avalon_project["name"])
    shotgrid_task_hierarchy_row = sg_col.find_one(
        {
            "type": "Task",
            "_id": {"$regex": "^" + avalon_task + "_[0-9]*"},
            "parent": {"$regex": ".*," + avalon_asset["name"] + ","},
        }
    )
    if shotgrid_task_hierarchy_row:
        return {"type": "Task", "id": shotgrid_task_hierarchy_row["src_id"]}
    return {}


def _get_shotgrid_entity(avalon_project, avalon_asset):
    sg_col = _get_shotgrid_collection(avalon_project["name"])
    shotgrid_entity_hierarchy_row = sg_col.find_one(
        {"_id": avalon_asset["name"]}
    )
    if shotgrid_entity_hierarchy_row:
        return {
            "type": shotgrid_entity_hierarchy_row["type"],
            "id": shotgrid_entity_hierarchy_row["src_id"],
        }
    return {}
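For illustration, the kind of hierarchy row those "$regex" lookups match against; the exact document shape is an assumption inferred from the queries, not taken from the leecher's schema:

# Hypothetical row in the "shotgrid_openpype" database for one task:
task_row = {
    "_id": "animation_123",               # "<task name>_<number>"
    "type": "Task",
    "src_id": 456,                        # Shotgrid entity id
    "parent": ",myproject,seq01,sh010,",  # comma-delimited ancestor path
}
# _get_shotgrid_task(project, asset, "animation") would match this row via
# {"_id": {"$regex": "^animation_[0-9]*"}} plus a parent regex containing
# the asset name, and return {"type": "Task", "id": 456}.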
@@ -0,0 +1,123 @@
import os

import pyblish.api
import shotgun_api3
from shotgun_api3.shotgun import AuthenticationFault

from openpype.lib import OpenPypeSettingsRegistry
from openpype.modules.shotgrid.lib.settings import (
    get_shotgrid_servers,
    get_shotgrid_project_settings,
)


class CollectShotgridSession(pyblish.api.ContextPlugin):
    """Collect Shotgrid session using user credentials."""

    order = pyblish.api.CollectorOrder
    label = "Shotgrid user session"

    def process(self, context):

        certificate_path = os.getenv("SHOTGUN_API_CACERTS")
        if certificate_path is None or not os.path.exists(certificate_path):
            self.log.info(
                "SHOTGUN_API_CACERTS does not contain a valid \
path: {}".format(
                    certificate_path
                )
            )
            certificate_path = get_shotgrid_certificate()
            self.log.info("Get certificate from shotgun_api3")

        if not os.path.exists(certificate_path):
            self.log.error(
                "Could not find certificate in shotgun_api3: \
{}".format(
                    certificate_path
                )
            )
            return

        set_shotgrid_certificate(certificate_path)
        self.log.info("Set certificate: {}".format(certificate_path))

        avalon_project = os.getenv("AVALON_PROJECT")

        shotgrid_settings = get_shotgrid_project_settings(avalon_project)
        self.log.info("shotgrid settings: {}".format(shotgrid_settings))
        shotgrid_servers_settings = get_shotgrid_servers()
        self.log.info(
            "shotgrid_servers_settings: {}".format(shotgrid_servers_settings)
        )

        shotgrid_server = shotgrid_settings.get("shotgrid_server", "")
        if not shotgrid_server:
            self.log.error(
                "No Shotgrid server found, please choose one "
                "in the OpenPype project settings"
            )

        shotgrid_server_setting = shotgrid_servers_settings.get(
            shotgrid_server, {}
        )
        shotgrid_url = shotgrid_server_setting.get("shotgrid_url", "")

        shotgrid_script_name = shotgrid_server_setting.get(
            "shotgrid_script_name", ""
        )
        shotgrid_script_key = shotgrid_server_setting.get(
            "shotgrid_script_key", ""
        )
        if not shotgrid_script_name and not shotgrid_script_key:
            self.log.error(
                "No Shotgrid API credential found, please enter "
                "script name and script key in OpenPype settings"
            )

        login = get_login() or os.getenv("OPENPYPE_SG_USER")

        if not login:
            self.log.error(
                "No Shotgrid login found, please "
                "log in to Shotgrid within the OpenPype Tray"
            )

        session = shotgun_api3.Shotgun(
            base_url=shotgrid_url,
            script_name=shotgrid_script_name,
            api_key=shotgrid_script_key,
            sudo_as_login=login,
        )

        try:
            session.preferences_read()
        except AuthenticationFault:
            raise ValueError(
                "Could not connect to Shotgrid {} with user {}".format(
                    shotgrid_url, login
                )
            )

        self.log.info(
            "Logged in to Shotgrid {} with user {}".format(shotgrid_url, login)
        )
        context.data["shotgridSession"] = session
        context.data["shotgridUser"] = login


def get_shotgrid_certificate():
    shotgun_api_path = os.path.dirname(shotgun_api3.__file__)
    return os.path.join(shotgun_api_path, "lib", "certifi", "cacert.pem")


def set_shotgrid_certificate(certificate):
    os.environ["SHOTGUN_API_CACERTS"] = certificate


def get_login():
    reg = OpenPypeSettingsRegistry()
    try:
        return str(reg.get_item("shotgrid_login"))
    except Exception:
        return None
@@ -0,0 +1,77 @@
import os
import pyblish.api


class IntegrateShotgridPublish(pyblish.api.InstancePlugin):
    """
    Create PublishedFile entries from representations and add them to the
    version. If a representation is tagged for shotgrid review, its path is
    also set as path to movie (for a movie file) or path to frames (for an
    image sequence).
    """

    order = pyblish.api.IntegratorOrder + 0.499
    label = "Shotgrid Published Files"

    def process(self, instance):

        context = instance.context

        self.sg = context.data.get("shotgridSession")

        shotgrid_version = instance.data.get("shotgridVersion")

        for representation in instance.data.get("representations", []):

            local_path = representation.get("published_path")
            code = os.path.basename(local_path)

            if representation.get("tags", []):
                continue

            published_file = self._find_existing_publish(
                code, context, shotgrid_version
            )

            published_file_data = {
                "project": context.data.get("shotgridProject"),
                "code": code,
                "entity": context.data.get("shotgridEntity"),
                "task": context.data.get("shotgridTask"),
                "version": shotgrid_version,
                "path": {"local_path": local_path},
            }
            if not published_file:
                published_file = self._create_published(published_file_data)
                self.log.info(
                    "Created Shotgrid PublishedFile: {}".format(published_file)
                )
            else:
                self.sg.update(
                    published_file["type"],
                    published_file["id"],
                    published_file_data,
                )
                self.log.info(
                    "Updated Shotgrid PublishedFile: {}".format(published_file)
                )

            if instance.data["family"] == "image":
                self.sg.upload_thumbnail(
                    published_file["type"], published_file["id"], local_path
                )
            instance.data["shotgridPublishedFile"] = published_file

    def _find_existing_publish(self, code, context, shotgrid_version):

        filters = [
            ["project", "is", context.data.get("shotgridProject")],
            ["task", "is", context.data.get("shotgridTask")],
            ["entity", "is", context.data.get("shotgridEntity")],
            ["version", "is", shotgrid_version],
            ["code", "is", code],
        ]
        return self.sg.find_one("PublishedFile", filters, [])

    def _create_published(self, published_file_data):

        return self.sg.create("PublishedFile", published_file_data)
@@ -0,0 +1,92 @@
import os
import pyblish.api


class IntegrateShotgridVersion(pyblish.api.InstancePlugin):
    """Integrate Shotgrid Version"""

    order = pyblish.api.IntegratorOrder + 0.497
    label = "Shotgrid Version"

    sg = None

    def process(self, instance):

        context = instance.context
        self.sg = context.data.get("shotgridSession")

        # TODO: Use path template solver to build version code from settings
        anatomy = instance.data.get("anatomyData", {})
        code = "_".join(
            [
                anatomy["project"]["code"],
                anatomy["parent"],
                anatomy["asset"],
                anatomy["task"]["name"],
                "v{:03}".format(int(anatomy["version"])),
            ]
        )

        version = self._find_existing_version(code, context)

        if not version:
            version = self._create_version(code, context)
            self.log.info("Created Shotgrid version: {}".format(version))
        else:
            self.log.info("Using existing Shotgrid version: {}".format(version))

        data_to_update = {}
        status = context.data.get("intent", {}).get("value")
        if status:
            data_to_update["sg_status_list"] = status

        for representation in instance.data.get("representations", []):
            local_path = representation.get("published_path")
            code = os.path.basename(local_path)

            if "shotgridreview" in representation.get("tags", []):

                if representation["ext"] in ["mov", "avi"]:
                    self.log.info(
                        "Uploading review: {} for Shotgrid version {}".format(
                            local_path, version.get("id")
                        )
                    )
                    self.sg.upload(
                        "Version",
                        version.get("id"),
                        local_path,
                        field_name="sg_uploaded_movie",
                    )

                    data_to_update["sg_path_to_movie"] = local_path

                elif representation["ext"] in ["jpg", "png", "exr", "tga"]:
                    path_to_frame = local_path.replace("0000", "#")
                    data_to_update["sg_path_to_frames"] = path_to_frame

        self.log.info("Updating Shotgrid version with {}".format(data_to_update))
        self.sg.update("Version", version["id"], data_to_update)

        instance.data["shotgridVersion"] = version

    def _find_existing_version(self, code, context):

        filters = [
            ["project", "is", context.data.get("shotgridProject")],
            ["sg_task", "is", context.data.get("shotgridTask")],
            ["entity", "is", context.data.get("shotgridEntity")],
            ["code", "is", code],
        ]
        return self.sg.find_one("Version", filters, [])

    def _create_version(self, code, context):

        version_data = {
            "project": context.data.get("shotgridProject"),
            "sg_task": context.data.get("shotgridTask"),
            "entity": context.data.get("shotgridEntity"),
            "code": code,
        }

        return self.sg.create("Version", version_data)
@@ -0,0 +1,38 @@
import pyblish.api
import openpype.api


class ValidateShotgridUser(pyblish.api.ContextPlugin):
    """
    Check that the user is valid and has access to the project.
    """

    label = "Validate Shotgrid User"
    order = openpype.api.ValidateContentsOrder

    def process(self, context):
        sg = context.data.get("shotgridSession")

        login = context.data.get("shotgridUser")
        self.log.info("Shotgrid login set in OpenPype is {}".format(login))
        project = context.data.get("shotgridProject")
        self.log.info("Current Shotgrid project is {}".format(project))

        if not (login and sg and project):
            raise KeyError()

        user = sg.find_one("HumanUser", [["login", "is", login]], ["projects"])

        self.log.info(user)
        self.log.info(login)
        user_projects_id = [p["id"] for p in user.get("projects", [])]
        if not project.get("id") in user_projects_id:
            raise PermissionError(
                "Login {} does not have access to the project {}".format(
                    login, project
                )
            )

        self.log.info(
            "Login {} has access to the project {}".format(login, project)
        )
openpype/modules/shotgrid/server/README.md (new file, 5 lines)

@@ -0,0 +1,5 @@
### Shotgrid server

Please refer to the external project that covers OpenPype/Shotgrid communication:
- https://github.com/Ellipsanime/shotgrid-leecher
openpype/modules/shotgrid/shotgrid_module.py (new file, 58 lines)

@@ -0,0 +1,58 @@
import os

from openpype_interfaces import (
    ITrayModule,
    IPluginPaths,
    ILaunchHookPaths,
)

from openpype.modules import OpenPypeModule

SHOTGRID_MODULE_DIR = os.path.dirname(os.path.abspath(__file__))


class ShotgridModule(
    OpenPypeModule, ITrayModule, IPluginPaths, ILaunchHookPaths
):
    leecher_manager_url = None
    name = "shotgrid"
    enabled = False
    project_id = None
    tray_wrapper = None

    def initialize(self, modules_settings):
        shotgrid_settings = modules_settings.get(self.name, dict())
        self.enabled = shotgrid_settings.get("enabled", False)
        self.leecher_manager_url = shotgrid_settings.get(
            "leecher_manager_url", ""
        )

    def connect_with_modules(self, enabled_modules):
        pass

    def get_global_environments(self):
        return {"PROJECT_ID": self.project_id}

    def get_plugin_paths(self):
        return {
            "publish": [
                os.path.join(SHOTGRID_MODULE_DIR, "plugins", "publish")
            ]
        }

    def get_launch_hook_paths(self):
        return os.path.join(SHOTGRID_MODULE_DIR, "hooks")

    def tray_init(self):
        from .tray.shotgrid_tray import ShotgridTrayWrapper

        self.tray_wrapper = ShotgridTrayWrapper(self)

    def tray_start(self):
        return self.tray_wrapper.validate()

    def tray_exit(self, *args, **kwargs):
        return self.tray_wrapper

    def tray_menu(self, tray_menu):
        return self.tray_wrapper.tray_menu(tray_menu)
@@ -0,0 +1,34 @@
import pytest
from assertpy import assert_that

import openpype.modules.shotgrid.lib.credentials as sut


def test_missing_shotgrid_url():
    # arrange
    url = ""
    # act & assert
    with pytest.raises(Exception) as ex:
        sut.get_shotgrid_hostname(url)
    assert_that(str(ex.value)).is_equal_to("Shotgrid URL cannot be null")


def test_full_shotgrid_url():
    # arrange
    url = "https://shotgrid.com/myinstance"
    # act
    actual = sut.get_shotgrid_hostname(url)
    # assert
    assert_that(actual).is_not_empty()
    assert_that(actual).is_equal_to("shotgrid.com")


def test_incomplete_shotgrid_url():
    # arrange
    url = "shotgrid.com/myinstance"
    # act
    actual = sut.get_shotgrid_hostname(url)
    # assert
    assert_that(actual).is_not_empty()
    assert_that(actual).is_equal_to("shotgrid.com")
openpype/modules/shotgrid/tray/credential_dialog.py (new file, 201 lines)

@@ -0,0 +1,201 @@
import os
from Qt import QtCore, QtWidgets, QtGui

from openpype import style
from openpype import resources
from openpype.modules.shotgrid.lib import settings, credentials


class CredentialsDialog(QtWidgets.QDialog):
    SIZE_W = 450
    SIZE_H = 200

    _module = None
    _is_logged = False
    url_label = None
    login_label = None
    password_label = None
    url_input = None
    login_input = None
    password_input = None
    input_layout = None
    login_button = None
    buttons_layout = None
    main_widget = None

    login_changed = QtCore.Signal()

    def __init__(self, module, parent=None):
        super(CredentialsDialog, self).__init__(parent)

        self._module = module
        self._is_logged = False

        self.setWindowTitle("OpenPype - Shotgrid Login")

        icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
        self.setWindowIcon(icon)

        self.setWindowFlags(
            QtCore.Qt.WindowCloseButtonHint
            | QtCore.Qt.WindowMinimizeButtonHint
        )
        self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H))
        self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100))
        self.setStyleSheet(style.load_stylesheet())

        self.ui_init()

    def ui_init(self):
        self.url_label = QtWidgets.QLabel("Shotgrid server:")
        self.login_label = QtWidgets.QLabel("Login:")
        self.password_label = QtWidgets.QLabel("Password:")

        self.url_input = QtWidgets.QComboBox()
        # self.url_input.setReadOnly(True)

        self.login_input = QtWidgets.QLineEdit()
        self.login_input.setPlaceholderText("login")

        self.password_input = QtWidgets.QLineEdit()
        self.password_input.setPlaceholderText("password")
        self.password_input.setEchoMode(QtWidgets.QLineEdit.Password)

        self.error_label = QtWidgets.QLabel("")
        self.error_label.setStyleSheet("color: red;")
        self.error_label.setWordWrap(True)
        self.error_label.hide()

        self.input_layout = QtWidgets.QFormLayout()
        self.input_layout.setContentsMargins(10, 15, 10, 5)

        self.input_layout.addRow(self.url_label, self.url_input)
        self.input_layout.addRow(self.login_label, self.login_input)
        self.input_layout.addRow(self.password_label, self.password_input)
        self.input_layout.addRow(self.error_label)

        self.login_button = QtWidgets.QPushButton("Login")
        self.login_button.setToolTip("Log in to the Shotgrid instance")
        self.login_button.clicked.connect(self._on_shotgrid_login_clicked)

        self.logout_button = QtWidgets.QPushButton("Logout")
        self.logout_button.setToolTip("Log out of the Shotgrid instance")
        self.logout_button.clicked.connect(self._on_shotgrid_logout_clicked)

        self.buttons_layout = QtWidgets.QHBoxLayout()
        self.buttons_layout.addWidget(self.logout_button)
        self.buttons_layout.addWidget(self.login_button)

        self.main_widget = QtWidgets.QVBoxLayout(self)
        self.main_widget.addLayout(self.input_layout)
        self.main_widget.addLayout(self.buttons_layout)
        self.setLayout(self.main_widget)

    def show(self, *args, **kwargs):
        super(CredentialsDialog, self).show(*args, **kwargs)
        self._fill_shotgrid_url()
        self._fill_shotgrid_login()

    def _fill_shotgrid_url(self):
        servers = settings.get_shotgrid_servers()

        if servers:
            for _, v in servers.items():
                self.url_input.addItem("{}".format(v.get('shotgrid_url')))
            self._valid_input(self.url_input)
            self.login_button.show()
            self.logout_button.show()
            enabled = True
        else:
            self.set_error("Ask your admin to add a Shotgrid server in settings")
            self._invalid_input(self.url_input)
            self.login_button.hide()
            self.logout_button.hide()
            enabled = False

        self.login_input.setEnabled(enabled)
        self.password_input.setEnabled(enabled)

    def _fill_shotgrid_login(self):
        login = credentials.get_local_login()

        if login:
            self.login_input.setText(login)

    def _clear_shotgrid_login(self):
        self.login_input.setText("")
        self.password_input.setText("")

    def _on_shotgrid_login_clicked(self):
        login = self.login_input.text().strip()
        password = self.password_input.text().strip()
        missing = []

        if login == "":
            missing.append("login")
            self._invalid_input(self.login_input)

        if password == "":
            missing.append("password")
            self._invalid_input(self.password_input)

        url = self.url_input.currentText()
        if url == "":
            missing.append("url")
            self._invalid_input(self.url_input)

        if len(missing) > 0:
            self.set_error("You didn't enter {}".format(" and ".join(missing)))
            return

        # if credentials.check_credentials(
        #     login=login,
        #     password=password,
        #     shotgrid_url=url,
        # ):
        credentials.save_local_login(
            login=login
        )
        os.environ['OPENPYPE_SG_USER'] = login
        self._on_login()

        self.set_error("CANT LOGIN")

    def _on_shotgrid_logout_clicked(self):
        credentials.clear_local_login()
        del os.environ['OPENPYPE_SG_USER']
        self._clear_shotgrid_login()
        self._on_logout()

    def set_error(self, msg):
        self.error_label.setText(msg)
        self.error_label.show()

    def _on_login(self):
        self._is_logged = True
        self.login_changed.emit()
        self._close_widget()

    def _on_logout(self):
        self._is_logged = False
        self.login_changed.emit()

    def _close_widget(self):
        self.hide()

    def _valid_input(self, input_widget):
        input_widget.setStyleSheet("")

    def _invalid_input(self, input_widget):
        input_widget.setStyleSheet("border: 1px solid red;")

    def login_with_credentials(
        self, url, login, password
    ):
        verification = credentials.check_credentials(url, login, password)
        if verification:
            credentials.save_credentials(login, password, False)
            self._module.set_credentials_to_env(login, password)
            self.set_credentials(login, password)
            self.login_changed.emit()
        return verification
openpype/modules/shotgrid/tray/shotgrid_tray.py (new file, 75 lines)

@@ -0,0 +1,75 @@
import os
import webbrowser

from Qt import QtWidgets

from openpype.modules.shotgrid.lib import credentials
from openpype.modules.shotgrid.tray.credential_dialog import (
    CredentialsDialog,
)


class ShotgridTrayWrapper:
    module = None
    credentials_dialog = None
    logged_user_label = None

    def __init__(self, module):
        self.module = module
        self.credentials_dialog = CredentialsDialog(module)
        self.credentials_dialog.login_changed.connect(self.set_login_label)
        self.logged_user_label = QtWidgets.QAction("")
        self.logged_user_label.setDisabled(True)
        self.set_login_label()

    def show_batch_dialog(self):
        if self.module.leecher_manager_url:
            webbrowser.open(self.module.leecher_manager_url)

    def show_connect_dialog(self):
        self.show_credential_dialog()

    def show_credential_dialog(self):
        self.credentials_dialog.show()
        self.credentials_dialog.activateWindow()
        self.credentials_dialog.raise_()

    def set_login_label(self):
        login = credentials.get_local_login()
        if login:
            self.logged_user_label.setText("{}".format(login))
        else:
            self.logged_user_label.setText("No user logged in")

    def tray_menu(self, tray_menu):
        # Add login to user menu
        menu = QtWidgets.QMenu("Shotgrid", tray_menu)
        show_connect_action = QtWidgets.QAction("Connect to Shotgrid", menu)
        show_connect_action.triggered.connect(self.show_connect_dialog)
        menu.addAction(self.logged_user_label)
        menu.addSeparator()
        menu.addAction(show_connect_action)
        tray_menu.addMenu(menu)

        # Add manager to Admin menu
        for m in tray_menu.findChildren(QtWidgets.QMenu):
            if m.title() == "Admin":
                shotgrid_manager_action = QtWidgets.QAction(
                    "Shotgrid manager", menu
                )
                shotgrid_manager_action.triggered.connect(
                    self.show_batch_dialog
                )
                m.addAction(shotgrid_manager_action)

    def validate(self):
        login = credentials.get_local_login()

        if not login:
            self.show_credential_dialog()
        else:
            os.environ["OPENPYPE_SG_USER"] = login

        return True
@@ -57,15 +57,7 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
        audio_inputs.insert(0, empty)

        # create cmd
        cmd = path_to_subprocess_arg(self.ffmpeg_path) + " "
        cmd += self.create_cmd(audio_inputs)
        cmd += path_to_subprocess_arg(audio_temp_fpath)

        # run subprocess
        self.log.debug("Executing: {}".format(cmd))
        openpype.api.run_subprocess(
            cmd, shell=True, logger=self.log
        )
        self.mix_audio(audio_inputs, audio_temp_fpath)

        # remove empty
        os.remove(empty["mediaPath"])

@@ -245,46 +237,80 @@ class ExtractOtioAudioTracks(pyblish.api.ContextPlugin):
            "durationSec": max_duration_sec
        }

    def create_cmd(self, inputs):
    def mix_audio(self, audio_inputs, audio_temp_fpath):
        """Create multiple-input command string

        Args:
            inputs (list): list of input dicts. Order matters.
            audio_inputs (list): list of input dicts. Order matters.

        Returns:
            str: the command body

        """
        longest_input = 0
        for audio_input in audio_inputs:
            audio_len = audio_input["durationSec"]
            if audio_len > longest_input:
                longest_input = audio_len

        # create cmd segments
        _inputs = ""
        _filters = "-filter_complex \""
        _channels = ""
        for index, input in enumerate(inputs):
            input_format = input.copy()
            input_format.update({"i": index})
            input_format["mediaPath"] = path_to_subprocess_arg(
                input_format["mediaPath"]
        input_args = []
        filters = []
        tag_names = []
        for index, audio_input in enumerate(audio_inputs):
            input_args.extend([
                "-ss", str(audio_input["startSec"]),
                "-t", str(audio_input["durationSec"]),
                "-i", audio_input["mediaPath"]
            ])

            # Output tag of a filtered audio input
            tag_name = "[r{}]".format(index)
            tag_names.append(tag_name)
            # Delay the audio by the item's delay
            filters.append("[{}]adelay={}:all=1{}".format(
                index, audio_input["delayMilSec"], tag_name
            ))

        # Mixing filter
        # - dropout transition (when audio gets louder) is set to be
        #   higher than any input audio item
        # - volume is set to the number of inputs - each mix adds 1/n volume
        #   where n is the input index (for more info read the ffmpeg docs and
        #   send a giftcard to the contributor)
        filters.append(
            (
                "{}amix=inputs={}:duration=first:"
                "dropout_transition={},volume={}[a]"
            ).format(
                "".join(tag_names),
                len(audio_inputs),
                (longest_input * 1000) + 1000,
                len(audio_inputs),
            )
        )

            _inputs += (
                "-ss {startSec} "
                "-t {durationSec} "
                "-i {mediaPath} "
            ).format(**input_format)
        # Store filters in a file (separated by ',')
        # - this is to avoid the "too long command" issue in ffmpeg
        with tempfile.NamedTemporaryFile(
            delete=False, mode="w", suffix=".txt"
        ) as tmp_file:
            filters_tmp_filepath = tmp_file.name
            tmp_file.write(",".join(filters))

            _filters += "[{i}]adelay={delayMilSec}:all=1[r{i}]; ".format(
                **input_format)
            _channels += "[r{}]".format(index)
        args = [self.ffmpeg_path]
        args.extend(input_args)
        args.extend([
            "-filter_complex_script", filters_tmp_filepath,
            "-map", "[a]"
        ])
        args.append(audio_temp_fpath)

        # merge all cmd segments together
        cmd = _inputs + _filters + _channels
        cmd += str(
            "amix=inputs={inputs}:duration=first:"
            "dropout_transition=1000,volume={inputs}[a]\" "
        ).format(inputs=len(inputs))
        cmd += "-map \"[a]\" "
        # run subprocess
        self.log.debug("Executing: {}".format(args))
        openpype.api.run_subprocess(args, logger=self.log)

        return cmd
        os.remove(filters_tmp_filepath)

    def create_temp_file(self, name):
        """Create temp wav file
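For illustration, a minimal sketch of the argument list mix_audio() above assembles for two clips; the paths, timings and temp file names are invented:

args = [
    "ffmpeg",  # stands in for self.ffmpeg_path
    "-ss", "0.0", "-t", "5.0", "-i", "clip_a.wav",
    "-ss", "0.0", "-t", "3.0", "-i", "clip_b.wav",
    # filters.txt would hold, comma-separated:
    #   [0]adelay=0:all=1[r0],[1]adelay=2000:all=1[r1],
    #   [r0][r1]amix=inputs=2:duration=first:dropout_transition=6000,volume=2[a]
    "-filter_complex_script", "filters.txt",
    "-map", "[a]",
    "audio_mix.wav",
]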
@@ -5,8 +5,16 @@ import copy
import clique
import six

from openpype.client.operations import (
    OperationsSession,
    new_subset_document,
    new_version_doc,
    new_representation_doc,
    prepare_subset_update_data,
    prepare_version_update_data,
    prepare_representation_update_data,
)
from bson.objectid import ObjectId
from pymongo import DeleteMany, ReplaceOne, InsertOne, UpdateOne
import pyblish.api

from openpype.client import (
@@ -247,9 +255,12 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

        template_name = self.get_template_name(instance)

        subset, subset_writes = self.prepare_subset(instance, project_name)
        version, version_writes = self.prepare_version(
            instance, subset, project_name
        op_session = OperationsSession()
        subset = self.prepare_subset(
            instance, op_session, project_name
        )
        version = self.prepare_version(
            instance, op_session, subset, project_name
        )
        instance.data["versionEntity"] = version
@@ -299,7 +310,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        # Transaction to reduce the chances of another publish trying to
        # publish to the same version number, since that chance can greatly
        # increase if the file transaction takes a long time.
        legacy_io.bulk_write(subset_writes + version_writes)
        op_session.commit()

        self.log.info("Subset {subset[name]} and Version {version[name]} "
                      "written to database..".format(subset=subset,
                                                     version=version))
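A rough sketch of the OperationsSession pattern introduced above - queue creates and updates, then flush them in one commit; the project name and documents are placeholders, not values from this changeset:

from openpype.client.operations import OperationsSession

op_session = OperationsSession()
op_session.create_entity("my_project", subset_doc["type"], subset_doc)
op_session.update_entity(
    "my_project", version_doc["type"], version_doc["_id"], update_data
)
# Nothing hits the database until commit() runs the queued operations
# together, shrinking the race window between concurrent publishes.
op_session.commit()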
@@ -331,49 +343,49 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

        # Finalize the representations now the published files are integrated
        # Get 'files' info for representations and their attached resources
        representation_writes = []
        new_repre_names_low = set()
        for prepared in prepared_representations:
            representation = prepared["representation"]
            repre_doc = prepared["representation"]
            repre_update_data = prepared["repre_doc_update_data"]
            transfers = prepared["transfers"]
            destinations = [dst for src, dst in transfers]
            representation["files"] = self.get_files_info(
            repre_doc["files"] = self.get_files_info(
                destinations, sites=sites, anatomy=anatomy
            )

            # Add the version resource file infos to each representation
            representation["files"] += resource_file_infos
            repre_doc["files"] += resource_file_infos

            # Set up representation for writing to the database. Since
            # we *might* be overwriting an existing entry if the version
            # already existed we'll use ReplaceOne with `upsert=True`
            representation_writes.append(ReplaceOne(
                filter={"_id": representation["_id"]},
                replacement=representation,
                upsert=True
            ))
            if repre_update_data is None:
                op_session.create_entity(
                    project_name, repre_doc["type"], repre_doc
                )
            else:
                op_session.update_entity(
                    project_name,
                    repre_doc["type"],
                    repre_doc["_id"],
                    repre_update_data
                )

            new_repre_names_low.add(representation["name"].lower())
            new_repre_names_low.add(repre_doc["name"].lower())

        # Delete any existing representations that didn't get any new data
        # if the instance is not set to append mode
        if not instance.data.get("append", False):
            delete_names = set()
            for name, existing_repres in existing_repres_by_name.items():
                if name not in new_repre_names_low:
                    # We add the exact representation name because `name` is
                    # lowercase for name matching only and not in the database
                    delete_names.add(existing_repres["name"])
            if delete_names:
                representation_writes.append(DeleteMany(
                    filter={
                        "parent": version["_id"],
                        "name": {"$in": list(delete_names)}
                    }
                ))
                    op_session.delete_entity(
                        project_name, "representation", existing_repres["_id"]
                    )

        # Write representations to the database
        legacy_io.bulk_write(representation_writes)
        self.log.debug("{}".format(op_session.to_data()))
        op_session.commit()

        # Backwards compatibility
        # todo: can we avoid the need to store this?
@@ -384,13 +396,14 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        self.log.info("Registered {} representations"
                      "".format(len(prepared_representations)))

    def prepare_subset(self, instance, project_name):
    def prepare_subset(self, instance, op_session, project_name):
        asset_doc = instance.data["assetEntity"]
        subset_name = instance.data["subset"]
        family = instance.data["family"]
        self.log.debug("Subset: {}".format(subset_name))

        # Get existing subset if it exists
        subset_doc = get_subset_by_name(
        existing_subset_doc = get_subset_by_name(
            project_name, subset_name, asset_doc["_id"]
        )

@@ -403,69 +416,79 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        if subset_group:
            data["subsetGroup"] = subset_group

        bulk_writes = []
        if subset_doc is None:
        subset_id = None
        if existing_subset_doc:
            subset_id = existing_subset_doc["_id"]
        subset_doc = new_subset_document(
            subset_name, family, asset_doc["_id"], data, subset_id
        )

        if existing_subset_doc is None:
            # Create a new subset
            self.log.info("Subset '%s' not found, creating ..." % subset_name)
            subset_doc = {
                "_id": ObjectId(),
                "schema": "openpype:subset-3.0",
                "type": "subset",
                "name": subset_name,
                "data": data,
                "parent": asset_doc["_id"]
            }
            bulk_writes.append(InsertOne(subset_doc))
            op_session.create_entity(
                project_name, subset_doc["type"], subset_doc
            )

        else:
            # Update existing subset data with new data and set it in the
            # database. We also change the found subset in-place so we don't
            # need to re-query the subset afterwards.
            subset_doc["data"].update(data)
            bulk_writes.append(UpdateOne(
                {"type": "subset", "_id": subset_doc["_id"]},
                {"$set": {
                    "data": subset_doc["data"]
                }}
            ))
            update_data = prepare_subset_update_data(
                existing_subset_doc, subset_doc
            )
            op_session.update_entity(
                project_name,
                subset_doc["type"],
                subset_doc["_id"],
                update_data
            )

        self.log.info("Prepared subset: {}".format(subset_name))
        return subset_doc, bulk_writes
        return subset_doc

    def prepare_version(self, instance, subset_doc, project_name):
    def prepare_version(self, instance, op_session, subset_doc, project_name):
        version_number = instance.data["version"]

        version_doc = {
            "schema": "openpype:version-3.0",
            "type": "version",
            "parent": subset_doc["_id"],
            "name": version_number,
            "data": self.create_version_data(instance)
        }

        existing_version = get_version_by_name(
            project_name,
            version_number,
            subset_doc["_id"],
            fields=["_id"]
        )
        version_id = None
        if existing_version:
            version_id = existing_version["_id"]

        version_data = self.create_version_data(instance)
        version_doc = new_version_doc(
            version_number,
            subset_doc["_id"],
            version_data,
            version_id
        )

        if existing_version:
            self.log.debug("Updating existing version ...")
            version_doc["_id"] = existing_version["_id"]
            update_data = prepare_version_update_data(
                existing_version, version_doc
            )
            op_session.update_entity(
                project_name,
                version_doc["type"],
                version_doc["_id"],
                update_data
            )
        else:
            self.log.debug("Creating new version ...")
            version_doc["_id"] = ObjectId()

            bulk_writes = [ReplaceOne(
                filter={"_id": version_doc["_id"]},
                replacement=version_doc,
                upsert=True
            )]
            op_session.create_entity(
                project_name, version_doc["type"], version_doc
            )

        self.log.info("Prepared version: v{0:03d}".format(version_doc["name"]))

        return version_doc, bulk_writes
        return version_doc

    def prepare_representation(self, repre,
                               template_name,

@@ -676,10 +699,9 @@ class IntegrateAsset(pyblish.api.InstancePlugin):

        # Use previous representation's id if there is a name match
        existing = existing_repres_by_name.get(repre["name"].lower())
        repre_id = None
        if existing:
            repre_id = existing["_id"]
        else:
            repre_id = ObjectId()

        # Store first transferred destination as published path data
        # - used primarily for reviews that are integrated to custom modules

@@ -693,20 +715,18 @@ class IntegrateAsset(pyblish.api.InstancePlugin):
        # and the actual representation entity for the database
        data = repre.get("data", {})
        data.update({"path": published_path, "template": template})
        representation = {
            "_id": repre_id,
            "schema": "openpype:representation-2.0",
            "type": "representation",
            "parent": version["_id"],
            "name": repre["name"],
            "data": data,

            # Imprint shortcut to context for performance reasons.
            "context": repre_context
        }
        repre_doc = new_representation_doc(
            repre["name"], version["_id"], repre_context, data, repre_id
        )
        update_data = None
        if repre_id is not None:
            update_data = prepare_representation_update_data(
                existing, repre_doc
            )

        return {
            "representation": representation,
            "representation": repre_doc,
            "repre_doc_update_data": update_data,
            "anatomy_data": template_data,
            "transfers": transfers,
            # todo: avoid the need for 'published_files' used by Integrate Hero
openpype/resources/app_icons/shotgrid.png (new binary file, 45 KiB; not shown)
openpype/settings/defaults/project_settings/shotgrid.json (new file, 22 lines)

@@ -0,0 +1,22 @@
{
    "shotgrid_project_id": 0,
    "shotgrid_server": "",
    "event": {
        "enabled": false
    },
    "fields": {
        "asset": {
            "type": "sg_asset_type"
        },
        "sequence": {
            "episode_link": "episode"
        },
        "shot": {
            "episode_link": "sg_episode",
            "sequence_link": "sg_sequence"
        },
        "task": {
            "step": "step"
        }
    }
}
@@ -136,6 +136,13 @@
        "enabled": false,
        "server": ""
    },
    "shotgrid": {
        "enabled": false,
        "leecher_manager_url": "http://127.0.0.1:3000",
        "leecher_backend_url": "http://127.0.0.1:8090",
        "filter_projects_by_login": true,
        "shotgrid_settings": {}
    },
    "timers_manager": {
        "enabled": true,
        "auto_stop": true,
@@ -107,6 +107,7 @@ from .enum_entity import (
    TaskTypeEnumEntity,
    DeadlineUrlEnumEntity,
    AnatomyTemplatesEnumEntity,
    ShotgridUrlEnumEntity
)

from .list_entity import ListEntity

@@ -171,6 +172,7 @@ __all__ = (
    "ToolsEnumEntity",
    "TaskTypeEnumEntity",
    "DeadlineUrlEnumEntity",
    "ShotgridUrlEnumEntity",
    "AnatomyTemplatesEnumEntity",

    "ListEntity",
@ -1,10 +1,7 @@
|
|||
import copy
|
||||
from .input_entities import InputEntity
|
||||
from .exceptions import EntitySchemaError
|
||||
from .lib import (
|
||||
NOT_SET,
|
||||
STRING_TYPE
|
||||
)
|
||||
from .lib import NOT_SET, STRING_TYPE
|
||||
|
||||
|
||||
class BaseEnumEntity(InputEntity):
|
||||
|
|
@@ -26,7 +23,7 @@ class BaseEnumEntity(InputEntity):
         for item in self.enum_items:
             key = tuple(item.keys())[0]
             if key in enum_keys:
-                reason = "Key \"{}\" is more than once in enum items.".format(
+                reason = 'Key "{}" is more than once in enum items.'.format(
                     key
                 )
                 raise EntitySchemaError(self, reason)

@@ -34,7 +31,7 @@ class BaseEnumEntity(InputEntity):
             enum_keys.add(key)

             if not isinstance(key, STRING_TYPE):
-                reason = "Key \"{}\" has invalid type {}, expected {}.".format(
+                reason = 'Key "{}" has invalid type {}, expected {}.'.format(
                     key, type(key), STRING_TYPE
                 )
                 raise EntitySchemaError(self, reason)

@@ -59,7 +56,7 @@ class BaseEnumEntity(InputEntity):
         for item in check_values:
             if item not in self.valid_keys:
                 raise ValueError(
-                    "{} Invalid value \"{}\". Expected one of: {}".format(
+                    '{} Invalid value "{}". Expected one of: {}'.format(
                         self.path, item, self.valid_keys
                     )
                 )

@@ -84,7 +81,7 @@ class EnumEntity(BaseEnumEntity):
         self.valid_keys = set(all_keys)

         if self.multiselection:
-            self.valid_value_types = (list, )
+            self.valid_value_types = (list,)
             value_on_not_set = []
             if enum_default:
                 if not isinstance(enum_default, list):

@@ -109,7 +106,7 @@ class EnumEntity(BaseEnumEntity):
                     self.value_on_not_set = key
                     break

-            self.valid_value_types = (STRING_TYPE, )
+            self.valid_value_types = (STRING_TYPE,)

         # GUI attribute
         self.placeholder = self.schema_data.get("placeholder")

@@ -152,6 +149,7 @@ class HostsEnumEntity(BaseEnumEntity):
     Host name is not the same as application name. Host name defines
     implementation instead of application name.
     """
+
     schema_types = ["hosts-enum"]
     all_host_names = [
         "aftereffects",

@@ -211,7 +209,7 @@ class HostsEnumEntity(BaseEnumEntity):
         self.valid_keys = valid_keys

         if self.multiselection:
-            self.valid_value_types = (list, )
+            self.valid_value_types = (list,)
             self.value_on_not_set = []
         else:
             for key in valid_keys:

@@ -219,7 +217,7 @@ class HostsEnumEntity(BaseEnumEntity):
                 self.value_on_not_set = key
                 break

-            self.valid_value_types = (STRING_TYPE, )
+            self.valid_value_types = (STRING_TYPE,)

         # GUI attribute
         self.placeholder = self.schema_data.get("placeholder")

@@ -227,14 +225,10 @@ class HostsEnumEntity(BaseEnumEntity):
     def schema_validations(self):
         if self.hosts_filter:
             enum_len = len(self.enum_items)
-            if (
-                enum_len == 0
-                or (enum_len == 1 and self.use_empty_value)
-            ):
-                joined_filters = ", ".join([
-                    '"{}"'.format(item)
-                    for item in self.hosts_filter
-                ])
+            if enum_len == 0 or (enum_len == 1 and self.use_empty_value):
+                joined_filters = ", ".join(
+                    ['"{}"'.format(item) for item in self.hosts_filter]
+                )
                 reason = (
                     "All host names were removed after applying"
                     " host filters. {}"

@@ -247,24 +241,25 @@ class HostsEnumEntity(BaseEnumEntity):
                     invalid_filters.add(item)

             if invalid_filters:
-                joined_filters = ", ".join([
-                    '"{}"'.format(item)
-                    for item in self.hosts_filter
-                ])
-                expected_hosts = ", ".join([
-                    '"{}"'.format(item)
-                    for item in self.all_host_names
-                ])
-                self.log.warning((
-                    "Host filters containt invalid host names:"
-                    " \"{}\" Expected values are {}"
-                ).format(joined_filters, expected_hosts))
+                joined_filters = ", ".join(
+                    ['"{}"'.format(item) for item in self.hosts_filter]
+                )
+                expected_hosts = ", ".join(
+                    ['"{}"'.format(item) for item in self.all_host_names]
+                )
+                self.log.warning(
+                    (
+                        "Host filters containt invalid host names:"
+                        ' "{}" Expected values are {}'
+                    ).format(joined_filters, expected_hosts)
+                )

         super(HostsEnumEntity, self).schema_validations()


 class AppsEnumEntity(BaseEnumEntity):
     """Enum of applications for project anatomy attributes."""

     schema_types = ["apps-enum"]

     def _item_initialization(self):

@@ -272,7 +267,7 @@ class AppsEnumEntity(BaseEnumEntity):
         self.value_on_not_set = []
         self.enum_items = []
         self.valid_keys = set()
-        self.valid_value_types = (list, )
+        self.valid_value_types = (list,)
         self.placeholder = None

     def _get_enum_values(self):

@@ -353,7 +348,7 @@ class ToolsEnumEntity(BaseEnumEntity):
         self.value_on_not_set = []
         self.enum_items = []
         self.valid_keys = set()
-        self.valid_value_types = (list, )
+        self.valid_value_types = (list,)
         self.placeholder = None

     def _get_enum_values(self):

@@ -410,10 +405,10 @@ class TaskTypeEnumEntity(BaseEnumEntity):
     def _item_initialization(self):
         self.multiselection = self.schema_data.get("multiselection", True)
         if self.multiselection:
-            self.valid_value_types = (list, )
+            self.valid_value_types = (list,)
             self.value_on_not_set = []
         else:
-            self.valid_value_types = (STRING_TYPE, )
+            self.valid_value_types = (STRING_TYPE,)
             self.value_on_not_set = ""

         self.enum_items = []

@@ -508,7 +503,8 @@ class DeadlineUrlEnumEntity(BaseEnumEntity):
         enum_items_list = []
         for server_name, url_entity in deadline_urls_entity.items():
             enum_items_list.append(
-                {server_name: "{}: {}".format(server_name, url_entity.value)})
+                {server_name: "{}: {}".format(server_name, url_entity.value)}
+            )
             valid_keys.add(server_name)
         return enum_items_list, valid_keys

@@ -531,6 +527,50 @@ class DeadlineUrlEnumEntity(BaseEnumEntity):
             self._current_value = tuple(self.valid_keys)[0]


+class ShotgridUrlEnumEntity(BaseEnumEntity):
+    schema_types = ["shotgrid_url-enum"]
+
+    def _item_initialization(self):
+        self.multiselection = False
+
+        self.enum_items = []
+        self.valid_keys = set()
+
+        self.valid_value_types = (STRING_TYPE,)
+        self.value_on_not_set = ""
+
+        # GUI attribute
+        self.placeholder = self.schema_data.get("placeholder")
+
+    def _get_enum_values(self):
+        shotgrid_settings = self.get_entity_from_path(
+            "system_settings/modules/shotgrid/shotgrid_settings"
+        )
+
+        valid_keys = set()
+        enum_items_list = []
+        for server_name, settings in shotgrid_settings.items():
+            enum_items_list.append(
+                {
+                    server_name: "{}: {}".format(
+                        server_name, settings["shotgrid_url"].value
+                    )
+                }
+            )
+            valid_keys.add(server_name)
+        return enum_items_list, valid_keys
+
+    def set_override_state(self, *args, **kwargs):
+        super(ShotgridUrlEnumEntity, self).set_override_state(*args, **kwargs)
+
+        self.enum_items, self.valid_keys = self._get_enum_values()
+        if not self.valid_keys:
+            self._current_value = ""
+
+        elif self._current_value not in self.valid_keys:
+            self._current_value = tuple(self.valid_keys)[0]
+
+
 class AnatomyTemplatesEnumEntity(BaseEnumEntity):
     schema_types = ["anatomy-templates-enum"]
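
For illustration, the new ShotgridUrlEnumEntity builds one enum item per configured server, keyed by server name and labeled "name: url". A sketch of the resulting data, using plain strings as stand-ins for the real settings entities (which expose the URL via .value); server names and URLs are placeholders:

    shotgrid_settings = {
        "studio": {"shotgrid_url": "https://studio.shotgunstudio.com"},
        "dev": {"shotgrid_url": "https://dev.shotgunstudio.com"},
    }

    enum_items_list = [
        {name: "{}: {}".format(name, conf["shotgrid_url"])}
        for name, conf in shotgrid_settings.items()
    ]
    # [{'studio': 'studio: https://studio.shotgunstudio.com'},
    #  {'dev': 'dev: https://dev.shotgunstudio.com'}]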
@@ -62,6 +62,10 @@
         "type": "schema",
         "name": "schema_project_ftrack"
     },
+    {
+        "type": "schema",
+        "name": "schema_project_shotgrid"
+    },
     {
         "type": "schema",
         "name": "schema_project_kitsu"
@@ -0,0 +1,98 @@
+{
+    "type": "dict",
+    "key": "shotgrid",
+    "label": "Shotgrid",
+    "collapsible": true,
+    "is_file": true,
+    "children": [
+        {
+            "type": "number",
+            "key": "shotgrid_project_id",
+            "label": "Shotgrid project id"
+        },
+        {
+            "type": "shotgrid_url-enum",
+            "key": "shotgrid_server",
+            "label": "Shotgrid Server"
+        },
+        {
+            "type": "dict",
+            "key": "event",
+            "label": "Event Handler",
+            "collapsible": true,
+            "checkbox_key": "enabled",
+            "children": [
+                {
+                    "type": "boolean",
+                    "key": "enabled",
+                    "label": "Enabled"
+                }
+            ]
+        },
+        {
+            "type": "dict",
+            "key": "fields",
+            "label": "Fields Template",
+            "collapsible": true,
+            "children": [
+                {
+                    "type": "dict",
+                    "key": "asset",
+                    "label": "Asset",
+                    "collapsible": true,
+                    "children": [
+                        {
+                            "type": "text",
+                            "key": "type",
+                            "label": "Asset Type"
+                        }
+                    ]
+                },
+                {
+                    "type": "dict",
+                    "key": "sequence",
+                    "label": "Sequence",
+                    "collapsible": true,
+                    "children": [
+                        {
+                            "type": "text",
+                            "key": "episode_link",
+                            "label": "Episode link"
+                        }
+                    ]
+                },
+                {
+                    "type": "dict",
+                    "key": "shot",
+                    "label": "Shot",
+                    "collapsible": true,
+                    "children": [
+                        {
+                            "type": "text",
+                            "key": "episode_link",
+                            "label": "Episode link"
+                        },
+                        {
+                            "type": "text",
+                            "key": "sequence_link",
+                            "label": "Sequence link"
+                        }
+                    ]
+                },
+                {
+                    "type": "dict",
+                    "key": "task",
+                    "label": "Task",
+                    "collapsible": true,
+                    "children": [
+                        {
+                            "type": "text",
+                            "key": "step",
+                            "label": "Step link"
+                        }
+                    ]
+                }
+            ]
+        }
+    ]
+}
@@ -13,6 +13,9 @@
     {
         "ftrackreview": "Add review to Ftrack"
    },
+    {
+        "shotgridreview": "Add review to Shotgrid"
+    },
     {
         "delete": "Delete output"
     },
@@ -48,6 +48,60 @@
         "type": "schema",
         "name": "schema_kitsu"
     },
+    {
+        "type": "dict",
+        "key": "shotgrid",
+        "label": "Shotgrid",
+        "collapsible": true,
+        "checkbox_key": "enabled",
+        "children": [
+            {
+                "type": "boolean",
+                "key": "enabled",
+                "label": "Enabled"
+            },
+            {
+                "type": "text",
+                "key": "leecher_manager_url",
+                "label": "Shotgrid Leecher Manager URL"
+            },
+            {
+                "type": "text",
+                "key": "leecher_backend_url",
+                "label": "Shotgrid Leecher Backend URL"
+            },
+            {
+                "type": "boolean",
+                "key": "filter_projects_by_login",
+                "label": "Filter projects by SG login"
+            },
+            {
+                "type": "dict-modifiable",
+                "key": "shotgrid_settings",
+                "label": "Shotgrid Servers",
+                "object_type": {
+                    "type": "dict",
+                    "children": [
+                        {
+                            "key": "shotgrid_url",
+                            "label": "Server URL",
+                            "type": "text"
+                        },
+                        {
+                            "key": "shotgrid_script_name",
+                            "label": "Script Name",
+                            "type": "text"
+                        },
+                        {
+                            "key": "shotgrid_script_key",
+                            "label": "Script api key",
+                            "type": "text"
+                        }
+                    ]
+                }
+            }
+        ]
+    },
     {
         "type": "dict",
         "key": "timers_manager",
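
The "shotgrid_settings" dict-modifiable above holds one entry per Shotgrid server, and it is exactly what ShotgridUrlEnumEntity enumerates. Resolved to plain Python, a filled-in override might look like this (all values are placeholders):

    shotgrid_settings = {
        "studio": {
            "shotgrid_url": "https://studio.shotgunstudio.com",
            "shotgrid_script_name": "openpype",
            "shotgrid_script_key": "<script api key>",
        },
    }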
poetry.lock  (generated, 50 lines changed)

@@ -92,7 +92,14 @@ version = "1.4.4"
 description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
 category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+develop = false
+
+[package.source]
+type = "git"
+url = "https://github.com/ActiveState/appdirs.git"
+reference = "master"
+resolved_reference = "193a2cbba58cce2542882fcedd0e49f6763672ed"

 [[package]]
 name = "arrow"

@@ -221,7 +228,7 @@ python-versions = "~=3.7"

 [[package]]
 name = "certifi"
-version = "2022.5.18.1"
+version = "2022.6.15"
 description = "Python package for providing Mozilla's CA Bundle."
 category = "main"
 optional = false

@@ -456,19 +463,20 @@ python-versions = ">=3.7"

 [[package]]
 name = "ftrack-python-api"
-version = "2.0.0"
+version = "2.3.3"
 description = "Python API for ftrack."
 category = "main"
 optional = false
-python-versions = ">=2.7.9, <4.0"
+python-versions = ">=2.7.9, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, < 3.10"

 [package.dependencies]
 appdirs = ">=1,<2"
 arrow = ">=0.4.4,<1"
-clique = ">=1.2.0,<2"
+clique = "1.6.1"
 future = ">=0.16.0,<1"
 pyparsing = ">=2.0,<3"
 requests = ">=2,<3"
-six = ">=1,<2"
+six = ">=1.13.0,<2"
 termcolor = ">=1.1.0,<2"
 websocket-client = ">=0.40.0,<1"

@@ -1375,6 +1383,21 @@ category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

+[[package]]
+name = "shotgun-api3"
+version = "3.3.3"
+description = "Shotgun Python API"
+category = "main"
+optional = false
+python-versions = "*"
+develop = false
+
+[package.source]
+type = "git"
+url = "https://github.com/shotgunsoftware/python-api.git"
+reference = "v3.3.3"
+resolved_reference = "b9f066c0edbea6e0733242e18f32f75489064840"
+
 [[package]]
 name = "six"
 version = "1.16.0"

@@ -1812,10 +1835,7 @@ ansicon = [
     {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"},
     {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"},
 ]
-appdirs = [
-    {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
-    {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
-]
+appdirs = []
 arrow = [
     {file = "arrow-0.17.0-py2.py3-none-any.whl", hash = "sha256:e098abbd9af3665aea81bdd6c869e93af4feb078e98468dd351c383af187aac5"},
     {file = "arrow-0.17.0.tar.gz", hash = "sha256:ff08d10cda1d36c68657d6ad20d74fbea493d980f8b2d45344e00d6ed2bf6ed4"},

@@ -1870,8 +1890,8 @@ cachetools = [
     {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"},
 ]
 certifi = [
-    {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"},
-    {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"},
+    {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
+    {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
 ]
 cffi = [
     {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},

@@ -2137,10 +2157,7 @@ frozenlist = [
     {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"},
     {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"},
 ]
-ftrack-python-api = [
-    {file = "ftrack-python-api-2.0.0.tar.gz", hash = "sha256:dd6f02c31daf5a10078196dc9eac4671e4297c762fbbf4df98de668ac12281d9"},
-    {file = "ftrack_python_api-2.0.0-py2.py3-none-any.whl", hash = "sha256:d0df0f2df4b53947272f95e179ec98b477ee425bf4217b37bb59030ad989771e"},
-]
+ftrack-python-api = []
 future = [
     {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"},
 ]

@@ -2820,6 +2837,7 @@ semver = [
     {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"},
     {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"},
 ]
+shotgun-api3 = []
 six = [
     {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@@ -33,13 +33,14 @@ aiohttp = "^3.7"
 aiohttp_json_rpc = "*" # TVPaint server
 acre = { git = "https://github.com/pypeclub/acre.git" }
 opentimelineio = { version = "0.14.0.dev1", source = "openpype" }
-appdirs = "^1.4.3"
+appdirs = { git = "https://github.com/ActiveState/appdirs.git", branch = "master" }
 blessed = "^1.17" # openpype terminal formatting
 coolname = "*"
 clique = "1.6.*"
 Click = "^7"
 dnspython = "^2.1.0"
-ftrack-python-api = "2.0.*"
+ftrack-python-api = "^2.3.3"
+shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"}
 gazu = "^0.8.28"
 google-api-python-client = "^1.12.8" # sync server google support (should be separate?)
 jsonschema = "^2.6.0"
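
The newly pinned shotgun_api3 package is the client the module can use to reach a server configured with the three settings keys shown earlier. A minimal connection sketch; URL, script name and key are placeholders:

    import shotgun_api3

    # Placeholders standing in for the shotgrid_url, shotgrid_script_name
    # and shotgrid_script_key values from the system settings.
    sg = shotgun_api3.Shotgun(
        "https://studio.shotgunstudio.com",
        script_name="openpype",
        api_key="<script api key>",
    )
    print(sg.find_one("Project", [["id", "is", 123]], ["name"]))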
setup.py  (2 lines changed)

@@ -152,7 +152,7 @@ build_exe_options = dict(
 )

 bdist_mac_options = dict(
-    bundle_name="OpenPype",
+    bundle_name=f"OpenPype {__version__}",
     iconfile=mac_icon_path
 )
start.py  (157 lines changed)

@@ -103,6 +103,9 @@ import site
 import distutils.spawn
 from pathlib import Path

+
+silent_mode = False
+
 # OPENPYPE_ROOT is variable pointing to build (or code) directory
 # WARNING `OPENPYPE_ROOT` must be defined before igniter import
 # - igniter changes cwd which cause that filepath of this script won't lead

@@ -138,40 +141,44 @@ if sys.__stdout__:
     term = blessed.Terminal()

     def _print(message: str):
+        if silent_mode:
+            return
         if message.startswith("!!! "):
-            print("{}{}".format(term.orangered2("!!! "), message[4:]))
+            print(f'{term.orangered2("!!! ")}{message[4:]}')
             return
         if message.startswith(">>> "):
-            print("{}{}".format(term.aquamarine3(">>> "), message[4:]))
+            print(f'{term.aquamarine3(">>> ")}{message[4:]}')
             return
         if message.startswith("--- "):
-            print("{}{}".format(term.darkolivegreen3("--- "), message[4:]))
+            print(f'{term.darkolivegreen3("--- ")}{message[4:]}')
             return
         if message.startswith("*** "):
-            print("{}{}".format(term.gold("*** "), message[4:]))
+            print(f'{term.gold("*** ")}{message[4:]}')
             return
         if message.startswith("  - "):
-            print("{}{}".format(term.wheat("  - "), message[4:]))
+            print(f'{term.wheat("  - ")}{message[4:]}')
             return
         if message.startswith("  . "):
-            print("{}{}".format(term.tan("  . "), message[4:]))
+            print(f'{term.tan("  . ")}{message[4:]}')
             return
         if message.startswith("     - "):
-            print("{}{}".format(term.seagreen3("     - "), message[7:]))
+            print(f'{term.seagreen3("     - ")}{message[7:]}')
             return
         if message.startswith("     ! "):
-            print("{}{}".format(term.goldenrod("     ! "), message[7:]))
+            print(f'{term.goldenrod("     ! ")}{message[7:]}')
             return
         if message.startswith("     * "):
-            print("{}{}".format(term.aquamarine1("     * "), message[7:]))
+            print(f'{term.aquamarine1("     * ")}{message[7:]}')
             return
         if message.startswith("    "):
-            print("{}{}".format(term.darkseagreen3("    "), message[4:]))
+            print(f'{term.darkseagreen3("    ")}{message[4:]}')
             return

         print(message)
 else:
     def _print(message: str):
+        if silent_mode:
+            return
         print(message)

@@ -187,9 +194,8 @@ else:
 if "--headless" in sys.argv:
     os.environ["OPENPYPE_HEADLESS_MODE"] = "1"
     sys.argv.remove("--headless")
-else:
-    if os.getenv("OPENPYPE_HEADLESS_MODE") != "1":
-        os.environ.pop("OPENPYPE_HEADLESS_MODE", None)
+elif os.getenv("OPENPYPE_HEADLESS_MODE") != "1":
+    os.environ.pop("OPENPYPE_HEADLESS_MODE", None)

 # Enabled logging debug mode when "--debug" is passed
 if "--verbose" in sys.argv:

@@ -203,8 +209,8 @@ if "--verbose" in sys.argv:
         value = sys.argv.pop(idx)
     else:
         raise RuntimeError((
-            "Expect value after \"--verbose\" argument. {}"
-        ).format(expected_values))
+            f"Expect value after \"--verbose\" argument. {expected_values}"
+        ))

     log_level = None
     low_value = value.lower()

@@ -225,8 +231,9 @@ if "--verbose" in sys.argv:

     if log_level is None:
         raise RuntimeError((
-            "Unexpected value after \"--verbose\" argument \"{}\". {}"
-        ).format(value, expected_values))
+            "Unexpected value after \"--verbose\" "
+            f"argument \"{value}\". {expected_values}"
+        ))

     os.environ["OPENPYPE_LOG_LEVEL"] = str(log_level)

@@ -242,13 +249,14 @@ from igniter.tools import (
     get_openpype_global_settings,
     get_openpype_path_from_settings,
     validate_mongo_connection,
-    OpenPypeVersionNotFound
+    OpenPypeVersionNotFound,
+    OpenPypeVersionIncompatible
 )  # noqa
 from igniter.bootstrap_repos import OpenPypeVersion  # noqa: E402

 bootstrap = BootstrapRepos()
 silent_commands = {"run", "igniter", "standalonepublisher",
-                   "extractenvironments"}
+                   "extractenvironments", "version"}


 def list_versions(openpype_versions: list, local_version=None) -> None:

@@ -270,8 +278,11 @@ def set_openpype_global_environments() -> None:

     general_env = get_general_environments()

+    # first resolve general environment because merge doesn't expect
+    # values to be list.
+    # TODO: switch to OpenPype environment functions
     merged_env = acre.merge(
-        acre.parse(general_env),
+        acre.compute(acre.parse(general_env), cleanup=False),
         dict(os.environ)
     )
     env = acre.compute(

@@ -333,34 +344,33 @@ def run_disk_mapping_commands(settings):
         destination = destination.rstrip('/')
         source = source.rstrip('/')

-        if low_platform == "windows":
-            args = ["subst", destination, source]
-        elif low_platform == "darwin":
-            scr = "do shell script \"ln -s {} {}\" with administrator privileges".format(source, destination)  # noqa: E501
+        if low_platform == "darwin":
+            scr = f'do shell script "ln -s {source} {destination}" with administrator privileges'  # noqa
+
             args = ["osascript", "-e", scr]
+        elif low_platform == "windows":
+            args = ["subst", destination, source]
         else:
             args = ["sudo", "ln", "-s", source, destination]

-        _print("disk mapping args:: {}".format(args))
+        _print(f"*** disk mapping arguments: {args}")
         try:
             if not os.path.exists(destination):
                 output = subprocess.Popen(args)
                 if output.returncode and output.returncode != 0:
-                    exc_msg = "Executing was not successful: \"{}\"".format(
-                        args)
+                    exc_msg = f'Executing was not successful: "{args}"'
                     raise RuntimeError(exc_msg)
         except TypeError as exc:
-            _print("Error {} in mapping drive {}, {}".format(str(exc),
-                                                             source,
-                                                             destination))
+            _print(
+                f"Error {str(exc)} in mapping drive {source}, {destination}")
             raise


 def set_avalon_environments():
     """Set avalon specific environments.

-    These are non modifiable environments for avalon workflow that must be set
+    These are non-modifiable environments for avalon workflow that must be set
     before avalon module is imported because avalon works with globals set with
     environment variables.
     """

@@ -505,7 +515,7 @@ def _process_arguments() -> tuple:
             )
             if m and m.group('version'):
                 use_version = m.group('version')
-                _print(">>> Requested version [ {} ]".format(use_version))
+                _print(f">>> Requested version [ {use_version} ]")
                 if "+staging" in use_version:
                     use_staging = True
                 break

@@ -611,8 +621,8 @@ def _determine_mongodb() -> str:
     try:
         openpype_mongo = bootstrap.secure_registry.get_item(
             "openPypeMongo")
-    except ValueError:
-        raise RuntimeError("Missing MongoDB url")
+    except ValueError as e:
+        raise RuntimeError("Missing MongoDB url") from e

     return openpype_mongo

@@ -684,40 +694,47 @@ def _find_frozen_openpype(use_version: str = None,
         # Specific version is defined
         if use_version.lower() == "latest":
             # Version says to use latest version
-            _print("Finding latest version defined by use version")
+            _print(">>> Finding latest version defined by use version")
             openpype_version = bootstrap.find_latest_openpype_version(
-                use_staging
+                use_staging, compatible_with=installed_version
             )
         else:
-            _print("Finding specified version \"{}\"".format(use_version))
+            _print(f">>> Finding specified version \"{use_version}\"")
             openpype_version = bootstrap.find_openpype_version(
                 use_version, use_staging
             )

             if openpype_version is None:
                 raise OpenPypeVersionNotFound(
-                    "Requested version \"{}\" was not found.".format(
-                        use_version
-                    )
+                    f"Requested version \"{use_version}\" was not found."
                 )

+            if not openpype_version.is_compatible(installed_version):
+                raise OpenPypeVersionIncompatible((
+                    f"Requested version \"{use_version}\" is not compatible "
+                    f"with installed version \"{installed_version}\""
+                ))
+
     elif studio_version is not None:
         # Studio has defined a version to use
-        _print("Finding studio version \"{}\"".format(studio_version))
+        _print(f">>> Finding studio version \"{studio_version}\"")
         openpype_version = bootstrap.find_openpype_version(
-            studio_version, use_staging
+            studio_version, use_staging, compatible_with=installed_version
         )
         if openpype_version is None:
             raise OpenPypeVersionNotFound((
-                "Requested OpenPype version \"{}\" defined by settings"
+                "Requested OpenPype version "
+                f"\"{studio_version}\" defined by settings"
                 " was not found."
-            ).format(studio_version))
+            ))

     else:
         # Default behavior to use latest version
-        _print("Finding latest version")
+        _print((
+            ">>> Finding latest version compatible "
+            f"with [ {installed_version} ]"))
         openpype_version = bootstrap.find_latest_openpype_version(
-            use_staging
+            use_staging, compatible_with=installed_version
         )
         if openpype_version is None:
             if use_staging:
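
The compatible_with checks above gate automatic version switching on compatibility with the installed build. A sketch of the idea, assuming compatibility means matching major.minor (the actual check lives on igniter's OpenPypeVersion, not here):

    def is_compatible(version, other) -> bool:
        # Assumption: versions are compatible when major.minor match,
        # e.g. 3.12.1 is compatible with 3.12.0 but not with 3.11.x.
        return (version.major, version.minor) == (other.major, other.minor)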
@@ -798,7 +815,7 @@ def _bootstrap_from_code(use_version, use_staging):

     if getattr(sys, 'frozen', False):
         local_version = bootstrap.get_version(Path(_openpype_root))
-        switch_str = f" - will switch to {use_version}" if use_version else ""
+        switch_str = f" - will switch to {use_version}" if use_version and use_version != local_version else ""  # noqa
         _print(f"  - booting version: {local_version}{switch_str}")
         assert local_version
     else:

@@ -813,11 +830,8 @@ def _bootstrap_from_code(use_version, use_staging):
                 use_version, use_staging
             )
             if version_to_use is None:
-                raise OpenPypeVersionNotFound(
-                    "Requested version \"{}\" was not found.".format(
-                        use_version
-                    )
-                )
+                raise OpenPypeVersionIncompatible(
+                    f"Requested version \"{use_version}\" was not found.")
         else:
             # Staging version should be used
             version_to_use = bootstrap.find_latest_openpype_version(

@@ -903,7 +917,7 @@ def _boot_validate_versions(use_version, local_version):
         use_version, openpype_versions
     )
     valid, message = bootstrap.validate_openpype_version(version_path)
-    _print("{}{}".format(">>> " if valid else "!!! ", message))
+    _print(f'{">>> " if valid else "!!! "}{message}')


 def _boot_print_versions(use_staging, local_version, openpype_root):

@@ -914,13 +928,24 @@ def _boot_print_versions(use_staging, local_version, openpype_root):
         _print("--- This will list only staging versions detected.")
         _print("    To see other version, omit --use-staging argument.")

-    openpype_versions = bootstrap.find_openpype(include_zips=True,
-                                                staging=use_staging)
+    if getattr(sys, 'frozen', False):
+        local_version = bootstrap.get_version(Path(openpype_root))
+    else:
+        local_version = OpenPypeVersion.get_installed_version_str()
+
+    compatible_with = OpenPypeVersion(version=local_version)
+    if "--all" in sys.argv:
+        compatible_with = None
+        _print("--- Showing all version (even those not compatible).")
+    else:
+        _print(("--- Showing only compatible versions "
+                f"with [ {compatible_with.major}.{compatible_with.minor} ]"))
+
+    openpype_versions = bootstrap.find_openpype(
+        include_zips=True,
+        staging=use_staging,
+        compatible_with=compatible_with)

     list_versions(openpype_versions, local_version)

@@ -937,6 +962,9 @@ def _boot_handle_missing_version(local_version, use_staging, message):

 def boot():
     """Bootstrap OpenPype."""
+    global silent_mode
+    if any(arg in silent_commands for arg in sys.argv):
+        silent_mode = True

     # ------------------------------------------------------------------------
     # Set environment to OpenPype root path

@@ -1040,7 +1068,7 @@ def boot():
         if not result[0]:
             _print(f"!!! Invalid version: {result[1]}")
             sys.exit(1)
-        _print(f"--- version is valid")
+        _print("--- version is valid")
     else:
         try:
             version_path = _bootstrap_from_code(use_version, use_staging)

@@ -1113,8 +1141,12 @@ def boot():

 def get_info(use_staging=None) -> list:
     """Print additional information to console."""
-    from openpype.lib.mongo import get_default_components
-    from openpype.lib.log import PypeLogger
+    from openpype.client.mongo import get_default_components
+    try:
+        from openpype.lib.log import Logger
+    except ImportError:
+        # Backwards compatibility for 'PypeLogger'
+        from openpype.lib.log import PypeLogger as Logger

     components = get_default_components()

@@ -1141,14 +1173,14 @@ def get_info(use_staging=None) -> list:
             os.environ.get("MUSTER_REST_URL")))

     # Reinitialize
-    PypeLogger.initialize()
+    Logger.initialize()

     mongo_components = get_default_components()
     if mongo_components["host"]:
         inf.append(("Logging to MongoDB", mongo_components["host"]))
         inf.append(("  - port", mongo_components["port"] or "<N/A>"))
-        inf.append(("  - database", PypeLogger.log_database_name))
-        inf.append(("  - collection", PypeLogger.log_collection_name))
+        inf.append(("  - database", Logger.log_database_name))
+        inf.append(("  - collection", Logger.log_collection_name))
         inf.append(("  - user", mongo_components["username"] or "<N/A>"))
         if mongo_components["auth_db"]:
             inf.append(("  - auth source", mongo_components["auth_db"]))

@@ -1157,8 +1189,7 @@ def get_info(use_staging=None) -> list:
     formatted = []
     for info in inf:
         padding = (maximum - len(info[0])) + 1
-        formatted.append(
-            "... {}:{}[ {} ]".format(info[0], " " * padding, info[1]))
+        formatted.append(f'... {info[0]}:{" " * padding}[ {info[1]} ]')
     return formatted
@@ -193,15 +193,15 @@ if [ "$disable_submodule_update" == 1 ]; then

 if [[ "$OSTYPE" == "darwin"* ]]; then
     # fix code signing issue
-    codesign --remove-signature "$openpype_root/build/OpenPype.app/Contents/MacOS/lib/Python"
+    codesign --remove-signature "$openpype_root/build/OpenPype $openpype_version.app/Contents/MacOS/lib/Python"
     if command -v create-dmg > /dev/null 2>&1; then
         create-dmg \
-            --volname "OpenPype Installer" \
+            --volname "OpenPype $openpype_version Installer" \
             --window-pos 200 120 \
             --window-size 600 300 \
             --app-drop-link 100 50 \
-            "$openpype_root/build/OpenPype-Installer.dmg" \
-            "$openpype_root/build/OpenPype.app"
+            "$openpype_root/build/OpenPype-Installer-$openpype_version.dmg" \
+            "$openpype_root/build/OpenPype $openpype_version.app"
     else
         echo -e "${BIYellow}!!!${RST} ${BIWhite}create-dmg${RST} command is not available."
     fi
@@ -29,6 +29,7 @@ import shutil
 import blessed
 import enlighten
 import time
+import re


 term = blessed.Terminal()

@@ -52,7 +53,7 @@ def _print(msg: str, type: int = 0) -> None:
     else:
         header = term.darkolivegreen3("--- ")

-    print("{}{}".format(header, msg))
+    print(f"{header}{msg}")


 def count_folders(path: Path) -> int:

@@ -95,16 +96,22 @@ assert site_pkg, "No venv site-packages are found."
 _print(f"Working with: {site_pkg}", 2)

 openpype_root = Path(os.path.dirname(__file__)).parent
+version = {}
+with open(openpype_root / "openpype" / "version.py") as fp:
+    exec(fp.read(), version)
+
+version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"])
+openpype_version = version_match[1]

 # create full path
 if platform.system().lower() == "darwin":
     build_dir = openpype_root.joinpath(
         "build",
-        "OpenPype.app",
+        f"OpenPype {openpype_version}.app",
         "Contents",
         "MacOS")
 else:
-    build_subdir = "exe.{}-{}".format(get_platform(), sys.version[0:3])
+    build_subdir = f"exe.{get_platform()}-{sys.version[:3]}"
     build_dir = openpype_root / "build" / build_subdir

 _print(f"Using build at {build_dir}", 2)
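
The re.search above keeps only the leading major.minor.patch of the version string, so prerelease or build suffixes never end up in the bundle name. For example:

    import re

    version_match = re.search(r"(\d+\.\d+.\d+).*", "3.12.0-nightly.2")
    print(version_match[1])  # "3.12.0" -> bundle "OpenPype 3.12.0.app"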
@@ -61,7 +61,7 @@ def _print(msg: str, message_type: int = 0) -> None:
     else:
         header = term.darkolivegreen3("--- ")

-    print("{}{}".format(header, msg))
+    print(f"{header}{msg}")


 if __name__ == "__main__":