Merge branch 'develop' into feature/validation_exceptions_nuke
commit 99033333db
204 changed files with 12505 additions and 3871 deletions
.gitmodules (vendored, 5 lines changed)

@@ -4,7 +4,4 @@
 [submodule "tools/modules/powershell/PSWriteColor"]
 	path = tools/modules/powershell/PSWriteColor
 	url = https://github.com/EvotecIT/PSWriteColor.git
-[submodule "vendor/configs/OpenColorIO-Configs"]
-	path = vendor/configs/OpenColorIO-Configs
-	url = https://github.com/imageworks/OpenColorIO-Configs
CHANGELOG.md (58 lines changed)

@@ -1,22 +1,58 @@
 # Changelog
 
-## [3.13.1-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD)
+## [3.14.1-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD)
 
-[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.13.0...HEAD)
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.0...HEAD)
 
+**🚀 Enhancements**
+
+- Ftrack: More logs related to auto sync value change [\#3671](https://github.com/pypeclub/OpenPype/pull/3671)
+
+**🐛 Bug fixes**
+
+- RoyalRender: handle host name that is not set [\#3695](https://github.com/pypeclub/OpenPype/pull/3695)
+
+## [3.14.0](https://github.com/pypeclub/OpenPype/tree/3.14.0) (2022-08-18)
+
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.14.0-nightly.1...3.14.0)
 
 **🆕 New features**
 
 - Maya: Build workfile by template [\#3578](https://github.com/pypeclub/OpenPype/pull/3578)
 
 **🚀 Enhancements**
 
 - Ftrack: Additional component metadata [\#3685](https://github.com/pypeclub/OpenPype/pull/3685)
 - Ftrack: Set task status on farm publishing [\#3680](https://github.com/pypeclub/OpenPype/pull/3680)
 - Ftrack: Set task status on task creation in integrate hierarchy [\#3675](https://github.com/pypeclub/OpenPype/pull/3675)
 - Maya: Disable rendering of all lights for render instances submitted through Deadline. [\#3661](https://github.com/pypeclub/OpenPype/pull/3661)
 - General: Optimized OCIO configs [\#3650](https://github.com/pypeclub/OpenPype/pull/3650)
 
 **🐛 Bug fixes**
 
 - General: Switch from hero version to versioned works [\#3691](https://github.com/pypeclub/OpenPype/pull/3691)
 - General: Fix finding of last version [\#3656](https://github.com/pypeclub/OpenPype/pull/3656)
 - General: Extract Review can scale with pixel aspect ratio [\#3644](https://github.com/pypeclub/OpenPype/pull/3644)
 - Maya: Refactor moved usage of CreateRender settings [\#3643](https://github.com/pypeclub/OpenPype/pull/3643)
 - General: Hero version representations have full context [\#3638](https://github.com/pypeclub/OpenPype/pull/3638)
 - Nuke: color settings for render write node is working now [\#3632](https://github.com/pypeclub/OpenPype/pull/3632)
 - Maya: FBX support for update in reference loader [\#3631](https://github.com/pypeclub/OpenPype/pull/3631)
 
 **🔀 Refactored code**
 
 - General: Use client projects getter [\#3673](https://github.com/pypeclub/OpenPype/pull/3673)
 - Resolve: Match folder structure to other hosts [\#3653](https://github.com/pypeclub/OpenPype/pull/3653)
 - Maya: Hosts as modules [\#3647](https://github.com/pypeclub/OpenPype/pull/3647)
 - TimersManager: Plugins are in timers manager module [\#3639](https://github.com/pypeclub/OpenPype/pull/3639)
 - General: Move workfiles functions into pipeline [\#3637](https://github.com/pypeclub/OpenPype/pull/3637)
 - General: Workfiles builder using query functions [\#3598](https://github.com/pypeclub/OpenPype/pull/3598)
 
 **Merged pull requests:**
 
 - Deadline: Global job pre load is not Pype 2 compatible [\#3666](https://github.com/pypeclub/OpenPype/pull/3666)
 - Maya: Remove unused get current renderer logic [\#3645](https://github.com/pypeclub/OpenPype/pull/3645)
 - Kitsu|Fix: Movie project type fails & first loop children names [\#3636](https://github.com/pypeclub/OpenPype/pull/3636)
 - fix the bug of failing to extract look when UDIMs format used in AiImage [\#3628](https://github.com/pypeclub/OpenPype/pull/3628)
 
 ## [3.13.0](https://github.com/pypeclub/OpenPype/tree/3.13.0) (2022-08-09)
 
@@ -55,7 +91,6 @@
 - General: Update imports in start script [\#3579](https://github.com/pypeclub/OpenPype/pull/3579)
 - Nuke: render family integration consistency [\#3576](https://github.com/pypeclub/OpenPype/pull/3576)
 - Ftrack: Handle missing published path in integrator [\#3570](https://github.com/pypeclub/OpenPype/pull/3570)
-- Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569)
 - Nuke: publish existing frames with slate with correct range [\#3555](https://github.com/pypeclub/OpenPype/pull/3555)
 
 **🔀 Refactored code**
@@ -84,35 +119,20 @@
 **🚀 Enhancements**
 
 - General: Global thumbnail extractor is ready for more cases [\#3561](https://github.com/pypeclub/OpenPype/pull/3561)
 - Maya: add additional validators to Settings [\#3540](https://github.com/pypeclub/OpenPype/pull/3540)
 - General: Interactive console in cli [\#3526](https://github.com/pypeclub/OpenPype/pull/3526)
 - Ftrack: Automatic daily review session creation can define trigger hour [\#3516](https://github.com/pypeclub/OpenPype/pull/3516)
 
 **🐛 Bug fixes**
 
 - Maya: fix Review image plane attribute [\#3569](https://github.com/pypeclub/OpenPype/pull/3569)
 - Maya: Fix animated attributes \(ie. overscan\) on loaded cameras breaking review publishing. [\#3562](https://github.com/pypeclub/OpenPype/pull/3562)
 - NewPublisher: Python 2 compatible html escape [\#3559](https://github.com/pypeclub/OpenPype/pull/3559)
 - Remove invalid submodules from `/vendor` [\#3557](https://github.com/pypeclub/OpenPype/pull/3557)
 - General: Remove hosts filter on integrator plugins [\#3556](https://github.com/pypeclub/OpenPype/pull/3556)
 - Settings: Clean default values of environments [\#3550](https://github.com/pypeclub/OpenPype/pull/3550)
 - Module interfaces: Fix import error [\#3547](https://github.com/pypeclub/OpenPype/pull/3547)
 - Workfiles tool: Show of tool and it's flags [\#3539](https://github.com/pypeclub/OpenPype/pull/3539)
 - General: Create workfile documents works again [\#3538](https://github.com/pypeclub/OpenPype/pull/3538)
 - Additional fixes for powershell scripts [\#3525](https://github.com/pypeclub/OpenPype/pull/3525)
 - Maya: Added wrapper around cmds.setAttr [\#3523](https://github.com/pypeclub/OpenPype/pull/3523)
 - Nuke: double slate [\#3521](https://github.com/pypeclub/OpenPype/pull/3521)
 - General: Fix hash of centos oiio archive [\#3519](https://github.com/pypeclub/OpenPype/pull/3519)
 - Maya: Renderman display output fix [\#3514](https://github.com/pypeclub/OpenPype/pull/3514)
 - TrayPublisher: Simple creation enhancements and fixes [\#3513](https://github.com/pypeclub/OpenPype/pull/3513)
 
 **🔀 Refactored code**
 
 - General: Use query functions in integrator [\#3563](https://github.com/pypeclub/OpenPype/pull/3563)
 - General: Mongo core connection moved to client [\#3531](https://github.com/pypeclub/OpenPype/pull/3531)
 - Refactor Integrate Asset [\#3530](https://github.com/pypeclub/OpenPype/pull/3530)
 - General: Client docstrings cleanup [\#3529](https://github.com/pypeclub/OpenPype/pull/3529)
 - General: Move load related functions into pipeline [\#3527](https://github.com/pypeclub/OpenPype/pull/3527)
 - General: Get current context document functions [\#3522](https://github.com/pypeclub/OpenPype/pull/3522)
 
 **Merged pull requests:**

@@ -381,7 +381,7 @@ class OpenPypeVersion(semver.VersionInfo):
     @classmethod
     def get_local_versions(
             cls, production: bool = None,
-            staging: bool = None, compatible_with: OpenPypeVersion = None
+            staging: bool = None
     ) -> List:
         """Get all versions available on this machine.
@@ -391,8 +391,10 @@ class OpenPypeVersion(semver.VersionInfo):
         Args:
             production (bool): Return production versions.
             staging (bool): Return staging versions.
-            compatible_with (OpenPypeVersion): Return only those compatible
-                with specified version.
+
+        Returns:
+            list: of compatible versions available on the machine.
 
         """
+        # Return all local versions if arguments are set to None
         if production is None and staging is None:
@@ -411,16 +413,7 @@ class OpenPypeVersion(semver.VersionInfo):
 
         # DEPRECATED: backwards compatible way to look for versions in root
         dir_to_search = Path(user_data_dir("openpype", "pypeclub"))
-        versions = OpenPypeVersion.get_versions_from_directory(
-            dir_to_search, compatible_with=compatible_with
-        )
-        if compatible_with:
-            dir_to_search = Path(
-                user_data_dir("openpype", "pypeclub")) / f"{compatible_with.major}.{compatible_with.minor}"  # noqa
-            versions += OpenPypeVersion.get_versions_from_directory(
-                dir_to_search, compatible_with=compatible_with
-            )
-
+        versions = OpenPypeVersion.get_versions_from_directory(dir_to_search)
 
         filtered_versions = []
         for version in versions:
@@ -434,7 +427,7 @@ class OpenPypeVersion(semver.VersionInfo):
     @classmethod
     def get_remote_versions(
             cls, production: bool = None,
-            staging: bool = None, compatible_with: OpenPypeVersion = None
+            staging: bool = None
     ) -> List:
         """Get all versions available in OpenPype Path.
@@ -444,8 +437,7 @@ class OpenPypeVersion(semver.VersionInfo):
         Args:
             production (bool): Return production versions.
             staging (bool): Return staging versions.
-            compatible_with (OpenPypeVersion): Return only those compatible
-                with specified version.
 
         """
+        # Return all local versions if arguments are set to None
         if production is None and staging is None:
@@ -479,13 +471,7 @@ class OpenPypeVersion(semver.VersionInfo):
         if not dir_to_search:
             return []
 
-        # DEPRECATED: look for version in root directory
-        versions = cls.get_versions_from_directory(
-            dir_to_search, compatible_with=compatible_with)
-        if compatible_with:
-            dir_to_search = dir_to_search / f"{compatible_with.major}.{compatible_with.minor}"  # noqa
-            versions += cls.get_versions_from_directory(
-                dir_to_search, compatible_with=compatible_with)
+        versions = cls.get_versions_from_directory(dir_to_search)
 
         filtered_versions = []
         for version in versions:
@@ -498,14 +484,11 @@ class OpenPypeVersion(semver.VersionInfo):
 
     @staticmethod
     def get_versions_from_directory(
-            openpype_dir: Path,
-            compatible_with: OpenPypeVersion = None) -> List:
+            openpype_dir: Path) -> List:
         """Get all detected OpenPype versions in directory.
 
         Args:
             openpype_dir (Path): Directory to scan.
-            compatible_with (OpenPypeVersion): Return only versions compatible
-                with build version specified as OpenPypeVersion.
 
         Returns:
             list of OpenPypeVersion
@@ -514,15 +497,22 @@ class OpenPypeVersion(semver.VersionInfo):
         Raises:
             ValueError: if invalid path is specified.
 
         """
-        _openpype_versions = []
+        openpype_versions = []
         if not openpype_dir.exists() and not openpype_dir.is_dir():
-            return _openpype_versions
+            return openpype_versions
 
         # iterate over directory in first level and find all that might
         # contain OpenPype.
         for item in openpype_dir.iterdir():
+            # if the item is directory with major.minor version, dive deeper
+            if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
+                _versions = OpenPypeVersion.get_versions_from_directory(
+                    item)
+                if _versions:
+                    openpype_versions += _versions
+
-            # if file, strip extension, in case of dir not.
+            # if file exists, strip extension, in case of dir don't.
             name = item.name if item.is_dir() else item.stem
             result = OpenPypeVersion.version_in_str(name)
@@ -540,14 +530,10 @@ class OpenPypeVersion(semver.VersionInfo):
             )[0]:
                 continue
 
-            if compatible_with and not detected_version.is_compatible(
-                    compatible_with):
-                continue
-
             detected_version.path = item
-            _openpype_versions.append(detected_version)
+            openpype_versions.append(detected_version)
 
-        return sorted(_openpype_versions)
+        return sorted(openpype_versions)
 
     @staticmethod
     def get_installed_version_str() -> str:
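The two hunks above trade the explicit `compatible_with` argument for an implicit rule: folders named `major.minor` are scanned recursively, and anything version-like found inside is collected. A standalone sketch of that rule (the helper name, the directory layout, and the simplified version pattern are assumptions for illustration, not the igniter's exact code):

```python
import re
from pathlib import Path


def scan_versions(root: Path) -> list:
    """Collect version-like names, diving into major.minor sub-folders."""
    found = []
    if not root.is_dir():
        return found
    for item in root.iterdir():
        # a "3.14"-style directory is scanned recursively, as in the diff
        if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
            found += scan_versions(item)
        # files lose their extension before version matching, dirs do not
        name = item.name if item.is_dir() else item.stem
        match = re.search(r"\d+\.\d+\.\d+\S*", name)
        if match:
            found.append(match.group())
    return sorted(found)
```

With a layout like `<data_dir>/3.14/openpype-v3.14.2.zip` this yields `['3.14.2']` without any explicit compatibility argument.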
@@ -575,15 +561,14 @@ class OpenPypeVersion(semver.VersionInfo):
     def get_latest_version(
             staging: bool = False,
             local: bool = None,
-            remote: bool = None,
-            compatible_with: OpenPypeVersion = None
+            remote: bool = None
     ) -> Union[OpenPypeVersion, None]:
-        """Get latest available version.
+        """Get the latest available version.
 
         The version does not contain information about path and source.
 
-        This is utility version to get latest version from all found. Build
-        version is not listed if staging is enabled.
+        This is utility version to get the latest version from all found.
+        Build version is not listed if staging is enabled.
 
         Arguments 'local' and 'remote' define if local and remote repository
         versions are used. All versions are used if both are not set (or set
@@ -595,8 +580,9 @@ class OpenPypeVersion(semver.VersionInfo):
             staging (bool, optional): List staging versions if True.
             local (bool, optional): List local versions if True.
             remote (bool, optional): List remote versions if True.
-            compatible_with (OpenPypeVersion, optional) Return only version
-                compatible with compatible_with.
 
         Returns:
             Latest OpenPypeVersion or None
 
         """
         if local is None and remote is None:
@@ -628,12 +614,7 @@ class OpenPypeVersion(semver.VersionInfo):
             return None
 
         all_versions.sort()
-        latest_version: OpenPypeVersion
-        latest_version = all_versions[-1]
-        if compatible_with and not latest_version.is_compatible(
-                compatible_with):
-            return None
-        return latest_version
+        return all_versions[-1]
 
     @classmethod
     def get_expected_studio_version(cls, staging=False, global_settings=None):
@@ -764,9 +745,9 @@ class BootstrapRepos:
             self, repo_dir: Path = None) -> Union[OpenPypeVersion, None]:
         """Copy zip created from OpenPype repositories to user data dir.
 
-        This detect OpenPype version either in local "live" OpenPype
+        This detects OpenPype version either in local "live" OpenPype
         repository or in user provided path. Then it will zip it in temporary
-        directory and finally it will move it to destination which is user
+        directory, and finally it will move it to destination which is user
         data directory. Existing files will be replaced.
 
         Args:
@@ -777,7 +758,7 @@ class BootstrapRepos:
 
         """
         # if repo dir is not set, we detect local "live" OpenPype repository
-        # version and use it as a source. Otherwise repo_dir is user
+        # version and use it as a source. Otherwise, repo_dir is user
         # entered location.
         if repo_dir:
             version = self.get_version(repo_dir)
@@ -1141,28 +1122,27 @@ class BootstrapRepos:
     @staticmethod
     def find_openpype_version(
         version: Union[str, OpenPypeVersion],
-        staging: bool,
-        compatible_with: OpenPypeVersion = None
+        staging: bool
     ) -> Union[OpenPypeVersion, None]:
         """Find location of specified OpenPype version.
 
         Args:
             version (Union[str, OpenPypeVersion): Version to find.
             staging (bool): Filter staging versions.
-            compatible_with (OpenPypeVersion, optional): Find only
-                versions compatible with specified one.
 
         Returns:
             requested OpenPypeVersion.
 
         """
-        installed_version = OpenPypeVersion.get_installed_version()
         if isinstance(version, str):
             version = OpenPypeVersion(version=version)
 
+        installed_version = OpenPypeVersion.get_installed_version()
         if installed_version == version:
             return installed_version
 
         local_versions = OpenPypeVersion.get_local_versions(
-            staging=staging, production=not staging,
-            compatible_with=compatible_with
+            staging=staging, production=not staging
         )
         zip_version = None
         for local_version in local_versions:
@@ -1176,8 +1156,7 @@ class BootstrapRepos:
             return zip_version
 
         remote_versions = OpenPypeVersion.get_remote_versions(
-            staging=staging, production=not staging,
-            compatible_with=compatible_with
+            staging=staging, production=not staging
         )
         for remote_version in remote_versions:
             if remote_version == version:
@@ -1186,13 +1165,23 @@ class BootstrapRepos:
 
     @staticmethod
     def find_latest_openpype_version(
-            staging, compatible_with: OpenPypeVersion = None):
+            staging: bool
+    ) -> Union[OpenPypeVersion, None]:
+        """Find the latest available OpenPype version in all locations.
+
+        Args:
+            staging (bool): True to look for staging versions.
+
+        Returns:
+            Latest OpenPype version or None if nothing was found.
+
+        """
         installed_version = OpenPypeVersion.get_installed_version()
         local_versions = OpenPypeVersion.get_local_versions(
-            staging=staging, compatible_with=compatible_with
+            staging=staging
         )
         remote_versions = OpenPypeVersion.get_remote_versions(
-            staging=staging, compatible_with=compatible_with
+            staging=staging
         )
         all_versions = local_versions + remote_versions
         if not staging:
@@ -1217,8 +1206,7 @@ class BootstrapRepos:
         self,
         openpype_path: Union[Path, str] = None,
         staging: bool = False,
-        include_zips: bool = False,
-        compatible_with: OpenPypeVersion = None
+        include_zips: bool = False
     ) -> Union[List[OpenPypeVersion], None]:
         """Get ordered dict of detected OpenPype version.
@@ -1235,8 +1223,6 @@ class BootstrapRepos:
                 otherwise.
             include_zips (bool, optional): If set True it will try to find
                 OpenPype in zip files in given directory.
-            compatible_with (OpenPypeVersion, optional): Find only those
-                versions compatible with the one specified.
 
         Returns:
             dict of Path: Dictionary of detected OpenPype version.
@@ -1255,52 +1241,34 @@ class BootstrapRepos:
                 ("Finding OpenPype in non-filesystem locations is"
                  " not implemented yet."))
 
-        version_dir = ""
-        if compatible_with:
-            version_dir = f"{compatible_with.major}.{compatible_with.minor}"
-
         # if checks bellow for OPENPYPE_PATH and registry fails, use data_dir
-        dirs_to_search = [
-            self.data_dir
-        ]
-        if compatible_with:
-            dirs_to_search.append(self.data_dir / version_dir)
+        # DEPRECATED: lookup in root of this folder is deprecated in favour
+        # of major.minor sub-folders.
+        dirs_to_search = [self.data_dir]
 
         if openpype_path:
             dirs_to_search = [openpype_path]
-
-            if compatible_with:
-                dirs_to_search.append(openpype_path / version_dir)
-        else:
+        elif os.getenv("OPENPYPE_PATH") \
+                and Path(os.getenv("OPENPYPE_PATH")).exists():
             # first try OPENPYPE_PATH and if that is not available,
             # try registry.
-            if os.getenv("OPENPYPE_PATH") \
-                    and Path(os.getenv("OPENPYPE_PATH")).exists():
-                dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))]
+            dirs_to_search = [Path(os.getenv("OPENPYPE_PATH"))]
+        else:
+            try:
+                registry_dir = Path(
+                    str(self.registry.get_item("openPypePath")))
+                if registry_dir.exists():
+                    dirs_to_search = [registry_dir]
 
-                if compatible_with:
-                    dirs_to_search.append(
-                        Path(os.getenv("OPENPYPE_PATH")) / version_dir)
-            else:
-                try:
-                    registry_dir = Path(
-                        str(self.registry.get_item("openPypePath")))
-                    if registry_dir.exists():
-                        dirs_to_search = [registry_dir]
-                        if compatible_with:
-                            dirs_to_search.append(registry_dir / version_dir)
-
-                except ValueError:
-                    # nothing found in registry, we'll use data dir
-                    pass
+            except ValueError:
+                # nothing found in registry, we'll use data dir
+                pass
 
         openpype_versions = []
         for dir_to_search in dirs_to_search:
             try:
                 openpype_versions += self.get_openpype_versions(
-                    dir_to_search, staging, compatible_with=compatible_with)
+                    dir_to_search, staging)
             except ValueError:
                 # location is invalid, skip it
                 pass
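Condensed, the search-directory resolution above follows a fixed precedence: explicit argument, then the `OPENPYPE_PATH` environment variable, then the registry entry, otherwise the user data directory. A hypothetical helper showing just that ordering (name, parameters, and defaults are illustrative, not part of the diff):

```python
import os
from pathlib import Path


def resolve_search_dirs(data_dir, openpype_path=None, registry_dir=None):
    """Return search directories in the precedence implied by the diff."""
    if openpype_path:
        return [Path(openpype_path)]
    env_path = os.getenv("OPENPYPE_PATH")
    if env_path and Path(env_path).exists():
        return [Path(env_path)]
    if registry_dir and Path(registry_dir).exists():
        return [Path(registry_dir)]
    # fall back to the user data directory
    return [Path(data_dir)]
```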
@@ -1668,15 +1636,12 @@ class BootstrapRepos:
     def get_openpype_versions(
             self,
             openpype_dir: Path,
-            staging: bool = False,
-            compatible_with: OpenPypeVersion = None) -> list:
+            staging: bool = False) -> list:
         """Get all detected OpenPype versions in directory.
 
         Args:
             openpype_dir (Path): Directory to scan.
             staging (bool, optional): Find staging versions if True.
-            compatible_with (OpenPypeVersion, optional): Get only versions
-                compatible with the one specified.
 
         Returns:
             list of OpenPypeVersion
@@ -1688,12 +1653,18 @@ class BootstrapRepos:
         if not openpype_dir.exists() and not openpype_dir.is_dir():
             raise ValueError(f"specified directory {openpype_dir} is invalid")
 
-        _openpype_versions = []
+        openpype_versions = []
         # iterate over directory in first level and find all that might
         # contain OpenPype.
         for item in openpype_dir.iterdir():
+            # if the item is directory with major.minor version, dive deeper
+            if item.is_dir() and re.match(r"^\d+\.\d+$", item.name):
+                _versions = self.get_openpype_versions(
+                    item, staging=staging)
+                if _versions:
+                    openpype_versions += _versions
 
-            # if file, strip extension, in case of dir not.
+            # if it is file, strip extension, in case of dir don't.
             name = item.name if item.is_dir() else item.stem
             result = OpenPypeVersion.version_in_str(name)
@@ -1711,18 +1682,14 @@ class BootstrapRepos:
             ):
                 continue
 
-            if compatible_with and \
-                    not detected_version.is_compatible(compatible_with):
-                continue
-
             detected_version.path = item
             if staging and detected_version.is_staging():
-                _openpype_versions.append(detected_version)
+                openpype_versions.append(detected_version)
 
             if not staging and not detected_version.is_staging():
-                _openpype_versions.append(detected_version)
+                openpype_versions.append(detected_version)
 
-        return sorted(_openpype_versions)
+        return sorted(openpype_versions)
 
 
 class OpenPypeVersionExists(Exception):

@@ -62,7 +62,7 @@ class InstallThread(QThread):
             progress_callback=self.set_progress, message=self.message)
         local_version = OpenPypeVersion.get_installed_version_str()
 
-        # if user did entered nothing, we install OpenPype from local version.
+        # if user did enter nothing, we install OpenPype from local version.
         # zip content of `repos`, copy it to user data dir and append
         # version to it.
         if not self._path:
@@ -93,6 +93,23 @@ class InstallThread(QThread):
             detected = bs.find_openpype(include_zips=True)
 
+            if detected:
+                if not OpenPypeVersion.get_installed_version().is_compatible(
+                        detected[-1]):
+                    self.message.emit((
+                        f"Latest detected version {detected[-1]} "
+                        "is not compatible with the currently running "
+                        f"{local_version}"
+                    ), True)
+                    self.message.emit((
+                        "Filtering detected versions to compatible ones..."
+                    ), False)
+
+                    detected = [
+                        version for version in detected
+                        if version.is_compatible(
+                            OpenPypeVersion.get_installed_version())
+                    ]
+
             if OpenPypeVersion(
                     version=local_version, path=Path()) < detected[-1]:
                 self.message.emit((

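The filter added above leans on `OpenPypeVersion.is_compatible()`, which, by this reading of the surrounding hunks, treats versions in the same `major.minor` series as compatible. A hedged stand-in making that assumption explicit (the helper below is illustrative, not the real method):

```python
def same_series(installed: str, candidate: str) -> bool:
    """Assumed semantics: compatible means identical major.minor."""
    return installed.split(".")[:2] == candidate.split(".")[:2]


assert same_series("3.14.1", "3.14.9")
assert not same_series("3.14.1", "3.15.0")
```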
@@ -40,18 +40,6 @@ def settings(dev):
     PypeCommands().launch_settings_gui(dev)
 
 
-@main.command()
-def standalonepublisher():
-    """Show Pype Standalone publisher UI."""
-    PypeCommands().launch_standalone_publisher()
-
-
-@main.command()
-def traypublisher():
-    """Show new OpenPype Standalone publisher UI."""
-    PypeCommands().launch_traypublisher()
-
-
 @main.command()
 def tray():
     """Launch pype tray.

@@ -6,6 +6,7 @@ that has project name as a context (e.g. on 'ProjectEntity'?).
 + We will need more specific functions doing very specific queries really fast.
 """
 
+import re
 import collections
 
 import six
@@ -1009,17 +1010,70 @@ def get_representation_by_name(
     return conn.find_one(query_filter, _prepare_fields(fields))
 
 
+def _flatten_dict(data):
+    flatten_queue = collections.deque()
+    flatten_queue.append(data)
+    output = {}
+    while flatten_queue:
+        item = flatten_queue.popleft()
+        for key, value in item.items():
+            if not isinstance(value, dict):
+                output[key] = value
+                continue
+
+            tmp = {}
+            for subkey, subvalue in value.items():
+                new_key = "{}.{}".format(key, subkey)
+                tmp[new_key] = subvalue
+            flatten_queue.append(tmp)
+    return output
+
+
+def _regex_filters(filters):
+    output = []
+    for key, value in filters.items():
+        regexes = []
+        a_values = []
+        if isinstance(value, re.Pattern):
+            regexes.append(value)
+        elif isinstance(value, (list, tuple, set)):
+            for item in value:
+                if isinstance(item, re.Pattern):
+                    regexes.append(item)
+                else:
+                    a_values.append(item)
+        else:
+            a_values.append(value)
+
+        key_filters = []
+        if len(a_values) == 1:
+            key_filters.append({key: a_values[0]})
+        elif a_values:
+            key_filters.append({key: {"$in": a_values}})
+
+        for regex in regexes:
+            key_filters.append({key: {"$regex": regex}})
+
+        if len(key_filters) == 1:
+            output.append(key_filters[0])
+        else:
+            output.append({"$or": key_filters})
+
+    return output
+
+
 def _get_representations(
     project_name,
     representation_ids,
     representation_names,
     version_ids,
-    extensions,
+    context_filters,
     names_by_version_ids,
     standard,
     archived,
     fields
 ):
+    default_output = []
     repre_types = []
     if standard:
         repre_types.append("representation")
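A quick usage sketch of the two helpers added above, on invented inputs; expected values are shown as comments:

```python
import re

context_filters = {"context": {"hierarchy": re.compile("^shots/sq01"),
                               "ext": ["exr", "mov"]}}

flat = _flatten_dict(context_filters)
# -> {"context.hierarchy": re.compile("^shots/sq01"),
#     "context.ext": ["exr", "mov"]}

filters = _regex_filters(flat)
# -> [{"context.hierarchy": {"$regex": re.compile("^shots/sq01")}},
#     {"context.ext": {"$in": ["exr", "mov"]}}]
```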
@@ -1027,7 +1081,7 @@ def _get_representations(
         repre_types.append("archived_representation")
 
     if not repre_types:
-        return []
+        return default_output
 
     if len(repre_types) == 1:
         query_filter = {"type": repre_types[0]}
@@ -1037,25 +1091,21 @@ def _get_representations(
     if representation_ids is not None:
         representation_ids = _convert_ids(representation_ids)
         if not representation_ids:
-            return []
+            return default_output
         query_filter["_id"] = {"$in": representation_ids}
 
     if representation_names is not None:
         if not representation_names:
-            return []
+            return default_output
         query_filter["name"] = {"$in": list(representation_names)}
 
     if version_ids is not None:
         version_ids = _convert_ids(version_ids)
         if not version_ids:
-            return []
+            return default_output
         query_filter["parent"] = {"$in": version_ids}
 
-    if extensions is not None:
-        if not extensions:
-            return []
-        query_filter["context.ext"] = {"$in": list(extensions)}
-
+    or_queries = []
     if names_by_version_ids is not None:
         or_query = []
         for version_id, names in names_by_version_ids.items():
@@ -1065,8 +1115,36 @@ def _get_representations(
                 "name": {"$in": list(names)}
             })
         if not or_query:
-            return []
-        query_filter["$or"] = or_query
+            return default_output
+        or_queries.append(or_query)
+
+    if context_filters is not None:
+        if not context_filters:
+            return default_output
+        _flatten_filters = _flatten_dict(context_filters)
+        flatten_filters = {}
+        for key, value in _flatten_filters.items():
+            if not key.startswith("context"):
+                key = "context.{}".format(key)
+            flatten_filters[key] = value
+
+        for item in _regex_filters(flatten_filters):
+            for key, value in item.items():
+                if key != "$or":
+                    query_filter[key] = value
+
+                elif value:
+                    or_queries.append(value)
+
+    if len(or_queries) == 1:
+        query_filter["$or"] = or_queries[0]
+    elif or_queries:
+        and_query = []
+        for or_query in or_queries:
+            if isinstance(or_query, list):
+                or_query = {"$or": or_query}
+            and_query.append(or_query)
+        query_filter["$and"] = and_query
 
     conn = get_project_connection(project_name)
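The tail of this hunk merges any number of `$or` groups into a single Mongo filter: a lone group goes under `$or` directly, several are each wrapped and joined with `$and`. A runnable miniature of that combination step (filter values invented):

```python
or_queries = [
    [{"parent": 1, "name": {"$in": ["exr"]}}],                   # from names_by_version_ids
    [{"context.ext": "exr"}, {"context.ext": {"$regex": "e.*"}}],  # from regex filters
]
query_filter = {}
if len(or_queries) == 1:
    query_filter["$or"] = or_queries[0]
elif or_queries:
    and_query = []
    for or_query in or_queries:
        if isinstance(or_query, list):
            or_query = {"$or": or_query}
        and_query.append(or_query)
    query_filter["$and"] = and_query
# -> {"$and": [{"$or": [...]}, {"$or": [...]}]}
```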
@@ -1078,7 +1156,7 @@ def get_representations(
     representation_ids=None,
     representation_names=None,
     version_ids=None,
-    extensions=None,
+    context_filters=None,
     names_by_version_ids=None,
     archived=False,
     standard=True,
@@ -1096,8 +1174,8 @@ def get_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter. Filter
             ignored if 'None' is passed.
-        extensions (Iterable[str]): Filter by extension of main representation
-            file (without dot).
+        context_filters (Dict[str, List[str, re.Pattern]]): Filter by
+            representation context fields.
         names_by_version_ids (dict[ObjectId, list[str]]): Complex filtering
             using version ids and list of names under the version.
         archived (bool): Output will also contain archived representations.
@@ -1113,7 +1191,7 @@ def get_representations(
         representation_ids=representation_ids,
         representation_names=representation_names,
         version_ids=version_ids,
-        extensions=extensions,
+        context_filters=context_filters,
         names_by_version_ids=names_by_version_ids,
         standard=True,
         archived=archived,
@@ -1126,7 +1204,7 @@ def get_archived_representations(
     representation_ids=None,
     representation_names=None,
     version_ids=None,
-    extensions=None,
+    context_filters=None,
     names_by_version_ids=None,
     fields=None
 ):
@@ -1142,8 +1220,8 @@ def get_archived_representations(
             as filter. Filter ignored if 'None' is passed.
         version_ids (Iterable[str]): Subset ids used as parent filter. Filter
             ignored if 'None' is passed.
-        extensions (Iterable[str]): Filter by extension of main representation
-            file (without dot).
+        context_filters (Dict[str, List[str, re.Pattern]]): Filter by
+            representation context fields.
         names_by_version_ids (dict[ObjectId, List[str]]): Complex filtering
             using version ids and list of names under the version.
         fields (Iterable[str]): Fields that should be returned. All fields are
@@ -1158,7 +1236,7 @@ def get_archived_representations(
         representation_ids=representation_ids,
         representation_names=representation_names,
         version_ids=version_ids,
-        extensions=extensions,
+        context_filters=context_filters,
         names_by_version_ids=names_by_version_ids,
         standard=False,
         archived=True,
@@ -1181,58 +1259,64 @@ def get_representations_parents(project_name, representations):
         dict[ObjectId, tuple]: Parents by representation id.
     """
 
-    repres_by_version_id = collections.defaultdict(list)
-    versions_by_version_id = {}
-    versions_by_subset_id = collections.defaultdict(list)
-    subsets_by_subset_id = {}
-    subsets_by_asset_id = collections.defaultdict(list)
+    repre_docs_by_version_id = collections.defaultdict(list)
+    version_docs_by_version_id = {}
+    version_docs_by_subset_id = collections.defaultdict(list)
+    subset_docs_by_subset_id = {}
+    subset_docs_by_asset_id = collections.defaultdict(list)
     output = {}
-    for representation in representations:
-        repre_id = representation["_id"]
+    for repre_doc in representations:
+        repre_id = repre_doc["_id"]
+        version_id = repre_doc["parent"]
         output[repre_id] = (None, None, None, None)
-        version_id = representation["parent"]
-        repres_by_version_id[version_id].append(representation)
+        repre_docs_by_version_id[version_id].append(repre_doc)
 
-    versions = get_versions(
-        project_name, version_ids=repres_by_version_id.keys()
+    version_docs = get_versions(
+        project_name,
+        version_ids=repre_docs_by_version_id.keys(),
+        hero=True
     )
-    for version in versions:
-        version_id = version["_id"]
-        subset_id = version["parent"]
-        versions_by_version_id[version_id] = version
-        versions_by_subset_id[subset_id].append(version)
+    for version_doc in version_docs:
+        version_id = version_doc["_id"]
+        subset_id = version_doc["parent"]
+        version_docs_by_version_id[version_id] = version_doc
+        version_docs_by_subset_id[subset_id].append(version_doc)
 
-    subsets = get_subsets(
-        project_name, subset_ids=versions_by_subset_id.keys()
+    subset_docs = get_subsets(
+        project_name, subset_ids=version_docs_by_subset_id.keys()
     )
-    for subset in subsets:
-        subset_id = subset["_id"]
-        asset_id = subset["parent"]
-        subsets_by_subset_id[subset_id] = subset
-        subsets_by_asset_id[asset_id].append(subset)
+    for subset_doc in subset_docs:
+        subset_id = subset_doc["_id"]
+        asset_id = subset_doc["parent"]
+        subset_docs_by_subset_id[subset_id] = subset_doc
+        subset_docs_by_asset_id[asset_id].append(subset_doc)
 
-    assets = get_assets(project_name, asset_ids=subsets_by_asset_id.keys())
-    assets_by_id = {
-        asset["_id"]: asset
-        for asset in assets
+    asset_docs = get_assets(
+        project_name, asset_ids=subset_docs_by_asset_id.keys()
+    )
+    asset_docs_by_id = {
+        asset_doc["_id"]: asset_doc
+        for asset_doc in asset_docs
     }
 
-    project = get_project(project_name)
+    project_doc = get_project(project_name)
 
-    for version_id, representations in repres_by_version_id.items():
-        asset = None
-        subset = None
-        version = versions_by_version_id.get(version_id)
-        if version:
-            subset_id = version["parent"]
-            subset = subsets_by_subset_id.get(subset_id)
-            if subset:
-                asset_id = subset["parent"]
-                asset = assets_by_id.get(asset_id)
+    for version_id, repre_docs in repre_docs_by_version_id.items():
+        asset_doc = None
+        subset_doc = None
+        version_doc = version_docs_by_version_id.get(version_id)
+        if version_doc:
+            subset_id = version_doc["parent"]
+            subset_doc = subset_docs_by_subset_id.get(subset_id)
+            if subset_doc:
+                asset_id = subset_doc["parent"]
+                asset_doc = asset_docs_by_id.get(asset_id)
 
-        for representation in representations:
-            repre_id = representation["_id"]
-            output[repre_id] = (version, subset, asset, project)
+        for repre_doc in repre_docs:
+            repre_id = repre_doc["_id"]
+            output[repre_id] = (
+                version_doc, subset_doc, asset_doc, project_doc
+            )
     return output

@@ -19,8 +19,15 @@ class MissingMethodsError(ValueError):
         joined_missing = ", ".join(
             ['"{}"'.format(item) for item in missing_methods]
         )
+        if isinstance(host, HostBase):
+            host_name = host.name
+        else:
+            try:
+                host_name = host.__file__.replace("\\", "/").split("/")[-3]
+            except Exception:
+                host_name = str(host)
         message = (
-            "Host \"{}\" miss methods {}".format(host.name, joined_missing)
+            "Host \"{}\" miss methods {}".format(host_name, joined_missing)
        )
         super(MissingMethodsError, self).__init__(message)

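For hosts that are plain modules rather than `HostBase` instances, the fallback above derives the host name from the module's file path. A worked example with a made-up path:

```python
# third path component from the end is taken as the host name
file_ = "C:\\repo\\openpype\\hosts\\maya\\api\\__init__.py"
host_name = file_.replace("\\", "/").split("/")[-3]
print(host_name)  # -> "maya"
```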
@@ -11,6 +11,8 @@ class AEWorkfileCreator(AutoCreator):
     identifier = "workfile"
     family = "workfile"
 
+    default_variant = "Main"
+
     def get_instance_attr_defs(self):
         return []
@@ -35,7 +37,6 @@ class AEWorkfileCreator(AutoCreator):
                 existing_instance = instance
                 break
 
-        variant = ''
         project_name = legacy_io.Session["AVALON_PROJECT"]
         asset_name = legacy_io.Session["AVALON_ASSET"]
         task_name = legacy_io.Session["AVALON_TASK"]
@@ -44,15 +45,17 @@ class AEWorkfileCreator(AutoCreator):
         if existing_instance is None:
             asset_doc = get_asset_by_name(project_name, asset_name)
             subset_name = self.get_subset_name(
-                variant, task_name, asset_doc, project_name, host_name
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name
             )
             data = {
                 "asset": asset_name,
                 "task": task_name,
-                "variant": variant
+                "variant": self.default_variant
             }
             data.update(self.get_dynamic_data(
-                variant, task_name, asset_doc, project_name, host_name
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name
             ))
 
             new_instance = CreatedInstance(
@@ -69,7 +72,9 @@ class AEWorkfileCreator(AutoCreator):
         ):
             asset_doc = get_asset_by_name(project_name, asset_name)
             subset_name = self.get_subset_name(
-                variant, task_name, asset_doc, project_name, host_name
+                self.default_variant, task_name, asset_doc,
+                project_name, host_name
             )
             existing_instance["asset"] = asset_name
             existing_instance["task"] = task_name
             existing_instance["subset"] = subset_name

@@ -11,6 +11,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
     label = "Collect After Effects Workfile Instance"
     order = pyblish.api.CollectorOrder + 0.1
 
+    default_variant = "Main"
+
     def process(self, context):
         existing_instance = None
         for instance in context:
@@ -71,7 +73,7 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
         family = "workfile"
         subset = get_subset_name_with_asset_doc(
             family,
-            "",
+            self.default_variant,
             context.data["anatomyData"]["task"]["name"],
             context.data["assetEntity"],
             context.data["anatomyData"]["project"]["name"],

@@ -180,7 +180,7 @@ class ExtractLayout(openpype.api.Extractor):
                 "rotation": {
                     "x": asset.rotation_euler.x,
                     "y": asset.rotation_euler.y,
-                    "z": asset.rotation_euler.z,
+                    "z": asset.rotation_euler.z
                 },
                 "scale": {
                     "x": asset.scale.x,
@@ -189,6 +189,18 @@ class ExtractLayout(openpype.api.Extractor):
                 }
             }
 
+            json_element["transform_matrix"] = []
+
+            for row in list(asset.matrix_world.transposed()):
+                json_element["transform_matrix"].append(list(row))
+
+            json_element["basis"] = [
+                [1, 0, 0, 0],
+                [0, -1, 0, 0],
+                [0, 0, 1, 0],
+                [0, 0, 0, 1]
+            ]
+
             # Extract the animation as well
             if family == "rig":
                 f, n = self._export_animation(

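The hard-coded `basis` added above is a Y-axis flip, presumably bridging Blender's coordinate convention and whatever consumes the layout JSON (the purpose is inferred, not stated in the diff). A quick check with `mathutils`, which ships inside Blender where this extractor runs:

```python
from mathutils import Matrix, Vector

basis = Matrix((
    (1, 0, 0, 0),
    (0, -1, 0, 0),
    (0, 0, 1, 0),
    (0, 0, 0, 1),
))
print(basis @ Vector((0.0, 1.0, 0.0, 1.0)))  # Vector((0.0, -1.0, 0.0, 1.0))
```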
@@ -30,7 +30,8 @@ from .lib import (
     maintained_temp_file_path,
     get_clip_segment,
     get_batch_group_from_desktop,
-    MediaInfoFile
+    MediaInfoFile,
+    TimeEffectMetadata
 )
 from .utils import (
     setup,
@@ -107,6 +108,7 @@ __all__ = [
     "get_clip_segment",
     "get_batch_group_from_desktop",
     "MediaInfoFile",
+    "TimeEffectMetadata",
 
     # pipeline
     "install",

@@ -5,10 +5,11 @@ import json
 import pickle
 import clique
 import tempfile
+import traceback
 import itertools
 import contextlib
 import xml.etree.cElementTree as cET
-from copy import deepcopy
+from copy import deepcopy, copy
 from xml.etree import ElementTree as ET
 from pprint import pformat
 from .constants import (
@@ -266,7 +267,7 @@ def get_current_sequence(selection):
 def rescan_hooks():
     import flame
     try:
-        flame.execute_shortcut('Rescan Python Hooks')
+        flame.execute_shortcut("Rescan Python Hooks")
     except Exception:
         pass
@@ -1082,21 +1083,21 @@ class MediaInfoFile(object):
             xml_data (ET.Element): clip data
         """
         try:
-            for out_track in xml_data.iter('track'):
-                for out_feed in out_track.iter('feed'):
+            for out_track in xml_data.iter("track"):
+                for out_feed in out_track.iter("feed"):
                     # start frame
                     out_feed_nb_ticks_obj = out_feed.find(
-                        'startTimecode/nbTicks')
+                        "startTimecode/nbTicks")
                     self.start_frame = out_feed_nb_ticks_obj.text
 
                     # fps
                     out_feed_fps_obj = out_feed.find(
-                        'startTimecode/rate')
+                        "startTimecode/rate")
                     self.fps = out_feed_fps_obj.text
 
                     # drop frame mode
                     out_feed_drop_mode_obj = out_feed.find(
-                        'startTimecode/dropMode')
+                        "startTimecode/dropMode")
                     self.drop_mode = out_feed_drop_mode_obj.text
                     break
         except Exception as msg:
@@ -1118,8 +1119,153 @@ class MediaInfoFile(object):
             tree = cET.ElementTree(xml_element_data)
             tree.write(
                 fpath, xml_declaration=True,
-                method='xml', encoding='UTF-8'
+                method="xml", encoding="UTF-8"
             )
         except IOError as error:
             raise IOError(
                 "Not able to write data to file: {}".format(error))
+
+
+class TimeEffectMetadata(object):
+    log = log
+    _data = {}
+    _retime_modes = {
+        0: "speed",
+        1: "timewarp",
+        2: "duration"
+    }
+
+    def __init__(self, segment, logger=None):
+        if logger:
+            self.log = logger
+
+        self._data = self._get_metadata(segment)
+
+    @property
+    def data(self):
+        """Returns timewarp effect data
+
+        Returns:
+            dict: retime data
+        """
+        return self._data
+
+    def _get_metadata(self, segment):
+        effects = segment.effects or []
+        for effect in effects:
+            if effect.type == "Timewarp":
+                with maintained_temp_file_path(".timewarp_node") as tmp_path:
+                    self.log.info("Temp File: {}".format(tmp_path))
+                    effect.save_setup(tmp_path)
+                    return self._get_attributes_from_xml(tmp_path)
+
+        return {}
+
+    def _get_attributes_from_xml(self, tmp_path):
+        with open(tmp_path, "r") as tw_setup_file:
+            tw_setup_string = tw_setup_file.read()
+
+        tw_setup_xml = ET.fromstring(tw_setup_string)
+        tw_setup = self._dictify(tw_setup_xml)
+        try:
+            tw_setup_state = tw_setup["Setup"]["State"][0]
+            mode = int(
+                tw_setup_state["TW_RetimerMode"][0]["_text"]
+            )
+            r_data = {
+                "type": self._retime_modes[mode],
+                "effectStart": int(
+                    tw_setup["Setup"]["Base"][0]["Range"][0]["Start"]),
+                "effectEnd": int(
+                    tw_setup["Setup"]["Base"][0]["Range"][0]["End"])
+            }
+
+            if mode == 0:  # speed
+                r_data[self._retime_modes[mode]] = float(
+                    tw_setup_state["TW_Speed"]
+                    [0]["Channel"][0]["Value"][0]["_text"]
+                ) / 100
+            elif mode == 1:  # timewarp
+                r_data[self._retime_modes[mode]] = self._get_anim_keys(
+                    tw_setup_state["TW_Timing"]
+                )
+            elif mode == 2:  # duration
+                r_data[self._retime_modes[mode]] = {
+                    "start": {
+                        "source": int(
+                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
+                            [0]["KFrames"][0]["Key"][0]["Value"][0]["_text"]
+                        ),
+                        "timeline": int(
+                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
+                            [0]["KFrames"][0]["Key"][0]["Frame"][0]["_text"]
+                        )
+                    },
+                    "end": {
+                        "source": int(
+                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
+                            [0]["KFrames"][0]["Key"][1]["Value"][0]["_text"]
+                        ),
+                        "timeline": int(
+                            tw_setup_state["TW_DurationTiming"][0]["Channel"]
+                            [0]["KFrames"][0]["Key"][1]["Frame"][0]["_text"]
+                        )
+                    }
+                }
+        except Exception:
+            lines = traceback.format_exception(*sys.exc_info())
+            self.log.error("\n".join(lines))
+            return
+
+        return r_data
+
+    def _get_anim_keys(self, setup_cat, index=None):
+        return_data = {
+            "extrapolation": (
+                setup_cat[0]["Channel"][0]["Extrap"][0]["_text"]
+            ),
+            "animKeys": []
+        }
+        for key in setup_cat[0]["Channel"][0]["KFrames"][0]["Key"]:
+            if index and int(key["Index"]) != index:
+                continue
+            key_data = {
+                "source": float(key["Value"][0]["_text"]),
+                "timeline": float(key["Frame"][0]["_text"]),
+                "index": int(key["Index"]),
+                "curveMode": key["CurveMode"][0]["_text"],
+                "curveOrder": key["CurveOrder"][0]["_text"]
+            }
+            if key.get("TangentMode"):
+                key_data["tangentMode"] = key["TangentMode"][0]["_text"]
+
+            return_data["animKeys"].append(key_data)
+
+        return return_data
+
+    def _dictify(self, xml_, root=True):
+        """Convert xml object to dictionary
+
+        Args:
+            xml_ (xml.etree.ElementTree.Element): xml data
+            root (bool, optional): is root available. Defaults to True.
+
+        Returns:
+            dict: dictionarized xml
+        """
+        if root:
+            return {xml_.tag: self._dictify(xml_, False)}
+
+        d = copy(xml_.attrib)
+        if xml_.text:
+            d["_text"] = xml_.text
+
+        for x in xml_.findall("./*"):
+            if x.tag not in d:
+                d[x.tag] = []
+            d[x.tag].append(self._dictify(x, False))
+        return d

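To make `_dictify()` concrete, here is a standalone copy run on a hand-written snippet shaped loosely like a Timewarp setup (real Flame setup files are far larger; this input is invented):

```python
from copy import copy
from xml.etree import ElementTree as ET


def dictify(xml_, root=True):
    # standalone duplicate of TimeEffectMetadata._dictify for demonstration
    if root:
        return {xml_.tag: dictify(xml_, False)}
    d = copy(xml_.attrib)
    if xml_.text:
        d["_text"] = xml_.text
    for x in xml_.findall("./*"):
        if x.tag not in d:
            d[x.tag] = []
        d[x.tag].append(dictify(x, False))
    return d


xml_ = ET.fromstring(
    "<Setup><State><TW_RetimerMode>0</TW_RetimerMode></State></Setup>"
)
print(dictify(xml_))
# {'Setup': {'State': [{'TW_RetimerMode': [{'_text': '0'}]}]}}
```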
@@ -275,7 +275,7 @@ def create_otio_reference(clip_data, fps=None):
 
 
 def create_otio_clip(clip_data):
-    from openpype.hosts.flame.api import MediaInfoFile
+    from openpype.hosts.flame.api import MediaInfoFile, TimeEffectMetadata
 
     segment = clip_data["PySegment"]
 
@@ -284,14 +284,31 @@ def create_otio_clip(clip_data):
     media_timecode_start = media_info.start_frame
     media_fps = media_info.fps
 
+    # Timewarp metadata
+    tw_data = TimeEffectMetadata(segment, logger=log).data
+    log.debug("__ tw_data: {}".format(tw_data))
+
     # define first frame
-    first_frame = media_timecode_start or utils.get_frame_from_filename(
-        clip_data["fpath"]) or 0
+    file_first_frame = utils.get_frame_from_filename(
+        clip_data["fpath"])
+    if file_first_frame:
+        file_first_frame = int(file_first_frame)
+
+    first_frame = media_timecode_start or file_first_frame or 0
 
     _clip_source_in = int(clip_data["source_in"])
     _clip_source_out = int(clip_data["source_out"])
+    _clip_record_in = clip_data["record_in"]
+    _clip_record_out = clip_data["record_out"]
     _clip_record_duration = int(clip_data["record_duration"])
 
+    log.debug("_ file_first_frame: {}".format(file_first_frame))
+    log.debug("_ first_frame: {}".format(first_frame))
+    log.debug("_ _clip_source_in: {}".format(_clip_source_in))
+    log.debug("_ _clip_source_out: {}".format(_clip_source_out))
+    log.debug("_ _clip_record_in: {}".format(_clip_record_in))
+    log.debug("_ _clip_record_out: {}".format(_clip_record_out))
+
     # first solve if the reverse timing
     speed = 1
     if clip_data["source_in"] > clip_data["source_out"]:
@@ -302,16 +319,28 @@ def create_otio_clip(clip_data):
     source_in = _clip_source_in - int(first_frame)
     source_out = _clip_source_out - int(first_frame)
 
+    log.debug("_ source_in: {}".format(source_in))
+    log.debug("_ source_out: {}".format(source_out))
+
+    if file_first_frame:
+        log.debug("_ file_source_in: {}".format(
+            file_first_frame + source_in))
+        log.debug("_ file_source_out: {}".format(
+            file_first_frame + source_out))
+
     source_duration = (source_out - source_in + 1)
 
     # secondly check if any change of speed
     if source_duration != _clip_record_duration:
         retime_speed = float(source_duration) / float(_clip_record_duration)
-        log.debug("_ retime_speed: {}".format(retime_speed))
+        log.debug("_ calculated speed: {}".format(retime_speed))
         speed *= retime_speed
 
-    log.debug("_ source_in: {}".format(source_in))
-    log.debug("_ source_out: {}".format(source_out))
+    # get speed from metadata if available
+    if tw_data.get("speed"):
+        speed = tw_data["speed"]
+        log.debug("_ metadata speed: {}".format(speed))
 
+    log.debug("_ speed: {}".format(speed))
     log.debug("_ source_duration: {}".format(source_duration))
     log.debug("_ _clip_record_duration: {}".format(_clip_record_duration))

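Worked numbers for the speed derivation above (values invented): 50 source frames squeezed into 25 timeline frames give a 2x retime.

```python
source_in, source_out = 1001, 1050           # 50 source frames
_clip_record_duration = 25                   # 25 frames on the timeline
source_duration = source_out - source_in + 1
speed = float(source_duration) / float(_clip_record_duration)
print(speed)  # 2.0 -> media is read twice as fast as the timeline plays
```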
@@ -8,6 +8,9 @@ import pyblish.api
 import openpype.api
 from openpype.hosts.flame import api as opfapi
 from openpype.hosts.flame.api import MediaInfoFile
+from openpype.pipeline.editorial import (
+    get_media_range_with_retimes
+)
 
 import flame
@@ -47,7 +50,6 @@ class ExtractSubsetResources(openpype.api.Extractor):
     export_presets_mapping = {}
 
     def process(self, instance):
-
         if not self.keep_original_representation:
             # remove previous representation if not needed
             instance.data["representations"] = []
@@ -67,18 +69,60 @@ class ExtractSubsetResources(openpype.api.Extractor):
         # get media source first frame
         source_first_frame = instance.data["sourceFirstFrame"]
 
+        self.log.debug("_ frame_start: {}".format(frame_start))
+        self.log.debug("_ source_first_frame: {}".format(source_first_frame))
+
+        # get timeline in/out of segment
+        clip_in = instance.data["clipIn"]
+        clip_out = instance.data["clipOut"]
+
+        # get retimed attributes
+        retimed_data = self._get_retimed_attributes(instance)
+
+        # get individual keys
+        r_handle_start = retimed_data["handle_start"]
+        r_handle_end = retimed_data["handle_end"]
+        r_source_dur = retimed_data["source_duration"]
+        r_speed = retimed_data["speed"]
+
         # get handles value - take only the max from both
         handle_start = instance.data["handleStart"]
-        handle_end = instance.data["handleStart"]
+        handle_end = instance.data["handleEnd"]
         handles = max(handle_start, handle_end)
+        include_handles = instance.data.get("includeHandles")
 
         # get media source range with handles
         source_start_handles = instance.data["sourceStartH"]
         source_end_handles = instance.data["sourceEndH"]
+        # retime if needed
+        if r_speed != 1.0:
+            source_start_handles = (
+                instance.data["sourceStart"] - r_handle_start)
+            source_end_handles = (
+                source_start_handles
+                + (r_source_dur - 1)
+                + r_handle_start
+                + r_handle_end
+            )
+
+        # get frame range with handles for representation range
+        frame_start_handle = frame_start - handle_start
+        repre_frame_start = frame_start_handle
+        if include_handles:
+            if r_speed == 1.0:
+                frame_start_handle = frame_start
+            else:
+                frame_start_handle = (
+                    frame_start - handle_start) + r_handle_start
+
+        self.log.debug("_ frame_start_handle: {}".format(
+            frame_start_handle))
+        self.log.debug("_ repre_frame_start: {}".format(
+            repre_frame_start))
+
+        # calculate duration with handles
+        source_duration_handles = (
+            source_end_handles - source_start_handles) + 1
 
         # create staging dir path
         staging_dir = self.staging_dir(instance)
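Illustrative numbers for the retimed source range above (all values invented):

```python
source_start = 1001
r_handle_start, r_handle_end = 10, 10
r_source_dur = 50

source_start_handles = source_start - r_handle_start      # 991
source_end_handles = (
    source_start_handles
    + (r_source_dur - 1)
    + r_handle_start
    + r_handle_end
)                                                          # 1060
source_duration_handles = (
    source_end_handles - source_start_handles) + 1         # 70
```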
@@ -93,6 +137,28 @@ class ExtractSubsetResources(openpype.api.Extractor):
         }
         export_presets.update(self.export_presets_mapping)
 
+        if not instance.data.get("versionData"):
+            instance.data["versionData"] = {}
+
+        # set versiondata if any retime
+        version_data = retimed_data.get("version_data")
+        self.log.debug("_ version_data: {}".format(version_data))
+
+        if version_data:
+            instance.data["versionData"].update(version_data)
+
+        if r_speed != 1.0:
+            instance.data["versionData"].update({
+                "frameStart": frame_start_handle,
+                "frameEnd": (
+                    (frame_start_handle + source_duration_handles - 1)
+                    - (r_handle_start + r_handle_end)
+                )
+            })
+        self.log.debug("_ i_version_data: {}".format(
+            instance.data["versionData"]
+        ))
+
         # loop all preset names and
         for unique_name, preset_config in export_presets.items():
             modify_xml_data = {}
@@ -115,20 +181,10 @@ class ExtractSubsetResources(openpype.api.Extractor):
                 )
             )
 
-            # get frame range with handles for representation range
-            frame_start_handle = frame_start - handle_start
-
-            # calculate duration with handles
-            source_duration_handles = (
-                source_end_handles - source_start_handles)
-
-            # define in/out marks
-            in_mark = (source_start_handles - source_first_frame) + 1
-            out_mark = in_mark + source_duration_handles
-
             exporting_clip = None
             name_patern_xml = "<name>_{}.".format(
                 unique_name)
+
             if export_type == "Sequence Publish":
                 # change export clip to sequence
                 exporting_clip = flame.duplicate(sequence_clip)
@@ -142,19 +198,25 @@ class ExtractSubsetResources(openpype.api.Extractor):
                     "<segment name>_<shot name>_{}.").format(
                         unique_name)
 
+                # change in/out marks to timeline in/out
                 # only for h264 with baked retime
                 in_mark = clip_in
-                out_mark = clip_out
+                out_mark = clip_out + 1
+                modify_xml_data.update({
+                    "exportHandles": True,
+                    "nbHandles": handles
+                })
             else:
+                in_mark = (source_start_handles - source_first_frame) + 1
+                out_mark = in_mark + source_duration_handles
                 exporting_clip = self.import_clip(clip_path)
                 exporting_clip.name.set_value("{}_{}".format(
                     asset_name, segment_name))
 
             # add xml tags modifications
             modify_xml_data.update({
                 "exportHandles": True,
                 "nbHandles": handles,
-                "startFrame": frame_start,
+                # enum position low start from 0
+                "frameIndex": 0,
+                "startFrame": repre_frame_start,
                 "namePattern": name_patern_xml
             })
@@ -162,6 +224,9 @@ class ExtractSubsetResources(openpype.api.Extractor):
             # add any xml overrides collected from segment.comment
             modify_xml_data.update(instance.data["xml_overrides"])
 
+            self.log.debug("_ in_mark: {}".format(in_mark))
+            self.log.debug("_ out_mark: {}".format(out_mark))
+
             export_kwargs = {}
             # validate xml preset file is filled
             if preset_file == "":
@@ -196,9 +261,8 @@ class ExtractSubsetResources(openpype.api.Extractor):
        "namePattern": "__thumbnail"
    })
    thumb_frame_number = int(in_mark + (
        source_duration_handles / 2))
        (out_mark - in_mark + 1) / 2))

    self.log.debug("__ in_mark: {}".format(in_mark))
    self.log.debug("__ thumb_frame_number: {}".format(
        thumb_frame_number
    ))

@@ -210,9 +274,6 @@ class ExtractSubsetResources(openpype.api.Extractor):
        "out_mark": out_mark
    })

    self.log.debug("__ modify_xml_data: {}".format(
        pformat(modify_xml_data)
    ))
    preset_path = opfapi.modify_preset_file(
        preset_orig_xml_path, staging_dir, modify_xml_data)

@@ -281,9 +342,9 @@ class ExtractSubsetResources(openpype.api.Extractor):
# add frame range
if preset_config["representation_add_range"]:
    representation_data.update({
        "frameStart": frame_start_handle,
        "frameStart": repre_frame_start,
        "frameEnd": (
            frame_start_handle + source_duration_handles),
            repre_frame_start + source_duration_handles) - 1,
        "fps": instance.data["fps"]
    })

@@ -300,8 +361,32 @@ class ExtractSubsetResources(openpype.api.Extractor):
    # at the end remove the duplicated clip
    flame.delete(exporting_clip)

    self.log.debug("All representations: {}".format(
        pformat(instance.data["representations"])))

def _get_retimed_attributes(self, instance):
    handle_start = instance.data["handleStart"]
    handle_end = instance.data["handleEnd"]

    # get basic variables
    otio_clip = instance.data["otioClip"]

    # get available range trimmed with processed retimes
    retimed_attributes = get_media_range_with_retimes(
        otio_clip, handle_start, handle_end)
    self.log.debug(
        ">> retimed_attributes: {}".format(retimed_attributes))

    r_media_in = int(retimed_attributes["mediaIn"])
    r_media_out = int(retimed_attributes["mediaOut"])
    version_data = retimed_attributes.get("versionData")

    return {
        "version_data": version_data,
        "handle_start": int(retimed_attributes["handleStart"]),
        "handle_end": int(retimed_attributes["handleEnd"]),
        "source_duration": (
            (r_media_out - r_media_in) + 1
        ),
        "speed": float(retimed_attributes["speed"])
    }

def _should_skip(self, preset_config, clip_path, unique_name):
    # get activating attributes

@@ -313,8 +398,6 @@ class ExtractSubsetResources(openpype.api.Extractor):
        unique_name, activated_preset, filter_path_regex
    )
)
self.log.debug(
    "__ clip_path: `{}`".format(clip_path))

# skip if not activated presete
if not activated_preset:

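A minimal sketch of the retimed frame-range arithmetic used in the hunks above, with assumed input values (illustrative only, not taken from a real publish):

    # assumed inputs
    frame_start = 1001
    handle_start = 10
    r_handle_start = 5            # retimed handle from get_media_range_with_retimes
    r_handle_end = 5
    source_duration_handles = 60  # retimed source length including handles

    # retimed start: shift the original start by the handle difference
    frame_start_handle = (frame_start - handle_start) + r_handle_start   # 996

    # end of the range written to versionData
    frame_end = (
        (frame_start_handle + source_duration_handles - 1)
        - (r_handle_start + r_handle_end))                               # 1045
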
@@ -1,27 +1,6 @@
import os
from .module import OpenPypeMaya


def add_implementation_envs(env, _app):
    # Add requirements to PYTHONPATH
    pype_root = os.environ["OPENPYPE_REPOS_ROOT"]
    new_python_paths = [
        os.path.join(pype_root, "openpype", "hosts", "maya", "startup")
    ]
    old_python_path = env.get("PYTHONPATH") or ""
    for path in old_python_path.split(os.pathsep):
        if not path:
            continue

        norm_path = os.path.normpath(path)
        if norm_path not in new_python_paths:
            new_python_paths.append(norm_path)

    env["PYTHONPATH"] = os.pathsep.join(new_python_paths)

    # Set default values if are not already set via settings
    defaults = {
        "OPENPYPE_LOG_NO_COLORS": "Yes"
    }
    for key, value in defaults.items():
        if not env.get(key):
            env[key] = value
__all__ = (
    "OpenPypeMaya",
)

@@ -60,8 +60,7 @@ class RenderSettings(object):
try:
    aov_separator = self._aov_chars[(
        self._project_settings["maya"]
        ["create"]
        ["CreateRender"]
        ["RenderSettings"]
        ["aov_separator"]
    )]
except KeyError:

253 openpype/hosts/maya/api/lib_template_builder.py Normal file

@@ -0,0 +1,253 @@
import json
from collections import OrderedDict
import maya.cmds as cmds

import qargparse
from openpype.tools.utils.widgets import OptionDialog
from .lib import get_main_window, imprint

# To change as enum
build_types = ["context_asset", "linked_asset", "all_assets"]


def get_placeholder_attributes(node):
    return {
        attr: cmds.getAttr("{}.{}".format(node, attr))
        for attr in cmds.listAttr(node, userDefined=True)}


def delete_placeholder_attributes(node):
    '''
    function to delete all extra placeholder attributes
    '''
    extra_attributes = get_placeholder_attributes(node)
    for attribute in extra_attributes:
        cmds.deleteAttr(node + '.' + attribute)


def create_placeholder():
    args = placeholder_window()

    if not args:
        return  # operation canceled, no locator created

    # custom arg parse to force empty data query
    # and still imprint them on placeholder
    # and getting items when arg is of type Enumerator
    options = create_options(args)

    # create placeholder name dynamically from args and options
    placeholder_name = create_placeholder_name(args, options)

    selection = cmds.ls(selection=True)
    if not selection:
        raise ValueError("Nothing is selected")

    placeholder = cmds.spaceLocator(name=placeholder_name)[0]

    # get the long name of the placeholder (with the groups)
    placeholder_full_name = cmds.ls(selection[0], long=True)[
        0] + '|' + placeholder.replace('|', '')

    if selection:
        cmds.parent(placeholder, selection[0])

    imprint(placeholder_full_name, options)

    # Some tweaks because imprint force enums to to default value so we get
    # back arg read and force them to attributes
    imprint_enum(placeholder_full_name, args)

    # Add helper attributes to keep placeholder info
    cmds.addAttr(
        placeholder_full_name,
        longName="parent",
        hidden=True,
        dataType="string"
    )
    cmds.addAttr(
        placeholder_full_name,
        longName="index",
        hidden=True,
        attributeType="short",
        defaultValue=-1
    )

    cmds.setAttr(placeholder_full_name + '.parent', "", type="string")


def create_placeholder_name(args, options):
    placeholder_builder_type = [
        arg.read() for arg in args if 'builder_type' in str(arg)
    ][0]
    placeholder_family = options['family']
    placeholder_name = placeholder_builder_type.split('_')

    # add famlily in any
    if placeholder_family:
        placeholder_name.insert(1, placeholder_family)

    # add loader arguments if any
    if options['loader_args']:
        pos = 2
        loader_args = options['loader_args'].replace('\'', '\"')
        loader_args = json.loads(loader_args)
        values = [v for v in loader_args.values()]
        for i in range(len(values)):
            placeholder_name.insert(i + pos, values[i])

    placeholder_name = '_'.join(placeholder_name)

    return placeholder_name.capitalize()


def update_placeholder():
    placeholder = cmds.ls(selection=True)
    if len(placeholder) == 0:
        raise ValueError("No node selected")
    if len(placeholder) > 1:
        raise ValueError("Too many selected nodes")
    placeholder = placeholder[0]

    args = placeholder_window(get_placeholder_attributes(placeholder))

    if not args:
        return  # operation canceled

    # delete placeholder attributes
    delete_placeholder_attributes(placeholder)

    options = create_options(args)

    imprint(placeholder, options)
    imprint_enum(placeholder, args)

    cmds.addAttr(
        placeholder,
        longName="parent",
        hidden=True,
        dataType="string"
    )
    cmds.addAttr(
        placeholder,
        longName="index",
        hidden=True,
        attributeType="short",
        defaultValue=-1
    )

    cmds.setAttr(placeholder + '.parent', '', type="string")


def create_options(args):
    options = OrderedDict()
    for arg in args:
        if not type(arg) == qargparse.Separator:
            options[str(arg)] = arg._data.get("items") or arg.read()
    return options


def imprint_enum(placeholder, args):
    """
    Imprint method doesn't act properly with enums.
    Replacing the functionnality with this for now
    """
    enum_values = {str(arg): arg.read()
                   for arg in args if arg._data.get("items")}
    string_to_value_enum_table = {
        build: i for i, build
        in enumerate(build_types)}
    for key, value in enum_values.items():
        cmds.setAttr(
            placeholder + "." + key,
            string_to_value_enum_table[value])


def placeholder_window(options=None):
    options = options or dict()
    dialog = OptionDialog(parent=get_main_window())
    dialog.setWindowTitle("Create Placeholder")

    args = [
        qargparse.Separator("Main attributes"),
        qargparse.Enum(
            "builder_type",
            label="Asset Builder Type",
            default=options.get("builder_type", 0),
            items=build_types,
            help="""Asset Builder Type
Builder type describe what template loader will look for.
context_asset : Template loader will look for subsets of
current context asset (Asset bob will find asset)
linked_asset : Template loader will look for assets linked
to current context asset.
Linked asset are looked in avalon database under field "inputLinks"
"""
        ),
        qargparse.String(
            "family",
            default=options.get("family", ""),
            label="OpenPype Family",
            placeholder="ex: model, look ..."),
        qargparse.String(
            "representation",
            default=options.get("representation", ""),
            label="OpenPype Representation",
            placeholder="ex: ma, abc ..."),
        qargparse.String(
            "loader",
            default=options.get("loader", ""),
            label="Loader",
            placeholder="ex: ReferenceLoader, LightLoader ...",
            help="""Loader
Defines what openpype loader will be used to load assets.
Useable loader depends on current host's loader list.
Field is case sensitive.
"""),
        qargparse.String(
            "loader_args",
            default=options.get("loader_args", ""),
            label="Loader Arguments",
            placeholder='ex: {"camera":"persp", "lights":True}',
            help="""Loader
Defines a dictionnary of arguments used to load assets.
Useable arguments depend on current placeholder Loader.
Field should be a valid python dict. Anything else will be ignored.
"""),
        qargparse.Integer(
            "order",
            default=options.get("order", 0),
            min=0,
            max=999,
            label="Order",
            placeholder="ex: 0, 100 ... (smallest order loaded first)",
            help="""Order
Order defines asset loading priority (0 to 999)
Priority rule is : "lowest is first to load"."""),
        qargparse.Separator(
            "Optional attributes"),
        qargparse.String(
            "asset",
            default=options.get("asset", ""),
            label="Asset filter",
            placeholder="regex filtering by asset name",
            help="Filtering assets by matching field regex to asset's name"),
        qargparse.String(
            "subset",
            default=options.get("subset", ""),
            label="Subset filter",
            placeholder="regex filtering by subset name",
            help="Filtering assets by matching field regex to subset's name"),
        qargparse.String(
            "hierarchy",
            default=options.get("hierarchy", ""),
            label="Hierarchy filter",
            placeholder="regex filtering by asset's hierarchy",
            help="Filtering assets by matching field asset's hierarchy")
    ]
    dialog.create(args)

    if not dialog.exec_():
        return None

    return args

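A minimal usage sketch for the placeholder helpers defined above, as they might be called from the Maya script editor; the import path is assumed from the file location in this commit:

    from openpype.hosts.maya.api.lib_template_builder import (
        create_placeholder, update_placeholder)

    # opens the dialog and imprints the chosen options on a new locator
    # parented under the current selection
    create_placeholder()

    # re-opens the dialog pre-filled from the selected placeholder's attributes
    update_placeholder()
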
@@ -9,10 +9,15 @@ import maya.cmds as cmds
from openpype.settings import get_project_settings
from openpype.pipeline import legacy_io
from openpype.pipeline.workfile import BuildWorkfile
from openpype.pipeline.workfile.build_template import (
    build_workfile_template,
    update_workfile_template
)
from openpype.tools.utils import host_tools
from openpype.hosts.maya.api import lib, lib_rendersettings
from .lib import get_main_window, IS_HEADLESS
from .commands import reset_frame_range
from .lib_template_builder import create_placeholder, update_placeholder


log = logging.getLogger(__name__)

@@ -147,6 +152,34 @@ def install():
        parent_widget
    )
)

builder_menu = cmds.menuItem(
    "Template Builder",
    subMenu=True,
    tearOff=True,
    parent=MENU_NAME
)
cmds.menuItem(
    "Create Placeholder",
    parent=builder_menu,
    command=lambda *args: create_placeholder()
)
cmds.menuItem(
    "Update Placeholder",
    parent=builder_menu,
    command=lambda *args: update_placeholder()
)
cmds.menuItem(
    "Build Workfile from template",
    parent=builder_menu,
    command=build_workfile_template
)
cmds.menuItem(
    "Update Workfile from template",
    parent=builder_menu,
    command=update_workfile_template
)

cmds.setParent(MENU_NAME, menu=True)

def add_scripts_menu():

252 openpype/hosts/maya/api/template_loader.py Normal file

@@ -0,0 +1,252 @@
import re
from maya import cmds

from openpype.client import get_representations
from openpype.pipeline import legacy_io
from openpype.pipeline.workfile.abstract_template_loader import (
    AbstractPlaceholder,
    AbstractTemplateLoader
)
from openpype.pipeline.workfile.build_template_exceptions import (
    TemplateAlreadyImported
)

PLACEHOLDER_SET = 'PLACEHOLDERS_SET'


class MayaTemplateLoader(AbstractTemplateLoader):
    """Concrete implementation of AbstractTemplateLoader for maya
    """

    def import_template(self, path):
        """Import template into current scene.
        Block if a template is already loaded.
        Args:
            path (str): A path to current template (usually given by
            get_template_path implementation)
        Returns:
            bool: Wether the template was succesfully imported or not
        """
        if cmds.objExists(PLACEHOLDER_SET):
            raise TemplateAlreadyImported(
                "Build template already loaded\n"
                "Clean scene if needed (File > New Scene)")

        cmds.sets(name=PLACEHOLDER_SET, empty=True)
        self.new_nodes = cmds.file(path, i=True, returnNewNodes=True)
        cmds.setAttr(PLACEHOLDER_SET + '.hiddenInOutliner', True)

        for set in cmds.listSets(allSets=True):
            if (cmds.objExists(set) and
                    cmds.attributeQuery('id', node=set, exists=True) and
                    cmds.getAttr(set + '.id') == 'pyblish.avalon.instance'):
                if cmds.attributeQuery('asset', node=set, exists=True):
                    cmds.setAttr(
                        set + '.asset',
                        legacy_io.Session['AVALON_ASSET'], type='string'
                    )

        return True

    def template_already_imported(self, err_msg):
        clearButton = "Clear scene and build"
        updateButton = "Update template"
        abortButton = "Abort"

        title = "Scene already builded"
        message = (
            "It's seems a template was already build for this scene.\n"
            "Error message reveived :\n\n\"{}\"".format(err_msg))
        buttons = [clearButton, updateButton, abortButton]
        defaultButton = clearButton
        cancelButton = abortButton
        dismissString = abortButton
        answer = cmds.confirmDialog(
            t=title,
            m=message,
            b=buttons,
            db=defaultButton,
            cb=cancelButton,
            ds=dismissString)

        if answer == clearButton:
            cmds.file(newFile=True, force=True)
            self.import_template(self.template_path)
            self.populate_template()
        elif answer == updateButton:
            self.update_missing_containers()
        elif answer == abortButton:
            return

    @staticmethod
    def get_template_nodes():
        attributes = cmds.ls('*.builder_type', long=True)
        return [attribute.rpartition('.')[0] for attribute in attributes]

    def get_loaded_containers_by_id(self):
        try:
            containers = cmds.sets("AVALON_CONTAINERS", q=True)
        except ValueError:
            return None

        return [
            cmds.getAttr(container + '.representation')
            for container in containers]


class MayaPlaceholder(AbstractPlaceholder):
    """Concrete implementation of AbstractPlaceholder for maya
    """

    optional_keys = {'asset', 'subset', 'hierarchy'}

    def get_data(self, node):
        user_data = dict()
        for attr in self.required_keys.union(self.optional_keys):
            attribute_name = '{}.{}'.format(node, attr)
            if not cmds.attributeQuery(attr, node=node, exists=True):
                print("{} not found".format(attribute_name))
                continue
            user_data[attr] = cmds.getAttr(
                attribute_name,
                asString=True)
        user_data['parent'] = (
            cmds.getAttr(node + '.parent', asString=True)
            or node.rpartition('|')[0]
            or ""
        )
        user_data['node'] = node
        if user_data['parent']:
            siblings = cmds.listRelatives(user_data['parent'], children=True)
        else:
            siblings = cmds.ls(assemblies=True)
        node_shortname = user_data['node'].rpartition('|')[2]
        current_index = cmds.getAttr(node + '.index', asString=True)
        user_data['index'] = (
            current_index if current_index >= 0
            else siblings.index(node_shortname))

        self.data = user_data

    def parent_in_hierarchy(self, containers):
        """Parent loaded container to placeholder's parent
        ie : Set loaded content as placeholder's sibling
        Args:
            containers (String): Placeholder loaded containers
        """
        if not containers:
            return

        roots = cmds.sets(containers, q=True)
        nodes_to_parent = []
        for root in roots:
            if root.endswith("_RN"):
                refRoot = cmds.referenceQuery(root, n=True)[0]
                refRoot = cmds.listRelatives(refRoot, parent=True) or [refRoot]
                nodes_to_parent.extend(refRoot)
            elif root in cmds.listSets(allSets=True):
                if not cmds.sets(root, q=True):
                    return
                else:
                    continue
            else:
                nodes_to_parent.append(root)

        if self.data['parent']:
            cmds.parent(nodes_to_parent, self.data['parent'])
        # Move loaded nodes to correct index in outliner hierarchy
        placeholder_node = self.data['node']
        placeholder_form = cmds.xform(
            placeholder_node,
            q=True,
            matrix=True,
            worldSpace=True
        )
        for node in set(nodes_to_parent):
            cmds.reorder(node, front=True)
            cmds.reorder(node, relative=self.data['index'])
            cmds.xform(node, matrix=placeholder_form, ws=True)

        holding_sets = cmds.listSets(object=placeholder_node)
        if not holding_sets:
            return
        for holding_set in holding_sets:
            cmds.sets(roots, forceElement=holding_set)

    def clean(self):
        """Hide placeholder, parent them to root
        add them to placeholder set and register placeholder's parent
        to keep placeholder info available for future use
        """
        node = self.data['node']
        if self.data['parent']:
            cmds.setAttr(node + '.parent', self.data['parent'], type='string')
        if cmds.getAttr(node + '.index') < 0:
            cmds.setAttr(node + '.index', self.data['index'])

        holding_sets = cmds.listSets(object=node)
        if holding_sets:
            for set in holding_sets:
                cmds.sets(node, remove=set)

        if cmds.listRelatives(node, p=True):
            node = cmds.parent(node, world=True)[0]
        cmds.sets(node, addElement=PLACEHOLDER_SET)
        cmds.hide(node)
        cmds.setAttr(node + '.hiddenInOutliner', True)

    def get_representations(self, current_asset_doc, linked_asset_docs):
        project_name = legacy_io.active_project()

        builder_type = self.data["builder_type"]
        if builder_type == "context_asset":
            context_filters = {
                "asset": [current_asset_doc["name"]],
                "subset": [re.compile(self.data["subset"])],
                "hierarchy": [re.compile(self.data["hierarchy"])],
                "representations": [self.data["representation"]],
                "family": [self.data["family"]]
            }

        elif builder_type != "linked_asset":
            context_filters = {
                "asset": [re.compile(self.data["asset"])],
                "subset": [re.compile(self.data["subset"])],
                "hierarchy": [re.compile(self.data["hierarchy"])],
                "representation": [self.data["representation"]],
                "family": [self.data["family"]]
            }

        else:
            asset_regex = re.compile(self.data["asset"])
            linked_asset_names = []
            for asset_doc in linked_asset_docs:
                asset_name = asset_doc["name"]
                if asset_regex.match(asset_name):
                    linked_asset_names.append(asset_name)

            context_filters = {
                "asset": linked_asset_names,
                "subset": [re.compile(self.data["subset"])],
                "hierarchy": [re.compile(self.data["hierarchy"])],
                "representation": [self.data["representation"]],
                "family": [self.data["family"]],
            }

        return list(get_representations(
            project_name,
            context_filters=context_filters
        ))

    def err_message(self):
        return (
            "Error while trying to load a representation.\n"
            "Either the subset wasn't published or the template is malformed."
            "\n\n"
            "Builder was looking for :\n{attributes}".format(
                attributes="\n".join([
                    "{}: {}".format(key.title(), value)
                    for key, value in self.data.items()]
                )
            )
        )

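A sketch of how a placeholder's imprinted data becomes a representation query in get_representations above; the project name and filter values are assumed for illustration:

    import re
    from openpype.client import get_representations

    data = {"asset": "char_.*", "subset": ".*", "hierarchy": ".*",
            "representation": "ma", "family": "model"}
    context_filters = {
        "asset": [re.compile(data["asset"])],
        "subset": [re.compile(data["subset"])],
        "hierarchy": [re.compile(data["hierarchy"])],
        "representation": [data["representation"]],
        "family": [data["family"]],
    }
    repre_docs = list(get_representations(
        "my_project", context_filters=context_filters))
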
@@ -2,11 +2,9 @@
import os
from maya import cmds

from openpype.pipeline import HOST_WORKFILE_EXTENSIONS


def file_extensions():
    return HOST_WORKFILE_EXTENSIONS["maya"]
    return [".ma", ".mb"]


def has_unsaved_changes():

47 openpype/hosts/maya/module.py Normal file

@@ -0,0 +1,47 @@
import os
from openpype.modules import OpenPypeModule
from openpype.modules.interfaces import IHostModule

MAYA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))


class OpenPypeMaya(OpenPypeModule, IHostModule):
    name = "openpype_maya"
    host_name = "maya"

    def initialize(self, module_settings):
        self.enabled = True

    def add_implementation_envs(self, env, _app):
        # Add requirements to PYTHONPATH
        new_python_paths = [
            os.path.join(MAYA_ROOT_DIR, "startup")
        ]
        old_python_path = env.get("PYTHONPATH") or ""
        for path in old_python_path.split(os.pathsep):
            if not path:
                continue

            norm_path = os.path.normpath(path)
            if norm_path not in new_python_paths:
                new_python_paths.append(norm_path)

        env["PYTHONPATH"] = os.pathsep.join(new_python_paths)

        # Set default values if are not already set via settings
        defaults = {
            "OPENPYPE_LOG_NO_COLORS": "Yes"
        }
        for key, value in defaults.items():
            if not env.get(key):
                env[key] = value

    def get_launch_hook_paths(self, app):
        if app.host_name != self.host_name:
            return []
        return [
            os.path.join(MAYA_ROOT_DIR, "hooks")
        ]

    def get_workfile_extensions(self):
        return [".ma", ".mb"]

@@ -71,7 +71,6 @@ class CreateRender(plugin.Creator):
label = "Render"
family = "rendering"
icon = "eye"

_token = None
_user = None
_password = None

@@ -220,6 +219,12 @@ class CreateRender(plugin.Creator):
self.data["tilesY"] = 2
self.data["convertToScanline"] = False
self.data["useReferencedAovs"] = False
self.data["renderSetupIncludeLights"] = (
    self._project_settings.get(
        "maya", {}).get(
            "RenderSettings", {}).get(
                "enable_all_lights", False)
)
# Disable for now as this feature is not working yet
# self.data["assScene"] = False

@@ -154,12 +154,6 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
layer_name = "rs_{}".format(expected_layer_name)

# collect all frames we are expecting to be rendered
renderer = self.get_render_attribute("currentRenderer",
                                     layer=layer_name)
# handle various renderman names
if renderer.startswith("renderman"):
    renderer = "renderman"

# return all expected files for all cameras and aovs in given
# frame range
layer_render_products = get_layer_render_products(layer_name)

@@ -202,8 +196,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
aov_dict = {}
default_render_file = context.data.get('project_settings')\
    .get('maya')\
    .get('create')\
    .get('CreateRender')\
    .get('RenderSettings')\
    .get('default_render_image_folder') or ""
# replace relative paths with absolute. Render products are
# returned as list of dictionaries.

@@ -318,7 +311,10 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
"useReferencedAovs": render_instance.data.get(
    "useReferencedAovs") or render_instance.data.get(
        "vrayUseReferencedAovs") or False,
"aovSeparator": layer_render_products.layer_data.aov_separator  # noqa: E501
"aovSeparator": layer_render_products.layer_data.aov_separator,  # noqa: E501
"renderSetupIncludeLights": render_instance.data.get(
    "renderSetupIncludeLights"
)
}

# Collect Deadline url if Deadline module is enabled

@@ -361,6 +357,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):

instance = context.create_instance(expected_layer_name)
instance.data["label"] = label
instance.data["farm"] = True
instance.data.update(data)
self.log.debug("data: {}".format(json.dumps(data, indent=4)))

146 openpype/hosts/maya/plugins/publish/extract_layout.py Normal file

@@ -0,0 +1,146 @@
import math
import os
import json

from maya import cmds
from maya.api import OpenMaya as om

from bson.objectid import ObjectId

from openpype.pipeline import legacy_io
import openpype.api


class ExtractLayout(openpype.api.Extractor):
    """Extract a layout."""

    label = "Extract Layout"
    hosts = ["maya"]
    families = ["layout"]
    optional = True

    def process(self, instance):
        # Define extract output file path
        stagingdir = self.staging_dir(instance)

        # Perform extraction
        self.log.info("Performing extraction..")

        if "representations" not in instance.data:
            instance.data["representations"] = []

        json_data = []

        for asset in cmds.sets(str(instance), query=True):
            # Find the container
            grp_name = asset.split(':')[0]
            containers = cmds.ls(f"{grp_name}*_CON")

            assert len(containers) == 1, \
                f"More than one container found for {asset}"

            container = containers[0]

            representation_id = cmds.getAttr(f"{container}.representation")

            representation = legacy_io.find_one(
                {
                    "type": "representation",
                    "_id": ObjectId(representation_id)
                }, projection={"parent": True, "context.family": True})

            self.log.info(representation)

            version_id = representation.get("parent")
            family = representation.get("context").get("family")

            json_element = {
                "family": family,
                "instance_name": cmds.getAttr(f"{container}.name"),
                "representation": str(representation_id),
                "version": str(version_id)
            }

            loc = cmds.xform(asset, query=True, translation=True)
            rot = cmds.xform(asset, query=True, rotation=True, euler=True)
            scl = cmds.xform(asset, query=True, relative=True, scale=True)

            json_element["transform"] = {
                "translation": {
                    "x": loc[0],
                    "y": loc[1],
                    "z": loc[2]
                },
                "rotation": {
                    "x": math.radians(rot[0]),
                    "y": math.radians(rot[1]),
                    "z": math.radians(rot[2])
                },
                "scale": {
                    "x": scl[0],
                    "y": scl[1],
                    "z": scl[2]
                }
            }

            row_length = 4
            t_matrix_list = cmds.xform(asset, query=True, matrix=True)

            transform_mm = om.MMatrix(t_matrix_list)
            transform = om.MTransformationMatrix(transform_mm)

            t = transform.translation(om.MSpace.kWorld)
            t = om.MVector(t.x, t.z, -t.y)
            transform.setTranslation(t, om.MSpace.kWorld)
            transform.rotateBy(
                om.MEulerRotation(math.radians(-90), 0, 0), om.MSpace.kWorld)
            transform.scaleBy([1.0, 1.0, -1.0], om.MSpace.kObject)

            t_matrix_list = list(transform.asMatrix())

            t_matrix = []
            for i in range(0, len(t_matrix_list), row_length):
                t_matrix.append(t_matrix_list[i:i + row_length])

            json_element["transform_matrix"] = []
            for row in t_matrix:
                json_element["transform_matrix"].append(list(row))

            basis_list = [
                1, 0, 0, 0,
                0, 1, 0, 0,
                0, 0, -1, 0,
                0, 0, 0, 1
            ]

            basis_mm = om.MMatrix(basis_list)
            basis = om.MTransformationMatrix(basis_mm)

            b_matrix_list = list(basis.asMatrix())
            b_matrix = []

            for i in range(0, len(b_matrix_list), row_length):
                b_matrix.append(b_matrix_list[i:i + row_length])

            json_element["basis"] = []
            for row in b_matrix:
                json_element["basis"].append(list(row))

            json_data.append(json_element)

        json_filename = "{}.json".format(instance.name)
        json_path = os.path.join(stagingdir, json_filename)

        with open(json_path, "w+") as file:
            json.dump(json_data, fp=file, indent=2)

        json_representation = {
            'name': 'json',
            'ext': 'json',
            'files': json_filename,
            "stagingDir": stagingdir,
        }
        instance.data["representations"].append(json_representation)

        self.log.info("Extracted instance '%s' to: %s",
                      instance.name, json_representation)

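A small worked sketch of the matrix handling above: the flat 16-float list Maya returns is chunked into four rows of four (identity matrix used here for brevity). The -90 degree X rotation and the [1, 1, -1] scale appear to convert Maya's Y-up transform into a Z-up, left-handed convention matching the basis matrix.

    row_length = 4
    t_matrix_list = [1.0, 0.0, 0.0, 0.0,
                     0.0, 1.0, 0.0, 0.0,
                     0.0, 0.0, 1.0, 0.0,
                     0.0, 0.0, 0.0, 1.0]
    t_matrix = [t_matrix_list[i:i + row_length]
                for i in range(0, len(t_matrix_list), row_length)]
    # -> [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0],
    #     [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]
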
@@ -40,11 +40,13 @@ def get_ocio_config_path(profile_folder):
Returns:
    str: Path to vendorized config file.
"""

return os.path.join(
    os.environ["OPENPYPE_ROOT"],
    "vendor",
    "configs",
    "OpenColorIO-Configs",
    "bin",
    "ocioconfig",
    "OpenColorIOConfigs",
    profile_folder,
    "config.ocio"
)

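For illustration, assuming OPENPYPE_ROOT=/opt/openpype and profile_folder="aces_1.2" (an assumed folder name), the updated lookup would resolve to:

    /opt/openpype/bin/ocioconfig/OpenColorIOConfigs/aces_1.2/config.ocio
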
@@ -1,17 +1,15 @@
import maya.mel as mel
import pymel.core as pm
from maya import cmds

import pyblish.api
import openpype.api


def get_file_rule(rule):
    """Workaround for a bug in python with cmds.workspace"""
    return mel.eval('workspace -query -fileRuleEntry "{}"'.format(rule))


class ValidateRenderImageRule(pyblish.api.InstancePlugin):
    """Validates "images" file rule is set to "renders/"
    """Validates Maya Workpace "images" file rule matches project settings.

    This validates against the configured default render image folder:
        Studio Settings > Project > Maya >
            Render Settings > Default render image folder.

    """

@@ -23,24 +21,29 @@ class ValidateRenderImageRule(pyblish.api.InstancePlugin):

def process(self, instance):

    default_render_file = self.get_default_render_image_folder(instance)
    required_images_rule = self.get_default_render_image_folder(instance)
    current_images_rule = cmds.workspace(fileRuleEntry="images")

    assert get_file_rule("images") == default_render_file, (
        "Workspace's `images` file rule must be set to: {}".format(
            default_render_file
    assert current_images_rule == required_images_rule, (
        "Invalid workspace `images` file rule value: '{}'. "
        "Must be set to: '{}'".format(
            current_images_rule, required_images_rule
        )
    )

@classmethod
def repair(cls, instance):
    default = cls.get_default_render_image_folder(instance)
    pm.workspace.fileRules["images"] = default
    pm.system.Workspace.save()

    required_images_rule = cls.get_default_render_image_folder(instance)
    current_images_rule = cmds.workspace(fileRuleEntry="images")

    if current_images_rule != required_images_rule:
        cmds.workspace(fileRule=("images", required_images_rule))
        cmds.workspace(saveWorkspace=True)

@staticmethod
def get_default_render_image_folder(instance):
    return instance.context.data.get('project_settings')\
        .get('maya') \
        .get('create') \
        .get('CreateRender') \
        .get('RenderSettings') \
        .get('default_render_image_folder')

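A minimal sketch of the cmds.workspace file-rule round trip the validator and repair action rely on (the target folder value is assumed):

    from maya import cmds

    # query the current "images" file rule
    current = cmds.workspace(fileRuleEntry="images")
    # point it at the configured render folder and persist the workspace
    cmds.workspace(fileRule=("images", "renders/maya"))
    cmds.workspace(saveWorkspace=True)
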
@@ -242,6 +242,14 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
    instance.context.data["project_settings"]["maya"]["publish"]["ValidateRenderSettings"].get(  # noqa: E501
        "{}_render_attributes".format(renderer)) or []
)
settings_lights_flag = instance.context.data["project_settings"].get(
    "maya", {}).get(
        "RenderSettings", {}).get(
            "enable_all_lights", False)

instance_lights_flag = instance.data.get("renderSetupIncludeLights")
if settings_lights_flag != instance_lights_flag:
    cls.log.warning('Instance flag for "Render Setup Include Lights" is set to {0} and Settings flag is set to {1}'.format(instance_lights_flag, settings_lights_flag))  # noqa

# go through definitions and test if such node.attribute exists.
# if so, compare its value from the one required.

@@ -162,7 +162,15 @@ class LoadClip(plugin.NukeLoader):
data_imprint = {}
for k in add_keys:
    if k == 'version':
        data_imprint[k] = context["version"]['name']
        version_doc = context["version"]
        if version_doc["type"] == "hero_version":
            version = "hero"
        else:
            version = version_doc.get("name")

        if version:
            data_imprint[k] = version

    elif k == 'colorspace':
        colorspace = repre["data"].get(k)
        colorspace = colorspace or version_data.get(k)

@@ -11,6 +11,8 @@ class PSWorkfileCreator(AutoCreator):
identifier = "workfile"
family = "workfile"

default_variant = "Main"

def get_instance_attr_defs(self):
    return []

@@ -35,7 +37,6 @@ class PSWorkfileCreator(AutoCreator):
        existing_instance = instance
        break

variant = ''
project_name = legacy_io.Session["AVALON_PROJECT"]
asset_name = legacy_io.Session["AVALON_ASSET"]
task_name = legacy_io.Session["AVALON_TASK"]

@@ -43,15 +44,17 @@ class PSWorkfileCreator(AutoCreator):
if existing_instance is None:
    asset_doc = get_asset_by_name(project_name, asset_name)
    subset_name = self.get_subset_name(
        variant, task_name, asset_doc, project_name, host_name
        self.default_variant, task_name, asset_doc,
        project_name, host_name
    )
    data = {
        "asset": asset_name,
        "task": task_name,
        "variant": variant
        "variant": self.default_variant
    }
    data.update(self.get_dynamic_data(
        variant, task_name, asset_doc, project_name, host_name
        self.default_variant, task_name, asset_doc,
        project_name, host_name
    ))

    new_instance = CreatedInstance(

@@ -67,7 +70,9 @@ class PSWorkfileCreator(AutoCreator):
):
    asset_doc = get_asset_by_name(project_name, asset_name)
    subset_name = self.get_subset_name(
        variant, task_name, asset_doc, project_name, host_name
        self.default_variant, task_name, asset_doc,
        project_name, host_name
    )
    existing_instance["asset"] = asset_name
    existing_instance["task"] = task_name
    existing_instance["subset"] = subset_name

@@ -11,6 +11,8 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
label = "Collect Workfile"
hosts = ["photoshop"]

default_variant = "Main"

def process(self, context):
    existing_instance = None
    for instance in context:

@@ -20,9 +22,11 @@ class CollectWorkfile(pyblish.api.ContextPlugin):
        break

family = "workfile"
# context.data["variant"] might come only from collect_batch_data
variant = context.data.get("variant") or self.default_variant
subset = get_subset_name_with_asset_doc(
    family,
    "",
    variant,
    context.data["anatomyData"]["task"]["name"],
    context.data["assetEntity"],
    context.data["anatomyData"]["project"]["name"],

@@ -1,5 +1,6 @@
import os
import shutil
from PIL import Image

import openpype.api
import openpype.lib

@@ -8,10 +9,17 @@ from openpype.hosts.photoshop import api as photoshop

class ExtractReview(openpype.api.Extractor):
    """
    Produce a flattened or sequence image file from all 'image' instances.
    Produce a flattened or sequence image files from all 'image' instances.

    If no 'image' instance is created, it produces flattened image from
    all visible layers.

    It creates review, thumbnail and mov representations.

    'review' family could be used in other steps as a reference, as it
    contains flattened image by default. (Eg. artist could load this
    review as a single item and see full image. In most cases 'image'
    family is separated by layers to better usage in animation or comp.)
    """

    label = "Extract Review"

@@ -22,6 +30,7 @@ class ExtractReview(openpype.api.Extractor):
jpg_options = None
mov_options = None
make_image_sequence = None
max_downscale_size = 8192

def process(self, instance):
    staging_dir = self.staging_dir(instance)

@@ -49,7 +58,7 @@ class ExtractReview(openpype.api.Extractor):
    "stagingDir": staging_dir,
    "tags": self.jpg_options['tags'],
})

processed_img_names = img_list
else:
    self.log.info("Extract layers to flatten image.")
    img_list = self._saves_flattened_layers(staging_dir, layers)

@@ -57,26 +66,33 @@ class ExtractReview(openpype.api.Extractor):
    instance.data["representations"].append({
        "name": "jpg",
        "ext": "jpg",
        "files": img_list,
        "files": img_list,  # cannot be [] for single frame
        "stagingDir": staging_dir,
        "tags": self.jpg_options['tags']
    })
    processed_img_names = [img_list]

ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")

instance.data["stagingDir"] = staging_dir

# Generate thumbnail.
source_files_pattern = os.path.join(staging_dir,
                                    self.output_seq_filename)
source_files_pattern = self._check_and_resize(processed_img_names,
                                              source_files_pattern,
                                              staging_dir)
# Generate thumbnail
thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
self.log.info(f"Generate thumbnail {thumbnail_path}")
args = [
    ffmpeg_path,
    "-y",
    "-i", os.path.join(staging_dir, self.output_seq_filename),
    "-i", source_files_pattern,
    "-vf", "scale=300:-1",
    "-vframes", "1",
    thumbnail_path
]
self.log.debug("thumbnail args:: {}".format(args))
output = openpype.lib.run_subprocess(args)

instance.data["representations"].append({

@@ -94,11 +110,12 @@ class ExtractReview(openpype.api.Extractor):
args = [
    ffmpeg_path,
    "-y",
    "-i", os.path.join(staging_dir, self.output_seq_filename),
    "-i", source_files_pattern,
    "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2",
    "-vframes", str(img_number),
    mov_path
]
self.log.debug("mov args:: {}".format(args))
output = openpype.lib.run_subprocess(args)
self.log.debug(output)
instance.data["representations"].append({

@@ -120,6 +137,34 @@ class ExtractReview(openpype.api.Extractor):

    self.log.info(f"Extracted {instance} to {staging_dir}")

def _check_and_resize(self, processed_img_names, source_files_pattern,
                      staging_dir):
    """Check if saved image could be used in ffmpeg.

    Ffmpeg has max size 16384x16384. Saved image(s) must be resized to be
    used as a source for thumbnail or review mov.
    """
    Image.MAX_IMAGE_PIXELS = None
    first_url = os.path.join(staging_dir, processed_img_names[0])
    with Image.open(first_url) as im:
        width, height = im.size

    if width > self.max_downscale_size or height > self.max_downscale_size:
        resized_dir = os.path.join(staging_dir, "resized")
        os.mkdir(resized_dir)
        source_files_pattern = os.path.join(resized_dir,
                                            self.output_seq_filename)
        for file_name in processed_img_names:
            source_url = os.path.join(staging_dir, file_name)
            with Image.open(source_url) as res_img:
                # 'thumbnail' automatically keeps aspect ratio
                res_img.thumbnail((self.max_downscale_size,
                                   self.max_downscale_size),
                                  Image.ANTIALIAS)
                res_img.save(os.path.join(resized_dir, file_name))

    return source_files_pattern

def _get_image_path_from_instances(self, instance):
    img_list = []

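A minimal sketch of the PIL downscale in _check_and_resize, with a hypothetical oversized source file; thumbnail() resizes in place and keeps the aspect ratio:

    from PIL import Image

    Image.MAX_IMAGE_PIXELS = None                  # allow very large sources
    with Image.open("big_layer.jpg") as img:       # hypothetical file name
        img.thumbnail((8192, 8192), Image.ANTIALIAS)
        img.save("resized/big_layer.jpg")
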
@@ -1,129 +0,0 @@
from .api.utils import (
    setup,
    get_resolve_module
)

from .api.pipeline import (
    install,
    uninstall,
    ls,
    containerise,
    update_container,
    publish,
    launch_workfiles_app,
    maintained_selection,
    remove_instance,
    list_instances
)

from .api.lib import (
    maintain_current_timeline,
    publish_clip_color,
    get_project_manager,
    get_current_project,
    get_current_timeline,
    create_bin,
    get_media_pool_item,
    create_media_pool_item,
    create_timeline_item,
    get_timeline_item,
    get_video_track_names,
    get_current_timeline_items,
    get_pype_timeline_item_by_name,
    get_timeline_item_pype_tag,
    set_timeline_item_pype_tag,
    imprint,
    set_publish_attribute,
    get_publish_attribute,
    create_compound_clip,
    swap_clips,
    get_pype_clip_metadata,
    set_project_manager_to_folder_name,
    get_otio_clip_instance_data,
    get_reformated_path
)

from .api.menu import launch_pype_menu

from .api.plugin import (
    ClipLoader,
    TimelineItemLoader,
    Creator,
    PublishClip
)

from .api.workio import (
    open_file,
    save_file,
    current_file,
    has_unsaved_changes,
    file_extensions,
    work_root
)

from .api.testing_utils import TestGUI


__all__ = [
    # pipeline
    "install",
    "uninstall",
    "ls",
    "containerise",
    "update_container",
    "reload_pipeline",
    "publish",
    "launch_workfiles_app",
    "maintained_selection",
    "remove_instance",
    "list_instances",

    # utils
    "setup",
    "get_resolve_module",

    # lib
    "maintain_current_timeline",
    "publish_clip_color",
    "get_project_manager",
    "get_current_project",
    "get_current_timeline",
    "create_bin",
    "get_media_pool_item",
    "create_media_pool_item",
    "create_timeline_item",
    "get_timeline_item",
    "get_video_track_names",
    "get_current_timeline_items",
    "get_pype_timeline_item_by_name",
    "get_timeline_item_pype_tag",
    "set_timeline_item_pype_tag",
    "imprint",
    "set_publish_attribute",
    "get_publish_attribute",
    "create_compound_clip",
    "swap_clips",
    "get_pype_clip_metadata",
    "set_project_manager_to_folder_name",
    "get_otio_clip_instance_data",
    "get_reformated_path",

    # menu
    "launch_pype_menu",

    # plugin
    "ClipLoader",
    "TimelineItemLoader",
    "Creator",
    "PublishClip",

    # workio
    "open_file",
    "save_file",
    "current_file",
    "has_unsaved_changes",
    "file_extensions",
    "work_root",

    "TestGUI"
]

@@ -1,11 +1,137 @@
"""
resolve api
"""
import os

bmdvr = None
bmdvf = None

API_DIR = os.path.dirname(os.path.abspath(__file__))
HOST_DIR = os.path.dirname(API_DIR)
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
from .utils import (
    get_resolve_module
)

from .pipeline import (
    install,
    uninstall,
    ls,
    containerise,
    update_container,
    publish,
    launch_workfiles_app,
    maintained_selection,
    remove_instance,
    list_instances
)

from .lib import (
    maintain_current_timeline,
    publish_clip_color,
    get_project_manager,
    get_current_project,
    get_current_timeline,
    create_bin,
    get_media_pool_item,
    create_media_pool_item,
    create_timeline_item,
    get_timeline_item,
    get_video_track_names,
    get_current_timeline_items,
    get_pype_timeline_item_by_name,
    get_timeline_item_pype_tag,
    set_timeline_item_pype_tag,
    imprint,
    set_publish_attribute,
    get_publish_attribute,
    create_compound_clip,
    swap_clips,
    get_pype_clip_metadata,
    set_project_manager_to_folder_name,
    get_otio_clip_instance_data,
    get_reformated_path
)

from .menu import launch_pype_menu

from .plugin import (
    ClipLoader,
    TimelineItemLoader,
    Creator,
    PublishClip
)

from .workio import (
    open_file,
    save_file,
    current_file,
    has_unsaved_changes,
    file_extensions,
    work_root
)

from .testing_utils import TestGUI


__all__ = [
    "bmdvr",
    "bmdvf",

    # pipeline
    "install",
    "uninstall",
    "ls",
    "containerise",
    "update_container",
    "reload_pipeline",
    "publish",
    "launch_workfiles_app",
    "maintained_selection",
    "remove_instance",
    "list_instances",

    # utils
    "get_resolve_module",

    # lib
    "maintain_current_timeline",
    "publish_clip_color",
    "get_project_manager",
    "get_current_project",
    "get_current_timeline",
    "create_bin",
    "get_media_pool_item",
    "create_media_pool_item",
    "create_timeline_item",
    "get_timeline_item",
    "get_video_track_names",
    "get_current_timeline_items",
    "get_pype_timeline_item_by_name",
    "get_timeline_item_pype_tag",
    "set_timeline_item_pype_tag",
    "imprint",
    "set_publish_attribute",
    "get_publish_attribute",
    "create_compound_clip",
    "swap_clips",
    "get_pype_clip_metadata",
    "set_project_manager_to_folder_name",
    "get_otio_clip_instance_data",
    "get_reformated_path",

    # menu
    "launch_pype_menu",

    # plugin
    "ClipLoader",
    "TimelineItemLoader",
    "Creator",
    "PublishClip",

    # workio
    "open_file",
    "save_file",
    "current_file",
    "has_unsaved_changes",
    "file_extensions",
    "work_root",

    "TestGUI"
]

@@ -4,7 +4,7 @@ from __future__ import absolute_import
import pyblish.api


from ...action import get_errored_instances_from_context
from openpype.action import get_errored_instances_from_context


class SelectInvalidAction(pyblish.api.Action):

@@ -4,13 +4,13 @@ import re
import os
import contextlib
from opentimelineio import opentime

from openpype.lib import Logger
from openpype.pipeline.editorial import is_overlapping_otio_ranges

from ..otio import davinci_export as otio_export

from openpype.api import Logger

log = Logger().get_logger(__name__)
log = Logger.get_logger(__name__)

self = sys.modules[__name__]
self.project_manager = None

@@ -3,13 +3,13 @@ import sys

from Qt import QtWidgets, QtCore

from openpype.tools.utils import host_tools

from .pipeline import (
    publish,
    launch_workfiles_app
)

from openpype.tools.utils import host_tools


def load_stylesheet():
    path = os.path.join(os.path.dirname(__file__), "menu_style.qss")

@@ -7,7 +7,7 @@ from collections import OrderedDict

from pyblish import api as pyblish

from openpype.api import Logger
from openpype.lib import Logger
from openpype.pipeline import (
    schema,
    register_loader_plugin_path,

@@ -16,11 +16,15 @@ from openpype.pipeline import (
    deregister_creator_plugin_path,
    AVALON_CONTAINER_ID,
)
from . import lib
from . import PLUGINS_DIR
from openpype.tools.utils import host_tools
log = Logger().get_logger(__name__)

from . import lib
from .utils import get_resolve_module

log = Logger.get_logger(__name__)

HOST_DIR = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
PLUGINS_DIR = os.path.join(HOST_DIR, "plugins")
PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish")
LOAD_PATH = os.path.join(PLUGINS_DIR, "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "create")

@@ -39,7 +43,6 @@ def install():
    See the Maya equivalent for inspiration on how to implement this.

    """
    from .. import get_resolve_module

    log.info("openpype.hosts.resolve installed")

@@ -1,9 +1,9 @@
#!/usr/bin/env python
import time
from openpype.hosts.resolve.utils import get_resolve_module
from openpype.api import Logger
from openpype.lib import Logger

log = Logger().get_logger(__name__)
log = Logger.get_logger(__name__)

wait_delay = 2.5
wait = 0.00

@@ -4,21 +4,21 @@
Resolve's tools for setting environment
"""

import sys
import os
import shutil
from . import HOST_DIR
from openpype.api import Logger
log = Logger().get_logger(__name__)
import sys

from openpype.lib import Logger

log = Logger.get_logger(__name__)


def get_resolve_module():
    from openpype.hosts import resolve
    from openpype.hosts.resolve import api
    # dont run if already loaded
    if resolve.api.bmdvr:
    if api.bmdvr:
        log.info(("resolve module is assigned to "
                  f"`pype.hosts.resolve.api.bmdvr`: {resolve.api.bmdvr}"))
        return resolve.api.bmdvr
                  f"`pype.hosts.resolve.api.bmdvr`: {api.bmdvr}"))
        return api.bmdvr
    try:
        """
        The PYTHONPATH needs to be set correctly for this import

@@ -71,79 +71,9 @@ def get_resolve_module():
    # assign global var and return
    bmdvr = bmd.scriptapp("Resolve")
    bmdvf = bmd.scriptapp("Fusion")
    resolve.api.bmdvr = bmdvr
    resolve.api.bmdvf = bmdvf
    api.bmdvr = bmdvr
    api.bmdvf = bmdvf
    log.info(("Assigning resolve module to "
              f"`pype.hosts.resolve.api.bmdvr`: {resolve.api.bmdvr}"))
              f"`pype.hosts.resolve.api.bmdvr`: {api.bmdvr}"))
    log.info(("Assigning resolve module to "
              f"`pype.hosts.resolve.api.bmdvf`: {resolve.api.bmdvf}"))


def _sync_utility_scripts(env=None):
    """ Synchronizing basic utlility scripts for resolve.

    To be able to run scripts from inside `Resolve/Workspace/Scripts` menu
    all scripts has to be accessible from defined folder.
    """
    if not env:
        env = os.environ

    # initiate inputs
    scripts = {}
    us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR")
    us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "")
    us_paths = [os.path.join(
        HOST_DIR,
        "utility_scripts"
    )]

    # collect script dirs
    if us_env:
        log.info(f"Utility Scripts Env: `{us_env}`")
        us_paths = us_env.split(
            os.pathsep) + us_paths

    # collect scripts from dirs
    for path in us_paths:
        scripts.update({path: os.listdir(path)})

    log.info(f"Utility Scripts Dir: `{us_paths}`")
    log.info(f"Utility Scripts: `{scripts}`")

    # make sure no script file is in folder
    if next((s for s in os.listdir(us_dir)), None):
        for s in os.listdir(us_dir):
            path = os.path.join(us_dir, s)
            log.info(f"Removing `{path}`...")
            if os.path.isdir(path):
                shutil.rmtree(path, onerror=None)
            else:
                os.remove(path)

    # copy scripts into Resolve's utility scripts dir
    for d, sl in scripts.items():
        # directory and scripts list
        for s in sl:
            # script in script list
            src = os.path.join(d, s)
            dst = os.path.join(us_dir, s)
            log.info(f"Copying `{src}` to `{dst}`...")
            if os.path.isdir(src):
                shutil.copytree(
                    src, dst, symlinks=False,
                    ignore=None, ignore_dangling_symlinks=False
                )
            else:
                shutil.copy2(src, dst)


def setup(env=None):
    """ Wrapper installer started from pype.hooks.resolve.ResolvePrelaunch()
    """
    if not env:
        env = os.environ

    # synchronize resolve utility scripts
    _sync_utility_scripts(env)

    log.info("Resolve OpenPype wrapper has been installed")
              f"`pype.hosts.resolve.api.bmdvf`: {api.bmdvf}"))

@ -2,14 +2,14 @@
|
|||
|
||||
import os
|
||||
from openpype.api import Logger
|
||||
from .. import (
|
||||
from .lib import (
|
||||
get_project_manager,
|
||||
get_current_project,
|
||||
set_project_manager_to_folder_name
|
||||
)
|
||||
|
||||
|
||||
log = Logger().get_logger(__name__)
|
||||
log = Logger.get_logger(__name__)
|
||||
|
||||
exported_projet_ext = ".drp"

@@ -60,7 +60,7 @@ def open_file(filepath):
         # load project from input path
         project = pm.LoadProject(fname)
         log.info(f"Project {project.GetName()} opened...")
-        return True
+
     except AttributeError:
         log.warning((f"Project with name `{fname}` does not exist! It will "
                      f"be imported from {filepath} and then loaded..."))

@@ -69,9 +69,8 @@ def open_file(filepath):
             project = pm.LoadProject(fname)
             log.info(f"Project imported/loaded {project.GetName()}...")
             return True
-        else:
-            return False
-
-    return False
+        return False
+    return True


 def current_file():
     pm = get_project_manager()

@@ -80,13 +79,9 @@ def current_file():
     name = project.GetName()
     fname = name + exported_projet_ext
     current_file = os.path.join(current_dir, fname)
-    normalised = os.path.normpath(current_file)

     # Unsaved current file
-    if normalised == "":
+    if not current_file:
         return None

-    return normalised
+    return os.path.normpath(current_file)


 def work_root(session):

@@ -1,7 +1,7 @@
 import os
 import importlib

 from openpype.lib import PreLaunchHook
-from openpype.hosts.resolve.api import utils
+from openpype.hosts.resolve.utils import setup


 class ResolvePrelaunch(PreLaunchHook):

@@ -43,18 +43,6 @@ class ResolvePrelaunch(PreLaunchHook):
             self.launch_context.env.get("PRE_PYTHON_SCRIPT", ""))
         self.launch_context.env["PRE_PYTHON_SCRIPT"] = pre_py_sc
         self.log.debug(f"-- pre_py_sc: `{pre_py_sc}`...")
-        try:
-            __import__("openpype.hosts.resolve")
-            __import__("pyblish")
-
-        except ImportError:
-            self.log.warning(
-                "pyblish: Could not load Resolve integration.",
-                exc_info=True
-            )
-
-        else:
-            # Resolve Setup integration
-            importlib.reload(utils)
-            self.log.debug(f"-- utils.__file__: `{utils.__file__}`")
-            utils.setup(self.launch_context.env)
+        # Resolve Setup integration
+        setup(self.launch_context.env)
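
Note that the refactored hook drops the old ImportError guard, so any failure inside `setup()` now propagates and can abort the launch. A minimal sketch of a hook variant that restores the soft-fail behaviour, assuming the `PreLaunchHook` attributes used above (`launch_context`, `log`); the class name and `app_groups` value are illustrative, not part of the commit:

```python
from openpype.lib import PreLaunchHook
from openpype.hosts.resolve.utils import setup


class ResolveSetupSoftFail(PreLaunchHook):
    """Hypothetical variant that keeps launching Resolve on sync errors."""

    app_groups = ["resolve"]  # assumption: group name matches the host

    def execute(self):
        try:
            setup(self.launch_context.env)
        except Exception:
            # Mirrors the deleted try/except: log and continue the launch.
            self.log.warning(
                "Resolve utility-script sync failed.", exc_info=True)
```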

@@ -1,9 +1,12 @@
 # from pprint import pformat
-from openpype.hosts import resolve
-from openpype.hosts.resolve.api import lib
+from openpype.hosts.resolve.api import plugin, lib
+from openpype.hosts.resolve.api.lib import (
+    get_video_track_names,
+    create_bin,
+)


-class CreateShotClip(resolve.Creator):
+class CreateShotClip(plugin.Creator):
     """Publishable clip"""

     label = "Create Publishable Clip"

@@ -11,7 +14,7 @@ class CreateShotClip(resolve.Creator):
     icon = "film"
     defaults = ["Main"]

-    gui_tracks = resolve.get_video_track_names()
+    gui_tracks = get_video_track_names()
     gui_name = "OpenPype publish attributes creator"
     gui_info = "Define sequential rename and fill hierarchy data."
     gui_inputs = {

@@ -250,7 +253,7 @@ class CreateShotClip(resolve.Creator):
        sq_markers = self.timeline.GetMarkers()

        # create media bin for compound clips (trackItems)
-        mp_folder = resolve.create_bin(self.timeline.GetName())
+        mp_folder = create_bin(self.timeline.GetName())

        kwargs = {
            "ui_inputs": widget.result,

@@ -264,6 +267,6 @@ class CreateShotClip(resolve.Creator):
            self.rename_index = i
            self.log.info(track_item_data)
            # convert track item to timeline media pool item
-            track_item = resolve.PublishClip(
+            track_item = plugin.PublishClip(
                self, track_item_data, **kwargs).convert()
            track_item.SetClipColor(lib.publish_clip_color)

@@ -1,21 +1,22 @@
 from copy import deepcopy
+from importlib import reload

 from openpype.client import (
     get_version_by_id,
     get_last_version_by_subset_id,
 )
-from openpype.hosts import resolve
+# from openpype.hosts import resolve
 from openpype.pipeline import (
     get_representation_path,
     legacy_io,
 )
+from openpype.hosts.resolve.api import lib, plugin
+reload(plugin)
+reload(lib)
+from openpype.hosts.resolve.api.pipeline import (
+    containerise,
+    update_container,
+)


-class LoadClip(resolve.TimelineItemLoader):
+class LoadClip(plugin.TimelineItemLoader):
    """Load a subset to timeline as clip

    Place clip to timeline on its asset origin timings collected

@@ -46,7 +47,7 @@ class LoadClip(resolve.TimelineItemLoader):
        })

        # load clip to timeline and get main variables
-        timeline_item = resolve.ClipLoader(
+        timeline_item = plugin.ClipLoader(
            self, context, **options).load()
        namespace = namespace or timeline_item.GetName()
        version = context['version']

@@ -80,7 +81,7 @@ class LoadClip(resolve.TimelineItemLoader):

        self.log.info("Loader done: `{}`".format(name))

-        return resolve.containerise(
+        return containerise(
            timeline_item,
            name, namespace, context,
            self.__class__.__name__,

@@ -98,7 +99,7 @@ class LoadClip(resolve.TimelineItemLoader):
        context.update({"representation": representation})
        name = container['name']
        namespace = container['namespace']
-        timeline_item_data = resolve.get_pype_timeline_item_by_name(namespace)
+        timeline_item_data = lib.get_pype_timeline_item_by_name(namespace)
        timeline_item = timeline_item_data["clip"]["item"]
        project_name = legacy_io.active_project()
        version = get_version_by_id(project_name, representation["parent"])

@@ -109,7 +110,7 @@ class LoadClip(resolve.TimelineItemLoader):
        self.fname = get_representation_path(representation)
        context["version"] = {"data": version_data}

-        loader = resolve.ClipLoader(self, context)
+        loader = plugin.ClipLoader(self, context)
        timeline_item = loader.update(timeline_item)

        # add additional metadata from the version to imprint Avalon knob

@@ -136,7 +137,7 @@ class LoadClip(resolve.TimelineItemLoader):
        # update color of clip regarding the version order
        self.set_item_color(timeline_item, version)

-        return resolve.update_container(timeline_item, data_imprint)
+        return update_container(timeline_item, data_imprint)

    @classmethod
    def set_item_color(cls, timeline_item, version):

@@ -1,7 +1,7 @@
 import os
 import pyblish.api
 import openpype.api
-from openpype.hosts import resolve
+from openpype.hosts.resolve.api.lib import get_project_manager


 class ExtractWorkfile(openpype.api.Extractor):

@@ -29,7 +29,7 @@ class ExtractWorkfile(openpype.api.Extractor):
            os.path.join(staging_dir, drp_file_name))

        # write out the drp workfile
-        resolve.get_project_manager().ExportProject(
+        get_project_manager().ExportProject(
            project.GetName(), drp_file_path)

        # create drp workfile representation

@@ -1,9 +1,15 @@
-import pyblish
-from openpype.hosts import resolve
-
 # # developer reload modules
 from pprint import pformat

+import pyblish
+
+from openpype.hosts.resolve.api.lib import (
+    get_current_timeline_items,
+    get_timeline_item_pype_tag,
+    publish_clip_color,
+    get_publish_attribute,
+    get_otio_clip_instance_data,
+)


 class PrecollectInstances(pyblish.api.ContextPlugin):
     """Collect all Track items selection."""

@@ -14,8 +20,8 @@ class PrecollectInstances(pyblish.api.ContextPlugin):

    def process(self, context):
        otio_timeline = context.data["otioTimeline"]
-        selected_timeline_items = resolve.get_current_timeline_items(
-            filter=True, selecting_color=resolve.publish_clip_color)
+        selected_timeline_items = get_current_timeline_items(
+            filter=True, selecting_color=publish_clip_color)

        self.log.info(
            "Processing enabled track items: {}".format(

@@ -27,7 +33,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
            timeline_item = timeline_item_data["clip"]["item"]

            # get pype tag data
-            tag_data = resolve.get_timeline_item_pype_tag(timeline_item)
+            tag_data = get_timeline_item_pype_tag(timeline_item)
            self.log.debug(f"__ tag_data: {pformat(tag_data)}")

            if not tag_data:

@@ -67,7 +73,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
                "asset": asset,
                "item": timeline_item,
                "families": families,
-                "publish": resolve.get_publish_attribute(timeline_item),
+                "publish": get_publish_attribute(timeline_item),
                "fps": context.data["fps"],
                "handleStart": handle_start,
                "handleEnd": handle_end,

@@ -75,7 +81,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
            })

            # otio clip data
-            otio_data = resolve.get_otio_clip_instance_data(
+            otio_data = get_otio_clip_instance_data(
                otio_timeline, timeline_item_data) or {}
            data.update(otio_data)

@@ -134,7 +140,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
                "asset": asset,
                "family": family,
                "families": [],
-                "publish": resolve.get_publish_attribute(timeline_item)
+                "publish": get_publish_attribute(timeline_item)
            })

            context.create_instance(**data)

@@ -1,11 +1,9 @@
 import pyblish.api
 from pprint import pformat
-from importlib import reload

-from openpype.hosts import resolve
+from openpype.hosts.resolve import api as rapi
 from openpype.pipeline import legacy_io
 from openpype.hosts.resolve.otio import davinci_export
-reload(davinci_export)


 class PrecollectWorkfile(pyblish.api.ContextPlugin):

@@ -18,9 +16,9 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):

        asset = legacy_io.Session["AVALON_ASSET"]
        subset = "workfile"
-        project = resolve.get_current_project()
+        project = rapi.get_current_project()
        fps = project.GetSetting("timelineFrameRate")
-        video_tracks = resolve.get_video_track_names()
+        video_tracks = rapi.get_video_track_names()

        # adding otio timeline to context
        otio_timeline = davinci_export.create_otio_timeline(project)

@@ -6,10 +6,11 @@ from openpype.pipeline import install_host


 def main(env):
-    import openpype.hosts.resolve as bmdvr
+    from openpype.hosts.resolve.utils import setup
+    import openpype.hosts.resolve.api as bmdvr
     # Registers openpype's Global pyblish plugins
     install_host(bmdvr)
-    bmdvr.setup(env)
+    setup(env)


 if __name__ == "__main__":

@@ -2,13 +2,13 @@ import os
 import sys

 from openpype.pipeline import install_host
-from openpype.api import Logger
+from openpype.lib import Logger

-log = Logger().get_logger(__name__)
+log = Logger.get_logger(__name__)


 def main(env):
-    import openpype.hosts.resolve as bmdvr
+    import openpype.hosts.resolve.api as bmdvr

    # activate resolve from openpype
    install_host(bmdvr)

@@ -6,8 +6,8 @@ import opentimelineio as otio

 from openpype.pipeline import install_host

-from openpype.hosts.resolve import TestGUI
-import openpype.hosts.resolve as bmdvr
+import openpype.hosts.resolve.api as bmdvr
+from openpype.hosts.resolve.api.testing_utils import TestGUI
 from openpype.hosts.resolve.otio import davinci_export as otio_export

@@ -2,11 +2,16 @@
 import os
 import sys

-from openpype.pipeline import install_host
-from openpype.hosts.resolve import TestGUI
-import openpype.hosts.resolve as bmdvr
 import clique

+from openpype.pipeline import install_host
+from openpype.hosts.resolve.api.testing_utils import TestGUI
+import openpype.hosts.resolve.api as bmdvr
+from openpype.hosts.resolve.api.lib import (
+    create_media_pool_item,
+    create_timeline_item,
+)


 class ThisTestGUI(TestGUI):
     extensions = [".exr", ".jpg", ".mov", ".png", ".mp4", ".ari", ".arx"]

@@ -55,10 +60,10 @@ class ThisTestGUI(TestGUI):
        # skip if unwanted extension
        if ext not in self.extensions:
            return
-        media_pool_item = bmdvr.create_media_pool_item(fpath)
+        media_pool_item = create_media_pool_item(fpath)
        print(media_pool_item)

-        track_item = bmdvr.create_timeline_item(media_pool_item)
+        track_item = create_timeline_item(media_pool_item)
        print(track_item)

@@ -1,13 +1,17 @@
 #! python3
 from openpype.pipeline import install_host
-import openpype.hosts.resolve as bmdvr
+from openpype.hosts.resolve import api as bmdvr
+from openpype.hosts.resolve.api.lib import (
+    create_media_pool_item,
+    create_timeline_item,
+)


 def file_processing(fpath):
-    media_pool_item = bmdvr.create_media_pool_item(fpath)
+    media_pool_item = create_media_pool_item(fpath)
    print(media_pool_item)

-    track_item = bmdvr.create_timeline_item(media_pool_item)
+    track_item = create_timeline_item(media_pool_item)
    print(track_item)

openpype/hosts/resolve/utils.py (new file, 54 lines)

@@ -0,0 +1,54 @@
+import os
+import shutil
+from openpype.lib import Logger
+
+RESOLVE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def setup(env):
+    log = Logger.get_logger("ResolveSetup")
+    scripts = {}
+    us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR")
+    us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "")
+    us_paths = [os.path.join(
+        RESOLVE_ROOT_DIR,
+        "utility_scripts"
+    )]
+
+    # collect script dirs
+    if us_env:
+        log.info(f"Utility Scripts Env: `{us_env}`")
+        us_paths = us_env.split(
+            os.pathsep) + us_paths
+
+    # collect scripts from dirs
+    for path in us_paths:
+        scripts.update({path: os.listdir(path)})
+
+    log.info(f"Utility Scripts Dir: `{us_paths}`")
+    log.info(f"Utility Scripts: `{scripts}`")
+
+    # make sure no script file is in folder
+    for s in os.listdir(us_dir):
+        path = os.path.join(us_dir, s)
+        log.info(f"Removing `{path}`...")
+        if os.path.isdir(path):
+            shutil.rmtree(path, onerror=None)
+        else:
+            os.remove(path)
+
+    # copy scripts into Resolve's utility scripts dir
+    for d, sl in scripts.items():
+        # directory and scripts list
+        for s in sl:
+            # script in script list
+            src = os.path.join(d, s)
+            dst = os.path.join(us_dir, s)
+            log.info(f"Copying `{src}` to `{dst}`...")
+            if os.path.isdir(src):
+                shutil.copytree(
+                    src, dst, symlinks=False,
+                    ignore=None, ignore_dangling_symlinks=False
+                )
+            else:
+                shutil.copy2(src, dst)
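
For context, the relocated `setup` helper is driven entirely by the two `RESOLVE_UTILITY_SCRIPTS_*` environment variables. A minimal sketch of calling it outside a launch hook; the target path below is illustrative only and must already exist, since `setup()` lists and wipes it before copying:

```python
import os

from openpype.hosts.resolve.utils import setup

env = dict(os.environ)
# Illustrative value; Resolve's real Scripts/Comp location varies per OS.
env["RESOLVE_UTILITY_SCRIPTS_DIR"] = (
    "C:/ProgramData/Blackmagic Design/DaVinci Resolve/Fusion/Scripts/Comp"
)

# Empties the target folder, then copies every bundled utility script in.
setup(env)
```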

@@ -0,0 +1,6 @@
+from .standalonepublish_module import StandAlonePublishModule
+
+
+__all__ = (
+    "StandAlonePublishModule",
+)

@@ -0,0 +1,57 @@
+import os
+
+import click
+
+from openpype.lib import get_openpype_execute_args
+from openpype.lib.execute import run_detached_process
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import ITrayAction, IHostModule
+
+STANDALONEPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class StandAlonePublishModule(OpenPypeModule, ITrayAction, IHostModule):
+    label = "Publish"
+    name = "standalonepublish_tool"
+    host_name = "standalonepublisher"
+
+    def initialize(self, modules_settings):
+        self.enabled = modules_settings[self.name]["enabled"]
+        self.publish_paths = [
+            os.path.join(STANDALONEPUBLISH_ROOT_DIR, "plugins", "publish")
+        ]
+
+    def tray_init(self):
+        return
+
+    def on_action_trigger(self):
+        self.run_standalone_publisher()
+
+    def connect_with_modules(self, enabled_modules):
+        """Collect publish paths from other modules."""
+
+        publish_paths = self.manager.collect_plugin_paths()["publish"]
+        self.publish_paths.extend(publish_paths)
+
+    def run_standalone_publisher(self):
+        args = get_openpype_execute_args("module", self.name, "launch")
+        run_detached_process(args)
+
+    def cli(self, click_group):
+        click_group.add_command(cli_main)
+
+
+@click.group(
+    StandAlonePublishModule.name,
+    help="StandalonePublisher related commands.")
+def cli_main():
+    pass
+
+
+@cli_main.command()
+def launch():
+    """Launch StandalonePublisher tool UI."""
+
+    from openpype.tools import standalonepublish
+
+    standalonepublish.main()
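
The tray action and the click command converge on one code path: the tray trigger spawns the module's own `launch` CLI command in a detached process. A sketch of what that resolves to, using the two helpers the new file imports (the argument list is taken from the code above; the exact executable path is whatever `get_openpype_execute_args` derives on the machine):

```python
from openpype.lib import get_openpype_execute_args
from openpype.lib.execute import run_detached_process

# Equivalent to pressing the "Publish" tray action: builds something like
# ["<openpype executable>", "module", "standalonepublish_tool", "launch"]
# and spawns it without blocking the tray process.
args = get_openpype_execute_args("module", "standalonepublish_tool", "launch")
run_detached_process(args)
```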

openpype/hosts/traypublisher/__init__.py (new file, 6 lines)

@@ -0,0 +1,6 @@
+from .module import TrayPublishModule
+
+
+__all__ = (
+    "TrayPublishModule",
+)

@@ -1,25 +1,24 @@
 import os

+import click
+
 from openpype.lib import get_openpype_execute_args
 from openpype.lib.execute import run_detached_process
 from openpype.modules import OpenPypeModule
-from openpype_interfaces import ITrayAction
+from openpype.modules.interfaces import ITrayAction, IHostModule

+TRAYPUBLISH_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))


-class TrayPublishAction(OpenPypeModule, ITrayAction):
+class TrayPublishModule(OpenPypeModule, IHostModule, ITrayAction):
     label = "New Publish (beta)"
     name = "traypublish_tool"
+    host_name = "traypublish"

     def initialize(self, modules_settings):
-        import openpype
         self.enabled = True
         self.publish_paths = [
-            os.path.join(
-                openpype.PACKAGE_DIR,
-                "hosts",
-                "traypublisher",
-                "plugins",
-                "publish"
-            )
+            os.path.join(TRAYPUBLISH_ROOT_DIR, "plugins", "publish")
         ]
         self._experimental_tools = None

@@ -29,7 +28,7 @@ class TrayPublishAction(OpenPypeModule, ITrayAction):
         self._experimental_tools = ExperimentalTools()

     def tray_menu(self, *args, **kwargs):
-        super(TrayPublishAction, self).tray_menu(*args, **kwargs)
+        super(TrayPublishModule, self).tray_menu(*args, **kwargs)
         traypublisher = self._experimental_tools.get("traypublisher")
         visible = False
         if traypublisher and traypublisher.enabled:

@@ -45,5 +44,24 @@ class TrayPublishAction(OpenPypeModule, ITrayAction):
         self.publish_paths.extend(publish_paths)

     def run_traypublisher(self):
-        args = get_openpype_execute_args("traypublisher")
+        args = get_openpype_execute_args(
+            "module", self.name, "launch"
+        )
         run_detached_process(args)
+
+    def cli(self, click_group):
+        click_group.add_command(cli_main)
+
+
+@click.group(TrayPublishModule.name, help="TrayPublisher related commands.")
+def cli_main():
+    pass
+
+
+@cli_main.command()
+def launch():
+    """Launch TrayPublish tool UI."""
+
+    from openpype.tools import traypublisher
+
+    traypublisher.main()

@@ -1,20 +1,12 @@
-import os
+from .tvpaint_module import (
+    get_launch_script_path,
+    TVPaintModule,
+    TVPAINT_ROOT_DIR,
+)


-def add_implementation_envs(env, _app):
-    """Modify environments to contain all required for implementation."""
-    defaults = {
-        "OPENPYPE_LOG_NO_COLORS": "True"
-    }
-    for key, value in defaults.items():
-        if not env.get(key):
-            env[key] = value
-
-
-def get_launch_script_path():
-    current_dir = os.path.dirname(os.path.abspath(__file__))
-    return os.path.join(
-        current_dir,
-        "api",
-        "launch_script.py"
-    )
+__all__ = (
+    "get_launch_script_path",
+    "TVPaintModule",
+    "TVPAINT_ROOT_DIR",
+)

@@ -6,7 +6,6 @@ from . import pipeline
 from . import plugin
 from .pipeline import (
     install,
-    uninstall,
     maintained_selection,
     remove_instance,
     list_instances,

@@ -33,7 +32,6 @@ __all__ = (
     "plugin",

     "install",
-    "uninstall",
     "maintained_selection",
     "remove_instance",
     "list_instances",

@@ -16,8 +16,6 @@ from openpype.pipeline import (
     legacy_io,
     register_loader_plugin_path,
     register_creator_plugin_path,
-    deregister_loader_plugin_path,
-    deregister_creator_plugin_path,
     AVALON_CONTAINER_ID,
 )

@@ -91,19 +89,6 @@ def install():
     register_event_callback("application.exit", application_exit)


-def uninstall():
-    """Uninstall TVPaint-specific functionality.
-
-    This function is called automatically on calling `uninstall_host()`.
-    """
-
-    log.info("OpenPype - Uninstalling TVPaint integration")
-    pyblish.api.deregister_host("tvpaint")
-    pyblish.api.deregister_plugin_path(PUBLISH_PATH)
-    deregister_loader_plugin_path(LOAD_PATH)
-    deregister_creator_plugin_path(CREATE_PATH)
-
-
 def containerise(
     name, namespace, members, context, loader, current_containers=None
 ):

openpype/hosts/tvpaint/tvpaint_module.py (new file, 41 lines)

@@ -0,0 +1,41 @@
+import os
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import IHostModule
+
+TVPAINT_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+def get_launch_script_path():
+    return os.path.join(
+        TVPAINT_ROOT_DIR,
+        "api",
+        "launch_script.py"
+    )
+
+
+class TVPaintModule(OpenPypeModule, IHostModule):
+    name = "tvpaint"
+    host_name = "tvpaint"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def add_implementation_envs(self, env, _app):
+        """Modify environments to contain all required for implementation."""
+
+        defaults = {
+            "OPENPYPE_LOG_NO_COLORS": "True"
+        }
+        for key, value in defaults.items():
+            if not env.get(key):
+                env[key] = value
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(TVPAINT_ROOT_DIR, "hooks")
+        ]
+
+    def get_workfile_extensions(self):
+        return [".tvpp"]
@@ -1,24 +1,6 @@
-import os
-import openpype.hosts
-from openpype.lib.applications import Application
+from .module import UnrealModule


-def add_implementation_envs(env: dict, _app: Application) -> None:
-    """Modify environments to contain all required for implementation."""
-    # Set OPENPYPE_UNREAL_PLUGIN required for Unreal implementation
-
-    ue_plugin = "UE_5.0" if _app.name[:1] == "5" else "UE_4.7"
-    unreal_plugin_path = os.path.join(
-        os.path.dirname(os.path.abspath(openpype.hosts.__file__)),
-        "unreal", "integration", ue_plugin
-    )
-    if not env.get("OPENPYPE_UNREAL_PLUGIN"):
-        env["OPENPYPE_UNREAL_PLUGIN"] = unreal_plugin_path
-
-    # Set default environments if are not set via settings
-    defaults = {
-        "OPENPYPE_LOG_NO_COLORS": "True"
-    }
-    for key, value in defaults.items():
-        if not env.get(key):
-            env[key] = value
+__all__ = (
+    "UnrealModule",
+)

@@ -19,6 +19,7 @@ from .pipeline import (
     show_tools_dialog,
     show_tools_popup,
     instantiate,
+    UnrealHost,
 )

 __all__ = [

@@ -36,5 +37,6 @@ __all__ = [
     "show_experimental_tools",
     "show_tools_dialog",
     "show_tools_popup",
-    "instantiate"
+    "instantiate",
+    "UnrealHost",
 ]

@@ -14,6 +14,7 @@ from openpype.pipeline import (
 )
 from openpype.tools.utils import host_tools
 import openpype.hosts.unreal
+from openpype.host import HostBase, ILoadHost

 import unreal  # noqa

@@ -29,6 +30,32 @@ CREATE_PATH = os.path.join(PLUGINS_DIR, "create")
 INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory")


+class UnrealHost(HostBase, ILoadHost):
+    """Unreal host implementation.
+
+    For some time this class will re-use functions from module based
+    implementation for backwards compatibility of older unreal projects.
+    """
+
+    name = "unreal"
+
+    def install(self):
+        install()
+
+    def get_containers(self):
+        return ls()
+
+    def show_tools_popup(self):
+        """Show tools popup with actions leading to show other tools."""
+
+        show_tools_popup()
+
+    def show_tools_dialog(self):
+        """Show tools dialog with actions leading to show other tools."""
+
+        show_tools_dialog()
+
+
 def install():
     """Install Unreal configuration for OpenPype."""
     print("-=" * 40)

@@ -3,7 +3,9 @@ import unreal
 openpype_detected = True
 try:
     from openpype.pipeline import install_host
-    from openpype.hosts.unreal import api as openpype_host
+    from openpype.hosts.unreal.api import UnrealHost
+
+    openpype_host = UnrealHost()
 except ImportError as exc:
     openpype_host = None
     openpype_detected = False

@@ -3,7 +3,9 @@ import unreal
 openpype_detected = True
 try:
     from openpype.pipeline import install_host
-    from openpype.hosts.unreal import api as openpype_host
+    from openpype.hosts.unreal.api import UnrealHost
+
+    openpype_host = UnrealHost()
 except ImportError as exc:
     openpype_host = None
     openpype_detected = False

openpype/hosts/unreal/module.py (new file, 42 lines)

@@ -0,0 +1,42 @@
+import os
+from openpype.modules import OpenPypeModule
+from openpype.modules.interfaces import IHostModule
+
+UNREAL_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
+class UnrealModule(OpenPypeModule, IHostModule):
+    name = "unreal"
+    host_name = "unreal"
+
+    def initialize(self, module_settings):
+        self.enabled = True
+
+    def add_implementation_envs(self, env, app) -> None:
+        """Modify environments to contain all required for implementation."""
+        # Set OPENPYPE_UNREAL_PLUGIN required for Unreal implementation
+
+        ue_plugin = "UE_5.0" if app.name[:1] == "5" else "UE_4.7"
+        unreal_plugin_path = os.path.join(
+            UNREAL_ROOT_DIR, "integration", ue_plugin
+        )
+        if not env.get("OPENPYPE_UNREAL_PLUGIN"):
+            env["OPENPYPE_UNREAL_PLUGIN"] = unreal_plugin_path
+
+        # Set default environments if are not set via settings
+        defaults = {
+            "OPENPYPE_LOG_NO_COLORS": "True"
+        }
+        for key, value in defaults.items():
+            if not env.get(key):
+                env[key] = value
+
+    def get_launch_hook_paths(self, app):
+        if app.host_name != self.host_name:
+            return []
+        return [
+            os.path.join(UNREAL_ROOT_DIR, "hooks")
+        ]
+
+    def get_workfile_extensions(self):
+        return [".uproject"]

@@ -20,6 +20,34 @@ class SkeletalMeshAlembicLoader(plugin.Loader):
     icon = "cube"
     color = "orange"

+    def get_task(self, filename, asset_dir, asset_name, replace):
+        task = unreal.AssetImportTask()
+        options = unreal.AbcImportSettings()
+        sm_settings = unreal.AbcStaticMeshSettings()
+        conversion_settings = unreal.AbcConversionSettings(
+            preset=unreal.AbcConversionPreset.CUSTOM,
+            flip_u=False, flip_v=False,
+            rotation=[0.0, 0.0, 0.0],
+            scale=[1.0, 1.0, 1.0])
+
+        task.set_editor_property('filename', filename)
+        task.set_editor_property('destination_path', asset_dir)
+        task.set_editor_property('destination_name', asset_name)
+        task.set_editor_property('replace_existing', replace)
+        task.set_editor_property('automated', True)
+        task.set_editor_property('save', True)
+
+        # set import options here
+        # Unreal 4.24 ignores the settings. It works with Unreal 4.26
+        options.set_editor_property(
+            'import_type', unreal.AlembicImportType.SKELETAL)
+
+        options.static_mesh_settings = sm_settings
+        options.conversion_settings = conversion_settings
+        task.options = options
+
+        return task
+
     def load(self, context, name, namespace, data):
         """Load and containerise representation into Content Browser.

@@ -50,36 +78,24 @@ class SkeletalMeshAlembicLoader(plugin.Loader):
             asset_name = "{}_{}".format(asset, name)
         else:
             asset_name = "{}".format(name)
+        version = context.get('version').get('name')

         tools = unreal.AssetToolsHelpers().get_asset_tools()
         asset_dir, container_name = tools.create_unique_asset_name(
-            "{}/{}/{}".format(root, asset, name), suffix="")
+            f"{root}/{asset}/{name}_v{version:03d}", suffix="")

         container_name += suffix

-        unreal.EditorAssetLibrary.make_directory(asset_dir)
+        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
+            unreal.EditorAssetLibrary.make_directory(asset_dir)

-        task = unreal.AssetImportTask()
+        task = self.get_task(self.fname, asset_dir, asset_name, False)

-        task.set_editor_property('filename', self.fname)
-        task.set_editor_property('destination_path', asset_dir)
-        task.set_editor_property('destination_name', asset_name)
-        task.set_editor_property('replace_existing', False)
-        task.set_editor_property('automated', True)
-        task.set_editor_property('save', True)
+        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])  # noqa: E501

-        # set import options here
-        # Unreal 4.24 ignores the settings. It works with Unreal 4.26
-        options = unreal.AbcImportSettings()
-        options.set_editor_property(
-            'import_type', unreal.AlembicImportType.SKELETAL)
-
-        task.options = options
-        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])  # noqa: E501
-
-        # Create Asset Container
-        unreal_pipeline.create_container(
-            container=container_name, path=asset_dir)
+        # Create Asset Container
+        unreal_pipeline.create_container(
+            container=container_name, path=asset_dir)

         data = {
             "schema": "openpype:container-2.0",

@@ -110,23 +126,8 @@ class SkeletalMeshAlembicLoader(plugin.Loader):
         source_path = get_representation_path(representation)
         destination_path = container["namespace"]

-        task = unreal.AssetImportTask()
+        task = self.get_task(source_path, destination_path, name, True)

-        task.set_editor_property('filename', source_path)
-        task.set_editor_property('destination_path', destination_path)
-        # strip suffix
-        task.set_editor_property('destination_name', name)
-        task.set_editor_property('replace_existing', True)
-        task.set_editor_property('automated', True)
-        task.set_editor_property('save', True)
-
-        # set import options here
-        # Unreal 4.24 ignores the settings. It works with Unreal 4.26
-        options = unreal.AbcImportSettings()
-        options.set_editor_property(
-            'import_type', unreal.AlembicImportType.SKELETAL)
-
-        task.options = options
         # do import fbx and replace existing data
         unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])
         container_path = "{}/{}".format(container["namespace"],

@@ -24,7 +24,11 @@ class StaticMeshAlembicLoader(plugin.Loader):
         task = unreal.AssetImportTask()
         options = unreal.AbcImportSettings()
         sm_settings = unreal.AbcStaticMeshSettings()
-        conversion_settings = unreal.AbcConversionSettings()
+        conversion_settings = unreal.AbcConversionSettings(
+            preset=unreal.AbcConversionPreset.CUSTOM,
+            flip_u=False, flip_v=False,
+            rotation=[0.0, 0.0, 0.0],
+            scale=[1.0, 1.0, 1.0])

         task.set_editor_property('filename', filename)
         task.set_editor_property('destination_path', asset_dir)

@@ -40,13 +44,6 @@ class StaticMeshAlembicLoader(plugin.Loader):

         sm_settings.set_editor_property('merge_meshes', True)

-        conversion_settings.set_editor_property('flip_u', False)
-        conversion_settings.set_editor_property('flip_v', True)
-        conversion_settings.set_editor_property(
-            'scale', unreal.Vector(x=100.0, y=100.0, z=100.0))
-        conversion_settings.set_editor_property(
-            'rotation', unreal.Vector(x=-90.0, y=0.0, z=180.0))
-
         options.static_mesh_settings = sm_settings
         options.conversion_settings = conversion_settings
         task.options = options

@@ -83,22 +80,24 @@ class StaticMeshAlembicLoader(plugin.Loader):
             asset_name = "{}_{}".format(asset, name)
         else:
             asset_name = "{}".format(name)
+        version = context.get('version').get('name')

         tools = unreal.AssetToolsHelpers().get_asset_tools()
         asset_dir, container_name = tools.create_unique_asset_name(
-            "{}/{}/{}".format(root, asset, name), suffix="")
+            f"{root}/{asset}/{name}_v{version:03d}", suffix="")

         container_name += suffix

-        unreal.EditorAssetLibrary.make_directory(asset_dir)
+        if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir):
+            unreal.EditorAssetLibrary.make_directory(asset_dir)

-        task = self.get_task(self.fname, asset_dir, asset_name, False)
+            task = self.get_task(self.fname, asset_dir, asset_name, False)

-        unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])  # noqa: E501
+            unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks([task])  # noqa: E501

-        # Create Asset Container
-        unreal_pipeline.create_container(
-            container=container_name, path=asset_dir)
+            # Create Asset Container
+            unreal_pipeline.create_container(
+                container=container_name, path=asset_dir)

         data = {
             "schema": "openpype:container-2.0",

@@ -9,7 +9,10 @@ from unreal import EditorLevelLibrary
 from unreal import EditorLevelUtils
 from unreal import AssetToolsHelpers
 from unreal import FBXImportType
 from unreal import MathLibrary as umath
+from unreal import MovieSceneLevelVisibilityTrack
+from unreal import MovieSceneSubTrack

+from bson.objectid import ObjectId

 from openpype.client import get_asset_by_name, get_assets
 from openpype.pipeline import (

@@ -21,6 +24,7 @@ from openpype.pipeline import (
     legacy_io,
 )
 from openpype.pipeline.context_tools import get_current_project_asset
+from openpype.api import get_current_project_settings
 from openpype.hosts.unreal.api import plugin
 from openpype.hosts.unreal.api import pipeline as unreal_pipeline

@@ -159,9 +163,29 @@ class LayoutLoader(plugin.Loader):
             hid_section.set_row_index(index)
             hid_section.set_level_names(maps)

+    @staticmethod
+    def _transform_from_basis(self, transform, basis):
+        """Transform a transform from a basis to a new basis."""
+        # Get the basis matrix
+        basis_matrix = unreal.Matrix(
+            basis[0],
+            basis[1],
+            basis[2],
+            basis[3]
+        )
+        transform_matrix = unreal.Matrix(
+            transform[0],
+            transform[1],
+            transform[2],
+            transform[3]
+        )
+
+        new_transform = (
+            basis_matrix.get_inverse() * transform_matrix * basis_matrix)
+
+        return new_transform.transform()
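
The conjugation above is the standard change of basis for a matrix: with $B$ the basis matrix and $T$ the stored transform (both 4×4, built row by row from the JSON arrays), the spawned actor's transform is

$$T' = B^{-1}\, T\, B$$

so transforms authored in the DCC's coordinate system get re-expressed in Unreal's. The factor order appears to follow the row-vector convention `unreal.Matrix` uses here; that reading is inferred from the code, not stated in the commit.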

     def _process_family(
-        assets, class_name, transform, sequence, inst_name=None
+        self, assets, class_name, transform, basis, sequence, inst_name=None
     ):
         ar = unreal.AssetRegistryHelpers.get_asset_registry()

@@ -171,30 +195,12 @@ class LayoutLoader(plugin.Loader):
         for asset in assets:
             obj = ar.get_asset_by_object_path(asset).get_asset()
             if obj.get_class().get_name() == class_name:
+                t = self._transform_from_basis(transform, basis)
                 actor = EditorLevelLibrary.spawn_actor_from_object(
-                    obj,
-                    transform.get('translation')
+                    obj, t.translation
                 )
                 if inst_name:
                     try:
                         # Rename method leads to crash
                         # actor.rename(name=inst_name)

                         # The label works, although it make it slightly more
                         # complicated to check for the names, as we need to
                         # loop through all the actors in the level
                         actor.set_actor_label(inst_name)
                     except Exception as e:
                         print(e)
-                actor.set_actor_rotation(unreal.Rotator(
-                    umath.radians_to_degrees(
-                        transform.get('rotation').get('x')),
-                    -umath.radians_to_degrees(
-                        transform.get('rotation').get('y')),
-                    umath.radians_to_degrees(
-                        transform.get('rotation').get('z')),
-                ), False)
-                actor.set_actor_scale3d(transform.get('scale'))
+                actor.set_actor_rotation(t.rotation.rotator(), False)
+                actor.set_actor_scale3d(t.scale3d)

                 if class_name == 'SkeletalMesh':
                     skm_comp = actor.get_editor_property(

@@ -203,16 +209,17 @@ class LayoutLoader(plugin.Loader):

             actors.append(actor)

-            binding = None
-            for p in sequence.get_possessables():
-                if p.get_name() == actor.get_name():
-                    binding = p
-                    break
+            if sequence:
+                binding = None
+                for p in sequence.get_possessables():
+                    if p.get_name() == actor.get_name():
+                        binding = p
+                        break

-            if not binding:
-                binding = sequence.add_possessable(actor)
+                if not binding:
+                    binding = sequence.add_possessable(actor)

-            bindings.append(binding)
+                bindings.append(binding)

         return actors, bindings

@@ -301,52 +308,53 @@ class LayoutLoader(plugin.Loader):
             actor.skeletal_mesh_component.animation_data.set_editor_property(
                 'anim_to_play', animation)

-        # Add animation to the sequencer
-        bindings = bindings_dict.get(instance_name)
+        if sequence:
+            # Add animation to the sequencer
+            bindings = bindings_dict.get(instance_name)

-        ar = unreal.AssetRegistryHelpers.get_asset_registry()
+            ar = unreal.AssetRegistryHelpers.get_asset_registry()

-        for binding in bindings:
-            tracks = binding.get_tracks()
-            track = None
-            track = tracks[0] if tracks else binding.add_track(
-                unreal.MovieSceneSkeletalAnimationTrack)
+            for binding in bindings:
+                tracks = binding.get_tracks()
+                track = None
+                track = tracks[0] if tracks else binding.add_track(
+                    unreal.MovieSceneSkeletalAnimationTrack)

-            sections = track.get_sections()
-            section = None
-            if not sections:
-                section = track.add_section()
-            else:
-                section = sections[0]
+                sections = track.get_sections()
+                section = None
+                if not sections:
+                    section = track.add_section()
+                else:
+                    section = sections[0]

-            sec_params = section.get_editor_property('params')
-            curr_anim = sec_params.get_editor_property('animation')
-
-            if curr_anim:
-                # Checks if the animation path has a container.
-                # If it does, it means that the animation is
-                # already in the sequencer.
-                anim_path = str(Path(
-                    curr_anim.get_path_name()).parent
-                ).replace('\\', '/')
-
-                _filter = unreal.ARFilter(
-                    class_names=["AssetContainer"],
-                    package_paths=[anim_path],
-                    recursive_paths=False)
-                containers = ar.get_assets(_filter)
-
-                if len(containers) > 0:
-                    return
-
-            section.set_range(
-                sequence.get_playback_start(),
-                sequence.get_playback_end())
-            sec_params = section.get_editor_property('params')
-            sec_params.set_editor_property('animation', animation)
+                sec_params = section.get_editor_property('params')
+                curr_anim = sec_params.get_editor_property('animation')
+
+                if curr_anim:
+                    # Checks if the animation path has a container.
+                    # If it does, it means that the animation is already
+                    # in the sequencer.
+                    anim_path = str(Path(
+                        curr_anim.get_path_name()).parent
+                    ).replace('\\', '/')
+
+                    _filter = unreal.ARFilter(
+                        class_names=["AssetContainer"],
+                        package_paths=[anim_path],
+                        recursive_paths=False)
+                    containers = ar.get_assets(_filter)
+
+                    if len(containers) > 0:
+                        return
+
+                section.set_range(
+                    sequence.get_playback_start(),
+                    sequence.get_playback_end())
+                sec_params = section.get_editor_property('params')
+                sec_params.set_editor_property('animation', animation)

     @staticmethod
-    def _generate_sequence(self, h, h_dir):
+    def _generate_sequence(h, h_dir):
         tools = unreal.AssetToolsHelpers().get_asset_tools()

         sequence = tools.create_asset(

@@ -402,7 +410,7 @@ class LayoutLoader(plugin.Loader):

         return sequence, (min_frame, max_frame)

-    def _process(self, lib_path, asset_dir, sequence, loaded=None):
+    def _process(self, lib_path, asset_dir, sequence, repr_loaded=None):
         ar = unreal.AssetRegistryHelpers.get_asset_registry()

         with open(lib_path, "r") as fp:

@@ -410,8 +418,8 @@ class LayoutLoader(plugin.Loader):

         all_loaders = discover_loader_plugins()

-        if not loaded:
-            loaded = []
+        if not repr_loaded:
+            repr_loaded = []

         path = Path(lib_path)

@@ -422,36 +430,65 @@ class LayoutLoader(plugin.Loader):
         loaded_assets = []

         for element in data:
-            reference = None
-            if element.get('reference_fbx'):
-                reference = element.get('reference_fbx')
+            representation = None
+            repr_format = None
+            if element.get('representation'):
+                # representation = element.get('representation')
+
+                self.log.info(element.get("version"))
+
+                valid_formats = ['fbx', 'abc']
+
+                repr_data = legacy_io.find_one({
+                    "type": "representation",
+                    "parent": ObjectId(element.get("version")),
+                    "name": {"$in": valid_formats}
+                })
+                repr_format = repr_data.get('name')
+
+                if not repr_data:
+                    self.log.error(
+                        f"No valid representation found for version "
+                        f"{element.get('version')}")
+                    continue
+
+                representation = str(repr_data.get('_id'))
+                print(representation)
+            # This is to keep compatibility with old versions of the
+            # json format.
+            elif element.get('reference_fbx'):
+                representation = element.get('reference_fbx')
+                repr_format = 'fbx'
             elif element.get('reference_abc'):
-                reference = element.get('reference_abc')
+                representation = element.get('reference_abc')
+                repr_format = 'abc'

             # If reference is None, this element is skipped, as it cannot be
             # imported in Unreal
-            if not reference:
+            if not representation:
                 continue

             instance_name = element.get('instance_name')

             skeleton = None

-            if reference not in loaded:
-                loaded.append(reference)
+            if representation not in repr_loaded:
+                repr_loaded.append(representation)

                 family = element.get('family')
                 loaders = loaders_from_representation(
-                    all_loaders, reference)
+                    all_loaders, representation)

                 loader = None

-                if reference == element.get('reference_fbx'):
+                if repr_format == 'fbx':
                     loader = self._get_fbx_loader(loaders, family)
-                elif reference == element.get('reference_abc'):
+                elif repr_format == 'abc':
                     loader = self._get_abc_loader(loaders, family)

+                if not loader:
+                    self.log.error(
+                        f"No valid loader found for {representation}")
+                    continue
+
                 options = {

@@ -460,7 +497,7 @@ class LayoutLoader(plugin.Loader):

                 assets = load_container(
                     loader,
-                    reference,
+                    representation,
                     namespace=instance_name,
                     options=options
                 )

@@ -478,28 +515,36 @@ class LayoutLoader(plugin.Loader):

                 instances = [
                     item for item in data
-                    if (item.get('reference_fbx') == reference or
-                        item.get('reference_abc') == reference)]
+                    if ((item.get('version') and
+                        item.get('version') == element.get('version')) or
+                        item.get('reference_fbx') == representation or
+                        item.get('reference_abc') == representation)]

                 for instance in instances:
-                    transform = instance.get('transform')
+                    # transform = instance.get('transform')
+                    transform = instance.get('transform_matrix')
+                    basis = instance.get('basis')
                     inst = instance.get('instance_name')

                     actors = []

                     if family == 'model':
                         actors, _ = self._process_family(
-                            assets, 'StaticMesh', transform, sequence, inst)
+                            assets, 'StaticMesh', transform, basis,
+                            sequence, inst
+                        )
                     elif family == 'rig':
                         actors, bindings = self._process_family(
-                            assets, 'SkeletalMesh', transform, sequence, inst)
+                            assets, 'SkeletalMesh', transform, basis,
+                            sequence, inst
+                        )
                         actors_dict[inst] = actors
                         bindings_dict[inst] = bindings

                 if skeleton:
-                    skeleton_dict[reference] = skeleton
+                    skeleton_dict[representation] = skeleton
             else:
-                skeleton = skeleton_dict.get(reference)
+                skeleton = skeleton_dict.get(representation)

             animation_file = element.get('animation')

@@ -573,6 +618,9 @@ class LayoutLoader(plugin.Loader):
         Returns:
             list(str): list of container content
         """
+        data = get_current_project_settings()
+        create_sequences = data["unreal"]["level_sequences_for_layouts"]
+
         # Create directory for asset and avalon container
         hierarchy = context.get('asset').get('data').get('parents')
         root = self.ASSET_ROOT

@@ -593,81 +641,88 @@ class LayoutLoader(plugin.Loader):

         EditorAssetLibrary.make_directory(asset_dir)

-        # Create map for the shot, and create hierarchy of map. If the maps
-        # already exist, we will use them.
-        h_dir = hierarchy_dir_list[0]
-        h_asset = hierarchy[0]
-        master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
-        if not EditorAssetLibrary.does_asset_exist(master_level):
-            EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")
+        master_level = None
+        shot = None
+        sequences = []

         level = f"{asset_dir}/{asset}_map.{asset}_map"
         EditorLevelLibrary.new_level(f"{asset_dir}/{asset}_map")

-        EditorLevelLibrary.load_level(master_level)
-        EditorLevelUtils.add_level_to_world(
-            EditorLevelLibrary.get_editor_world(),
-            level,
-            unreal.LevelStreamingDynamic
-        )
-        EditorLevelLibrary.save_all_dirty_levels()
-        EditorLevelLibrary.load_level(level)
+        if create_sequences:
+            # Create map for the shot, and create hierarchy of map. If the
+            # maps already exist, we will use them.
+            if hierarchy:
+                h_dir = hierarchy_dir_list[0]
+                h_asset = hierarchy[0]
+                master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
+                if not EditorAssetLibrary.does_asset_exist(master_level):
+                    EditorLevelLibrary.new_level(f"{h_dir}/{h_asset}_map")

-        # Get all the sequences in the hierarchy. It will create them, if
-        # they don't exist.
-        sequences = []
-        frame_ranges = []
-        for (h_dir, h) in zip(hierarchy_dir_list, hierarchy):
-            root_content = EditorAssetLibrary.list_assets(
-                h_dir, recursive=False, include_folder=False)
+            if master_level:
+                EditorLevelLibrary.load_level(master_level)
+                EditorLevelUtils.add_level_to_world(
+                    EditorLevelLibrary.get_editor_world(),
+                    level,
+                    unreal.LevelStreamingDynamic
+                )
+                EditorLevelLibrary.save_all_dirty_levels()
+                EditorLevelLibrary.load_level(level)

-            existing_sequences = [
-                EditorAssetLibrary.find_asset_data(asset)
-                for asset in root_content
-                if EditorAssetLibrary.find_asset_data(
-                    asset).get_class().get_name() == 'LevelSequence'
-            ]
+            # Get all the sequences in the hierarchy. It will create them, if
+            # they don't exist.
+            frame_ranges = []
+            for (h_dir, h) in zip(hierarchy_dir_list, hierarchy):
+                root_content = EditorAssetLibrary.list_assets(
+                    h_dir, recursive=False, include_folder=False)

-            if not existing_sequences:
-                sequence, frame_range = self._generate_sequence(h, h_dir)
+                existing_sequences = [
+                    EditorAssetLibrary.find_asset_data(asset)
+                    for asset in root_content
+                    if EditorAssetLibrary.find_asset_data(
+                        asset).get_class().get_name() == 'LevelSequence'
+                ]

-                sequences.append(sequence)
-                frame_ranges.append(frame_range)
-            else:
-                for e in existing_sequences:
-                    sequences.append(e.get_asset())
-                    frame_ranges.append((
-                        e.get_asset().get_playback_start(),
-                        e.get_asset().get_playback_end()))
+                if not existing_sequences:
+                    sequence, frame_range = self._generate_sequence(h, h_dir)

-        shot = tools.create_asset(
-            asset_name=asset,
-            package_path=asset_dir,
-            asset_class=unreal.LevelSequence,
-            factory=unreal.LevelSequenceFactoryNew()
-        )
+                    sequences.append(sequence)
+                    frame_ranges.append(frame_range)
+                else:
+                    for e in existing_sequences:
+                        sequences.append(e.get_asset())
+                        frame_ranges.append((
+                            e.get_asset().get_playback_start(),
+                            e.get_asset().get_playback_end()))

-        # sequences and frame_ranges have the same length
-        for i in range(0, len(sequences) - 1):
-            self._set_sequence_hierarchy(
-                sequences[i], sequences[i + 1],
-                frame_ranges[i][1],
-                frame_ranges[i + 1][0], frame_ranges[i + 1][1],
-                [level])
+            shot = tools.create_asset(
+                asset_name=asset,
+                package_path=asset_dir,
+                asset_class=unreal.LevelSequence,
+                factory=unreal.LevelSequenceFactoryNew()
+            )

-        project_name = legacy_io.active_project()
-        data = get_asset_by_name(project_name, asset)["data"]
-        shot.set_display_rate(
-            unreal.FrameRate(data.get("fps"), 1.0))
-        shot.set_playback_start(0)
-        shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1)
-        self._set_sequence_hierarchy(
-            sequences[-1], shot,
-            frame_ranges[-1][1],
-            data.get('clipIn'), data.get('clipOut'),
-            [level])
+            # sequences and frame_ranges have the same length
+            for i in range(0, len(sequences) - 1):
+                self._set_sequence_hierarchy(
+                    sequences[i], sequences[i + 1],
+                    frame_ranges[i][1],
+                    frame_ranges[i + 1][0], frame_ranges[i + 1][1],
+                    [level])

-        EditorLevelLibrary.load_level(level)
+            project_name = legacy_io.active_project()
+            data = get_asset_by_name(project_name, asset)["data"]
+            shot.set_display_rate(
+                unreal.FrameRate(data.get("fps"), 1.0))
+            shot.set_playback_start(0)
+            shot.set_playback_end(data.get('clipOut') - data.get('clipIn') + 1)
+            if sequences:
+                self._set_sequence_hierarchy(
+                    sequences[-1], shot,
+                    frame_ranges[-1][1],
+                    data.get('clipIn'), data.get('clipOut'),
+                    [level])
+
+            EditorLevelLibrary.load_level(level)

         loaded_assets = self._process(self.fname, asset_dir, shot)

@@ -702,32 +757,47 @@ class LayoutLoader(plugin.Loader):
         for a in asset_content:
             EditorAssetLibrary.save_asset(a)

-        EditorLevelLibrary.load_level(master_level)
+        if master_level:
+            EditorLevelLibrary.load_level(master_level)

         return asset_content

     def update(self, container, representation):
+        data = get_current_project_settings()
+        create_sequences = data["unreal"]["level_sequences_for_layouts"]
+
+        ar = unreal.AssetRegistryHelpers.get_asset_registry()
+
         root = "/Game/OpenPype"

         asset_dir = container.get('namespace')

         context = representation.get("context")

-        hierarchy = context.get('hierarchy').split("/")
-        h_dir = f"{root}/{hierarchy[0]}"
-        h_asset = hierarchy[0]
-        master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
+        sequence = None
+        master_level = None

-        # # Create a temporary level to delete the layout level.
-        # EditorLevelLibrary.save_all_dirty_levels()
-        # EditorAssetLibrary.make_directory(f"{root}/tmp")
-        # tmp_level = f"{root}/tmp/temp_map"
-        # if not EditorAssetLibrary.does_asset_exist(f"{tmp_level}.temp_map"):
-        #     EditorLevelLibrary.new_level(tmp_level)
-        # else:
-        #     EditorLevelLibrary.load_level(tmp_level)
+        if create_sequences:
+            hierarchy = context.get('hierarchy').split("/")
+            h_dir = f"{root}/{hierarchy[0]}"
+            h_asset = hierarchy[0]
+            master_level = f"{h_dir}/{h_asset}_map.{h_asset}_map"
+
+            filter = unreal.ARFilter(
+                class_names=["LevelSequence"],
+                package_paths=[asset_dir],
+                recursive_paths=False)
+            sequences = ar.get_assets(filter)
+            sequence = sequences[0].get_asset()
+
+        prev_level = None
+
+        if not master_level:
+            curr_level = unreal.LevelEditorSubsystem().get_current_level()
+            curr_level_path = curr_level.get_outer().get_path_name()
+            # If the level path does not start with "/Game/", the current
+            # level is a temporary, unsaved level.
+            if curr_level_path.startswith("/Game/"):
+                prev_level = curr_level_path

         # Get layout level
         filter = unreal.ARFilter(

@@ -735,11 +805,6 @@ class LayoutLoader(plugin.Loader):
             package_paths=[asset_dir],
             recursive_paths=False)
         levels = ar.get_assets(filter)
-        filter = unreal.ARFilter(
-            class_names=["LevelSequence"],
-            package_paths=[asset_dir],
-            recursive_paths=False)
-        sequences = ar.get_assets(filter)

         layout_level = levels[0].get_editor_property('object_path')

@@ -751,14 +816,14 @@ class LayoutLoader(plugin.Loader):
         for actor in actors:
             unreal.EditorLevelLibrary.destroy_actor(actor)

-        EditorLevelLibrary.save_current_level()
+        if create_sequences:
+            EditorLevelLibrary.save_current_level()

         EditorAssetLibrary.delete_directory(f"{asset_dir}/animations/")

         source_path = get_representation_path(representation)

-        loaded_assets = self._process(
-            source_path, asset_dir, sequences[0].get_asset())
+        loaded_assets = self._process(source_path, asset_dir, sequence)

         data = {
             "representation": str(representation["_id"]),
|
||||
|
|
@ -776,13 +841,20 @@ class LayoutLoader(plugin.Loader):
|
|||
for a in asset_content:
|
||||
EditorAssetLibrary.save_asset(a)
|
||||
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
if master_level:
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
elif prev_level:
|
||||
EditorLevelLibrary.load_level(prev_level)
|
||||
|
||||
def remove(self, container):
|
||||
"""
|
||||
Delete the layout. First, check if the assets loaded with the layout
|
||||
are used by other layouts. If not, delete the assets.
|
||||
"""
|
||||
data = get_current_project_settings()
|
||||
create_sequences = data["unreal"]["level_sequences_for_layouts"]
|
||||
|
||||
root = "/Game/OpenPype"
|
||||
path = Path(container.get("namespace"))
|
||||
|
||||
containers = unreal_pipeline.ls()
|
||||
|
|
@ -793,7 +865,7 @@ class LayoutLoader(plugin.Loader):
|
|||
|
||||
# Check if the assets have been loaded by other layouts, and deletes
|
||||
# them if they haven't.
|
||||
for asset in container.get('loaded_assets'):
|
||||
for asset in eval(container.get('loaded_assets')):
|
||||
layouts = [
|
||||
lc for lc in layout_containers
|
||||
if asset in lc.get('loaded_assets')]
|
||||
|
|
@ -801,71 +873,87 @@ class LayoutLoader(plugin.Loader):
|
|||
if not layouts:
|
||||
EditorAssetLibrary.delete_directory(str(Path(asset).parent))
|
||||
|
||||
# Remove the Level Sequence from the parent.
|
||||
# We need to traverse the hierarchy from the master sequence to find
|
||||
# the level sequence.
|
||||
root = "/Game/OpenPype"
|
||||
namespace = container.get('namespace').replace(f"{root}/", "")
|
||||
ms_asset = namespace.split('/')[0]
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
_filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(_filter)
|
||||
master_sequence = sequences[0].get_asset()
|
||||
_filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
levels = ar.get_assets(_filter)
|
||||
master_level = levels[0].get_editor_property('object_path')
|
||||
# Delete the parent folder if there aren't any more
|
||||
# layouts in it.
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
str(Path(asset).parent.parent), recursive=False,
|
||||
include_folder=True
|
||||
)
|
||||
|
||||
sequences = [master_sequence]
|
||||
if len(asset_content) == 0:
|
||||
EditorAssetLibrary.delete_directory(
|
||||
str(Path(asset).parent.parent))
|
||||
|
||||
parent = None
|
||||
for s in sequences:
|
||||
tracks = s.get_master_tracks()
|
||||
subscene_track = None
|
||||
visibility_track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == unreal.MovieSceneSubTrack.static_class():
|
||||
subscene_track = t
|
||||
if (t.get_class() ==
|
||||
unreal.MovieSceneLevelVisibilityTrack.static_class()):
|
||||
visibility_track = t
|
||||
if subscene_track:
|
||||
sections = subscene_track.get_sections()
|
||||
for ss in sections:
|
||||
if ss.get_sequence().get_name() == container.get('asset'):
|
||||
parent = s
|
||||
subscene_track.remove_section(ss)
|
||||
break
|
||||
sequences.append(ss.get_sequence())
|
||||
# Update subscenes indexes.
|
||||
i = 0
|
||||
for ss in sections:
|
||||
ss.set_row_index(i)
|
||||
i += 1
|
||||
master_sequence = None
|
||||
master_level = None
|
||||
sequences = []
|
||||
|
||||
if visibility_track:
|
||||
sections = visibility_track.get_sections()
|
||||
for ss in sections:
|
||||
if (unreal.Name(f"{container.get('asset')}_map")
|
||||
in ss.get_level_names()):
|
||||
visibility_track.remove_section(ss)
|
||||
# Update visibility sections indexes.
|
||||
i = -1
|
||||
prev_name = []
|
||||
for ss in sections:
|
||||
if prev_name != ss.get_level_names():
|
||||
if create_sequences:
|
||||
# Remove the Level Sequence from the parent.
|
||||
# We need to traverse the hierarchy from the master sequence to
|
||||
# find the level sequence.
|
||||
namespace = container.get('namespace').replace(f"{root}/", "")
|
||||
ms_asset = namespace.split('/')[0]
|
||||
ar = unreal.AssetRegistryHelpers.get_asset_registry()
|
||||
_filter = unreal.ARFilter(
|
||||
class_names=["LevelSequence"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
sequences = ar.get_assets(_filter)
|
||||
master_sequence = sequences[0].get_asset()
|
||||
_filter = unreal.ARFilter(
|
||||
class_names=["World"],
|
||||
package_paths=[f"{root}/{ms_asset}"],
|
||||
recursive_paths=False)
|
||||
levels = ar.get_assets(_filter)
|
||||
master_level = levels[0].get_editor_property('object_path')
|
||||
|
||||
sequences = [master_sequence]
|
||||
|
||||
parent = None
|
||||
for s in sequences:
|
||||
tracks = s.get_master_tracks()
|
||||
subscene_track = None
|
||||
visibility_track = None
|
||||
for t in tracks:
|
||||
if t.get_class() == MovieSceneSubTrack.static_class():
|
||||
subscene_track = t
|
||||
if (t.get_class() ==
|
||||
MovieSceneLevelVisibilityTrack.static_class()):
|
||||
visibility_track = t
|
||||
if subscene_track:
|
||||
sections = subscene_track.get_sections()
|
||||
for ss in sections:
|
||||
if (ss.get_sequence().get_name() ==
|
||||
container.get('asset')):
|
||||
parent = s
|
||||
subscene_track.remove_section(ss)
|
||||
break
|
||||
sequences.append(ss.get_sequence())
|
||||
# Update subscenes indexes.
|
||||
i = 0
|
||||
for ss in sections:
|
||||
ss.set_row_index(i)
|
||||
i += 1
|
||||
ss.set_row_index(i)
|
||||
prev_name = ss.get_level_names()
|
||||
if parent:
|
||||
break
|
||||
|
||||
assert parent, "Could not find the parent sequence"
|
||||
if visibility_track:
|
||||
sections = visibility_track.get_sections()
|
||||
for ss in sections:
|
||||
if (unreal.Name(f"{container.get('asset')}_map")
|
||||
in ss.get_level_names()):
|
||||
visibility_track.remove_section(ss)
|
||||
# Update visibility sections indexes.
|
||||
i = -1
|
||||
prev_name = []
|
||||
for ss in sections:
|
||||
if prev_name != ss.get_level_names():
|
||||
i += 1
|
||||
ss.set_row_index(i)
|
||||
prev_name = ss.get_level_names()
|
||||
if parent:
|
||||
break
|
||||
|
||||
assert parent, "Could not find the parent sequence"
|
||||
|
||||
# Create a temporary level to delete the layout level.
|
||||
EditorLevelLibrary.save_all_dirty_levels()
|
||||
|
|
@ -879,10 +967,9 @@ class LayoutLoader(plugin.Loader):
|
|||
# Delete the layout directory.
|
||||
EditorAssetLibrary.delete_directory(str(path))
|
||||
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
EditorAssetLibrary.delete_directory(f"{root}/tmp")
|
||||
|
||||
EditorLevelLibrary.save_current_level()
|
||||
if create_sequences:
|
||||
EditorLevelLibrary.load_level(master_level)
|
||||
EditorAssetLibrary.delete_directory(f"{root}/tmp")
|
||||
|
||||
# Delete the parent folder if there aren't any more layouts in it.
|
||||
asset_content = EditorAssetLibrary.list_assets(
|
||||
|
|
|
|||
|
|
@@ -957,32 +957,24 @@ class ApplicationLaunchContext:
        # TODO load additional studio paths from settings
        import openpype
        pype_dir = os.path.dirname(os.path.abspath(openpype.__file__))
        openpype_dir = os.path.dirname(os.path.abspath(openpype.__file__))

        # --- START: Backwards compatibility ---
        hooks_dir = os.path.join(pype_dir, "hooks")
        global_hooks_dir = os.path.join(openpype_dir, "hooks")

        subfolder_names = ["global"]
        if self.host_name:
            subfolder_names.append(self.host_name)
        for subfolder_name in subfolder_names:
            path = os.path.join(hooks_dir, subfolder_name)
            if (
                os.path.exists(path)
                and os.path.isdir(path)
                and path not in paths
            ):
                paths.append(path)
        # --- END: Backwards compatibility ---

        subfolders_list = [
            ["hooks"]
        hooks_dirs = [
            global_hooks_dir
        ]
        if self.host_name:
            subfolders_list.append(["hosts", self.host_name, "hooks"])
            # If the host requires launch hooks and is a module, then launch
            # hooks should be collected using 'collect_launch_hook_paths'
            # - the module has to implement 'get_launch_hook_paths'
            host_module = self.modules_manager.get_host_module(self.host_name)
            if not host_module:
                hooks_dirs.append(os.path.join(
                    openpype_dir, "hosts", self.host_name, "hooks"
                ))

        for subfolders in subfolders_list:
            path = os.path.join(pype_dir, *subfolders)
        for path in hooks_dirs:
            if (
                os.path.exists(path)
                and os.path.isdir(path)

@@ -991,7 +983,9 @@ class ApplicationLaunchContext:
                paths.append(path)

        # Load modules paths
        paths.extend(self.modules_manager.collect_launch_hook_paths())
        paths.extend(
            self.modules_manager.collect_launch_hook_paths(self.application)
        )

        return paths

@@ -1304,6 +1298,7 @@ def get_app_environments_for_context(
        dict: Environments for passed context and application.
    """

    from openpype.modules import ModulesManager
    from openpype.pipeline import AvalonMongoDB, Anatomy

    # Avalon database connection

@@ -1316,8 +1311,6 @@ def get_app_environments_for_context(
    asset_doc = get_asset_by_name(project_name, asset_name)

    if modules_manager is None:
        from openpype.modules import ModulesManager

        modules_manager = ModulesManager()

    # Prepare app object which can be obtained only from ApplicationManager

@@ -1344,7 +1337,7 @@ def get_app_environments_for_context(
    })

    prepare_app_environments(data, env_group, modules_manager)
    prepare_context_environments(data, env_group)
    prepare_context_environments(data, env_group, modules_manager)

    # Discard avalon connection
    dbcon.uninstall()

@@ -1503,8 +1496,10 @@ def prepare_app_environments(
    final_env = None
    # Add host specific environments
    if app.host_name and implementation_envs:
        module = __import__("openpype.hosts", fromlist=[app.host_name])
        host_module = getattr(module, app.host_name, None)
        host_module = modules_manager.get_host_module(app.host_name)
        if not host_module:
            module = __import__("openpype.hosts", fromlist=[app.host_name])
            host_module = getattr(module, app.host_name, None)
        add_implementation_envs = None
        if host_module:
            add_implementation_envs = getattr(

@@ -1563,7 +1558,7 @@ def apply_project_environments_value(
    return env


def prepare_context_environments(data, env_group=None):
def prepare_context_environments(data, env_group=None, modules_manager=None):
    """Modify launch environments with context data for launched host.

    Args:

@@ -1658,10 +1653,10 @@ def prepare_context_environments(data, env_group=None):
    data["env"]["AVALON_APP"] = app.host_name
    data["env"]["AVALON_WORKDIR"] = workdir

    _prepare_last_workfile(data, workdir)
    _prepare_last_workfile(data, workdir, modules_manager)


def _prepare_last_workfile(data, workdir):
def _prepare_last_workfile(data, workdir, modules_manager):
    """Last workfile workflow preparation.

    Function checks if it should care about the last workfile workflow and tries

@@ -1676,8 +1671,13 @@ def _prepare_last_workfile(data, workdir):
            result will be stored.
        workdir (str): Path to folder where workfiles should be stored.
    """

    from openpype.modules import ModulesManager
    from openpype.pipeline import HOST_WORKFILE_EXTENSIONS

    if not modules_manager:
        modules_manager = ModulesManager()

    log = data["log"]

    _workdir_data = data.get("workdir_data")

@@ -1725,7 +1725,12 @@ def _prepare_last_workfile(data, workdir):
    # Last workfile path
    last_workfile_path = data.get("last_workfile_path") or ""
    if not last_workfile_path:
        extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name)
        host_module = modules_manager.get_host_module(app.host_name)
        if host_module:
            extensions = host_module.get_workfile_extensions()
        else:
            extensions = HOST_WORKFILE_EXTENSIONS.get(app.host_name)

        if extensions:
            from openpype.pipeline.workfile import (
                get_workfile_template_key,
@@ -1,11 +1,9 @@
"""Should be used only inside of hosts."""
import os
import json
import re
import copy
import platform
import logging
import collections
import functools
import warnings

@@ -13,13 +11,9 @@ from openpype.client import (
    get_project,
    get_assets,
    get_asset_by_name,
    get_subsets,
    get_last_versions,
    get_last_version_by_subset_name,
    get_representations,
    get_workfile_info,
)
from openpype.settings import get_project_settings
from .profiles_filtering import filter_profiles
from .events import emit_event
from .path_templates import StringTemplate
@@ -140,7 +140,7 @@ class _LoadCache:
def get_default_modules_dir():
    """Path to default OpenPype modules."""

    current_dir = os.path.abspath(os.path.dirname(__file__))
    current_dir = os.path.dirname(os.path.abspath(__file__))

    output = []
    for folder_name in ("default_modules", ):

@@ -298,6 +298,8 @@ def _load_modules():
    # Add current directory at first place
    # - has small differences in import logic
    current_dir = os.path.abspath(os.path.dirname(__file__))
    hosts_dir = os.path.join(os.path.dirname(current_dir), "hosts")
    module_dirs.insert(0, hosts_dir)
    module_dirs.insert(0, current_dir)

    processed_paths = set()

@@ -314,6 +316,7 @@ def _load_modules():
            continue

        is_in_current_dir = dirpath == current_dir
        is_in_host_dir = dirpath == hosts_dir
        for filename in os.listdir(dirpath):
            # Ignore filenames
            if filename in IGNORED_FILENAMES:

@@ -353,6 +356,24 @@ def _load_modules():
                    sys.modules[new_import_str] = default_module
                    setattr(openpype_modules, basename, default_module)

                elif is_in_host_dir:
                    import_str = "openpype.hosts.{}".format(basename)
                    new_import_str = "{}.{}".format(modules_key, basename)
                    # Until all hosts are converted to be usable as
                    # modules, this error check is needed
                    try:
                        default_module = __import__(
                            import_str, fromlist=("", )
                        )
                        sys.modules[new_import_str] = default_module
                        setattr(openpype_modules, basename, default_module)

                    except Exception:
                        log.warning(
                            "Failed to import host folder {}".format(basename),
                            exc_info=True
                        )

            elif os.path.isdir(fullpath):
                import_module_from_dirpath(dirpath, filename, modules_key)

@@ -768,24 +789,50 @@ class ModulesManager:
            output.extend(paths)
        return output

    def collect_launch_hook_paths(self):
        """Helper to collect hooks from modules inheriting ILaunchHookPaths.
    def collect_launch_hook_paths(self, app):
        """Helper to collect application launch hooks.

        It used to be based on 'ILaunchHookPaths', which is not true anymore.
        A module just has to have the 'get_launch_hook_paths' method
        implemented.

        Args:
            app (Application): Application object which can be used for
                filtering of which launch hook paths are returned.

        Returns:
            list: Paths to launch hook directories.
        """
        from openpype_interfaces import ILaunchHookPaths

        str_type = type("")
        expected_types = (list, tuple, set)

        output = []
        for module in self.get_enabled_modules():
            # Skip modules that do not inherit from `ILaunchHookPaths`
            if not isinstance(module, ILaunchHookPaths):
            # Skip module if it does not have 'get_launch_hook_paths' implemented
            func = getattr(module, "get_launch_hook_paths", None)
            if func is None:
                continue

            func = module.get_launch_hook_paths
            if hasattr(inspect, "signature"):
                sig = inspect.signature(func)
                expect_args = len(sig.parameters) > 0
            else:
                expect_args = len(inspect.getargspec(func)[0]) > 0

            # Pass application argument if the method expects it.
            try:
                if expect_args:
                    hook_paths = func(app)
                else:
                    hook_paths = func()
            except Exception:
                self.log.warning(
                    "Failed to call 'get_launch_hook_paths'",
                    exc_info=True
                )
                continue

            hook_paths = module.get_launch_hook_paths()
            if not hook_paths:
                continue

@@ -804,6 +851,45 @@ class ModulesManager:
            output.extend(hook_paths)
        return output

    def get_host_module(self, host_name):
        """Find host module by host name.

        Args:
            host_name (str): Host name for which the host module is searched.

        Returns:
            OpenPypeModule: Found host module by name.
            None: No module inheriting IHostModule was found which has
                host name set to the passed 'host_name'.
        """

        from openpype_interfaces import IHostModule

        for module in self.get_enabled_modules():
            if (
                isinstance(module, IHostModule)
                and module.host_name == host_name
            ):
                return module
        return None

    def get_host_names(self):
        """List of available host names based on host modules.

        Returns:
            Iterable[str]: All available host names based on enabled modules
                inheriting 'IHostModule'.
        """

        from openpype_interfaces import IHostModule

        host_names = {
            module.host_name
            for module in self.get_enabled_modules()
            if isinstance(module, IHostModule)
        }
        return host_names

    def print_report(self):
        """Print out report of time spent on modules initialization parts.
@@ -4,6 +4,7 @@
It provides Deadline JobInfo data class.

"""
import json.decoder
import os
from abc import abstractmethod
import platform

@@ -15,7 +16,12 @@ import attr
import requests

import pyblish.api
from openpype.pipeline.publish import AbstractMetaInstancePlugin
from openpype.pipeline.publish import (
    AbstractMetaInstancePlugin,
    KnownPublishError
)

JSONDecodeError = getattr(json.decoder, "JSONDecodeError", ValueError)


def requests_post(*args, **kwargs):

@@ -615,7 +621,7 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
            str: resulting Deadline job id.

        Throws:
            RuntimeError: if submission fails.
            KnownPublishError: if submission fails.

        """
        url = "{}/api/jobs".format(self._deadline_url)

@@ -625,9 +631,16 @@ class AbstractSubmitDeadline(pyblish.api.InstancePlugin):
            self.log.error(response.status_code)
            self.log.error(response.content)
            self.log.debug(payload)
            raise RuntimeError(response.text)
            raise KnownPublishError(response.text)

        try:
            result = response.json()
        except JSONDecodeError:
            msg = "Broken response {}. ".format(response)
            msg += "Try restarting the Deadline Webservice."
            self.log.warning(msg, exc_info=True)
            raise KnownPublishError("Broken response from DL")

        result = response.json()
        # for submit publish job
        self._instance.data["deadlineSubmissionJob"] = result
@@ -62,6 +62,7 @@ payload_skeleton_template = {
        "RenderLayer": None,  # Render only this layer
        "Renderer": None,
        "ProjectPath": None,  # Resolve relative references
        "RenderSetupIncludeLights": None,  # Include all lights flag.
    },
    "AuxFiles": []  # Mandatory for Deadline, may be empty
}

@@ -413,8 +414,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
        # Gather needed data ------------------------------------------------
        default_render_file = instance.context.data.get('project_settings')\
            .get('maya')\
            .get('create')\
            .get('CreateRender')\
            .get('RenderSettings')\
            .get('default_render_image_folder')
        filename = os.path.basename(filepath)
        comment = context.data.get("comment", "")

@@ -505,6 +505,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
        self.payload_skeleton["JobInfo"]["Comment"] = comment
        self.payload_skeleton["PluginInfo"]["RenderLayer"] = renderlayer

        self.payload_skeleton["PluginInfo"]["RenderSetupIncludeLights"] = instance.data.get("renderSetupIncludeLights")  # noqa
        # Adding file dependencies.
        dependencies = instance.context.data["fileDependencies"]
        dependencies.append(filepath)
@@ -34,7 +34,7 @@ def get_openpype_version_from_path(path, build=True):
    # if only builds are requested
    if build and not os.path.isfile(exe):  # noqa: E501
        print(f" ! path is not a build: {path}")
        print(" ! path is not a build: {}".format(path))
        return None

    version = {}

@@ -70,11 +70,12 @@ def inject_openpype_environment(deadlinePlugin):
    # lets go over all available and find compatible build.
    requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION")
    if requested_version:
        print((">>> Scanning for compatible requested "
               f"version {requested_version}"))
        print((
            ">>> Scanning for compatible requested version {}"
        ).format(requested_version))
        install_dir = DirectoryUtils.SearchDirectoryList(dir_list)
        if install_dir:
            print(f"--- Looking for OpenPype at: {install_dir}")
            print("--- Looking for OpenPype at: {}".format(install_dir))
            sub_dirs = [
                f.path for f in os.scandir(install_dir)
                if f.is_dir()

@@ -83,18 +84,20 @@ def inject_openpype_environment(deadlinePlugin):
                version = get_openpype_version_from_path(subdir)
                if not version:
                    continue
                print(f" - found: {version} - {subdir}")
                print(" - found: {} - {}".format(version, subdir))
                openpype_versions.append((version, subdir))

    exe = FileUtils.SearchFileList(exe_list)
    if openpype_versions:
        # if looking for requested compatible version,
        # add the implicitly specified to the list too.
        print(f"Looking for OpenPype at: {os.path.dirname(exe)}")
        print("Looking for OpenPype at: {}".format(os.path.dirname(exe)))
        version = get_openpype_version_from_path(
            os.path.dirname(exe))
        if version:
            print(f" - found: {version} - {os.path.dirname(exe)}")
            print(" - found: {} - {}".format(
                version, os.path.dirname(exe)
            ))
            openpype_versions.append((version, os.path.dirname(exe)))

    if requested_version:

@@ -106,8 +109,9 @@ def inject_openpype_environment(deadlinePlugin):
            int(t) if t.isdigit() else t.lower()
            for t in re.split(r"(\d+)", ver[0])
        ])
        print(("*** Latest available version found is "
               f"{openpype_versions[-1][0]}"))
        print((
            "*** Latest available version found is {}"
        ).format(openpype_versions[-1][0]))
        requested_major, requested_minor, _ = requested_version.split(".")[:3]  # noqa: E501
        compatible_versions = []
        for version in openpype_versions:

@@ -127,8 +131,9 @@ def inject_openpype_environment(deadlinePlugin):
            int(t) if t.isdigit() else t.lower()
            for t in re.split(r"(\d+)", ver[0])
        ])
        print(("*** Latest compatible version found is "
               f"{compatible_versions[-1][0]}"))
        print((
            "*** Latest compatible version found is {}"
        ).format(compatible_versions[-1][0]))
        # create list of executables for different platform and let
        # Deadline decide.
        exe_list = [

@@ -234,78 +239,6 @@ def inject_render_job_id(deadlinePlugin):
    print(">>> Injection end.")


def pype_command_line(executable, arguments, workingDirectory):
    """Remap paths in command line argument string.

    Using the Deadline remapper it will remap all paths found in the
    command line.

    Args:
        executable (str): path to executable
        arguments (str): arguments passed to executable
        workingDirectory (str): working directory path

    Returns:
        Tuple(executable, arguments, workingDirectory)

    """
    print("-" * 40)
    print("executable: {}".format(executable))
    print("arguments: {}".format(arguments))
    print("workingDirectory: {}".format(workingDirectory))
    print("-" * 40)
    print("Remapping arguments ...")
    arguments = RepositoryUtils.CheckPathMapping(arguments)
    print("* {}".format(arguments))
    print("-" * 40)
    return executable, arguments, workingDirectory


def pype(deadlinePlugin):
    """Remaps `PYPE_METADATA_FILE` and `PYPE_PYTHON_EXE` environment vars.

    `PYPE_METADATA_FILE` is used on farm to point to rendered data. This path
    originates on the platform from which this job was published. To be able
    to publish on a different platform, this path needs to be remapped.

    `PYPE_PYTHON_EXE` can be used to specify a custom location of the python
    interpreter to use for Pype. This is remapped also if present, even
    though it probably doesn't make much sense.

    Arguments:
        deadlinePlugin: Deadline job plugin passed by Deadline

    """
    print(">>> Getting job ...")
    job = deadlinePlugin.GetJob()
    # PYPE should be here, not OPENPYPE - backward compatibility!!
    pype_metadata = job.GetJobEnvironmentKeyValue("PYPE_METADATA_FILE")
    pype_python = job.GetJobEnvironmentKeyValue("PYPE_PYTHON_EXE")
    print(">>> Having backward compatible env vars {}/{}".format(pype_metadata,
                                                                 pype_python))
    # test if it is pype publish job.
    if pype_metadata:
        pype_metadata = RepositoryUtils.CheckPathMapping(pype_metadata)
        if platform.system().lower() == "linux":
            pype_metadata = pype_metadata.replace("\\", "/")

        print("- remapping PYPE_METADATA_FILE: {}".format(pype_metadata))
        job.SetJobEnvironmentKeyValue("PYPE_METADATA_FILE", pype_metadata)
        deadlinePlugin.SetProcessEnvironmentVariable(
            "PYPE_METADATA_FILE", pype_metadata)

    if pype_python:
        pype_python = RepositoryUtils.CheckPathMapping(pype_python)
        if platform.system().lower() == "linux":
            pype_python = pype_python.replace("\\", "/")

        print("- remapping PYPE_PYTHON_EXE: {}".format(pype_python))
        job.SetJobEnvironmentKeyValue("PYPE_PYTHON_EXE", pype_python)
        deadlinePlugin.SetProcessEnvironmentVariable(
            "PYPE_PYTHON_EXE", pype_python)

    deadlinePlugin.ModifyCommandLineCallback += pype_command_line


def __main__(deadlinePlugin):
    print("*** GlobalJobPreload start ...")
    print(">>> Getting job ...")

@@ -329,5 +262,3 @@ def __main__(deadlinePlugin):
        inject_render_job_id(deadlinePlugin)
    elif openpype_render_job == '1' or openpype_remote_job == '1':
        inject_openpype_environment(deadlinePlugin)
    else:
        pype(deadlinePlugin)  # backward compatibility with Pype2
@@ -697,13 +697,22 @@ class SyncToAvalonEvent(BaseEvent):
                continue

            auto_sync = changes[CUST_ATTR_AUTO_SYNC]["new"]
            if auto_sync == "1":
            turned_on = auto_sync == "1"
            ft_project = self.cur_project
            username = self._get_username(session, event)
            message = (
                "Auto sync was turned {} for project \"{}\" by \"{}\"."
            ).format(
                "on" if turned_on else "off",
                ft_project["full_name"],
                username
            )
            if turned_on:
                message += " Triggering syncToAvalon action."
            self.log.debug(message)

            if turned_on:
                # Trigger sync to avalon action if auto sync was turned on
                ft_project = self.cur_project
                self.log.debug((
                    "Auto sync was turned on for project <{}>."
                    " Triggering syncToAvalon action."
                ).format(ft_project["full_name"]))
                selection = [{
                    "entityId": ft_project["id"],
                    "entityType": "show"

@@ -851,6 +860,26 @@ class SyncToAvalonEvent(BaseEvent):
            self.report()
        return True

    def _get_username(self, session, event):
        username = "Unknown"
        event_source = event.get("source")
        if not event_source:
            return username
        user_info = event_source.get("user")
        if not user_info:
            return username
        user_id = user_info.get("id")
        if not user_id:
            return username

        user_entity = session.query(
            "User where id is {}".format(user_id)
        ).first()
        if user_entity:
            username = user_entity["username"] or username
        return username

    def process_removed(self):
        """
        Handles removed entities (not removed tasks - handled separately).
@@ -105,11 +105,17 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
        context.data["ftrackEntity"] = asset_entity
        context.data["ftrackTask"] = task_entity

        self.per_instance_process(context, asset_name, task_name)
        self.per_instance_process(context, asset_entity, task_entity)

    def per_instance_process(
        self, context, context_asset_name, context_task_name
        self, context, context_asset_entity, context_task_entity
    ):
        context_task_name = None
        context_asset_name = None
        if context_asset_entity:
            context_asset_name = context_asset_entity["name"]
        if context_task_entity:
            context_task_name = context_task_entity["name"]
        instance_by_asset_and_task = {}
        for instance in context:
            self.log.debug(

@@ -120,6 +126,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
            if not instance_asset_name and not instance_task_name:
                self.log.debug("Instance does not have set context keys.")
                instance.data["ftrackEntity"] = context_asset_entity
                instance.data["ftrackTask"] = context_task_entity
                continue

            elif instance_asset_name and instance_task_name:

@@ -131,6 +139,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
                        "Instance's context is same as in publish context."
                        " Asset: {} | Task: {}"
                    ).format(context_asset_name, context_task_name))
                    instance.data["ftrackEntity"] = context_asset_entity
                    instance.data["ftrackTask"] = context_task_entity
                    continue
                asset_name = instance_asset_name
                task_name = instance_task_name

@@ -141,6 +151,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
                        "Instance's context task is same as in publish"
                        " context. Task: {}"
                    ).format(context_task_name))
                    instance.data["ftrackEntity"] = context_asset_entity
                    instance.data["ftrackTask"] = context_task_entity
                    continue

                asset_name = context_asset_name

@@ -152,6 +164,8 @@ class CollectFtrackApi(pyblish.api.ContextPlugin):
                        "Instance's context asset is same as in publish"
                        " context. Asset: {}"
                    ).format(context_asset_name))
                    instance.data["ftrackEntity"] = context_asset_entity
                    instance.data["ftrackTask"] = context_task_entity
                    continue

            # Do not use context's task name
@@ -0,0 +1,150 @@
import pyblish.api
from openpype.lib import filter_profiles


class IntegrateFtrackFarmStatus(pyblish.api.ContextPlugin):
    """Change task status when it should be published on farm.

    An instance which has the "farm" key in data set to 'True' is considered
    to be rendered on farm, thus its task status should be changed.
    """

    order = pyblish.api.IntegratorOrder + 0.48
    label = "Integrate Ftrack Farm Status"

    farm_status_profiles = []

    def process(self, context):
        # Quick end
        if not self.farm_status_profiles:
            project_name = context.data["projectName"]
            self.log.info((
                "Status profiles are not filled for project \"{}\". Skipping"
            ).format(project_name))
            return

        filtered_instances = self.filter_instances(context)
        instances_with_status_names = self.get_instances_with_statuse_names(
            context, filtered_instances
        )
        if instances_with_status_names:
            self.fill_statuses(context, instances_with_status_names)

    def filter_instances(self, context):
        filtered_instances = []
        for instance in context:
            # Skip disabled instances
            if instance.data.get("publish") is False:
                continue
            subset_name = instance.data["subset"]
            msg_start = "Skipping instance {}.".format(subset_name)
            if not instance.data.get("farm"):
                self.log.debug(
                    "{} Won't be rendered on farm.".format(msg_start)
                )
                continue

            task_entity = instance.data.get("ftrackTask")
            if not task_entity:
                self.log.debug(
                    "{} Does not have filled task".format(msg_start)
                )
                continue

            filtered_instances.append(instance)
        return filtered_instances

    def get_instances_with_statuse_names(self, context, instances):
        instances_with_status_names = []
        for instance in instances:
            family = instance.data["family"]
            subset_name = instance.data["subset"]
            task_entity = instance.data["ftrackTask"]
            host_name = context.data["hostName"]
            task_name = task_entity["name"]
            task_type = task_entity["type"]["name"]
            status_profile = filter_profiles(
                self.farm_status_profiles,
                {
                    "hosts": host_name,
                    "task_types": task_type,
                    "task_names": task_name,
                    "families": family,
                    "subsets": subset_name,
                },
                logger=self.log
            )
            if not status_profile:
                # There already is a log in 'filter_profiles'
                continue

            status_name = status_profile["status_name"]
            if status_name:
                instances_with_status_names.append((instance, status_name))
        return instances_with_status_names

    def fill_statuses(self, context, instances_with_status_names):
        # Prepare available task statuses on the project
        project_name = context.data["projectName"]
        session = context.data["ftrackSession"]
        project_entity = session.query((
            "select project_schema from Project where full_name is \"{}\""
        ).format(project_name)).one()
        project_schema = project_entity["project_schema"]

        task_type_ids = set()
        for item in instances_with_status_names:
            instance, _ = item
            task_entity = instance.data["ftrackTask"]
            task_type_ids.add(task_entity["type"]["id"])

        task_statuses_by_type_id = {
            task_type_id: project_schema.get_statuses("Task", task_type_id)
            for task_type_id in task_type_ids
        }

        # Keep track if anything has changed
        skipped_status_names = set()
        status_changed = False
        for item in instances_with_status_names:
            instance, status_name = item
            task_entity = instance.data["ftrackTask"]
            task_statuses = task_statuses_by_type_id[task_entity["type"]["id"]]
            status_name_low = status_name.lower()

            status_id = None
            status_name = None
            # Skip if status name was already tried to be found
            for status in task_statuses:
                if status["name"].lower() == status_name_low:
                    status_id = status["id"]
                    status_name = status["name"]
                    break

            if status_id is None:
                if status_name_low not in skipped_status_names:
                    skipped_status_names.add(status_name_low)
                    joined_status_names = ", ".join({
                        '"{}"'.format(status["name"])
                        for status in task_statuses
                    })
                    self.log.warning((
                        "Status \"{}\" is not available on project \"{}\"."
                        " Available statuses are {}"
                    ).format(status_name, project_name, joined_status_names))
                continue

            # Change task status id
            if status_id != task_entity["status_id"]:
                task_entity["status_id"] = status_id
                status_changed = True
                path = "/".join([
                    item["name"]
                    for item in task_entity["link"]
                ])
                self.log.debug("Set status \"{}\" to \"{}\"".format(
                    status_name, path
                ))

        if status_changed:
            session.commit()
@@ -3,6 +3,7 @@ import json
import copy
import pyblish.api

from openpype.lib.openpype_version import get_openpype_version
from openpype.lib.transcoding import (
    get_ffprobe_streams,
    convert_ffprobe_fps_to_float,

@@ -20,6 +21,17 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
    label = "Integrate Ftrack Component"
    families = ["ftrack"]

    metadata_keys_to_label = {
        "openpype_version": "OpenPype version",
        "frame_start": "Frame start",
        "frame_end": "Frame end",
        "duration": "Duration",
        "width": "Resolution width",
        "height": "Resolution height",
        "fps": "FPS",
        "codec": "Codec"
    }

    family_mapping = {
        "camera": "cam",
        "look": "look",

@@ -43,6 +55,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
    }
    keep_first_subset_name_for_review = True
    asset_versions_status_profiles = {}
    additional_metadata_keys = []

    def process(self, instance):
        self.log.debug("instance {}".format(instance))

@@ -105,7 +118,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
            "component_data": None,
            "component_path": None,
            "component_location": None,
            "component_location_name": None
            "component_location_name": None,
            "additional_data": {}
        }

        # Filter types of representations

@@ -152,6 +166,7 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
                "name": "thumbnail"
            }
            thumbnail_item["thumbnail"] = True

            # Create copy of item before setting location
            src_components_to_add.append(copy.deepcopy(thumbnail_item))
            # Create copy of first thumbnail

@@ -248,19 +263,15 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
                    first_thumbnail_component[
                        "asset_data"]["name"] = extended_asset_name

            component_meta = self._prepare_component_metadata(
                instance, repre, repre_path, True
            )

            # Change location
            review_item["component_path"] = repre_path
            # Change component data
            review_item["component_data"] = {
                # Default component name is "main".
                "name": "ftrackreview-mp4",
                "metadata": {
                    "ftr_meta": json.dumps(component_meta)
                }
                "metadata": self._prepare_component_metadata(
                    instance, repre, repre_path, True
                )
            }

            if is_first_review_repre:

@@ -302,13 +313,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
            component_data = copy_src_item["component_data"]
            component_name = component_data["name"]
            component_data["name"] = component_name + "_src"
            component_meta = self._prepare_component_metadata(
            component_data["metadata"] = self._prepare_component_metadata(
                instance, repre, copy_src_item["component_path"], False
            )
            if component_meta:
                component_data["metadata"] = {
                    "ftr_meta": json.dumps(component_meta)
                }
            component_list.append(copy_src_item)

        # Add others representations as component

@@ -326,16 +333,12 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
            ):
                other_item["asset_data"]["name"] = extended_asset_name

            component_meta = self._prepare_component_metadata(
                instance, repre, published_path, False
            )
            component_data = {
                "name": repre["name"]
                "name": repre["name"],
                "metadata": self._prepare_component_metadata(
                    instance, repre, published_path, False
                )
            }
            if component_meta:
                component_data["metadata"] = {
                    "ftr_meta": json.dumps(component_meta)
                }
            other_item["component_data"] = component_data
            other_item["component_location_name"] = unmanaged_location_name
            other_item["component_path"] = published_path

@@ -354,6 +357,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        ))
        instance.data["ftrackComponentsList"] = component_list

    def _collect_additional_metadata(self, streams):
        pass

    def _get_repre_path(self, instance, repre, only_published):
        """Get representation path that can be used for integration.

@@ -423,6 +429,11 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
    def _prepare_component_metadata(
        self, instance, repre, component_path, is_review
    ):
        metadata = {}
        if "openpype_version" in self.additional_metadata_keys:
            label = self.metadata_keys_to_label["openpype_version"]
            metadata[label] = get_openpype_version()

        extension = os.path.splitext(component_path)[-1]
        streams = []
        try:

@@ -442,13 +453,23 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        # - exr is special case which can have issues with reading through
        #   ffmpeg but we want to set fps for it
        if not video_streams and extension not in [".exr"]:
            return {}
            return metadata

        stream_width = None
        stream_height = None
        stream_fps = None
        frame_out = None
        codec_label = None
        for video_stream in video_streams:
            codec_label = video_stream.get("codec_long_name")
            if not codec_label:
                codec_label = video_stream.get("codec")

            if codec_label:
                pix_fmt = video_stream.get("pix_fmt")
                if pix_fmt:
                    codec_label += " ({})".format(pix_fmt)

            tmp_width = video_stream.get("width")
            tmp_height = video_stream.get("height")
            if tmp_width and tmp_height:

@@ -456,8 +477,8 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
                stream_height = tmp_height

            input_framerate = video_stream.get("r_frame_rate")
            duration = video_stream.get("duration")
            if input_framerate is None or duration is None:
            stream_duration = video_stream.get("duration")
            if input_framerate is None or stream_duration is None:
                continue
            try:
                stream_fps = convert_ffprobe_fps_to_float(

@@ -473,9 +494,9 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
                stream_height = tmp_height

            self.log.debug("FPS from stream is {} and duration is {}".format(
                input_framerate, duration
                input_framerate, stream_duration
            ))
            frame_out = float(duration) * stream_fps
            frame_out = float(stream_duration) * stream_fps
            break

        # Prepare FPS

@@ -483,43 +504,58 @@ class IntegrateFtrackInstance(pyblish.api.InstancePlugin):
        if instance_fps is None:
            instance_fps = instance.context.data["fps"]

        if not is_review:
            output = {}
            fps = stream_fps or instance_fps
            if fps:
                output["frameRate"] = fps

            if stream_width and stream_height:
                output["width"] = int(stream_width)
                output["height"] = int(stream_height)
            return output

        frame_start = repre.get("frameStartFtrack")
        frame_end = repre.get("frameEndFtrack")
        if frame_start is None or frame_end is None:
            frame_start = instance.data["frameStart"]
            frame_end = instance.data["frameEnd"]

        fps = None
        repre_fps = repre.get("fps")
        if repre_fps is not None:
            repre_fps = float(repre_fps)

        fps = stream_fps or repre_fps or instance_fps

        # Prepare frame ranges
        frame_start = repre.get("frameStartFtrack")
        frame_end = repre.get("frameEndFtrack")
        if frame_start is None or frame_end is None:
            frame_start = instance.data["frameStart"]
            frame_end = instance.data["frameEnd"]
        duration = (frame_end - frame_start) + 1

        for key, value in [
            ("fps", fps),
            ("frame_start", frame_start),
            ("frame_end", frame_end),
            ("duration", duration),
            ("width", stream_width),
            ("height", stream_height),
            ("fps", fps),
            ("codec", codec_label)
        ]:
            if not value or key not in self.additional_metadata_keys:
                continue
            label = self.metadata_keys_to_label[key]
            metadata[label] = value

        if not is_review:
            ftr_meta = {}
            if fps:
                ftr_meta["frameRate"] = fps

            if stream_width and stream_height:
                ftr_meta["width"] = int(stream_width)
                ftr_meta["height"] = int(stream_height)
            metadata["ftr_meta"] = json.dumps(ftr_meta)
            return metadata

        # Frame end of uploaded video file should be duration in frames
        # - frame start is always 0
        # - frame end is duration in frames
        if not frame_out:
            frame_out = frame_end - frame_start + 1
            frame_out = duration

        # Ftrack documentation says that it is required to have
        # 'width' and 'height' in review component. But with those values
        # review video does not play.
        component_meta = {
        metadata["ftr_meta"] = json.dumps({
            "frameIn": 0,
            "frameOut": frame_out,
            "frameRate": float(fps)
        }

        return component_meta
        })
        return metadata
@@ -1,9 +1,12 @@
import sys
import collections
import six
import pyblish.api
from copy import deepcopy

import pyblish.api

from openpype.client import get_asset_by_id
from openpype.lib import filter_profiles


# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC`

@@ -73,6 +76,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        "traypublisher"
    ]
    optional = False
    create_task_status_profiles = []

    def process(self, context):
        self.context = context

@@ -82,14 +86,16 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        hierarchy_context = self._get_active_assets(context)
        self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))

        self.session = self.context.data["ftrackSession"]
        session = self.context.data["ftrackSession"]
        project_name = self.context.data["projectEntity"]["name"]
        query = 'Project where full_name is "{}"'.format(project_name)
        project = self.session.query(query).one()
        auto_sync_state = project[
            "custom_attributes"][CUST_ATTR_AUTO_SYNC]
        project = session.query(query).one()
        auto_sync_state = project["custom_attributes"][CUST_ATTR_AUTO_SYNC]

        self.ft_project = None
        self.session = session
        self.ft_project = project
        self.task_types = self.get_all_task_types(project)
        self.task_statuses = self.get_task_statuses(project)

        # temporarily disable ftrack project's auto-syncing
        if auto_sync_state:

@@ -121,10 +127,7 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        self.log.debug(entity_type)

        if entity_type.lower() == 'project':
            query = 'Project where full_name is "{}"'.format(entity_name)
            entity = self.session.query(query).one()
            self.ft_project = entity
            self.task_types = self.get_all_task_types(entity)
            entity = self.ft_project

        elif self.ft_project is None or parent is None:
            raise AssertionError(

@@ -217,13 +220,6 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
                task_type=task_type,
                parent=entity
            )
            try:
                self.session.commit()
            except Exception:
                tp, value, tb = sys.exc_info()
                self.session.rollback()
                self.session._configure_locations()
                six.reraise(tp, value, tb)

            # Incoming links.
            self.create_links(project_name, entity_data, entity)

@@ -303,7 +299,37 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        return tasks

    def get_task_statuses(self, project_entity):
        project_schema = project_entity["project_schema"]
        task_workflow_statuses = project_schema["_task_workflow"]["statuses"]
        return {
            status["id"]: status
            for status in task_workflow_statuses
        }

    def create_task(self, name, task_type, parent):
        filter_data = {
            "task_names": name,
            "task_types": task_type
        }
        profile = filter_profiles(
            self.create_task_status_profiles,
            filter_data
        )
        status_id = None
        if profile:
            status_name = profile["status_name"]
            status_name_low = status_name.lower()
            for _status_id, status in self.task_statuses.items():
                if status["name"].lower() == status_name_low:
                    status_id = _status_id
                    break

            if status_id is None:
                self.log.warning(
                    "Task status \"{}\" was not found".format(status_name)
                )

        task = self.session.create('Task', {
            'name': name,
            'parent': parent

@@ -312,6 +338,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        self.log.info(task_type)
        self.log.info(self.task_types)
        task['type'] = self.task_types[task_type]
        if status_id is not None:
            task["status_id"] = status_id

        try:
            self.session.commit()
@@ -1,4 +1,4 @@
from abc import abstractmethod
from abc import abstractmethod, abstractproperty

from openpype import resources

@@ -50,12 +50,32 @@ class IPluginPaths(OpenPypeInterface):
class ILaunchHookPaths(OpenPypeInterface):
    """Module has launch hook paths to return.

    Modules do not have to inherit from this interface (changed 8.11.2022).
    A module just has to have 'get_launch_hook_paths' implemented to be able
    to use the advantage.

    Expected result is a list of paths.
        ["path/to/launch_hooks_dir"]
    """

    @abstractmethod
    def get_launch_hook_paths(self):
    def get_launch_hook_paths(self, app):
        """Paths to directory with application launch hooks.

        Method can be also defined without arguments.
        ```python
        def get_launch_hook_paths(self):
            return []
        ```

        Args:
            app (Application): Application object which can be used for
                filtering of which launch hook paths are returned.

        Returns:
            Iterable[str]: Path to directories where launch hooks can be found.
        """

        pass


@@ -66,6 +86,7 @@ class ITrayModule(OpenPypeInterface):
    The module still must be usable if it is not used in tray, even if
    it would do nothing.
    """

    tray_initialized = False
    _tray_manager = None

@@ -78,16 +99,19 @@ class ITrayModule(OpenPypeInterface):
        This is where GUIs should be loaded or tray specific parts should be
        prepared.
        """

        pass

    @abstractmethod
    def tray_menu(self, tray_menu):
        """Add module's action to tray menu."""

        pass

    @abstractmethod
    def tray_start(self):
        """Start procedure in Pype tray."""

        pass

    @abstractmethod

@@ -96,6 +120,7 @@ class ITrayModule(OpenPypeInterface):

        This is place where all threads should be shut.
        """

        pass

    def execute_in_main_thread(self, callback):

@@ -104,6 +129,7 @@ class ITrayModule(OpenPypeInterface):
        Some callbacks need to be processed on main thread (menu actions
        must be added on main thread or they won't get triggered etc.)
        """

        if not self.tray_initialized:
            # TODO Called without initialized tray, still main thread needed
            try:

@@ -128,6 +154,7 @@ class ITrayModule(OpenPypeInterface):
            msecs (int): Duration of message visibility in milliseconds.
                Default is 10000 msecs, may differ by Qt version.
        """

        if self._tray_manager:
            self._tray_manager.show_tray_message(title, message, icon, msecs)

@@ -280,16 +307,19 @@ class ITrayService(ITrayModule):

    def set_service_running_icon(self):
        """Change icon of a QAction to green circle."""

        if self.menu_action:
            self.menu_action.setIcon(self.get_icon_running())

    def set_service_failed_icon(self):
        """Change icon of a QAction to red circle."""

        if self.menu_action:
            self.menu_action.setIcon(self.get_icon_failed())

    def set_service_idle_icon(self):
        """Change icon of a QAction to orange circle."""

        if self.menu_action:
            self.menu_action.setIcon(self.get_icon_idle())

@@ -303,6 +333,7 @@ class ISettingsChangeListener(OpenPypeInterface):
        "publish": ["path/to/publish_plugins"]
    }
    """

    @abstractmethod
    def on_system_settings_save(
        self, old_value, new_value, changes, new_value_metadata

@@ -320,3 +351,24 @@ class ISettingsChangeListener(OpenPypeInterface):
        self, old_value, new_value, changes, project_name, new_value_metadata
    ):
        pass


class IHostModule(OpenPypeInterface):
    """Module which also contains a host implementation."""

    @abstractproperty
    def host_name(self):
        """Name of host which module represents."""

        pass

    def get_workfile_extensions(self):
        """Define workfile extensions for host.

        Not all hosts support workfiles, thus this is an optional
        implementation.

        Returns:
            List[str]: Extensions used for workfiles with dot.
        """

        return []
@@ -6,7 +6,11 @@ from typing import List
import gazu
from pymongo import UpdateOne

from openpype.client import get_project, get_assets
from openpype.client import (
    get_projects,
    get_project,
    get_assets,
)
from openpype.pipeline import AvalonMongoDB
from openpype.api import get_project_settings
from openpype.modules.kitsu.utils.credentials import validate_credentials

@@ -37,7 +41,7 @@ def sync_zou(login: str, password: str):
    dbcon = AvalonMongoDB()
    dbcon.install()

    op_projects = [p for p in dbcon.projects()]
    op_projects = list(get_projects())
    for project_doc in op_projects:
        sync_zou_from_op_project(project_doc["name"], dbcon, project_doc)
@@ -1,49 +0,0 @@
import os
import platform
import subprocess
from openpype.lib import get_openpype_execute_args
from openpype.modules import OpenPypeModule
from openpype_interfaces import ITrayAction


class StandAlonePublishAction(OpenPypeModule, ITrayAction):
    label = "Publish"
    name = "standalonepublish_tool"

    def initialize(self, modules_settings):
        import openpype
        self.enabled = modules_settings[self.name]["enabled"]
        self.publish_paths = [
            os.path.join(
                openpype.PACKAGE_DIR,
                "hosts",
                "standalonepublisher",
                "plugins",
                "publish"
            )
        ]

    def tray_init(self):
        return

    def on_action_trigger(self):
        self.run_standalone_publisher()

    def connect_with_modules(self, enabled_modules):
        """Collect publish paths from other modules."""
        publish_paths = self.manager.collect_plugin_paths()["publish"]
        self.publish_paths.extend(publish_paths)

    def run_standalone_publisher(self):
        args = get_openpype_execute_args("standalonepublisher")
        kwargs = {}
        if platform.system().lower() == "darwin":
            new_args = ["open", "-na", args.pop(0), "--args"]
            new_args.extend(args)
            args = new_args

        detached_process = getattr(subprocess, "DETACHED_PROCESS", None)
        if detached_process is not None:
            kwargs["creationflags"] = detached_process

        subprocess.Popen(args, **kwargs)
@@ -62,7 +62,7 @@ class AbstractProvider:

     @abc.abstractmethod
     def upload_file(self, source_path, path,
-                    server, collection, file, representation, site,
+                    server, project_name, file, representation, site,
                     overwrite=False):
         """
         Copy file from 'source_path' to 'target_path' on provider.
@@ -75,7 +75,7 @@ class AbstractProvider:

         arguments for saving progress:
             server (SyncServer): server instance to call update_db on
-            collection (str): name of collection
+            project_name (str): name of project_name
             file (dict): info about uploaded file (matches structure from db)
             representation (dict): complete repre containing 'file'
             site (str): site name
@@ -87,7 +87,7 @@ class AbstractProvider:

     @abc.abstractmethod
     def download_file(self, source_path, local_path,
-                      server, collection, file, representation, site,
+                      server, project_name, file, representation, site,
                       overwrite=False):
         """
         Download file from provider into local system
@@ -99,7 +99,7 @@ class AbstractProvider:

         arguments for saving progress:
             server (SyncServer): server instance to call update_db on
-            collection (str): name of collection
+            project_name (str):
             file (dict): info about uploaded file (matches structure from db)
             representation (dict): complete repre containing 'file'
             site (str): site name

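For orientation, a minimal sketch of a provider written against the renamed signature. Only the signature and the update_db keywords come from this diff; the class name, import path and body are assumptions for illustration:

    # Import path is an assumption; AbstractProvider is the base class
    # shown in the hunks above.
    from openpype.modules.sync_server.providers.abstract_provider import (
        AbstractProvider,
    )


    class ExampleProvider(AbstractProvider):
        """Illustrative provider; other abstract methods are omitted."""

        def upload_file(self, source_path, path,
                        server, project_name, file, representation, site,
                        overwrite=False):
            # ... transfer 'source_path' to 'path' here, then report
            # progress back through the module, now keyed by project name:
            server.update_db(
                project_name=project_name,
                new_file_id=None,
                file=file,
                representation=representation,
                site=site,
                progress=1.0
            )
            return path
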
@@ -224,7 +224,7 @@ class DropboxHandler(AbstractProvider):
         return False

     def upload_file(self, source_path, path,
-                    server, collection, file, representation, site,
+                    server, project_name, file, representation, site,
                     overwrite=False):
         """
         Copy file from 'source_path' to 'target_path' on provider.
@@ -237,7 +237,7 @@ class DropboxHandler(AbstractProvider):

         arguments for saving progress:
             server (SyncServer): server instance to call update_db on
-            collection (str): name of collection
+            project_name (str):
             file (dict): info about uploaded file (matches structure from db)
             representation (dict): complete repre containing 'file'
             site (str): site name
@@ -290,7 +290,7 @@ class DropboxHandler(AbstractProvider):
                     cursor.offset = f.tell()

             server.update_db(
-                collection=collection,
+                project_name=project_name,
                 new_file_id=None,
                 file=file,
                 representation=representation,
@@ -301,7 +301,7 @@ class DropboxHandler(AbstractProvider):
         return path

     def download_file(self, source_path, local_path,
-                      server, collection, file, representation, site,
+                      server, project_name, file, representation, site,
                       overwrite=False):
         """
         Download file from provider into local system
@@ -313,7 +313,7 @@ class DropboxHandler(AbstractProvider):

         arguments for saving progress:
             server (SyncServer): server instance to call update_db on
-            collection (str): name of collection
+            project_name (str):
             file (dict): info about uploaded file (matches structure from db)
             representation (dict): complete repre containing 'file'
             site (str): site name
@@ -337,7 +337,7 @@ class DropboxHandler(AbstractProvider):
         self.dbx.files_download_to_file(local_path, source_path)

         server.update_db(
-            collection=collection,
+            project_name=project_name,
             new_file_id=None,
             file=file,
             representation=representation,

@@ -251,7 +251,7 @@ class GDriveHandler(AbstractProvider):
         return folder_id

     def upload_file(self, source_path, path,
-                    server, collection, file, representation, site,
+                    server, project_name, file, representation, site,
                     overwrite=False):
         """
         Uploads single file from 'source_path' to destination 'path'.
@@ -264,7 +264,7 @@ class GDriveHandler(AbstractProvider):

         arguments for saving progress:
             server (SyncServer): server instance to call update_db on
-            collection (str): name of collection
+            project_name (str):
             file (dict): info about uploaded file (matches structure from db)
             representation (dict): complete repre containing 'file'
             site (str): site name
@@ -324,7 +324,7 @@ class GDriveHandler(AbstractProvider):
         while response is None:
             if server.is_representation_paused(representation['_id'],
                                                check_parents=True,
-                                               project_name=collection):
+                                               project_name=project_name):
                 raise ValueError("Paused during process, please redo.")
             if status:
                 status_val = float(status.progress())
@@ -333,7 +333,7 @@ class GDriveHandler(AbstractProvider):
                 last_tick = time.time()
                 log.debug("Uploaded %d%%." %
                           int(status_val * 100))
-                server.update_db(collection=collection,
+                server.update_db(project_name=project_name,
                                  new_file_id=None,
                                  file=file,
                                  representation=representation,
@@ -358,7 +358,7 @@ class GDriveHandler(AbstractProvider):
         return response['id']

     def download_file(self, source_path, local_path,
-                      server, collection, file, representation, site,
+                      server, project_name, file, representation, site,
                       overwrite=False):
         """
         Downloads single file from 'source_path' (remote) to 'local_path'.
@@ -372,7 +372,7 @@ class GDriveHandler(AbstractProvider):

         arguments for saving progress:
             server (SyncServer): server instance to call update_db on
-            collection (str): name of collection
+            project_name (str):
             file (dict): info about uploaded file (matches structure from db)
             representation (dict): complete repre containing 'file'
             site (str): site name
@@ -410,7 +410,7 @@ class GDriveHandler(AbstractProvider):
         while response is None:
             if server.is_representation_paused(representation['_id'],
                                                check_parents=True,
-                                               project_name=collection):
+                                               project_name=project_name):
                 raise ValueError("Paused during process, please redo.")
             if status:
                 status_val = float(status.progress())
@@ -419,7 +419,7 @@ class GDriveHandler(AbstractProvider):
                 last_tick = time.time()
                 log.debug("Downloaded %d%%." %
                           int(status_val * 100))
-                server.update_db(collection=collection,
+                server.update_db(project_name=project_name,
                                  new_file_id=None,
                                  file=file,
                                  representation=representation,

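The GDrive hunks above poll a resumable request inside a while loop. A condensed sketch of that pattern with google-api-python-client; the service object, file name and folder id are placeholders, not part of this changeset:

    from googleapiclient.http import MediaFileUpload

    def upload_resumable(service, source_path, folder_id):
        # 'service' is assumed to be a built Drive v3 service object.
        media = MediaFileUpload(source_path, resumable=True)
        request = service.files().create(
            body={"name": "example.exr", "parents": [folder_id]},
            media_body=media,
            fields="id")
        response = None
        while response is None:
            # next_chunk() returns (status, response); response stays None
            # until the final chunk is accepted by the server.
            status, response = request.next_chunk()
            if status:
                print("Uploaded %d%%." % int(status.progress() * 100))
        return response["id"]
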
@@ -82,7 +82,7 @@ class LocalDriveHandler(AbstractProvider):
         return editable

     def upload_file(self, source_path, target_path,
-                    server, collection, file, representation, site,
+                    server, project_name, file, representation, site,
                     overwrite=False, direction="Upload"):
         """
         Copies file from 'source_path' to 'target_path'
@@ -95,7 +95,7 @@ class LocalDriveHandler(AbstractProvider):
             thread = threading.Thread(target=self._copy,
                                       args=(source_path, target_path))
             thread.start()
-            self._mark_progress(collection, file, representation, server,
+            self._mark_progress(project_name, file, representation, server,
                                 site, source_path, target_path, direction)
         else:
             if os.path.exists(target_path):
@@ -105,13 +105,14 @@ class LocalDriveHandler(AbstractProvider):
         return os.path.basename(target_path)

     def download_file(self, source_path, local_path,
-                      server, collection, file, representation, site,
+                      server, project_name, file, representation, site,
                       overwrite=False):
         """
         Download a file form 'source_path' to 'local_path'
         """
         return self.upload_file(source_path, local_path,
-                                server, collection, file, representation, site,
+                                server, project_name, file,
+                                representation, site,
                                 overwrite, direction="Download")

     def delete_file(self, path):
@@ -188,7 +189,7 @@ class LocalDriveHandler(AbstractProvider):
         except shutil.SameFileError:
             print("same files, skipping")

-    def _mark_progress(self, collection, file, representation, server, site,
+    def _mark_progress(self, project_name, file, representation, server, site,
                        source_path, target_path, direction):
         """
         Updates progress field in DB by values 0-1.
@@ -204,7 +205,7 @@ class LocalDriveHandler(AbstractProvider):
                 status_val = target_file_size / source_file_size
                 last_tick = time.time()
                 log.debug(direction + "ed %d%%." % int(status_val * 100))
-                server.update_db(collection=collection,
+                server.update_db(project_name=project_name,
                                  new_file_id=None,
                                  file=file,
                                  representation=representation,

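The local drive handler copies on a worker thread and derives progress from file sizes. A self-contained, stdlib-only sketch of that pattern (function name and poll interval are illustrative):

    import os
    import shutil
    import threading
    import time

    def copy_with_progress(source_path, target_path):
        # Copy on a worker thread; poll sizes on the calling thread to
        # derive a 0-1 progress value, as the handler above does.
        thread = threading.Thread(target=shutil.copyfile,
                                  args=(source_path, target_path))
        thread.start()
        source_size = os.path.getsize(source_path)
        while thread.is_alive():
            if os.path.exists(target_path):
                progress = os.path.getsize(target_path) / source_size
                print("Copied {:d}%".format(int(progress * 100)))
            time.sleep(0.5)
        thread.join()
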
@@ -222,7 +222,7 @@ class SFTPHandler(AbstractProvider):
         return os.path.basename(path)

     def upload_file(self, source_path, target_path,
-                    server, collection, file, representation, site,
+                    server, project_name, file, representation, site,
                     overwrite=False):
         """
         Uploads single file from 'source_path' to destination 'path'.
@@ -235,7 +235,7 @@ class SFTPHandler(AbstractProvider):

         arguments for saving progress:
             server (SyncServer): server instance to call update_db on
-            collection (str): name of collection
+            project_name (str):
             file (dict): info about uploaded file (matches structure from db)
             representation (dict): complete repre containing 'file'
             site (str): site name
@@ -256,7 +256,7 @@ class SFTPHandler(AbstractProvider):
         thread = threading.Thread(target=self._upload,
                                   args=(source_path, target_path))
         thread.start()
-        self._mark_progress(collection, file, representation, server,
+        self._mark_progress(project_name, file, representation, server,
                             site, source_path, target_path, "upload")

         return os.path.basename(target_path)
@@ -267,7 +267,7 @@ class SFTPHandler(AbstractProvider):
         conn.put(source_path, target_path)

     def download_file(self, source_path, target_path,
-                      server, collection, file, representation, site,
+                      server, project_name, file, representation, site,
                       overwrite=False):
         """
         Downloads single file from 'source_path' (remote) to 'target_path'.
@@ -281,7 +281,7 @@ class SFTPHandler(AbstractProvider):

         arguments for saving progress:
             server (SyncServer): server instance to call update_db on
-            collection (str): name of collection
+            project_name (str):
             file (dict): info about uploaded file (matches structure from db)
             representation (dict): complete repre containing 'file'
             site (str): site name
@@ -302,7 +302,7 @@ class SFTPHandler(AbstractProvider):
         thread = threading.Thread(target=self._download,
                                   args=(source_path, target_path))
         thread.start()
-        self._mark_progress(collection, file, representation, server,
+        self._mark_progress(project_name, file, representation, server,
                             site, source_path, target_path, "download")

         return os.path.basename(target_path)
@@ -425,7 +425,7 @@ class SFTPHandler(AbstractProvider):
                 pysftp.exceptions.ConnectionException):
             log.warning("Couldn't connect", exc_info=True)

-    def _mark_progress(self, collection, file, representation, server, site,
+    def _mark_progress(self, project_name, file, representation, server, site,
                        source_path, target_path, direction):
         """
         Updates progress field in DB by values 0-1.
@@ -446,7 +446,7 @@ class SFTPHandler(AbstractProvider):
                 status_val = target_file_size / source_file_size
                 last_tick = time.time()
                 log.debug(direction + "ed %d%%." % int(status_val * 100))
-                server.update_db(collection=collection,
+                server.update_db(project_name=project_name,
                                  new_file_id=None,
                                  file=file,
                                  representation=representation,

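The SFTP handler above transfers through pysftp's put/get. A minimal sketch of those calls; host, credentials and paths are placeholders, and a real setup may also need host-key options:

    import pysftp

    # Connection parameters are placeholders for illustration.
    with pysftp.Connection("sftp.example.com",
                           username="user", password="secret") as conn:
        conn.put("/local/file.exr", "/remote/file.exr")      # upload
        conn.get("/remote/file.exr", "/local/copy.exr")      # download
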
@@ -14,7 +14,7 @@ from .utils import SyncStatus, ResumableError
 log = PypeLogger().get_logger("SyncServer")


-async def upload(module, collection, file, representation, provider_name,
+async def upload(module, project_name, file, representation, provider_name,
                  remote_site_name, tree=None, preset=None):
     """
     Upload single 'file' of a 'representation' to 'provider'.
@@ -31,7 +31,7 @@ async def upload(module, collection, file, representation, provider_name,

     Args:
         module(SyncServerModule): object to run SyncServerModule API
-        collection (str): source collection
+        project_name (str): source db
         file (dictionary): of file from representation in Mongo
         representation (dictionary): of representation
         provider_name (string): gdrive, gdc etc.
@@ -47,15 +47,16 @@ async def upload(module, collection, file, representation, provider_name,
         # thread can do that at a time, upload/download to prepared
         # structure should be run in parallel
         remote_handler = lib.factory.get_provider(provider_name,
-                                                  collection,
+                                                  project_name,
                                                   remote_site_name,
                                                   tree=tree,
                                                   presets=preset)

         file_path = file.get("path", "")
         try:
-            local_file_path, remote_file_path = resolve_paths(module,
-                file_path, collection, remote_site_name, remote_handler
+            local_file_path, remote_file_path = resolve_paths(
+                module, file_path, project_name,
+                remote_site_name, remote_handler
             )
         except Exception as exp:
             print(exp)
@@ -74,27 +75,28 @@ async def upload(module, collection, file, representation, provider_name,
                 local_file_path,
                 remote_file_path,
                 module,
-                collection,
+                project_name,
                 file,
                 representation,
                 remote_site_name,
                 True
             )

-        module.handle_alternate_site(collection, representation, remote_site_name,
+        module.handle_alternate_site(project_name, representation,
+                                     remote_site_name,
                                      file["_id"], file_id)

         return file_id


-async def download(module, collection, file, representation, provider_name,
+async def download(module, project_name, file, representation, provider_name,
                    remote_site_name, tree=None, preset=None):
     """
     Downloads file to local folder denoted in representation.Context.

     Args:
         module(SyncServerModule): object to run SyncServerModule API
-        collection (str): source collection
+        project_name (str): source
         file (dictionary) : info about processed file
         representation (dictionary): repr that 'file' belongs to
         provider_name (string): 'gdrive' etc
@@ -108,20 +110,20 @@ async def download(module, collection, file, representation, provider_name,
     """
     with module.lock:
         remote_handler = lib.factory.get_provider(provider_name,
-                                                  collection,
+                                                  project_name,
                                                   remote_site_name,
                                                   tree=tree,
                                                   presets=preset)

         file_path = file.get("path", "")
         local_file_path, remote_file_path = resolve_paths(
-            module, file_path, collection, remote_site_name, remote_handler
+            module, file_path, project_name, remote_site_name, remote_handler
         )

         local_folder = os.path.dirname(local_file_path)
         os.makedirs(local_folder, exist_ok=True)

-        local_site = module.get_active_site(collection)
+        local_site = module.get_active_site(project_name)

         loop = asyncio.get_running_loop()
         file_id = await loop.run_in_executor(None,
@@ -129,20 +131,20 @@ async def download(module, collection, file, representation, provider_name,
                 remote_file_path,
                 local_file_path,
                 module,
-                collection,
+                project_name,
                 file,
                 representation,
                 local_site,
                 True
             )

-        module.handle_alternate_site(collection, representation, local_site,
+        module.handle_alternate_site(project_name, representation, local_site,
                                      file["_id"], file_id)

         return file_id


-def resolve_paths(module, file_path, collection,
+def resolve_paths(module, file_path, project_name,
                   remote_site_name=None, remote_handler=None):
     """
     Returns tuple of local and remote file paths with {root}
@@ -153,7 +155,7 @@ def resolve_paths(module, file_path, collection,
     Args:
         module(SyncServerModule): object to run SyncServerModule API
         file_path(string): path with {root}
-        collection(string): project name
+        project_name(string): project name
         remote_site_name(string): remote site
         remote_handler(AbstractProvider): implementation
     Returns:
@@ -164,7 +166,7 @@ def resolve_paths(module, file_path, collection,
     remote_file_path = remote_handler.resolve_path(file_path)

     local_handler = lib.factory.get_provider(
-        'local_drive', collection, module.get_active_site(collection))
+        'local_drive', project_name, module.get_active_site(project_name))
     local_file_path = local_handler.resolve_path(file_path)

     return local_file_path, remote_file_path
@@ -269,8 +271,8 @@ class SyncServerThread(threading.Thread):
             - gets list of collections in DB
             - gets list of active remote providers (has configuration,
                 credentials)
-            - for each collection it looks for representations that should
-                be synced
+            - for each project_name it looks for representations that
+                should be synced
             - synchronize found collections
             - update representations - fills error messages for exceptions
             - waits X seconds and repeat
@@ -282,17 +284,17 @@ class SyncServerThread(threading.Thread):
                 import time
                 start_time = time.time()
                 self.module.set_sync_project_settings()  # clean cache
-                collection = None
+                project_name = None
                 enabled_projects = self.module.get_enabled_projects()
-                for collection in enabled_projects:
-                    preset = self.module.sync_project_settings[collection]
+                for project_name in enabled_projects:
+                    preset = self.module.sync_project_settings[project_name]

-                    local_site, remote_site = self._working_sites(collection)
+                    local_site, remote_site = self._working_sites(project_name)
                     if not all([local_site, remote_site]):
                         continue

                     sync_repres = self.module.get_sync_representations(
-                        collection,
+                        project_name,
                         local_site,
                         remote_site
                     )
@@ -310,7 +312,7 @@ class SyncServerThread(threading.Thread):
                     remote_provider = \
                         self.module.get_provider_for_site(site=remote_site)
                     handler = lib.factory.get_provider(remote_provider,
-                                                       collection,
+                                                       project_name,
                                                        remote_site,
                                                        presets=site_preset)
                     limit = lib.factory.get_provider_batch_limit(
@@ -341,7 +343,7 @@ class SyncServerThread(threading.Thread):
                             limit -= 1
                             task = asyncio.create_task(
                                 upload(self.module,
-                                       collection,
+                                       project_name,
                                        file,
                                        sync,
                                        remote_provider,
@@ -353,7 +355,7 @@ class SyncServerThread(threading.Thread):
                             files_processed_info.append((file,
                                                          sync,
                                                          remote_site,
-                                                         collection
+                                                         project_name
                                                          ))
                             processed_file_path.add(file_path)
                         if status == SyncStatus.DO_DOWNLOAD:
@@ -361,7 +363,7 @@ class SyncServerThread(threading.Thread):
                             limit -= 1
                             task = asyncio.create_task(
                                 download(self.module,
-                                         collection,
+                                         project_name,
                                          file,
                                          sync,
                                          remote_provider,
@@ -373,7 +375,7 @@ class SyncServerThread(threading.Thread):
                             files_processed_info.append((file,
                                                          sync,
                                                          local_site,
-                                                         collection
+                                                         project_name
                                                          ))
                             processed_file_path.add(file_path)

@@ -384,12 +386,12 @@ class SyncServerThread(threading.Thread):
                         return_exceptions=True)
                     for file_id, info in zip(files_created,
                                              files_processed_info):
-                        file, representation, site, collection = info
+                        file, representation, site, project_name = info
                         error = None
                         if isinstance(file_id, BaseException):
                             error = str(file_id)
                             file_id = None
-                        self.module.update_db(collection,
+                        self.module.update_db(project_name,
                                               file_id,
                                               file,
                                               representation,
@@ -399,7 +401,7 @@ class SyncServerThread(threading.Thread):
                 duration = time.time() - start_time
                 log.debug("One loop took {:.2f}s".format(duration))

-                delay = self.module.get_loop_delay(collection)
+                delay = self.module.get_loop_delay(project_name)
                 log.debug("Waiting for {} seconds to new loop".format(delay))
                 self.timer = asyncio.create_task(self.run_timer(delay))
                 await asyncio.gather(self.timer)
@@ -458,19 +460,19 @@ class SyncServerThread(threading.Thread):
             self.timer.cancel()
             self.timer = None

-    def _working_sites(self, collection):
-        if self.module.is_project_paused(collection):
+    def _working_sites(self, project_name):
+        if self.module.is_project_paused(project_name):
             log.debug("Both sites same, skipping")
             return None, None

-        local_site = self.module.get_active_site(collection)
-        remote_site = self.module.get_remote_site(collection)
+        local_site = self.module.get_active_site(project_name)
+        remote_site = self.module.get_remote_site(project_name)
         if local_site == remote_site:
             log.debug("{}-{} sites same, skipping".format(local_site,
                                                           remote_site))
             return None, None

-        configured_sites = _get_configured_sites(self.module, collection)
+        configured_sites = _get_configured_sites(self.module, project_name)
         if not all([local_site in configured_sites,
                     remote_site in configured_sites]):
             log.debug("Some of the sites {} - {} is not ".format(local_site,

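The loop above schedules transfers as asyncio tasks and gathers them with return_exceptions=True so one failed transfer does not cancel the batch. A self-contained sketch of that scheduling pattern with stand-in coroutines:

    import asyncio

    async def transfer(name, seconds):
        # Stand-in for the upload()/download() coroutines above.
        await asyncio.sleep(seconds)
        return name

    async def process_batch():
        # Create tasks up to some provider limit, then gather them;
        # exceptions come back as values that can be written to the DB
        # as error states instead of aborting the whole loop.
        tasks = [asyncio.create_task(transfer("file_{}".format(i), 0.1))
                 for i in range(3)]
        results = await asyncio.gather(*tasks, return_exceptions=True)
        for result in results:
            error = str(result) if isinstance(result, BaseException) else None
            print(result, error)

    asyncio.run(process_batch())
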
@@ -6,7 +6,7 @@ import platform
 import copy
 from collections import deque, defaultdict

+from openpype.client import get_projects
 from openpype.modules import OpenPypeModule
 from openpype_interfaces import ITrayModule
 from openpype.settings import (
@@ -25,6 +25,8 @@ from .providers import lib

 from .utils import time_function, SyncStatus, SiteAlreadyPresentError

+from openpype.client import get_representations, get_representation_by_id
+

 log = PypeLogger.get_logger("SyncServer")

@@ -128,12 +130,12 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         self.projects_processed = set()

     """ Start of Public API """
-    def add_site(self, collection, representation_id, site_name=None,
+    def add_site(self, project_name, representation_id, site_name=None,
                  force=False):
         """
         Adds new site to representation to be synced.

-        'collection' must have synchronization enabled (globally or
+        'project_name' must have synchronization enabled (globally or
         project only)

         Used as a API endpoint from outside applications (Loader etc).
@@ -141,7 +143,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         Use 'force' to reset existing site.

         Args:
-            collection (string): project name (must match DB)
+            project_name (string): project name (must match DB)
             representation_id (string): MongoDB _id value
             site_name (string): name of configured and active site
             force (bool): reset site if exists
@@ -151,25 +153,25 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                 not 'force'
             ValueError - other errors (repre not found, misconfiguration)
         """
-        if not self.get_sync_project_setting(collection):
+        if not self.get_sync_project_setting(project_name):
             raise ValueError("Project not configured")

         if not site_name:
             site_name = self.DEFAULT_SITE

-        self.reset_site_on_representation(collection,
+        self.reset_site_on_representation(project_name,
                                           representation_id,
                                           site_name=site_name,
                                           force=force)

-    def remove_site(self, collection, representation_id, site_name,
+    def remove_site(self, project_name, representation_id, site_name,
                     remove_local_files=False):
         """
         Removes 'site_name' for particular 'representation_id' on
-        'collection'
+        'project_name'

         Args:
-            collection (string): project name (must match DB)
+            project_name (string): project name (must match DB)
             representation_id (string): MongoDB _id value
             site_name (string): name of configured and active site
             remove_local_files (bool): remove only files for 'local_id'
@@ -178,15 +180,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         Returns:
             throws ValueError if any issue
         """
-        if not self.get_sync_project_setting(collection):
+        if not self.get_sync_project_setting(project_name):
             raise ValueError("Project not configured")

-        self.reset_site_on_representation(collection,
+        self.reset_site_on_representation(project_name,
                                           representation_id,
                                           site_name=site_name,
                                           remove=True)
         if remove_local_files:
-            self._remove_local_file(collection, representation_id, site_name)
+            self._remove_local_file(project_name, representation_id, site_name)

     def compute_resource_sync_sites(self, project_name):
         """Get available resource sync sites state for publish process.
@@ -333,9 +335,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

         return alt_site_pairs

-    def clear_project(self, collection, site_name):
+    def clear_project(self, project_name, site_name):
         """
-        Clear 'collection' of 'site_name' and its local files
+        Clear 'project_name' of 'site_name' and its local files

         Works only on real local sites, not on 'studio'
         """
@@ -344,16 +346,17 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             "files.sites.name": site_name
         }

+        # TODO currently not possible to replace with get_representations
         representations = list(
-            self.connection.database[collection].find(query))
+            self.connection.database[project_name].find(query))
         if not representations:
             self.log.debug("No repre found")
             return

         for repre in representations:
-            self.remove_site(collection, repre.get("_id"), site_name, True)
+            self.remove_site(project_name, repre.get("_id"), site_name, True)

-    def create_validate_project_task(self, collection, site_name):
+    def create_validate_project_task(self, project_name, site_name):
         """Adds metadata about project files validation on a queue.

         This process will loop through all representation and check if
@@ -370,33 +373,28 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         """
         task = {
             "type": "validate",
-            "project_name": collection,
-            "func": lambda: self.validate_project(collection, site_name,
+            "project_name": project_name,
+            "func": lambda: self.validate_project(project_name, site_name,
                                                   reset_missing=True)
         }
-        self.projects_processed.add(collection)
+        self.projects_processed.add(project_name)
         self.long_running_tasks.append(task)

-    def validate_project(self, collection, site_name, reset_missing=False):
-        """Validate 'collection' of 'site_name' and its local files
+    def validate_project(self, project_name, site_name, reset_missing=False):
+        """Validate 'project_name' of 'site_name' and its local files

         If file present and not marked with a 'site_name' in DB, DB is
         updated with site name and file modified date.

         Args:
-            collection (string): project name
+            project_name (string): project name
             site_name (string): active site name
             reset_missing (bool): if True reset site in DB if missing
                 physically
         """
-        self.log.debug("Validation of {} for {} started".format(collection,
+        self.log.debug("Validation of {} for {} started".format(project_name,
                                                                 site_name))
-        query = {
-            "type": "representation"
-        }
-
-        representations = list(
-            self.connection.database[collection].find(query))
+        representations = list(get_representations(project_name))
         if not representations:
             self.log.debug("No repre found")
             return
@@ -416,7 +414,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                 continue

             file_path = repre_file.get("path", "")
-            local_file_path = self.get_local_file_path(collection,
+            local_file_path = self.get_local_file_path(project_name,
                                                        site_name,
                                                        file_path)

@@ -428,14 +426,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                     "Adding site {} for {}".format(site_name,
                                                    repre_id))

-                query = {
-                    "_id": repre_id
-                }
                 created_dt = datetime.fromtimestamp(
                     os.path.getmtime(local_file_path))
                 elem = {"name": site_name,
                         "created_dt": created_dt}
-                self._add_site(collection, query, repre, elem,
+                self._add_site(project_name, repre, elem,
                                site_name=site_name,
                                file_id=repre_file["_id"],
                                force=True)
@@ -445,41 +440,42 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                     self.log.debug("Resetting site {} for {}".
                                    format(site_name, repre_id))
                     self.reset_site_on_representation(
-                        collection, repre_id, site_name=site_name,
+                        project_name, repre_id, site_name=site_name,
                         file_id=repre_file["_id"])
                     sites_reset += 1

             if sites_added % 100 == 0:
                 self.log.debug("Sites added {}".format(sites_added))

-        self.log.debug("Validation of {} for {} ended".format(collection,
+        self.log.debug("Validation of {} for {} ended".format(project_name,
                                                               site_name))
         self.log.info("Sites added {}, sites reset {}".format(sites_added,
                                                               reset_missing))

-    def pause_representation(self, collection, representation_id, site_name):
+    def pause_representation(self, project_name, representation_id, site_name):
         """
         Sets 'representation_id' as paused, eg. no syncing should be
         happening on it.

         Args:
-            collection (string): project name
+            project_name (string): project name
             representation_id (string): MongoDB objectId value
             site_name (string): 'gdrive', 'studio' etc.
         """
         log.info("Pausing SyncServer for {}".format(representation_id))
         self._paused_representations.add(representation_id)
-        self.reset_site_on_representation(collection, representation_id,
+        self.reset_site_on_representation(project_name, representation_id,
                                           site_name=site_name, pause=True)

-    def unpause_representation(self, collection, representation_id, site_name):
+    def unpause_representation(self, project_name,
+                               representation_id, site_name):
         """
         Sets 'representation_id' as unpaused.

         Does not fail or warn if repre wasn't paused.

         Args:
-            collection (string): project name
+            project_name (string): project name
             representation_id (string): MongoDB objectId value
             site_name (string): 'gdrive', 'studio' etc.
         """
@@ -489,7 +485,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         except KeyError:
             pass
         # self.paused_representations is not persistent
-        self.reset_site_on_representation(collection, representation_id,
+        self.reset_site_on_representation(project_name, representation_id,
                                           site_name=site_name, pause=False)

     def is_representation_paused(self, representation_id,
@@ -520,7 +516,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         happening on all representation inside.

         Args:
-            project_name (string): collection name
+            project_name (string): project_name name
         """
         log.info("Pausing SyncServer for {}".format(project_name))
         self._paused_projects.add(project_name)
@@ -532,7 +528,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         Does not fail or warn if project wasn't paused.

         Args:
-            project_name (string): collection name
+            project_name (string):
         """
         log.info("Unpausing SyncServer for {}".format(project_name))
         try:
@@ -545,7 +541,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         Returns if 'project_name' is paused or not.

         Args:
-            project_name (string): collection name
+            project_name (string):
             check_parents (bool): check if server itself
                 is not paused
         Returns:
@@ -917,7 +913,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         enabled_projects = []

         if self.enabled:
-            for project in self.connection.projects(projection={"name": 1}):
+            for project in get_projects(fields=["name"]):
                 project_name = project["name"]
                 if self.is_project_enabled(project_name):
                     enabled_projects.append(project_name)
@@ -944,8 +940,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             return True
         return False

-    def handle_alternate_site(self, collection, representation, processed_site,
-                              file_id, synced_file_id):
+    def handle_alternate_site(self, project_name, representation,
+                              processed_site, file_id, synced_file_id):
         """
         For special use cases where one site vendors another.

@@ -958,7 +954,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         same location >> file is accesible on 'sftp' site right away.

         Args:
-            collection (str): name of project
+            project_name (str): name of project
             representation (dict)
             processed_site (str): real site_name of published/uploaded file
             file_id (ObjectId): DB id of file handled
@@ -982,26 +978,112 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         alternate_sites = set(alternate_sites)

         for alt_site in alternate_sites:
-            query = {
-                "_id": representation["_id"]
-            }
             elem = {"name": alt_site,
                     "created_dt": datetime.now(),
                     "id": synced_file_id}

             self.log.debug("Adding alternate {} to {}".format(
                 alt_site, representation["_id"]))
-            self._add_site(collection, query,
+            self._add_site(project_name,
                            representation, elem,
                            alt_site, file_id=file_id, force=True)

+    def get_repre_info_for_versions(self, project_name, version_ids,
+                                    active_site, remote_site):
+        """Returns representation documents for versions and sites combi
+
+        Args:
+            project_name (str)
+            version_ids (list): of version[_id]
+            active_site (string): 'local', 'studio' etc
+            remote_site (string): dtto
+        Returns:
+
+        """
+        self.connection.Session["AVALON_PROJECT"] = project_name
+        query = [
+            {"$match": {"parent": {"$in": version_ids},
+                        "type": "representation",
+                        "files.sites.name": {"$exists": 1}}},
+            {"$unwind": "$files"},
+            {'$addFields': {
+                'order_local': {
+                    '$filter': {
+                        'input': '$files.sites', 'as': 'p',
+                        'cond': {'$eq': ['$$p.name', active_site]}
+                    }
+                }
+            }},
+            {'$addFields': {
+                'order_remote': {
+                    '$filter': {
+                        'input': '$files.sites', 'as': 'p',
+                        'cond': {'$eq': ['$$p.name', remote_site]}
+                    }
+                }
+            }},
+            {'$addFields': {
+                'progress_local': {"$arrayElemAt": [{
+                    '$cond': [
+                        {'$size': "$order_local.progress"},
+                        "$order_local.progress",
+                        # if exists created_dt count is as available
+                        {'$cond': [
+                            {'$size': "$order_local.created_dt"},
+                            [1],
+                            [0]
+                        ]}
+                    ]},
+                    0
+                ]}
+            }},
+            {'$addFields': {
+                'progress_remote': {"$arrayElemAt": [{
+                    '$cond': [
+                        {'$size': "$order_remote.progress"},
+                        "$order_remote.progress",
+                        # if exists created_dt count is as available
+                        {'$cond': [
+                            {'$size': "$order_remote.created_dt"},
+                            [1],
+                            [0]
+                        ]}
+                    ]},
+                    0
+                ]}
+            }},
+            {'$group': {  # first group by repre
+                '_id': '$_id',
+                'parent': {'$first': '$parent'},
+                'avail_ratio_local': {
+                    '$first': {
+                        '$divide': [{'$sum': "$progress_local"}, {'$sum': 1}]
+                    }
+                },
+                'avail_ratio_remote': {
+                    '$first': {
+                        '$divide': [{'$sum': "$progress_remote"}, {'$sum': 1}]
+                    }
+                }
+            }},
+            {'$group': {  # second group by parent, eg version_id
+                '_id': '$parent',
+                'repre_count': {'$sum': 1},  # total representations
+                # fully available representation for site
+                'avail_repre_local': {'$sum': "$avail_ratio_local"},
+                'avail_repre_remote': {'$sum': "$avail_ratio_remote"},
+            }},
+        ]
+        # docs = list(self.connection.aggregate(query))
+        return self.connection.aggregate(query)
+
     """ End of Public API """
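Roughly, the new aggregation counts a site as available per file by its 'progress' when present, otherwise 1 if 'created_dt' exists, else 0; ratios are then rolled up per representation and summed per version. A plain-Python restatement with synthetic numbers (the Mongo operators above carry quirks this glosses over):

    # One representation with three files on the active site.
    file_progress = [1.0, 0.5, 0.0]
    avail_ratio_local = sum(file_progress) / len(file_progress)   # 0.5

    # One version with two representations.
    repre_ratios = [avail_ratio_local, 1.0]
    summary = {
        "repre_count": len(repre_ratios),          # 2 representations
        "avail_repre_local": sum(repre_ratios),    # 1.5 of 2 available
    }
    print(summary)
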

-    def get_local_file_path(self, collection, site_name, file_path):
+    def get_local_file_path(self, project_name, site_name, file_path):
         """
         Externalized for app
         """
-        handler = LocalDriveHandler(collection, site_name)
+        handler = LocalDriveHandler(project_name, site_name)
         local_file_path = handler.resolve_path(file_path)

         return local_file_path
@@ -1160,10 +1242,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
     def _prepare_sync_project_settings(self, exclude_locals):
         sync_project_settings = {}
         system_sites = self.get_all_site_configs()
-        project_docs = self.connection.projects(
-            projection={"name": 1},
-            only_active=True
-        )
+        project_docs = get_projects(fields=["name"])
         for project_doc in project_docs:
             project_name = project_doc["name"]
             sites = copy.deepcopy(system_sites)  # get all configured sites
@@ -1288,7 +1367,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         return sites.get(site, 'N/A')

     @time_function
-    def get_sync_representations(self, collection, active_site, remote_site):
+    def get_sync_representations(self, project_name, active_site, remote_site):
         """
         Get representations that should be synced, these could be
         recognised by presence of document in 'files.sites', where key is
@@ -1299,8 +1378,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             better performance. Goal is to get as few representations as
             possible.
         Args:
-            collection (string): name of collection (in most cases matches
-                project name
+            project_name (string):
             active_site (string): identifier of current active site (could be
                 'local_0' when working from home, 'studio' when working in the
                 studio (default)
@@ -1309,10 +1387,10 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         Returns:
             (list) of dictionaries
         """
-        log.debug("Check representations for : {}".format(collection))
-        self.connection.Session["AVALON_PROJECT"] = collection
+        log.debug("Check representations for : {}".format(project_name))
+        self.connection.Session["AVALON_PROJECT"] = project_name
         # retry_cnt - number of attempts to sync specific file before giving up
-        retries_arr = self._get_retries_arr(collection)
+        retries_arr = self._get_retries_arr(project_name)
         match = {
             "type": "representation",
             "$or": [
@@ -1449,14 +1527,14 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

         return SyncStatus.DO_NOTHING

-    def update_db(self, collection, new_file_id, file, representation,
+    def update_db(self, project_name, new_file_id, file, representation,
                   site, error=None, progress=None, priority=None):
         """
         Update 'provider' portion of records in DB with success (file_id)
         or error (exception)

         Args:
-            collection (string): name of project - force to db connection as
+            project_name (string): name of project - force to db connection as
                 each file might come from different collection
             new_file_id (string):
             file (dictionary): info about processed file (pulled from DB)
@@ -1499,7 +1577,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         if file_id:
             arr_filter.append({'f._id': ObjectId(file_id)})

-        self.connection.database[collection].update_one(
+        self.connection.database[project_name].update_one(
             query,
             update,
             upsert=True,
@@ -1562,7 +1640,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

         return -1, None

-    def reset_site_on_representation(self, collection, representation_id,
+    def reset_site_on_representation(self, project_name, representation_id,
                                      side=None, file_id=None, site_name=None,
                                      remove=False, pause=None, force=False):
         """
@@ -1579,7 +1657,7 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         Should be used when repre should be synced to new site.

         Args:
-            collection (string): name of project (eg. collection) in DB
+            project_name (string): name of project (eg. collection) in DB
             representation_id(string): _id of representation
             file_id (string): file _id in representation
             side (string): local or remote side
@@ -1593,20 +1671,18 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
                 not 'force'
             ValueError - other errors (repre not found, misconfiguration)
         """
-        query = {
-            "_id": ObjectId(representation_id)
-        }
-
-        representation = self.connection.database[collection].find_one(query)
+        representation = get_representation_by_id(project_name,
+                                                  representation_id)
         if not representation:
             raise ValueError("Representation {} not found in {}".
-                             format(representation_id, collection))
+                             format(representation_id, project_name))

         if side and site_name:
             raise ValueError("Misconfiguration, only one of side and " +
                              "site_name arguments should be passed.")

-        local_site = self.get_active_site(collection)
-        remote_site = self.get_remote_site(collection)
+        local_site = self.get_active_site(project_name)
+        remote_site = self.get_remote_site(project_name)

         if side:
             if side == 'local':
@@ -1617,37 +1693,43 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             elem = {"name": site_name}

         if file_id:  # reset site for particular file
-            self._reset_site_for_file(collection, query,
+            self._reset_site_for_file(project_name, representation_id,
                                       elem, file_id, site_name)
         elif side:  # reset site for whole representation
-            self._reset_site(collection, query, elem, site_name)
+            self._reset_site(project_name, representation_id, elem, site_name)
         elif remove:  # remove site for whole representation
-            self._remove_site(collection, query, representation, site_name)
+            self._remove_site(project_name,
+                              representation, site_name)
         elif pause is not None:
-            self._pause_unpause_site(collection, query,
+            self._pause_unpause_site(project_name,
                                      representation, site_name, pause)
         else:  # add new site to all files for representation
-            self._add_site(collection, query, representation, elem, site_name,
+            self._add_site(project_name, representation, elem, site_name,
                            force=force)

-    def _update_site(self, collection, query, update, arr_filter):
+    def _update_site(self, project_name, representation_id,
+                     update, arr_filter):
         """
         Auxiliary method to call update_one function on DB

         Used for refactoring ugly reset_provider_for_file
         """
-        self.connection.database[collection].update_one(
+        query = {
+            "_id": ObjectId(representation_id)
+        }
+
+        self.connection.database[project_name].update_one(
             query,
             update,
             upsert=True,
             array_filters=arr_filter
         )

-    def _reset_site_for_file(self, collection, query,
+    def _reset_site_for_file(self, project_name, representation_id,
                              elem, file_id, site_name):
         """
         Resets 'site_name' for 'file_id' on representation in 'query' on
-        'collection'
+        'project_name'
         """
         update = {
             "$set": {"files.$[f].sites.$[s]": elem}
@@ -1660,9 +1742,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             {'f._id': file_id}
         ]

-        self._update_site(collection, query, update, arr_filter)
+        self._update_site(project_name, representation_id, update, arr_filter)

-    def _reset_site(self, collection, query, elem, site_name):
+    def _reset_site(self, project_name, representation_id, elem, site_name):
         """
         Resets 'site_name' for all files of representation in 'query'
         """
@@ -1674,9 +1756,9 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             {'s.name': site_name}
         ]

-        self._update_site(collection, query, update, arr_filter)
+        self._update_site(project_name, representation_id, update, arr_filter)

-    def _remove_site(self, collection, query, representation, site_name):
+    def _remove_site(self, project_name, representation, site_name):
         """
         Removes 'site_name' for 'representation' in 'query'

@@ -1698,10 +1780,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         }
         arr_filter = []

-        self._update_site(collection, query, update, arr_filter)
+        self._update_site(project_name, representation["_id"],
+                          update, arr_filter)

-    def _pause_unpause_site(self, collection, query,
-                            representation, site_name, pause):
+    def _pause_unpause_site(self, project_name, representation,
+                            site_name, pause):
         """
         Pauses/unpauses all files for 'representation' based on 'pause'

@@ -1733,12 +1816,13 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             {'s.name': site_name}
         ]

-        self._update_site(collection, query, update, arr_filter)
+        self._update_site(project_name, representation["_id"],
+                          update, arr_filter)

-    def _add_site(self, collection, query, representation, elem, site_name,
+    def _add_site(self, project_name, representation, elem, site_name,
                   force=False, file_id=None):
         """
-        Adds 'site_name' to 'representation' on 'collection'
+        Adds 'site_name' to 'representation' on 'project_name'

         Args:
             representation (dict)
@@ -1746,10 +1830,11 @@ class SyncServerModule(OpenPypeModule, ITrayModule):

         Use 'force' to remove existing or raises ValueError
         """
+        representation_id = representation["_id"]
         reset_existing = False
         files = representation.get("files", [])
         if not files:
-            log.debug("No files for {}".format(representation["_id"]))
+            log.debug("No files for {}".format(representation_id))
             return

         for repre_file in files:
@@ -1759,7 +1844,8 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             for site in repre_file.get("sites"):
                 if site["name"] == site_name:
                     if force or site.get("error"):
-                        self._reset_site_for_file(collection, query,
+                        self._reset_site_for_file(project_name,
                                                   representation_id,
                                                   elem, repre_file["_id"],
                                                   site_name)
                         reset_existing = True
@@ -1785,14 +1871,15 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
             {'f._id': file_id}
         ]

-        self._update_site(collection, query, update, arr_filter)
+        self._update_site(project_name, representation_id,
+                          update, arr_filter)

-    def _remove_local_file(self, collection, representation_id, site_name):
+    def _remove_local_file(self, project_name, representation_id, site_name):
         """
         Removes all local files for 'site_name' of 'representation_id'

         Args:
-            collection (string): project name (must match DB)
+            project_name (string): project name (must match DB)
             representation_id (string): MongoDB _id value
             site_name (string): name of configured and active site

@@ -1808,21 +1895,17 @@ class SyncServerModule(OpenPypeModule, ITrayModule):
         provider_name = self.get_provider_for_site(site=site_name)

         if provider_name == 'local_drive':
-            query = {
-                "_id": ObjectId(representation_id)
-            }
-
-            representation = list(
-                self.connection.database[collection].find(query))
+            representation = get_representation_by_id(project_name,
                                                       representation_id,
                                                       fields=["files"])
             if not representation:
                 self.log.debug("No repre {} found".format(
                     representation_id))
                 return

-            representation = representation.pop()
             local_file_path = ''
             for file in representation.get("files"):
-                local_file_path = self.get_local_file_path(collection,
+                local_file_path = self.get_local_file_path(project_name,
                                                            site_name,
                                                            file.get("path", "")
                                                            )

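For orientation, a hypothetical call into the renamed public API. Obtaining the module through ModulesManager and the concrete identifiers are assumptions for illustration; the method names and argument order come from the hunks above:

    from openpype.modules import ModulesManager

    manager = ModulesManager()
    sync_server = manager.modules_by_name["sync_server"]  # assumed lookup key

    project_name = "MyProject"                # placeholder project
    repre_id = "507f1f77bcf86cd799439011"     # placeholder MongoDB _id

    sync_server.add_site(project_name, repre_id, site_name="gdrive")
    sync_server.pause_representation(project_name, repre_id, "gdrive")
    sync_server.unpause_representation(project_name, repre_id, "gdrive")
    sync_server.remove_site(project_name, repre_id, "gdrive",
                            remove_local_files=True)
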
@@ -11,6 +11,7 @@ from openpype.tools.utils.delegates import pretty_timestamp

 from openpype.lib import PypeLogger
 from openpype.api import get_local_site_id
+from openpype.client import get_representation_by_id

 from . import lib

@@ -440,7 +441,7 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel):
         full text filtering.

         Allows pagination, most of heavy lifting is being done on DB side.
-        Single model matches to single collection. When project is changed,
+        Single model matches to single project. When project is changed,
         model is reset and refreshed.

         Args:
@@ -919,11 +920,10 @@ class SyncRepresentationSummaryModel(_SyncRepresentationModel):

         repre_id = self.data(index, Qt.UserRole)

-        representation = list(self.dbcon.find({"type": "representation",
-                                               "_id": repre_id}))
+        representation = get_representation_by_id(self.project, repre_id)
         if representation:
             self.sync_server.update_db(self.project, None, None,
-                                       representation.pop(),
+                                       representation,
                                        get_local_site_id(),
                                        priority=value)
         self.is_editing = False
@@ -1357,11 +1357,10 @@ class SyncRepresentationDetailModel(_SyncRepresentationModel):
         file_id = self.data(index, Qt.UserRole)

         updated_file = None
-        # conversion from cursor to list
-        representations = list(self.dbcon.find({"type": "representation",
-                                                "_id": self._id}))
+        representation = get_representation_by_id(self.project, self._id)
         if not representation:
             return

-        representation = representations.pop()
         for repre_file in representation["files"]:
             if repre_file["_id"] == file_id:
                 updated_file = repre_file

@@ -263,16 +263,17 @@ def get_media_range_with_retimes(otio_clip, handle_start, handle_end):
             "retime": True,
             "speed": time_scalar,
             "timewarps": time_warp_nodes,
-            "handleStart": round(handle_start),
-            "handleEnd": round(handle_end)
+            "handleStart": int(round(handle_start)),
+            "handleEnd": int(round(handle_end))
         }
     }

     returning_dict = {
         "mediaIn": media_in_trimmed,
         "mediaOut": media_out_trimmed,
-        "handleStart": round(handle_start),
-        "handleEnd": round(handle_end)
+        "handleStart": int(round(handle_start)),
+        "handleEnd": int(round(handle_end)),
+        "speed": time_scalar
     }

     # add version data only if retime

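The switch to int(round(...)) above likely pins the type of the handle values: Python 2's round() returned a float, and some float-like types keep their own type through round(), while the wrapped form always yields a plain int. A quick illustration:

    handle_start = 12.0

    # Python 3: round() on a float already returns an int...
    print(round(handle_start), type(round(handle_start)))    # 12 <class 'int'>

    # ...but Python 2 returned 12.0 here; int() guarantees an integer
    # either way, which keeps downstream data consistently typed.
    print(int(round(handle_start)), type(int(round(handle_start))))
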
@@ -16,6 +16,7 @@ from .utils import (
     switch_container,

     get_loader_identifier,
+    get_loaders_by_name,

     get_representation_path_from_context,
     get_representation_path,
@@ -61,6 +62,7 @@ __all__ = (
     "switch_container",

     "get_loader_identifier",
+    "get_loaders_by_name",

     "get_representation_path_from_context",
     "get_representation_path",

@@ -222,13 +222,20 @@ def get_representation_context(representation):
         project_name, representation
     )

+    if not representation:
+        raise AssertionError("Representation was not found in database")
+
     version, subset, asset, project = get_representation_parents(
         project_name, representation
     )

-    assert all([representation, version, subset, asset, project]), (
-        "This is a bug"
-    )
+    if not version:
+        raise AssertionError("Version was not found in database")
+    if not subset:
+        raise AssertionError("Subset was not found in database")
+    if not asset:
+        raise AssertionError("Asset was not found in database")
+    if not project:
+        raise AssertionError("Project was not found in database")

     context = {
         "project": {
@@ -369,6 +376,20 @@ def get_loader_identifier(loader):
     return loader.__name__


+def get_loaders_by_name():
+    from .plugins import discover_loader_plugins
+
+    loaders_by_name = {}
+    for loader in discover_loader_plugins():
+        loader_name = loader.__name__
+        if loader_name in loaders_by_name:
+            raise KeyError(
+                "Duplicated loader name {} !".format(loader_name)
+            )
+        loaders_by_name[loader_name] = loader
+    return loaders_by_name
+
+
 def _get_container_loader(container):
     """Return the Loader corresponding to the container"""
     from .plugins import discover_loader_plugins

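A short usage sketch for the new helper; the loader name looked up is illustrative, and discovery requires a configured pipeline environment:

    from openpype.pipeline.load import get_loaders_by_name

    loaders_by_name = get_loaders_by_name()
    # Mapping of loader class name to class; discovery raises KeyError
    # if two loader classes share a name.
    loader_cls = loaders_by_name.get("FbxLoader")  # name is illustrative
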
526 openpype/pipeline/workfile/abstract_template_loader.py (new file)
@@ -0,0 +1,526 @@
import os
from abc import ABCMeta, abstractmethod

import six
import logging
from functools import reduce

from openpype.client import get_asset_by_name
from openpype.settings import get_project_settings
from openpype.lib import (
    StringTemplate,
    Logger,
    filter_profiles,
    get_linked_assets,
)
from openpype.pipeline import legacy_io, Anatomy
from openpype.pipeline.load import (
    get_loaders_by_name,
    get_representation_context,
    load_with_repre_context,
)

from .build_template_exceptions import (
    TemplateAlreadyImported,
    TemplateLoadingFailed,
    TemplateProfileNotFound,
    TemplateNotFound
)

log = logging.getLogger(__name__)


def update_representations(entities, entity):
    if entity['context']['subset'] not in entities:
        entities[entity['context']['subset']] = entity
    else:
        current = entities[entity['context']['subset']]
        incoming = entity
        entities[entity['context']['subset']] = max(
            current, incoming,
            key=lambda entity: entity["context"].get("version", -1))

    return entities


def parse_loader_args(loader_args):
    if not loader_args:
        return dict()
    try:
        parsed_args = eval(loader_args)
        if not isinstance(parsed_args, dict):
            return dict()
        else:
            return parsed_args
    except Exception as err:
        print(
            "Error while parsing loader arguments '{}'.\n{}: {}\n\n"
            "Continuing with default arguments. . .".format(
                loader_args,
                err.__class__.__name__,
                err))
        return dict()


@six.add_metaclass(ABCMeta)
class AbstractTemplateLoader:
    """
    Abstraction of Template Loader.

    Properties:
        template_path: property to get current template path

    Methods:
        import_template: Abstract method. Used to load the template,
            depending on current host.
        get_template_nodes: Abstract method. Used to query nodes acting
            as placeholders, depending on current host.
    """

    _log = None

    def __init__(self, placeholder_class):
        # TODO template loader should expect host as an argument
        # - host has responsibility for most of this code (and also
        #   provides the placeholder class)
        # - host also has responsibility for current context
        # - this won't work in DCCs where multiple workfiles with
        #   different contexts can be opened at a single time
        # - template loader should have the ability to change context
        project_name = legacy_io.active_project()
        asset_name = legacy_io.Session["AVALON_ASSET"]

        self.loaders_by_name = get_loaders_by_name()
        self.current_asset = asset_name
        self.project_name = project_name
        self.host_name = legacy_io.Session["AVALON_APP"]
        self.task_name = legacy_io.Session["AVALON_TASK"]
        self.placeholder_class = placeholder_class
        self.current_asset_doc = get_asset_by_name(project_name, asset_name)
        self.task_type = (
            self.current_asset_doc
            .get("data", {})
            .get("tasks", {})
            .get(self.task_name, {})
            .get("type")
        )

        self.log.info(
            "BUILDING ASSET FROM TEMPLATE:\n"
            "Starting templated build for {asset} in {project}\n\n"
            "Asset : {asset}\n"
            "Task : {task_name} ({task_type})\n"
            "Host : {host}\n"
            "Project : {project}\n".format(
                asset=self.current_asset,
                host=self.host_name,
                project=self.project_name,
                task_name=self.task_name,
                task_type=self.task_type
            ))
        # Skip if there is no loader
        if not self.loaders_by_name:
            self.log.warning(
                "There are no registered loaders. No assets will be loaded")
            return

    @property
    def log(self):
        if self._log is None:
            self._log = Logger.get_logger(self.__class__.__name__)
        return self._log

    def template_already_imported(self, err_msg):
        """In case the template was already loaded.

        Raises the error as a default action.
        Override this method in your template loader implementation
        to manage this case.
        """
        self.log.error("{}: {}".format(
            err_msg.__class__.__name__,
            err_msg))
        raise TemplateAlreadyImported(err_msg)

    def template_loading_failed(self, err_msg):
        """In case template loading failed.

        Raises the error as a default action.
        Override this method in your template loader implementation
        to manage this case.
        """
        self.log.error("{}: {}".format(
            err_msg.__class__.__name__,
            err_msg))
        raise TemplateLoadingFailed(err_msg)

    @property
    def template_path(self):
        """
        Property returning the template path. Avoids a setter.

        Gets the template path from OpenPype settings based on the current
        avalon session and solves the path variables if needed.

        Returns:
            str: Solved template path

        Raises:
            TemplateProfileNotFound: No profile found in settings for
                the current avalon session.
            KeyError: Could not solve the path because a key does not
                exist in the avalon context.
            TemplateNotFound: Solved path does not exist on the current
                filesystem.
        """
        project_name = self.project_name
        host_name = self.host_name
        task_name = self.task_name
        task_type = self.task_type

        anatomy = Anatomy(project_name)
        project_settings = get_project_settings(project_name)

        build_info = project_settings[host_name]["templated_workfile_build"]
        profile = filter_profiles(
            build_info["profiles"],
            {
                "task_types": task_type,
                "tasks": task_name
            }
        )

        if not profile:
            raise TemplateProfileNotFound(
                "No matching profile found for task '{}' of type '{}' "
                "with host '{}'".format(task_name, task_type, host_name)
            )

        path = profile["path"]
        if not path:
            raise TemplateLoadingFailed(
                "Template path is not set.\n"
                "Path needs to be set in {}\\Template Workfile Build "
                "Settings\\Profiles".format(host_name.title()))

        # Try to fill the path with environments and anatomy roots
        fill_data = {
            key: value
            for key, value in os.environ.items()
        }
        fill_data["root"] = anatomy.roots
        result = StringTemplate.format_template(path, fill_data)
        if result.solved:
            path = result.normalized()

        if path and os.path.exists(path):
            self.log.info("Found template at: '{}'".format(path))
            return path

        solved_path = None
        while True:
            try:
                solved_path = anatomy.path_remapper(path)
            except KeyError as missing_key:
                raise KeyError(
                    "Could not solve key '{}' in template path '{}'".format(
                        missing_key, path))

            if solved_path is None:
                solved_path = path
            if solved_path == path:
                break
            path = solved_path

        solved_path = os.path.normpath(solved_path)
        if not os.path.exists(solved_path):
            raise TemplateNotFound(
                "Template found in OpenPype settings for task '{}' with host "
                "'{}' does not exist. (Not found: {})".format(
                    task_name, host_name, solved_path))

        self.log.info("Found template at: '{}'".format(solved_path))

        return solved_path

    def populate_template(self, ignored_ids=None):
        """
        Use template placeholders to load assets and parent them in hierarchy.

        Args:
            ignored_ids (list, optional): Representation ids that should
                be skipped (e.g. already loaded containers).

        Returns:
            None
        """
        loaders_by_name = self.loaders_by_name
        current_asset_doc = self.current_asset_doc
        linked_assets = get_linked_assets(current_asset_doc)

        ignored_ids = ignored_ids or []
        placeholders = self.get_placeholders()
        self.log.debug("Placeholders found in template: {}".format(
            [placeholder.name for placeholder in placeholders]
        ))
        for placeholder in placeholders:
            self.log.debug("Starting to process placeholder {}".format(
                placeholder.name
            ))
            # Exhaust the generator so the emptiness check below works
            # (a generator object is always truthy).
            placeholder_representations = list(
                self.get_placeholder_representations(
                    placeholder,
                    current_asset_doc,
                    linked_assets
                )
            )

            if not placeholder_representations:
                self.log.info(
                    "There is no representation for this placeholder: "
                    "{}".format(placeholder.name)
                )
                continue

            for representation in placeholder_representations:
                self.preload(placeholder, loaders_by_name, representation)

                if self.load_data_is_incorrect(
                        placeholder,
                        representation,
                        ignored_ids):
                    continue

                self.log.info(
                    "Loading {}_{} with loader {}\n"
                    "Loader arguments used: {}".format(
                        representation['context']['asset'],
                        representation['context']['subset'],
                        placeholder.loader_name,
                        placeholder.loader_args))

                try:
                    container = self.load(
                        placeholder, loaders_by_name, representation)
                except Exception:
                    self.load_failed(placeholder, representation)
                else:
                    self.load_succeed(placeholder, container)
                finally:
                    self.postload(placeholder)

    def get_placeholder_representations(
        self, placeholder, current_asset_doc, linked_asset_docs
    ):
        placeholder_representations = placeholder.get_representations(
            current_asset_doc,
            linked_asset_docs
        )
        for repre_doc in reduce(
            update_representations,
            placeholder_representations,
            dict()
        ).values():
            yield repre_doc

    def load_data_is_incorrect(
            self, placeholder, last_representation, ignored_ids):
        if not last_representation:
            self.log.warning(placeholder.err_message())
            return True
        if str(last_representation['_id']) in ignored_ids:
            print("Ignoring :", last_representation['_id'])
            return True
        return False

    def preload(self, placeholder, loaders_by_name, last_representation):
        pass

    def load(self, placeholder, loaders_by_name, last_representation):
        repre = get_representation_context(last_representation)
        return load_with_repre_context(
            loaders_by_name[placeholder.loader_name],
            repre,
            options=parse_loader_args(placeholder.loader_args))

    def load_succeed(self, placeholder, container):
        placeholder.parent_in_hierarchy(container)

    def load_failed(self, placeholder, last_representation):
        self.log.warning(
            "Got error trying to load {}:{} with {}".format(
                last_representation['context']['asset'],
                last_representation['context']['subset'],
                placeholder.loader_name
            ),
            exc_info=True
        )

    def postload(self, placeholder):
        placeholder.clean()

    def update_missing_containers(self):
        loaded_containers_ids = self.get_loaded_containers_by_id()
        self.populate_template(ignored_ids=loaded_containers_ids)

    def get_placeholders(self):
        placeholders = map(self.placeholder_class, self.get_template_nodes())
        valid_placeholders = filter(
            lambda i: i.is_valid,
            placeholders
        )
        sorted_placeholders = list(sorted(
            valid_placeholders,
            key=lambda i: i.order
        ))
        return sorted_placeholders

    @abstractmethod
    def get_loaded_containers_by_id(self):
        """
        Collect already loaded containers for updating the scene.

        Returns:
            dict (str, node): A dictionary with representation id as key
                and container as value.
        """
        pass

    @abstractmethod
    def import_template(self, template_path):
        """
        Import the template into the current host.

        Args:
            template_path (str): Full path to the current task and
                host's template file.

        Returns:
            None
        """
        pass

    @abstractmethod
    def get_template_nodes(self):
        """
        Return a list of nodes acting as host placeholders for templating.
        The data representation is up to the host implementation;
        AbstractTemplateLoader won't directly manipulate the nodes.

        Returns:
            list(AnyNode): Placeholder nodes found in the template.
        """
        pass


@six.add_metaclass(ABCMeta)
class AbstractPlaceholder:
    """Abstraction of placeholders logic.

    Properties:
        required_keys: A set of mandatory keys that describe the
            placeholder and the assets to load.
        optional_keys: A set of optional keys that describe the
            placeholder and the assets to load.
        loader_name: Name of the linked loader to use while loading assets.

    Args:
        identifier (str): Placeholder identifier. Should be possible to be
            used as identifier in "a scene" (e.g. unique node name).
    """

    required_keys = {
        "builder_type",
        "family",
        "representation",
        "order",
        "loader",
        "loader_args"
    }
    optional_keys = set()

    def __init__(self, identifier):
        self._log = None
        self._name = identifier
        self.get_data(identifier)

    @property
    def log(self):
        if self._log is None:
            self._log = Logger.get_logger(repr(self))
        return self._log

    def __repr__(self):
        return "< {} {} >".format(self.__class__.__name__, self.name)

    @property
    def name(self):
        return self._name

    @property
    def loader_args(self):
        return self.data["loader_args"]

    @property
    def builder_type(self):
        return self.data["builder_type"]

    @property
    def order(self):
        return self.data["order"]

    @property
    def loader_name(self):
        """Return placeholder loader name.

        Returns:
            str: Loader name that will be used to load placeholder
                representations.
        """
        return self.data["loader"]

    @property
    def is_valid(self):
        """Test validity of the placeholder.

        i.e.: every required key exists in placeholder data.

        Returns:
            bool: True if every required key is in data.
        """
        if set(self.required_keys).issubset(self.data.keys()):
            self.log.debug("Valid placeholder: {}".format(self.name))
            return True
        self.log.info("Placeholder is not valid: {}".format(self.name))
        return False

    @abstractmethod
    def parent_in_hierarchy(self, container):
        """Place the loaded container in the correct hierarchy
        given by the placeholder.

        Args:
            container (Dict[str, Any]): Loaded container created by loader.
        """
        pass

    @abstractmethod
    def clean(self):
        """Clean placeholder from hierarchy after loading assets."""
        pass

    @abstractmethod
    def get_representations(self, current_asset_doc, linked_asset_docs):
        """Query representations based on placeholder data.

        Args:
            current_asset_doc (Dict[str, Any]): Document of current
                context asset.
            linked_asset_docs (List[Dict[str, Any]]): Documents of assets
                linked to current context asset.

        Returns:
            Iterable[Dict[str, Any]]: Representations that are matching
                placeholder filters.
        """
        pass

    @abstractmethod
    def get_data(self, identifier):
        """Collect information about the placeholder by identifier.

        Args:
            identifier (str): A unique placeholder identifier defined by
                implementation.
        """
        pass

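To make the contract above concrete, here is a minimal, hypothetical host integration. It is only a sketch of which abstract methods a host must provide; every function marked "assumed helper" is an illustrative host-side call, not part of the API above.

```python
from openpype.pipeline.workfile.abstract_template_loader import (
    AbstractPlaceholder,
    AbstractTemplateLoader,
)


class MyHostPlaceholder(AbstractPlaceholder):
    def get_data(self, identifier):
        # Read required keys (builder_type, family, representation,
        # order, loader, loader_args) from the host node's attributes.
        self.data = read_node_attributes(identifier)  # assumed helper

    def get_representations(self, current_asset_doc, linked_asset_docs):
        # Query representation documents matching the placeholder filters.
        return query_representations(self.data)  # assumed helper

    def parent_in_hierarchy(self, container):
        # Parent the loaded container under the placeholder's node.
        parent_nodes(container, self.name)  # assumed helper

    def clean(self):
        # Remove the placeholder node once its assets are loaded.
        delete_node(self.name)  # assumed helper


class MyHostTemplateLoader(AbstractTemplateLoader):
    def import_template(self, template_path):
        import_workfile(template_path)  # assumed helper

    def get_template_nodes(self):
        # Return the host nodes that mark placeholder positions.
        return list_placeholder_nodes()  # assumed helper

    def get_loaded_containers_by_id(self):
        # No container tracking in this sketch.
        return {}
```
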
68 openpype/pipeline/workfile/build_template.py Normal file
@@ -0,0 +1,68 @@
from importlib import import_module

from openpype.lib import classes_from_module
from openpype.host import HostBase
from openpype.pipeline import registered_host

from .abstract_template_loader import (
    AbstractPlaceholder,
    AbstractTemplateLoader)

from .build_template_exceptions import (
    TemplateLoadingFailed,
    TemplateAlreadyImported,
    MissingHostTemplateModule,
    MissingTemplatePlaceholderClass,
    MissingTemplateLoaderClass
)

_module_path_format = 'openpype.hosts.{host}.api.template_loader'


def build_workfile_template(*args):
    template_loader = build_template_loader()
    try:
        template_loader.import_template(template_loader.template_path)
    except TemplateAlreadyImported as err:
        template_loader.template_already_imported(err)
    except TemplateLoadingFailed as err:
        template_loader.template_loading_failed(err)
    else:
        template_loader.populate_template()


def update_workfile_template(*args):
    template_loader = build_template_loader()
    template_loader.update_missing_containers()


def build_template_loader():
    # TODO refactor to take advantage of 'HostBase' and avoid dynamic imports
    # - hosts should have methods that give the option to return builders
    host = registered_host()
    if isinstance(host, HostBase):
        host_name = host.name
    else:
        host_name = host.__name__.partition('.')[2]
    module_path = _module_path_format.format(host=host_name)
    module = import_module(module_path)
    if not module:
        raise MissingHostTemplateModule(
            "No template loader found for host {}".format(host_name))

    template_loader_class = classes_from_module(
        AbstractTemplateLoader,
        module
    )
    template_placeholder_class = classes_from_module(
        AbstractPlaceholder,
        module
    )

    if not template_loader_class:
        raise MissingTemplateLoaderClass()
    template_loader_class = template_loader_class[0]

    if not template_placeholder_class:
        raise MissingTemplatePlaceholderClass()
    template_placeholder_class = template_placeholder_class[0]
    return template_loader_class(template_placeholder_class)

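A sketch of triggering a build from a host (assumes the host is registered and ships an `openpype.hosts.<host>.api.template_loader` module defining the two subclasses above):

```python
from openpype.pipeline.workfile.build_template import (
    build_workfile_template,
)

# Imports the template, then populates its placeholders; on
# TemplateAlreadyImported the loader's template_already_imported()
# handler decides what happens (re-raises by default).
build_workfile_template()
```
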
35 openpype/pipeline/workfile/build_template_exceptions.py Normal file
@@ -0,0 +1,35 @@
class MissingHostTemplateModule(Exception):
    """Error raised when the expected module does not exist."""
    pass


class MissingTemplatePlaceholderClass(Exception):
    """Error raised when the module doesn't implement a placeholder class."""
    pass


class MissingTemplateLoaderClass(Exception):
    """Error raised when the module doesn't implement a template loader
    class."""
    pass


class TemplateNotFound(Exception):
    """Exception raised when the template does not exist."""
    pass


class TemplateProfileNotFound(Exception):
    """Exception raised when the current profile
    doesn't match any template profile."""
    pass


class TemplateAlreadyImported(Exception):
    """Error raised when the template was already imported by the host for
    this session."""
    pass


class TemplateLoadingFailed(Exception):
    """Error raised when the template loader was unable to load the
    template."""
    pass

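Callers that want custom behavior can catch the specific exceptions instead of letting the defaults raise; a minimal sketch:

```python
from openpype.pipeline.workfile.build_template import build_workfile_template
from openpype.pipeline.workfile.build_template_exceptions import (
    TemplateNotFound,
    TemplateProfileNotFound,
)

try:
    build_workfile_template()
except TemplateProfileNotFound:
    print("No template profile matches the current task/host.")
except TemplateNotFound:
    print("Profile matched, but the template file is missing on disk.")
```
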
@@ -121,10 +121,8 @@ class CollectOtioSubsetResources(pyblish.api.InstancePlugin):
             otio.schema.ImageSequenceReference
         ):
             is_sequence = True
-        else:
-            # for OpenTimelineIO 0.12 and older
-            if metadata.get("padding"):
-                is_sequence = True
+        elif metadata.get("padding"):
+            is_sequence = True

         self.log.info(
             "frame_start-frame_end: {}-{}".format(frame_start, frame_end))

@@ -1210,7 +1210,6 @@ class ExtractReview(pyblish.api.InstancePlugin):

         # Get instance data
         pixel_aspect = temp_data["pixel_aspect"]

         if reformat_in_baking:
             self.log.debug((
                 "Using resolution from input. It is already "

@@ -1230,6 +1229,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
         # - settings value can't have None but has value of 0
         output_width = output_def.get("width") or output_width or None
         output_height = output_def.get("height") or output_height or None
+        # Force to use input resolution if output resolution was not defined
+        # in settings. Resolution from instance is not used when
+        # 'use_input_res' is set to 'True'.
+        use_input_res = False

         # Overscan color
         overscan_color_value = "black"

@@ -1241,6 +1244,17 @@ class ExtractReview(pyblish.api.InstancePlugin):
         )
         self.log.debug("Overscan color: `{}`".format(overscan_color_value))

+        # Scale input to have proper pixel aspect ratio
+        # - scale width by the pixel aspect ratio
+        scale_pixel_aspect = output_def.get("scale_pixel_aspect", True)
+        if scale_pixel_aspect and pixel_aspect != 1:
+            # Change input width after pixel aspect
+            input_width = int(input_width * pixel_aspect)
+            use_input_res = True
+            filters.append((
+                "scale={}x{}:flags=lanczos".format(input_width, input_height)
+            ))
+
         # Convert overscan value video filters
         overscan_crop = output_def.get("overscan_crop")
         overscan = OverscanCrop(

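A rough, standalone sketch of the filter string this produces for anamorphic footage (values are illustrative):

```python
input_width, input_height = 1920, 1080
pixel_aspect = 2.0

# Width is multiplied by the pixel aspect ratio so the review output
# gets square pixels; lanczos keeps the rescale sharp.
input_width = int(input_width * pixel_aspect)
scale_filter = "scale={}x{}:flags=lanczos".format(input_width, input_height)
print(scale_filter)  # scale=3840x1080:flags=lanczos
```
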
@@ -1251,13 +1265,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
         # resolution by its values
         if overscan_crop_filters:
             filters.extend(overscan_crop_filters)
             # Change input resolution after overscan crop
             input_width = overscan.width()
             input_height = overscan.height()
-            # Use output resolution as inputs after cropping to skip usage of
-            # instance data resolution
-            if output_width is None or output_height is None:
-                output_width = input_width
-                output_height = input_height
+            use_input_res = True

         # Make sure input width and height is not an odd number
         input_width_is_odd = bool(input_width % 2 != 0)

@@ -1283,8 +1294,10 @@ class ExtractReview(pyblish.api.InstancePlugin):
         self.log.debug("input_width: `{}`".format(input_width))
         self.log.debug("input_height: `{}`".format(input_height))

-        # Use instance resolution if output definition has not set it.
-        if output_width is None or output_height is None:
+        # Use instance resolution if output definition has not set it
+        # - use instance resolution only if there were no scale changes
+        #   that may massively affect output ('use_input_res')
+        if not use_input_res and output_width is None or output_height is None:
             output_width = temp_data["resolution_width"]
             output_height = temp_data["resolution_height"]

@@ -1326,7 +1339,6 @@ class ExtractReview(pyblish.api.InstancePlugin):
             output_width == input_width
             and output_height == input_height
             and not letter_box_enabled
-            and pixel_aspect == 1
         ):
             self.log.debug(
                 "Output resolution is same as input's"

@@ -1336,39 +1348,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
             new_repre["resolutionHeight"] = input_height
             return filters

-        # defining image ratios
-        input_res_ratio = (
-            (float(input_width) * pixel_aspect) / input_height
-        )
-        output_res_ratio = float(output_width) / float(output_height)
-        self.log.debug("input_res_ratio: `{}`".format(input_res_ratio))
-        self.log.debug("output_res_ratio: `{}`".format(output_res_ratio))
-
-        # Round ratios to 2 decimal places for comparing
-        input_res_ratio = round(input_res_ratio, 2)
-        output_res_ratio = round(output_res_ratio, 2)
-
-        # get scale factor
-        scale_factor_by_width = (
-            float(output_width) / (input_width * pixel_aspect)
-        )
-        scale_factor_by_height = (
-            float(output_height) / input_height
-        )
-
-        self.log.debug(
-            "scale_factor_by_width: `{}`".format(scale_factor_by_width)
-        )
-        self.log.debug(
-            "scale_factor_by_height: `{}`".format(scale_factor_by_height)
-        )
-
-        # scaling non-square pixels and 1920 width
-        if (
-            input_height != output_height
-            or input_width != output_width
-            or pixel_aspect != 1
-        ):
+        if input_height != output_height or input_width != output_width:
             filters.extend([
                 (
                     "scale={}x{}"

@@ -1478,6 +1459,8 @@ class ExtractReview(pyblish.api.InstancePlugin):
         output = -1
         regexes = self.compile_list_of_regexes(in_list)
         for regex in regexes:
+            if not value:
+                continue
             if re.match(regex, value):
                 output = 1
                 break

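A small standalone demo of the guard's purpose (the helper name is hypothetical; the loop mirrors the one above): `re.match` raises `TypeError` when the value is `None`, so falsy values are skipped before matching.

```python
import re


def matches_any(value, regexes):
    # Falsy values (None, "") never reach re.match, which would
    # raise TypeError on None.
    for regex in regexes:
        if not value:
            continue
        if re.match(regex, value):
            return True
    return False


print(matches_any(None, [re.compile("render")]))          # False
print(matches_any("renderMain", [re.compile("render")]))  # True
```
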
@@ -93,6 +93,6 @@ class IntegrateSubsetGroup(pyblish.api.InstancePlugin):
         return {
             "families": anatomy_data["family"],
             "tasks": task.get("name"),
-            "hosts": anatomy_data["app"],
+            "hosts": instance.context.data["hostName"],
             "task_types": task.get("type")
         }

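For context, the returned dict is the filtering data matched against subset-group profiles; switching the `hosts` key to the context's `hostName` avoids depending on anatomy template data. Illustrative values only:

```python
# Hypothetical filter data produced for a Nuke render instance:
filter_data = {
    "families": "render",
    "tasks": "comp",
    "hosts": "nuke",  # now read from instance.context.data["hostName"]
    "task_types": "Compositing",
}
```
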
Some files were not shown because too many files have changed in this diff