Mirror of https://github.com/ynput/ayon-core.git, synced 2025-12-25 05:14:40 +01:00

commit b4d55bf1fa

    Merge branch 'develop' into feature/timers_manager_get_task_time

    # Conflicts:
    #	openpype/modules/default_modules/ftrack/ftrack_module.py

337 changed files with 16247 additions and 2650 deletions
.github/workflows/prerelease.yml (vendored) — 11 changed lines

@@ -20,12 +20,12 @@ jobs:
           python-version: 3.7
       - name: Install Python requirements
-        run: pip install gitpython semver
+        run: pip install gitpython semver PyGithub

       - name: 🔎 Determine next version type
         id: version_type
         run: |
-          TYPE=$(python ./tools/ci_tools.py --bump)
+          TYPE=$(python ./tools/ci_tools.py --bump --github_token ${{ secrets.GITHUB_TOKEN }})

           echo ::set-output name=type::$TYPE
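The new PyGithub requirement together with the `--github_token` argument suggests `ci_tools.py --bump` now queries the GitHub API (presumably labels on merged PRs) to pick the bump type. A hypothetical sketch of that pattern — the repository name, label names and the label-to-bump mapping are assumptions, not the actual `ci_tools.py` logic:

```python
# Hypothetical sketch only: decide the bump type from labels on recently
# merged PRs. Not the actual ci_tools.py implementation.
import sys

from github import Github  # PyGithub


def bump_type(token: str) -> str:
    gh = Github(token)
    repo = gh.get_repo("pypeclub/OpenPype")  # assumed repository
    labels = set()
    # Look at the most recently closed PRs against develop (assumed base).
    for pr in repo.get_pulls(state="closed", base="develop")[:30]:
        if pr.merged:
            labels.update(label.name for label in pr.labels)
    if "breaking" in labels:
        return "major"
    if "type: feature" in labels:
        return "minor"
    return "patch"


if __name__ == "__main__":
    print(bump_type(sys.argv[1]))
```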
@@ -43,11 +43,7 @@ jobs:
         uses: heinrichreimer/github-changelog-generator-action@v2.2
         with:
           token: ${{ secrets.ADMIN_TOKEN }}
-          breakingLabel: '**💥 Breaking**'
-          enhancementLabel: '**🚀 Enhancements**'
-          bugsLabel: '**🐛 Bug fixes**'
-          deprecatedLabel: '**⚠️ Deprecations**'
-          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"### 🆕 New features","labels":["feature"]},}'
+          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}}'
           issues: false
           issuesWoLabels: false
           sinceTag: "3.0.0"
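Incidentally, the replaced `addSections` value ends with a trailing comma before the closing brace (`...["feature"]},}`), which strict JSON parsers reject, so the rewrite also makes the value valid JSON. An illustrative way to check such config strings:

```python
import json

# Shortened stand-in for the old addSections value; the trailing comma
# before the closing brace makes it invalid strict JSON.
old = '{"tests":{"prefix":"### Testing","labels":["tests"]},}'
try:
    json.loads(old)
except json.JSONDecodeError as exc:
    print("invalid addSections value:", exc)
```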
@@ -80,6 +76,7 @@ jobs:
           git add .
           git commit -m "[Automated] Bump version"
           tag_name="CI/${{ steps.version.outputs.next_tag }}"
+          echo $tag_name
           git tag -a $tag_name -m "nightly build"

       - name: Push to protected main branch
.github/workflows/release.yml (vendored) — 14 changed lines

@@ -21,7 +21,7 @@ jobs:
         with:
           python-version: 3.7
       - name: Install Python requirements
-        run: pip install gitpython semver
+        run: pip install gitpython semver PyGithub

       - name: 💉 Inject new version into files
         id: version
@@ -39,11 +39,7 @@ jobs:
         uses: heinrichreimer/github-changelog-generator-action@v2.2
         with:
           token: ${{ secrets.ADMIN_TOKEN }}
-          breakingLabel: '**💥 Breaking**'
-          enhancementLabel: '**🚀 Enhancements**'
-          bugsLabel: '**🐛 Bug fixes**'
-          deprecatedLabel: '**⚠️ Deprecations**'
-          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}'
+          addSections: '{"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]},"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]}}'
           issues: false
           issuesWoLabels: false
           sinceTag: "3.0.0"
@@ -85,11 +81,7 @@ jobs:
         uses: heinrichreimer/github-changelog-generator-action@v2.2
         with:
           token: ${{ secrets.ADMIN_TOKEN }}
-          breakingLabel: '**💥 Breaking**'
-          enhancementLabel: '**🚀 Enhancements**'
-          bugsLabel: '**🐛 Bug fixes**'
-          deprecatedLabel: '**⚠️ Deprecations**'
-          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]}}'
+          addSections: '{"documentation":{"prefix":"### 📖 Documentation","labels":["type: documentation"]},"tests":{"prefix":"### ✅ Testing","labels":["tests"]},"feature":{"prefix":"**🆕 New features**", "labels":["type: feature"]},"breaking":{"prefix":"**💥 Breaking**", "labels":["breaking"]},"enhancements":{"prefix":"**🚀 Enhancements**", "labels":["type: enhancement"]},"bugs":{"prefix":"**🐛 Bug fixes**", "labels":["type: bug"]},"deprecated":{"prefix":"**⚠️ Deprecations**", "labels":["depreciated"]}}'
           issues: false
           issuesWoLabels: false
           sinceTag: ${{ steps.version.outputs.last_release }}
.gitmodules (vendored) — 2 changed lines

@@ -9,4 +9,4 @@
 	url = https://github.com/arrow-py/arrow.git
 [submodule "openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api"]
 	path = openpype/modules/default_modules/ftrack/python2_vendor/ftrack-python-api
-	url = https://bitbucket.org/ftrack/ftrack-python-api.git
+	url = https://bitbucket.org/ftrack/ftrack-python-api.git
CHANGELOG.md — 193 changed lines

@@ -1,92 +1,139 @@
 # Changelog

-## [3.4.0-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD)
+## [3.5.0-nightly.4](https://github.com/pypeclub/OpenPype/tree/HEAD)

-[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.3.1...HEAD)
+[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.4.1...HEAD)

 **🆕 New features**

 - Added running configurable disk mapping command before start of OP [\#2091](https://github.com/pypeclub/OpenPype/pull/2091)
 - SFTP provider [\#2073](https://github.com/pypeclub/OpenPype/pull/2073)
 - Maya: Validate setdress top group [\#2068](https://github.com/pypeclub/OpenPype/pull/2068)

 **🚀 Enhancements**

 - Settings UI: Project model refreshing and sorting [\#2104](https://github.com/pypeclub/OpenPype/pull/2104)
 - Added choosing different dirmap mapping if workfile synched locally [\#2088](https://github.com/pypeclub/OpenPype/pull/2088)
 - General: Remove IdleManager module [\#2084](https://github.com/pypeclub/OpenPype/pull/2084)
 - Tray UI: Message box about missing settings defaults [\#2080](https://github.com/pypeclub/OpenPype/pull/2080)
 - Tray UI: Show menu where first click happened [\#2079](https://github.com/pypeclub/OpenPype/pull/2079)
 - Global: add global validators to settings [\#2078](https://github.com/pypeclub/OpenPype/pull/2078)
 - Use CRF for burnin when available [\#2070](https://github.com/pypeclub/OpenPype/pull/2070)
 - Project manager: Filter first item after selection of project [\#2069](https://github.com/pypeclub/OpenPype/pull/2069)
 - Nuke: Adding `still` image family workflow [\#2064](https://github.com/pypeclub/OpenPype/pull/2064)
 - Maya: validate authorized loaded plugins [\#2062](https://github.com/pypeclub/OpenPype/pull/2062)
 - Tools: add support for pyenv on windows [\#2051](https://github.com/pypeclub/OpenPype/pull/2051)
 - SyncServer: Dropbox Provider [\#1979](https://github.com/pypeclub/OpenPype/pull/1979)

 **🐛 Bug fixes**

 - TVPaint: Behavior name of loop also accept repeat [\#2109](https://github.com/pypeclub/OpenPype/pull/2109)
 - Ftrack: Project settings save custom attributes skip unknown attributes [\#2103](https://github.com/pypeclub/OpenPype/pull/2103)
 - Fix broken import in sftp provider [\#2100](https://github.com/pypeclub/OpenPype/pull/2100)
 - Global: Fix docstring on publish plugin extract review [\#2097](https://github.com/pypeclub/OpenPype/pull/2097)
 - General: Cloud mongo ca certificate issue [\#2095](https://github.com/pypeclub/OpenPype/pull/2095)
 - TVPaint: Creator use context from workfile [\#2087](https://github.com/pypeclub/OpenPype/pull/2087)
 - Blender: fix texture missing when publishing blend files [\#2085](https://github.com/pypeclub/OpenPype/pull/2085)
 - General: Startup validations oiio tool path fix on linux [\#2083](https://github.com/pypeclub/OpenPype/pull/2083)
 - Blender: fixed Curves with modifiers in Rigs [\#2081](https://github.com/pypeclub/OpenPype/pull/2081)
 - Fix Sync Queue when project disabled [\#2063](https://github.com/pypeclub/OpenPype/pull/2063)

 **Merged pull requests:**

 - Delivery Action Files Sequence fix [\#2096](https://github.com/pypeclub/OpenPype/pull/2096)
 - Bump pywin32 from 300 to 301 [\#2086](https://github.com/pypeclub/OpenPype/pull/2086)
 - Nuke UI scaling [\#2077](https://github.com/pypeclub/OpenPype/pull/2077)

 ## [3.4.1](https://github.com/pypeclub/OpenPype/tree/3.4.1) (2021-09-23)

 [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.4.1-nightly.1...3.4.1)

 **🆕 New features**

 - Settings: Flag project as deactivated and hide from tools' view [\#2008](https://github.com/pypeclub/OpenPype/pull/2008)

 **🚀 Enhancements**

 - General: Startup validations [\#2054](https://github.com/pypeclub/OpenPype/pull/2054)
 - Nuke: proxy mode validator [\#2052](https://github.com/pypeclub/OpenPype/pull/2052)
 - Ftrack: Removed ftrack interface [\#2049](https://github.com/pypeclub/OpenPype/pull/2049)
 - Settings UI: Deffered set value on entity [\#2044](https://github.com/pypeclub/OpenPype/pull/2044)
 - Loader: Families filtering [\#2043](https://github.com/pypeclub/OpenPype/pull/2043)
 - Settings UI: Project view enhancements [\#2042](https://github.com/pypeclub/OpenPype/pull/2042)
 - Added possibility to configure of synchronization of workfile version… [\#2041](https://github.com/pypeclub/OpenPype/pull/2041)
 - Settings for Nuke IncrementScriptVersion [\#2039](https://github.com/pypeclub/OpenPype/pull/2039)
 - Loader & Library loader: Use tools from OpenPype [\#2038](https://github.com/pypeclub/OpenPype/pull/2038)
 - Adding predefined project folders creation in PM [\#2030](https://github.com/pypeclub/OpenPype/pull/2030)
 - WebserverModule: Removed interface of webserver module [\#2028](https://github.com/pypeclub/OpenPype/pull/2028)
 - TimersManager: Removed interface of timers manager [\#2024](https://github.com/pypeclub/OpenPype/pull/2024)
 - Feature Maya import asset from scene inventory [\#2018](https://github.com/pypeclub/OpenPype/pull/2018)

 **🐛 Bug fixes**

 - Timers manger: Typo fix [\#2058](https://github.com/pypeclub/OpenPype/pull/2058)
 - Hiero: Editorial fixes [\#2057](https://github.com/pypeclub/OpenPype/pull/2057)
 - Differentiate jpg sequences from thumbnail [\#2056](https://github.com/pypeclub/OpenPype/pull/2056)
 - FFmpeg: Split command to list does not work [\#2046](https://github.com/pypeclub/OpenPype/pull/2046)
 - Removed shell flag in subprocess call [\#2045](https://github.com/pypeclub/OpenPype/pull/2045)
 - Hiero: Fix "none" named tags [\#2033](https://github.com/pypeclub/OpenPype/pull/2033)

 **Merged pull requests:**

 - Bump prismjs from 1.24.0 to 1.25.0 in /website [\#2050](https://github.com/pypeclub/OpenPype/pull/2050)

 ## [3.4.0](https://github.com/pypeclub/OpenPype/tree/3.4.0) (2021-09-17)

 [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.4.0-nightly.6...3.4.0)

 ### 📖 Documentation

 - Documentation: Ftrack launch argsuments update [\#2014](https://github.com/pypeclub/OpenPype/pull/2014)

 **🆕 New features**

 - Nuke: Compatibility with Nuke 13 [\#2003](https://github.com/pypeclub/OpenPype/pull/2003)

 **🚀 Enhancements**

 - General: Task types in profiles [\#2036](https://github.com/pypeclub/OpenPype/pull/2036)
 - Console interpreter: Handle invalid sizes on initialization [\#2022](https://github.com/pypeclub/OpenPype/pull/2022)
 - Ftrack: Show OpenPype versions in event server status [\#2019](https://github.com/pypeclub/OpenPype/pull/2019)
 - General: Staging icon [\#2017](https://github.com/pypeclub/OpenPype/pull/2017)
 - Ftrack: Sync to avalon actions have jobs [\#2015](https://github.com/pypeclub/OpenPype/pull/2015)
 - Modules: Connect method is not required [\#2009](https://github.com/pypeclub/OpenPype/pull/2009)
 - Settings UI: Number with configurable steps [\#2001](https://github.com/pypeclub/OpenPype/pull/2001)
 - Moving project folder structure creation out of ftrack module \#1989 [\#1996](https://github.com/pypeclub/OpenPype/pull/1996)
 - Configurable items for providers without Settings [\#1987](https://github.com/pypeclub/OpenPype/pull/1987)
 - Global: Example addons [\#1986](https://github.com/pypeclub/OpenPype/pull/1986)
 - Standalone Publisher: Extract harmony zip handle workfile template [\#1982](https://github.com/pypeclub/OpenPype/pull/1982)
 - Settings UI: Number sliders [\#1978](https://github.com/pypeclub/OpenPype/pull/1978)

 **🐛 Bug fixes**

 - Workfiles tool: Task selection [\#2040](https://github.com/pypeclub/OpenPype/pull/2040)
 - Ftrack: Delete old versions missing settings key [\#2037](https://github.com/pypeclub/OpenPype/pull/2037)
 - Nuke: typo on a button [\#2034](https://github.com/pypeclub/OpenPype/pull/2034)
 - FFmpeg: Subprocess arguments as list [\#2032](https://github.com/pypeclub/OpenPype/pull/2032)
 - General: Fix Python 2 breaking line [\#2016](https://github.com/pypeclub/OpenPype/pull/2016)
 - Bugfix/webpublisher task type [\#2006](https://github.com/pypeclub/OpenPype/pull/2006)
 - Nuke thumbnails generated from middle of the sequence [\#1992](https://github.com/pypeclub/OpenPype/pull/1992)
 - Nuke: last version from path gets correct version [\#1990](https://github.com/pypeclub/OpenPype/pull/1990)
 - nuke, resolve, hiero: precollector order lest then 0.5 [\#1984](https://github.com/pypeclub/OpenPype/pull/1984)
 - Last workfile with multiple work templates [\#1981](https://github.com/pypeclub/OpenPype/pull/1981)
 - Collectors order [\#1977](https://github.com/pypeclub/OpenPype/pull/1977)
 - Stop timer was within validator order range. [\#1975](https://github.com/pypeclub/OpenPype/pull/1975)
 - Ftrack: arrow submodule has https url source [\#1974](https://github.com/pypeclub/OpenPype/pull/1974)
 - Ftrack: Fix hosts attribute in collect ftrack username [\#1972](https://github.com/pypeclub/OpenPype/pull/1972)
 - Removed deprecated submodules [\#1967](https://github.com/pypeclub/OpenPype/pull/1967)
 - Launcher: Fix crashes on action click [\#1964](https://github.com/pypeclub/OpenPype/pull/1964)
 - Settings: Minor fixes in UI and missing default values [\#1963](https://github.com/pypeclub/OpenPype/pull/1963)
 - Blender: Toggle system console works on windows [\#1962](https://github.com/pypeclub/OpenPype/pull/1962)
 - Resolve path when adding to zip [\#1960](https://github.com/pypeclub/OpenPype/pull/1960)
 - Bump url-parse from 1.5.1 to 1.5.3 in /website [\#1958](https://github.com/pypeclub/OpenPype/pull/1958)
 - Global: Avalon Host name collector [\#1949](https://github.com/pypeclub/OpenPype/pull/1949)
 - Global: Define hosts in CollectSceneVersion [\#1948](https://github.com/pypeclub/OpenPype/pull/1948)
 - Maya: Add Xgen family support [\#1947](https://github.com/pypeclub/OpenPype/pull/1947)
 - Add face sets to exported alembics [\#1942](https://github.com/pypeclub/OpenPype/pull/1942)
 - Bump path-parse from 1.0.6 to 1.0.7 in /website [\#1933](https://github.com/pypeclub/OpenPype/pull/1933)
 - \#1894 - adds host to template\_name\_profiles for filtering [\#1915](https://github.com/pypeclub/OpenPype/pull/1915)
 - Environments: Tool environments in alphabetical order [\#1910](https://github.com/pypeclub/OpenPype/pull/1910)
 - Disregard publishing time. [\#1888](https://github.com/pypeclub/OpenPype/pull/1888)
 - Feature/webpublisher backend [\#1876](https://github.com/pypeclub/OpenPype/pull/1876)
 - Dynamic modules [\#1872](https://github.com/pypeclub/OpenPype/pull/1872)
 - Houdini: add Camera, Point Cache, Composite, Redshift ROP and VDB Cache support [\#1821](https://github.com/pypeclub/OpenPype/pull/1821)
 - Deadline: Houdini plugins in different hierarchy [\#1970](https://github.com/pypeclub/OpenPype/pull/1970)

 ## [3.3.1](https://github.com/pypeclub/OpenPype/tree/3.3.1) (2021-08-20)

 [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.1-nightly.1...3.3.1)

 **Merged pull requests:**

 - TVPaint: Fixed rendered frame indexes [\#1946](https://github.com/pypeclub/OpenPype/pull/1946)
 - Maya: Menu actions fix [\#1945](https://github.com/pypeclub/OpenPype/pull/1945)
 - standalone: editorial shared object problem [\#1941](https://github.com/pypeclub/OpenPype/pull/1941)
 - Bugfix nuke deadline app name [\#1928](https://github.com/pypeclub/OpenPype/pull/1928)

 ## [3.3.0](https://github.com/pypeclub/OpenPype/tree/3.3.0) (2021-08-17)

 [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.3.0-nightly.11...3.3.0)

 **Merged pull requests:**

 - Python console interpreter [\#1940](https://github.com/pypeclub/OpenPype/pull/1940)
 - Fix - make AE workfile publish to Ftrack configurable [\#1937](https://github.com/pypeclub/OpenPype/pull/1937)
 - Fix - ftrack family was added incorrectly in some cases [\#1935](https://github.com/pypeclub/OpenPype/pull/1935)
 - Settings UI: Breadcrumbs in settings [\#1932](https://github.com/pypeclub/OpenPype/pull/1932)
 - Fix - Deadline publish on Linux started Tray instead of headless publishing [\#1930](https://github.com/pypeclub/OpenPype/pull/1930)
 - Maya: Validate Model Name - repair accident deletion in settings defaults [\#1929](https://github.com/pypeclub/OpenPype/pull/1929)
 - Global: Updated logos and Default settings [\#1927](https://github.com/pypeclub/OpenPype/pull/1927)
 - Nuke: submit to farm failed due `ftrack` family remove [\#1926](https://github.com/pypeclub/OpenPype/pull/1926)
 - Check for missing ✨ Python when using `pyenv` [\#1925](https://github.com/pypeclub/OpenPype/pull/1925)
 - Maya: Scene patching 🩹on submission to Deadline [\#1923](https://github.com/pypeclub/OpenPype/pull/1923)
 - Fix - validate takes repre\["files"\] as list all the time [\#1922](https://github.com/pypeclub/OpenPype/pull/1922)
 - Settings: Default values for enum [\#1920](https://github.com/pypeclub/OpenPype/pull/1920)
 - Settings UI: Modifiable dict view enhance [\#1919](https://github.com/pypeclub/OpenPype/pull/1919)
 - standalone: validator asset parents [\#1917](https://github.com/pypeclub/OpenPype/pull/1917)
 - Nuke: update video file crassing [\#1916](https://github.com/pypeclub/OpenPype/pull/1916)
 - Fix - texture validators for workfiles triggers only for textures workfiles [\#1914](https://github.com/pypeclub/OpenPype/pull/1914)
 - submodules: avalon-core update [\#1911](https://github.com/pypeclub/OpenPype/pull/1911)
 - Settings UI: List order works as expected [\#1906](https://github.com/pypeclub/OpenPype/pull/1906)
 - Add support for multiple Deadline ☠️➖ servers [\#1905](https://github.com/pypeclub/OpenPype/pull/1905)
 - Hiero: loaded clip was not set colorspace from version data [\#1904](https://github.com/pypeclub/OpenPype/pull/1904)
 - Pyblish UI: Fix collecting stage processing [\#1903](https://github.com/pypeclub/OpenPype/pull/1903)
 - Burnins: Use input's bitrate in h624 [\#1902](https://github.com/pypeclub/OpenPype/pull/1902)
 - Feature AE local render [\#1901](https://github.com/pypeclub/OpenPype/pull/1901)
 - Ftrack: Where I run action enhancement [\#1900](https://github.com/pypeclub/OpenPype/pull/1900)
 - Ftrack: Private project server actions [\#1899](https://github.com/pypeclub/OpenPype/pull/1899)
 - Support nested studio plugins paths. [\#1898](https://github.com/pypeclub/OpenPype/pull/1898)
 - Bug: fixed python detection [\#1893](https://github.com/pypeclub/OpenPype/pull/1893)
 - Settings: global validators with options [\#1892](https://github.com/pypeclub/OpenPype/pull/1892)
 - Settings: Conditional dict enum positioning [\#1891](https://github.com/pypeclub/OpenPype/pull/1891)
 - global: integrate name missing default template [\#1890](https://github.com/pypeclub/OpenPype/pull/1890)
 - publisher: editorial plugins fixes [\#1889](https://github.com/pypeclub/OpenPype/pull/1889)
 - Expose stop timer through rest api. [\#1886](https://github.com/pypeclub/OpenPype/pull/1886)
 - TVPaint: Increment workfile [\#1885](https://github.com/pypeclub/OpenPype/pull/1885)
 - Allow Multiple Notes to run on tasks. [\#1882](https://github.com/pypeclub/OpenPype/pull/1882)
 - Normalize path returned from Workfiles. [\#1880](https://github.com/pypeclub/OpenPype/pull/1880)
 - Prepare for pyside2 [\#1869](https://github.com/pypeclub/OpenPype/pull/1869)
 - Filter hosts in settings host-enum [\#1868](https://github.com/pypeclub/OpenPype/pull/1868)
 - Local actions with process identifier [\#1867](https://github.com/pypeclub/OpenPype/pull/1867)
 - Workfile tool start at host launch support [\#1865](https://github.com/pypeclub/OpenPype/pull/1865)
 - Maya: add support for `RedshiftNormalMap` node, fix `tx` linear space 🚀 [\#1863](https://github.com/pypeclub/OpenPype/pull/1863)
 - Workfiles tool event arguments fix [\#1862](https://github.com/pypeclub/OpenPype/pull/1862)
 - Maya: support for configurable `dirmap` 🗺️ [\#1859](https://github.com/pypeclub/OpenPype/pull/1859)
 - Maya: don't add reference members as connections to the container set 📦 [\#1855](https://github.com/pypeclub/OpenPype/pull/1855)
 - Settings list can use template or schema as object type [\#1815](https://github.com/pypeclub/OpenPype/pull/1815)

 ## [3.2.0](https://github.com/pypeclub/OpenPype/tree/3.2.0) (2021-07-13)

 [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.2.0-nightly.7...3.2.0)
igniter/__init__.py (filename inferred; file header lost in extraction)

@@ -12,6 +12,9 @@ from .version import __version__ as version

 def open_dialog():
     """Show Igniter dialog."""
+    if os.getenv("OPENPYPE_HEADLESS_MODE"):
+        print("!!! Can't open dialog in headless mode. Exiting.")
+        sys.exit(1)
     from Qt import QtWidgets, QtCore
     from .install_dialog import InstallDialog
@@ -28,8 +31,31 @@ def open_dialog():
     return d.result()


+def open_update_window(openpype_version):
+    """Open update window."""
+    if os.getenv("OPENPYPE_HEADLESS_MODE"):
+        print("!!! Can't open dialog in headless mode. Exiting.")
+        sys.exit(1)
+    from Qt import QtWidgets, QtCore
+    from .update_window import UpdateWindow
+
+    scale_attr = getattr(QtCore.Qt, "AA_EnableHighDpiScaling", None)
+    if scale_attr is not None:
+        QtWidgets.QApplication.setAttribute(scale_attr)
+
+    app = QtWidgets.QApplication(sys.argv)
+
+    d = UpdateWindow(version=openpype_version)
+    d.open()
+
+    app.exec_()
+    version_path = d.get_version_path()
+    return version_path
+
+
 __all__ = [
     "BootstrapRepos",
     "open_dialog",
+    "open_update_window",
    "version"
 ]
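A sketch of how a caller might use the new helper — hypothetical caller code, not part of this diff (in the repository the bootstrap logic in `start.py` presumably drives it):

```python
# Hypothetical caller: show the update window for a version picked earlier.
# Note the helper itself sys.exit()s in headless mode, so a caller that wants
# a headless fallback must check the env var first.
import os

from igniter import open_update_window

if not os.getenv("OPENPYPE_HEADLESS_MODE"):
    # openpype_version is an OpenPypeVersion chosen by earlier bootstrap code.
    version_path = open_update_window(openpype_version)
    if version_path is None:
        print("Update failed or was cancelled.")
```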
igniter/bootstrap_repos.py (filename inferred; file header lost in extraction)

@@ -9,6 +9,7 @@ import sys
 import tempfile
 from pathlib import Path
 from typing import Union, Callable, List, Tuple
+import hashlib

 from zipfile import ZipFile, BadZipFile

@@ -28,6 +29,25 @@ LOG_WARNING = 1
 LOG_ERROR = 3


+def sha256sum(filename):
+    """Calculate sha256 for content of the file.
+
+    Args:
+        filename (str): Path to file.
+
+    Returns:
+        str: hex encoded sha256
+
+    """
+    h = hashlib.sha256()
+    b = bytearray(128 * 1024)
+    mv = memoryview(b)
+    with open(filename, 'rb', buffering=0) as f:
+        for n in iter(lambda: f.readinto(mv), 0):
+            h.update(mv[:n])
+    return h.hexdigest()
+
+
 class OpenPypeVersion(semver.VersionInfo):
     """Class for storing information about OpenPype version.
@@ -261,7 +281,8 @@ class BootstrapRepos:
         self.live_repo_dir = Path(Path(__file__).parent / ".." / "repos")

     @staticmethod
-    def get_version_path_from_list(version: str, version_list: list) -> Path:
+    def get_version_path_from_list(
+            version: str, version_list: list) -> Union[Path, None]:
         """Get path for specific version in list of OpenPype versions.

         Args:

@@ -275,6 +296,7 @@ class BootstrapRepos:
         for v in version_list:
             if str(v) == version:
                 return v.path
+        return None

     @staticmethod
     def get_local_live_version() -> str:

@@ -487,6 +509,7 @@ class BootstrapRepos:
         openpype_root = openpype_path.resolve()
         # generate list of filtered paths
         dir_filter = [openpype_root / f for f in self.openpype_filter]
+        checksums = []

         file: Path
         for file in openpype_list:
@@ -508,12 +531,119 @@ class BootstrapRepos:
                 processed_path = file
             self._print(f"- processing {processed_path}")

-            zip_file.write(file, file.resolve().relative_to(openpype_root))
+            checksums.append(
+                (
+                    sha256sum(file.as_posix()),
+                    file.resolve().relative_to(openpype_root)
+                )
+            )
+            zip_file.write(
+                file, file.resolve().relative_to(openpype_root))

+        checksums_str = ""
+        for c in checksums:
+            checksums_str += "{}:{}\n".format(c[0], c[1])
+        zip_file.writestr("checksums", checksums_str)
         # test if zip is ok
         zip_file.testzip()
         self._progress_callback(100)

+    def validate_openpype_version(self, path: Path) -> tuple:
+        """Validate version directory or zip file.
+
+        This will load `checksums` file if present, calculate checksums
+        of existing files in given path and compare. It will also compare
+        lists of files together for missing files.
+
+        Args:
+            path (Path): Path to OpenPype version to validate.
+
+        Returns:
+            tuple(bool, str): with version validity as first item
+                and string with reason as second.
+
+        """
+        if not path.exists():
+            return False, "Path doesn't exist"
+
+        if path.is_file():
+            return self._validate_zip(path)
+        return self._validate_dir(path)
+
+    @staticmethod
+    def _validate_zip(path: Path) -> tuple:
+        """Validate content of zip file."""
+        with ZipFile(path, "r") as zip_file:
+            # read checksums
+            try:
+                checksums_data = str(zip_file.read("checksums"))
+            except IOError:
+                # FIXME: This should be set to False sometimes in the future
+                return True, "Cannot read checksums for archive."
+
+            # split it to the list of tuples
+            checksums = [
+                tuple(line.split(":"))
+                for line in checksums_data.split("\n") if line
+            ]
+
+            # calculate and compare checksums in the zip file
+            for file in checksums:
+                h = hashlib.sha256()
+                try:
+                    h.update(zip_file.read(file[1]))
+                except FileNotFoundError:
+                    return False, f"Missing file [ {file[1]} ]"
+                if h.hexdigest() != file[0]:
+                    return False, f"Invalid checksum on {file[1]}"
+
+            # get list of files in zip minus `checksums` file itself
+            # and turn in to set to compare against list of files
+            # from checksum file. If difference exists, something is
+            # wrong
+            files_in_zip = zip_file.namelist()
+            files_in_zip.remove("checksums")
+            files_in_zip = set(files_in_zip)
+            files_in_checksum = set([file[1] for file in checksums])
+            diff = files_in_zip.difference(files_in_checksum)
+            if diff:
+                return False, f"Missing files {diff}"
+
+        return True, "All ok"
+
+    @staticmethod
+    def _validate_dir(path: Path) -> tuple:
+        checksums_file = Path(path / "checksums")
+        if not checksums_file.exists():
+            # FIXME: This should be set to False sometimes in the future
+            return True, "Cannot read checksums for archive."
+        checksums_data = checksums_file.read_text()
+        checksums = [
+            tuple(line.split(":"))
+            for line in checksums_data.split("\n") if line
+        ]
+        files_in_dir = [
+            file.relative_to(path).as_posix()
+            for file in path.iterdir() if file.is_file()
+        ]
+        files_in_dir.remove("checksums")
+        files_in_dir = set(files_in_dir)
+        files_in_checksum = set([file[1] for file in checksums])
+
+        for file in checksums:
+            try:
+                current = sha256sum((path / file[1]).as_posix())
+            except FileNotFoundError:
+                return False, f"Missing file [ {file[1]} ]"
+
+            if file[0] != current:
+                return False, f"Invalid checksum on {file[1]}"
+        diff = files_in_dir.difference(files_in_checksum)
+        if diff:
+            return False, f"Missing files {diff}"
+
+        return True, "All ok"
+
     @staticmethod
     def add_paths_from_archive(archive: Path) -> None:
         """Add first-level directory and 'repos' as paths to :mod:`sys.path`.
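The `checksums` manifest written above is one `<sha256 hex>:<relative posix path>` pair per line; `validate_openpype_version()` re-hashes every listed file and diffs the file lists both ways. A small sketch tying the two together — the paths and the standalone manifest writer are assumptions for illustration, not code from this diff:

```python
# Sketch: build a manifest in the same format as the zip-build code above,
# then validate the directory with BootstrapRepos.
from pathlib import Path

from igniter.bootstrap_repos import BootstrapRepos, sha256sum


def write_checksums(root: Path) -> None:
    """Write one '<sha256>:<relative posix path>' pair per line."""
    lines = []
    for f in sorted(root.rglob("*")):
        if f.is_file() and f.name != "checksums":
            rel = f.relative_to(root).as_posix()
            lines.append("{}:{}".format(sha256sum(f.as_posix()), rel))
    (root / "checksums").write_text("\n".join(lines) + "\n")


version_dir = Path.home() / "openpype-v3.5.0"  # assumed install location
write_checksums(version_dir)
valid, reason = BootstrapRepos().validate_openpype_version(version_dir)
print(valid, reason)  # e.g. (True, "All ok") when nothing was tampered with
```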
@@ -837,6 +967,7 @@ class BootstrapRepos:

         # test if destination directory already exist, if so lets delete it.
         if destination.exists() and force:
+            self._print("removing existing directory")
             try:
                 shutil.rmtree(destination)
             except OSError as e:

@@ -846,6 +977,7 @@ class BootstrapRepos:
                 raise OpenPypeVersionIOError(
                     f"cannot remove existing {destination}") from e
         elif destination.exists() and not force:
+            self._print("destination directory already exists")
             raise OpenPypeVersionExists(f"{destination} already exist.")
         else:
             # create destination parent directories even if they don't exist.

@@ -855,6 +987,7 @@ class BootstrapRepos:
         if openpype_version.path.is_dir():
             # create zip inside temporary directory.
             self._print("Creating zip from directory ...")
+            self._progress_callback(0)
             with tempfile.TemporaryDirectory() as temp_dir:
                 temp_zip = \
                     Path(temp_dir) / f"openpype-v{openpype_version}.zip"

@@ -880,13 +1013,16 @@ class BootstrapRepos:
             raise OpenPypeVersionInvalid("Invalid file format")

         if not self.is_inside_user_data(openpype_version.path):
+            self._progress_callback(35)
             openpype_version.path = self._copy_zip(
                 openpype_version.path, destination)

         # extract zip there
         self._print("extracting zip to destination ...")
         with ZipFile(openpype_version.path, "r") as zip_ref:
+            self._progress_callback(75)
             zip_ref.extractall(destination)
+        self._progress_callback(100)

         return destination
igniter/install_dialog.py (filename inferred; file header lost in extraction)

@@ -14,21 +14,13 @@ from .tools import (
     validate_mongo_connection,
     get_openpype_path_from_db
 )

+from .nice_progress_bar import NiceProgressBar
 from .user_settings import OpenPypeSecureRegistry
+from .tools import load_stylesheet
 from .version import __version__


-def load_stylesheet():
-    stylesheet_path = os.path.join(
-        os.path.dirname(__file__),
-        "stylesheet.css"
-    )
-    with open(stylesheet_path, "r") as file_stream:
-        stylesheet = file_stream.read()
-
-    return stylesheet
-
-
 class ButtonWithOptions(QtWidgets.QFrame):
     option_clicked = QtCore.Signal(str)

@@ -91,25 +83,6 @@ class ButtonWithOptions(QtWidgets.QFrame):
         self.option_clicked.emit(self._default_value)


-class NiceProgressBar(QtWidgets.QProgressBar):
-    def __init__(self, parent=None):
-        super(NiceProgressBar, self).__init__(parent)
-        self._real_value = 0
-
-    def setValue(self, value):
-        self._real_value = value
-        if value != 0 and value < 11:
-            value = 11
-
-        super(NiceProgressBar, self).setValue(value)
-
-    def value(self):
-        return self._real_value
-
-    def text(self):
-        return "{} %".format(self._real_value)
-
-
 class ConsoleWidget(QtWidgets.QWidget):
     def __init__(self, parent=None):
         super(ConsoleWidget, self).__init__(parent)
igniter/nice_progress_bar.py (new file) — 20 lines

@@ -0,0 +1,20 @@
+from Qt import QtCore, QtGui, QtWidgets  # noqa
+
+
+class NiceProgressBar(QtWidgets.QProgressBar):
+    def __init__(self, parent=None):
+        super(NiceProgressBar, self).__init__(parent)
+        self._real_value = 0
+
+    def setValue(self, value):
+        self._real_value = value
+        if value != 0 and value < 11:
+            value = 11
+
+        super(NiceProgressBar, self).setValue(value)
+
+    def value(self):
+        return self._real_value
+
+    def text(self):
+        return "{} %".format(self._real_value)
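A minimal usage sketch of the extracted widget: the real value is kept and reported while the painted bar never drops below 11 once it is non-zero (presumably so the styled chunk stays visible at low percentages — an assumption about intent):

```python
from Qt import QtWidgets

from igniter.nice_progress_bar import NiceProgressBar

app = QtWidgets.QApplication([])  # widgets need an application instance

bar = NiceProgressBar()
bar.setValue(3)
assert bar.value() == 3      # the real value is preserved and reported,
assert bar.text() == "3 %"   # while the painted position is clamped to 11
```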
igniter/tools.py — 160 changed lines

@@ -1,18 +1,12 @@
 # -*- coding: utf-8 -*-
-"""Tools used in **Igniter** GUI.
-
-Functions ``compose_url()`` and ``decompose_url()`` are the same as in
-``openpype.lib`` and they are here to avoid importing OpenPype module before its
-version is decided.
-
-"""
-import sys
+"""Tools used in **Igniter** GUI."""
 import os
-from typing import Dict, Union
+from typing import Union
 from urllib.parse import urlparse, parse_qs
 from pathlib import Path
 import platform

+import certifi
 from pymongo import MongoClient
 from pymongo.errors import (
     ServerSelectionTimeoutError,
@@ -22,89 +16,32 @@ from pymongo.errors import (
 )


-def decompose_url(url: str) -> Dict:
-    """Decompose mongodb url to its separate components.
-
-    Args:
-        url (str): Mongodb url.
-
-    Returns:
-        dict: Dictionary of components.
-
-    """
-    components = {
-        "scheme": None,
-        "host": None,
-        "port": None,
-        "username": None,
-        "password": None,
-        "auth_db": None
-    }
-
-    result = urlparse(url)
-    if result.scheme is None:
-        _url = "mongodb://{}".format(url)
-        result = urlparse(_url)
-
-    components["scheme"] = result.scheme
-    components["host"] = result.hostname
-    try:
-        components["port"] = result.port
-    except ValueError:
-        raise RuntimeError("invalid port specified")
-    components["username"] = result.username
-    components["password"] = result.password
-
-    try:
-        components["auth_db"] = parse_qs(result.query)['authSource'][0]
-    except KeyError:
-        # no auth db provided, mongo will use the one we are connecting to
-        pass
-
-    return components
-
-
-def compose_url(scheme: str = None,
-                host: str = None,
-                username: str = None,
-                password: str = None,
-                port: int = None,
-                auth_db: str = None) -> str:
-    """Compose mongodb url from its individual components.
-
-    Args:
-        scheme (str, optional):
-        host (str, optional):
-        username (str, optional):
-        password (str, optional):
-        port (str, optional):
-        auth_db (str, optional):
-
-    Returns:
-        str: mongodb url
-
-    """
-
-    url = "{scheme}://"
-
-    if username and password:
-        url += "{username}:{password}@"
-
-    url += "{host}"
-    if port:
-        url += ":{port}"
-
-    if auth_db:
-        url += "?authSource={auth_db}"
-
-    return url.format(**{
-        "scheme": scheme,
-        "host": host,
-        "username": username,
-        "password": password,
-        "port": port,
-        "auth_db": auth_db
-    })
+def should_add_certificate_path_to_mongo_url(mongo_url):
+    """Check if should add ca certificate to mongo url.
+
+    Since 30.9.2021 cloud mongo requires newer certificates that are not
+    available on most of workstation. This adds path to certifi certificate
+    which is valid for it. To add the certificate path url must have scheme
+    'mongodb+srv' or has 'ssl=true' or 'tls=true' in url query.
+    """
+    parsed = urlparse(mongo_url)
+    query = parse_qs(parsed.query)
+    lowered_query_keys = set(key.lower() for key in query.keys())
+    add_certificate = False
+    # Check if url 'ssl' or 'tls' are set to 'true'
+    for key in ("ssl", "tls"):
+        if key in query and "true" in query["ssl"]:
+            add_certificate = True
+            break
+
+    # Check if url contains 'mongodb+srv'
+    if not add_certificate and parsed.scheme == "mongodb+srv":
+        add_certificate = True
+
+    # Check if url does already contain certificate path
+    if add_certificate and "tlscafile" in lowered_query_keys:
+        add_certificate = False
+    return add_certificate
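A few illustrative calls showing what the new predicate returns (assumed inputs). Note that the `ssl`/`tls` loop always indexes `query["ssl"]`, so a URL carrying `tls=true` without `ssl` would raise a `KeyError` — apparently an oversight in the added code:

```python
from igniter.tools import should_add_certificate_path_to_mongo_url

# Atlas-style SRV urls always get the certifi CA bundle:
assert should_add_certificate_path_to_mongo_url(
    "mongodb+srv://user:pw@cluster0.example.mongodb.net/") is True
# Plain urls only when ssl is requested in the query:
assert should_add_certificate_path_to_mongo_url(
    "mongodb://localhost:27017?ssl=true") is True
assert should_add_certificate_path_to_mongo_url(
    "mongodb://localhost:27017") is False
# A caller that already pins a CA file is left alone:
assert should_add_certificate_path_to_mongo_url(
    "mongodb+srv://host/?tlsCAFile=/tmp/ca.pem") is False
```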
@@ -121,12 +58,18 @@ def validate_mongo_connection(cnx: str) -> (bool, str):
     if parsed.scheme not in ["mongodb", "mongodb+srv"]:
         return False, "Not mongodb schema"

+    kwargs = {
+        "serverSelectionTimeoutMS": 2000
+    }
+    # Add certificate path if should be required
+    if should_add_certificate_path_to_mongo_url(cnx):
+        kwargs["ssl_ca_certs"] = certifi.where()
+
     try:
-        client = MongoClient(
-            cnx,
-            serverSelectionTimeoutMS=2000
-        )
+        client = MongoClient(cnx, **kwargs)
         client.server_info()
+        with client.start_session():
+            pass
         client.close()
     except ServerSelectionTimeoutError as e:
         return False, f"Cannot connect to server {cnx} - {e}"
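The same connect-with-certifi pattern appears again in `get_openpype_global_settings()` below; factored out, it amounts to the following sketch (using the pymongo 3.x `ssl_ca_certs` kwarg that the diff uses — the helper name is an invention for illustration):

```python
import certifi
from pymongo import MongoClient

from igniter.tools import should_add_certificate_path_to_mongo_url


def create_mongo_client(url: str, timeout_ms: int = 2000) -> MongoClient:
    """Sketch of the shared connect pattern, not a function from this diff."""
    kwargs = {"serverSelectionTimeoutMS": timeout_ms}
    if should_add_certificate_path_to_mongo_url(url):
        # Point pymongo at certifi's CA bundle for cloud/Atlas-style clusters.
        kwargs["ssl_ca_certs"] = certifi.where()
    return MongoClient(url, **kwargs)
```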
@@ -152,10 +95,7 @@ def validate_mongo_string(mongo: str) -> (bool, str):
     """
     if not mongo:
         return True, "empty string"
-    parsed = urlparse(mongo)
-    if parsed.scheme in ["mongodb", "mongodb+srv"]:
-        return validate_mongo_connection(mongo)
-    return False, "not valid mongodb schema"
+    return validate_mongo_connection(mongo)


 def validate_path_string(path: str) -> (bool, str):

@@ -195,21 +135,13 @@ def get_openpype_global_settings(url: str) -> dict:
     Returns:
         dict: With settings data. Empty dictionary is returned if not found.
     """
-    try:
-        components = decompose_url(url)
-    except RuntimeError:
-        return {}
-    mongo_kwargs = {
-        "host": compose_url(**components),
-        "serverSelectionTimeoutMS": 2000
-    }
-    port = components.get("port")
-    if port is not None:
-        mongo_kwargs["port"] = int(port)
+    kwargs = {}
+    if should_add_certificate_path_to_mongo_url(url):
+        kwargs["ssl_ca_certs"] = certifi.where()

     try:
         # Create mongo connection
-        client = MongoClient(**mongo_kwargs)
+        client = MongoClient(url, **kwargs)
         # Access settings collection
         col = client["openpype"]["settings"]
         # Query global settings

@@ -248,3 +180,15 @@ def get_openpype_path_from_db(url: str) -> Union[str, None]:
         if os.path.exists(path):
             return path
     return None
+
+
+def load_stylesheet() -> str:
+    """Load css style sheet.
+
+    Returns:
+        str: content of the stylesheet
+
+    """
+    stylesheet_path = Path(__file__).parent.resolve() / "stylesheet.css"
+
+    return stylesheet_path.read_text()
igniter/update_thread.py (new file) — 61 lines

@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+"""Working thread for update."""
+from Qt.QtCore import QThread, Signal, QObject  # noqa
+
+from .bootstrap_repos import (
+    BootstrapRepos,
+    OpenPypeVersion
+)
+
+
+class UpdateThread(QThread):
+    """Install Worker thread.
+
+    This class takes care of finding OpenPype version on user entered path
+    (or loading this path from database). If nothing is entered by user,
+    OpenPype will create its zip files from repositories that comes with it.
+
+    If path contains plain repositories, they are zipped and installed to
+    user data dir.
+
+    """
+    progress = Signal(int)
+    message = Signal((str, bool))
+
+    def __init__(self, parent=None):
+        self._result = None
+        self._openpype_version = None
+        QThread.__init__(self, parent)
+
+    def set_version(self, openpype_version: OpenPypeVersion):
+        self._openpype_version = openpype_version
+
+    def result(self):
+        """Result of finished installation."""
+        return self._result
+
+    def _set_result(self, value):
+        if self._result is not None:
+            raise AssertionError("BUG: Result was set more than once!")
+        self._result = value
+
+    def run(self):
+        """Thread entry point.
+
+        Using :class:`BootstrapRepos` to either install OpenPype as zip files
+        or copy them from location specified by user or retrieved from
+        database.
+        """
+        bs = BootstrapRepos(
+            progress_callback=self.set_progress, message=self.message)
+        version_path = bs.install_version(self._openpype_version)
+        self._set_result(version_path)
+
+    def set_progress(self, progress: int) -> None:
+        """Helper to set progress bar.
+
+        Args:
+            progress (int): Progress in percents.
+
+        """
+        self.progress.emit(progress)
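`UpdateWindow` (next file) is the only consumer in this diff; reduced to essentials, driving the thread looks like this sketch (the widget and version variables are assumed to exist in the caller):

```python
# Sketch of the wiring UpdateWindow._run_update() performs below.
thread = UpdateThread(parent=some_widget)       # some_widget: any QWidget
thread.set_version(openpype_version)            # an OpenPypeVersion instance
thread.progress.connect(progress_bar.setValue)  # int percent -> progress bar
thread.finished.connect(
    lambda: print("installed to:", thread.result()))
thread.start()
```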
igniter/update_window.py (new file) — 136 lines

@@ -0,0 +1,136 @@
+# -*- coding: utf-8 -*-
+"""Progress window to show when OpenPype is updating/installing locally."""
+import os
+from .update_thread import UpdateThread
+from Qt import QtCore, QtGui, QtWidgets  # noqa
+from .bootstrap_repos import OpenPypeVersion
+from .nice_progress_bar import NiceProgressBar
+from .tools import load_stylesheet
+
+
+class UpdateWindow(QtWidgets.QDialog):
+    """OpenPype update window."""
+
+    _width = 500
+    _height = 100
+
+    def __init__(self, version: OpenPypeVersion, parent=None):
+        super(UpdateWindow, self).__init__(parent)
+        self._openpype_version = version
+        self._result_version_path = None
+
+        self.setWindowTitle(
+            f"OpenPype is updating ..."
+        )
+        self.setModal(True)
+        self.setWindowFlags(
+            QtCore.Qt.WindowMinimizeButtonHint
+        )
+
+        current_dir = os.path.dirname(os.path.abspath(__file__))
+        roboto_font_path = os.path.join(current_dir, "RobotoMono-Regular.ttf")
+        poppins_font_path = os.path.join(current_dir, "Poppins")
+        icon_path = os.path.join(current_dir, "openpype_icon.png")
+
+        # Install roboto font
+        QtGui.QFontDatabase.addApplicationFont(roboto_font_path)
+        for filename in os.listdir(poppins_font_path):
+            if os.path.splitext(filename)[1] == ".ttf":
+                QtGui.QFontDatabase.addApplicationFont(filename)
+
+        # Load logo
+        pixmap_openpype_logo = QtGui.QPixmap(icon_path)
+        # Set logo as icon of window
+        self.setWindowIcon(QtGui.QIcon(pixmap_openpype_logo))
+
+        self._pixmap_openpype_logo = pixmap_openpype_logo
+
+        self._update_thread = None
+
+        self.resize(QtCore.QSize(self._width, self._height))
+        self._init_ui()
+
+        # Set stylesheet
+        self.setStyleSheet(load_stylesheet())
+        self._run_update()
+
+    def _init_ui(self):
+
+        # Main info
+        # --------------------------------------------------------------------
+        main_label = QtWidgets.QLabel(
+            f"<b>OpenPype</b> is updating to {self._openpype_version}", self)
+        main_label.setWordWrap(True)
+        main_label.setObjectName("MainLabel")
+
+        # Progress bar
+        # --------------------------------------------------------------------
+        progress_bar = NiceProgressBar(self)
+        progress_bar.setAlignment(QtCore.Qt.AlignCenter)
+        progress_bar.setTextVisible(False)
+
+        # add all to main
+        main = QtWidgets.QVBoxLayout(self)
+        main.addSpacing(15)
+        main.addWidget(main_label, 0)
+        main.addSpacing(15)
+        main.addWidget(progress_bar, 0)
+        main.addSpacing(15)
+
+        self._progress_bar = progress_bar
+
+    def _run_update(self):
+        """Start install process.
+
+        This will once again validate entered path and mongo if ok, start
+        working thread that will do actual job.
+        """
+        # Check if install thread is not already running
+        if self._update_thread and self._update_thread.isRunning():
+            return
+        self._progress_bar.setRange(0, 0)
+        update_thread = UpdateThread(self)
+        update_thread.set_version(self._openpype_version)
+        update_thread.message.connect(self.update_console)
+        update_thread.progress.connect(self._update_progress)
+        update_thread.finished.connect(self._installation_finished)
+
+        self._update_thread = update_thread
+
+        update_thread.start()
+
+    def get_version_path(self):
+        return self._result_version_path
+
+    def _installation_finished(self):
+        status = self._update_thread.result()
+        self._result_version_path = status
+        self._progress_bar.setRange(0, 1)
+        self._update_progress(100)
+        QtWidgets.QApplication.processEvents()
+        self.done(0)
+
+    def _update_progress(self, progress: int):
+        # not updating progress as we are not able to determine it
+        # correctly now. Progress bar is set to un-deterministic mode
+        # until we are able to get progress in better way.
+        """
+        self._progress_bar.setRange(0, 0)
+        self._progress_bar.setValue(progress)
+        text_visible = self._progress_bar.isTextVisible()
+        if progress == 0:
+            if text_visible:
+                self._progress_bar.setTextVisible(False)
+        elif not text_visible:
+            self._progress_bar.setTextVisible(True)
+        """
+        return
+
+    def update_console(self, msg: str, error: bool = False) -> None:
+        """Display message in console.
+
+        Args:
+            msg (str): message.
+            error (bool): if True, print it red.
+        """
+        print(msg)
openpype/api.py (filename inferred; file header lost in extraction)

@@ -24,7 +24,9 @@ from .lib import (
     get_latest_version,
     get_global_environments,
     get_local_site_id,
-    change_openpype_mongo_url
+    change_openpype_mongo_url,
+    create_project_folders,
+    get_project_basic_paths
 )

 from .lib.mongo import (

@@ -72,6 +74,7 @@ __all__ = [
     "get_current_project_settings",
     "get_anatomy_settings",
     "get_environments",
+    "get_project_basic_paths",

     "SystemSettings",

@@ -120,5 +123,9 @@ __all__ = [
     "get_global_environments",

     "get_local_site_id",
-    "change_openpype_mongo_url"
+    "change_openpype_mongo_url",
+
+    "get_project_basic_paths",
+    "create_project_folders"

 ]
openpype/cli.py (filename inferred; file header lost in extraction)

@@ -18,6 +18,8 @@ from .pype_commands import PypeCommands
 @click.option("--list-versions", is_flag=True, expose_value=False,
               help=("list all detected versions. Use With `--use-staging "
                     "to list staging versions."))
+@click.option("--validate-version", expose_value=False,
+              help="validate given version integrity")
 def main(ctx):
     """Pype is main command serving as entry point to pipeline system.

@@ -281,3 +283,18 @@ def run(script):
     args_string = " ".join(args[1:])
     print(f"... running: {script} {args_string}")
     runpy.run_path(script, run_name="__main__", )
+
+
+@main.command()
+@click.argument("folder", nargs=-1)
+@click.option("-m",
+              "--mark",
+              help="Run tests marked by",
+              default=None)
+@click.option("-p",
+              "--pyargs",
+              help="Run tests from package",
+              default=None)
+def runtests(folder, mark, pyargs):
+    """Run all automatic tests after proper initialization via start.py"""
+    PypeCommands().run_tests(folder, mark, pyargs)
openpype/hooks/pre_global_host_data.py (filename inferred; file header lost in extraction)

@@ -43,6 +43,8 @@ class GlobalHostDataHook(PreLaunchHook):
             "env": self.launch_context.env,

+            "last_workfile_path": self.data.get("last_workfile_path"),
+
             "log": self.log
         })
openpype/hosts/blender/plugins/load/load_rig.py (filename inferred; file header lost in extraction)

@@ -66,12 +66,16 @@ class BlendRigLoader(plugin.AssetLoader):
         objects = []
         nodes = list(container.children)

+        allowed_types = ['ARMATURE', 'MESH']
+
         for obj in nodes:
-            obj.parent = asset_group
+            if obj.type in allowed_types:
+                obj.parent = asset_group

         for obj in nodes:
-            objects.append(obj)
-            nodes.extend(list(obj.children))
+            if obj.type in allowed_types:
+                objects.append(obj)
+                nodes.extend(list(obj.children))

         objects.reverse()

@@ -107,7 +111,8 @@ class BlendRigLoader(plugin.AssetLoader):

             if action is not None:
                 local_obj.animation_data.action = action
-            elif local_obj.animation_data.action is not None:
+            elif (local_obj.animation_data and
+                    local_obj.animation_data.action is not None):
                 plugin.prepare_data(
                     local_obj.animation_data.action, group_name)

@@ -126,7 +131,30 @@ class BlendRigLoader(plugin.AssetLoader):

         objects.reverse()

-        bpy.data.orphans_purge(do_local_ids=False)
+        curves = [obj for obj in data_to.objects if obj.type == 'CURVE']
+
+        for curve in curves:
+            local_obj = plugin.prepare_data(curve, group_name)
+            plugin.prepare_data(local_obj.data, group_name)
+
+            local_obj.use_fake_user = True
+
+            for mod in local_obj.modifiers:
+                mod_target_name = mod.object.name
+                mod.object = bpy.data.objects.get(
+                    f"{group_name}:{mod_target_name}")
+
+            if not local_obj.get(AVALON_PROPERTY):
+                local_obj[AVALON_PROPERTY] = dict()
+
+            avalon_info = local_obj[AVALON_PROPERTY]
+            avalon_info.update({"container_name": group_name})
+
+            local_obj.parent = asset_group
+            objects.append(local_obj)
+
+        while bpy.data.orphans_purge(do_local_ids=False):
+            pass

         bpy.ops.object.select_all(action='DESELECT')
openpype/hosts/blender/plugins/publish/extract_blend.py (filename inferred; file header lost in extraction)

@@ -28,6 +28,16 @@ class ExtractBlend(openpype.api.Extractor):

         for obj in instance:
             data_blocks.add(obj)
+            # Pack used images in the blend files.
+            if obj.type == 'MESH':
+                for material_slot in obj.material_slots:
+                    mat = material_slot.material
+                    if mat and mat.use_nodes:
+                        tree = mat.node_tree
+                        if tree.type == 'SHADER':
+                            for node in tree.nodes:
+                                if node.bl_idname == 'ShaderNodeTexImage':
+                                    node.image.pack()

         bpy.data.libraries.write(filepath, data_blocks)
(file header lost in extraction)

@@ -10,8 +10,10 @@ from .pipeline import (

 from avalon.tools import (
     creator,
-    loader,
     sceneinventory,
 )
+from openpype.tools import (
+    loader,
+    libraryloader
+)
(file header lost in extraction)

@@ -3,7 +3,7 @@ Basic avalon integration
 """
 import os

-from avalon.tools import workfiles
+from openpype.tools import workfiles
 from avalon import api as avalon
 from pyblish import api as pyblish
 from openpype.api import Logger
(file header lost in extraction)

@@ -91,7 +91,8 @@ class ExtractRender(pyblish.api.InstancePlugin):
         thumbnail_path = os.path.join(path, "thumbnail.png")
         ffmpeg_path = openpype.lib.get_ffmpeg_tool_path("ffmpeg")
         args = [
-            "{}".format(ffmpeg_path), "-y",
+            ffmpeg_path,
+            "-y",
             "-i", os.path.join(path, list(collections[0])[0]),
             "-vf", "scale=300:-1",
             "-vframes", "1",
(file header lost in extraction)

@@ -41,7 +41,8 @@ def menu_install():
         apply_colorspace_project, apply_colorspace_clips
     )
     # here is the best place to add menu
-    from avalon.tools import cbloader, creator, sceneinventory
+    from avalon.tools import creator, sceneinventory
+    from openpype.tools import loader
     from avalon.vendor.Qt import QtGui

     menu_name = os.environ['AVALON_LABEL']

@@ -90,7 +91,7 @@ def menu_install():

     loader_action = menu.addAction("Load ...")
     loader_action.setIcon(QtGui.QIcon("icons:CopyRectangle.png"))
-    loader_action.triggered.connect(cbloader.show)
+    loader_action.triggered.connect(loader.show)

     sceneinventory_action = menu.addAction("Manage ...")
     sceneinventory_action.setIcon(QtGui.QIcon("icons:CopyRectangle.png"))
(file header lost in extraction)

@@ -4,10 +4,8 @@ Basic avalon integration
 import os
 import contextlib
 from collections import OrderedDict
-from avalon.tools import (
-    workfiles,
-    publish as _publish
-)
+from avalon.tools import publish as _publish
+from openpype.tools import workfiles
 from avalon.pipeline import AVALON_CONTAINER_ID
 from avalon import api as avalon
 from avalon import schema
@@ -10,16 +10,16 @@ log = Logger().get_logger(__name__)

def tag_data():
    return {
-        "Retiming": {
-            "editable": "1",
-            "note": "Clip has retime or TimeWarp effects (or multiple effects stacked on the clip)",  # noqa
-            "icon": "retiming.png",
-            "metadata": {
-                "family": "retiming",
-                "marginIn": 1,
-                "marginOut": 1
-            }
-        },
+        # "Retiming": {
+        #     "editable": "1",
+        #     "note": "Clip has retime or TimeWarp effects (or multiple effects stacked on the clip)",  # noqa
+        #     "icon": "retiming.png",
+        #     "metadata": {
+        #         "family": "retiming",
+        #         "marginIn": 1,
+        #         "marginOut": 1
+        #     }
+        # },
        "[Lenses]": {
            "Set lense here": {
                "editable": "1",

@@ -31,15 +31,15 @@ def tag_data():
                }
            }
        },
-        "NukeScript": {
-            "editable": "1",
-            "note": "Collecting track items to Nuke scripts.",
-            "icon": "icons:TagNuke.png",
-            "metadata": {
-                "family": "nukescript",
-                "subset": "main"
-            }
-        },
+        # "NukeScript": {
+        #     "editable": "1",
+        #     "note": "Collecting track items to Nuke scripts.",
+        #     "icon": "icons:TagNuke.png",
+        #     "metadata": {
+        #         "family": "nukescript",
+        #         "subset": "main"
+        #     }
+        # },
        "Comment": {
            "editable": "1",
            "note": "Comment on a shot.",

@@ -78,8 +78,7 @@ def update_tag(tag, data):
    # set icon if any available in input data
    if data.get("icon"):
        tag.setIcon(str(data["icon"]))
-    # set note description of tag
-    tag.setNote(data["note"])

    # get metadata of tag
    mtd = tag.metadata()
    # get metadata key from data

@@ -97,6 +96,9 @@ def update_tag(tag, data):
            "tag.{}".format(str(k)),
            str(v)
        )
+
+    # set note description of tag
+    tag.setNote(str(data["note"]))
    return tag

@@ -106,6 +108,26 @@ def add_tags_to_workfile():
    """
    from .lib import get_current_project

+    def add_tag_to_bin(root_bin, name, data):
+        # for Tags to be created in root level Bin
+        # at first check if any of input data tag is not already created
+        done_tag = next((t for t in root_bin.items()
+                         if str(name) in t.name()), None)
+
+        if not done_tag:
+            # create Tag
+            tag = create_tag(name, data)
+            tag.setName(str(name))
+
+            log.debug("__ creating tag: {}".format(tag))
+            # adding Tag to Root Bin
+            root_bin.addItem(tag)
+        else:
+            # update only non hierarchy tags
+            update_tag(done_tag, data)
+            done_tag.setName(str(name))
+            log.debug("__ updating tag: {}".format(done_tag))
+
    # get project and root bin object
    project = get_current_project()
    root_bin = project.tagsBin()

@@ -125,10 +147,8 @@ def add_tags_to_workfile():
    for task_type in tasks.keys():
        nks_pres_tags["[Tasks]"][task_type.lower()] = {
            "editable": "1",
-            "note": "",
-            "icon": {
-                "path": "icons:TagGood.png"
-            },
+            "note": task_type,
+            "icon": "icons:TagGood.png",
            "metadata": {
                "family": "task",
                "type": task_type

@@ -157,10 +177,10 @@ def add_tags_to_workfile():
        # check if key is not decorated with [] so it is defined as bin
        bin_find = None
        pattern = re.compile(r"\[(.*)\]")
-        bin_finds = pattern.findall(_k)
+        _bin_finds = pattern.findall(_k)
        # if there is any available then pop it to a string
-        if bin_finds:
-            bin_find = bin_finds.pop()
+        if _bin_finds:
+            bin_find = _bin_finds.pop()

        # if bin was found then create or update
        if bin_find:

@@ -168,7 +188,6 @@ def add_tags_to_workfile():
            # first check if the bin is not already created at root level
            bins = [b for b in root_bin.items()
                    if b.name() in str(bin_find)]
            log.debug(">>> bins: {}".format(bins))

            if bins:
                bin = bins.pop()

@@ -178,49 +197,14 @@ def add_tags_to_workfile():
                bin = hiero.core.Bin(str(bin_find))

            # update or create tags in the bin
-            for k, v in _val.items():
-                tags = [t for t in bin.items()
-                        if str(k) in t.name()
-                        if len(str(k)) == len(t.name())]
-                if not tags:
-                    # create Tag obj
-                    tag = create_tag(k, v)
-
-                    # adding Tag to Bin
-                    bin.addItem(tag)
-                else:
-                    update_tag(tags.pop(), v)
+            for __k, __v in _val.items():
+                add_tag_to_bin(bin, __k, __v)

            # finally add the Bin object to the root level Bin
            if root_add:
                # adding Tag to Root Bin
                root_bin.addItem(bin)
        else:
-            # for Tags to be created in root level Bin
-            # at first check if any of input data tag is not already created
-            tags = None
-            tags = [t for t in root_bin.items()
-                    if str(_k) in t.name()]
-
-            if not tags:
-                # create Tag
-                tag = create_tag(_k, _val)
-
-                # adding Tag to Root Bin
-                root_bin.addItem(tag)
-            else:
-                # update Tags if they already exist
-                for _t in tags:
-                    # skip bin objects
-                    if isinstance(_t, hiero.core.Bin):
-                        continue
-
-                    # check if Hierarchy in name and skip it
-                    # because hierarchy could be edited
-                    if "hierarchy" in _t.name().lower():
-                        continue
-
-                    # update only non hierarchy tags
-                    update_tag(_t, _val)
+            add_tag_to_bin(root_bin, _k, _val)

    log.info("Default Tags were set...")
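
The new add_tag_to_bin helper collapses two near-duplicate create-or-update branches into one. A minimal self-contained sketch of the same pattern, using hypothetical Bin and Tag stand-ins (the real objects come from hiero.core, which only exists inside Hiero):

# Sketch only: "Bin" and "Tag" are made-up stand-ins for hiero.core objects.
class Tag(object):
    def __init__(self, name):
        self._name = name

    def name(self):
        return self._name


class Bin(object):
    def __init__(self):
        self._items = []

    def items(self):
        return list(self._items)

    def addItem(self, item):
        self._items.append(item)


def add_or_update_tag(root_bin, name):
    """Reuse an existing tag whose name matches, otherwise create one."""
    existing = next((t for t in root_bin.items()
                     if str(name) in t.name()), None)
    if existing is not None:
        return existing  # update path: update_tag(existing, data)
    tag = Tag(str(name))
    root_bin.addItem(tag)  # create path
    return tag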
@@ -378,6 +378,17 @@ def add_otio_metadata(otio_item, media_source, **kwargs):

def create_otio_timeline():

+    def set_prev_item(itemindex, track_item):
+        # Add Gap if needed
+        if itemindex == 0:
+            # if it is the first track item at the track then add
+            # it to the previous item
+            return track_item
+
+        else:
+            # get the previous item
+            return track_item.parent().items()[itemindex - 1]
+
    # get current timeline
    self.timeline = hiero.ui.activeSequence()
    self.project_fps = self.timeline.framerate().toFloat()

@@ -396,14 +407,6 @@ def create_otio_timeline():
                type(track), track.name())

        for itemindex, track_item in enumerate(track):
            # skip offline track items
            if not track_item.isMediaPresent():
                continue

            # skip if track item is disabled
            if not track_item.isEnabled():
                continue
-
-            # Add Gap if needed
-            if itemindex == 0:
-                # if it is the first track item at the track then add
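
The set_prev_item helper encodes a simple index rule: the first item on a track has no left neighbour, so the item itself is used. A plain-Python sketch of that rule:

# Sketch of the previous-item lookup used when inserting OTIO gaps.
def previous_item(items, index):
    if index == 0:
        return items[index]  # first clip: no left neighbour
    return items[index - 1]  # otherwise: its left neighbour


assert previous_item(["a", "b", "c"], 0) == "a"
assert previous_item(["a", "b", "c"], 2) == "b"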
@@ -5,7 +5,7 @@ import pyblish.api
class PreCollectClipEffects(pyblish.api.InstancePlugin):
    """Collect soft effects instances."""

-    order = pyblish.api.CollectorOrder - 0.579
+    order = pyblish.api.CollectorOrder - 0.479
    label = "Precollect Clip Effects Instances"
    families = ["clip"]

@@ -13,7 +13,7 @@ from pprint import pformat
class PrecollectInstances(pyblish.api.ContextPlugin):
    """Collect all Track items selection."""

-    order = pyblish.api.CollectorOrder - 0.59
+    order = pyblish.api.CollectorOrder - 0.49
    label = "Precollect Instances"
    hosts = ["hiero"]

@@ -131,7 +131,7 @@ class PrecollectInstances(pyblish.api.ContextPlugin):
        self.create_shot_instance(context, **data)

        self.log.info("Creating instance: {}".format(instance))
-        self.log.debug(
+        self.log.info(
            "_ instance.data: {}".format(pformat(instance.data)))

        if not with_audio:

@@ -8,11 +8,12 @@ from openpype.hosts.hiero.otio import hiero_export
from Qt.QtGui import QPixmap
import tempfile


class PrecollectWorkfile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    label = "Precollect Workfile"
-    order = pyblish.api.CollectorOrder - 0.6
+    order = pyblish.api.CollectorOrder - 0.5

    def process(self, context):
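
All three Hiero precollectors above move by the same +0.1 offset, which preserves their relative sequence while shifting them later; pyblish simply sorts plugins by the numeric order attribute. A minimal sketch of that ordering:

# pyblish runs plugins sorted by "order"; shifting all three precollectors
# by the same amount keeps the sequence:
# workfile (-0.5) -> instances (-0.49) -> clip effects (-0.479).
orders = {
    "PrecollectWorkfile": -0.5,
    "PrecollectInstances": -0.49,
    "PreCollectClipEffects": -0.479,
}
for name, offset in sorted(orders.items(), key=lambda kv: kv[1]):
    print(name, offset)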
@@ -15,8 +15,8 @@ creator.show()
    <scriptItem id="avalon_load">
        <label>Load ...</label>
        <scriptCode><![CDATA[
-from avalon.tools import cbloader
-cbloader.show(use_context=True)
+from openpype.tools import loader
+loader.show(use_context=True)
]]></scriptCode>
    </scriptItem>
openpype/hosts/houdini/startup/scripts/houdinicore.py (new file, 9 lines)

@@ -0,0 +1,9 @@
+from avalon import api, houdini
+
+
+def main():
+    print("Installing OpenPype ...")
+    api.install(houdini)
+
+
+main()
|
@ -8,7 +8,7 @@ from avalon import api as avalon
|
|||
from avalon import pipeline
|
||||
from avalon.maya import suspended_refresh
|
||||
from avalon.maya.pipeline import IS_HEADLESS
|
||||
from avalon.tools import workfiles
|
||||
from openpype.tools import workfiles
|
||||
from pyblish import api as pyblish
|
||||
from openpype.lib import any_outdated
|
||||
import openpype.hosts.maya
|
||||
|
|
@ -35,6 +35,7 @@ def install():
|
|||
pyblish.register_plugin_path(PUBLISH_PATH)
|
||||
avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
|
||||
avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
|
||||
avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)
|
||||
log.info(PUBLISH_PATH)
|
||||
menu.install()
|
||||
|
||||
|
|
@@ -63,14 +64,23 @@ def process_dirmap(project_settings):
    # type: (dict) -> None
    """Go through all paths in Settings and set them using `dirmap`.

    If the artist has Site Sync enabled, take the dirmap mapping directly
    from Local Settings when the artist is syncing a workfile locally.

    Args:
        project_settings (dict): Settings for current project.

    """
-    if not project_settings["maya"].get("maya-dirmap"):
+    local_mapping = _get_local_sync_dirmap(project_settings)
+    if not project_settings["maya"].get("maya-dirmap") and not local_mapping:
        return
-    mapping = project_settings["maya"]["maya-dirmap"]["paths"] or {}
-    mapping_enabled = project_settings["maya"]["maya-dirmap"]["enabled"]
+
+    mapping = local_mapping or \
+        project_settings["maya"]["maya-dirmap"]["paths"] \
+        or {}
+    mapping_enabled = project_settings["maya"]["maya-dirmap"]["enabled"] \
+        or bool(local_mapping)

    if not mapping or not mapping_enabled:
        return
    if mapping.get("source-path") and mapping_enabled is True:

@@ -93,10 +103,72 @@ def process_dirmap(project_settings):
            continue


+def _get_local_sync_dirmap(project_settings):
+    """
+    Returns dirmap if sync to a local project is enabled.
+
+    The only valid mapping is from the roots of the remote site to the
+    local site set in Local Settings.
+
+    Args:
+        project_settings (dict)
+    Returns:
+        dict : { "source-path": [XXX], "destination-path": [YYYY]}
+    """
+    import json
+    mapping = {}
+
+    if not project_settings["global"]["sync_server"]["enabled"]:
+        log.debug("Site Sync not enabled")
+        return mapping
+
+    from openpype.settings.lib import get_site_local_overrides
+    from openpype.modules import ModulesManager
+
+    manager = ModulesManager()
+    sync_module = manager.modules_by_name["sync_server"]
+
+    project_name = os.getenv("AVALON_PROJECT")
+    sync_settings = sync_module.get_sync_project_setting(
+        os.getenv("AVALON_PROJECT"), exclude_locals=False, cached=False)
+    log.debug(json.dumps(sync_settings, indent=4))
+
+    active_site = sync_module.get_local_normalized_site(
+        sync_module.get_active_site(project_name))
+    remote_site = sync_module.get_local_normalized_site(
+        sync_module.get_remote_site(project_name))
+    log.debug("active {} - remote {}".format(active_site, remote_site))
+
+    if active_site == "local" \
+            and project_name in sync_module.get_enabled_projects()\
+            and active_site != remote_site:
+        overrides = get_site_local_overrides(os.getenv("AVALON_PROJECT"),
+                                             active_site)
+        for root_name, value in overrides.items():
+            if os.path.isdir(value):
+                try:
+                    mapping["destination-path"] = [value]
+                    mapping["source-path"] = [sync_settings["sites"]
+                                              [remote_site]
+                                              ["root"]
+                                              [root_name]]
+                except IndexError:
+                    # missing corresponding destination path
+                    log.debug("overrides: {}".format(overrides))
+                    log.error(
+                        ("invalid dirmap mapping, missing corresponding"
+                         " destination directory."))
+                    break
+
+    log.debug("local sync mapping:: {}".format(mapping))
+    return mapping
+
+
def uninstall():
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
+    avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)

    menu.uninstall()
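
For context, a mapping dict in the shape returned by _get_local_sync_dirmap is what process_dirmap ultimately feeds to Maya's dirmap command. A minimal sketch, assuming the source and destination lists align by index (the example paths are made up):

# Sketch only: apply a {"source-path": [...], "destination-path": [...]}
# mapping via Maya's dirmap command.
from maya import cmds


def apply_dirmap(mapping):
    cmds.dirmap(enable=True)
    for source, destination in zip(mapping.get("source-path", []),
                                   mapping.get("destination-path", [])):
        cmds.dirmap(mapDirectory=(source, destination))


apply_dirmap({
    "source-path": ["/studio/projects"],
    "destination-path": ["C:/local/projects"],
})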
@@ -79,7 +79,7 @@ def override_toolbox_ui():
        log.warning("Could not import SceneInventory tool")

    try:
-        import avalon.tools.loader as loader
+        import openpype.tools.loader as loader
    except Exception:
        log.warning("Could not import Loader tool")
@@ -114,6 +114,8 @@ class RenderProduct(object):
    aov = attr.ib(default=None)          # source aov
    driver = attr.ib(default=None)       # source driver
    multipart = attr.ib(default=False)   # multichannel file
+    camera = attr.ib(default=None)       # used only when rendering
+                                         # from multiple cameras


def get(layer, render_instance=None):

@@ -183,6 +185,16 @@ class ARenderProducts:
        self.layer_data = self._get_layer_data()
        self.layer_data.products = self.get_render_products()

+    def has_camera_token(self):
+        # type: () -> bool
+        """Check if camera token is in image prefix.
+
+        Returns:
+            bool: True/False if camera token is present.
+
+        """
+        return "<camera>" in self.layer_data.filePrefix.lower()
+
    @abstractmethod
    def get_render_products(self):
        """To be implemented by renderer class.

@@ -307,7 +319,7 @@ class ARenderProducts:
        # Deadline allows submitting renders with a custom frame list
        # to support those cases we might want to allow 'custom frames'
        # to be overridden to `ExpectFiles` class?
-        layer_data = LayerMetadata(
+        return LayerMetadata(
            frameStart=int(self.get_render_attribute("startFrame")),
            frameEnd=int(self.get_render_attribute("endFrame")),
            frameStep=int(self.get_render_attribute("byFrameStep")),

@@ -321,7 +333,6 @@ class ARenderProducts:
            defaultExt=self._get_attr("defaultRenderGlobals.imfPluginKey"),
            filePrefix=file_prefix
        )
-        return layer_data

    def _generate_file_sequence(
            self, layer_data,

@@ -330,7 +341,7 @@ class ARenderProducts:
            force_cameras=None):
        # type: (LayerMetadata, str, str, list) -> list
        expected_files = []
-        cameras = force_cameras if force_cameras else layer_data.cameras
+        cameras = force_cameras or layer_data.cameras
        ext = force_ext or layer_data.defaultExt
        for cam in cameras:
            file_prefix = layer_data.filePrefix

@@ -361,8 +372,8 @@ class ARenderProducts:
        )
        return expected_files

-    def get_files(self, product, camera):
-        # type: (RenderProduct, str) -> list
+    def get_files(self, product):
+        # type: (RenderProduct) -> list
        """Return list of expected files.

        It will translate render token strings ('<RenderPass>', etc.) to

@@ -373,7 +384,6 @@ class ARenderProducts:
        Args:
            product (RenderProduct): Render product to be used for file
                generation.
-            camera (str): Camera name.

        Returns:
            List of files

@@ -383,7 +393,7 @@ class ARenderProducts:
            self.layer_data,
            force_aov_name=product.productName,
            force_ext=product.ext,
-            force_cameras=[camera]
+            force_cameras=[product.camera]
        )

    def get_renderable_cameras(self):
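
The signature change above means callers no longer pass a camera to get_files; the camera rides along on the RenderProduct itself. A pure-Python sketch of the new contract (the class and file pattern here are simplified stand-ins, not the real implementation):

# Sketch only: mimics the attrs-based RenderProduct above.
class RenderProduct(object):
    def __init__(self, productName, ext, camera):
        self.productName = productName
        self.ext = ext
        self.camera = camera


def get_files(product, frames=(1, 2)):
    # expand "<camera>_<name>.<frame>.<ext>" per frame, no camera argument
    base = product.productName or "beauty"
    return ["{}_{}.{:04d}.{}".format(product.camera, base, f, product.ext)
            for f in frames]


product = RenderProduct("diffuse", "exr", "camMain")
print(get_files(product))  # ['camMain_diffuse.0001.exr', 'camMain_diffuse.0002.exr']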
@@ -460,15 +470,21 @@ class RenderProductsArnold(ARenderProducts):

        return prefix

-    def _get_aov_render_products(self, aov):
+    def _get_aov_render_products(self, aov, cameras=None):
        """Return all render products for the AOV"""

-        products = list()
+        products = []
        aov_name = self._get_attr(aov, "name")
        ai_drivers = cmds.listConnections("{}.outputs".format(aov),
                                          source=True,
                                          destination=False,
                                          type="aiAOVDriver") or []
+        if not cameras:
+            cameras = [
+                self.sanitize_camera_name(
+                    self.get_renderable_cameras()[0]
+                )
+            ]

        for ai_driver in ai_drivers:
            # todo: check aiAOVDriver.prefix as it could have

@@ -497,30 +513,37 @@ class RenderProductsArnold(ARenderProducts):
                name = "beauty"

            # Support Arnold light groups for AOVs
-            # Global AOV: When disabled the main layer is not written: `{pass}`
+            # Global AOV: When disabled the main layer is
+            #             not written: `{pass}`
            # All Light Groups: When enabled, a `{pass}_lgroups` file is
-            #                   written and is always merged into a single file
-            # Light Groups List: When set, a product per light group is written
+            #                   written and is always merged into a
+            #                   single file
+            # Light Groups List: When set, a product per light
+            #                    group is written
            #                    e.g. {pass}_front, {pass}_rim
            global_aov = self._get_attr(aov, "globalAov")
            if global_aov:
-                product = RenderProduct(productName=name,
-                                        ext=ext,
-                                        aov=aov_name,
-                                        driver=ai_driver)
-                products.append(product)
+                for camera in cameras:
+                    product = RenderProduct(productName=name,
+                                            ext=ext,
+                                            aov=aov_name,
+                                            driver=ai_driver,
+                                            camera=camera)
+                    products.append(product)

            all_light_groups = self._get_attr(aov, "lightGroups")
            if all_light_groups:
                # All light groups is enabled. A single multipart
                # Render Product
-                product = RenderProduct(productName=name + "_lgroups",
-                                        ext=ext,
-                                        aov=aov_name,
-                                        driver=ai_driver,
-                                        # Always multichannel output
-                                        multipart=True)
-                products.append(product)
+                for camera in cameras:
+                    product = RenderProduct(productName=name + "_lgroups",
+                                            ext=ext,
+                                            aov=aov_name,
+                                            driver=ai_driver,
+                                            # Always multichannel output
+                                            multipart=True,
+                                            camera=camera)
+                    products.append(product)
            else:
                value = self._get_attr(aov, "lightGroupsList")
                if not value:

@@ -529,11 +552,15 @@ class RenderProductsArnold(ARenderProducts):
                for light_group in selected_light_groups:
                    # Render Product per selected light group
                    aov_light_group_name = "{}_{}".format(name, light_group)
-                    product = RenderProduct(productName=aov_light_group_name,
-                                            aov=aov_name,
-                                            driver=ai_driver,
-                                            ext=ext)
-                    products.append(product)
+                    for camera in cameras:
+                        product = RenderProduct(
+                            productName=aov_light_group_name,
+                            aov=aov_name,
+                            driver=ai_driver,
+                            ext=ext,
+                            camera=camera
+                        )
+                        products.append(product)

        return products

@@ -556,17 +583,26 @@ class RenderProductsArnold(ARenderProducts):
            # anyway.
            return []

-        default_ext = self._get_attr("defaultRenderGlobals.imfPluginKey")
-        beauty_product = RenderProduct(productName="beauty",
-                                       ext=default_ext,
-                                       driver="defaultArnoldDriver")
+        # check if camera token is in prefix. If so, and we have list of
+        # renderable cameras, generate render product for each and every
+        # of them.
+        cameras = [
+            self.sanitize_camera_name(c)
+            for c in self.get_renderable_cameras()
+        ]
+
+        default_ext = self._get_attr("defaultRenderGlobals.imfPluginKey")
+        beauty_products = [RenderProduct(
+            productName="beauty",
+            ext=default_ext,
+            driver="defaultArnoldDriver",
+            camera=camera) for camera in cameras]
        # AOVs > Legacy > Maya Render View > Mode
        aovs_enabled = bool(
            self._get_attr("defaultArnoldRenderOptions.aovMode")
        )
        if not aovs_enabled:
-            return [beauty_product]
+            return beauty_products

        # Common > File Output > Merge AOVs or <RenderPass>
        # We don't need to check for Merge AOVs due to overridden

@@ -575,8 +611,9 @@ class RenderProductsArnold(ARenderProducts):
            "<renderpass>" in self.layer_data.filePrefix.lower()
        )
        if not has_renderpass_token:
-            beauty_product.multipart = True
-            return [beauty_product]
+            for product in beauty_products:
+                product.multipart = True
+            return beauty_products

        # AOVs are set to be rendered separately. We should expect
        # <RenderPass> token in path.

@@ -598,14 +635,14 @@ class RenderProductsArnold(ARenderProducts):
                continue

            # For now stick to the legacy output format.
-            aov_products = self._get_aov_render_products(aov)
+            aov_products = self._get_aov_render_products(aov, cameras)
            products.extend(aov_products)

-        if not any(product.aov == "RGBA" for product in products):
+        if all(product.aov != "RGBA" for product in products):
            # Append default 'beauty' as this is arnolds default.
            # However, it is excluded whenever a RGBA pass is enabled.
            # For legibility add the beauty layer as first entry
-            products.insert(0, beauty_product)
+            products += beauty_products

        # TODO: Output Denoising AOVs?
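
Every renderer class in this file now applies the same expansion: one RenderProduct per (AOV, camera) pair. A condensed sketch of that cartesian product:

# Sketch of the per-camera expansion pattern shared by the Arnold, V-Ray,
# Redshift and Renderman product classes.
import itertools

aov_names = ["beauty", "diffuse", "specular"]
cameras = ["camMain", "camTop"]

products = [
    {"productName": aov, "camera": camera}
    for aov, camera in itertools.product(aov_names, cameras)
]
assert len(products) == len(aov_names) * len(cameras)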
@@ -670,6 +707,11 @@ class RenderProductsVray(ARenderProducts):
            # anyway.
            return []

+        cameras = [
+            self.sanitize_camera_name(c)
+            for c in self.get_renderable_cameras()
+        ]
+
        image_format_str = self._get_attr("vraySettings.imageFormatStr")
        default_ext = image_format_str
        if default_ext in {"exr (multichannel)", "exr (deep)"}:

@@ -680,13 +722,21 @@ class RenderProductsVray(ARenderProducts):
        # add beauty as default when not disabled
        dont_save_rgb = self._get_attr("vraySettings.dontSaveRgbChannel")
        if not dont_save_rgb:
-            products.append(RenderProduct(productName="", ext=default_ext))
+            for camera in cameras:
+                products.append(
+                    RenderProduct(productName="",
+                                  ext=default_ext,
+                                  camera=camera))

        # separate alpha file
        separate_alpha = self._get_attr("vraySettings.separateAlpha")
        if separate_alpha:
-            products.append(RenderProduct(productName="Alpha",
-                                          ext=default_ext))
+            for camera in cameras:
+                products.append(
+                    RenderProduct(productName="Alpha",
+                                  ext=default_ext,
+                                  camera=camera)
+                )

        if image_format_str == "exr (multichannel)":
            # AOVs are merged in m-channel file, only main layer is rendered

@@ -716,19 +766,23 @@ class RenderProductsVray(ARenderProducts):
                # instead seems to output multiple Render Products,
                # specifically "Self_Illumination" and "Environment"
                product_names = ["Self_Illumination", "Environment"]
-                for name in product_names:
-                    product = RenderProduct(productName=name,
-                                            ext=default_ext,
-                                            aov=aov)
-                    products.append(product)
+                for camera in cameras:
+                    for name in product_names:
+                        product = RenderProduct(productName=name,
+                                                ext=default_ext,
+                                                aov=aov,
+                                                camera=camera)
+                        products.append(product)
                # Continue as we've processed this special case AOV
                continue

            aov_name = self._get_vray_aov_name(aov)
-            product = RenderProduct(productName=aov_name,
-                                    ext=default_ext,
-                                    aov=aov)
-            products.append(product)
+            for camera in cameras:
+                product = RenderProduct(productName=aov_name,
+                                        ext=default_ext,
+                                        aov=aov,
+                                        camera=camera)
+                products.append(product)

        return products
@@ -875,6 +929,11 @@ class RenderProductsRedshift(ARenderProducts):
            # anyway.
            return []

+        cameras = [
+            self.sanitize_camera_name(c)
+            for c in self.get_renderable_cameras()
+        ]
+
        # For Redshift we don't directly return upon forcing multilayer
        # due to some AOVs still being written into separate files,
        # like Cryptomatte.

@@ -933,11 +992,14 @@ class RenderProductsRedshift(ARenderProducts):
                for light_group in light_groups:
                    aov_light_group_name = "{}_{}".format(aov_name,
                                                          light_group)
-                    product = RenderProduct(productName=aov_light_group_name,
-                                            aov=aov_name,
-                                            ext=ext,
-                                            multipart=aov_multipart)
-                    products.append(product)
+                    for camera in cameras:
+                        product = RenderProduct(
+                            productName=aov_light_group_name,
+                            aov=aov_name,
+                            ext=ext,
+                            multipart=aov_multipart,
+                            camera=camera)
+                        products.append(product)

            if light_groups:
                light_groups_enabled = True

@@ -945,11 +1007,13 @@ class RenderProductsRedshift(ARenderProducts):
                # Redshift AOV Light Select always renders the global AOV
                # even when light groups are present so we don't need to
                # exclude it when light groups are active
-                product = RenderProduct(productName=aov_name,
-                                        aov=aov_name,
-                                        ext=ext,
-                                        multipart=aov_multipart)
-                products.append(product)
+                for camera in cameras:
+                    product = RenderProduct(productName=aov_name,
+                                            aov=aov_name,
+                                            ext=ext,
+                                            multipart=aov_multipart,
+                                            camera=camera)
+                    products.append(product)

        # When a Beauty AOV is added manually, it will be rendered as
        # 'Beauty_other' in file name and "standard" beauty will have

@@ -959,10 +1023,12 @@ class RenderProductsRedshift(ARenderProducts):
            return products

        beauty_name = "Beauty_other" if has_beauty_aov else ""
-        products.insert(0,
-                        RenderProduct(productName=beauty_name,
-                                      ext=ext,
-                                      multipart=multipart))
+        for camera in cameras:
+            products.insert(0,
+                            RenderProduct(productName=beauty_name,
+                                          ext=ext,
+                                          multipart=multipart,
+                                          camera=camera))

        return products
@@ -987,6 +1053,16 @@ class RenderProductsRenderman(ARenderProducts):
        :func:`ARenderProducts.get_render_products()`

        """
+        cameras = [
+            self.sanitize_camera_name(c)
+            for c in self.get_renderable_cameras()
+        ]
+
+        if not cameras:
+            cameras = [
+                self.sanitize_camera_name(
+                    self.get_renderable_cameras()[0])
+            ]
        products = []

        default_ext = "exr"

@@ -1000,9 +1076,11 @@ class RenderProductsRenderman(ARenderProducts):
            if aov_name == "rmanDefaultDisplay":
                aov_name = "beauty"

-            product = RenderProduct(productName=aov_name,
-                                    ext=default_ext)
-            products.append(product)
+            for camera in cameras:
+                product = RenderProduct(productName=aov_name,
+                                        ext=default_ext,
+                                        camera=camera)
+                products.append(product)

        return products
@@ -4,6 +4,53 @@ import avalon.maya
from openpype.api import PypeCreatorMixin


+def get_reference_node(members, log=None):
+    """Get the reference node from the container members
+    Args:
+        members: list of node names
+
+    Returns:
+        str: Reference node name.
+
+    """
+
+    from maya import cmds
+
+    # Collect the references without .placeHolderList[] attributes as
+    # unique entries (objects only) and skipping the sharedReferenceNode.
+    references = set()
+    for ref in cmds.ls(members, exactType="reference", objectsOnly=True):
+
+        # Ignore any `:sharedReferenceNode`
+        if ref.rsplit(":", 1)[-1].startswith("sharedReferenceNode"):
+            continue
+
+        # Ignore _UNKNOWN_REF_NODE_ (PLN-160)
+        if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"):
+            continue
+
+        references.add(ref)
+
+    assert references, "No reference node found in container"
+
+    # Get highest reference node (least parents)
+    highest = min(references,
+                  key=lambda x: len(get_reference_node_parents(x)))
+
+    # Warn the user when we're taking the highest reference node
+    if len(references) > 1:
+        if not log:
+            from openpype.lib import PypeLogger
+
+            log = PypeLogger().get_logger(__name__)
+
+        log.warning("More than one reference node found in "
+                    "container, using highest reference node: "
+                    "%s (in: %s)", highest, list(references))
+
+    return highest
+
+
def get_reference_node_parents(ref):
    """Return all parent reference nodes of reference node
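
With the helper promoted to module level, inventory actions and loaders can resolve a container's reference node the same way. A usage sketch for inside a Maya session (the container set name here is hypothetical):

# Sketch: resolve the reference node for a loaded container, as the
# updated ReferenceLoader.update() does.
from maya import cmds
from openpype.hosts.maya.api.plugin import get_reference_node

members = cmds.sets("modelMain_CON", query=True, nodesOnly=True)
reference_node = get_reference_node(members)  # optionally pass a logger
print(cmds.referenceQuery(reference_node, filename=True))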
@@ -109,7 +156,7 @@ class ReferenceLoader(api.Loader):
                    loader=self.__class__.__name__
                ))
            else:
-                ref_node = self._get_reference_node(nodes)
+                ref_node = get_reference_node(nodes, self.log)
                loaded_containers.append(containerise(
                    name=name,
                    namespace=namespace,

@@ -126,46 +173,6 @@ class ReferenceLoader(api.Loader):
        """To be implemented by subclass"""
        raise NotImplementedError("Must be implemented by subclass")

-    def _get_reference_node(self, members):
-        """Get the reference node from the container members
-        Args:
-            members: list of node names
-
-        Returns:
-            str: Reference node name.
-
-        """
-
-        from maya import cmds
-
-        # Collect the references without .placeHolderList[] attributes as
-        # unique entries (objects only) and skipping the sharedReferenceNode.
-        references = set()
-        for ref in cmds.ls(members, exactType="reference", objectsOnly=True):
-
-            # Ignore any `:sharedReferenceNode`
-            if ref.rsplit(":", 1)[-1].startswith("sharedReferenceNode"):
-                continue
-
-            # Ignore _UNKNOWN_REF_NODE_ (PLN-160)
-            if ref.rsplit(":", 1)[-1].startswith("_UNKNOWN_REF_NODE_"):
-                continue
-
-            references.add(ref)
-
-        assert references, "No reference node found in container"
-
-        # Get highest reference node (least parents)
-        highest = min(references,
-                      key=lambda x: len(get_reference_node_parents(x)))
-
-        # Warn the user when we're taking the highest reference node
-        if len(references) > 1:
-            self.log.warning("More than one reference node found in "
-                             "container, using highest reference node: "
-                             "%s (in: %s)", highest, list(references))
-
-        return highest
-
    def update(self, container, representation):

@@ -178,7 +185,7 @@ class ReferenceLoader(api.Loader):

        # Get reference node from container members
        members = cmds.sets(node, query=True, nodesOnly=True)
-        reference_node = self._get_reference_node(members)
+        reference_node = get_reference_node(members, self.log)

        file_type = {
            "ma": "mayaAscii",

@@ -274,7 +281,7 @@ class ReferenceLoader(api.Loader):

        # Assume asset has been referenced
        members = cmds.sets(node, query=True)
-        reference_node = self._get_reference_node(members)
+        reference_node = get_reference_node(members, self.log)

        assert reference_node, ("Imported container not supported; "
                                "container must be referenced.")
@@ -31,7 +31,7 @@ class ShaderDefinitionsEditor(QtWidgets.QWidget):

        self.setObjectName("shaderDefinitionEditor")
        self.setWindowTitle("OpenPype shader name definition editor")
-        icon = QtGui.QIcon(resources.pype_icon_filepath())
+        icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
        self.setWindowIcon(icon)
        self.setWindowFlags(QtCore.Qt.Window)
        self.setParent(parent)
@@ -1,11 +1,11 @@
from openpype.hosts.maya.api import plugin


-class CreateMayaAscii(plugin.Creator):
-    """Raw Maya Ascii file export"""
+class CreateMayaScene(plugin.Creator):
+    """Raw Maya Scene file export"""

-    name = "mayaAscii"
-    label = "Maya Ascii"
-    family = "mayaAscii"
+    name = "mayaScene"
+    label = "Maya Scene"
+    family = "mayaScene"
    icon = "file-archive-o"
    defaults = ['Main']
@@ -9,3 +9,8 @@ class CreateSetDress(plugin.Creator):
    family = "setdress"
    icon = "cubes"
    defaults = ["Main", "Anim"]
+
+    def __init__(self, *args, **kwargs):
+        super(CreateSetDress, self).__init__(*args, **kwargs)
+
+        self.data["exactSetMembersOnly"] = True
openpype/hosts/maya/plugins/inventory/import_modelrender.py (new file, 92 lines)

@@ -0,0 +1,92 @@
+from avalon import api, io
+
+
+class ImportModelRender(api.InventoryAction):
+
+    label = "Import Model Render Sets"
+    icon = "industry"
+    color = "#55DDAA"
+
+    scene_type_regex = "meta.render.m[ab]"
+    look_data_type = "meta.render.json"
+
+    @staticmethod
+    def is_compatible(container):
+        return (
+            container.get("loader") == "ReferenceLoader"
+            and container.get("name", "").startswith("model")
+        )
+
+    def process(self, containers):
+        from maya import cmds
+
+        for container in containers:
+            con_name = container["objectName"]
+            nodes = []
+            for n in cmds.sets(con_name, query=True, nodesOnly=True) or []:
+                if cmds.nodeType(n) == "reference":
+                    nodes += cmds.referenceQuery(n, nodes=True)
+                else:
+                    nodes.append(n)
+
+            repr_doc = io.find_one({
+                "_id": io.ObjectId(container["representation"]),
+            })
+            version_id = repr_doc["parent"]
+
+            print("Importing render sets for model %r" % con_name)
+            self.assign_model_render_by_version(nodes, version_id)
+
+    def assign_model_render_by_version(self, nodes, version_id):
+        """Assign nodes a specific published model render data version by id.
+
+        This assumes the nodes correspond with the asset.
+
+        Args:
+            nodes(list): nodes to assign render data to
+            version_id (bson.ObjectId): database id of the version of model
+
+        Returns:
+            None
+        """
+        import json
+        from maya import cmds
+        from avalon import maya, io, pipeline
+        from openpype.hosts.maya.api import lib
+
+        # Get representations of shader file and relationships
+        look_repr = io.find_one({
+            "type": "representation",
+            "parent": version_id,
+            "name": {"$regex": self.scene_type_regex},
+        })
+        if not look_repr:
+            print("No model render sets for this model version..")
+            return
+
+        json_repr = io.find_one({
+            "type": "representation",
+            "parent": version_id,
+            "name": self.look_data_type,
+        })
+
+        context = pipeline.get_representation_context(look_repr["_id"])
+        maya_file = pipeline.get_representation_path_from_context(context)
+
+        context = pipeline.get_representation_context(json_repr["_id"])
+        json_file = pipeline.get_representation_path_from_context(context)
+
+        # Import the look file
+        with maya.maintained_selection():
+            shader_nodes = cmds.file(maya_file,
+                                     i=True,  # import
+                                     returnNewNodes=True)
+            # imprint context data
+
+        # Load relationships
+        shader_relation = json_file
+        with open(shader_relation, "r") as f:
+            relationships = json.load(f)
+
+        # Assign relationships
+        lib.apply_shaders(relationships, shader_nodes, nodes)
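
The {"$regex": "meta.render.m[ab]"} query is what lets this action pick up either the ma or mb representation published by ExtractModelRenderSets further below. A quick sketch of what the pattern matches, using Python's re for illustration:

# Sketch of the representation-name pattern used in the Mongo query.
import re

pattern = re.compile("meta.render.m[ab]")
for name in ("meta.render.ma", "meta.render.mb", "meta.render.json"):
    print(name, bool(pattern.match(name)))
# -> meta.render.ma True, meta.render.mb True, meta.render.json False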
openpype/hosts/maya/plugins/inventory/import_reference.py (new file, 29 lines)

@@ -0,0 +1,29 @@
+from maya import cmds
+
+from avalon import api
+
+from openpype.hosts.maya.api.plugin import get_reference_node
+
+
+class ImportReference(api.InventoryAction):
+    """Imports selected reference to inside of the file."""
+
+    label = "Import Reference"
+    icon = "download"
+    color = "#d8d8d8"
+
+    def process(self, containers):
+        references = cmds.ls(type="reference")
+        for container in containers:
+            if container["loader"] != "ReferenceLoader":
+                print("Not a reference, skipping")
+                continue
+
+            node = container["objectName"]
+            members = cmds.sets(node, query=True, nodesOnly=True)
+            ref_node = get_reference_node(members)
+
+            ref_file = cmds.referenceQuery(ref_node, f=True)
+            cmds.file(ref_file, importReference=True)
+
+        return True  # return anything to trigger model refresh
@@ -13,6 +13,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
                "pointcache",
                "animation",
                "mayaAscii",
+                "mayaScene",
                "setdress",
                "layout",
                "camera",

@@ -71,7 +72,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader):
        except:  # noqa: E722
            pass

-        if family not in ["layout", "setdress", "mayaAscii"]:
+        if family not in ["layout", "setdress", "mayaAscii", "mayaScene"]:
            for root in roots:
                root.setParent(world=True)
@@ -223,8 +223,8 @@ class CollectLook(pyblish.api.InstancePlugin):

    def process(self, instance):
        """Collect the Look in the instance with the correct layer settings"""
-
-        with lib.renderlayer(instance.data["renderlayer"]):
+        renderlayer = instance.data.get("renderlayer", "defaultRenderLayer")
+        with lib.renderlayer(renderlayer):
            self.collect(instance)

    def collect(self, instance):

@@ -357,6 +357,23 @@ class CollectLook(pyblish.api.InstancePlugin):
        for vray_node in vray_plugin_nodes:
            history.extend(cmds.listHistory(vray_node))

+        # handling render attribute sets
+        render_set_types = [
+            "VRayDisplacement",
+            "VRayLightMesh",
+            "VRayObjectProperties",
+            "RedshiftObjectId",
+            "RedshiftMeshParameters",
+        ]
+        render_sets = cmds.ls(look_sets, type=render_set_types)
+        if render_sets:
+            history.extend(
+                cmds.listHistory(render_sets,
+                                 future=False,
+                                 pruneDagObjects=True)
+                or []
+            )
+
        files = cmds.ls(history, type="file", long=True)
        files.extend(cmds.ls(history, type="aiImage", long=True))
        files.extend(cmds.ls(history, type="RedshiftNormalMap", long=True))

@@ -550,3 +567,45 @@ class CollectLook(pyblish.api.InstancePlugin):
                "source": source,  # required for resources
                "files": files,
                "color_space": color_space}  # required for resources


+class CollectModelRenderSets(CollectLook):
+    """Collect render attribute sets for model instance.
+
+    Collects additional render attribute sets so they can be
+    published with model.
+
+    """
+
+    order = pyblish.api.CollectorOrder + 0.21
+    families = ["model"]
+    label = "Collect Model Render Sets"
+    hosts = ["maya"]
+    maketx = True
+
+    def collect_sets(self, instance):
+        """Collect all related objectSets except shadingEngines
+
+        Args:
+            instance (list): all nodes to be published
+
+        Returns:
+            dict
+        """
+
+        sets = {}
+        for node in instance:
+            related_sets = lib.get_related_sets(node)
+            if not related_sets:
+                continue
+
+            for objset in related_sets:
+                if objset in sets:
+                    continue
+
+                if "shadingEngine" in cmds.nodeType(objset, inherited=True):
+                    continue
+
+                sets[objset] = {"uuid": lib.get_id(objset), "members": list()}
+
+        return sets
@@ -3,14 +3,14 @@ from maya import cmds
import pyblish.api


-class CollectMayaAscii(pyblish.api.InstancePlugin):
-    """Collect May Ascii Data
+class CollectMayaScene(pyblish.api.InstancePlugin):
+    """Collect Maya Scene Data

    """

    order = pyblish.api.CollectorOrder + 0.2
    label = 'Collect Model Data'
-    families = ["mayaAscii"]
+    families = ["mayaScene"]

    def process(self, instance):
        # Extract only current frame (override)
@@ -174,10 +174,16 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
            assert render_products, "no render products generated"
            exp_files = []
            for product in render_products:
-                for camera in layer_render_products.layer_data.cameras:
-                    exp_files.append(
-                        {product.productName: layer_render_products.get_files(
-                            product, camera)})
+                product_name = product.productName
+                if product.camera and layer_render_products.has_camera_token():
+                    product_name = "{}{}".format(
+                        product.camera,
+                        "_" + product_name if product_name else "")
+                exp_files.append(
+                    {
+                        product_name: layer_render_products.get_files(
+                            product)
+                    })

            self.log.info("multipart: {}".format(
                layer_render_products.multipart))
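
The key built above prefixes the camera name only when the image prefix actually contains a camera token, and drops the separator for the nameless beauty product. A sketch of that composition:

# Sketch of the expected-files key composition introduced above.
def expected_files_key(product_name, camera, has_camera_token):
    if camera and has_camera_token:
        return "{}{}".format(camera, "_" + product_name if product_name else "")
    return product_name


assert expected_files_key("diffuse", "camMain", True) == "camMain_diffuse"
assert expected_files_key("", "camMain", True) == "camMain"
assert expected_files_key("diffuse", "camMain", False) == "diffuse"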
@@ -199,12 +205,14 @@ class CollectMayaRender(pyblish.api.ContextPlugin):

            # replace relative paths with absolute. Render products are
            # returned as list of dictionaries.
+            publish_meta_path = None
            for aov in exp_files:
                full_paths = []
                for file in aov[aov.keys()[0]]:
                    full_path = os.path.join(workspace, "renders", file)
                    full_path = full_path.replace("\\", "/")
                    full_paths.append(full_path)
+                    publish_meta_path = os.path.dirname(full_path)
                aov_dict[aov.keys()[0]] = full_paths

            frame_start_render = int(self.get_render_attribute(
@@ -230,6 +238,26 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
                frame_end_handle = frame_end_render

            full_exp_files.append(aov_dict)

+            # find common path to store metadata
+            # so if image prefix is branching to many directories
+            # metadata file will be located in top-most common
+            # directory.
+            # TODO: use `os.path.commonpath()` after switch to Python 3
+            common_publish_meta_path = os.path.splitdrive(
+                publish_meta_path)[0]
+            if common_publish_meta_path:
+                common_publish_meta_path += os.path.sep
+            for part in publish_meta_path.split("/"):
+                common_publish_meta_path = os.path.join(
+                    common_publish_meta_path, part)
+                if part == expected_layer_name:
+                    break
+            common_publish_meta_path = common_publish_meta_path.replace(
+                "\\", "/")
+            self.log.info(
+                "Publish meta path: {}".format(common_publish_meta_path))
+
            self.log.info(full_exp_files)
            self.log.info("collecting layer: {}".format(layer_name))
            # Get layer specific settings, might be overrides
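
The TODO above notes that this loop can become os.path.commonpath() once Python 3 is the baseline; a sketch of the equivalent call:

# Sketch of the Python 3 replacement for the manual common-path loop.
import os

paths = [
    "/proj/renders/layerA/beauty/beauty.0001.exr",
    "/proj/renders/layerA/diffuse/diffuse.0001.exr",
]
print(os.path.commonpath(paths))  # -> /proj/renders/layerA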
@@ -262,6 +290,7 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
                # which was submitted originally
                "source": filepath,
                "expectedFiles": full_exp_files,
+                "publishRenderMetadataFolder": common_publish_meta_path,
                "resolutionWidth": cmds.getAttr("defaultResolution.width"),
                "resolutionHeight": cmds.getAttr("defaultResolution.height"),
                "pixelAspect": cmds.getAttr("defaultResolution.pixelAspect"),

@@ -4,7 +4,7 @@ import os
from maya import cmds


-class CollectMayaScene(pyblish.api.ContextPlugin):
+class CollectWorkfile(pyblish.api.ContextPlugin):
    """Inject the current working file into context"""

    order = pyblish.api.CollectorOrder - 0.01
@@ -122,7 +122,7 @@ def no_workspace_dir():


class ExtractLook(openpype.api.Extractor):
-    """Extract Look (Maya Ascii + JSON)
+    """Extract Look (Maya Scene + JSON)

    Only extracts the sets (shadingEngines and alike) alongside a .json file
    that stores its relationships for the sets and "attribute" data for the

@@ -130,11 +130,12 @@ class ExtractLook(openpype.api.Extractor):

    """

-    label = "Extract Look (Maya ASCII + JSON)"
+    label = "Extract Look (Maya Scene + JSON)"
    hosts = ["maya"]
    families = ["look"]
    order = pyblish.api.ExtractorOrder + 0.2
    scene_type = "ma"
+    look_data_type = "json"

    @staticmethod
    def get_renderer_name():

@@ -176,6 +177,8 @@ class ExtractLook(openpype.api.Extractor):
            # no preset found
            pass

+        return "mayaAscii" if self.scene_type == "ma" else "mayaBinary"
+
    def process(self, instance):
        """Plugin entry point.

@@ -183,10 +186,12 @@ class ExtractLook(openpype.api.Extractor):
            instance: Instance to process.

        """
+        _scene_type = self.get_maya_scene_type(instance)
+
        # Define extract output file path
        dir_path = self.staging_dir(instance)
        maya_fname = "{0}.{1}".format(instance.name, self.scene_type)
-        json_fname = "{0}.json".format(instance.name)
+        json_fname = "{0}.{1}".format(instance.name, self.look_data_type)

        # Make texture dump folder
        maya_path = os.path.join(dir_path, maya_fname)

@@ -196,11 +201,100 @@ class ExtractLook(openpype.api.Extractor):

        # Remove all members of the sets so they are not included in the
        # exported file by accident
-        self.log.info("Extract sets (Maya ASCII) ...")
+        self.log.info("Extract sets (%s) ..." % _scene_type)
        lookdata = instance.data["lookData"]
        relationships = lookdata["relationships"]
        sets = relationships.keys()

+        results = self.process_resources(instance, staging_dir=dir_path)
+        transfers = results["fileTransfers"]
+        hardlinks = results["fileHardlinks"]
+        hashes = results["fileHashes"]
+        remap = results["attrRemap"]
+
+        # Extract in correct render layer
+        layer = instance.data.get("renderlayer", "defaultRenderLayer")
+        with lib.renderlayer(layer):
+            # TODO: Ensure membership edits don't become renderlayer overrides
+            with lib.empty_sets(sets, force=True):
+                # To avoid Maya trying to automatically remap the file
+                # textures relative to the `workspace -directory` we force
+                # it to a fake temporary workspace. This fixes textures
+                # getting incorrectly remapped. (LKD-17, PLN-101)
+                with no_workspace_dir():
+                    with lib.attribute_values(remap):
+                        with avalon.maya.maintained_selection():
+                            cmds.select(sets, noExpand=True)
+                            cmds.file(
+                                maya_path,
+                                force=True,
+                                typ=_scene_type,
+                                exportSelected=True,
+                                preserveReferences=False,
+                                channels=True,
+                                constraints=True,
+                                expressions=True,
+                                constructionHistory=True,
+                            )
+
+        # Write the JSON data
+        self.log.info("Extract json..")
+        data = {
+            "attributes": lookdata["attributes"],
+            "relationships": relationships
+        }
+
+        with open(json_path, "w") as f:
+            json.dump(data, f)
+
+        if "files" not in instance.data:
+            instance.data["files"] = []
+        if "hardlinks" not in instance.data:
+            instance.data["hardlinks"] = []
+        if "transfers" not in instance.data:
+            instance.data["transfers"] = []
+
+        instance.data["files"].append(maya_fname)
+        instance.data["files"].append(json_fname)
+
+        if instance.data.get("representations") is None:
+            instance.data["representations"] = []
+
+        instance.data["representations"].append(
+            {
+                "name": self.scene_type,
+                "ext": self.scene_type,
+                "files": os.path.basename(maya_fname),
+                "stagingDir": os.path.dirname(maya_fname),
+            }
+        )
+        instance.data["representations"].append(
+            {
+                "name": self.look_data_type,
+                "ext": self.look_data_type,
+                "files": os.path.basename(json_fname),
+                "stagingDir": os.path.dirname(json_fname),
+            }
+        )
+
+        # Set up the resources transfers/links for the integrator
+        instance.data["transfers"].extend(transfers)
+        instance.data["hardlinks"].extend(hardlinks)
+
+        # Source hash for the textures
+        instance.data["sourceHashes"] = hashes
+
+        """
+        self.log.info("Returning colorspaces to their original values ...")
+        for attr, value in remap.items():
+            self.log.info("  - {}: {}".format(attr, value))
+            cmds.setAttr(attr, value, type="string")
+        """
+        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
+                                                          maya_path))
+
    def process_resources(self, instance, staging_dir):

        # Extract the textures to transfer, possibly convert with maketx and
        # remap the node paths to the destination path. Note that a source
        # might be included more than once amongst the resources as they could

@@ -218,7 +312,6 @@ class ExtractLook(openpype.api.Extractor):
            color_space = resource.get("color_space")

            for f in resource["files"]:
-
                files_metadata[os.path.normpath(f)] = {
                    "color_space": color_space}
                # files.update(os.path.normpath(f))

@@ -244,7 +337,7 @@ class ExtractLook(openpype.api.Extractor):
                source, mode, texture_hash = self._process_texture(
                    filepath,
                    do_maketx,
-                    staging=dir_path,
+                    staging=staging_dir,
                    linearize=linearize,
                    force=force_copy
                )

@@ -299,85 +392,13 @@ class ExtractLook(openpype.api.Extractor):

        self.log.info("Finished remapping destinations ...")

-        # Extract in correct render layer
-        layer = instance.data.get("renderlayer", "defaultRenderLayer")
-        with lib.renderlayer(layer):
-            # TODO: Ensure membership edits don't become renderlayer overrides
-            with lib.empty_sets(sets, force=True):
-                # To avoid Maya trying to automatically remap the file
-                # textures relative to the `workspace -directory` we force
-                # it to a fake temporary workspace. This fixes textures
-                # getting incorrectly remapped. (LKD-17, PLN-101)
-                with no_workspace_dir():
-                    with lib.attribute_values(remap):
-                        with avalon.maya.maintained_selection():
-                            cmds.select(sets, noExpand=True)
-                            cmds.file(
-                                maya_path,
-                                force=True,
-                                typ="mayaAscii",
-                                exportSelected=True,
-                                preserveReferences=False,
-                                channels=True,
-                                constraints=True,
-                                expressions=True,
-                                constructionHistory=True,
-                            )
-
-        # Write the JSON data
-        self.log.info("Extract json..")
-        data = {
-            "attributes": lookdata["attributes"],
-            "relationships": relationships
-        }
+        return {
+            "fileTransfers": transfers,
+            "fileHardlinks": hardlinks,
+            "fileHashes": hashes,
+            "attrRemap": remap,
+        }
-
-        with open(json_path, "w") as f:
-            json.dump(data, f)
-
-        if "files" not in instance.data:
-            instance.data["files"] = []
-        if "hardlinks" not in instance.data:
-            instance.data["hardlinks"] = []
-        if "transfers" not in instance.data:
-            instance.data["transfers"] = []
-
-        instance.data["files"].append(maya_fname)
-        instance.data["files"].append(json_fname)
-
-        instance.data["representations"] = []
-        instance.data["representations"].append(
-            {
-                "name": "ma",
-                "ext": "ma",
-                "files": os.path.basename(maya_fname),
-                "stagingDir": os.path.dirname(maya_fname),
-            }
-        )
-        instance.data["representations"].append(
-            {
-                "name": "json",
-                "ext": "json",
-                "files": os.path.basename(json_fname),
-                "stagingDir": os.path.dirname(json_fname),
-            }
-        )
-
-        # Set up the resources transfers/links for the integrator
-        instance.data["transfers"].extend(transfers)
-        instance.data["hardlinks"].extend(hardlinks)
-
-        # Source hash for the textures
-        instance.data["sourceHashes"] = hashes
-
-        """
-        self.log.info("Returning colorspaces to their original values ...")
-        for attr, value in remap.items():
-            self.log.info("  - {}: {}".format(attr, value))
-            cmds.setAttr(attr, value, type="string")
-        """
-        self.log.info("Extracted instance '%s' to: %s" % (instance.name,
-                                                          maya_path))

    def resource_destination(self, instance, filepath, do_maketx):
        """Get resource destination path.

@@ -467,3 +488,26 @@ class ExtractLook(openpype.api.Extractor):
            return converted, COPY, texture_hash

        return filepath, COPY, texture_hash


+class ExtractModelRenderSets(ExtractLook):
+    """Extract model render attribute sets as model metadata
+
+    Only extracts the render attrib sets (NO shadingEngines) alongside
+    a .json file that stores its relationships for the sets and "attribute"
+    data for the instance members.
+
+    """
+
+    label = "Model Render Sets"
+    hosts = ["maya"]
+    families = ["model"]
+    scene_type_prefix = "meta.render."
+    look_data_type = "meta.render.json"
+
+    def get_maya_scene_type(self, instance):
+        typ = super(ExtractModelRenderSets, self).get_maya_scene_type(instance)
+        # add prefix
+        self.scene_type = self.scene_type_prefix + self.scene_type
+
+        return typ
@@ -17,6 +17,7 @@ class ExtractMayaSceneRaw(openpype.api.Extractor):
    label = "Maya Scene (Raw)"
    hosts = ["maya"]
    families = ["mayaAscii",
+                "mayaScene",
                "setdress",
                "layout",
                "camerarig",
@@ -5,6 +5,8 @@ from __future__ import absolute_import
import pyblish.api
import openpype.api

+from maya import cmds
+

class SelectInvalidInstances(pyblish.api.Action):
    """Select invalid instances in Outliner."""

@@ -18,13 +20,12 @@ class SelectInvalidInstances(pyblish.api.Action):
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
-            if result["error"] is None:
-                continue
-            if result["instance"] is None:
-                continue
-            if result["instance"] in failed:
-                continue
-            if result["plugin"] != plugin:
+            if (
+                result["error"] is None
+                or result["instance"] is None
+                or result["instance"] in failed
+                or result["plugin"] != plugin
+            ):
                continue

            failed.append(result["instance"])

@@ -44,25 +45,10 @@ class SelectInvalidInstances(pyblish.api.Action):
            self.deselect()

    def select(self, instances):
-        if "nuke" in pyblish.api.registered_hosts():
-            import avalon.nuke.lib
-            import nuke
-            avalon.nuke.lib.select_nodes(
-                [nuke.toNode(str(x)) for x in instances]
-            )
-
-        if "maya" in pyblish.api.registered_hosts():
-            from maya import cmds
-            cmds.select(instances, replace=True, noExpand=True)
+        cmds.select(instances, replace=True, noExpand=True)

    def deselect(self):
-        if "nuke" in pyblish.api.registered_hosts():
-            import avalon.nuke.lib
-            avalon.nuke.lib.reset_selection()
-
-        if "maya" in pyblish.api.registered_hosts():
-            from maya import cmds
-            cmds.select(deselect=True)
+        cmds.select(deselect=True)


class RepairSelectInvalidInstances(pyblish.api.Action):

@@ -92,23 +78,14 @@ class RepairSelectInvalidInstances(pyblish.api.Action):

        context_asset = context.data["assetEntity"]["name"]
        for instance in instances:
-            if "nuke" in pyblish.api.registered_hosts():
-                import openpype.hosts.nuke.api as nuke_api
-                origin_node = instance[0]
-                nuke_api.lib.recreate_instance(
-                    origin_node, avalon_data={"asset": context_asset}
-                )
-            else:
-                self.set_attribute(instance, context_asset)
+            self.set_attribute(instance, context_asset)

    def set_attribute(self, instance, context_asset):
-        if "maya" in pyblish.api.registered_hosts():
-            from maya import cmds
-            cmds.setAttr(
-                instance.data.get("name") + ".asset",
-                context_asset,
-                type="string"
-            )
+        cmds.setAttr(
+            instance.data.get("name") + ".asset",
+            context_asset,
+            type="string"
+        )


class ValidateInstanceInContext(pyblish.api.InstancePlugin):

@@ -124,7 +101,7 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin):
    order = openpype.api.ValidateContentsOrder
    label = "Instance in same Context"
    optional = True
-    hosts = ["maya", "nuke"]
+    hosts = ["maya"]
    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]

    def process(self, instance):
@@ -0,0 +1,47 @@
import pyblish.api
import maya.cmds as cmds
import openpype.api
import os


class ValidateLoadedPlugin(pyblish.api.ContextPlugin):
    """Ensure there are no unauthorized loaded plugins"""

    label = "Loaded Plugin"
    order = pyblish.api.ValidatorOrder
    host = ["maya"]
    actions = [openpype.api.RepairContextAction]

    @classmethod
    def get_invalid(cls, context):

        invalid = []
        loaded_plugin = cmds.pluginInfo(query=True, listPlugins=True)
        # get variable from OpenPype settings
        whitelist_native_plugins = cls.whitelist_native_plugins
        authorized_plugins = cls.authorized_plugins or []

        for plugin in loaded_plugin:
            if not whitelist_native_plugins and os.getenv('MAYA_LOCATION') \
                    in cmds.pluginInfo(plugin, query=True, path=True):
                continue
            if plugin not in authorized_plugins:
                invalid.append(plugin)

        return invalid

    def process(self, context):

        invalid = self.get_invalid(context)
        if invalid:
            raise RuntimeError(
                "Found forbidden plugin name: {}".format(", ".join(invalid))
            )

    @classmethod
    def repair(cls, context):
        """Unload forbidden plugins"""

        for plugin in cls.get_invalid(context):
            cmds.pluginInfo(plugin, edit=True, autoload=False)
            cmds.unloadPlugin(plugin, force=True)
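The whitelist check above keys off the plugin's on-disk location: anything living under MAYA_LOCATION may be ignored wholesale, everything else must appear in `authorized_plugins`. A minimal sketch of the same filtering logic outside Maya (plugin names, paths and settings values here are hypothetical, standing in for the `cmds.pluginInfo` queries):

    import os

    loaded = {
        "mtoa": "/opt/arnold/plug-ins/mtoa.so",  # hypothetical paths
        "fbxmaya": "/opt/autodesk/maya2022/plug-ins/fbxmaya.so",
    }
    maya_location = "/opt/autodesk/maya2022"  # stands in for os.getenv('MAYA_LOCATION')
    whitelist_native_plugins = False
    authorized_plugins = ["mtoa"]

    invalid = [
        name for name, path in loaded.items()
        # native plugins are skipped unless whitelisting is enforced
        if not (not whitelist_native_plugins and maya_location in path)
        and name not in authorized_plugins
    ]
    print(invalid)  # -> [] (fbxmaya skipped as native, mtoa authorized)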
@@ -76,7 +76,7 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
        r'%a|<aov>|<renderpass>', re.IGNORECASE)
    R_LAYER_TOKEN = re.compile(
        r'%l|<layer>|<renderlayer>', re.IGNORECASE)
    R_CAMERA_TOKEN = re.compile(r'%c|<camera>', re.IGNORECASE)
    R_CAMERA_TOKEN = re.compile(r'%c|<Camera>')
    R_SCENE_TOKEN = re.compile(r'%s|<scene>', re.IGNORECASE)

    DEFAULT_PADDING = 4

@@ -126,7 +126,9 @@ class ValidateRenderSettings(pyblish.api.InstancePlugin):
        if len(cameras) > 1 and not re.search(cls.R_CAMERA_TOKEN, prefix):
            invalid = True
            cls.log.error("Wrong image prefix [ {} ] - "
                          "doesn't have: '<camera>' token".format(prefix))
                          "doesn't have: '<Camera>' token".format(prefix))
            cls.log.error(
                "Note that it needs to have capital 'C' at the beginning")

        # renderer specific checks
        if renderer == "vray":
@@ -0,0 +1,25 @@

import pyblish.api
import openpype.api


class ValidateSetdressRoot(pyblish.api.InstancePlugin):
    """Validate that the setdress top root node is part of the publish."""

    order = openpype.api.ValidateContentsOrder
    label = "SetDress Root"
    hosts = ["maya"]
    families = ["setdress"]

    def process(self, instance):
        from maya import cmds

        if instance.data.get("exactSetMembersOnly"):
            return

        set_member = instance.data["setMembers"]
        root = cmds.ls(set_member, assemblies=True, long=True)

        if not root or root[0] not in set_member:
            raise Exception("Setdress top root node is not being published.")
@@ -21,6 +21,7 @@ def add_implementation_envs(env, _app):
        new_nuke_paths.append(norm_path)

    env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths)
    env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)

    # Try to add QuickTime to PATH
    quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem"
@@ -7,7 +7,7 @@ from collections import OrderedDict

from avalon import api, io, lib
from avalon.tools import workfiles
from openpype.tools import workfiles
import avalon.nuke
from avalon.nuke import lib as anlib
from avalon.nuke import (
@@ -287,15 +287,16 @@ def script_name():

def add_button_write_to_read(node):
    name = "createReadNode"
    label = "Cread Read From Rendered"
    value = "import write_to_read;write_to_read.write_to_read(nuke.thisNode())"
    label = "Create Read From Rendered"
    value = "import write_to_read;\
        write_to_read.write_to_read(nuke.thisNode(), allow_relative=False)"
    knob = nuke.PyScript_Knob(name, label, value)
    knob.clearFlag(nuke.STARTLINE)
    node.addKnob(knob)


def create_write_node(name, data, input=None, prenodes=None,
                      review=True, linked_knobs=None):
                      review=True, linked_knobs=None, farm=True):
    ''' Creating write node which is group node

    Arguments:
@@ -421,7 +422,15 @@
                ))
                continue

            if knob and value:
            if not knob and not value:
                continue

            log.info((knob, value))

            if isinstance(value, str):
                if "[" in value:
                    now_node[knob].setExpression(value)
                else:
                    now_node[knob].setValue(value)

    # connect to previous node
@@ -466,7 +475,7 @@
    # imprinting group node
    anlib.set_avalon_knob_data(GN, data["avalon"])
    anlib.add_publish_knob(GN)
    add_rendering_knobs(GN)
    add_rendering_knobs(GN, farm)

    if review:
        add_review_knob(GN)
@@ -526,7 +535,7 @@
    return GN


def add_rendering_knobs(node):
def add_rendering_knobs(node, farm=True):
    ''' Adds additional rendering knobs to given node

    Arguments:

@@ -535,9 +544,13 @@ def add_rendering_knobs(node):
    Return:
        node (obj): with added knobs
    '''
    knob_options = [
        "Use existing frames", "Local"]
    if farm:
        knob_options.append("On farm")

    if "render" not in node.knobs():
        knob = nuke.Enumeration_Knob("render", "", [
            "Use existing frames", "Local", "On farm"])
        knob = nuke.Enumeration_Knob("render", "", knob_options)
        knob.clearFlag(nuke.STARTLINE)
        node.addKnob(knob)
    return node
@@ -727,7 +740,7 @@ class WorkfileSettings(object):
            log.error(msg)
            nuke.message(msg)

        log.warning(">> root_dict: {}".format(root_dict))
        log.debug(">> root_dict: {}".format(root_dict))

        # first set OCIO
        if self._root_node["colorManagement"].value() \
@@ -1277,6 +1290,7 @@ class ExporterReview:
    def clean_nodes(self):
        for node in self._temp_nodes:
            nuke.delete(node)
        self._temp_nodes = []
        self.log.info("Deleted nodes...")
@@ -1301,6 +1315,7 @@ class ExporterReviewLut(ExporterReview):
                 lut_style=None):
        # initialize parent class
        ExporterReview.__init__(self, klass, instance)
        self._temp_nodes = []

        # deal with now lut defined in viewer lut
        if hasattr(klass, "viewer_lut_raw"):
141 openpype/hosts/nuke/plugins/create/create_write_still.py Normal file
@@ -0,0 +1,141 @@
from collections import OrderedDict
from openpype.hosts.nuke.api import (
    plugin,
    lib)
import nuke


class CreateWriteStill(plugin.PypeCreator):
    # change this to template preset
    name = "WriteStillFrame"
    label = "Create Write Still Image"
    hosts = ["nuke"]
    n_class = "Write"
    family = "still"
    icon = "image"
    defaults = [
        "ImageFrame{:0>4}".format(nuke.frame()),
        "MPFrame{:0>4}".format(nuke.frame()),
        "LayoutFrame{:0>4}".format(nuke.frame())
    ]

    def __init__(self, *args, **kwargs):
        super(CreateWriteStill, self).__init__(*args, **kwargs)

        data = OrderedDict()

        data["family"] = self.family
        data["families"] = self.n_class

        for k, v in self.data.items():
            if k not in data.keys():
                data.update({k: v})

        self.data = data
        self.nodes = nuke.selectedNodes()
        self.log.debug("_ self.data: '{}'".format(self.data))

    def process(self):

        inputs = []
        outputs = []
        instance = nuke.toNode(self.data["subset"])
        selected_node = None

        # use selection
        if (self.options or {}).get("useSelection"):
            nodes = self.nodes

            if not (len(nodes) < 2):
                msg = ("Select only one node. "
                       "The node you want to connect to, "
                       "or tick off `Use selection`")
                self.log.error(msg)
                nuke.message(msg)
                return

            if len(nodes) == 0:
                msg = (
                    "No nodes selected. Please select a single node to connect"
                    " to or tick off `Use selection`"
                )
                self.log.error(msg)
                nuke.message(msg)
                return

            selected_node = nodes[0]
            inputs = [selected_node]
            outputs = selected_node.dependent()

            if instance:
                if (instance.name() in selected_node.name()):
                    selected_node = instance.dependencies()[0]

        # if node already exists
        if instance:
            # collect input / outputs
            inputs = instance.dependencies()
            outputs = instance.dependent()
            selected_node = inputs[0]
            # remove old one
            nuke.delete(instance)

        # recreate new
        write_data = {
            "nodeclass": self.n_class,
            "families": [self.family],
            "avalon": self.data
        }

        # add creator data
        creator_data = {"creator": self.__class__.__name__}
        self.data.update(creator_data)
        write_data.update(creator_data)

        self.log.info("Adding template path from plugin")
        write_data.update({
            "fpath_template": (
                "{work}/renders/nuke/{subset}/{subset}.{ext}")})

        _prenodes = [
            {
                "name": "FrameHold01",
                "class": "FrameHold",
                "knobs": [
                    ("first_frame", nuke.frame())
                ],
                "dependent": None
            }
        ]

        write_node = lib.create_write_node(
            self.name,
            write_data,
            input=selected_node,
            review=False,
            prenodes=_prenodes,
            farm=False,
            linked_knobs=["channels", "___", "first", "last", "use_limit"])

        # relinking to collected connections
        for i, input in enumerate(inputs):
            write_node.setInput(i, input)

        write_node.autoplace()

        for output in outputs:
            output.setInput(0, write_node)

        # link frame hold to group node
        write_node.begin()
        for n in nuke.allNodes():
            # get write node
            if n.Class() in "Write":
                w_node = n
        write_node.end()

        w_node["use_limit"].setValue(True)
        w_node["first"].setValue(nuke.frame())
        w_node["last"].setValue(nuke.frame())

        return write_node
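One subtlety in `CreateWriteStill` above: the `defaults` list is evaluated at class-definition time, so the suggested subset names embed whatever frame the playhead was on when the creator was built. For example, with the playhead at frame 25:

    >>> "ImageFrame{:0>4}".format(25)
    'ImageFrame0025'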
@@ -13,7 +13,7 @@ class LoadImage(api.Loader):
    """Load still image into Nuke"""

    families = ["render", "source", "plate", "review", "image"]
    representations = ["exr", "dpx", "jpg", "jpeg", "png", "psd"]
    representations = ["exr", "dpx", "jpg", "jpeg", "png", "psd", "tiff"]

    label = "Load Image"
    order = -10
@@ -76,6 +76,8 @@ class LoadSequence(api.Loader):
        file = file.replace("\\", "/")

        repr_cont = context["representation"]["context"]
        assert repr_cont.get("frame"), "Representation is not sequence"

        if "#" not in file:
            frame = repr_cont.get("frame")
            if frame:
@@ -170,6 +172,7 @@ class LoadSequence(api.Loader):
        assert read_node.Class() == "Read", "Must be Read"

        repr_cont = representation["context"]
        assert repr_cont.get("frame"), "Representation is not sequence"

        file = api.get_representation_path(representation)
@@ -2,6 +2,7 @@ import nuke
import pyblish.api
from avalon.nuke import maintained_selection


class CreateOutputNode(pyblish.api.ContextPlugin):
    """Adding output node for each output write node
    So when the user later wants to load the .nk as a LiveGroup or Precomp
@@ -15,8 +16,8 @@ class CreateOutputNode(pyblish.api.ContextPlugin):
    def process(self, context):
        # capture selection state
        with maintained_selection():
            active_node = [node for inst in context[:]
                           for node in inst[:]
            active_node = [node for inst in context
                           for node in inst
                           if "ak:family" in node.knobs()]

            if active_node:
@@ -17,7 +17,7 @@ class NukeRenderLocal(openpype.api.Extractor):
    order = pyblish.api.ExtractorOrder
    label = "Render Local"
    hosts = ["nuke"]
    families = ["render.local", "prerender.local"]
    families = ["render.local", "prerender.local", "still.local"]

    def process(self, instance):
        families = instance.data["families"]
@@ -66,13 +66,23 @@ class NukeRenderLocal(openpype.api.Extractor):
        instance.data["representations"] = []

        collected_frames = os.listdir(out_dir)
        repre = {
            'name': ext,
            'ext': ext,
            'frameStart': "%0{}d".format(len(str(last_frame))) % first_frame,
            'files': collected_frames,
            "stagingDir": out_dir
        }

        if len(collected_frames) == 1:
            repre = {
                'name': ext,
                'ext': ext,
                'files': collected_frames.pop(),
                "stagingDir": out_dir
            }
        else:
            repre = {
                'name': ext,
                'ext': ext,
                'frameStart': "%0{}d".format(
                    len(str(last_frame))) % first_frame,
                'files': collected_frames,
                "stagingDir": out_dir
            }
        instance.data["representations"].append(repre)

        self.log.info("Extracted instance '{0}' to: {1}".format(
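Concretely, a single rendered frame now publishes `files` as a plain string, while a sequence keeps the list plus a zero-padded `frameStart`. With hypothetical file names and a 1001-1101 range the two branches produce:

    # len(collected_frames) == 1
    {'name': 'exr', 'ext': 'exr',
     'files': 'still.1001.exr', 'stagingDir': out_dir}

    # sequence: "%0{}d".format(len(str(1101))) % 1001 -> "1001"
    {'name': 'exr', 'ext': 'exr', 'frameStart': '1001',
     'files': ['shot.1001.exr', ..., 'shot.1101.exr'],
     'stagingDir': out_dir}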
@@ -89,6 +99,9 @@ class NukeRenderLocal(openpype.api.Extractor):
            instance.data['family'] = 'prerender'
            families.remove('prerender.local')
            families.insert(0, "prerender")
        elif "still.local" in families:
            instance.data['family'] = 'image'
            families.remove('still.local')
        instance.data["families"] = families

        collections, remainder = clique.assemble(collected_frames)
@@ -3,6 +3,12 @@ import pyblish.api
from avalon.nuke import lib as anlib
from openpype.hosts.nuke.api import lib as pnlib
import openpype

try:
    from __builtin__ import reload
except ImportError:
    from importlib import reload

reload(pnlib)
@@ -4,6 +4,13 @@ from avalon.nuke import lib as anlib
from openpype.hosts.nuke.api import lib as pnlib
import openpype

try:
    from __builtin__ import reload
except ImportError:
    from importlib import reload

reload(pnlib)


class ExtractReviewDataMov(openpype.api.Extractor):
    """Extracts movie and thumbnail with baked in luts
@@ -1,3 +1,4 @@
import sys
import os
import nuke
from avalon.nuke import lib as anlib
@@ -5,6 +6,10 @@ import pyblish.api
import openpype


if sys.version_info[0] >= 3:
    unicode = str


class ExtractThumbnail(openpype.api.Extractor):
    """Extracts movie and thumbnail with baked in luts
@@ -112,24 +117,26 @@ class ExtractThumbnail(openpype.api.Extractor):

        # create write node
        write_node = nuke.createNode("Write")
        file = fhead + "jpeg"
        file = fhead + "jpg"
        name = "thumbnail"
        path = os.path.join(staging_dir, file).replace("\\", "/")
        instance.data["thumbnail"] = path
        write_node["file"].setValue(path)
        write_node["file_type"].setValue("jpeg")
        write_node["file_type"].setValue("jpg")
        write_node["raw"].setValue(1)
        write_node.setInput(0, previous_node)
        temporary_nodes.append(write_node)
        tags = ["thumbnail", "publish_on_farm"]

        # retime for
        mid_frame = int((int(last_frame) - int(first_frame)) / 2) \
            + int(first_frame)
        first_frame = int(last_frame) / 2
        last_frame = int(last_frame) / 2

        repre = {
            'name': name,
            'ext': "jpeg",
            'ext': "jpg",
            "outputName": "thumb",
            'files': file,
            "stagingDir": staging_dir,
@@ -140,7 +147,7 @@ class ExtractThumbnail(openpype.api.Extractor):
        instance.data["representations"].append(repre)

        # Render frames
        nuke.execute(write_node.name(), int(first_frame), int(last_frame))
        nuke.execute(write_node.name(), int(mid_frame), int(mid_frame))

        self.log.debug(
            "representations: {}".format(instance.data["representations"]))
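The retime above matters for asymmetric ranges: with first_frame=1001 and last_frame=1101 the old expression rendered frame int(1101 / 2) = 550, which lies outside the shot range entirely, while mid_frame = int((1101 - 1001) / 2) + 1001 = 1051 samples the true middle of the clip.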
@@ -9,7 +9,7 @@ class IncrementScriptVersion(pyblish.api.ContextPlugin):
    order = pyblish.api.IntegratorOrder + 0.9
    label = "Increment Script Version"
    optional = True
    families = ["workfile", "render", "render.local", "render.farm"]
    families = ["workfile"]
    hosts = ['nuke']

    def process(self, context):
@@ -8,12 +8,12 @@ from avalon.nuke import lib as anlib
class PreCollectNukeInstances(pyblish.api.ContextPlugin):
    """Collect all nodes with Avalon knob."""

    order = pyblish.api.CollectorOrder - 0.59
    order = pyblish.api.CollectorOrder - 0.49
    label = "Pre-collect Instances"
    hosts = ["nuke", "nukeassist"]

    # presets
    sync_workfile_version = False
    sync_workfile_version_on_families = []

    def process(self, context):
        asset_data = io.find_one({
@@ -120,11 +120,12 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin):
            # sync workfile version
            _families_test = [family] + families
            self.log.debug("__ _families_test: `{}`".format(_families_test))
            if not next((f for f in _families_test
                         if "prerender" in f),
                        None) and self.sync_workfile_version:
                # get version to instance for integration
                instance.data['version'] = instance.context.data['version']
            for family_test in _families_test:
                if family_test in self.sync_workfile_version_on_families:
                    self.log.debug("Syncing version with workfile for '{}'"
                                   .format(family_test))
                    # get version to instance for integration
                    instance.data['version'] = instance.context.data['version']

            instance.data.update({
                "subset": subset,
@@ -3,13 +3,12 @@ import pyblish.api
import os
import openpype.api as pype
from avalon.nuke import lib as anlib
reload(anlib)


class CollectWorkfile(pyblish.api.ContextPlugin):
    """Collect current script for publish."""

    order = pyblish.api.CollectorOrder - 0.60
    order = pyblish.api.CollectorOrder - 0.50
    label = "Pre-collect Workfile"
    hosts = ['nuke']
@@ -11,7 +11,7 @@ from avalon import io, api
class CollectNukeWrites(pyblish.api.InstancePlugin):
    """Collect all write nodes."""

    order = pyblish.api.CollectorOrder - 0.58
    order = pyblish.api.CollectorOrder - 0.48
    label = "Pre-collect Writes"
    hosts = ["nuke", "nukeassist"]
    families = ["write"]
@@ -64,7 +64,7 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
        )

        if [fm for fm in _families_test
                if fm in ["render", "prerender"]]:
                if fm in ["render", "prerender", "still"]]:
            if "representations" not in instance.data:
                instance.data["representations"] = list()
@@ -100,7 +100,13 @@ class CollectNukeWrites(pyblish.api.InstancePlugin):
                    frame_start_str, frame_slate_str)
                collected_frames.insert(0, slate_frame)

            representation['files'] = collected_frames
            if collected_frames_len == 1:
                representation['files'] = collected_frames.pop()
                if "still" in _families_test:
                    instance.data['family'] = 'image'
                    instance.data["families"].remove('still')
            else:
                representation['files'] = collected_frames
            instance.data["representations"].append(representation)
        except Exception:
            instance.data["representations"].append(representation)
@@ -0,0 +1,110 @@
# -*- coding: utf-8 -*-
"""Validate if instance asset is the same as context asset."""
from __future__ import absolute_import

import nuke

import pyblish.api
import openpype.api
import avalon.nuke.lib
import openpype.hosts.nuke.api as nuke_api


class SelectInvalidInstances(pyblish.api.Action):
    """Select invalid instances in Outliner."""

    label = "Select Instances"
    icon = "briefcase"
    on = "failed"

    def process(self, context, plugin):
        """Process invalid validators and select invalid instances."""
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (
                result["error"] is None
                or result["instance"] is None
                or result["instance"] in failed
                or result["plugin"] != plugin
            ):
                continue

            failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        if instances:
            self.log.info(
                "Selecting invalid nodes: %s" % ", ".join(
                    [str(x) for x in instances]
                )
            )
            self.select(instances)
        else:
            self.log.info("No invalid nodes found.")
            self.deselect()

    def select(self, instances):
        avalon.nuke.lib.select_nodes(
            [nuke.toNode(str(x)) for x in instances]
        )

    def deselect(self):
        avalon.nuke.lib.reset_selection()


class RepairSelectInvalidInstances(pyblish.api.Action):
    """Repair the instance asset."""

    label = "Repair"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        # Get the errored instances
        failed = []
        for result in context.data["results"]:
            if (
                result["error"] is None
                or result["instance"] is None
                or result["instance"] in failed
                or result["plugin"] != plugin
            ):
                continue

            failed.append(result["instance"])

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(failed, plugin)

        context_asset = context.data["assetEntity"]["name"]
        for instance in instances:
            origin_node = instance[0]
            nuke_api.lib.recreate_instance(
                origin_node, avalon_data={"asset": context_asset}
            )


class ValidateInstanceInContext(pyblish.api.InstancePlugin):
    """Validator to check if instance asset match context asset.

    When working in per-shot style you always publish data in context of
    current asset (shot). This validator checks if this is so. It is optional
    so it can be disabled when needed.

    Action on this validator will select invalid instances in Outliner.
    """

    order = openpype.api.ValidateContentsOrder
    label = "Instance in same Context"
    hosts = ["nuke"]
    actions = [SelectInvalidInstances, RepairSelectInvalidInstances]
    optional = True

    def process(self, instance):
        asset = instance.data.get("asset")
        context_asset = instance.context.data["assetEntity"]["name"]
        msg = "{} has asset {}".format(instance.name, asset)
        assert asset == context_asset, msg
33 openpype/hosts/nuke/plugins/publish/validate_proxy_mode.py Normal file
@@ -0,0 +1,33 @@
import pyblish
import nuke


class FixProxyMode(pyblish.api.Action):
    """Toggle the root proxy switch OFF."""

    label = "Proxy toggle to OFF"
    icon = "wrench"
    on = "failed"

    def process(self, context, plugin):
        rootNode = nuke.root()
        rootNode["proxy"].setValue(False)


@pyblish.api.log
class ValidateProxyMode(pyblish.api.ContextPlugin):
    """Validate active proxy mode"""

    order = pyblish.api.ValidatorOrder
    label = "Validate Proxy Mode"
    hosts = ["nuke"]
    actions = [FixProxyMode]

    def process(self, context):

        rootNode = nuke.root()
        isProxy = rootNode["proxy"].value()

        assert not isProxy, "Proxy mode should be toggled OFF"
@@ -55,7 +55,7 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
    """ Validates file output. """

    order = pyblish.api.ValidatorOrder + 0.1
    families = ["render", "prerender"]
    families = ["render", "prerender", "still"]

    label = "Validate rendered frame"
    hosts = ["nuke", "nukestudio"]
@@ -71,6 +71,9 @@ class ValidateRenderedFrames(pyblish.api.InstancePlugin):
            self.log.error(msg)
            raise ValidationException(msg)

        if isinstance(repre["files"], str):
            return

        collections, remainder = clique.assemble(repre["files"])
        self.log.info("collections: {}".format(str(collections)))
        self.log.info("remainder: {}".format(str(remainder)))
@@ -9,7 +9,9 @@ SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v',
                       'm2v']


def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):
def evaluate_filepath_new(
        k_value, k_eval, project_dir, first_frame, allow_relative):

    # get combined relative path
    combined_relative_path = None
    if k_eval is not None and project_dir is not None:
@@ -26,8 +28,9 @@ def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):
        combined_relative_path = None

    try:
        k_value = k_value % first_frame
        if os.path.exists(k_value):
        # k_value = k_value % first_frame
        if os.path.isdir(os.path.basename(k_value)):
            # doesn't check for file, only parent dir
            filepath = k_value
        elif os.path.exists(k_eval):
            filepath = k_eval
@@ -37,10 +40,12 @@ def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):

        filepath = os.path.abspath(filepath)
    except Exception as E:
        log.error("Cannot create Read node. Perhaps it needs to be rendered first :) Error: `{}`".format(E))
        log.error("Cannot create Read node. Perhaps it needs to be \
            rendered first :) Error: `{}`".format(E))
        return None

    filepath = filepath.replace('\\', '/')
    # assumes last number is a sequence counter
    current_frame = re.findall(r'\d+', filepath)[-1]
    padding = len(current_frame)
    basename = filepath[: filepath.rfind(current_frame)]
@@ -51,11 +56,13 @@ def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):
        pass
    else:
        # Image sequence needs hashes
        # TODO: stills with no number are not handled
        filepath = basename + '#' * padding + '.' + filetype

    # relative path? make it relative again
    if not isinstance(project_dir, type(None)):
        filepath = filepath.replace(project_dir, '.')
    if allow_relative:
        if (not isinstance(project_dir, type(None))) and project_dir != "":
            filepath = filepath.replace(project_dir, '.')

    # get first and last frame from disk
    frames = []
@@ -69,7 +76,8 @@ def evaluate_filepath_new(k_value, k_eval, project_dir, first_frame):
    frames = sorted(frames)
    firstframe = frames[0]
    lastframe = frames[len(frames) - 1]
    if lastframe < 0:

    if int(lastframe) < 0:
        lastframe = firstframe

    return filepath, firstframe, lastframe
@@ -94,41 +102,40 @@ def create_read_node(ndata, comp_start):
        return


def write_to_read(gn):
def write_to_read(gn,
                  allow_relative=False):

    comp_start = nuke.Root().knob('first_frame').value()
    comp_end = nuke.Root().knob('last_frame').value()
    project_dir = nuke.Root().knob('project_directory').getValue()
    if not os.path.exists(project_dir):
        project_dir = nuke.Root().knob('project_directory').evaluate()

    group_read_nodes = []

    with gn:
        height = gn.screenHeight()  # get group height and position
        new_xpos = int(gn.knob('xpos').value())
        new_ypos = int(gn.knob('ypos').value()) + height + 20
        group_writes = [n for n in nuke.allNodes() if n.Class() == "Write"]
        print("__ group_writes: {}".format(group_writes))
        if group_writes != []:
            # there can be only 1 write node, taking first
            n = group_writes[0]

            if n.knob('file') is not None:
                file_path_new = evaluate_filepath_new(
                myfile, firstFrame, lastFrame = evaluate_filepath_new(
                    n.knob('file').getValue(),
                    n.knob('file').evaluate(),
                    project_dir,
                    comp_start
                    comp_start,
                    allow_relative
                )
                if not file_path_new:
                if not myfile:
                    return

                myfiletranslated, firstFrame, lastFrame = file_path_new
                # get node data
                ndata = {
                    'filepath': myfiletranslated,
                    'firstframe': firstFrame,
                    'lastframe': lastFrame,
                    'filepath': myfile,
                    'firstframe': int(firstFrame),
                    'lastframe': int(lastFrame),
                    'new_xpos': new_xpos,
                    'new_ypos': new_ypos,
                    'colorspace': n.knob('colorspace').getValue(),

@@ -138,7 +145,6 @@ def write_to_read(gn):
                }
                group_read_nodes.append(ndata)

    # create reads in one go
    for oneread in group_read_nodes:
        # create read node
@@ -60,7 +60,8 @@ class ExtractReview(openpype.api.Extractor):
        # Generate thumbnail.
        thumbnail_path = os.path.join(staging_dir, "thumbnail.jpg")
        args = [
            "{}".format(ffmpeg_path), "-y",
            ffmpeg_path,
            "-y",
            "-i", output_image_path,
            "-vf", "scale=300:-1",
            "-vframes", "1",
@@ -78,7 +79,8 @@ class ExtractReview(openpype.api.Extractor):
        # Generate mov.
        mov_path = os.path.join(staging_dir, "review.mov")
        args = [
            ffmpeg_path, "-y",
            ffmpeg_path,
            "-y",
            "-i", output_image_path,
            "-vf", "pad=ceil(iw/2)*2:ceil(ih/2)*2",
            "-vframes", "1",
@@ -10,11 +10,13 @@ from .pipeline import (

from avalon.tools import (
    creator,
    loader,
    sceneinventory,
    libraryloader,
    subsetmanager
)
from openpype.tools import (
    loader,
    libraryloader,
)


def load_stylesheet():
@@ -4,7 +4,7 @@ Basic avalon integration
import os
import contextlib
from collections import OrderedDict
from avalon.tools import workfiles
from openpype.tools import workfiles
from avalon import api as avalon
from avalon import schema
from avalon.pipeline import AVALON_CONTAINER_ID
@@ -8,7 +8,7 @@ from pprint import pformat
class PrecollectInstances(pyblish.api.ContextPlugin):
    """Collect all Track items selection."""

    order = pyblish.api.CollectorOrder - 0.59
    order = pyblish.api.CollectorOrder - 0.49
    label = "Precollect Instances"
    hosts = ["resolve"]
@@ -13,7 +13,7 @@ class PrecollectWorkfile(pyblish.api.ContextPlugin):
    """Precollect the current working file into context"""

    label = "Precollect Workfile"
    order = pyblish.api.CollectorOrder - 0.6
    order = pyblish.api.CollectorOrder - 0.5

    def process(self, context):
@@ -11,6 +11,7 @@ import zipfile
import pyblish.api
from avalon import api, io
import openpype.api
from openpype.lib import get_workfile_template_key_from_context


class ExtractHarmonyZip(openpype.api.Extractor):
@@ -65,10 +66,10 @@ class ExtractHarmonyZip(openpype.api.Extractor):

        # Get Task types and Statuses for creation if needed
        self.task_types = self._get_all_task_types(project_entity)
        self.task_statuses = self.get_all_task_statuses(project_entity)
        self.task_statuses = self._get_all_task_statuses(project_entity)

        # Get Statuses of AssetVersions
        self.assetversion_statuses = self.get_all_assetversion_statuses(
        self.assetversion_statuses = self._get_all_assetversion_statuses(
            project_entity
        )
@@ -233,18 +234,28 @@ class ExtractHarmonyZip(openpype.api.Extractor):
            "version": 1,
            "ext": "zip",
        }
        host_name = "harmony"
        template_name = get_workfile_template_key_from_context(
            instance.data["asset"],
            instance.data.get("task"),
            host_name,
            project_name=project_entity["name"],
            dbcon=io
        )

        # Get a valid work filename first with version 1
        file_template = anatomy.templates["work"]["file"]
        file_template = anatomy.templates[template_name]["file"]
        anatomy_filled = anatomy.format(data)
        work_path = anatomy_filled["work"]["path"]
        work_path = anatomy_filled[template_name]["path"]

        # Get the final work filename with the proper version
        data["version"] = api.last_workfile_with_version(
            os.path.dirname(work_path), file_template, data, [".zip"]
            os.path.dirname(work_path),
            file_template,
            data,
            api.HOST_WORKFILE_EXTENSIONS[host_name]
        )[1]

        work_path = anatomy_filled["work"]["path"]
        base_name = os.path.splitext(os.path.basename(work_path))[0]

        staging_work_path = os.path.join(os.path.dirname(staging_scene),
@@ -58,7 +58,7 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
            # use first frame as thumbnail if is sequence of jpegs
            full_thumbnail_path = os.path.join(
                thumbnail_repre["stagingDir"], file
                )
            )
            self.log.info(
                "For thumbnail is used file: {}".format(full_thumbnail_path)
            )
@@ -116,7 +116,7 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):

        # create new thumbnail representation
        representation = {
            'name': 'jpg',
            'name': 'thumbnail',
            'ext': 'jpg',
            'files': filename,
            "stagingDir": staging_dir,
@@ -59,32 +59,35 @@ class ExtractTrimVideoAudio(openpype.api.Extractor):
            if "trimming" not in fml
        ]

        args = [
            f"\"{ffmpeg_path}\"",
        ffmpeg_args = [
            ffmpeg_path,
            "-ss", str(start / fps),
            "-i", f"\"{video_file_path}\"",
            "-i", video_file_path,
            "-t", str(dur / fps)
        ]
        if ext in [".mov", ".mp4"]:
            args.extend([
            ffmpeg_args.extend([
                "-crf", "18",
                "-pix_fmt", "yuv420p"])
                "-pix_fmt", "yuv420p"
            ])
        elif ext in ".wav":
            args.extend([
                "-vn -acodec pcm_s16le",
                "-ar 48000 -ac 2"
            ffmpeg_args.extend([
                "-vn",
                "-acodec", "pcm_s16le",
                "-ar", "48000",
                "-ac", "2"
            ])

        # add output path
        args.append(f"\"{clip_trimed_path}\"")
        ffmpeg_args.append(clip_trimed_path)

        self.log.info(f"Processing: {args}")
        ffmpeg_args = " ".join(args)
        joined_args = " ".join(ffmpeg_args)
        self.log.info(f"Processing: {joined_args}")
        openpype.api.run_subprocess(
            ffmpeg_args, shell=True, logger=self.log
            ffmpeg_args, logger=self.log
        )

        repr = {
        repre = {
            "name": ext[1:],
            "ext": ext[1:],
            "files": os.path.basename(clip_trimed_path),
@@ -97,10 +100,10 @@ class ExtractTrimVideoAudio(openpype.api.Extractor):
        }

        if ext in [".mov", ".mp4"]:
            repr.update({
            repre.update({
                "thumbnail": True,
                "tags": ["review", "ftrackreview", "delete"]})

        instance.data["representations"].append(repr)
        instance.data["representations"].append(repre)

        self.log.debug(f"Instance data: {pformat(instance.data)}")
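Building `ffmpeg_args` as a list and dropping `shell=True` means no manual quoting is needed and paths containing spaces survive intact. For a hypothetical .wav trim (the path and timing values below are made up) the argument vector is equivalent to running:

    ffmpeg -ss 41.7083 -i /footage/clip.mov -t 2.0 \
        -vn -acodec pcm_s16le -ar 48000 -ac 2 /staging/clip.wav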
@@ -1,6 +1,8 @@
import os
import logging

import requests

import avalon.api
import pyblish.api
from avalon.tvpaint import pipeline
@@ -8,6 +10,7 @@ from avalon.tvpaint.communication_server import register_localization_file
from .lib import set_context_settings

from openpype.hosts import tvpaint
from openpype.api import get_current_project_settings

log = logging.getLogger(__name__)
@@ -51,6 +54,19 @@ def initial_launch():
    set_context_settings()


def application_exit():
    data = get_current_project_settings()
    stop_timer = data["tvpaint"]["stop_timer_on_application_exit"]

    if not stop_timer:
        return

    # Stop application timer.
    webserver_url = os.environ.get("OPENPYPE_WEBSERVER_URL")
    rest_api_url = "{}/timers_manager/stop_timer".format(webserver_url)
    requests.post(rest_api_url)


def install():
    log.info("OpenPype - Installing TVPaint integration")
    localization_file = os.path.join(HOST_DIR, "resources", "avalon.loc")
@@ -67,6 +83,7 @@ def install():
    pyblish.api.register_callback("instanceToggled", on_instance_toggle)

    avalon.api.on("application.launched", initial_launch)
    avalon.api.on("application.exit", application_exit)


def uninstall():
@@ -3,4 +3,17 @@ from avalon.tvpaint import pipeline


class Creator(PypeCreatorMixin, pipeline.Creator):
    pass
    @classmethod
    def get_dynamic_data(cls, *args, **kwargs):
        dynamic_data = super(Creator, cls).get_dynamic_data(*args, **kwargs)

        # Change asset and name by current workfile context
        workfile_context = pipeline.get_current_workfile_context()
        asset_name = workfile_context.get("asset")
        task_name = workfile_context.get("task")
        if "asset" not in dynamic_data and asset_name:
            dynamic_data["asset"] = asset_name

        if "task" not in dynamic_data and task_name:
            dynamic_data["task"] = task_name
        return dynamic_data
@@ -9,7 +9,7 @@ from openpype.lib import get_subset_name

class CollectInstances(pyblish.api.ContextPlugin):
    label = "Collect Instances"
    order = pyblish.api.CollectorOrder - 1
    order = pyblish.api.CollectorOrder - 0.4
    hosts = ["tvpaint"]

    def process(self, context):
@@ -8,7 +8,7 @@ from openpype.lib import get_subset_name

class CollectWorkfile(pyblish.api.ContextPlugin):
    label = "Collect Workfile"
    order = pyblish.api.CollectorOrder - 1
    order = pyblish.api.CollectorOrder - 0.4
    hosts = ["tvpaint"]

    def process(self, context):
@@ -39,7 +39,7 @@ class ResetTVPaintWorkfileMetadata(pyblish.api.Action):

class CollectWorkfileData(pyblish.api.ContextPlugin):
    label = "Collect Workfile Data"
    order = pyblish.api.CollectorOrder - 1.01
    order = pyblish.api.CollectorOrder - 0.45
    hosts = ["tvpaint"]
    actions = [ResetTVPaintWorkfileMetadata]
@@ -606,7 +606,7 @@ class ExtractSequence(pyblish.api.Extractor):
                self._copy_image(eq_frame_filepath, new_filepath)
                layer_files_by_frame[frame_idx] = new_filepath

        elif pre_behavior == "loop":
        elif pre_behavior in ("loop", "repeat"):
            # Loop backwards from last frame of layer
            for frame_idx in reversed(range(mark_in_index, frame_start_index)):
                eq_frame_idx_offset = (
@@ -678,7 +678,7 @@ class ExtractSequence(pyblish.api.Extractor):
                self._copy_image(eq_frame_filepath, new_filepath)
                layer_files_by_frame[frame_idx] = new_filepath

        elif post_behavior == "loop":
        elif post_behavior in ("loop", "repeat"):
            # Loop backwards from last frame of layer
            for frame_idx in range(frame_end_index + 1, mark_out_index + 1):
                eq_frame_idx = frame_idx % frame_count
@@ -10,6 +10,7 @@ Provides:
import os
import json
import clique
import tempfile

import pyblish.api
from avalon import io
@@ -94,7 +95,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
            instance.data["families"] = families
            instance.data["version"] = \
                self._get_last_version(asset, subset) + 1
            instance.data["stagingDir"] = task_dir
            instance.data["stagingDir"] = tempfile.mkdtemp()
            instance.data["source"] = "webpublisher"

            # to store logging info into DB openpype.webpublishes
@@ -113,6 +114,8 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
                instance.data["frameEnd"] = \
                    instance.data["representations"][0]["frameEnd"]
            else:
                instance.data["frameStart"] = 0
                instance.data["frameEnd"] = 1
                instance.data["representations"] = self._get_single_repre(
                    task_dir, task_data["files"], tags
                )
@@ -174,7 +177,11 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
            (family, [families], subset_template_name, tags) tuple
            AssertionError if not matching family found
        """
        task_obj = settings.get(task_type)
        task_type = task_type.lower()
        lower_cased_task_types = {}
        for t_type, task in settings.items():
            lower_cased_task_types[t_type.lower()] = task
        task_obj = lower_cased_task_types.get(task_type)
        assert task_obj, "No family configuration for '{}'".format(task_type)

        found_family = None
@@ -27,6 +27,7 @@ from .execute import (
    get_pype_execute_args,
    execute,
    run_subprocess,
    path_to_subprocess_arg,
    CREATE_NO_WINDOW
)
from .log import PypeLogger, timeit
@@ -59,6 +60,11 @@ from .python_module_tools import (
    import_module_from_dirpath
)

from .profiles_filtering import (
    compile_list_of_regexes,
    filter_profiles
)

from .avalon_context import (
    CURRENT_DOC_SCHEMAS,
    PROJECT_NAME_ALLOWED_SYMBOLS,
@@ -118,13 +124,9 @@ from .applications import (
    prepare_host_environments,
    prepare_context_environments,
    get_app_environments_for_context,
    apply_project_environments_value,

    compile_list_of_regexes
    apply_project_environments_value
)

from .profiles_filtering import filter_profiles

from .plugin_tools import (
    TaskNotSetError,
    get_subset_name,
@@ -143,7 +145,9 @@ from .plugin_tools import (
from .path_tools import (
    version_up,
    get_version_from_path,
    get_last_version_from_path
    get_last_version_from_path,
    create_project_folders,
    get_project_basic_paths
)

from .editorial import (
@@ -158,12 +162,19 @@ from .editorial import (
    make_sequence_collection
)

from .pype_info import (
    get_openpype_version,
    get_build_version
)

terminal = Terminal

__all__ = [
    "get_pype_execute_args",
    "execute",
    "run_subprocess",
    "path_to_subprocess_arg",
    "CREATE_NO_WINDOW",

    "env_value_to_bool",
    "get_paths_from_environ",
@@ -276,5 +287,10 @@ __all__ = [
    "range_from_frames",
    "frames_to_secons",
    "frames_to_timecode",
    "make_sequence_collection"
    "make_sequence_collection",
    "create_project_folders",
    "get_project_basic_paths",

    "get_openpype_version",
    "get_build_version",
]
@@ -25,11 +25,12 @@ from . import (
    PypeLogger,
    Anatomy
)
from .profiles_filtering import filter_profiles
from .local_settings import get_openpype_username
from .avalon_context import (
    get_workdir_data,
    get_workdir_with_workdir_data,
    get_workfile_template_key_from_context
    get_workfile_template_key
)

from .python_module_tools import (
@@ -1161,8 +1162,12 @@ def prepare_host_environments(data, implementation_envs=True):
    if final_env is None:
        final_env = loaded_env

    keys_to_remove = set(data["env"].keys()) - set(final_env.keys())

    # Update env
    data["env"].update(final_env)
    for key in keys_to_remove:
        data["env"].pop(key, None)


def apply_project_environments_value(project_name, env, project_settings=None):
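The `keys_to_remove` step makes the prepared environment a replacement rather than a pure merge: keys that existed before but are absent from the freshly loaded tool environment get dropped. A minimal sketch with hypothetical variable names and values:

    env = {"PATH": "/usr/bin", "OLD_TOOL_ROOT": "/tools/v1"}
    loaded_env = {"PATH": "/usr/bin", "NEW_TOOL_ROOT": "/tools/v2"}

    keys_to_remove = set(env) - set(loaded_env)
    env.update(loaded_env)
    for key in keys_to_remove:
        env.pop(key, None)  # OLD_TOOL_ROOT is gone, NEW_TOOL_ROOT stays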
@@ -1226,8 +1231,12 @@ def prepare_context_environments(data):

    # Load project specific environments
    project_name = project_doc["name"]
    project_settings = get_project_settings(project_name)
    data["project_settings"] = project_settings
    # Apply project specific environments on current env value
    apply_project_environments_value(project_name, data["env"])
    apply_project_environments_value(
        project_name, data["env"], project_settings
    )

    app = data["app"]
    workdir_data = get_workdir_data(
@@ -1237,17 +1246,22 @@ def prepare_context_environments(data):

    anatomy = data["anatomy"]

    template_key = get_workfile_template_key_from_context(
        asset_doc["name"],
        task_name,
    asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
    task_info = asset_tasks.get(task_name) or {}
    task_type = task_info.get("type")
    # Temp solution how to pass task type to `_prepare_last_workfile`
    data["task_type"] = task_type

    workfile_template_key = get_workfile_template_key(
        task_type,
        app.host_name,
        project_name=project_name,
        dbcon=data["dbcon"]
        project_settings=project_settings
    )

    try:
        workdir = get_workdir_with_workdir_data(
            workdir_data, anatomy, template_key=template_key
            workdir_data, anatomy, template_key=workfile_template_key
        )

    except Exception as exc:
@@ -1281,10 +1295,10 @@ def prepare_context_environments(data):
    )
    data["env"].update(context_env)

    _prepare_last_workfile(data, workdir)
    _prepare_last_workfile(data, workdir, workfile_template_key)


def _prepare_last_workfile(data, workdir):
def _prepare_last_workfile(data, workdir, workfile_template_key):
    """last workfile workflow preparation.

    Function check if should care about last workfile workflow and tries
@@ -1314,13 +1328,14 @@ def _prepare_last_workfile(data, workdir):
    workdir_data = copy.deepcopy(_workdir_data)
    project_name = data["project_name"]
    task_name = data["task_name"]
    task_type = data["task_type"]
    start_last_workfile = should_start_last_workfile(
        project_name, app.host_name, task_name
        project_name, app.host_name, task_name, task_type
    )
    data["start_last_workfile"] = start_last_workfile

    workfile_startup = should_workfile_tool_start(
        project_name, app.host_name, task_name
        project_name, app.host_name, task_name, task_type
    )
    data["workfile_startup"] = workfile_startup
@@ -1338,23 +1353,23 @@ def _prepare_last_workfile(data, workdir):
    )

    # Last workfile path
    last_workfile_path = ""
    extensions = avalon.api.HOST_WORKFILE_EXTENSIONS.get(
        app.host_name
    )
    if extensions:
        anatomy = data["anatomy"]
        # Find last workfile
        file_template = anatomy.templates["work"]["file"]
        workdir_data.update({
            "version": 1,
            "user": get_openpype_username(),
            "ext": extensions[0]
        })
    last_workfile_path = data.get("last_workfile_path") or ""
    if not last_workfile_path:
        extensions = avalon.api.HOST_WORKFILE_EXTENSIONS.get(app.host_name)

        last_workfile_path = avalon.api.last_workfile(
            workdir, file_template, workdir_data, extensions, True
        )
        if extensions:
            anatomy = data["anatomy"]
            # Find last workfile
            file_template = anatomy.templates["work"]["file"]
            workdir_data.update({
                "version": 1,
                "user": get_openpype_username(),
                "ext": extensions[0]
            })

            last_workfile_path = avalon.api.last_workfile(
                workdir, file_template, workdir_data, extensions, True
            )

    if os.path.exists(last_workfile_path):
        log.debug((
@@ -1369,54 +1384,8 @@ def _prepare_last_workfile(data, workdir):
    data["last_workfile_path"] = last_workfile_path


def get_option_from_settings(
        startup_presets, host_name, task_name, default_output
):
    host_name_lowered = host_name.lower()
    task_name_lowered = task_name.lower()

    max_points = 2
    matching_points = -1
    matching_item = None
    for item in startup_presets:
        hosts = item.get("hosts") or tuple()
        tasks = item.get("tasks") or tuple()

        hosts_lowered = tuple(_host_name.lower() for _host_name in hosts)
        # Skip item if has set hosts and current host is not in
        if hosts_lowered and host_name_lowered not in hosts_lowered:
            continue

        tasks_lowered = tuple(_task_name.lower() for _task_name in tasks)
        # Skip item if has set tasks and current task is not in
        if tasks_lowered:
            task_match = False
            for task_regex in compile_list_of_regexes(tasks_lowered):
                if re.match(task_regex, task_name_lowered):
                    task_match = True
                    break

            if not task_match:
                continue

        points = int(bool(hosts_lowered)) + int(bool(tasks_lowered))
        if points > matching_points:
            matching_item = item
            matching_points = points

        if matching_points == max_points:
            break

    if matching_item is not None:
        output = matching_item.get("enabled")
        if output is None:
            output = default_output
        return output
    return default_output


def should_start_last_workfile(
        project_name, host_name, task_name, default_output=False
        project_name, host_name, task_name, task_type, default_output=False
):
    """Define if host should start last version workfile if possible.
|
@ -1438,7 +1407,7 @@ def should_start_last_workfile(
|
|||
"""
|
||||
|
||||
project_settings = get_project_settings(project_name)
|
||||
startup_presets = (
|
||||
profiles = (
|
||||
project_settings
|
||||
["global"]
|
||||
["tools"]
|
||||
|
|
@@ -1446,15 +1415,27 @@ def should_start_last_workfile(
        ["last_workfile_on_startup"]
    )

    if not startup_presets:
    if not profiles:
        return default_output

    return get_option_from_settings(
        startup_presets, host_name, task_name, default_output)
    filter_data = {
        "tasks": task_name,
        "task_types": task_type,
        "hosts": host_name
    }
    matching_item = filter_profiles(profiles, filter_data)

    output = None
    if matching_item:
        output = matching_item.get("enabled")

    if output is None:
        return default_output
    return output


def should_workfile_tool_start(
        project_name, host_name, task_name, default_output=False
        project_name, host_name, task_name, task_type, default_output=False
):
    """Define if host should start workfile tool at host launch.
@@ -1476,7 +1457,7 @@ def should_workfile_tool_start(
    """

    project_settings = get_project_settings(project_name)
    startup_presets = (
    profiles = (
        project_settings
        ["global"]
        ["tools"]
@@ -1484,27 +1465,20 @@ def should_workfile_tool_start(
        ["open_workfile_tool_on_startup"]
    )

    if not startup_presets:
    if not profiles:
        return default_output

    return get_option_from_settings(
        startup_presets, host_name, task_name, default_output)
    filter_data = {
        "tasks": task_name,
        "task_types": task_type,
        "hosts": host_name
    }
    matching_item = filter_profiles(profiles, filter_data)

    output = None
    if matching_item:
        output = matching_item.get("enabled")


def compile_list_of_regexes(in_list):
    """Convert strings in entered list to compiled regex objects."""
    regexes = list()
    if not in_list:
        return regexes

    for item in in_list:
        if not item:
            continue
        try:
            regexes.append(re.compile(item))
        except TypeError:
            print((
                "Invalid type \"{}\" value \"{}\"."
                " Expected string based object. Skipping."
            ).format(str(type(item)), str(item)))
    return regexes
    if output is None:
        return default_output
    return output
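Both startup checks now share the generic profile matching. Assuming the settings profiles are dicts shaped like the filter keys (this example data is hypothetical), the most specific match wins:

    profiles = [
        {"hosts": [], "task_types": [], "tasks": [], "enabled": False},
        {"hosts": ["nuke"], "task_types": ["Compositing"], "tasks": [],
         "enabled": True},
    ]
    filter_data = {
        "hosts": "nuke",
        "task_types": "Compositing",
        "tasks": "comp_main",
    }
    # filter_profiles(profiles, filter_data) returns the second, more
    # specific profile, so for a Nuke compositing task the feature is enabled.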
@@ -10,6 +10,7 @@ import functools

from openpype.settings import get_project_settings
from .anatomy import Anatomy
from .profiles_filtering import filter_profiles

# avalon module is not imported at the top
# - may not be in path at the time of pype.lib initialization
@@ -453,8 +454,6 @@ def get_workfile_template_key(
    if not profiles:
        return default

    from .profiles_filtering import filter_profiles

    profile_filter = {
        "task_types": task_type,
        "hosts": host_name
@@ -791,7 +790,9 @@ class BuildWorkfile:
        current_task_name = avalon.io.Session["AVALON_TASK"]

        # Load workfile presets for task
        self.build_presets = self.get_build_presets(current_task_name)
        self.build_presets = self.get_build_presets(
            current_task_name, current_asset_entity
        )

        # Skip if there aren't any presets for this task
        if not self.build_presets:
@@ -875,7 +876,7 @@ class BuildWorkfile:
        return loaded_containers

    @with_avalon
    def get_build_presets(self, task_name):
    def get_build_presets(self, task_name, asset_doc):
        """ Returns presets to build workfile for task name.

        Presets are loaded for current project set in
@@ -889,30 +890,33 @@ class BuildWorkfile:
            (dict): preset per entered task name
        """
        host_name = avalon.api.registered_host().__name__.rsplit(".", 1)[-1]
        presets = get_project_settings(avalon.io.Session["AVALON_PROJECT"])
        project_settings = get_project_settings(
            avalon.io.Session["AVALON_PROJECT"]
        )

        host_settings = project_settings.get(host_name) or {}
        # Get presets for host
        wb_settings = presets.get(host_name, {}).get("workfile_builder")
        wb_settings = host_settings.get("workfile_builder")
        if not wb_settings:
            # backward compatibility
            wb_settings = presets.get(host_name, {}).get("workfile_build")
            wb_settings = host_settings.get("workfile_build") or {}

        builder_presets = wb_settings.get("profiles")
        builder_profiles = wb_settings.get("profiles")
        if not builder_profiles:
            return None

        if not builder_presets:
            return

        task_name_low = task_name.lower()
        per_task_preset = None
        for preset in builder_presets:
            preset_tasks = preset.get("tasks") or []
            preset_tasks_low = [task.lower() for task in preset_tasks]
            if task_name_low in preset_tasks_low:
                per_task_preset = preset
                break

        return per_task_preset
        task_type = (
            asset_doc
            .get("data", {})
            .get("tasks", {})
            .get(task_name, {})
            .get("type")
        )
        filter_data = {
            "task_types": task_type,
            "tasks": task_name
        }
        return filter_profiles(builder_profiles, filter_data)

    def _filter_build_profiles(self, build_profiles, loaders_by_name):
        """ Filter build profiles by loaders and prepare process data.
@@ -1,9 +1,11 @@
"""Functions useful for delivery action or loader"""
import os
import shutil
import glob
import clique
import collections


def collect_frames(files):
    """
    Returns dict of source path and its frame, if from sequence
@@ -228,7 +230,16 @@ def process_sequence(
    Returns:
        (collections.defaultdict, int)
    """
    if not os.path.exists(src_path):

    def hash_path_exist(myPath):
        res = myPath.replace('#', '*')
        glob_search_results = glob.glob(res)
        if len(glob_search_results) > 0:
            return True
        else:
            return False

    if not hash_path_exist(src_path):
        msg = "{} doesn't exist for {}".format(src_path,
                                               repre["_id"])
        report_items["Source file was not found"].append(msg)
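The new helper turns `#` frame padding into a glob wildcard, so a source path that stands for a whole sequence still counts as existing if any frame is on disk. A minimal sketch with a hypothetical path:

```
import glob

src_path = "/renders/shot010.####.exr"  # hypothetical '#'-padded sequence path
# '#' -> '*' gives "/renders/shot010.****.exr", which glob matches against
# any real frame such as "/renders/shot010.0001.exr"
frames_exist = len(glob.glob(src_path.replace("#", "*"))) > 0
```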

@@ -1,11 +1,10 @@
import logging
import os
import shlex
import subprocess
import platform

from .log import PypeLogger as Logger

log = logging.getLogger(__name__)

# MSDN process creation flag (Windows only)
CREATE_NO_WINDOW = 0x08000000
@@ -100,7 +99,9 @@ def run_subprocess(*args, **kwargs):
    filtered_env = {str(k): str(v) for k, v in env.items()}

    # Use lib's logger if was not passed with kwargs.
    logger = kwargs.pop("logger", log)
    logger = kwargs.pop("logger", None)
    if logger is None:
        logger = Logger.get_logger("run_subprocess")

    # set overrides
    kwargs['stdout'] = kwargs.get('stdout', subprocess.PIPE)
@@ -138,6 +139,14 @@ def run_subprocess(*args, **kwargs):
    return full_output


def path_to_subprocess_arg(path):
    """Prepare path for subprocess arguments.

    Returned path can be wrapped with quotes or kept as is.
    """
    return subprocess.list2cmdline([path])


def get_pype_execute_args(*args):
    """Arguments to run pype command.
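`subprocess.list2cmdline` applies Windows-style quoting, so the path is wrapped in quotes only when it needs to be. A short illustration with hypothetical paths:

```
import subprocess

print(subprocess.list2cmdline(["C:\\Program Files\\app.exe"]))
# '"C:\\Program Files\\app.exe"' - quoted because of the space
print(subprocess.list2cmdline(["C:\\tools\\app.exe"]))
# 'C:\\tools\\app.exe' - kept as is
```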

@@ -3,6 +3,7 @@ import sys
import time
import logging
import pymongo
import certifi

if sys.version_info[0] == 2:
    from urlparse import urlparse, parse_qs
@@ -85,12 +86,33 @@ def get_default_components():
    return decompose_url(mongo_url)


def extract_port_from_url(url):
    parsed_url = urlparse(url)
    if parsed_url.scheme is None:
        _url = "mongodb://{}".format(url)
        parsed_url = urlparse(_url)
    return parsed_url.port
def should_add_certificate_path_to_mongo_url(mongo_url):
    """Check if should add ca certificate to mongo url.

    Since 30.9.2021 cloud mongo requires newer certificates that are not
    available on most workstations. This adds path to certifi certificate
    which is valid for it. To add the certificate path, url must have scheme
    'mongodb+srv' or have 'ssl=true' or 'tls=true' in url query.
    """
    parsed = urlparse(mongo_url)
    query = parse_qs(parsed.query)
    lowered_query_keys = set(key.lower() for key in query.keys())
    add_certificate = False
    # Check if url 'ssl' or 'tls' are set to 'true'
    for key in ("ssl", "tls"):
        if key in query and "true" in query[key]:
            add_certificate = True
            break

    # Check if url contains 'mongodb+srv'
    if not add_certificate and parsed.scheme == "mongodb+srv":
        add_certificate = True

    # Check if url does already contain certificate path
    if add_certificate and "tlscafile" in lowered_query_keys:
        add_certificate = False

    return add_certificate


def validate_mongo_connection(mongo_uri):
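Behavior of the new check on a few hypothetical urls, following the rules stated in its docstring:

```
# 'mongodb+srv' scheme forces the certificate
should_add_certificate_path_to_mongo_url(
    "mongodb+srv://user@cluster.example.net"
)  # True

# explicit tls query forces it too
should_add_certificate_path_to_mongo_url(
    "mongodb://host:27017/?tls=true"
)  # True

# a certificate path is already set -> nothing to add
should_add_certificate_path_to_mongo_url(
    "mongodb+srv://cluster.example.net/?tlsCAFile=/tmp/ca.pem"
)  # False
```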
@@ -106,26 +128,9 @@ def validate_mongo_connection(mongo_uri):
        passed so probably couldn't connect to mongo server.

    """
    parsed = urlparse(mongo_uri)
    # Force validation of scheme
    if parsed.scheme not in ["mongodb", "mongodb+srv"]:
        raise pymongo.errors.InvalidURI((
            "Invalid URI scheme:"
            " URI must begin with 'mongodb://' or 'mongodb+srv://'"
        ))
    # we have mongo connection string. Let's try if we can connect.
    components = decompose_url(mongo_uri)
    mongo_args = {
        "host": compose_url(**components),
        "serverSelectionTimeoutMS": 1000
    }
    port = components.get("port")
    if port is not None:
        mongo_args["port"] = int(port)

    # Create connection
    client = pymongo.MongoClient(**mongo_args)
    client.server_info()
    client = OpenPypeMongoConnection.create_connection(
        mongo_uri, retry_attempts=1
    )
    client.close()
@@ -151,6 +156,8 @@ class OpenPypeMongoConnection:
        # Naive validation of existing connection
        try:
            connection.server_info()
            with connection.start_session():
                pass
        except Exception:
            connection = None
@@ -162,38 +169,53 @@ class OpenPypeMongoConnection:
        return connection

    @classmethod
    def create_connection(cls, mongo_url, timeout=None):
    def create_connection(cls, mongo_url, timeout=None, retry_attempts=None):
        parsed = urlparse(mongo_url)
        # Force validation of scheme
        if parsed.scheme not in ["mongodb", "mongodb+srv"]:
            raise pymongo.errors.InvalidURI((
                "Invalid URI scheme:"
                " URI must begin with 'mongodb://' or 'mongodb+srv://'"
            ))

        if timeout is None:
            timeout = int(os.environ.get("AVALON_TIMEOUT") or 1000)

        kwargs = {
            "host": mongo_url,
            "serverSelectionTimeoutMS": timeout
        }
        if should_add_certificate_path_to_mongo_url(mongo_url):
            kwargs["ssl_ca_certs"] = certifi.where()

        port = extract_port_from_url(mongo_url)
        if port is not None:
            kwargs["port"] = int(port)
        mongo_client = pymongo.MongoClient(mongo_url, **kwargs)

        mongo_client = pymongo.MongoClient(**kwargs)
        if retry_attempts is None:
            retry_attempts = 3

        for _retry in range(3):
        elif not retry_attempts:
            retry_attempts = 1

        last_exc = None
        valid = False
        t1 = time.time()
        for attempt in range(1, retry_attempts + 1):
            try:
                t1 = time.time()
                mongo_client.server_info()

            except Exception:
                cls.log.warning("Retrying...")
                time.sleep(1)
                timeout *= 1.5

            else:
                with mongo_client.start_session():
                    pass
                valid = True
                break

        else:
            raise IOError((
                "ERROR: Couldn't connect to {} in less than {:.3f}ms"
            ).format(mongo_url, timeout))
            except Exception as exc:
                last_exc = exc
                if attempt < retry_attempts:
                    cls.log.warning(
                        "Attempt {} failed. Retrying... ".format(attempt)
                    )
                    time.sleep(1)

        if not valid:
            raise last_exc

        cls.log.info("Connected to {}, delay {:.3f}s".format(
            mongo_url, time.time() - t1
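A minimal sketch of how the reworked retry loop might be used; the url is hypothetical. `retry_attempts=1` gives a single try (as `validate_mongo_connection` above now does), `None` falls back to 3 attempts.

```
client = OpenPypeMongoConnection.create_connection(
    "mongodb://localhost:27017", timeout=2000, retry_attempts=2
)
try:
    print(client.server_info()["version"])
finally:
    client.close()
```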
@@ -1,6 +1,11 @@
import json
import logging
import os
import re
import logging


from .anatomy import Anatomy
from openpype.settings import get_project_settings

log = logging.getLogger(__name__)
@@ -77,7 +82,7 @@ def get_version_from_path(file):
    """
    pattern = re.compile(r"[\._]v([0-9]+)", re.IGNORECASE)
    try:
        return pattern.findall(file)[0]
        return pattern.findall(file)[-1]
    except IndexError:
        log.error(
            "templates:get_version_from_workfile:"
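The switch from `[0]` to `[-1]` matters when a path contains several version-like tokens; taking the last one picks the workfile's own version rather than, say, a versioned folder earlier in the path. A quick illustration with a hypothetical path:

```
import re

pattern = re.compile(r"[\._]v([0-9]+)", re.IGNORECASE)
path = "/projects/show_v002/shots/sh010_compositing_v017.nk"
print(pattern.findall(path))       # ['002', '017']
print(pattern.findall(path)[-1])   # '017' - the workfile version
```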
@@ -119,3 +124,75 @@ def get_last_version_from_path(path_dir, filter):
        return filtred_files[-1]

    return None


def compute_paths(basic_paths_items, project_root):
    pattern_array = re.compile(r"\[.*\]")
    project_root_key = "__project_root__"
    output = []
    for path_items in basic_paths_items:
        clean_items = []
        for path_item in path_items:
            matches = re.findall(pattern_array, path_item)
            if len(matches) > 0:
                path_item = path_item.replace(matches[0], "")
            if path_item == project_root_key:
                path_item = project_root
            clean_items.append(path_item)
        output.append(os.path.normpath(os.path.sep.join(clean_items)))
    return output


def create_project_folders(basic_paths, project_name):
    anatomy = Anatomy(project_name)
    roots_paths = []
    if isinstance(anatomy.roots, dict):
        for root in anatomy.roots.values():
            roots_paths.append(root.value)
    else:
        roots_paths.append(anatomy.roots.value)

    for root_path in roots_paths:
        project_root = os.path.join(root_path, project_name)
        full_paths = compute_paths(basic_paths, project_root)
        # Create folders
        for path in full_paths:
            full_path = path.format(project_root=project_root)
            if os.path.exists(full_path):
                log.debug(
                    "Folder already exists: {}".format(full_path)
                )
            else:
                log.debug("Creating folder: {}".format(full_path))
                os.makedirs(full_path)


def _list_path_items(folder_structure):
    output = []
    for key, value in folder_structure.items():
        if not value:
            output.append(key)
        else:
            paths = _list_path_items(value)
            for path in paths:
                if not isinstance(path, (list, tuple)):
                    path = [path]

                item = [key]
                item.extend(path)
                output.append(item)

    return output


def get_project_basic_paths(project_name):
    project_settings = get_project_settings(project_name)
    folder_structure = (
        project_settings["global"]["project_folder_structure"]
    )
    if not folder_structure:
        return []

    if isinstance(folder_structure, str):
        folder_structure = json.loads(folder_structure)
    return _list_path_items(folder_structure)
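What `_list_path_items` produces from a nested folder structure, before `compute_paths` joins the pieces into real paths; the structure below is hypothetical:

```
folder_structure = {
    "__project_root__": {
        "assets": {},
        "shots": {"editorial": {}}
    }
}
# _list_path_items(folder_structure) flattens the tree into key chains:
# [
#     ["__project_root__", "assets"],
#     ["__project_root__", "shots", "editorial"],
# ]
```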
@@ -35,7 +35,8 @@ def get_subset_name(
    project_name=None,
    host_name=None,
    default_template=None,
    dynamic_data=None
    dynamic_data=None,
    dbcon=None
):
    if not family:
        return ""

@@ -46,13 +47,42 @@ def get_subset_name(
    # Use only last part of class family value split by dot (`.`)
    family = family.rsplit(".", 1)[-1]

    if project_name is None:
        import avalon.api

        project_name = avalon.api.Session["AVALON_PROJECT"]

    # Function should expect asset document instead of asset id
    # - that way `dbcon` is not needed
    if dbcon is None:
        from avalon.api import AvalonMongoDB

        dbcon = AvalonMongoDB()
        dbcon.Session["AVALON_PROJECT"] = project_name

    dbcon.install()

    asset_doc = dbcon.find_one(
        {
            "type": "asset",
            "_id": asset_id
        },
        {
            "data.tasks": True
        }
    )
    asset_tasks = asset_doc.get("data", {}).get("tasks") or {}
    task_info = asset_tasks.get(task_name) or {}
    task_type = task_info.get("type")

    # Get settings
    tools_settings = get_project_settings(project_name)["global"]["tools"]
    profiles = tools_settings["creator"]["subset_name_profiles"]
    filtering_criteria = {
        "families": family,
        "hosts": host_name,
        "tasks": task_name
        "tasks": task_name,
        "task_types": task_type
    }

    matching_profile = filter_profiles(profiles, filtering_criteria)
@@ -1,10 +1,28 @@
import re
import logging
from .applications import compile_list_of_regexes

log = logging.getLogger(__name__)


def compile_list_of_regexes(in_list):
    """Convert strings in entered list to compiled regex objects."""
    regexes = list()
    if not in_list:
        return regexes

    for item in in_list:
        if not item:
            continue
        try:
            regexes.append(re.compile(item))
        except TypeError:
            print((
                "Invalid type \"{}\" value \"{}\"."
                " Expected string based object. Skipping."
            ).format(str(type(item)), str(item)))
    return regexes


def _profile_exclusion(matching_profiles, logger):
    """Find out most matching profile by host, task and family match.
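A quick illustration of the now-local helper; falsy entries are skipped silently, non-string entries trigger the printed note and are skipped too:

```
regexes = compile_list_of_regexes(["render.*", None, 42])
# -> one compiled pattern for "render.*"; None is skipped,
#    42 raises TypeError in re.compile and prints the "Invalid type" message
matching = any(regex.match("render_beauty") for regex in regexes)  # True
```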
@@ -165,7 +183,8 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None):
            if match == -1:
                profile_value = profile.get(key) or []
                logger.debug(
                    "\"{}\" not found in {}".format(key, profile_value)
                    "\"{}\" not found in \"{}\": {}".format(value, key,
                                                            profile_value)
                )
                profile_points = -1
                break

@@ -192,13 +211,13 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None):
    ])

    if not matching_profiles:
        logger.warning(
        logger.info(
            "None of profiles match your setup. {}".format(log_parts)
        )
        return None

    if len(matching_profiles) > 1:
        logger.warning(
        logger.info(
            "More than one profile match your setup. {}".format(log_parts)
        )
@@ -9,23 +9,76 @@ import openpype.version
from openpype.settings.lib import get_local_settings
from .execute import get_pype_execute_args
from .local_settings import get_local_site_id
from .python_module_tools import import_filepath


def get_openpype_version():
    """Version of pype that is currently used."""
    return openpype.version.__version__


def get_pype_version():
    """Version of pype that is currently used."""
    return openpype.version.__version__
    """Backwards compatibility. Remove when 100% not used."""
    print((
        "Using deprecated function 'openpype.lib.pype_info.get_pype_version'"
        " replace with 'openpype.lib.pype_info.get_openpype_version'."
    ))
    return get_openpype_version()


def get_build_version():
    """OpenPype version of build."""
    # Return OpenPype version if is running from code
    if not is_running_from_build():
        return get_openpype_version()

    # Import `version.py` from build directory
    version_filepath = os.path.join(
        os.environ["OPENPYPE_ROOT"],
        "openpype",
        "version.py"
    )
    if not os.path.exists(version_filepath):
        return None

    module = import_filepath(version_filepath, "openpype_build_version")
    return getattr(module, "__version__", None)


def is_running_from_build():
    """Determine if current process is running from build or code.

    Returns:
        bool: True if running from build.
    """
    executable_path = os.environ["OPENPYPE_EXECUTABLE"]
    executable_filename = os.path.basename(executable_path)
    if "python" in executable_filename.lower():
        return False
    return True


def is_running_staging():
    """Currently used OpenPype is staging version.

    Returns:
        bool: True if openpype version contains 'staging'.
    """
    if "staging" in get_openpype_version():
        return True
    return False


def get_pype_info():
    """Information about currently used Pype process."""
    executable_args = get_pype_execute_args()
    if len(executable_args) == 1:
    if is_running_from_build():
        version_type = "build"
    else:
        version_type = "code"

    return {
        "version": get_pype_version(),
        "version": get_openpype_version(),
        "version_type": version_type,
        "executable": executable_args[-1],
        "pype_root": os.environ["OPENPYPE_REPOS_ROOT"],

@@ -73,7 +126,7 @@ def extract_pype_info_to_file(dirpath):
        filepath (str): Full path to file where data were extracted.
    """
    filename = "{}_{}_{}.json".format(
        get_pype_version(),
        get_openpype_version(),
        get_local_site_id(),
        datetime.datetime.now().strftime("%y%m%d%H%M%S")
    )
@@ -1,125 +1,143 @@
# OpenPype modules/addons
OpenPype modules should contain separated logic of specific kind of implementation, such as Ftrack connection and its usage code, Deadline farm rendering, or may contain only special plugins. Addons work the same way; currently there is no difference between module and addon functionality.

## Modules concept
- modules and addons are dynamically imported to virtual python module `openpype_modules` from which it is possible to import them no matter where the module is located
- modules or addons should never be imported directly, even if you know the possible full import path
    - it is because all of their content must be imported in specific order and should not be imported without defined functions as it may also break few implementation parts

### TODOs
- add module/addon manifest
    - definition of module (not 100% defined content e.g. minimum required OpenPype version etc.)
    - defining a folder as content of a module or an addon
- module/addon have its settings schemas and default values outside OpenPype
- add general setting of paths to modules

## Base class `OpenPypeModule`
- abstract class as base for each module
- implementation should contain module's api without GUI parts
- may implement `get_global_environments` method which should return dictionary of environments that are globally applicable and value is the same for whole studio if launched at any workstation (except os specific paths)
- abstract parts (a sketch of a module honoring them follows after this section):
    - `name` attribute - name of a module
    - `initialize` method - method for own initialization of a module (should not override `__init__`)
    - `connect_with_modules` method - where module may look for its interfaces implementations or check for other modules
- `__init__` should not be overridden and `initialize` should not do time consuming part but only prepare base data about module
    - also keep in mind that they may be initialized in headless mode
- connection with other modules is made with help of interfaces

## Addon class `OpenPypeAddOn`
- inherits from `OpenPypeModule` but is enabled by default and doesn't have to implement `initialize` and `connect_with_modules` methods
- that is because it is expected that addons don't need to have system settings and `enabled` value on it (but it is possible...)

## How to add addons/modules
- in System settings go to `modules/addon_paths` (`Modules/OpenPype AddOn Paths`) where you have to add path to addon root folder
- for openpype example addons use `{OPENPYPE_REPOS_ROOT}/openpype/modules/example_addons`

## Addon/module settings
- addons/modules may have defined custom settings definitions with default values
- it is based on settings type `dynamic_schema` which has `name`
    - that item defines that it can be replaced dynamically with any schemas from module or addon which won't be saved to openpype core defaults
    - they can't be added to any schema hierarchy
    - item must not be in settings group (under overrides) or in dynamic item (e.g. `list` of `dict-modifiable`)
- addons may define their dynamic schema items
- they can be defined with class which inherits from `BaseModuleSettingsDef`
    - it is recommended to use pre implemented `JsonFilesSettingsDef` which defines structure and uses json files to define dynamic schemas, schemas and default values
        - check its docstring and check for `example_addon` in example addons
- settings definition returns schemas by dynamic schemas names

# Interfaces
- interface is class that has defined abstract methods to implement and may contain pre implemented helper methods
- module that inherits from an interface must implement those abstract methods otherwise it won't be initialized
- it is easy to find which module object inherited from which interfaces; with 100% chance they have implemented required methods
- interfaces can be defined in `interfaces.py` inside module directory
    - the file can't use relative imports or import anything from other parts of module itself at the header of file
    - this is one of reasons why modules/addons can't be imported directly without using defined functions in OpenPype modules implementation

## Base class `OpenPypeInterface`
- has nothing implemented
- has ABCMeta as metaclass
- is defined to be able to find out classes which inherit from this base, to be able to tell this is an Interface

## Global interfaces
- few interfaces are implemented for global usage

### IPluginPaths
- module wants to add directory path/s to avalon or publish plugins
- module must implement `get_plugin_paths` which must return dictionary with possible keys `"publish"`, `"load"`, `"create"` or `"actions"`
    - each key may contain list or string with a path to directory with plugins

### ITrayModule
- module has more logic when used in a tray
    - it is possible that module can be used only in the tray
- abstract methods
    - `tray_init` - initialization triggered after `initialize` when used in `TrayModulesManager` and before `connect_with_modules`
    - `tray_menu` - add actions to tray widget's menu that represent the module
    - `tray_start` - start of module's logic in tray
        - module is initialized and connected with other modules
    - `tray_exit` - module's cleanup like stop and join threads etc.
    - order of calling is based on implementation; this order is how it works with `TrayModulesManager`
    - it is recommended to import and use GUI implementation only in these methods
- has attribute `tray_initialized` (bool) which is set to False by default and is set by `TrayModulesManager` to True after `tray_init`
    - if module has logic only in tray or for both then it should be checking for `tray_initialized` attribute to decide how to handle situations

### ITrayService
- inherits from `ITrayModule` and implements `tray_menu` method for you
- adds action to submenu "Services" in tray widget menu with icon and label
- abstract attribute `label`
    - label shown in menu
- interface has pre implemented methods to change icon color
    - `set_service_running` - green icon
    - `set_service_failed` - red icon
    - `set_service_idle` - orange icon
    - these states must be set by module itself; `set_service_running` is default state on initialization

### ITrayAction
- inherits from `ITrayModule` and implements `tray_menu` method for you
- adds action to tray widget menu with label
- abstract attribute `label`
    - label shown in menu
- abstract method `on_action_trigger`
    - what should happen when an action is triggered
- NOTE: It is a good idea to implement logic of `on_action_trigger` in an api method and trigger that method on callbacks. This gives ability to trigger that method outside tray

## Modules interfaces
- modules may have defined their own interfaces to be able to recognize other modules that would want to use their features

### Example:
- Ftrack module has `IFtrackEventHandlerPaths` which helps to tell Ftrack module which other modules want to add paths to server/user event handlers
    - Clockify module uses `IFtrackEventHandlerPaths` and returns paths to clockify ftrack synchronizers

- Clockify inherits from more interfaces. Its class definition looks like:
```
class ClockifyModule(
    OpenPypeModule, # Says it's Pype module so ModulesManager will try to initialize.
    ITrayModule, # Says has special implementation when used in tray.
    IPluginPaths, # Says has plugin paths that want to register (paths to clockify actions for launcher).
    IFtrackEventHandlerPaths, # Says has Ftrack actions/events for user/server.
    ITimersManager # Listen to other modules with timer and can trigger changes in other module timers through `TimerManager` module.
):
```

### ModulesManager
- collects module classes and tries to initialize them
- important attributes
    - `modules` - list of available attributes
    - `modules_by_id` - dictionary of modules mapped by their ids
    - `modules_by_name` - dictionary of modules mapped by their names
    - all these attributes contain all found modules even if they are not enabled
- helper methods
    - `collect_global_environments` to collect all global environments from enabled modules with calling `get_global_environments` on each of them
    - `collect_plugin_paths` collects plugin paths from all enabled modules
        - output is always dictionary with all keys and values as a list
```
{
    "publish": [],
    "create": [],
    "load": [],
    "actions": []
}
```

### TrayModulesManager
- inherits from `ModulesManager`
- has specific implementation for Pype Tray tool and handles `ITrayModule` methods
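A minimal sketch of a custom module honoring the abstract parts described above. The class and attribute names (`MyStudioModule`, `my_studio`) are hypothetical; only `OpenPypeModule`, `ITrayAction` and the abstract members come from the text above.

```
from openpype.modules import OpenPypeModule
from openpype_interfaces import ITrayAction


class MyStudioModule(OpenPypeModule, ITrayAction):
    # Abstract attributes required by OpenPypeModule / ITrayAction
    name = "my_studio"
    label = "My Studio Action"

    def initialize(self, modules_settings):
        # Only prepare base data, no time consuming work here
        my_settings = modules_settings.get(self.name) or {}
        self.enabled = my_settings.get("enabled", True)

    def connect_with_modules(self, enabled_modules):
        # Look for other modules or interface implementations if needed
        return

    def on_action_trigger(self):
        # Forward to an api method so it can also be triggered outside tray
        self.run_action()

    def run_action(self):
        print("Action triggered")
```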
@@ -1,21 +1,35 @@
# -*- coding: utf-8 -*-
from .base import (
    OpenPypeModule,
    OpenPypeAddOn,
    OpenPypeInterface,

    load_modules,

    ModulesManager,
    TrayModulesManager
    TrayModulesManager,

    BaseModuleSettingsDef,
    ModuleSettingsDef,
    JsonFilesSettingsDef,

    get_module_settings_defs
)


__all__ = (
    "OpenPypeModule",
    "OpenPypeAddOn",
    "OpenPypeInterface",

    "load_modules",

    "ModulesManager",
    "TrayModulesManager"
    "TrayModulesManager",

    "BaseModuleSettingsDef",
    "ModuleSettingsDef",
    "JsonFilesSettingsDef",

    "get_module_settings_defs"
)
@@ -2,9 +2,11 @@
"""Base class for Pype Modules."""
import os
import sys
import json
import time
import inspect
import logging
import platform
import threading
import collections
from uuid import uuid4

@@ -12,7 +14,18 @@ from abc import ABCMeta, abstractmethod
import six

import openpype
from openpype.settings import get_system_settings
from openpype.settings import (
    get_system_settings,
    SYSTEM_SETTINGS_KEY,
    PROJECT_SETTINGS_KEY,
    SCHEMA_KEY_SYSTEM_SETTINGS,
    SCHEMA_KEY_PROJECT_SETTINGS
)

from openpype.settings.lib import (
    get_studio_system_settings_overrides,
    load_json_file
)
from openpype.lib import PypeLogger
@@ -115,11 +128,51 @@ def get_default_modules_dir():
    return os.path.join(current_dir, "default_modules")


def get_dynamic_modules_dirs():
    """Possible paths to OpenPype Addons or Modules.

    Paths are loaded from studio settings under:
        `modules -> addon_paths -> {platform name}`

    Path may contain environment variable as a formatting string.

    They are not validated nor checked for existence.

    Returns:
        list: Paths loaded from studio overrides.
    """
    output = []
    value = get_studio_system_settings_overrides()
    for key in ("modules", "addon_paths", platform.system().lower()):
        if key not in value:
            return output
        value = value[key]

    for path in value:
        if not path:
            continue

        try:
            path = path.format(**os.environ)
        except Exception:
            pass
        output.append(path)
    return output


def get_module_dirs():
    """List of paths where OpenPype modules can be found."""
    dirpaths = [
        get_default_modules_dir()
    ]
    _dirpaths = []
    _dirpaths.append(get_default_modules_dir())
    _dirpaths.extend(get_dynamic_modules_dirs())

    dirpaths = []
    for path in _dirpaths:
        if not path:
            continue
        normalized = os.path.normpath(path)
        if normalized not in dirpaths:
            dirpaths.append(normalized)
    return dirpaths
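The env-var formatting in the loop above means a configured addon path may look like this hypothetical value; on a missing variable the `KeyError` is swallowed and the raw string is kept:

```
import os

path = "{OPENPYPE_REPOS_ROOT}/openpype/modules/example_addons"
os.environ.setdefault("OPENPYPE_REPOS_ROOT", "/opt/openpype")
# str.format with the environment as keyword arguments expands the placeholder
print(path.format(**os.environ))
# /opt/openpype/openpype/modules/example_addons
```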
@@ -165,6 +218,9 @@ def _load_interfaces():
        os.path.join(get_default_modules_dir(), "interfaces.py")
    )
    for dirpath in dirpaths:
        if not os.path.exists(dirpath):
            continue

        for filename in os.listdir(dirpath):
            if filename in ("__pycache__", ):
                continue
@@ -272,12 +328,19 @@ def _load_modules():

        # TODO add more logic how to define if folder is module or not
        # - check manifest and content of manifest
        if os.path.isdir(fullpath):
            import_module_from_dirpath(dirpath, filename, modules_key)
        try:
            if os.path.isdir(fullpath):
                import_module_from_dirpath(dirpath, filename, modules_key)

            elif ext in (".py", ):
                module = import_filepath(fullpath)
                setattr(openpype_modules, basename, module)
        elif ext in (".py", ):
            module = import_filepath(fullpath)
            setattr(openpype_modules, basename, module)

        except Exception:
            log.error(
                "Failed to import '{}'.".format(fullpath),
                exc_info=True
            )


class _OpenPypeInterfaceMeta(ABCMeta):
@@ -354,7 +417,6 @@ class OpenPypeModule:
        """
        pass

    @abstractmethod
    def connect_with_modules(self, enabled_modules):
        """Connect with other enabled modules."""
        pass

@@ -368,7 +430,12 @@ class OpenPypeModule:


class OpenPypeAddOn(OpenPypeModule):
    pass
    # Enable Addon by default
    enabled = True

    def initialize(self, module_settings):
        """Initialization is not required for most of addons."""
        pass


class ModulesManager:
@@ -423,6 +490,7 @@ class ModulesManager:
            if (
                not inspect.isclass(modules_item)
                or modules_item is OpenPypeModule
                or modules_item is OpenPypeAddOn
                or not issubclass(modules_item, OpenPypeModule)
            ):
                continue
@@ -920,3 +988,424 @@ class TrayModulesManager(ModulesManager):
            ),
            exc_info=True
        )


def get_module_settings_defs():
    """Check loaded addons/modules for existence of their settings definitions.

    Check if OpenPype addon/module as python module has class that inherits
    from `ModuleSettingsDef` in python module variables (imported
    in `__init__.py`).

    Returns:
        list: All valid and not abstract settings definitions from imported
            openpype addons and modules.
    """
    # Make sure modules are loaded
    load_modules()

    import openpype_modules

    settings_defs = []

    log = PypeLogger.get_logger("ModuleSettingsLoad")

    for raw_module in openpype_modules:
        for attr_name in dir(raw_module):
            attr = getattr(raw_module, attr_name)
            if (
                not inspect.isclass(attr)
                or attr is ModuleSettingsDef
                or not issubclass(attr, ModuleSettingsDef)
            ):
                continue

            if inspect.isabstract(attr):
                # Find missing implementations by convention of `abc` module
                not_implemented = []
                for sub_attr_name in dir(attr):
                    sub_attr = getattr(attr, sub_attr_name, None)
                    abs_method = getattr(
                        sub_attr, "__isabstractmethod__", None
                    )
                    if sub_attr and abs_method:
                        not_implemented.append(sub_attr_name)

                # Log missing implementations
                log.warning((
                    "Skipping abstract Class: {} in module {}."
                    " Missing implementations: {}"
                ).format(
                    attr_name, raw_module.__name__, ", ".join(not_implemented)
                ))
                continue

            settings_defs.append(attr)

    return settings_defs
@six.add_metaclass(ABCMeta)
class BaseModuleSettingsDef:
    """Definition of settings for OpenPype module or AddOn."""
    _id = None

    @property
    def id(self):
        """ID created on initialization.

        ID should be per created object. Helps to store objects.
        """
        if self._id is None:
            self._id = uuid4()
        return self._id

    @abstractmethod
    def get_settings_schemas(self, schema_type):
        """Setting schemas for passed schema type.

        These are main schemas by dynamic schema keys. If they're using
        sub schemas or templates they should be loaded with
        `get_dynamic_schemas`.

        Returns:
            dict: Schema by `dynamic_schema` keys.
        """
        pass

    @abstractmethod
    def get_dynamic_schemas(self, schema_type):
        """Settings schemas and templates that can be used anywhere.

        It is recommended to add prefix specific for addon/module to keys
        (e.g. "my_addon/real_schema_name").

        Returns:
            dict: Schemas and templates by their keys.
        """
        pass

    @abstractmethod
    def get_defaults(self, top_key):
        """Default values for passed top key.

        Top keys are (currently) "system_settings" or "project_settings".

        Should return exactly what was passed with `save_defaults`.

        Returns:
            dict: Default values by path to first key in OpenPype defaults.
        """
        pass

    @abstractmethod
    def save_defaults(self, top_key, data):
        """Save default values for passed top key.

        Top keys are (currently) "system_settings" or "project_settings".

        Passed data are by path to first key defined in main schemas.
        """
        pass
class ModuleSettingsDef(BaseModuleSettingsDef):
    """Settings definition with separated system and project settings parts.

    Reduces conditions that must be checked and adds predefined methods for
    each case.
    """
    def get_defaults(self, top_key):
        """Split method into 2 methods by top key."""
        if top_key == SYSTEM_SETTINGS_KEY:
            return self.get_default_system_settings() or {}
        elif top_key == PROJECT_SETTINGS_KEY:
            return self.get_default_project_settings() or {}
        return {}

    def save_defaults(self, top_key, data):
        """Split method into 2 methods by top key."""
        if top_key == SYSTEM_SETTINGS_KEY:
            self.save_system_defaults(data)
        elif top_key == PROJECT_SETTINGS_KEY:
            self.save_project_defaults(data)

    def get_settings_schemas(self, schema_type):
        """Split method into 2 methods by schema type."""
        if schema_type == SCHEMA_KEY_SYSTEM_SETTINGS:
            return self.get_system_settings_schemas() or {}
        elif schema_type == SCHEMA_KEY_PROJECT_SETTINGS:
            return self.get_project_settings_schemas() or {}
        return {}

    def get_dynamic_schemas(self, schema_type):
        """Split method into 2 methods by schema type."""
        if schema_type == SCHEMA_KEY_SYSTEM_SETTINGS:
            return self.get_system_dynamic_schemas() or {}
        elif schema_type == SCHEMA_KEY_PROJECT_SETTINGS:
            return self.get_project_dynamic_schemas() or {}
        return {}

    @abstractmethod
    def get_system_settings_schemas(self):
        """Schemas and templates usable in system settings schemas.

        Returns:
            dict: Schemas and templates by their names. Names must be unique
                across whole OpenPype.
        """
        pass

    @abstractmethod
    def get_project_settings_schemas(self):
        """Schemas and templates usable in project settings schemas.

        Returns:
            dict: Schemas and templates by their names. Names must be unique
                across whole OpenPype.
        """
        pass

    @abstractmethod
    def get_system_dynamic_schemas(self):
        """System schemas by dynamic schema name.

        If dynamic schema name is not available then schema will not be used.

        Returns:
            dict: Schemas or list of schemas by dynamic schema name.
        """
        pass

    @abstractmethod
    def get_project_dynamic_schemas(self):
        """Project schemas by dynamic schema name.

        If dynamic schema name is not available then schema will not be used.

        Returns:
            dict: Schemas or list of schemas by dynamic schema name.
        """
        pass

    @abstractmethod
    def get_default_system_settings(self):
        """Default system settings values.

        Returns:
            dict: Default values by path to first key.
        """
        pass

    @abstractmethod
    def get_default_project_settings(self):
        """Default project settings values.

        Returns:
            dict: Default values by path to first key.
        """
        pass

    @abstractmethod
    def save_system_defaults(self, data):
        """Save default system settings values.

        Passed data are by path to first key defined in main schemas.
        """
        pass

    @abstractmethod
    def save_project_defaults(self, data):
        """Save default project settings values.

        Passed data are by path to first key defined in main schemas.
        """
        pass
class JsonFilesSettingsDef(ModuleSettingsDef):
    """Preimplemented settings definition using json files and file structure.

    Expected file structure:
    ┕ root
      │
      │ # Default values
      ┝ defaults
      │ ┝ system_settings.json
      │ ┕ project_settings.json
      │
      │ # Schemas for `dynamic_template` type
      ┝ dynamic_schemas
      │ ┝ system_dynamic_schemas.json
      │ ┕ project_dynamic_schemas.json
      │
      │ # Schemas that can be used anywhere (enhancement for `dynamic_schemas`)
      ┕ schemas
        ┝ system_schemas
        │ ┝ <system schema.json>  # Any schema or template files
        │ ┕ ...
        ┕ project_schemas
          ┝ <project schema.json>  # Any schema or template files
          ┕ ...

    Schemas can be loaded with prefix to avoid duplicated schema/template names
    across all OpenPype addons/modules. Prefix can be defined with class
    attribute `schema_prefix`.

    Only thing which must be implemented is `get_settings_root_path`, which
    should return directory path to `root` (in structure graph above).
    """
    # Possible way how to define `schemas` prefix
    schema_prefix = ""

    @abstractmethod
    def get_settings_root_path(self):
        """Directory path where settings and their schemas are located."""
        pass

    def __init__(self):
        settings_root_dir = self.get_settings_root_path()
        defaults_dir = os.path.join(
            settings_root_dir, "defaults"
        )
        dynamic_schemas_dir = os.path.join(
            settings_root_dir, "dynamic_schemas"
        )
        schemas_dir = os.path.join(
            settings_root_dir, "schemas"
        )

        self.system_defaults_filepath = os.path.join(
            defaults_dir, "system_settings.json"
        )
        self.project_defaults_filepath = os.path.join(
            defaults_dir, "project_settings.json"
        )

        self.system_dynamic_schemas_filepath = os.path.join(
            dynamic_schemas_dir, "system_dynamic_schemas.json"
        )
        self.project_dynamic_schemas_filepath = os.path.join(
            dynamic_schemas_dir, "project_dynamic_schemas.json"
        )

        self.system_schemas_dir = os.path.join(
            schemas_dir, "system_schemas"
        )
        self.project_schemas_dir = os.path.join(
            schemas_dir, "project_schemas"
        )

    def _load_json_file_data(self, path):
        if os.path.exists(path):
            return load_json_file(path)
        return {}

    def get_default_system_settings(self):
        """Default system settings values.

        Returns:
            dict: Default values by path to first key.
        """
        return self._load_json_file_data(self.system_defaults_filepath)

    def get_default_project_settings(self):
        """Default project settings values.

        Returns:
            dict: Default values by path to first key.
        """
        return self._load_json_file_data(self.project_defaults_filepath)

    def _save_data_to_filepath(self, path, data):
        dirpath = os.path.dirname(path)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)

        with open(path, "w") as file_stream:
            json.dump(data, file_stream, indent=4)

    def save_system_defaults(self, data):
        """Save default system settings values.

        Passed data are by path to first key defined in main schemas.
        """
        self._save_data_to_filepath(self.system_defaults_filepath, data)

    def save_project_defaults(self, data):
        """Save default project settings values.

        Passed data are by path to first key defined in main schemas.
        """
        self._save_data_to_filepath(self.project_defaults_filepath, data)

    def get_system_dynamic_schemas(self):
        """System schemas by dynamic schema name.

        If dynamic schema name is not available then schema will not be used.

        Returns:
            dict: Schemas or list of schemas by dynamic schema name.
        """
        return self._load_json_file_data(self.system_dynamic_schemas_filepath)

    def get_project_dynamic_schemas(self):
        """Project schemas by dynamic schema name.

        If dynamic schema name is not available then schema will not be used.

        Returns:
            dict: Schemas or list of schemas by dynamic schema name.
        """
        return self._load_json_file_data(self.project_dynamic_schemas_filepath)

    def _load_files_from_path(self, path):
        output = {}
        if not path or not os.path.exists(path):
            return output

        if os.path.isfile(path):
            filename = os.path.basename(path)
            basename, ext = os.path.splitext(filename)
            if ext == ".json":
                if self.schema_prefix:
                    key = "{}/{}".format(self.schema_prefix, basename)
                else:
                    key = basename
                output[key] = self._load_json_file_data(path)
            return output

        path = os.path.normpath(path)
        for root, _, files in os.walk(path, topdown=False):
            for filename in files:
                basename, ext = os.path.splitext(filename)
                if ext != ".json":
                    continue

                json_path = os.path.join(root, filename)
                store_key = os.path.join(
                    root.replace(path, ""), basename
                ).replace("\\", "/")
                if self.schema_prefix:
                    store_key = "{}/{}".format(self.schema_prefix, store_key)
                output[store_key] = self._load_json_file_data(json_path)

        return output

    def get_system_settings_schemas(self):
        """Schemas and templates usable in system settings schemas.

        Returns:
            dict: Schemas and templates by their names. Names must be unique
                across whole OpenPype.
        """
        return self._load_files_from_path(self.system_schemas_dir)

    def get_project_settings_schemas(self):
        """Schemas and templates usable in project settings schemas.

        Returns:
            dict: Schemas and templates by their names. Names must be unique
                across whole OpenPype.
        """
        return self._load_files_from_path(self.project_schemas_dir)
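A minimal sketch of an addon settings definition built on the class above, assuming the file structure from the docstring lives in a `settings` folder next to the addon's python file; `AddonSettingsDef` and the `"my_addon"` prefix are hypothetical names.

```
import os

from openpype.modules import JsonFilesSettingsDef


class AddonSettingsDef(JsonFilesSettingsDef):
    # Prefix to avoid schema/template name clashes across addons
    schema_prefix = "my_addon"

    def get_settings_root_path(self):
        # The "settings" folder next to this file is the `root`
        # from the docstring's structure graph
        return os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "settings"
        )
```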

@@ -2,13 +2,10 @@ import os
import openpype
from openpype import resources
from openpype.modules import OpenPypeModule
from openpype_interfaces import (
    ITrayModule,
    IWebServerRoutes
)
from openpype_interfaces import ITrayModule


class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
class AvalonModule(OpenPypeModule, ITrayModule):
    name = "avalon"

    def initialize(self, modules_settings):

@@ -55,12 +52,12 @@ class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
    def tray_init(self):
        # Add library tool
        try:
            from avalon.tools.libraryloader import app
            from avalon import style
            from Qt import QtGui
            from avalon import style
            from openpype.tools.libraryloader import LibraryLoaderWindow

            self.libraryloader = app.Window(
                icon=QtGui.QIcon(resources.pype_icon_filepath()),
            self.libraryloader = LibraryLoaderWindow(
                icon=QtGui.QIcon(resources.get_openpype_icon_filepath()),
                show_projects=True,
                show_libraries=True
            )

@@ -71,16 +68,6 @@ class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
                exc_info=True
            )

    def connect_with_modules(self, _enabled_modules):
        return

    def webserver_initialization(self, server_manager):
        """Implementation of IWebServerRoutes interface."""

        if self.tray_initialized:
            from .rest_api import AvalonRestApiResource
            self.rest_api_obj = AvalonRestApiResource(self, server_manager)

    # Definition of Tray menu
    def tray_menu(self, tray_menu):
        from Qt import QtWidgets

@@ -108,3 +95,10 @@ class AvalonModule(OpenPypeModule, ITrayModule, IWebServerRoutes):
        # for Windows
        self.libraryloader.activateWindow()
        self.libraryloader.refresh()

    # Webserver module implementation
    def webserver_initialization(self, server_manager):
        """Add routes for webserver."""
        if self.tray_initialized:
            from .rest_api import AvalonRestApiResource
            self.rest_api_obj = AvalonRestApiResource(self, server_manager)
@ -10,18 +10,14 @@ from .constants import (
|
|||
from openpype.modules import OpenPypeModule
|
||||
from openpype_interfaces import (
|
||||
ITrayModule,
|
||||
IPluginPaths,
|
||||
IFtrackEventHandlerPaths,
|
||||
ITimersManager
|
||||
IPluginPaths
|
||||
)
|
||||
|
||||
|
||||
class ClockifyModule(
|
||||
OpenPypeModule,
|
||||
ITrayModule,
|
||||
IPluginPaths,
|
||||
IFtrackEventHandlerPaths,
|
||||
ITimersManager
|
||||
IPluginPaths
|
||||
):
|
||||
name = "clockify"
|
||||
|
||||
|
|
@ -39,6 +35,11 @@ class ClockifyModule(
|
|||
|
||||
self.clockapi = ClockifyAPI(master_parent=self)
|
||||
|
||||
# TimersManager attributes
|
||||
# - set `timers_manager_connector` only in `tray_init`
|
||||
self.timers_manager_connector = None
|
||||
self._timers_manager_module = None
|
||||
|
||||
def get_global_environments(self):
|
||||
return {
|
||||
"CLOCKIFY_WORKSPACE": self.workspace_name
|
||||
|
|
@ -61,6 +62,9 @@ class ClockifyModule(
|
|||
self.bool_timer_run = False
|
||||
self.bool_api_key_set = self.clockapi.set_api()
|
||||
|
||||
# Define itself as TimersManager connector
|
||||
self.timers_manager_connector = self
|
||||
|
||||
def tray_start(self):
|
||||
if self.bool_api_key_set is False:
|
||||
self.show_settings()
|
||||
|
|
@ -87,16 +91,13 @@ class ClockifyModule(
|
|||
"actions": [actions_path]
|
||||
}
|
||||
|
||||
def get_event_handler_paths(self):
|
||||
"""Implementaton of IFtrackEventHandlerPaths to get plugin paths."""
|
||||
def get_ftrack_event_handler_paths(self):
|
||||
"""Function for Ftrack module to add ftrack event handler paths."""
|
||||
return {
|
||||
"user": [CLOCKIFY_FTRACK_USER_PATH],
|
||||
"server": [CLOCKIFY_FTRACK_SERVER_PATH]
|
||||
}
|
||||
|
||||
def connect_with_modules(self, *_a, **_kw):
|
||||
return
|
||||
|
||||
def clockify_timer_stopped(self):
|
||||
self.bool_timer_run = False
|
||||
# Call `ITimersManager` method
|
||||
|
|
@ -165,10 +166,6 @@ class ClockifyModule(
|
|||
self.set_menu_visibility()
|
||||
time.sleep(5)
|
||||
|
||||
def stop_timer(self):
|
||||
"""Implementation of ITimersManager."""
|
||||
self.clockapi.finish_time_entry()
|
||||
|
||||
def signed_in(self):
|
||||
if not self.timer_manager:
|
||||
return
|
||||
|
|
@@ -179,8 +176,60 @@ class ClockifyModule(
         if self.timer_manager.is_running:
             self.start_timer_manager(self.timer_manager.last_task)
 
+    def on_message_widget_close(self):
+        self.message_widget = None
+
+    # Definition of Tray menu
+    def tray_menu(self, parent_menu):
+        # Menu for Tray App
+        from Qt import QtWidgets
+        menu = QtWidgets.QMenu("Clockify", parent_menu)
+        menu.setProperty("submenu", "on")
+
+        # Actions
+        action_show_settings = QtWidgets.QAction("Settings", menu)
+        action_stop_timer = QtWidgets.QAction("Stop timer", menu)
+
+        menu.addAction(action_show_settings)
+        menu.addAction(action_stop_timer)
+
+        action_show_settings.triggered.connect(self.show_settings)
+        action_stop_timer.triggered.connect(self.stop_timer)
+
+        self.action_stop_timer = action_stop_timer
+
+        self.set_menu_visibility()
+
+        parent_menu.addMenu(menu)
+
+    def show_settings(self):
+        self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
+        self.widget_settings.show()
+
+    def set_menu_visibility(self):
+        self.action_stop_timer.setVisible(self.bool_timer_run)
+
+    # --- TimersManager connection methods ---
+    def register_timers_manager(self, timer_manager_module):
+        """Store TimersManager for future use."""
+        self._timers_manager_module = timer_manager_module
+
+    def timer_started(self, data):
+        """Tell TimersManager that timer started."""
+        if self._timers_manager_module is not None:
+            self._timers_manager_module.timer_started(self._module.id, data)
+
+    def timer_stopped(self):
+        """Tell TimersManager that timer stopped."""
+        if self._timers_manager_module is not None:
+            self._timers_manager_module.timer_stopped(self._module.id)
+
+    def stop_timer(self):
+        """Called from TimersManager to stop timer."""
+        self.clockapi.finish_time_entry()
+
     def start_timer(self, input_data):
-        """Implementation of ITimersManager."""
+        """Called from TimersManager to start timer."""
         # If not api key is not entered then skip
         if not self.clockapi.get_api_key():
             return
@@ -237,36 +286,3 @@ class ClockifyModule(
         self.clockapi.start_time_entry(
             description, project_id, tag_ids=tag_ids
         )
-
-    def on_message_widget_close(self):
-        self.message_widget = None
-
-    # Definition of Tray menu
-    def tray_menu(self, parent_menu):
-        # Menu for Tray App
-        from Qt import QtWidgets
-        menu = QtWidgets.QMenu("Clockify", parent_menu)
-        menu.setProperty("submenu", "on")
-
-        # Actions
-        action_show_settings = QtWidgets.QAction("Settings", menu)
-        action_stop_timer = QtWidgets.QAction("Stop timer", menu)
-
-        menu.addAction(action_show_settings)
-        menu.addAction(action_stop_timer)
-
-        action_show_settings.triggered.connect(self.show_settings)
-        action_stop_timer.triggered.connect(self.stop_timer)
-
-        self.action_stop_timer = action_stop_timer
-
-        self.set_menu_visibility()
-
-        parent_menu.addMenu(menu)
-
-    def show_settings(self):
-        self.widget_settings.input_api_key.setText(self.clockapi.get_api_key())
-        self.widget_settings.show()
-
-    def set_menu_visibility(self):
-        self.action_stop_timer.setVisible(self.bool_timer_run)
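The hunks above drop the `IFtrackEventHandlerPaths`/`ITimersManager` bases and instead let the module expose itself through `timers_manager_connector`, which TimersManager picks up and calls `register_timers_manager` on. Below is a minimal, runnable sketch of that handshake, assuming only the method names visible in this diff; the Dummy* classes are hypothetical stand-ins, and the sketch uses `self.id` where the diff calls `self._module.id` (the attribute a separate connector object wrapping a module would have):

# Minimal sketch of the TimersManager connector handshake (assumptions
# noted above; DummyTimersManager/DummyConnector are illustrative only).

class DummyTimersManager:
    def timer_started(self, module_id, data):
        print("timer started by {}: {}".format(module_id, data))

    def timer_stopped(self, module_id):
        print("timer stopped by {}".format(module_id))


class DummyConnector:
    id = "clockify"

    def __init__(self):
        # Filled by TimersManager via `register_timers_manager`.
        self._timers_manager_module = None
        # The module advertises itself as its own connector.
        self.timers_manager_connector = self

    def register_timers_manager(self, timer_manager_module):
        self._timers_manager_module = timer_manager_module

    def timer_started(self, data):
        if self._timers_manager_module is not None:
            self._timers_manager_module.timer_started(self.id, data)

    def timer_stopped(self):
        if self._timers_manager_module is not None:
            self._timers_manager_module.timer_stopped(self.id)


manager = DummyTimersManager()
connector = DummyConnector()
connector.register_timers_manager(manager)
connector.timer_started({"task_name": "compositing"})
connector.timer_stopped()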
@@ -13,7 +13,7 @@ class MessageWidget(QtWidgets.QWidget):
         super(MessageWidget, self).__init__()
 
         # Icon
-        icon = QtGui.QIcon(resources.pype_icon_filepath())
+        icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
         self.setWindowIcon(icon)
 
         self.setWindowFlags(
@@ -90,7 +90,7 @@ class ClockifySettings(QtWidgets.QWidget):
         self.validated = False
 
         # Icon
-        icon = QtGui.QIcon(resources.pype_icon_filepath())
+        icon = QtGui.QIcon(resources.get_openpype_icon_filepath())
         self.setWindowIcon(icon)
 
         self.setWindowTitle("Clockify settings")
 
@@ -26,9 +26,6 @@ class DeadlineModule(OpenPypeModule, IPluginPaths):
                 "not specified. Disabling module."))
             return
 
-    def connect_with_modules(self, *_a, **_kw):
-        return
-
     def get_plugin_paths(self):
        """Deadline plugin paths."""
         current_dir = os.path.dirname(os.path.abspath(__file__))
 
@@ -11,7 +11,7 @@ import pyblish.api
 class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
     """Collect Deadline Webservice URL from instance."""
 
-    order = pyblish.api.CollectorOrder
+    order = pyblish.api.CollectorOrder + 0.01
     label = "Deadline Webservice from the Instance"
     families = ["rendering"]
 
@@ -46,24 +46,25 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
                 ["deadline"]
             )
 
-        try:
-            default_servers = deadline_settings["deadline_urls"]
-            project_servers = (
-                render_instance.context.data
-                ["project_settings"]
-                ["deadline"]
-                ["deadline_servers"]
-            )
-            deadline_servers = {
-                k: default_servers[k]
-                for k in project_servers
-                if k in default_servers
-            }
-
-        except AttributeError:
-            # Handle situation were we had only one url for deadline.
-            return render_instance.context.data["defaultDeadline"]
+        default_server = render_instance.context.data["defaultDeadline"]
+        instance_server = render_instance.data.get("deadlineServers")
+        if not instance_server:
+            return default_server
+
+        default_servers = deadline_settings["deadline_urls"]
+        project_servers = (
+            render_instance.context.data
+            ["project_settings"]
+            ["deadline"]
+            ["deadline_servers"]
+        )
+        deadline_servers = {
+            k: default_servers[k]
+            for k in project_servers
+            if k in default_servers
+        }
+        # This is Maya specific and may not reflect real selection of deadline
+        # url as dictionary keys in Python 2 are not ordered
         return deadline_servers[
             list(deadline_servers.keys())[
                 int(render_instance.data.get("deadlineServers"))
@@ -6,7 +6,7 @@ import pyblish.api
 class CollectDefaultDeadlineServer(pyblish.api.ContextPlugin):
     """Collect default Deadline Webservice URL."""
 
-    order = pyblish.api.CollectorOrder + 0.01
+    order = pyblish.api.CollectorOrder
     label = "Default Deadline Webservice"
 
     def process(self, context):
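Taken together with the previous file, the two `order` changes are a swap: the default-server collector moves down to plain `CollectorOrder` while the per-instance collector moves up to `CollectorOrder + 0.01`, so `context.data["defaultDeadline"]` is populated before the instance-level plugin needs it as a fallback. A toy demonstration of how pyblish sequences plugins by their `order` attribute (the plugin bodies are made up for the demo):

import pyblish.api
import pyblish.util


class CollectDefault(pyblish.api.ContextPlugin):
    # Base collector order: runs first.
    order = pyblish.api.CollectorOrder

    def process(self, context):
        context.data["defaultDeadline"] = "http://localhost:8082"
        context.create_instance("demo")


class CollectFromInstance(pyblish.api.InstancePlugin):
    # +0.01 sorts this after CollectDefault within the collection range.
    order = pyblish.api.CollectorOrder + 0.01

    def process(self, instance):
        # The default URL is already available here.
        assert "defaultDeadline" in instance.context.data


pyblish.api.register_plugin(CollectDefault)
pyblish.api.register_plugin(CollectFromInstance)
pyblish.util.publish()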
Some files were not shown because too many files have changed in this diff.