mirror of
https://github.com/ynput/ayon-core.git
synced 2025-12-25 05:14:40 +01:00
Merge branch 'develop' into bugfix/OP-6416_3dsmax-container-tab
This commit is contained in:
commit
edbecc50fb
98 changed files with 2186 additions and 615 deletions
12
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
12
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
|
|
@ -35,6 +35,12 @@ body:
|
|||
label: Version
|
||||
description: What version are you running? Look to OpenPype Tray
|
||||
options:
|
||||
- 3.16.5-nightly.4
|
||||
- 3.16.5-nightly.3
|
||||
- 3.16.5-nightly.2
|
||||
- 3.16.5-nightly.1
|
||||
- 3.16.4
|
||||
- 3.16.4-nightly.3
|
||||
- 3.16.4-nightly.2
|
||||
- 3.16.4-nightly.1
|
||||
- 3.16.3
|
||||
|
|
@ -129,12 +135,6 @@ body:
|
|||
- 3.14.9-nightly.1
|
||||
- 3.14.8
|
||||
- 3.14.8-nightly.4
|
||||
- 3.14.8-nightly.3
|
||||
- 3.14.8-nightly.2
|
||||
- 3.14.8-nightly.1
|
||||
- 3.14.7
|
||||
- 3.14.7-nightly.8
|
||||
- 3.14.7-nightly.7
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
|
|
|
|||
307
CHANGELOG.md
307
CHANGELOG.md
|
|
@ -1,6 +1,313 @@
|
|||
# Changelog
|
||||
|
||||
|
||||
## [3.16.4](https://github.com/ynput/OpenPype/tree/3.16.4)
|
||||
|
||||
|
||||
[Full Changelog](https://github.com/ynput/OpenPype/compare/3.16.3...3.16.4)
|
||||
|
||||
### **🆕 New features**
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Feature: Download last published workfile specify version <a href="https://github.com/ynput/OpenPype/pull/4998">#4998</a></summary>
|
||||
|
||||
Setting `workfile_version` key to hook's `self.launch_context.data` allow you to specify the workfile version you want sync service to download if none is matched locally. This is helpful if the last version hasn't been correctly published/synchronized, and you want to recover the previous one (or some you'd like).Version could be set in two ways:
|
||||
- OP's absolute version, matching the `version` index in DB.
|
||||
- Relative version in reverse order from the last one: `-2`, `-3`...I don't know where I should write documentation about that.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
### **🚀 Enhancements**
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Maya: allow not creation of group for Import loaders <a href="https://github.com/ynput/OpenPype/pull/5427">#5427</a></summary>
|
||||
|
||||
This PR enhances previous one. All ReferenceLoaders could not wrap imported products into explicit group.Also `Import` Loaders have same options. Control for this is separate in Settings, eg. Reference might wrap loaded items in group, `Import` might not.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>3dsMax: Settings for Ayon <a href="https://github.com/ynput/OpenPype/pull/5388">#5388</a></summary>
|
||||
|
||||
Max Addon Setting for Ayon
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>General: Navigation to Folder from Launcher <a href="https://github.com/ynput/OpenPype/pull/5404">#5404</a></summary>
|
||||
|
||||
Adds an action in launcher to open the directory of the asset.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Chore: Default variant in create plugin <a href="https://github.com/ynput/OpenPype/pull/5429">#5429</a></summary>
|
||||
|
||||
Attribute `default_variant` on create plugins always returns string and if default variant is not filled other ways how to get one are implemented.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Publisher: Thumbnail widget enhancements <a href="https://github.com/ynput/OpenPype/pull/5439">#5439</a></summary>
|
||||
|
||||
Thumbnails widget in Publisher has new 3 options to choose from: Paste (from clipboard), Take screenshot and Browse. Clear button and new options are not visible by default, user must expand options button to show them.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>AYON: Update ayon api to '0.3.5' <a href="https://github.com/ynput/OpenPype/pull/5460">#5460</a></summary>
|
||||
|
||||
Updated ayon-python-api to 0.3.5.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
### **🐛 Bug fixes**
|
||||
|
||||
|
||||
<details>
|
||||
<summary>AYON: Apply unknown ayon settings first <a href="https://github.com/ynput/OpenPype/pull/5435">#5435</a></summary>
|
||||
|
||||
Settings of custom addons are available in converted settings.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Maya: Fix wrong subset name of render family in deadline <a href="https://github.com/ynput/OpenPype/pull/5442">#5442</a></summary>
|
||||
|
||||
New Publisher is creating different subset names than previously which resulted in duplication of `render` string in final subset name of `render` family published on Deadline.This PR solves that, it also fixes issues with legacy instances from old publisher, it matches the subset name as was before.This solves same issue in Max implementation.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Maya: Fix setting of version to workfile instance <a href="https://github.com/ynput/OpenPype/pull/5452">#5452</a></summary>
|
||||
|
||||
If there are multiple instances of renderlayer published, previous logic resulted in unpredictable rewrite of instance family to 'workfile' if `Sync render version with workfile` was on.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Maya: Context plugin shouldn't be tied to family <a href="https://github.com/ynput/OpenPype/pull/5464">#5464</a></summary>
|
||||
|
||||
`Maya Current File` collector was tied to `workfile` unnecessary. It should run even if `workile` instance is not being published.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Unreal: Fix loading hero version for static and skeletal meshes <a href="https://github.com/ynput/OpenPype/pull/5393">#5393</a></summary>
|
||||
|
||||
Fixed a problem with loading hero versions for static ans skeletal meshes.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>TVPaint: Fix 'repeat' behavior <a href="https://github.com/ynput/OpenPype/pull/5412">#5412</a></summary>
|
||||
|
||||
Calculation of frames for repeat behavior is working correctly.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>AYON: Thumbnails cache and api prep <a href="https://github.com/ynput/OpenPype/pull/5437">#5437</a></summary>
|
||||
|
||||
Moved thumbnails cache from ayon python api to OpenPype and prepare AYON thumbnail resolver for new api functions. Current implementation should work with old and new ayon-python-api.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Nuke: Name of the Read Node should be updated correctly when switching versions or assets. <a href="https://github.com/ynput/OpenPype/pull/5444">#5444</a></summary>
|
||||
|
||||
Bug fixing of the read node's name not being updated correctly when setting version or switching asset.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Farm publishing: asymmetric handles fixed <a href="https://github.com/ynput/OpenPype/pull/5446">#5446</a></summary>
|
||||
|
||||
Handles are now set correctly on farm published product version if asymmetric were set to shot attributes.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Scene Inventory: Provider icons fix <a href="https://github.com/ynput/OpenPype/pull/5450">#5450</a></summary>
|
||||
|
||||
Fix how provider icons are accessed in scene inventory.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Fix typo on Deadline OP plugin name <a href="https://github.com/ynput/OpenPype/pull/5453">#5453</a></summary>
|
||||
|
||||
Surprised that no one has hit this bug yet... but it seems like there was a typo on the name of the OP Deadline plugin when submitting jobs to it.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>AYON: Fix version attributes update <a href="https://github.com/ynput/OpenPype/pull/5472">#5472</a></summary>
|
||||
|
||||
Fixed updates of attribs in AYON mode.
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
### **Merged pull requests**
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Added missing defaults for import_loader <a href="https://github.com/ynput/OpenPype/pull/5447">#5447</a></summary>
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Bug: Local settings don't open on 3.14.7 <a href="https://github.com/ynput/OpenPype/pull/5220">#5220</a></summary>
|
||||
|
||||
### Before posting a new ticket, have you looked through the documentation to find an answer?
|
||||
|
||||
Yes I have
|
||||
|
||||
### Have you looked through the existing tickets to find any related issues ?
|
||||
|
||||
Not yet
|
||||
|
||||
### Author of the bug
|
||||
|
||||
@FadyFS
|
||||
|
||||
### Version
|
||||
|
||||
3.15.11-nightly.3
|
||||
|
||||
### What platform you are running OpenPype on?
|
||||
|
||||
Linux / Centos
|
||||
|
||||
### Current Behavior:
|
||||
|
||||
the previous behavior (bug) :
|
||||

|
||||
|
||||
|
||||
### Expected Behavior:
|
||||
|
||||

|
||||
|
||||
|
||||
### What type of bug is it ?
|
||||
|
||||
Happened only once in a particular configuration
|
||||
|
||||
### Which project / workfile / asset / ...
|
||||
|
||||
open settings with 3.14.7
|
||||
|
||||
### Steps To Reproduce:
|
||||
|
||||
1. Run openpype on the 3.15.11-nightly.3 version
|
||||
2. Open settings in 3.14.7 version
|
||||
|
||||
### Relevant log output:
|
||||
|
||||
_No response_
|
||||
|
||||
### Additional context:
|
||||
|
||||
_No response_
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
<details>
|
||||
<summary>Tests: Add automated targets for tests <a href="https://github.com/ynput/OpenPype/pull/5443">#5443</a></summary>
|
||||
|
||||
Without it plugins with 'automated' targets won't be triggered (eg `CloseAE` etc.)
|
||||
|
||||
|
||||
___
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
|
||||
|
||||
## [3.16.3](https://github.com/ynput/OpenPype/tree/3.16.3)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -62,7 +62,7 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v
|
|||
|
||||
#### Clone repository:
|
||||
```sh
|
||||
git clone --recurse-submodules git@github.com:Pypeclub/OpenPype.git
|
||||
git clone --recurse-submodules git@github.com:ynput/OpenPype.git
|
||||
```
|
||||
|
||||
#### To build OpenPype:
|
||||
|
|
@ -144,6 +144,10 @@ sudo ./tools/docker_build.sh centos7
|
|||
|
||||
If all is successful, you'll find built OpenPype in `./build/` folder.
|
||||
|
||||
Docker build can be also started from Windows machine, just use `./tools/docker_build.ps1` instead of shell script.
|
||||
|
||||
This could be used even for building linux build (with argument `centos7` or `debian`)
|
||||
|
||||
#### Manual build
|
||||
You will need [Python >= 3.9](https://www.python.org/downloads/) and [git](https://git-scm.com/downloads). You'll also need [curl](https://curl.se) on systems that doesn't have one preinstalled.
|
||||
|
||||
|
|
|
|||
|
|
@ -1074,7 +1074,7 @@ def convert_update_folder_to_v4(project_name, asset_id, update_data, con):
|
|||
parent_id = None
|
||||
tasks = None
|
||||
new_data = {}
|
||||
attribs = {}
|
||||
attribs = full_update_data.pop("attrib", {})
|
||||
if "type" in update_data:
|
||||
new_update_data["active"] = update_data["type"] == "asset"
|
||||
|
||||
|
|
@ -1113,6 +1113,9 @@ def convert_update_folder_to_v4(project_name, asset_id, update_data, con):
|
|||
print("Folder has new data: {}".format(new_data))
|
||||
new_update_data["data"] = new_data
|
||||
|
||||
if attribs:
|
||||
new_update_data["attrib"] = attribs
|
||||
|
||||
if has_task_changes:
|
||||
raise ValueError("Task changes of folder are not implemented")
|
||||
|
||||
|
|
@ -1126,7 +1129,7 @@ def convert_update_subset_to_v4(project_name, subset_id, update_data, con):
|
|||
full_update_data = _from_flat_dict(update_data)
|
||||
data = full_update_data.get("data")
|
||||
new_data = {}
|
||||
attribs = {}
|
||||
attribs = full_update_data.pop("attrib", {})
|
||||
if data:
|
||||
if "family" in data:
|
||||
family = data.pop("family")
|
||||
|
|
@ -1148,9 +1151,6 @@ def convert_update_subset_to_v4(project_name, subset_id, update_data, con):
|
|||
elif value is not REMOVED_VALUE:
|
||||
new_data[key] = value
|
||||
|
||||
if attribs:
|
||||
new_update_data["attribs"] = attribs
|
||||
|
||||
if "name" in update_data:
|
||||
new_update_data["name"] = update_data["name"]
|
||||
|
||||
|
|
@ -1165,6 +1165,9 @@ def convert_update_subset_to_v4(project_name, subset_id, update_data, con):
|
|||
new_update_data["folderId"] = update_data["parent"]
|
||||
|
||||
flat_data = _to_flat_dict(new_update_data)
|
||||
if attribs:
|
||||
flat_data["attrib"] = attribs
|
||||
|
||||
if new_data:
|
||||
print("Subset has new data: {}".format(new_data))
|
||||
flat_data["data"] = new_data
|
||||
|
|
@ -1179,7 +1182,7 @@ def convert_update_version_to_v4(project_name, version_id, update_data, con):
|
|||
full_update_data = _from_flat_dict(update_data)
|
||||
data = full_update_data.get("data")
|
||||
new_data = {}
|
||||
attribs = {}
|
||||
attribs = full_update_data.pop("attrib", {})
|
||||
if data:
|
||||
if "author" in data:
|
||||
new_update_data["author"] = data.pop("author")
|
||||
|
|
@ -1196,9 +1199,6 @@ def convert_update_version_to_v4(project_name, version_id, update_data, con):
|
|||
elif value is not REMOVED_VALUE:
|
||||
new_data[key] = value
|
||||
|
||||
if attribs:
|
||||
new_update_data["attribs"] = attribs
|
||||
|
||||
if "name" in update_data:
|
||||
new_update_data["version"] = update_data["name"]
|
||||
|
||||
|
|
@ -1213,6 +1213,9 @@ def convert_update_version_to_v4(project_name, version_id, update_data, con):
|
|||
new_update_data["productId"] = update_data["parent"]
|
||||
|
||||
flat_data = _to_flat_dict(new_update_data)
|
||||
if attribs:
|
||||
flat_data["attrib"] = attribs
|
||||
|
||||
if new_data:
|
||||
print("Version has new data: {}".format(new_data))
|
||||
flat_data["data"] = new_data
|
||||
|
|
@ -1252,7 +1255,7 @@ def convert_update_representation_to_v4(
|
|||
data = full_update_data.get("data")
|
||||
|
||||
new_data = {}
|
||||
attribs = {}
|
||||
attribs = full_update_data.pop("attrib", {})
|
||||
if data:
|
||||
for key, value in data.items():
|
||||
if key in folder_attributes:
|
||||
|
|
@ -1309,6 +1312,9 @@ def convert_update_representation_to_v4(
|
|||
new_update_data["files"] = new_files
|
||||
|
||||
flat_data = _to_flat_dict(new_update_data)
|
||||
if attribs:
|
||||
flat_data["attrib"] = attribs
|
||||
|
||||
if new_data:
|
||||
print("Representation has new data: {}".format(new_data))
|
||||
flat_data["data"] = new_data
|
||||
|
|
|
|||
|
|
@ -83,10 +83,10 @@ def _get_subsets(
|
|||
project_name,
|
||||
subset_ids,
|
||||
subset_names,
|
||||
folder_ids,
|
||||
names_by_folder_ids,
|
||||
active,
|
||||
fields
|
||||
folder_ids=folder_ids,
|
||||
names_by_folder_ids=names_by_folder_ids,
|
||||
active=active,
|
||||
fields=fields,
|
||||
):
|
||||
yield convert_v4_subset_to_v3(subset)
|
||||
|
||||
|
|
|
|||
|
|
@ -45,6 +45,9 @@ class OCIOEnvHook(PreLaunchHook):
|
|||
if config_data:
|
||||
ocio_path = config_data["path"]
|
||||
|
||||
if self.host_name in ["nuke", "hiero"]:
|
||||
ocio_path = ocio_path.replace("\\", "/")
|
||||
|
||||
self.log.info(
|
||||
f"Setting OCIO environment to config path: {ocio_path}")
|
||||
|
||||
|
|
|
|||
|
|
@ -138,7 +138,6 @@ class CollectAERender(publish.AbstractCollectRender):
|
|||
fam = "render.farm"
|
||||
if fam not in instance.families:
|
||||
instance.families.append(fam)
|
||||
instance.toBeRenderedOn = "deadline"
|
||||
instance.renderer = "aerender"
|
||||
instance.farm = True # to skip integrate
|
||||
if "review" in instance.families:
|
||||
|
|
|
|||
|
|
@ -108,7 +108,6 @@ class CollectFusionRender(
|
|||
fam = "render.farm"
|
||||
if fam not in instance.families:
|
||||
instance.families.append(fam)
|
||||
instance.toBeRenderedOn = "deadline"
|
||||
instance.farm = True # to skip integrate
|
||||
if "review" in instance.families:
|
||||
# to skip ExtractReview locally
|
||||
|
|
|
|||
|
|
@ -147,13 +147,13 @@ class CollectFarmRender(publish.AbstractCollectRender):
|
|||
attachTo=False,
|
||||
setMembers=[node],
|
||||
publish=info[4],
|
||||
review=False,
|
||||
renderer=None,
|
||||
priority=50,
|
||||
name=node.split("/")[1],
|
||||
|
||||
family="render.farm",
|
||||
families=["render.farm"],
|
||||
farm=True,
|
||||
|
||||
resolutionWidth=context.data["resolutionWidth"],
|
||||
resolutionHeight=context.data["resolutionHeight"],
|
||||
|
|
@ -174,7 +174,6 @@ class CollectFarmRender(publish.AbstractCollectRender):
|
|||
outputFormat=info[1],
|
||||
outputStartFrame=info[3],
|
||||
leadingZeros=info[2],
|
||||
toBeRenderedOn='deadline',
|
||||
ignoreFrameHandleCheck=True
|
||||
|
||||
)
|
||||
|
|
|
|||
|
|
@ -22,9 +22,12 @@ log = logging.getLogger(__name__)
|
|||
JSON_PREFIX = "JSON:::"
|
||||
|
||||
|
||||
def get_asset_fps():
|
||||
def get_asset_fps(asset_doc=None):
|
||||
"""Return current asset fps."""
|
||||
return get_current_project_asset()["data"].get("fps")
|
||||
|
||||
if asset_doc is None:
|
||||
asset_doc = get_current_project_asset(fields=["data.fps"])
|
||||
return asset_doc["data"]["fps"]
|
||||
|
||||
|
||||
def set_id(node, unique_id, overwrite=False):
|
||||
|
|
@ -472,14 +475,19 @@ def maintained_selection():
|
|||
|
||||
|
||||
def reset_framerange():
|
||||
"""Set frame range to current asset"""
|
||||
"""Set frame range and FPS to current asset"""
|
||||
|
||||
# Get asset data
|
||||
project_name = get_current_project_name()
|
||||
asset_name = get_current_asset_name()
|
||||
# Get the asset ID from the database for the asset of current context
|
||||
asset_doc = get_asset_by_name(project_name, asset_name)
|
||||
asset_data = asset_doc["data"]
|
||||
|
||||
# Get FPS
|
||||
fps = get_asset_fps(asset_doc)
|
||||
|
||||
# Get Start and End Frames
|
||||
frame_start = asset_data.get("frameStart")
|
||||
frame_end = asset_data.get("frameEnd")
|
||||
|
||||
|
|
@ -493,6 +501,9 @@ def reset_framerange():
|
|||
frame_start -= int(handle_start)
|
||||
frame_end += int(handle_end)
|
||||
|
||||
# Set frame range and FPS
|
||||
print("Setting scene FPS to {}".format(int(fps)))
|
||||
set_scene_fps(fps)
|
||||
hou.playbar.setFrameRange(frame_start, frame_end)
|
||||
hou.playbar.setPlaybackRange(frame_start, frame_end)
|
||||
hou.setFrame(frame_start)
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ from openpype.lib import (
|
|||
emit_event,
|
||||
)
|
||||
|
||||
from .lib import get_asset_fps
|
||||
|
||||
log = logging.getLogger("openpype.hosts.houdini")
|
||||
|
||||
|
|
@ -385,11 +384,6 @@ def _set_context_settings():
|
|||
None
|
||||
"""
|
||||
|
||||
# Set new scene fps
|
||||
fps = get_asset_fps()
|
||||
print("Setting scene FPS to %i" % fps)
|
||||
lib.set_scene_fps(fps)
|
||||
|
||||
lib.reset_framerange()
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -33,7 +33,7 @@ class CreateVDBCache(plugin.HoudiniCreator):
|
|||
}
|
||||
|
||||
if self.selected_nodes:
|
||||
parms["soppath"] = self.selected_nodes[0].path()
|
||||
parms["soppath"] = self.get_sop_node_path(self.selected_nodes[0])
|
||||
|
||||
instance_node.setParms(parms)
|
||||
|
||||
|
|
@ -42,3 +42,63 @@ class CreateVDBCache(plugin.HoudiniCreator):
|
|||
hou.ropNodeTypeCategory(),
|
||||
hou.sopNodeTypeCategory()
|
||||
]
|
||||
|
||||
def get_sop_node_path(self, selected_node):
|
||||
"""Get Sop Path of the selected node.
|
||||
|
||||
Although Houdini allows ObjNode path on `sop_path` for the
|
||||
the ROP node, we prefer it set to the SopNode path explicitly.
|
||||
"""
|
||||
|
||||
# Allow sop level paths (e.g. /obj/geo1/box1)
|
||||
if isinstance(selected_node, hou.SopNode):
|
||||
self.log.debug(
|
||||
"Valid SopNode selection, 'SOP Path' in ROP will"
|
||||
" be set to '%s'.", selected_node.path()
|
||||
)
|
||||
return selected_node.path()
|
||||
|
||||
# Allow object level paths to Geometry nodes (e.g. /obj/geo1)
|
||||
# but do not allow other object level nodes types like cameras, etc.
|
||||
elif isinstance(selected_node, hou.ObjNode) and \
|
||||
selected_node.type().name() == "geo":
|
||||
|
||||
# Try to find output node.
|
||||
sop_node = self.get_obj_output(selected_node)
|
||||
if sop_node:
|
||||
self.log.debug(
|
||||
"Valid ObjNode selection, 'SOP Path' in ROP will "
|
||||
"be set to the child path '%s'.", sop_node.path()
|
||||
)
|
||||
return sop_node.path()
|
||||
|
||||
self.log.debug(
|
||||
"Selection isn't valid. 'SOP Path' in ROP will be empty."
|
||||
)
|
||||
return ""
|
||||
|
||||
def get_obj_output(self, obj_node):
|
||||
"""Try to find output node.
|
||||
|
||||
If any output nodes are present, return the output node with
|
||||
the minimum 'outputidx'
|
||||
If no output nodes are present, return the node with display flag
|
||||
If no nodes are present at all, return None
|
||||
"""
|
||||
|
||||
outputs = obj_node.subnetOutputs()
|
||||
|
||||
# if obj_node is empty
|
||||
if not outputs:
|
||||
return
|
||||
|
||||
# if obj_node has one output child whether its
|
||||
# sop output node or a node with the render flag
|
||||
elif len(outputs) == 1:
|
||||
return outputs[0]
|
||||
|
||||
# if there are more than one, then it has multiple output nodes
|
||||
# return the one with the minimum 'outputidx'
|
||||
else:
|
||||
return min(outputs,
|
||||
key=lambda node: node.evalParm('outputidx'))
|
||||
|
|
|
|||
|
|
@ -59,6 +59,9 @@ class HdaLoader(load.LoaderPlugin):
|
|||
def_paths = [d.libraryFilePath() for d in defs]
|
||||
new = def_paths.index(file_path)
|
||||
defs[new].setIsPreferred(True)
|
||||
hda_node.setParms({
|
||||
"representation": str(representation["_id"])
|
||||
})
|
||||
|
||||
def remove(self, container):
|
||||
node = container["node"]
|
||||
|
|
|
|||
|
|
@ -2,7 +2,19 @@
|
|||
<mainMenu>
|
||||
<menuBar>
|
||||
<subMenu id="openpype_menu">
|
||||
<label>OpenPype</label>
|
||||
<labelExpression><![CDATA[
|
||||
import os
|
||||
return os.environ.get("AVALON_LABEL") or "OpenPype"
|
||||
]]></labelExpression>
|
||||
<actionItem id="asset_name">
|
||||
<labelExpression><![CDATA[
|
||||
from openpype.pipeline import get_current_asset_name, get_current_task_name
|
||||
label = "{}, {}".format(get_current_asset_name(), get_current_task_name())
|
||||
return label
|
||||
]]></labelExpression>
|
||||
</actionItem>
|
||||
|
||||
<separatorItem/>
|
||||
|
||||
<scriptItem id="openpype_create">
|
||||
<label>Create...</label>
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ class RenderSettings(object):
|
|||
rt.viewport.setCamera(sel)
|
||||
break
|
||||
if not found:
|
||||
raise RuntimeError("Camera not found")
|
||||
raise RuntimeError("Active Camera not found")
|
||||
|
||||
def render_output(self, container):
|
||||
folder = rt.maxFilePath
|
||||
|
|
@ -113,7 +113,8 @@ class RenderSettings(object):
|
|||
# for setting up renderable camera
|
||||
arv = rt.MAXToAOps.ArnoldRenderView()
|
||||
render_camera = rt.viewport.GetCamera()
|
||||
arv.setOption("Camera", str(render_camera))
|
||||
if render_camera:
|
||||
arv.setOption("Camera", str(render_camera))
|
||||
|
||||
# TODO: add AOVs and extension
|
||||
img_fmt = self._project_settings["max"]["RenderSettings"]["image_format"] # noqa
|
||||
|
|
|
|||
|
|
@ -34,6 +34,9 @@ class CollectRender(pyblish.api.InstancePlugin):
|
|||
aovs = RenderProducts().get_aovs(instance.name)
|
||||
files_by_aov.update(aovs)
|
||||
|
||||
camera = rt.viewport.GetCamera()
|
||||
instance.data["cameras"] = [camera.name] if camera else None # noqa
|
||||
|
||||
if "expectedFiles" not in instance.data:
|
||||
instance.data["expectedFiles"] = list()
|
||||
instance.data["files"] = list()
|
||||
|
|
|
|||
|
|
@ -13,7 +13,6 @@ class ValidateMaxContents(pyblish.api.InstancePlugin):
|
|||
order = pyblish.api.ValidatorOrder
|
||||
families = ["camera",
|
||||
"maxScene",
|
||||
"maxrender",
|
||||
"review"]
|
||||
hosts = ["max"]
|
||||
label = "Max Scene Contents"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,46 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import pyblish.api
|
||||
from openpype.pipeline import (
|
||||
PublishValidationError,
|
||||
OptionalPyblishPluginMixin)
|
||||
from openpype.pipeline.publish import RepairAction
|
||||
from openpype.hosts.max.api.lib import get_current_renderer
|
||||
|
||||
from pymxs import runtime as rt
|
||||
|
||||
|
||||
class ValidateRenderableCamera(pyblish.api.InstancePlugin,
|
||||
OptionalPyblishPluginMixin):
|
||||
"""Validates Renderable Camera
|
||||
|
||||
Check if the renderable camera used for rendering
|
||||
"""
|
||||
|
||||
order = pyblish.api.ValidatorOrder
|
||||
families = ["maxrender"]
|
||||
hosts = ["max"]
|
||||
label = "Renderable Camera"
|
||||
optional = True
|
||||
actions = [RepairAction]
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
if not instance.data["cameras"]:
|
||||
raise PublishValidationError(
|
||||
"No renderable Camera found in scene."
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def repair(cls, instance):
|
||||
|
||||
rt.viewport.setType(rt.Name("view_camera"))
|
||||
camera = rt.viewport.GetCamera()
|
||||
cls.log.info(f"Camera {camera} set as renderable camera")
|
||||
renderer_class = get_current_renderer()
|
||||
renderer = str(renderer_class).split(":")[0]
|
||||
if renderer == "Arnold":
|
||||
arv = rt.MAXToAOps.ArnoldRenderView()
|
||||
arv.setOption("Camera", str(camera))
|
||||
arv.close()
|
||||
instance.data["cameras"] = [camera.name]
|
||||
|
|
@ -10,7 +10,6 @@ class CollectCurrentFile(pyblish.api.ContextPlugin):
|
|||
order = pyblish.api.CollectorOrder - 0.4
|
||||
label = "Maya Current File"
|
||||
hosts = ['maya']
|
||||
families = ["workfile"]
|
||||
|
||||
def process(self, context):
|
||||
"""Inject the current working file"""
|
||||
|
|
|
|||
|
|
@ -249,7 +249,6 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
|
|||
Authenticate with Muster, collect all data, prepare path for post
|
||||
render publish job and submit job to farm.
|
||||
"""
|
||||
instance.data["toBeRenderedOn"] = "muster"
|
||||
# setup muster environment
|
||||
self.MUSTER_REST_URL = os.environ.get("MUSTER_REST_URL")
|
||||
|
||||
|
|
|
|||
|
|
@ -3,94 +3,19 @@
|
|||
from __future__ import absolute_import
|
||||
|
||||
import pyblish.api
|
||||
import openpype.hosts.maya.api.action
|
||||
from openpype.pipeline.publish import (
|
||||
ValidateContentsOrder, PublishValidationError
|
||||
RepairAction,
|
||||
ValidateContentsOrder,
|
||||
PublishValidationError,
|
||||
OptionalPyblishPluginMixin
|
||||
)
|
||||
|
||||
from maya import cmds
|
||||
|
||||
|
||||
class SelectInvalidInstances(pyblish.api.Action):
|
||||
"""Select invalid instances in Outliner."""
|
||||
|
||||
label = "Select Instances"
|
||||
icon = "briefcase"
|
||||
on = "failed"
|
||||
|
||||
def process(self, context, plugin):
|
||||
"""Process invalid validators and select invalid instances."""
|
||||
# Get the errored instances
|
||||
failed = []
|
||||
for result in context.data["results"]:
|
||||
if (
|
||||
result["error"] is None
|
||||
or result["instance"] is None
|
||||
or result["instance"] in failed
|
||||
or result["plugin"] != plugin
|
||||
):
|
||||
continue
|
||||
|
||||
failed.append(result["instance"])
|
||||
|
||||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(failed, plugin)
|
||||
|
||||
if instances:
|
||||
self.log.info(
|
||||
"Selecting invalid nodes: %s" % ", ".join(
|
||||
[str(x) for x in instances]
|
||||
)
|
||||
)
|
||||
self.select(instances)
|
||||
else:
|
||||
self.log.info("No invalid nodes found.")
|
||||
self.deselect()
|
||||
|
||||
def select(self, instances):
|
||||
cmds.select(instances, replace=True, noExpand=True)
|
||||
|
||||
def deselect(self):
|
||||
cmds.select(deselect=True)
|
||||
|
||||
|
||||
class RepairSelectInvalidInstances(pyblish.api.Action):
|
||||
"""Repair the instance asset."""
|
||||
|
||||
label = "Repair"
|
||||
icon = "wrench"
|
||||
on = "failed"
|
||||
|
||||
def process(self, context, plugin):
|
||||
# Get the errored instances
|
||||
failed = []
|
||||
for result in context.data["results"]:
|
||||
if result["error"] is None:
|
||||
continue
|
||||
if result["instance"] is None:
|
||||
continue
|
||||
if result["instance"] in failed:
|
||||
continue
|
||||
if result["plugin"] != plugin:
|
||||
continue
|
||||
|
||||
failed.append(result["instance"])
|
||||
|
||||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(failed, plugin)
|
||||
|
||||
context_asset = context.data["assetEntity"]["name"]
|
||||
for instance in instances:
|
||||
self.set_attribute(instance, context_asset)
|
||||
|
||||
def set_attribute(self, instance, context_asset):
|
||||
cmds.setAttr(
|
||||
instance.data.get("name") + ".asset",
|
||||
context_asset,
|
||||
type="string"
|
||||
)
|
||||
|
||||
|
||||
class ValidateInstanceInContext(pyblish.api.InstancePlugin):
|
||||
class ValidateInstanceInContext(pyblish.api.InstancePlugin,
|
||||
OptionalPyblishPluginMixin):
|
||||
"""Validator to check if instance asset match context asset.
|
||||
|
||||
When working in per-shot style you always publish data in context of
|
||||
|
|
@ -104,11 +29,49 @@ class ValidateInstanceInContext(pyblish.api.InstancePlugin):
|
|||
label = "Instance in same Context"
|
||||
optional = True
|
||||
hosts = ["maya"]
|
||||
actions = [SelectInvalidInstances, RepairSelectInvalidInstances]
|
||||
actions = [
|
||||
openpype.hosts.maya.api.action.SelectInvalidAction, RepairAction
|
||||
]
|
||||
|
||||
def process(self, instance):
|
||||
if not self.is_active(instance.data):
|
||||
return
|
||||
|
||||
asset = instance.data.get("asset")
|
||||
context_asset = instance.context.data["assetEntity"]["name"]
|
||||
msg = "{} has asset {}".format(instance.name, asset)
|
||||
context_asset = self.get_context_asset(instance)
|
||||
if asset != context_asset:
|
||||
raise PublishValidationError(msg)
|
||||
raise PublishValidationError(
|
||||
message=(
|
||||
"Instance '{}' publishes to different asset than current "
|
||||
"context: {}. Current context: {}".format(
|
||||
instance.name, asset, context_asset
|
||||
)
|
||||
),
|
||||
description=(
|
||||
"## Publishing to a different asset\n"
|
||||
"There are publish instances present which are publishing "
|
||||
"into a different asset than your current context.\n\n"
|
||||
"Usually this is not what you want but there can be cases "
|
||||
"where you might want to publish into another asset or "
|
||||
"shot. If that's the case you can disable the validation "
|
||||
"on the instance to ignore it."
|
||||
)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
return [instance.data["instance_node"]]
|
||||
|
||||
@classmethod
|
||||
def repair(cls, instance):
|
||||
context_asset = cls.get_context_asset(instance)
|
||||
instance_node = instance.data["instance_node"]
|
||||
cmds.setAttr(
|
||||
"{}.asset".format(instance_node),
|
||||
context_asset,
|
||||
type="string"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_context_asset(instance):
|
||||
return instance.context.data["assetEntity"]["name"]
|
||||
|
|
|
|||
|
|
@ -4,6 +4,8 @@ from maya import cmds
|
|||
|
||||
import pyblish.api
|
||||
|
||||
from openpype.hosts.maya.api.lib import pairwise
|
||||
from openpype.hosts.maya.api.action import SelectInvalidAction
|
||||
from openpype.pipeline.publish import (
|
||||
ValidateContentsOrder,
|
||||
PublishValidationError
|
||||
|
|
@ -19,31 +21,33 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin):
|
|||
hosts = ['maya']
|
||||
families = ["workfile"]
|
||||
label = "Plug-in Path Attributes"
|
||||
actions = [SelectInvalidAction]
|
||||
|
||||
def get_invalid(self, instance):
|
||||
# Attributes are defined in project settings
|
||||
attribute = []
|
||||
|
||||
@classmethod
|
||||
def get_invalid(cls, instance):
|
||||
invalid = list()
|
||||
|
||||
# get the project setting
|
||||
validate_path = (
|
||||
instance.context.data["project_settings"]["maya"]["publish"]
|
||||
)
|
||||
file_attr = validate_path["ValidatePluginPathAttributes"]["attribute"]
|
||||
file_attr = cls.attribute
|
||||
if not file_attr:
|
||||
return invalid
|
||||
|
||||
# get the nodes and file attributes
|
||||
for node, attr in file_attr.items():
|
||||
# check the related nodes
|
||||
targets = cmds.ls(type=node)
|
||||
# Consider only valid node types to avoid "Unknown object type" warning
|
||||
all_node_types = set(cmds.allNodeTypes())
|
||||
node_types = [key for key in file_attr.keys() if key in all_node_types]
|
||||
|
||||
for target in targets:
|
||||
# get the filepath
|
||||
file_attr = "{}.{}".format(target, attr)
|
||||
filepath = cmds.getAttr(file_attr)
|
||||
for node, node_type in pairwise(cmds.ls(type=node_types,
|
||||
showType=True)):
|
||||
# get the filepath
|
||||
file_attr = "{}.{}".format(node, file_attr[node_type])
|
||||
filepath = cmds.getAttr(file_attr)
|
||||
|
||||
if filepath and not os.path.exists(filepath):
|
||||
self.log.error("File {0} not exists".format(filepath)) # noqa
|
||||
invalid.append(target)
|
||||
if filepath and not os.path.exists(filepath):
|
||||
cls.log.error("{} '{}' uses non-existing filepath: {}"
|
||||
.format(node_type, node, filepath))
|
||||
invalid.append(node)
|
||||
|
||||
return invalid
|
||||
|
||||
|
|
@ -51,5 +55,16 @@ class ValidatePluginPathAttributes(pyblish.api.InstancePlugin):
|
|||
"""Process all directories Set as Filenames in Non-Maya Nodes"""
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise PublishValidationError("Non-existent Path "
|
||||
"found: {0}".format(invalid))
|
||||
raise PublishValidationError(
|
||||
title="Plug-in Path Attributes",
|
||||
message="Non-existent filepath found on nodes: {}".format(
|
||||
", ".join(invalid)
|
||||
),
|
||||
description=(
|
||||
"## Plug-in nodes use invalid filepaths\n"
|
||||
"The workfile contains nodes from plug-ins that use "
|
||||
"filepaths which do not exist.\n\n"
|
||||
"Please make sure their filepaths are correct and the "
|
||||
"files exist on disk."
|
||||
)
|
||||
)
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ from openpype.hosts.maya.api import lib
|
|||
from openpype.pipeline.publish import (
|
||||
ValidateContentsOrder,
|
||||
RepairAction,
|
||||
PublishValidationError
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -67,5 +68,30 @@ class ValidateShapeZero(pyblish.api.Validator):
|
|||
|
||||
invalid = self.get_invalid(instance)
|
||||
if invalid:
|
||||
raise ValueError("Shapes found with non-zero component tweaks: "
|
||||
"{0}".format(invalid))
|
||||
raise PublishValidationError(
|
||||
title="Shape Component Tweaks",
|
||||
message="Shapes found with non-zero component tweaks: '{}'"
|
||||
"".format(", ".join(invalid)),
|
||||
description=(
|
||||
"## Shapes found with component tweaks\n"
|
||||
"Shapes were detected that have component tweaks on their "
|
||||
"components. Please remove the component tweaks to "
|
||||
"continue.\n\n"
|
||||
"### Repair\n"
|
||||
"The repair action will try to *freeze* the component "
|
||||
"tweaks into the shapes, which is usually the correct fix "
|
||||
"if the mesh has no construction history (= has its "
|
||||
"history deleted)."),
|
||||
detail=(
|
||||
"Maya allows to store component tweaks within shape nodes "
|
||||
"which are applied between its `inMesh` and `outMesh` "
|
||||
"connections resulting in the output of a shape node "
|
||||
"differing from the input. We usually want to avoid this "
|
||||
"for published meshes (in particular for Maya scenes) as "
|
||||
"it can have unintended results when using these meshes "
|
||||
"as intermediate meshes since it applies positional "
|
||||
"differences without being visible edits in the node "
|
||||
"graph.\n\n"
|
||||
"These tweaks are traditionally stored in the `.pnts` "
|
||||
"attribute of shapes.")
|
||||
)
|
||||
|
|
|
|||
|
|
@ -2041,6 +2041,7 @@ class WorkfileSettings(object):
|
|||
)
|
||||
|
||||
workfile_settings = imageio_host["workfile"]
|
||||
viewer_process_settings = imageio_host["viewer"]["viewerProcess"]
|
||||
|
||||
if not config_data:
|
||||
# TODO: backward compatibility for old projects - remove later
|
||||
|
|
@ -2091,6 +2092,15 @@ class WorkfileSettings(object):
|
|||
workfile_settings.pop("colorManagement", None)
|
||||
workfile_settings.pop("OCIO_config", None)
|
||||
|
||||
# get monitor lut from settings respecting Nuke version differences
|
||||
monitor_lut = workfile_settings.pop("monitorLut", None)
|
||||
monitor_lut_data = self._get_monitor_settings(
|
||||
viewer_process_settings, monitor_lut)
|
||||
|
||||
# set monitor related knobs luts (MonitorOut, Thumbnails)
|
||||
for knob, value_ in monitor_lut_data.items():
|
||||
workfile_settings[knob] = value_
|
||||
|
||||
# then set the rest
|
||||
for knob, value_ in workfile_settings.items():
|
||||
# skip unfilled ocio config path
|
||||
|
|
@ -2107,8 +2117,9 @@ class WorkfileSettings(object):
|
|||
|
||||
# set ocio config path
|
||||
if config_data:
|
||||
config_path = config_data["path"].replace("\\", "/")
|
||||
log.info("OCIO config path found: `{}`".format(
|
||||
config_data["path"]))
|
||||
config_path))
|
||||
|
||||
# check if there's a mismatch between environment and settings
|
||||
correct_settings = self._is_settings_matching_environment(
|
||||
|
|
@ -2118,6 +2129,40 @@ class WorkfileSettings(object):
|
|||
if correct_settings:
|
||||
self._set_ocio_config_path_to_workfile(config_data)
|
||||
|
||||
def _get_monitor_settings(self, viewer_lut, monitor_lut):
|
||||
""" Get monitor settings from viewer and monitor lut
|
||||
|
||||
Args:
|
||||
viewer_lut (str): viewer lut string
|
||||
monitor_lut (str): monitor lut string
|
||||
|
||||
Returns:
|
||||
dict: monitor settings
|
||||
"""
|
||||
output_data = {}
|
||||
m_display, m_viewer = get_viewer_config_from_string(monitor_lut)
|
||||
v_display, v_viewer = get_viewer_config_from_string(viewer_lut)
|
||||
|
||||
# set monitor lut differently for nuke version 14
|
||||
if nuke.NUKE_VERSION_MAJOR >= 14:
|
||||
output_data["monitorOutLUT"] = create_viewer_profile_string(
|
||||
m_viewer, m_display, path_like=False)
|
||||
# monitorLut=thumbnails - viewerProcess makes more sense
|
||||
output_data["monitorLut"] = create_viewer_profile_string(
|
||||
v_viewer, v_display, path_like=False)
|
||||
|
||||
if nuke.NUKE_VERSION_MAJOR == 13:
|
||||
output_data["monitorOutLUT"] = create_viewer_profile_string(
|
||||
m_viewer, m_display, path_like=False)
|
||||
# monitorLut=thumbnails - viewerProcess makes more sense
|
||||
output_data["monitorLut"] = create_viewer_profile_string(
|
||||
v_viewer, v_display, path_like=True)
|
||||
if nuke.NUKE_VERSION_MAJOR <= 12:
|
||||
output_data["monitorLut"] = create_viewer_profile_string(
|
||||
m_viewer, m_display, path_like=True)
|
||||
|
||||
return output_data
|
||||
|
||||
def _is_settings_matching_environment(self, config_data):
|
||||
""" Check if OCIO config path is different from environment
|
||||
|
||||
|
|
@ -2177,6 +2222,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
|
|||
"""
|
||||
# replace path with env var if possible
|
||||
ocio_path = self._replace_ocio_path_with_env_var(config_data)
|
||||
ocio_path = ocio_path.replace("\\", "/")
|
||||
|
||||
log.info("Setting OCIO config path to: `{}`".format(
|
||||
ocio_path))
|
||||
|
|
@ -2232,7 +2278,7 @@ Reopening Nuke should synchronize these paths and resolve any discrepancies.
|
|||
Returns:
|
||||
str: OCIO config path with environment variable TCL expression
|
||||
"""
|
||||
config_path = config_data["path"]
|
||||
config_path = config_data["path"].replace("\\", "/")
|
||||
config_template = config_data["template"]
|
||||
|
||||
included_vars = self._get_included_vars(config_template)
|
||||
|
|
@ -3320,11 +3366,11 @@ def get_viewer_config_from_string(input_string):
|
|||
display = split[0]
|
||||
elif "(" in viewer:
|
||||
pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"
|
||||
result = re.findall(pattern, viewer)
|
||||
result_ = re.findall(pattern, viewer)
|
||||
try:
|
||||
result = result.pop()
|
||||
display = str(result[1]).rstrip()
|
||||
viewer = str(result[0]).rstrip()
|
||||
result_ = result_.pop()
|
||||
display = str(result_[1]).rstrip()
|
||||
viewer = str(result_[0]).rstrip()
|
||||
except IndexError:
|
||||
raise IndexError((
|
||||
"Viewer Input string is not correct. "
|
||||
|
|
@ -3332,3 +3378,22 @@ def get_viewer_config_from_string(input_string):
|
|||
).format(input_string))
|
||||
|
||||
return (display, viewer)
|
||||
|
||||
|
||||
def create_viewer_profile_string(viewer, display=None, path_like=False):
|
||||
"""Convert viewer and display to string
|
||||
|
||||
Args:
|
||||
viewer (str): viewer name
|
||||
display (Optional[str]): display name
|
||||
path_like (Optional[bool]): if True, return path like string
|
||||
|
||||
Returns:
|
||||
str: viewer config string
|
||||
"""
|
||||
if not display:
|
||||
return viewer
|
||||
|
||||
if path_like:
|
||||
return "{}/{}".format(display, viewer)
|
||||
return "{} ({})".format(viewer, display)
|
||||
|
|
|
|||
|
|
@ -543,6 +543,9 @@ def list_instances(creator_id=None):
|
|||
|
||||
For SubsetManager
|
||||
|
||||
Args:
|
||||
creator_id (Optional[str]): creator identifier
|
||||
|
||||
Returns:
|
||||
(list) of dictionaries matching instances format
|
||||
"""
|
||||
|
|
@ -575,10 +578,13 @@ def list_instances(creator_id=None):
|
|||
if creator_id and instance_data["creator_identifier"] != creator_id:
|
||||
continue
|
||||
|
||||
if instance_data["instance_id"] in instance_ids:
|
||||
instance_id = instance_data.get("instance_id")
|
||||
if not instance_id:
|
||||
pass
|
||||
elif instance_id in instance_ids:
|
||||
instance_data.pop("instance_id")
|
||||
else:
|
||||
instance_ids.add(instance_data["instance_id"])
|
||||
instance_ids.add(instance_id)
|
||||
|
||||
# node name could change, so update subset name data
|
||||
_update_subset_name_data(instance_data, node)
|
||||
|
|
|
|||
|
|
@ -327,6 +327,7 @@ class NukeWriteCreator(NukeCreator):
|
|||
"frames": "Use existing frames"
|
||||
}
|
||||
if ("farm_rendering" in self.instance_attributes):
|
||||
rendering_targets["frames_farm"] = "Use existing frames - farm"
|
||||
rendering_targets["farm"] = "Farm rendering"
|
||||
|
||||
return EnumDef(
|
||||
|
|
|
|||
|
|
@ -2,11 +2,13 @@ import nuke
|
|||
import pyblish.api
|
||||
|
||||
|
||||
class CollectInstanceData(pyblish.api.InstancePlugin):
|
||||
"""Collect all nodes with Avalon knob."""
|
||||
class CollectNukeInstanceData(pyblish.api.InstancePlugin):
|
||||
"""Collect Nuke instance data
|
||||
|
||||
"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.49
|
||||
label = "Collect Instance Data"
|
||||
label = "Collect Nuke Instance Data"
|
||||
hosts = ["nuke", "nukeassist"]
|
||||
|
||||
# presets
|
||||
|
|
@ -40,5 +42,14 @@ class CollectInstanceData(pyblish.api.InstancePlugin):
|
|||
"pixelAspect": pixel_aspect
|
||||
|
||||
})
|
||||
|
||||
# add creator attributes to instance
|
||||
creator_attributes = instance.data["creator_attributes"]
|
||||
instance.data.update(creator_attributes)
|
||||
|
||||
# add review family if review activated on instance
|
||||
if instance.data.get("review"):
|
||||
instance.data["families"].append("review")
|
||||
|
||||
self.log.debug("Collected instance: {}".format(
|
||||
instance.data))
|
||||
|
|
@ -5,7 +5,7 @@ import nuke
|
|||
class CollectSlate(pyblish.api.InstancePlugin):
|
||||
"""Check if SLATE node is in scene and connected to rendering tree"""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.09
|
||||
order = pyblish.api.CollectorOrder + 0.002
|
||||
label = "Collect Slate Node"
|
||||
hosts = ["nuke"]
|
||||
families = ["render"]
|
||||
|
|
@ -13,10 +13,14 @@ class CollectSlate(pyblish.api.InstancePlugin):
|
|||
def process(self, instance):
|
||||
node = instance.data["transientData"]["node"]
|
||||
|
||||
slate = next((n for n in nuke.allNodes()
|
||||
if "slate" in n.name().lower()
|
||||
if not n["disable"].getValue()),
|
||||
None)
|
||||
slate = next(
|
||||
(
|
||||
n_ for n_ in nuke.allNodes()
|
||||
if "slate" in n_.name().lower()
|
||||
if not n_["disable"].getValue()
|
||||
),
|
||||
None
|
||||
)
|
||||
|
||||
if slate:
|
||||
# check if slate node is connected to write node tree
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import os
|
||||
from pprint import pformat
|
||||
import nuke
|
||||
import pyblish.api
|
||||
from openpype.hosts.nuke import api as napi
|
||||
|
|
@ -15,30 +14,16 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
|
|||
hosts = ["nuke", "nukeassist"]
|
||||
families = ["render", "prerender", "image"]
|
||||
|
||||
# cache
|
||||
_write_nodes = {}
|
||||
_frame_ranges = {}
|
||||
|
||||
def process(self, instance):
|
||||
self.log.debug(pformat(instance.data))
|
||||
creator_attributes = instance.data["creator_attributes"]
|
||||
instance.data.update(creator_attributes)
|
||||
|
||||
group_node = instance.data["transientData"]["node"]
|
||||
render_target = instance.data["render_target"]
|
||||
family = instance.data["family"]
|
||||
families = instance.data["families"]
|
||||
|
||||
# add targeted family to families
|
||||
instance.data["families"].append(
|
||||
"{}.{}".format(family, render_target)
|
||||
)
|
||||
if instance.data.get("review"):
|
||||
instance.data["families"].append("review")
|
||||
|
||||
child_nodes = napi.get_instance_group_node_childs(instance)
|
||||
instance.data["transientData"]["childNodes"] = child_nodes
|
||||
|
||||
write_node = None
|
||||
for x in child_nodes:
|
||||
if x.Class() == "Write":
|
||||
write_node = x
|
||||
write_node = self._write_node_helper(instance)
|
||||
|
||||
if write_node is None:
|
||||
self.log.warning(
|
||||
|
|
@ -48,113 +33,134 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
|
|||
)
|
||||
return
|
||||
|
||||
instance.data["writeNode"] = write_node
|
||||
self.log.debug("checking instance: {}".format(instance))
|
||||
# get colorspace and add to version data
|
||||
colorspace = napi.get_colorspace_from_node(write_node)
|
||||
|
||||
# Determine defined file type
|
||||
ext = write_node["file_type"].value()
|
||||
if render_target == "frames":
|
||||
self._set_existing_files_data(instance, colorspace)
|
||||
|
||||
# Get frame range
|
||||
handle_start = instance.context.data["handleStart"]
|
||||
handle_end = instance.context.data["handleEnd"]
|
||||
first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
last_frame = int(nuke.root()["last_frame"].getValue())
|
||||
frame_length = int(last_frame - first_frame + 1)
|
||||
elif render_target == "frames_farm":
|
||||
collected_frames = self._set_existing_files_data(
|
||||
instance, colorspace)
|
||||
|
||||
if write_node["use_limit"].getValue():
|
||||
first_frame = int(write_node["first"].getValue())
|
||||
last_frame = int(write_node["last"].getValue())
|
||||
self._set_expected_files(instance, collected_frames)
|
||||
|
||||
self._add_farm_instance_data(instance)
|
||||
|
||||
elif render_target == "farm":
|
||||
self._add_farm_instance_data(instance)
|
||||
|
||||
# set additional instance data
|
||||
self._set_additional_instance_data(instance, render_target, colorspace)
|
||||
|
||||
def _set_existing_files_data(self, instance, colorspace):
|
||||
"""Set existing files data to instance data.
|
||||
|
||||
Args:
|
||||
instance (pyblish.api.Instance): pyblish instance
|
||||
colorspace (str): colorspace
|
||||
|
||||
Returns:
|
||||
list: collected frames
|
||||
"""
|
||||
collected_frames = self._get_collected_frames(instance)
|
||||
|
||||
representation = self._get_existing_frames_representation(
|
||||
instance, collected_frames
|
||||
)
|
||||
|
||||
# inject colorspace data
|
||||
self.set_representation_colorspace(
|
||||
representation, instance.context,
|
||||
colorspace=colorspace
|
||||
)
|
||||
|
||||
instance.data["representations"].append(representation)
|
||||
|
||||
return collected_frames
|
||||
|
||||
def _set_expected_files(self, instance, collected_frames):
|
||||
"""Set expected files to instance data.
|
||||
|
||||
Args:
|
||||
instance (pyblish.api.Instance): pyblish instance
|
||||
collected_frames (list): collected frames
|
||||
"""
|
||||
write_node = self._write_node_helper(instance)
|
||||
|
||||
write_file_path = nuke.filename(write_node)
|
||||
output_dir = os.path.dirname(write_file_path)
|
||||
|
||||
# get colorspace and add to version data
|
||||
colorspace = napi.get_colorspace_from_node(write_node)
|
||||
instance.data["expectedFiles"] = [
|
||||
os.path.join(output_dir, source_file)
|
||||
for source_file in collected_frames
|
||||
]
|
||||
|
||||
self.log.debug('output dir: {}'.format(output_dir))
|
||||
def _get_frame_range_data(self, instance):
|
||||
"""Get frame range data from instance.
|
||||
|
||||
if render_target == "frames":
|
||||
representation = {
|
||||
'name': ext,
|
||||
'ext': ext,
|
||||
"stagingDir": output_dir,
|
||||
"tags": []
|
||||
}
|
||||
Args:
|
||||
instance (pyblish.api.Instance): pyblish instance
|
||||
|
||||
# get file path knob
|
||||
node_file_knob = write_node["file"]
|
||||
# list file paths based on input frames
|
||||
expected_paths = list(sorted({
|
||||
node_file_knob.evaluate(frame)
|
||||
for frame in range(first_frame, last_frame + 1)
|
||||
}))
|
||||
Returns:
|
||||
tuple: first_frame, last_frame
|
||||
"""
|
||||
|
||||
# convert only to base names
|
||||
expected_filenames = [
|
||||
os.path.basename(filepath)
|
||||
for filepath in expected_paths
|
||||
]
|
||||
instance_name = instance.data["name"]
|
||||
|
||||
# make sure files are existing at folder
|
||||
collected_frames = [
|
||||
filename
|
||||
for filename in os.listdir(output_dir)
|
||||
if filename in expected_filenames
|
||||
]
|
||||
if self._frame_ranges.get(instance_name):
|
||||
# return cashed write node
|
||||
return self._frame_ranges[instance_name]
|
||||
|
||||
if collected_frames:
|
||||
collected_frames_len = len(collected_frames)
|
||||
frame_start_str = "%0{}d".format(
|
||||
len(str(last_frame))) % first_frame
|
||||
representation['frameStart'] = frame_start_str
|
||||
write_node = self._write_node_helper(instance)
|
||||
|
||||
# in case slate is expected and not yet rendered
|
||||
self.log.debug("_ frame_length: {}".format(frame_length))
|
||||
self.log.debug("_ collected_frames_len: {}".format(
|
||||
collected_frames_len))
|
||||
# Get frame range from workfile
|
||||
first_frame = int(nuke.root()["first_frame"].getValue())
|
||||
last_frame = int(nuke.root()["last_frame"].getValue())
|
||||
|
||||
# this will only run if slate frame is not already
|
||||
# rendered from previews publishes
|
||||
if (
|
||||
"slate" in families
|
||||
and frame_length == collected_frames_len
|
||||
and family == "render"
|
||||
):
|
||||
frame_slate_str = (
|
||||
"{{:0{}d}}".format(len(str(last_frame)))
|
||||
).format(first_frame - 1)
|
||||
# Get frame range from write node if activated
|
||||
if write_node["use_limit"].getValue():
|
||||
first_frame = int(write_node["first"].getValue())
|
||||
last_frame = int(write_node["last"].getValue())
|
||||
|
||||
slate_frame = collected_frames[0].replace(
|
||||
frame_start_str, frame_slate_str)
|
||||
collected_frames.insert(0, slate_frame)
|
||||
# add to cache
|
||||
self._frame_ranges[instance_name] = (first_frame, last_frame)
|
||||
|
||||
if collected_frames_len == 1:
|
||||
representation['files'] = collected_frames.pop()
|
||||
else:
|
||||
representation['files'] = collected_frames
|
||||
return first_frame, last_frame
|
||||
|
||||
# inject colorspace data
|
||||
self.set_representation_colorspace(
|
||||
representation, instance.context,
|
||||
colorspace=colorspace
|
||||
)
|
||||
def _set_additional_instance_data(
|
||||
self, instance, render_target, colorspace
|
||||
):
|
||||
"""Set additional instance data.
|
||||
|
||||
instance.data["representations"].append(representation)
|
||||
self.log.info("Publishing rendered frames ...")
|
||||
Args:
|
||||
instance (pyblish.api.Instance): pyblish instance
|
||||
render_target (str): render target
|
||||
colorspace (str): colorspace
|
||||
"""
|
||||
family = instance.data["family"]
|
||||
|
||||
elif render_target == "farm":
|
||||
farm_keys = ["farm_chunk", "farm_priority", "farm_concurrency"]
|
||||
for key in farm_keys:
|
||||
# Skip if key is not in creator attributes
|
||||
if key not in creator_attributes:
|
||||
continue
|
||||
# Add farm attributes to instance
|
||||
instance.data[key] = creator_attributes[key]
|
||||
# add targeted family to families
|
||||
instance.data["families"].append(
|
||||
"{}.{}".format(family, render_target)
|
||||
)
|
||||
self.log.debug("Appending render target to families: {}.{}".format(
|
||||
family, render_target)
|
||||
)
|
||||
|
||||
# Farm rendering
|
||||
instance.data["transfer"] = False
|
||||
instance.data["farm"] = True
|
||||
self.log.info("Farm rendering ON ...")
|
||||
write_node = self._write_node_helper(instance)
|
||||
|
||||
# Determine defined file type
|
||||
ext = write_node["file_type"].value()
|
||||
|
||||
# get frame range data
|
||||
handle_start = instance.context.data["handleStart"]
|
||||
handle_end = instance.context.data["handleEnd"]
|
||||
first_frame, last_frame = self._get_frame_range_data(instance)
|
||||
|
||||
# get output paths
|
||||
write_file_path = nuke.filename(write_node)
|
||||
output_dir = os.path.dirname(write_file_path)
|
||||
|
||||
# TODO: remove this when we have proper colorspace support
|
||||
version_data = {
|
||||
|
|
@ -188,10 +194,6 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
|
|||
"frameEndHandle": last_frame,
|
||||
})
|
||||
|
||||
# make sure rendered sequence on farm will
|
||||
# be used for extract review
|
||||
if not instance.data.get("review"):
|
||||
instance.data["useSequenceForReview"] = False
|
||||
|
||||
# TODO temporarily set stagingDir as persistent for backward
|
||||
# compatibility. This is mainly focused on `renders`folders which
|
||||
|
|
@ -199,4 +201,201 @@ class CollectNukeWrites(pyblish.api.InstancePlugin,
|
|||
# this logic should be removed and replaced with custom staging dir
|
||||
instance.data["stagingDir_persistent"] = True
|
||||
|
||||
self.log.debug("instance.data: {}".format(pformat(instance.data)))
|
||||
def _write_node_helper(self, instance):
|
||||
"""Helper function to get write node from instance.
|
||||
|
||||
Also sets instance transient data with child nodes.
|
||||
|
||||
Args:
|
||||
instance (pyblish.api.Instance): pyblish instance
|
||||
|
||||
Returns:
|
||||
nuke.Node: write node
|
||||
"""
|
||||
instance_name = instance.data["name"]
|
||||
|
||||
if self._write_nodes.get(instance_name):
|
||||
# return cashed write node
|
||||
return self._write_nodes[instance_name]
|
||||
|
||||
# get all child nodes from group node
|
||||
child_nodes = napi.get_instance_group_node_childs(instance)
|
||||
|
||||
# set child nodes to instance transient data
|
||||
instance.data["transientData"]["childNodes"] = child_nodes
|
||||
|
||||
write_node = None
|
||||
for node_ in child_nodes:
|
||||
if node_.Class() == "Write":
|
||||
write_node = node_
|
||||
|
||||
if write_node:
|
||||
# for slate frame extraction
|
||||
instance.data["transientData"]["writeNode"] = write_node
|
||||
# add to cache
|
||||
self._write_nodes[instance_name] = write_node
|
||||
|
||||
return self._write_nodes[instance_name]
|
||||
|
||||
def _get_existing_frames_representation(
|
||||
self,
|
||||
instance,
|
||||
collected_frames
|
||||
):
|
||||
"""Get existing frames representation.
|
||||
|
||||
Args:
|
||||
instance (pyblish.api.Instance): pyblish instance
|
||||
collected_frames (list): collected frames
|
||||
|
||||
Returns:
|
||||
dict: representation
|
||||
"""
|
||||
|
||||
first_frame, last_frame = self._get_frame_range_data(instance)
|
||||
|
||||
write_node = self._write_node_helper(instance)
|
||||
|
||||
write_file_path = nuke.filename(write_node)
|
||||
output_dir = os.path.dirname(write_file_path)
|
||||
|
||||
# Determine defined file type
|
||||
ext = write_node["file_type"].value()
|
||||
|
||||
representation = {
|
||||
"name": ext,
|
||||
"ext": ext,
|
||||
"stagingDir": output_dir,
|
||||
"tags": []
|
||||
}
|
||||
|
||||
frame_start_str = self._get_frame_start_str(first_frame, last_frame)
|
||||
|
||||
representation['frameStart'] = frame_start_str
|
||||
|
||||
# set slate frame
|
||||
collected_frames = self._add_slate_frame_to_collected_frames(
|
||||
instance,
|
||||
collected_frames,
|
||||
first_frame,
|
||||
last_frame
|
||||
)
|
||||
|
||||
if len(collected_frames) == 1:
|
||||
representation['files'] = collected_frames.pop()
|
||||
else:
|
||||
representation['files'] = collected_frames
|
||||
|
||||
return representation
|
||||
|
||||
def _get_frame_start_str(self, first_frame, last_frame):
|
||||
"""Get frame start string.
|
||||
|
||||
Args:
|
||||
first_frame (int): first frame
|
||||
last_frame (int): last frame
|
||||
|
||||
Returns:
|
||||
str: frame start string
|
||||
"""
|
||||
# convert first frame to string with padding
|
||||
return (
|
||||
"{{:0{}d}}".format(len(str(last_frame)))
|
||||
).format(first_frame)
|
||||
|
||||
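The padding trick above derives the zero-pad width from the digit count of the last frame. A minimal standalone sketch of the same formatting, using hypothetical frame numbers outside the plugin:

# pad the first frame to the width of the last frame's digit count
first_frame, last_frame = 998, 1010
padding = len(str(last_frame))  # 4 digits
frame_start_str = "{{:0{}d}}".format(padding).format(first_frame)
assert frame_start_str == "0998"
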
    def _add_slate_frame_to_collected_frames(
        self,
        instance,
        collected_frames,
        first_frame,
        last_frame
    ):
        """Add slate frame to collected frames.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            collected_frames (list): collected frames
            first_frame (int): first frame
            last_frame (int): last frame

        Returns:
            list: collected frames
        """
        frame_start_str = self._get_frame_start_str(first_frame, last_frame)
        frame_length = int(last_frame - first_frame + 1)

        # this will only run if slate frame is not already
        # rendered from previous publishes
        if (
            "slate" in instance.data["families"]
            and frame_length == len(collected_frames)
        ):
            frame_slate_str = self._get_frame_start_str(
                first_frame - 1,
                last_frame
            )

            slate_frame = collected_frames[0].replace(
                frame_start_str, frame_slate_str)
            collected_frames.insert(0, slate_frame)

        return collected_frames

    def _add_farm_instance_data(self, instance):
        """Add farm publishing related instance data.

        Args:
            instance (pyblish.api.Instance): pyblish instance
        """

        # make sure rendered sequence on farm will
        # be used for extract review
        if not instance.data.get("review"):
            instance.data["useSequenceForReview"] = False

        # Farm rendering
        instance.data.update({
            "transfer": False,
            "farm": True  # to skip integrate
        })
        self.log.info("Farm rendering ON ...")

    def _get_collected_frames(self, instance):
        """Get collected frames.

        Args:
            instance (pyblish.api.Instance): pyblish instance

        Returns:
            list: collected frames
        """

        first_frame, last_frame = self._get_frame_range_data(instance)

        write_node = self._write_node_helper(instance)

        write_file_path = nuke.filename(write_node)
        output_dir = os.path.dirname(write_file_path)

        # get file path knob
        node_file_knob = write_node["file"]
        # list file paths based on input frames
        expected_paths = list(sorted({
            node_file_knob.evaluate(frame)
            for frame in range(first_frame, last_frame + 1)
        }))

        # convert only to base names
        expected_filenames = {
            os.path.basename(filepath)
            for filepath in expected_paths
        }

        # make sure files are existing at folder
        collected_frames = [
            filename
            for filename in os.listdir(output_dir)
            if filename in expected_filenames
        ]

        return collected_frames

@@ -11,9 +11,9 @@ from openpype.hosts.nuke.api.lib import maintained_selection


class ExtractCamera(publish.Extractor):
    """ 3D camera exctractor
    """ 3D camera extractor
    """
    label = 'Exctract Camera'
    label = 'Extract Camera'
    order = pyblish.api.ExtractorOrder
    families = ["camera"]
    hosts = ["nuke"]

@@ -11,9 +11,9 @@ from openpype.hosts.nuke.api.lib import (


class ExtractModel(publish.Extractor):
    """ 3D model exctractor
    """ 3D model extractor
    """
    label = 'Exctract Model'
    label = 'Extract Model'
    order = pyblish.api.ExtractorOrder
    families = ["model"]
    hosts = ["nuke"]

@@ -249,7 +249,7 @@ class ExtractSlateFrame(publish.Extractor):

        # Add file to representation files
        # - get write node
        write_node = instance.data["writeNode"]
        write_node = instance.data["transientData"]["writeNode"]
        # - evaluate filepaths for first frame and slate frame
        first_filename = os.path.basename(
            write_node["file"].evaluate(first_frame))

@@ -54,6 +54,7 @@ class ExtractThumbnail(publish.Extractor):
    def render_thumbnail(self, instance, output_name=None, **kwargs):
        first_frame = instance.data["frameStartHandle"]
        last_frame = instance.data["frameEndHandle"]
        colorspace = instance.data["colorspace"]

        # find frame range and define middle thumb frame
        mid_frame = int((last_frame - first_frame) / 2)

@@ -112,8 +113,8 @@ class ExtractThumbnail(publish.Extractor):
        if self.use_rendered and os.path.isfile(path_render):
            # check if file exist otherwise connect to write node
            rnode = nuke.createNode("Read")

            rnode["file"].setValue(path_render)
            rnode["colorspace"].setValue(colorspace)

            # turn it raw if none of baking is ON
            if all([

@@ -14,27 +14,26 @@ class RepairActionBase(pyblish.api.Action):
        # Get the errored instances
        return get_errored_instances_from_context(context, plugin=plugin)

    def repair_knob(self, instances, state):
    def repair_knob(self, context, instances, state):
        create_context = context.data["create_context"]
        for instance in instances:
            node = instance.data["transientData"]["node"]
            files_remove = [os.path.join(instance.data["outputDir"], f)
                            for r in instance.data.get("representations", [])
                            for f in r.get("files", [])
                            ]
            self.log.info("Files to be removed: {}".format(files_remove))
            for f in files_remove:
                os.remove(f)
                self.log.debug("removing file: {}".format(f))
            node["render"].setValue(state)
            # Reset the render knob
            instance_id = instance.data.get("instance_id")
            created_instance = create_context.get_instance_by_id(
                instance_id
            )
            created_instance.creator_attributes["render_target"] = state
            self.log.info("Rendering toggled to `{}`".format(state))

        create_context.save_changes()


class RepairCollectionActionToLocal(RepairActionBase):
    label = "Repair - rerender with \"Local\""

    def process(self, context, plugin):
        instances = self.get_instance(context, plugin)
        self.repair_knob(instances, "Local")
        self.repair_knob(context, instances, "local")


class RepairCollectionActionToFarm(RepairActionBase):

@@ -42,7 +41,7 @@ class RepairCollectionActionToFarm(RepairActionBase):

    def process(self, context, plugin):
        instances = self.get_instance(context, plugin)
        self.repair_knob(instances, "On farm")
        self.repair_knob(context, instances, "farm")


class ValidateRenderedFrames(pyblish.api.InstancePlugin):

@@ -1,3 +1,5 @@
from collections import defaultdict

import pyblish.api
from openpype.pipeline.publish import get_errored_instances_from_context
from openpype.hosts.nuke.api.lib import (

@@ -87,6 +89,11 @@ class ValidateNukeWriteNode(
            correct_data
        ))

        # Collect key values of same type in a list.
        values_by_name = defaultdict(list)
        for knob_data in correct_data["knobs"]:
            values_by_name[knob_data["name"]].append(knob_data["value"])

        for knob_data in correct_data["knobs"]:
            knob_type = knob_data["type"]
            self.log.debug("__ knob_type: {}".format(
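The `defaultdict(list)` grouping above collects every expected value per knob name, so a knob that appears multiple times in the settings is compared against all of its allowed values. A minimal sketch of the grouping step, using hypothetical knob data:

from collections import defaultdict

# hypothetical knob settings; "channels" appears twice with two allowed values
knobs = [
    {"name": "channels", "value": "rgb"},
    {"name": "channels", "value": "rgba"},
    {"name": "datatype", "value": "16 bit half"},
]

values_by_name = defaultdict(list)
for knob_data in knobs:
    values_by_name[knob_data["name"]].append(knob_data["value"])

assert values_by_name["channels"] == ["rgb", "rgba"]
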
@@ -105,28 +112,33 @@ class ValidateNukeWriteNode(
            )

            key = knob_data["name"]
            value = knob_data["value"]
            values = values_by_name[key]
            node_value = write_node[key].value()

            # fix type differences
            if type(node_value) in (int, float):
                try:
                    if isinstance(value, list):
                        value = color_gui_to_int(value)
                    else:
                        value = float(value)
                        node_value = float(node_value)
                except ValueError:
                    value = str(value)
            else:
                value = str(value)
                node_value = str(node_value)
            fixed_values = []
            for value in values:
                if type(node_value) in (int, float):
                    try:

            self.log.debug("__ key: {} | value: {}".format(
                key, value
                        if isinstance(value, list):
                            value = color_gui_to_int(value)
                        else:
                            value = float(value)
                            node_value = float(node_value)
                    except ValueError:
                        value = str(value)
                else:
                    value = str(value)
                    node_value = str(node_value)

                fixed_values.append(value)

            self.log.debug("__ key: {} | values: {}".format(
                key, fixed_values
            ))
            if (
                node_value != value
                node_value not in fixed_values
                and key != "file"
                and key != "tile_color"
            ):

@@ -76,11 +76,16 @@ class AnimationAlembicLoader(plugin.Loader):
            asset_name = "{}_{}".format(asset, name)
        else:
            asset_name = "{}".format(name)
        version = context.get('version').get('name')
        version = context.get('version')
        # Check if version is hero version and use different name
        if not version.get("name") and version.get('type') == "hero_version":
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version.get('name'):03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{asset}/{name}_v{version:03d}", suffix="")
            f"{root}/{asset}/{name_version}", suffix="")

        container_name += suffix

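The same hero-version naming pattern repeats across the Unreal loaders below: a hero version carries no numeric name, so the asset folder gets a `_hero` suffix instead of a zero-padded `_v###`. A condensed sketch of that branch, with hypothetical version documents:

def make_name_version(name, version):
    # hero versions carry no numeric "name", only type "hero_version"
    if not version.get("name") and version.get("type") == "hero_version":
        return f"{name}_hero"
    return f"{name}_v{version.get('name'):03d}"

assert make_name_version("lookMain", {"type": "hero_version"}) == "lookMain_hero"
assert make_name_version("lookMain", {"name": 7}) == "lookMain_v007"
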
@@ -78,11 +78,16 @@ class SkeletalMeshAlembicLoader(plugin.Loader):
            asset_name = "{}_{}".format(asset, name)
        else:
            asset_name = "{}".format(name)
        version = context.get('version').get('name')
        version = context.get('version')
        # Check if version is hero version and use different name
        if not version.get("name") and version.get('type') == "hero_version":
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version.get('name'):03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{asset}/{name}_v{version:03d}", suffix="")
            f"{root}/{asset}/{name_version}", suffix="")

        container_name += suffix

@@ -52,11 +52,16 @@ class SkeletalMeshFBXLoader(plugin.Loader):
            asset_name = "{}_{}".format(asset, name)
        else:
            asset_name = "{}".format(name)
        version = context.get('version').get('name')
        version = context.get('version')
        # Check if version is hero version and use different name
        if not version.get("name") and version.get('type') == "hero_version":
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version.get('name'):03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{asset}/{name}_v{version:03d}", suffix="")
            f"{root}/{asset}/{name_version}", suffix="")

        container_name += suffix

@@ -79,11 +79,13 @@ class StaticMeshAlembicLoader(plugin.Loader):
        root = "/Game/Ayon/Assets"
        asset = context.get('asset').get('name')
        suffix = "_CON"
        if asset:
            asset_name = "{}_{}".format(asset, name)
        asset_name = f"{asset}_{name}" if asset else f"{name}"
        version = context.get('version')
        # Check if version is hero version and use different name
        if not version.get("name") and version.get('type') == "hero_version":
            name_version = f"{name}_hero"
        else:
            asset_name = "{}".format(name)
            version = context.get('version').get('name')
            name_version = f"{name}_v{version.get('name'):03d}"

        default_conversion = False
        if options.get("default_conversion"):

@@ -91,7 +93,7 @@ class StaticMeshAlembicLoader(plugin.Loader):

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{asset}/{name}_v{version:03d}", suffix="")
            f"{root}/{asset}/{name_version}", suffix="")

        container_name += suffix

@@ -78,10 +78,16 @@ class StaticMeshFBXLoader(plugin.Loader):
            asset_name = "{}_{}".format(asset, name)
        else:
            asset_name = "{}".format(name)
        version = context.get('version')
        # Check if version is hero version and use different name
        if not version.get("name") and version.get('type') == "hero_version":
            name_version = f"{name}_hero"
        else:
            name_version = f"{name}_v{version.get('name'):03d}"

        tools = unreal.AssetToolsHelpers().get_asset_tools()
        asset_dir, container_name = tools.create_unique_asset_name(
            f"{root}/{asset}/{name}", suffix=""
            f"{root}/{asset}/{name_version}", suffix=""
        )

        container_name += suffix

@@ -1,4 +1,6 @@
import clique
import os
import re

import pyblish.api


@@ -21,7 +23,19 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
        representations = instance.data.get("representations")
        for repr in representations:
            data = instance.data.get("assetEntity", {}).get("data", {})
            patterns = [clique.PATTERNS["frames"]]
            repr_files = repr["files"]
            if isinstance(repr_files, str):
                continue

            ext = repr.get("ext")
            if not ext:
                _, ext = os.path.splitext(repr_files[0])
            elif not ext.startswith("."):
                ext = ".{}".format(ext)
            pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(
                re.escape(ext))
            patterns = [pattern]

            collections, remainder = clique.assemble(
                repr["files"], minimum_items=1, patterns=patterns)

@@ -30,6 +44,10 @@ class ValidateSequenceFrames(pyblish.api.InstancePlugin):
            collection = collections[0]
            frames = list(collection.indexes)

            if instance.data.get("slate"):
                # Slate is not part of the frame range
                frames = frames[1:]

            current_range = (frames[0], frames[-1])
            required_range = (data["clipIn"],
                              data["clipOut"])
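The extension-anchored pattern above makes clique treat only the trailing digit run right before the file extension as the frame index. A quick check of that regex against hypothetical file names:

import re

ext = ".exr"
pattern = r"\D?(?P<index>(?P<padding>0*)\d+){}$".format(re.escape(ext))

match = re.search(pattern, "shot010_beauty.1001.exr")
assert match.group("index") == "1001"

# a version token earlier in the name is not mistaken for the frame index
match = re.search(pattern, "shot010_v002.0099.exr")
assert match.group("index") == "0099"
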
@@ -280,13 +280,14 @@ class BatchPublishEndpoint(WebpublishApiEndpoint):

        for key, value in add_args.items():
            # Skip key values where value is None
            if value is not None:
                args.append("--{}".format(key))
                # Extend list into arguments (targets can be a list)
                if isinstance(value, (tuple, list)):
                    args.extend(value)
                else:
                    args.append(value)
            if value is None:
                continue
            arg_key = "--{}".format(key)
            if not isinstance(value, (tuple, list)):
                value = [value]

            for item in value:
                args += [arg_key, item]

        log.info("args:: {}".format(args))
        if add_to_queue:
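Note the behavioral difference in the rewritten loop: the flag is now repeated before every item instead of being emitted once followed by all values. A standalone sketch with hypothetical arguments:

add_args = {"targets": ["webpublish", "filespublish"], "user": "artist", "project": None}

args = []
for key, value in add_args.items():
    if value is None:
        continue
    arg_key = "--{}".format(key)
    if not isinstance(value, (tuple, list)):
        value = [value]
    # repeat the flag for every item: --targets a --targets b
    for item in value:
        args += [arg_key, item]

assert args == ["--targets", "webpublish", "--targets", "filespublish",
                "--user", "artist"]
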
@@ -3,6 +3,7 @@ import os
import re
import copy
import inspect
import collections
import logging
import weakref
from uuid import uuid4

@@ -340,8 +341,8 @@ class EventSystem(object):
        event.emit()
        return event

    def emit_event(self, event):
        """Emit event object.
    def _process_event(self, event):
        """Process event topic and trigger callbacks.

        Args:
            event (Event): Prepared event with topic and data.

@@ -356,6 +357,91 @@ class EventSystem(object):
        for callback in invalid_callbacks:
            self._registered_callbacks.remove(callback)

    def emit_event(self, event):
        """Emit event object.

        Args:
            event (Event): Prepared event with topic and data.
        """

        self._process_event(event)


class QueuedEventSystem(EventSystem):
    """Events are automatically processed in queue.

    If callback triggers another event, the event is not processed until
    all callbacks of previous event are processed.

    Allows to implement custom event process loop by changing 'auto_execute'.

    Note:
        This probably should be default behavior of 'EventSystem'. Changing it
        now could cause problems in existing code.

    Args:
        auto_execute (Optional[bool]): If 'True', events are processed
            automatically. Custom loop calling 'process_next_event'
            must be implemented when set to 'False'.
    """

    def __init__(self, auto_execute=True):
        super(QueuedEventSystem, self).__init__()
        self._event_queue = collections.deque()
        self._current_event = None
        self._auto_execute = auto_execute

    def __len__(self):
        return self.count()

    def count(self):
        """Get number of events in queue.

        Returns:
            int: Number of events in queue.
        """

        return len(self._event_queue)

    def process_next_event(self):
        """Process next event in queue.

        Should be used only if 'auto_execute' is set to 'False'. Only single
        event is processed.

        Returns:
            Union[Event, None]: Processed event.
        """

        if self._current_event is not None:
            raise ValueError("An event is already in progress.")

        if not self._event_queue:
            return None
        event = self._event_queue.popleft()
        self._current_event = event
        self._process_event(event)
        self._current_event = None
        return event

    def emit_event(self, event):
        """Emit event object.

        Args:
            event (Event): Prepared event with topic and data.
        """

        if not self._auto_execute or self._current_event is not None:
            self._event_queue.append(event)
            return

        self._event_queue.append(event)
        while self._event_queue:
            event = self._event_queue.popleft()
            self._current_event = event
            self._process_event(event)
            self._current_event = None


class GlobalEventSystem:
    """Event system living in global scope of process.

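A short usage sketch of the queued behavior added above: an event emitted from inside a callback is deferred until the current event's callbacks finish, instead of being processed re-entrantly. This assumes the `add_callback(topic, callback)` and `emit(topic, data, source)` helpers of the surrounding `EventSystem` class:

processed = []

system = QueuedEventSystem()

def on_first(event):
    # emitting here only enqueues; "second" runs after "first" completes
    system.emit("second.topic", {}, "demo")
    processed.append("first")

def on_second(event):
    processed.append("second")

system.add_callback("first.topic", on_first)
system.add_callback("second.topic", on_second)
system.emit("first.topic", {}, "demo")

assert processed == ["first", "second"]
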
@@ -373,10 +373,12 @@ def _load_ayon_addons(openpype_modules, modules_key, log):
    addons_info = _get_ayon_addons_information()
    if not addons_info:
        return v3_addons_to_skip
    addons_dir = os.path.join(
        appdirs.user_data_dir("AYON", "Ynput"),
        "addons"
    )
    addons_dir = os.environ.get("AYON_ADDONS_DIR")
    if not addons_dir:
        addons_dir = os.path.join(
            appdirs.user_data_dir("AYON", "Ynput"),
            "addons"
        )
    if not os.path.exists(addons_dir):
        log.warning("Addons directory does not exists. Path \"{}\"".format(
            addons_dir

@@ -8,6 +8,7 @@ attribute or using default server if that attribute doesn't exists.
from maya import cmds

import pyblish.api
from openpype.pipeline.publish import KnownPublishError


class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):

@@ -81,13 +82,14 @@ class CollectDeadlineServerFromInstance(pyblish.api.InstancePlugin):
            if k in default_servers
        }

        msg = (
            "\"{}\" server on instance is not enabled in project settings."
            " Enabled project servers:\n{}".format(
                instance_server, project_enabled_servers
        if instance_server not in project_enabled_servers:
            msg = (
                "\"{}\" server on instance is not enabled in project settings."
                " Enabled project servers:\n{}".format(
                    instance_server, project_enabled_servers
                )
            )
        )
        assert instance_server in project_enabled_servers, msg
            raise KnownPublishError(msg)

        self.log.debug("Using project approved server.")
        return project_enabled_servers[instance_server]

@@ -1,31 +1,31 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Scene setting</title>
        <title>Deadline Pools</title>
        <description>
        ## Invalid Deadline pools found
        ## Invalid Deadline pools found

        Configured pools don't match what is set in Deadline.
        Configured pools don't match available pools in Deadline.

        {invalid_value_str}
        ### How to repair?

        ### How to repair?
        If your instance had deadline pools set on creation, remove or
        change them.

        If your instance had deadline pools set on creation, remove or
        change them.
        In other cases inform admin to change them in Settings.

        In other cases inform admin to change them in Settings.
        Available deadline pools:

        {pools_str}

        Available deadline pools {pools_str}.
        </description>
        <detail>
        ### __Detailed Info__
        ### __Detailed Info__

        This error is shown when deadline pool is not on Deadline anymore. It
        could happen in case of republish old workfile which was created with
        previous deadline pools,
        or someone changed pools on Deadline side, but didn't modify Openpype
        Settings.
        This error is shown when a configured pool is not available on Deadline. It
        can happen when publishing old workfiles which were created with previous
        deadline pools, or someone changed the available pools in Deadline,
        but didn't modify Openpype Settings to match the changes.
        </detail>
    </error>
</root>

@@ -27,7 +27,7 @@ class CelactionSubmitDeadline(pyblish.api.InstancePlugin):
    deadline_job_delay = "00:00:08:00"

    def process(self, instance):
        instance.data["toBeRenderedOn"] = "deadline"

        context = instance.context

        # get default deadline webservice url from deadline module

@@ -265,7 +265,7 @@ class HarmonySubmitDeadline(
        job_info.SecondaryPool = self._instance.data.get("secondaryPool")
        job_info.ChunkSize = self.chunk_size
        batch_name = os.path.basename(self._instance.data["source"])
        if is_in_tests:
        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")
        job_info.BatchName = batch_name
        job_info.Department = self.department

@@ -141,4 +141,3 @@ class HoudiniSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline):
        # Store output dir for unified publisher (filesequence)
        output_dir = os.path.dirname(instance.data["files"][0])
        instance.data["outputDir"] = output_dir
        instance.data["toBeRenderedOn"] = "deadline"

@@ -12,7 +12,9 @@ from openpype.pipeline import (
    legacy_io,
    OpenPypePyblishPluginMixin
)
from openpype.settings import get_project_settings
from openpype.pipeline.publish.lib import (
    replace_with_published_scene_path
)
from openpype.hosts.max.api.lib import (
    get_current_renderer,
    get_multipass_setting

@@ -174,7 +176,6 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
        first_file = next(self._iter_expected_files(files))
        output_dir = os.path.dirname(first_file)
        instance.data["outputDir"] = output_dir
        instance.data["toBeRenderedOn"] = "deadline"

        filename = os.path.basename(filepath)

@@ -236,7 +237,10 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
        if renderer == "Redshift_Renderer":
            plugin_data["redshift_SeparateAovFiles"] = instance.data.get(
                "separateAovFiles")

        if instance.data["cameras"]:
            plugin_info["Camera0"] = None
            plugin_info["Camera"] = instance.data["cameras"][0]
            plugin_info["Camera1"] = instance.data["cameras"][0]
        self.log.debug("plugin data:{}".format(plugin_data))
        plugin_info.update(plugin_data)

@@ -247,7 +251,8 @@ class MaxSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
        if instance.data["renderer"] == "Redshift_Renderer":
            self.log.debug("Using Redshift...published scene wont be used..")
            replace_in_path = False
            return replace_in_path
        return replace_with_published_scene_path(
            instance, replace_in_path)

    @staticmethod
    def _iter_expected_files(exp):

@@ -300,7 +300,6 @@ class MayaSubmitDeadline(abstract_submit_deadline.AbstractSubmitDeadline,
        first_file = next(iter_expected_files(expected_files))
        output_dir = os.path.dirname(first_file)
        instance.data["outputDir"] = output_dir
        instance.data["toBeRenderedOn"] = "deadline"

        # Patch workfile (only when use_published is enabled)
        if self.use_published:

@@ -90,7 +90,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        instance.data["attributeValues"] = self.get_attr_values_from_data(
            instance.data)

@@ -98,7 +97,6 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
        instance.data["suspend_publish"] = instance.data["attributeValues"][
            "suspend_publish"]

        instance.data["toBeRenderedOn"] = "deadline"
        families = instance.data["families"]

        node = instance.data["transientData"]["node"]

@@ -123,13 +121,10 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
        render_path = instance.data['path']
        script_path = context.data["currentFile"]

        for item in context:
            if "workfile" in item.data["families"]:
                msg = "Workfile (scene) must be published along"
                assert item.data["publish"] is True, msg

                template_data = item.data.get("anatomyData")
                rep = item.data.get("representations")[0].get("name")
        for item_ in context:
            if "workfile" in item_.data["family"]:
                template_data = item_.data.get("anatomyData")
                rep = item_.data.get("representations")[0].get("name")
                template_data["representation"] = rep
                template_data["ext"] = rep
                template_data["comment"] = None

@@ -141,19 +136,24 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
            "Using published scene for render {}".format(script_path)
        )

        response = self.payload_submit(
            instance,
            script_path,
            render_path,
            node.name(),
            submit_frame_start,
            submit_frame_end
        )
        # Store output dir for unified publisher (filesequence)
        instance.data["deadlineSubmissionJob"] = response.json()
        instance.data["outputDir"] = os.path.dirname(
            render_path).replace("\\", "/")
        instance.data["publishJobState"] = "Suspended"
        # only add main rendering job if target is not frames_farm
        r_job_response_json = None
        if instance.data["render_target"] != "frames_farm":
            r_job_response = self.payload_submit(
                instance,
                script_path,
                render_path,
                node.name(),
                submit_frame_start,
                submit_frame_end
            )
            r_job_response_json = r_job_response.json()
            instance.data["deadlineSubmissionJob"] = r_job_response_json

            # Store output dir for unified publisher (filesequence)
            instance.data["outputDir"] = os.path.dirname(
                render_path).replace("\\", "/")
            instance.data["publishJobState"] = "Suspended"

        if instance.data.get("bakingNukeScripts"):
            for baking_script in instance.data["bakingNukeScripts"]:

@@ -161,18 +161,20 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                script_path = baking_script["bakeScriptPath"]
                exe_node_name = baking_script["bakeWriteNodeName"]

                resp = self.payload_submit(
                b_job_response = self.payload_submit(
                    instance,
                    script_path,
                    render_path,
                    exe_node_name,
                    submit_frame_start,
                    submit_frame_end,
                    response.json()
                    r_job_response_json,
                    baking_submission=True
                )

                # Store output dir for unified publisher (filesequence)
                instance.data["deadlineSubmissionJob"] = resp.json()
                instance.data["deadlineSubmissionJob"] = b_job_response.json()

                instance.data["publishJobState"] = "Suspended"

                # add to list of job Id

@@ -180,7 +182,7 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
                    instance.data["bakingSubmissionJobs"] = []

                instance.data["bakingSubmissionJobs"].append(
                    resp.json()["_id"])
                    b_job_response.json()["_id"])

        # redefinition of families
        if "render" in instance.data["family"]:

@@ -199,15 +201,35 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
        exe_node_name,
        start_frame,
        end_frame,
        response_data=None
        response_data=None,
        baking_submission=False,
    ):
        """Submit payload to Deadline

        Args:
            instance (pyblish.api.Instance): pyblish instance
            script_path (str): path to nuke script
            render_path (str): path to rendered images
            exe_node_name (str): name of the node to render
            start_frame (int): start frame
            end_frame (int): end frame
            response_data Optional[dict]: response data from
                previous submission
            baking_submission Optional[bool]: if it's baking submission

        Returns:
            requests.Response
        """
        render_dir = os.path.normpath(os.path.dirname(render_path))
        batch_name = os.path.basename(script_path)
        jobname = "%s - %s" % (batch_name, instance.name)

        # batch name
        src_filepath = instance.context.data["currentFile"]
        batch_name = os.path.basename(src_filepath)
        job_name = os.path.basename(render_path)

        if is_in_tests():
            batch_name += datetime.now().strftime("%d%m%Y%H%M%S")


        output_filename_0 = self.preview_fname(render_path)

        if not response_data:

@@ -228,11 +250,8 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
            # Top-level group name
            "BatchName": batch_name,

            # Asset dependency to wait for at least the scene file to sync.
            # "AssetDependency0": script_path,

            # Job name, as seen in Monitor
            "Name": jobname,
            "Name": job_name,

            # Arbitrary username, for visualisation in Monitor
            "UserName": self._deadline_user,

@@ -294,12 +313,17 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin,
            "AuxFiles": []
        }

        if response_data.get("_id"):
        # TODO: rewrite for baking with sequences
        if baking_submission:
            payload["JobInfo"].update({
                "JobType": "Normal",
                "ChunkSize": 99999999
            })

        if response_data.get("_id"):
            payload["JobInfo"].update({
                "BatchName": response_data["Props"]["Batch"],
                "JobDependency0": response_data["_id"],
                "ChunkSize": 99999999
            })

        # Include critical environment variables with submission

@@ -98,7 +98,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
    hosts = ["fusion", "max", "maya", "nuke", "houdini",
             "celaction", "aftereffects", "harmony"]

    families = ["render.farm", "prerender.farm",
    families = ["render.farm", "render.frames_farm",
                "prerender.farm", "prerender.frames_farm",
                "renderlayer", "imagesequence",
                "vrayscene", "maxrender",
                "arnold_rop", "mantra_rop",

@@ -121,7 +122,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
        "FTRACK_SERVER",
        "AVALON_APP_NAME",
        "OPENPYPE_USERNAME",
        "OPENPYPE_SG_USER"
        "OPENPYPE_SG_USER",
        "KITSU_LOGIN",
        "KITSU_PWD"
    ]

    # custom deadline attributes

@@ -299,7 +302,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
            payload["JobInfo"]["JobDependency{}".format(
                job_index)] = assembly_id  # noqa: E501
            job_index += 1
        else:
        elif job.get("_id"):
            payload["JobInfo"]["JobDependency0"] = job["_id"]

        for index, (key_, value_) in enumerate(environment.items()):

@@ -475,6 +478,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin,
            "FTRACK_SERVER": os.environ.get("FTRACK_SERVER"),
        }

        deadline_publish_job_id = None
        if submission_type == "deadline":
            # get default deadline webservice url from deadline module
            self.deadline_url = instance.context.data["defaultDeadline"]

@@ -1,8 +1,7 @@
import os
import requests

import pyblish.api

from openpype_modules.deadline.abstract_submit_deadline import requests_get


class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
    """Validate Deadline Web Service is running"""

@@ -10,7 +9,10 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
    label = "Validate Deadline Web Service"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya", "nuke"]
    families = ["renderlayer"]
    families = ["renderlayer", "render"]

    # cache
    responses = {}

    def process(self, instance):
        # get default deadline webservice url from deadline module

@@ -18,28 +20,16 @@ class ValidateDeadlineConnection(pyblish.api.InstancePlugin):
        # if custom one is set in instance, use that
        if instance.data.get("deadlineUrl"):
            deadline_url = instance.data.get("deadlineUrl")
            self.log.info(
                "We have deadline URL on instance {}".format(
                    deadline_url))
            self.log.debug(
                "We have deadline URL on instance {}".format(deadline_url)
            )
        assert deadline_url, "Requires Deadline Webservice URL"

        # Check response
        response = self._requests_get(deadline_url)
        if deadline_url not in self.responses:
            self.responses[deadline_url] = requests_get(deadline_url)

        response = self.responses[deadline_url]
        assert response.ok, "Response must be ok"
        assert response.text.startswith("Deadline Web Service "), (
            "Web service did not respond with 'Deadline Web Service'"
        )

    def _requests_get(self, *args, **kwargs):
        """ Wrapper for requests, disabling SSL certificate validation if
        DONT_VERIFY_SSL environment variable is found. This is useful when
        Deadline or Muster server are running with self-signed certificates
        and their certificate is not added to trusted certificates on
        client machines.

        WARNING: disabling SSL certificate validation is defeating one line
        of defense SSL is providing and it is not recommended.
        """
        if 'verify' not in kwargs:
            kwargs['verify'] = False if os.getenv("OPENPYPE_DONT_VERIFY_SSL", True) else True  # noqa
        return requests.get(*args, **kwargs)

@@ -19,38 +19,64 @@ class ValidateDeadlinePools(OptionalPyblishPluginMixin,
    order = pyblish.api.ValidatorOrder
    families = ["rendering",
                "render.farm",
                "render.frames_farm",
                "renderFarm",
                "renderlayer",
                "maxrender"]
    optional = True

    # cache
    pools_per_url = {}

    def process(self, instance):
        if not self.is_active(instance.data):
            return

        if not instance.data.get("farm"):
            self.log.debug("Skipping local instance.")
            return

        # get default deadline webservice url from deadline module
        deadline_url = instance.context.data["defaultDeadline"]
        self.log.info("deadline_url::{}".format(deadline_url))
        pools = DeadlineModule.get_deadline_pools(deadline_url, log=self.log)
        self.log.info("pools::{}".format(pools))

        formatting_data = {
            "pools_str": ",".join(pools)
        }
        deadline_url = self.get_deadline_url(instance)
        pools = self.get_pools(deadline_url)

        invalid_pools = {}
        primary_pool = instance.data.get("primaryPool")
        if primary_pool and primary_pool not in pools:
            msg = "Configured primary '{}' not present on Deadline".format(
                instance.data["primaryPool"])
            formatting_data["invalid_value_str"] = msg
            raise PublishXmlValidationError(self, msg,
                                            formatting_data=formatting_data)
            invalid_pools["primary"] = primary_pool

        secondary_pool = instance.data.get("secondaryPool")
        if secondary_pool and secondary_pool not in pools:
            msg = "Configured secondary '{}' not present on Deadline".format(
                instance.data["secondaryPool"])
            formatting_data["invalid_value_str"] = msg
            raise PublishXmlValidationError(self, msg,
                                            formatting_data=formatting_data)
            invalid_pools["secondary"] = secondary_pool

        if invalid_pools:
            message = "\n".join(
                "{} pool '{}' not available on Deadline".format(key.title(),
                                                                pool)
                for key, pool in invalid_pools.items()
            )
            raise PublishXmlValidationError(
                plugin=self,
                message=message,
                formatting_data={"pools_str": ", ".join(pools)}
            )

    def get_deadline_url(self, instance):
        # get default deadline webservice url from deadline module
        deadline_url = instance.context.data["defaultDeadline"]
        if instance.data.get("deadlineUrl"):
            # if custom one is set in instance, use that
            deadline_url = instance.data.get("deadlineUrl")
        return deadline_url

    def get_pools(self, deadline_url):
        if deadline_url not in self.pools_per_url:
            self.log.debug(
                "Querying available pools for Deadline url: {}".format(
                    deadline_url)
            )
            pools = DeadlineModule.get_deadline_pools(deadline_url,
                                                      log=self.log)
            self.log.info("Available pools: {}".format(pools))
            self.pools_per_url[deadline_url] = pools

        return self.pools_per_url[deadline_url]

@@ -20,8 +20,19 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
    allow_user_override = True

    def process(self, instance):
        self.instance = instance
        frame_list = self._get_frame_list(instance.data["render_job_id"])
        """Process all the nodes in the instance"""

        # get dependency jobs ids for retrieving frame list
        dependent_job_ids = self._get_dependent_job_ids(instance)

        if not dependent_job_ids:
            self.log.warning("No dependent jobs found for instance: {}"
                             "".format(instance))
            return

        # get list of frames from dependent jobs
        frame_list = self._get_dependent_jobs_frames(
            instance, dependent_job_ids)

        for repre in instance.data["representations"]:
            expected_files = self._get_expected_files(repre)

@@ -59,7 +70,10 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
            # Update the representation expected files
            self.log.info("Update range from actual job range "
                          "to frame list: {}".format(frame_list))
            repre["files"] = sorted(job_expected_files)
            # single item files must be string not list
            repre["files"] = (sorted(job_expected_files)
                              if len(job_expected_files) > 1 else
                              list(job_expected_files)[0])

            # Update the expected files
            expected_files = job_expected_files

@@ -78,26 +92,45 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):
            )
        )

    def _get_frame_list(self, original_job_id):
    def _get_dependent_job_ids(self, instance):
        """Returns list of dependent job ids from instance metadata.json

        Args:
            instance (pyblish.api.Instance): pyblish instance

        Returns:
            (list): list of dependent job ids

        """
        dependent_job_ids = []

        # job_id collected from metadata.json
        original_job_id = instance.data["render_job_id"]

        dependent_job_ids_env = os.environ.get("RENDER_JOB_IDS")
        if dependent_job_ids_env:
            dependent_job_ids = dependent_job_ids_env.split(',')
        elif original_job_id:
            dependent_job_ids = [original_job_id]

        return dependent_job_ids

    def _get_dependent_jobs_frames(self, instance, dependent_job_ids):
        """Returns list of frame ranges from all render job.

        Render job might be re-submitted so job_id in metadata.json could be
        invalid. GlobalJobPreload injects current job id to RENDER_JOB_IDS.

        Args:
            original_job_id (str)
            instance (pyblish.api.Instance): pyblish instance
            dependent_job_ids (list): list of dependent job ids
        Returns:
            (list)
        """
        all_frame_lists = []
        render_job_ids = os.environ.get("RENDER_JOB_IDS")
        if render_job_ids:
            render_job_ids = render_job_ids.split(',')
        else:  # fallback
            render_job_ids = [original_job_id]

        for job_id in render_job_ids:
            job_info = self._get_job_info(job_id)
        for job_id in dependent_job_ids:
            job_info = self._get_job_info(instance, job_id)
            frame_list = job_info["Props"].get("Frames")
            if frame_list:
                all_frame_lists.extend(frame_list.split(','))

@@ -152,18 +185,25 @@ class ValidateExpectedFiles(pyblish.api.InstancePlugin):

        return file_name_template, frame_placeholder

    def _get_job_info(self, job_id):
    def _get_job_info(self, instance, job_id):
        """Calls DL for actual job info for 'job_id'

        Might be different than job info saved in metadata.json if user
        manually changes job pre/during rendering.

        Args:
            instance (pyblish.api.Instance): pyblish instance
            job_id (str): Deadline job id

        Returns:
            (dict): Job info from Deadline

        """
        # get default deadline webservice url from deadline module
        deadline_url = self.instance.context.data["defaultDeadline"]
        deadline_url = instance.context.data["defaultDeadline"]
        # if custom one is set in instance, use that
        if self.instance.data.get("deadlineUrl"):
            deadline_url = self.instance.data.get("deadlineUrl")
        if instance.data.get("deadlineUrl"):
            deadline_url = instance.data.get("deadlineUrl")
        assert deadline_url, "Requires Deadline Webservice URL"

        url = "{}/api/jobs?JobID={}".format(deadline_url, job_id)
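The dependent-job lookup above prefers the `RENDER_JOB_IDS` environment variable (injected by GlobalJobPreload on resubmission) and falls back to the job id stored in metadata.json. A minimal sketch of that precedence, with hypothetical ids:

import os

def get_dependent_job_ids(original_job_id):
    # env var wins because a resubmitted job gets a fresh id
    env_ids = os.environ.get("RENDER_JOB_IDS")
    if env_ids:
        return env_ids.split(",")
    return [original_job_id] if original_job_id else []

os.environ["RENDER_JOB_IDS"] = "aaa111,bbb222"
assert get_dependent_job_ids("stale000") == ["aaa111", "bbb222"]
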
@@ -38,6 +38,7 @@ class AyonDeadlinePlugin(DeadlinePlugin):
        for publish process.
    """
    def __init__(self):
        super().__init__()
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument

@@ -90,7 +91,13 @@ class AyonDeadlinePlugin(DeadlinePlugin):
        # clean '\ ' for MacOS pasting
        if platform.system().lower() == "darwin":
            exe_list = exe_list.replace("\\ ", " ")
        exe = FileUtils.SearchFileList(exe_list)

        expanded_paths = []
        for path in exe_list.split(";"):
            if path.startswith("~"):
                path = os.path.expanduser(path)
            expanded_paths.append(path)
        exe = FileUtils.SearchFileList(";".join(expanded_paths))

        if exe == "":
            self.FailRender(

@@ -547,7 +547,14 @@ def get_ayon_executable():
    # clean '\ ' for MacOS pasting
    if platform.system().lower() == "darwin":
        exe_list = exe_list.replace("\\ ", " ")
    return exe_list

    # Expand user paths
    expanded_paths = []
    for path in exe_list.split(";"):
        if path.startswith("~"):
            path = os.path.expanduser(path)
        expanded_paths.append(path)
    return ";".join(expanded_paths)


def inject_render_job_id(deadlinePlugin):

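Both executable lookups above now expand a leading `~` per semicolon-separated entry, so user-relative install paths resolve on the worker. A small sketch of the helper logic, with a hypothetical path list:

import os

def expand_exe_list(exe_list):
    # expand "~" entries in a ";"-separated executable search list
    expanded = []
    for path in exe_list.split(";"):
        if path.startswith("~"):
            path = os.path.expanduser(path)
        expanded.append(path)
    return ";".join(expanded)

print(expand_exe_list("~/ayon/ayon_console;/opt/ayon/ayon_console"))
# -> /home/<user>/ayon/ayon_console;/opt/ayon/ayon_console
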
@@ -77,4 +77,22 @@ CategoryOrder=0
Index=4
Label=Harmony 20 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=c:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 20 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 20 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_20/lnx86_64/bin/HarmonyPremium
Default=c:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 20 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 20 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_20/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_21]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=4
Label=Harmony 21 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=c:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 21 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 21 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_21/lnx86_64/bin/HarmonyPremium

[Harmony_RenderExecutable_22]
Type=multilinemultifilename
Category=Render Executables
CategoryOrder=0
Index=4
Label=Harmony 22 Render Executable
Description=The path to the Harmony Render executable file used for rendering. Enter alternative paths on separate lines.
Default=c:\Program Files (x86)\Toon Boom Animation\Toon Boom Harmony 22 Premium\win64\bin\HarmonyPremium.exe;/Applications/Toon Boom Harmony 22 Premium/Harmony Premium.app/Contents/MacOS/Harmony Premium;/usr/local/ToonBoomAnimation/harmonyPremium_22/lnx86_64/bin/HarmonyPremium

@@ -1,3 +1,4 @@
#!/usr/bin/env python3
from System import *
from System.Diagnostics import *
from System.IO import *

@@ -8,13 +9,14 @@ from Deadline.Scripting import *

def GetDeadlinePlugin():
    return HarmonyOpenPypePlugin()


def CleanupDeadlinePlugin( deadlinePlugin ):
    deadlinePlugin.Cleanup()


class HarmonyOpenPypePlugin( DeadlinePlugin ):

    def __init__( self ):
        super().__init__()
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument

@@ -24,11 +26,11 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
        print("Cleanup")
        for stdoutHandler in self.StdoutHandlers:
            del stdoutHandler.HandleCallback

        del self.InitializeProcessCallback
        del self.RenderExecutableCallback
        del self.RenderArgumentCallback

    def CheckExitCode( self, exitCode ):
        print("check code")
        if exitCode != 0:

@@ -36,20 +38,20 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
            self.LogInfo( "Renderer reported an error with error code 100. This will be ignored, since the option to ignore it is specified in the Job Properties." )
        else:
            self.FailRender( "Renderer returned non-zero error code %d. Check the renderer's output." % exitCode )

    def InitializeProcess( self ):
        self.PluginType = PluginType.Simple
        self.StdoutHandling = True
        self.PopupHandling = True

        self.AddStdoutHandlerCallback( "Rendered frame ([0-9]+)" ).HandleCallback += self.HandleStdoutProgress

    def HandleStdoutProgress( self ):
        startFrame = self.GetStartFrame()
        endFrame = self.GetEndFrame()
        if( endFrame - startFrame + 1 != 0 ):
            self.SetProgress( 100 * ( int(self.GetRegexMatch(1)) - startFrame + 1 ) / ( endFrame - startFrame + 1 ) )

    def RenderExecutable( self ):
        version = int( self.GetPluginInfoEntry( "Version" ) )
        exe = ""

@@ -58,7 +60,7 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
        if( exe == "" ):
            self.FailRender( "Harmony render executable was not found in the configured separated list \"" + exeList + "\". The path to the render executable can be configured from the Plugin Configuration in the Deadline Monitor." )
        return exe

    def RenderArgument( self ):
        renderArguments = "-batch"

@@ -72,20 +74,20 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
        resolutionX = self.GetIntegerPluginInfoEntryWithDefault( "ResolutionX", -1 )
        resolutionY = self.GetIntegerPluginInfoEntryWithDefault( "ResolutionY", -1 )
        fov = self.GetFloatPluginInfoEntryWithDefault( "FieldOfView", -1 )

        if resolutionX > 0 and resolutionY > 0 and fov > 0:
            renderArguments += " -res " + str( resolutionX ) + " " + str( resolutionY ) + " " + str( fov )

        camera = self.GetPluginInfoEntryWithDefault( "Camera", "" )

        if not camera == "":
            renderArguments += " -camera " + camera

        startFrame = str( self.GetStartFrame() )
        endFrame = str( self.GetEndFrame() )

        renderArguments += " -frames " + startFrame + " " + endFrame

        if not self.GetBooleanPluginInfoEntryWithDefault( "IsDatabase", False ):
            sceneFilename = self.GetPluginInfoEntryWithDefault( "SceneFile", self.GetDataFilename() )
            sceneFilename = RepositoryUtils.CheckPathMapping( sceneFilename )

@@ -99,12 +101,12 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
        renderArguments += " -scene " + scene
        version = self.GetPluginInfoEntryWithDefault( "SceneVersion", "" )
        renderArguments += " -version " + version

        #tempSceneDirectory = self.CreateTempDirectory( "thread" + str(self.GetThreadNumber()) )
        #preRenderScript =
        #preRenderScript =
        rendernodeNum = 0
        scriptBuilder = StringBuilder()

        while True:
            nodeName = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "Node", "" )
            if nodeName == "":

@@ -115,35 +117,35 @@ class HarmonyOpenPypePlugin( DeadlinePlugin ):
            nodeLeadingZero = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "LeadingZero", "" )
            nodeFormat = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "Format", "" )
            nodeStartFrame = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "StartFrame", "" )

            if not nodePath == "":
                scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"drawingName\", 1, \"" + nodePath + "\" );")

            if not nodeLeadingZero == "":
                scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"leadingZeros\", 1, \"" + nodeLeadingZero + "\" );")

            if not nodeFormat == "":
                scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"drawingType\", 1, \"" + nodeFormat + "\" );")

            if not nodeStartFrame == "":
                scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"start\", 1, \"" + nodeStartFrame + "\" );")

            if nodeType == "Movie":
                nodePath = self.GetPluginInfoEntryWithDefault( "Output" + str( rendernodeNum ) + "Path", "" )
                if not nodePath == "":
                    scriptBuilder.AppendLine("node.setTextAttr( \"" + nodeName + "\", \"moviePath\", 1, \"" + nodePath + "\" );")

            rendernodeNum += 1

        tempDirectory = self.CreateTempDirectory( "thread" + str(self.GetThreadNumber()) )
        preRenderScriptName = Path.Combine( tempDirectory, "preRenderScript.txt" )

        File.WriteAllText( preRenderScriptName, scriptBuilder.ToString() )

        preRenderInlineScript = self.GetPluginInfoEntryWithDefault( "PreRenderInlineScript", "" )
        if preRenderInlineScript:
            renderArguments += " -preRenderInlineScript \"" + preRenderInlineScript +"\""

        renderArguments += " -preRenderScript \"" + preRenderScriptName +"\""

        return renderArguments

@@ -38,6 +38,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
        for publish process.
    """
    def __init__(self):
        super().__init__()
        self.InitializeProcessCallback += self.InitializeProcess
        self.RenderExecutableCallback += self.RenderExecutable
        self.RenderArgumentCallback += self.RenderArgument

@@ -107,7 +108,7 @@ class OpenPypeDeadlinePlugin(DeadlinePlugin):
                    "Scanning for compatible requested "
                    f"version {requested_version}"))
        dir_list = self.GetConfigEntry("OpenPypeInstallationDirs")


        # clean '\ ' for MacOS pasting
        if platform.system().lower() == "darwin":
            dir_list = dir_list.replace("\\ ", " ")

@@ -249,6 +249,7 @@ class OpenPypeTileAssembler(DeadlinePlugin):

    def __init__(self):
        """Init."""
        super().__init__()
        self.InitializeProcessCallback += self.initialize_process
        self.RenderExecutableCallback += self.render_executable
        self.RenderArgumentCallback += self.render_argument

@@ -353,7 +353,7 @@ class IntegrateFtrackApi(pyblish.api.InstancePlugin):
        status_name = asset_version_data.pop("status_name", None)

        # Try query asset version by criteria (asset id and version)
        version = asset_version_data.get("version") or 0
        version = asset_version_data.get("version") or "0"
        asset_version_entity = self._query_asset_version(
            session, version, asset_id
        )

@@ -139,7 +139,7 @@ def get_transferable_representations(instance):
    to_transfer = []

    for representation in instance.data.get("representations", []):
        if "publish_on_farm" not in representation.get("tags"):
        if "publish_on_farm" not in representation.get("tags", []):
            continue

        trans_rep = representation.copy()

@@ -265,8 +265,7 @@ def create_skeleton_instance(
        instance_skeleton_data[v] = instance.data.get(v)

    representations = get_transferable_representations(instance)
    instance_skeleton_data["representations"] = []
    instance_skeleton_data["representations"] += representations
    instance_skeleton_data["representations"] = representations

    persistent = instance.data.get("stagingDir_persistent") is True
    instance_skeleton_data["stagingDir_persistent"] = persistent

@@ -75,7 +75,6 @@ class RenderInstance(object):
    tilesY = attr.ib(default=0)  # number of tiles in Y

    # submit_publish_job
    toBeRenderedOn = attr.ib(default=None)
    deadlineSubmissionJob = attr.ib(default=None)
    anatomyData = attr.ib(default=None)
    outputDir = attr.ib(default=None)

@@ -464,9 +464,8 @@ def apply_plugin_settings_automatically(plugin, settings, logger=None):

    for option, value in settings.items():
        if logger:
            logger.debug("Plugin {} - Attr: {} -> {}".format(
                option, value, plugin.__name__
            ))
            logger.debug("Plugin %s - Attr: %s -> %s",
                         plugin.__name__, option, value)
        setattr(plugin, option, value)


@@ -953,6 +952,7 @@ def replace_with_published_scene_path(instance, replace_in_path=True):

    return file_path


def add_repre_files_for_cleanup(instance, repre):
    """ Explicitly mark repre files to be deleted.


@@ -961,7 +961,16 @@ def add_repre_files_for_cleanup(instance, repre):
    """
    files = repre["files"]
    staging_dir = repre.get("stagingDir")
    if not staging_dir or instance.data.get("stagingDir_persistent"):

    # first make sure representation level is not persistent
    if (
        not staging_dir
        or repre.get("stagingDir_persistent")
    ):
        return

    # then look into instance level if it's not persistent
    if instance.data.get("stagingDir_persistent"):
        return

    if isinstance(files, str):

35
openpype/plugins/publish/collect_farm_target.py
Normal file
35
openpype/plugins/publish/collect_farm_target.py
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectFarmTarget(pyblish.api.InstancePlugin):
|
||||
"""Collects the render target for the instance
|
||||
"""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.499
|
||||
label = "Collect Farm Target"
|
||||
targets = ["local"]
|
||||
|
||||
def process(self, instance):
|
||||
if not instance.data.get("farm"):
|
||||
return
|
||||
|
||||
context = instance.context
|
||||
|
||||
farm_name = ""
|
||||
op_modules = context.data.get("openPypeModules")
|
||||
|
||||
for farm_renderer in ["deadline", "royalrender", "muster"]:
|
||||
op_module = op_modules.get(farm_renderer, False)
|
||||
|
||||
if op_module and op_module.enabled:
|
||||
farm_name = farm_renderer
|
||||
elif not op_module:
|
||||
self.log.error("Cannot get OpenPype {0} module.".format(
|
||||
farm_renderer))
|
||||
|
||||
if farm_name:
|
||||
self.log.debug("Collected render target: {0}".format(farm_name))
|
||||
instance.data["toBeRenderedOn"] = farm_name
|
||||
else:
|
||||
AssertionError("No OpenPype renderer module found")
|
||||
|
|
@ -53,8 +53,8 @@ class ExtractBurnin(publish.Extractor):
|
|||
"flame",
|
||||
"houdini",
|
||||
"max",
|
||||
"blender"
|
||||
# "resolve"
|
||||
"blender",
|
||||
"unreal"
|
||||
]
|
||||
|
||||
optional = True
|
||||
|
|
|
|||
|
|
@ -8,6 +8,11 @@ from ayon_api import slugify_string
|
|||
from ayon_api.entity_hub import EntityHub
|
||||
|
||||
from openpype import AYON_SERVER_ENABLED
|
||||
from openpype.client import get_assets
|
||||
from openpype.pipeline.template_data import (
|
||||
get_asset_template_data,
|
||||
get_task_template_data,
|
||||
)
|
||||
|
||||
|
||||
def _default_json_parse(value):
|
||||
|
|
@ -27,13 +32,51 @@ class ExtractHierarchyToAYON(pyblish.api.ContextPlugin):
|
|||
|
||||
hierarchy_context = context.data.get("hierarchyContext")
|
||||
if not hierarchy_context:
|
||||
self.log.info("Skipping")
|
||||
self.log.debug("Skipping")
|
||||
return
|
||||
|
||||
project_name = context.data["projectName"]
|
||||
self._create_hierarchy(context, project_name)
|
||||
self._fill_instance_entities(context, project_name)
|
||||
|
||||
def _fill_instance_entities(self, context, project_name):
|
||||
instances_by_asset_name = collections.defaultdict(list)
|
||||
for instance in context:
|
||||
if instance.data.get("publish") is False:
|
||||
continue
|
||||
|
||||
instance_entity = instance.data.get("assetEntity")
|
||||
if instance_entity:
|
||||
continue
|
||||
|
||||
# Skip if instance asset does not match
|
||||
instance_asset_name = instance.data.get("asset")
|
||||
instances_by_asset_name[instance_asset_name].append(instance)
|
||||
|
||||
project_doc = context.data["projectEntity"]
|
||||
asset_docs = get_assets(
|
||||
project_name, asset_names=instances_by_asset_name.keys()
|
||||
)
|
||||
asset_docs_by_name = {
|
||||
asset_doc["name"]: asset_doc
|
||||
for asset_doc in asset_docs
|
||||
}
|
||||
for asset_name, instances in instances_by_asset_name.items():
|
||||
asset_doc = asset_docs_by_name[asset_name]
|
||||
asset_data = get_asset_template_data(asset_doc, project_name)
|
||||
for instance in instances:
|
||||
task_name = instance.data.get("task")
|
||||
template_data = get_task_template_data(
|
||||
project_doc, asset_doc, task_name)
|
||||
template_data.update(copy.deepcopy(asset_data))
|
||||
|
||||
instance.data["anatomyData"].update(template_data)
|
||||
instance.data["assetEntity"] = asset_doc
|
||||
|
||||
def _create_hierarchy(self, context, project_name):
|
||||
hierarchy_context = self._filter_hierarchy(context)
|
||||
if not hierarchy_context:
|
||||
self.log.info("All folders were filtered out")
|
||||
self.log.debug("All folders were filtered out")
|
||||
return
|
||||
|
||||
self.log.debug("Hierarchy_context: {}".format(
|
||||
|
|
|
|||
|
|
@ -128,7 +128,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
|||
if thumbnail_created:
|
||||
return full_output_path
|
||||
|
||||
self.log.warning("Thumbanil has not been created.")
|
||||
self.log.warning("Thumbnail has not been created.")
|
||||
|
||||
def _instance_has_thumbnail(self, instance):
|
||||
if "representations" not in instance.data:
|
||||
|
|
@ -147,6 +147,7 @@ class ExtractThumbnailFromSource(pyblish.api.InstancePlugin):
|
|||
oiio_cmd = get_oiio_tool_args(
|
||||
"oiiotool",
|
||||
"-a", src_path,
|
||||
"--ch", "R,G,B",
|
||||
"-o", dst_path
|
||||
)
|
||||
self.log.info("Running: {}".format(" ".join(oiio_cmd)))
|
||||
|
|
|
|||
|
|
@ -7,12 +7,12 @@ from openpype.pipeline.publish import (
|
|||
|
||||
|
||||
class ValidatePublishDir(pyblish.api.InstancePlugin):
|
||||
"""Validates if 'publishDir' is a project directory
|
||||
"""Validates if files are being published into a project directory
|
||||
|
||||
'publishDir' is collected based on publish templates. In specific cases
|
||||
('source' template) source folder of items is used as a 'publishDir', this
|
||||
validates if it is inside any project dir for the project.
|
||||
(eg. files are not published from local folder, unaccessible for studio'
|
||||
In specific cases ('source' template - in place publishing) source folder
|
||||
of published items is used as a regular `publish` dir.
|
||||
This validates if it is inside any project dir for the project.
|
||||
(eg. files are not published from local folder, inaccessible for studio')
|
||||
|
||||
"""
|
||||
|
||||
|
|
@ -44,6 +44,8 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
|
|||
|
||||
anatomy = instance.context.data["anatomy"]
|
||||
|
||||
# original_dirname must be convertable to rootless path
|
||||
# in other case it is path inside of root folder for the project
|
||||
success, _ = anatomy.find_root_template_from_path(original_dirname)
|
||||
|
||||
formatting_data = {
|
||||
|
|
@ -56,11 +58,12 @@ class ValidatePublishDir(pyblish.api.InstancePlugin):
|
|||
formatting_data=formatting_data)
|
||||
|
||||
def _get_template_name_from_instance(self, instance):
|
||||
"""Find template which will be used during integration."""
|
||||
project_name = instance.context.data["projectName"]
|
||||
host_name = instance.context.data["hostName"]
|
||||
anatomy_data = instance.data["anatomyData"]
|
||||
family = anatomy_data["family"]
|
||||
family = self.family_mapping.get("family") or family
|
||||
family = self.family_mapping.get(family) or family
|
||||
task_info = anatomy_data.get("task") or {}
|
||||
|
||||
return get_publish_template_name(
|
||||
|
|
|
|||
|
|
@ -25,16 +25,16 @@ class ValidateVersion(pyblish.api.InstancePlugin):
|
|||
# TODO: Remove full non-html version upon drop of old publisher
|
||||
msg = (
|
||||
"Version '{0}' from instance '{1}' that you are "
|
||||
" trying to publish is lower or equal to an existing version "
|
||||
" in the database. Version in database: '{2}'."
|
||||
"trying to publish is lower or equal to an existing version "
|
||||
"in the database. Version in database: '{2}'."
|
||||
"Please version up your workfile to a higher version number "
|
||||
"than: '{2}'."
|
||||
).format(version, instance.data["name"], latest_version)
|
||||
|
||||
msg_html = (
|
||||
"Version <b>{0}</b> from instance <b>{1}</b> that you are "
|
||||
" trying to publish is lower or equal to an existing version "
|
||||
" in the database. Version in database: <b>{2}</b>.<br><br>"
|
||||
"trying to publish is lower or equal to an existing version "
|
||||
"in the database. Version in database: <b>{2}</b>.<br><br>"
|
||||
"Please version up your workfile to a higher version number "
|
||||
"than: <b>{2}</b>."
|
||||
).format(version, instance.data["name"], latest_version)
|
||||
|
|
|
|||
|
|
@ -616,6 +616,23 @@ def _convert_maya_project_settings(ayon_settings, output):
|
|||
output["maya"] = ayon_maya
|
||||
|
||||
|
||||
def _convert_3dsmax_project_settings(ayon_settings, output):
|
||||
if "max" not in ayon_settings:
|
||||
return
|
||||
|
||||
ayon_max = ayon_settings["max"]
|
||||
_convert_host_imageio(ayon_max)
|
||||
if "PointCloud" in ayon_max:
|
||||
point_cloud_attribute = ayon_max["PointCloud"]["attribute"]
|
||||
new_point_cloud_attribute = {
|
||||
item["name"]: item["value"]
|
||||
for item in point_cloud_attribute
|
||||
}
|
||||
ayon_max["PointCloud"]["attribute"] = new_point_cloud_attribute
|
||||
|
||||
output["max"] = ayon_max
|
||||
|
||||
|
||||
def _convert_nuke_knobs(knobs):
|
||||
new_knobs = []
|
||||
for knob in knobs:
|
||||
|
|
@ -737,6 +754,17 @@ def _convert_nuke_project_settings(ayon_settings, output):
|
|||
item_filter["subsets"] = item_filter.pop("product_names")
|
||||
item_filter["families"] = item_filter.pop("product_types")
|
||||
|
||||
reformat_nodes_config = item.get("reformat_nodes_config") or {}
|
||||
reposition_nodes = reformat_nodes_config.get(
|
||||
"reposition_nodes") or []
|
||||
|
||||
for reposition_node in reposition_nodes:
|
||||
if "knobs" not in reposition_node:
|
||||
continue
|
||||
reposition_node["knobs"] = _convert_nuke_knobs(
|
||||
reposition_node["knobs"]
|
||||
)
|
||||
|
||||
name = item.pop("name")
|
||||
new_review_data_outputs[name] = item
|
||||
ayon_publish["ExtractReviewDataMov"]["outputs"] = new_review_data_outputs
|
||||
|
|
@ -1261,6 +1289,7 @@ def convert_project_settings(ayon_settings, default_settings):
|
|||
_convert_flame_project_settings(ayon_settings, output)
|
||||
_convert_fusion_project_settings(ayon_settings, output)
|
||||
_convert_maya_project_settings(ayon_settings, output)
|
||||
_convert_3dsmax_project_settings(ayon_settings, output)
|
||||
_convert_nuke_project_settings(ayon_settings, output)
|
||||
_convert_hiero_project_settings(ayon_settings, output)
|
||||
_convert_photoshop_project_settings(ayon_settings, output)
|
||||
|
|
|
|||
|
|
@ -256,6 +256,23 @@
|
|||
"allow_multiple_items": true,
|
||||
"allow_version_control": false,
|
||||
"extensions": []
|
||||
},
|
||||
{
|
||||
"family": "audio",
|
||||
"identifier": "",
|
||||
"label": "Audio ",
|
||||
"icon": "fa5s.file-audio",
|
||||
"default_variants": [
|
||||
"Main"
|
||||
],
|
||||
"description": "Audio product",
|
||||
"detailed_description": "Audio files for review or final delivery",
|
||||
"allow_sequences": false,
|
||||
"allow_multiple_items": false,
|
||||
"allow_version_control": false,
|
||||
"extensions": [
|
||||
".wav"
|
||||
]
|
||||
}
|
||||
],
|
||||
"editorial_creators": {
|
||||
|
|
|
|||
|
|
@ -343,6 +343,7 @@ class TextAttrWidget(_BaseAttrDefWidget):
|
|||
return self._input_widget.text()
|
||||
|
||||
def set_value(self, value, multivalue=False):
|
||||
block_signals = False
|
||||
if multivalue:
|
||||
set_value = set(value)
|
||||
if None in set_value:
|
||||
|
|
@ -352,13 +353,18 @@ class TextAttrWidget(_BaseAttrDefWidget):
|
|||
if len(set_value) == 1:
|
||||
value = tuple(set_value)[0]
|
||||
else:
|
||||
block_signals = True
|
||||
value = "< Multiselection >"
|
||||
|
||||
if value != self.current_value():
|
||||
if block_signals:
|
||||
self._input_widget.blockSignals(True)
|
||||
if self.multiline:
|
||||
self._input_widget.setPlainText(value)
|
||||
else:
|
||||
self._input_widget.setText(value)
|
||||
if block_signals:
|
||||
self._input_widget.blockSignals(False)
|
||||
|
||||
|
||||
class BoolAttrWidget(_BaseAttrDefWidget):
|
||||
|
|
@ -391,7 +397,9 @@ class BoolAttrWidget(_BaseAttrDefWidget):
|
|||
set_value.add(self.attr_def.default)
|
||||
|
||||
if len(set_value) > 1:
|
||||
self._input_widget.blockSignals(True)
|
||||
self._input_widget.setCheckState(QtCore.Qt.PartiallyChecked)
|
||||
self._input_widget.blockSignals(False)
|
||||
return
|
||||
value = tuple(set_value)[0]
|
||||
|
||||
|
|
|
|||
|
|
@ -168,7 +168,7 @@ class OverviewWidget(QtWidgets.QFrame):
|
|||
def make_sure_animation_is_finished(self):
|
||||
if self._change_anim.state() == QtCore.QAbstractAnimation.Running:
|
||||
self._change_anim.stop()
|
||||
self._on_change_anim_finished()
|
||||
self._on_change_anim_finished()
|
||||
|
||||
def set_state(self, new_state, animate):
|
||||
if new_state == self._current_state:
|
||||
|
|
|
|||
|
|
@ -75,7 +75,7 @@ class TasksModel(QtGui.QStandardItemModel):
|
|||
|
||||
def set_asset_id(self, asset_id):
|
||||
asset_doc = None
|
||||
if self._context_is_valid():
|
||||
if asset_id and self._context_is_valid():
|
||||
project_name = self._get_current_project()
|
||||
asset_doc = get_asset_by_id(
|
||||
project_name, asset_id, fields=["data.tasks"]
|
||||
|
|
|
|||
|
|
@ -48,6 +48,11 @@ from ._api import (
|
|||
patch,
|
||||
delete,
|
||||
|
||||
get_timeout,
|
||||
set_timeout,
|
||||
get_max_retries,
|
||||
set_max_retries,
|
||||
|
||||
get_event,
|
||||
get_events,
|
||||
dispatch_event,
|
||||
|
|
@ -245,6 +250,11 @@ __all__ = (
|
|||
"patch",
|
||||
"delete",
|
||||
|
||||
"get_timeout",
|
||||
"set_timeout",
|
||||
"get_max_retries",
|
||||
"set_max_retries",
|
||||
|
||||
"get_event",
|
||||
"get_events",
|
||||
"dispatch_event",
|
||||
|
|
|
|||
20
openpype/vendor/python/common/ayon_api/_api.py
vendored
20
openpype/vendor/python/common/ayon_api/_api.py
vendored
|
|
@ -474,6 +474,26 @@ def delete(*args, **kwargs):
|
|||
return con.delete(*args, **kwargs)
|
||||
|
||||
|
||||
def get_timeout(*args, **kwargs):
|
||||
con = get_server_api_connection()
|
||||
return con.get_timeout(*args, **kwargs)
|
||||
|
||||
|
||||
def set_timeout(*args, **kwargs):
|
||||
con = get_server_api_connection()
|
||||
return con.set_timeout(*args, **kwargs)
|
||||
|
||||
|
||||
def get_max_retries(*args, **kwargs):
|
||||
con = get_server_api_connection()
|
||||
return con.get_max_retries(*args, **kwargs)
|
||||
|
||||
|
||||
def set_max_retries(*args, **kwargs):
|
||||
con = get_server_api_connection()
|
||||
return con.set_max_retries(*args, **kwargs)
|
||||
|
||||
|
||||
def get_event(*args, **kwargs):
|
||||
con = get_server_api_connection()
|
||||
return con.get_event(*args, **kwargs)
|
||||
|
|
|
|||
|
|
@ -1,18 +1,21 @@
|
|||
# Environments where server url and api key are stored for global connection
|
||||
SERVER_URL_ENV_KEY = "AYON_SERVER_URL"
|
||||
SERVER_API_ENV_KEY = "AYON_API_KEY"
|
||||
SERVER_TIMEOUT_ENV_KEY = "AYON_SERVER_TIMEOUT"
|
||||
SERVER_RETRIES_ENV_KEY = "AYON_SERVER_RETRIES"
|
||||
|
||||
# Backwards compatibility
|
||||
SERVER_TOKEN_ENV_KEY = SERVER_API_ENV_KEY
|
||||
|
||||
# --- User ---
|
||||
DEFAULT_USER_FIELDS = {
|
||||
"roles",
|
||||
"accessGroups",
|
||||
"defaultAccessGroups",
|
||||
"name",
|
||||
"isService",
|
||||
"isManager",
|
||||
"isGuest",
|
||||
"isAdmin",
|
||||
"defaultRoles",
|
||||
"createdAt",
|
||||
"active",
|
||||
"hasPassword",
|
||||
|
|
|
|||
|
|
@ -247,9 +247,11 @@ def products_graphql_query(fields):
|
|||
query = GraphQlQuery("ProductsQuery")
|
||||
|
||||
project_name_var = query.add_variable("projectName", "String!")
|
||||
folder_ids_var = query.add_variable("folderIds", "[String!]")
|
||||
product_ids_var = query.add_variable("productIds", "[String!]")
|
||||
product_names_var = query.add_variable("productNames", "[String!]")
|
||||
folder_ids_var = query.add_variable("folderIds", "[String!]")
|
||||
product_types_var = query.add_variable("productTypes", "[String!]")
|
||||
statuses_var = query.add_variable("statuses", "[String!]")
|
||||
|
||||
project_field = query.add_field("project")
|
||||
project_field.set_filter("name", project_name_var)
|
||||
|
|
@ -258,6 +260,8 @@ def products_graphql_query(fields):
|
|||
products_field.set_filter("ids", product_ids_var)
|
||||
products_field.set_filter("names", product_names_var)
|
||||
products_field.set_filter("folderIds", folder_ids_var)
|
||||
products_field.set_filter("productTypes", product_types_var)
|
||||
products_field.set_filter("statuses", statuses_var)
|
||||
|
||||
nested_fields = fields_to_dict(set(fields))
|
||||
add_links_fields(products_field, nested_fields)
|
||||
|
|
|
|||
236
openpype/vendor/python/common/ayon_api/server_api.py
vendored
236
openpype/vendor/python/common/ayon_api/server_api.py
vendored
|
|
@ -2,6 +2,7 @@ import os
|
|||
import re
|
||||
import io
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
import collections
|
||||
import platform
|
||||
|
|
@ -26,6 +27,8 @@ except ImportError:
|
|||
from json import JSONDecodeError as RequestsJSONDecodeError
|
||||
|
||||
from .constants import (
|
||||
SERVER_TIMEOUT_ENV_KEY,
|
||||
SERVER_RETRIES_ENV_KEY,
|
||||
DEFAULT_PRODUCT_TYPE_FIELDS,
|
||||
DEFAULT_PROJECT_FIELDS,
|
||||
DEFAULT_FOLDER_FIELDS,
|
||||
|
|
@ -127,6 +130,8 @@ class RestApiResponse(object):
|
|||
|
||||
@property
|
||||
def text(self):
|
||||
if self._response is None:
|
||||
return self.detail
|
||||
return self._response.text
|
||||
|
||||
@property
|
||||
|
|
@ -135,6 +140,8 @@ class RestApiResponse(object):
|
|||
|
||||
@property
|
||||
def headers(self):
|
||||
if self._response is None:
|
||||
return {}
|
||||
return self._response.headers
|
||||
|
||||
@property
|
||||
|
|
@ -148,6 +155,8 @@ class RestApiResponse(object):
|
|||
|
||||
@property
|
||||
def content(self):
|
||||
if self._response is None:
|
||||
return b""
|
||||
return self._response.content
|
||||
|
||||
@property
|
||||
|
|
@ -339,7 +348,11 @@ class ServerAPI(object):
|
|||
variable value 'AYON_CERT_FILE' by default.
|
||||
create_session (Optional[bool]): Create session for connection if
|
||||
token is available. Default is True.
|
||||
timeout (Optional[float]): Timeout for requests.
|
||||
max_retries (Optional[int]): Number of retries for requests.
|
||||
"""
|
||||
_default_timeout = 10.0
|
||||
_default_max_retries = 3
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
|
@ -352,6 +365,8 @@ class ServerAPI(object):
|
|||
ssl_verify=None,
|
||||
cert=None,
|
||||
create_session=True,
|
||||
timeout=None,
|
||||
max_retries=None,
|
||||
):
|
||||
if not base_url:
|
||||
raise ValueError("Invalid server URL {}".format(str(base_url)))
|
||||
|
|
@ -370,6 +385,13 @@ class ServerAPI(object):
|
|||
)
|
||||
self._sender = sender
|
||||
|
||||
self._timeout = None
|
||||
self._max_retries = None
|
||||
|
||||
# Set timeout and max retries based on passed values
|
||||
self.set_timeout(timeout)
|
||||
self.set_max_retries(max_retries)
|
||||
|
||||
if ssl_verify is None:
|
||||
# Custom AYON env variable for CA file or 'True'
|
||||
# - that should cover most default behaviors in 'requests'
|
||||
|
|
@ -474,6 +496,87 @@ class ServerAPI(object):
|
|||
ssl_verify = property(get_ssl_verify, set_ssl_verify)
|
||||
cert = property(get_cert, set_cert)
|
||||
|
||||
@classmethod
|
||||
def get_default_timeout(cls):
|
||||
"""Default value for requests timeout.
|
||||
|
||||
First looks for environment variable SERVER_TIMEOUT_ENV_KEY which
|
||||
can affect timeout value. If not available then use class
|
||||
attribute '_default_timeout'.
|
||||
|
||||
Returns:
|
||||
float: Timeout value in seconds.
|
||||
"""
|
||||
|
||||
try:
|
||||
return float(os.environ.get(SERVER_TIMEOUT_ENV_KEY))
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
return cls._default_timeout
|
||||
|
||||
@classmethod
|
||||
def get_default_max_retries(cls):
|
||||
"""Default value for requests max retries.
|
||||
|
||||
First looks for environment variable SERVER_RETRIES_ENV_KEY, which
|
||||
can affect max retries value. If not available then use class
|
||||
attribute '_default_max_retries'.
|
||||
|
||||
Returns:
|
||||
int: Max retries value.
|
||||
"""
|
||||
|
||||
try:
|
||||
return int(os.environ.get(SERVER_RETRIES_ENV_KEY))
|
||||
except (ValueError, TypeError):
|
||||
pass
|
||||
|
||||
return cls._default_max_retries
|
||||
|
||||
def get_timeout(self):
|
||||
"""Current value for requests timeout.
|
||||
|
||||
Returns:
|
||||
float: Timeout value in seconds.
|
||||
"""
|
||||
|
||||
return self._timeout
|
||||
|
||||
def set_timeout(self, timeout):
|
||||
"""Change timeout value for requests.
|
||||
|
||||
Args:
|
||||
timeout (Union[float, None]): Timeout value in seconds.
|
||||
"""
|
||||
|
||||
if timeout is None:
|
||||
timeout = self.get_default_timeout()
|
||||
self._timeout = float(timeout)
|
||||
|
||||
def get_max_retries(self):
|
||||
"""Current value for requests max retries.
|
||||
|
||||
Returns:
|
||||
int: Max retries value.
|
||||
"""
|
||||
|
||||
return self._max_retries
|
||||
|
||||
def set_max_retries(self, max_retries):
|
||||
"""Change max retries value for requests.
|
||||
|
||||
Args:
|
||||
max_retries (Union[int, None]): Max retries value.
|
||||
"""
|
||||
|
||||
if max_retries is None:
|
||||
max_retries = self.get_default_max_retries()
|
||||
self._max_retries = int(max_retries)
|
||||
|
||||
timeout = property(get_timeout, set_timeout)
|
||||
max_retries = property(get_max_retries, set_max_retries)
|
||||
|
||||
@property
|
||||
def access_token(self):
|
||||
"""Access token used for authorization to server.
|
||||
|
|
@ -890,9 +993,17 @@ class ServerAPI(object):
|
|||
for attr, filter_value in filters.items():
|
||||
query.set_variable_value(attr, filter_value)
|
||||
|
||||
# Backwards compatibility for server 0.3.x
|
||||
# - will be removed in future releases
|
||||
major, minor, _, _, _ = self.server_version_tuple
|
||||
access_groups_field = "accessGroups"
|
||||
if major == 0 and minor <= 3:
|
||||
access_groups_field = "roles"
|
||||
|
||||
for parsed_data in query.continuous_query(self):
|
||||
for user in parsed_data["users"]:
|
||||
user["roles"] = json.loads(user["roles"])
|
||||
user[access_groups_field] = json.loads(
|
||||
user[access_groups_field])
|
||||
yield user
|
||||
|
||||
def get_user(self, username=None):
|
||||
|
|
@ -1004,6 +1115,10 @@ class ServerAPI(object):
|
|||
logout_from_server(self._base_url, self._access_token)
|
||||
|
||||
def _do_rest_request(self, function, url, **kwargs):
|
||||
kwargs.setdefault("timeout", self.timeout)
|
||||
max_retries = kwargs.get("max_retries", self.max_retries)
|
||||
if max_retries < 1:
|
||||
max_retries = 1
|
||||
if self._session is None:
|
||||
# Validate token if was not yet validated
|
||||
# - ignore validation if we're in middle of
|
||||
|
|
@ -1023,38 +1138,54 @@ class ServerAPI(object):
|
|||
elif isinstance(function, RequestType):
|
||||
function = self._session_functions_mapping[function]
|
||||
|
||||
try:
|
||||
response = function(url, **kwargs)
|
||||
response = None
|
||||
new_response = None
|
||||
for _ in range(max_retries):
|
||||
try:
|
||||
response = function(url, **kwargs)
|
||||
break
|
||||
|
||||
except ConnectionRefusedError:
|
||||
# Server may be restarting
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Unable to connect the server. Connection refused"}
|
||||
)
|
||||
except requests.exceptions.Timeout:
|
||||
# Connection timed out
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Connection timed out."}
|
||||
)
|
||||
except requests.exceptions.ConnectionError:
|
||||
# Other connection error (ssl, etc) - does not make sense to
|
||||
# try call server again
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Unable to connect the server. Connection error"}
|
||||
)
|
||||
break
|
||||
|
||||
time.sleep(0.1)
|
||||
|
||||
if new_response is not None:
|
||||
return new_response
|
||||
|
||||
content_type = response.headers.get("Content-Type")
|
||||
if content_type == "application/json":
|
||||
try:
|
||||
new_response = RestApiResponse(response)
|
||||
except JSONDecodeError:
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{
|
||||
"detail": "The response is not a JSON: {}".format(
|
||||
response.text)
|
||||
}
|
||||
)
|
||||
|
||||
except ConnectionRefusedError:
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Unable to connect the server. Connection refused"}
|
||||
)
|
||||
except requests.exceptions.ConnectionError:
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{"detail": "Unable to connect the server. Connection error"}
|
||||
)
|
||||
else:
|
||||
content_type = response.headers.get("Content-Type")
|
||||
if content_type == "application/json":
|
||||
try:
|
||||
new_response = RestApiResponse(response)
|
||||
except JSONDecodeError:
|
||||
new_response = RestApiResponse(
|
||||
None,
|
||||
{
|
||||
"detail": "The response is not a JSON: {}".format(
|
||||
response.text)
|
||||
}
|
||||
)
|
||||
|
||||
elif content_type in ("image/jpeg", "image/png"):
|
||||
new_response = RestApiResponse(response)
|
||||
|
||||
else:
|
||||
new_response = RestApiResponse(response)
|
||||
new_response = RestApiResponse(response)
|
||||
|
||||
self.log.debug("Response {}".format(str(new_response)))
|
||||
return new_response
|
||||
|
|
@ -1747,7 +1878,15 @@ class ServerAPI(object):
|
|||
entity_type_defaults = DEFAULT_WORKFILE_INFO_FIELDS
|
||||
|
||||
elif entity_type == "user":
|
||||
entity_type_defaults = DEFAULT_USER_FIELDS
|
||||
entity_type_defaults = set(DEFAULT_USER_FIELDS)
|
||||
# Backwards compatibility for server 0.3.x
|
||||
# - will be removed in future releases
|
||||
major, minor, _, _, _ = self.server_version_tuple
|
||||
if major == 0 and minor <= 3:
|
||||
entity_type_defaults.discard("accessGroups")
|
||||
entity_type_defaults.discard("defaultAccessGroups")
|
||||
entity_type_defaults.add("roles")
|
||||
entity_type_defaults.add("defaultRoles")
|
||||
|
||||
else:
|
||||
raise ValueError("Unknown entity type \"{}\"".format(entity_type))
|
||||
|
|
@ -2124,7 +2263,12 @@ class ServerAPI(object):
|
|||
server.
|
||||
"""
|
||||
|
||||
result = self.get("desktop/dependency_packages")
|
||||
endpoint = "desktop/dependencyPackages"
|
||||
major, minor, _, _, _ = self.server_version_tuple
|
||||
if major == 0 and minor <= 3:
|
||||
endpoint = "desktop/dependency_packages"
|
||||
|
||||
result = self.get(endpoint)
|
||||
result.raise_for_status()
|
||||
return result.data
|
||||
|
||||
|
|
@ -3810,6 +3954,8 @@ class ServerAPI(object):
|
|||
product_ids=None,
|
||||
product_names=None,
|
||||
folder_ids=None,
|
||||
product_types=None,
|
||||
statuses=None,
|
||||
names_by_folder_ids=None,
|
||||
active=True,
|
||||
fields=None,
|
||||
|
|
@ -3828,6 +3974,10 @@ class ServerAPI(object):
|
|||
filtering.
|
||||
folder_ids (Optional[Iterable[str]]): Ids of task parents.
|
||||
Use 'None' if folder is direct child of project.
|
||||
product_types (Optional[Iterable[str]]): Product types used for
|
||||
filtering.
|
||||
statuses (Optional[Iterable[str]]): Product statuses used for
|
||||
filtering.
|
||||
names_by_folder_ids (Optional[dict[str, Iterable[str]]]): Product
|
||||
name filtering by folder id.
|
||||
active (Optional[bool]): Filter active/inactive products.
|
||||
|
|
@ -3862,6 +4012,18 @@ class ServerAPI(object):
|
|||
if not filter_folder_ids:
|
||||
return
|
||||
|
||||
filter_product_types = None
|
||||
if product_types is not None:
|
||||
filter_product_types = set(product_types)
|
||||
if not filter_product_types:
|
||||
return
|
||||
|
||||
filter_statuses = None
|
||||
if statuses is not None:
|
||||
filter_statuses = set(statuses)
|
||||
if not filter_statuses:
|
||||
return
|
||||
|
||||
# This will disable 'folder_ids' and 'product_names' filters
|
||||
# - maybe could be enhanced in future?
|
||||
if names_by_folder_ids is not None:
|
||||
|
|
@ -3881,7 +4043,7 @@ class ServerAPI(object):
|
|||
fields = set(fields) | {"id"}
|
||||
if "attrib" in fields:
|
||||
fields.remove("attrib")
|
||||
fields |= self.get_attributes_fields_for_type("folder")
|
||||
fields |= self.get_attributes_fields_for_type("product")
|
||||
else:
|
||||
fields = self.get_default_fields_for_type("product")
|
||||
|
||||
|
|
@ -3908,6 +4070,12 @@ class ServerAPI(object):
|
|||
if filter_folder_ids:
|
||||
filters["folderIds"] = list(filter_folder_ids)
|
||||
|
||||
if filter_product_types:
|
||||
filters["productTypes"] = list(filter_product_types)
|
||||
|
||||
if filter_statuses:
|
||||
filters["statuses"] = list(filter_statuses)
|
||||
|
||||
if product_ids:
|
||||
filters["productIds"] = list(product_ids)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,2 +1,2 @@
|
|||
"""Package declaring Python API for Ayon server."""
|
||||
__version__ = "0.3.5"
|
||||
__version__ = "0.4.1"
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring Pype version."""
|
||||
__version__ = "3.16.4-nightly.2"
|
||||
__version__ = "3.16.5-nightly.4"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "OpenPype"
|
||||
version = "3.16.3" # OpenPype
|
||||
version = "3.16.4" # OpenPype
|
||||
description = "Open VFX and Animation pipeline with support."
|
||||
authors = ["OpenPype Team <info@openpype.io>"]
|
||||
license = "MIT License"
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from ayon_server.settings import BaseSettingsModel
|
|||
|
||||
class CreateRenderPlugin(BaseSettingsModel):
|
||||
mark_for_review: bool = Field(True, title="Review")
|
||||
defaults: list[str] = Field(
|
||||
default_variants: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Default Variants"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@ DEFAULT_AFTEREFFECTS_SETTING = {
|
|||
"create": {
|
||||
"RenderCreator": {
|
||||
"mark_for_review": True,
|
||||
"defaults": [
|
||||
"default_variants": [
|
||||
"Main"
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Package declaring addon version."""
|
||||
__version__ = "0.1.1"
|
||||
__version__ = "0.1.2"
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ from ayon_server.settings import (
|
|||
BaseSettingsModel,
|
||||
MultiplatformPathListModel,
|
||||
ensure_unique_names,
|
||||
task_types_enum,
|
||||
)
|
||||
from ayon_server.exceptions import BadRequestException
|
||||
|
||||
|
|
@ -38,13 +39,52 @@ class CoreImageIOConfigModel(BaseSettingsModel):
|
|||
class CoreImageIOBaseModel(BaseSettingsModel):
|
||||
activate_global_color_management: bool = Field(
|
||||
False,
|
||||
title="Override global OCIO config"
|
||||
title="Enable Color Management"
|
||||
)
|
||||
ocio_config: CoreImageIOConfigModel = Field(
|
||||
default_factory=CoreImageIOConfigModel, title="OCIO config"
|
||||
default_factory=CoreImageIOConfigModel,
|
||||
title="OCIO config"
|
||||
)
|
||||
file_rules: CoreImageIOFileRulesModel = Field(
|
||||
default_factory=CoreImageIOFileRulesModel, title="File Rules"
|
||||
default_factory=CoreImageIOFileRulesModel,
|
||||
title="File Rules"
|
||||
)
|
||||
|
||||
|
||||
class VersionStartCategoryProfileModel(BaseSettingsModel):
|
||||
_layout = "expanded"
|
||||
host_names: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Host names"
|
||||
)
|
||||
task_types: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Task types",
|
||||
enum_resolver=task_types_enum
|
||||
)
|
||||
task_names: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Task names"
|
||||
)
|
||||
product_types: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Product types"
|
||||
)
|
||||
product_names: list[str] = Field(
|
||||
default_factory=list,
|
||||
title="Product names"
|
||||
)
|
||||
version_start: int = Field(
|
||||
1,
|
||||
title="Version Start",
|
||||
ge=0
|
||||
)
|
||||
|
||||
|
||||
class VersionStartCategoryModel(BaseSettingsModel):
|
||||
profiles: list[VersionStartCategoryProfileModel] = Field(
|
||||
default_factory=list,
|
||||
title="Profiles"
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -61,6 +101,10 @@ class CoreSettings(BaseSettingsModel):
|
|||
default_factory=GlobalToolsModel,
|
||||
title="Tools"
|
||||
)
|
||||
version_start_category: VersionStartCategoryModel = Field(
|
||||
default_factory=VersionStartCategoryModel,
|
||||
title="Version start"
|
||||
)
|
||||
imageio: CoreImageIOBaseModel = Field(
|
||||
default_factory=CoreImageIOBaseModel,
|
||||
title="Color Management (ImageIO)"
|
||||
|
|
@ -131,6 +175,9 @@ DEFAULT_VALUES = {
|
|||
"studio_code": "",
|
||||
"environments": "{}",
|
||||
"tools": DEFAULT_TOOLS_VALUES,
|
||||
"version_start_category": {
|
||||
"profiles": []
|
||||
},
|
||||
"publish": DEFAULT_PUBLISH_VALUES,
|
||||
"project_folder_structure": json.dumps({
|
||||
"__project_root__": {
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
__version__ = "0.1.1"
|
||||
__version__ = "0.1.2"
|
||||
|
|
|
|||
|
|
@ -44,6 +44,6 @@ class RenderSettingsModel(BaseSettingsModel):
|
|||
DEFAULT_RENDER_SETTINGS = {
|
||||
"default_render_image_folder": "renders/3dsmax",
|
||||
"aov_separator": "underscore",
|
||||
"image_format": "png",
|
||||
"image_format": "exr",
|
||||
"multipass": True
|
||||
}
|
||||
|
|
|
|||
|
|
@ -252,7 +252,9 @@ DEFAULT_CREATORS_SETTINGS = {
|
|||
},
|
||||
"CreateUnrealSkeletalMesh": {
|
||||
"enabled": True,
|
||||
"default_variants": [],
|
||||
"default_variants": [
|
||||
"Main",
|
||||
],
|
||||
"joint_hints": "jnt_org"
|
||||
},
|
||||
"CreateMultiverseLook": {
|
||||
|
|
|
|||
|
|
@ -288,5 +288,22 @@ DEFAULT_SIMPLE_CREATORS = [
|
|||
"allow_multiple_items": True,
|
||||
"allow_version_control": False,
|
||||
"extensions": []
|
||||
},
|
||||
{
|
||||
"product_type": "audio",
|
||||
"identifier": "",
|
||||
"label": "Audio ",
|
||||
"icon": "fa5s.file-audio",
|
||||
"default_variants": [
|
||||
"Main"
|
||||
],
|
||||
"description": "Audio product",
|
||||
"detailed_description": "Audio files for review or final delivery",
|
||||
"allow_sequences": False,
|
||||
"allow_multiple_items": False,
|
||||
"allow_version_control": False,
|
||||
"extensions": [
|
||||
".wav"
|
||||
]
|
||||
}
|
||||
]
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ import logging
|
|||
from pyblish.api import Instance as PyblishInstance
|
||||
|
||||
from tests.lib.testing_classes import BaseTest
|
||||
from openpype.plugins.publish.validate_sequence_frames import (
|
||||
from openpype.hosts.unreal.plugins.publish.validate_sequence_frames import (
|
||||
ValidateSequenceFrames
|
||||
)
|
||||
|
||||
|
|
@ -38,7 +38,13 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
data = {
|
||||
"frameStart": 1001,
|
||||
"frameEnd": 1002,
|
||||
"representations": []
|
||||
"representations": [],
|
||||
"assetEntity": {
|
||||
"data": {
|
||||
"clipIn": 1001,
|
||||
"clipOut": 1002,
|
||||
}
|
||||
}
|
||||
}
|
||||
yield Instance
|
||||
|
||||
|
|
@ -58,6 +64,7 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
]
|
||||
instance.data["representations"] = representations
|
||||
instance.data["frameEnd"] = 1001
|
||||
instance.data["assetEntity"]["data"]["clipOut"] = 1001
|
||||
|
||||
plugin.process(instance)
|
||||
|
||||
|
|
@ -84,49 +91,11 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
|
||||
plugin.process(instance)
|
||||
|
||||
@pytest.mark.parametrize("files",
|
||||
[["Main_beauty.1001.v001.exr",
|
||||
"Main_beauty.1002.v001.exr"]])
|
||||
def test_validate_sequence_frames_wrong_name(self, instance,
|
||||
plugin, files):
|
||||
# tests for names with number inside, caused clique failure before
|
||||
representations = [
|
||||
{
|
||||
"ext": "exr",
|
||||
"files": files,
|
||||
}
|
||||
]
|
||||
instance.data["representations"] = representations
|
||||
|
||||
with pytest.raises(AssertionError) as excinfo:
|
||||
plugin.process(instance)
|
||||
assert ("Must detect single collection" in
|
||||
str(excinfo.value))
|
||||
|
||||
@pytest.mark.parametrize("files",
|
||||
[["Main_beauty.v001.1001.ass.gz",
|
||||
"Main_beauty.v001.1002.ass.gz"]])
|
||||
def test_validate_sequence_frames_possible_wrong_name(
|
||||
self, instance, plugin, files):
|
||||
# currently pattern fails on extensions with dots
|
||||
representations = [
|
||||
{
|
||||
"files": files,
|
||||
}
|
||||
]
|
||||
instance.data["representations"] = representations
|
||||
|
||||
with pytest.raises(AssertionError) as excinfo:
|
||||
plugin.process(instance)
|
||||
assert ("Must not have remainder" in
|
||||
str(excinfo.value))
|
||||
|
||||
@pytest.mark.parametrize("files",
|
||||
[["Main_beauty.v001.1001.ass.gz",
|
||||
"Main_beauty.v001.1002.ass.gz"]])
|
||||
def test_validate_sequence_frames__correct_ext(
|
||||
self, instance, plugin, files):
|
||||
# currently pattern fails on extensions with dots
|
||||
representations = [
|
||||
{
|
||||
"ext": "ass.gz",
|
||||
|
|
@ -147,6 +116,7 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
]
|
||||
instance.data["representations"] = representations
|
||||
instance.data["frameEnd"] = 1003
|
||||
instance.data["assetEntity"]["data"]["clipOut"] = 1003
|
||||
|
||||
plugin.process(instance)
|
||||
|
||||
|
|
@ -160,6 +130,7 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
]
|
||||
instance.data["representations"] = representations
|
||||
instance.data["frameEnd"] = 1003
|
||||
instance.data["assetEntity"]["data"]["clipOut"] = 1003
|
||||
|
||||
with pytest.raises(ValueError) as excinfo:
|
||||
plugin.process(instance)
|
||||
|
|
@ -175,6 +146,7 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
]
|
||||
instance.data["representations"] = representations
|
||||
instance.data["frameEnd"] = 1003
|
||||
instance.data["assetEntity"]["data"]["clipOut"] = 1003
|
||||
|
||||
with pytest.raises(AssertionError) as excinfo:
|
||||
plugin.process(instance)
|
||||
|
|
@ -195,6 +167,7 @@ class TestValidateSequenceFrames(BaseTest):
|
|||
instance.data["slate"] = True
|
||||
instance.data["representations"] = representations
|
||||
instance.data["frameEnd"] = 1003
|
||||
instance.data["assetEntity"]["data"]["clipOut"] = 1003
|
||||
|
||||
plugin.process(instance)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Test suite for delivery functions."""
|
||||
from openpype.lib.delivery import collect_frames
|
||||
from openpype.lib import collect_frames
|
||||
|
||||
|
||||
def test_collect_frames_multi_sequence():
|
||||
|
|
@ -153,4 +153,3 @@ def test_collect_frames_single_file():
|
|||
|
||||
print(ret)
|
||||
assert ret == expected, "Not matching"
|
||||
|
||||
|
|
|
|||
83
tests/unit/openpype/lib/test_event_system.py
Normal file
83
tests/unit/openpype/lib/test_event_system.py
Normal file
|
|
@ -0,0 +1,83 @@
|
|||
from openpype.lib.events import EventSystem, QueuedEventSystem
|
||||
|
||||
|
||||
def test_default_event_system():
|
||||
output = []
|
||||
expected_output = [3, 2, 1]
|
||||
event_system = EventSystem()
|
||||
|
||||
def callback_1():
|
||||
event_system.emit("topic.2", {}, None)
|
||||
output.append(1)
|
||||
|
||||
def callback_2():
|
||||
event_system.emit("topic.3", {}, None)
|
||||
output.append(2)
|
||||
|
||||
def callback_3():
|
||||
output.append(3)
|
||||
|
||||
event_system.add_callback("topic.1", callback_1)
|
||||
event_system.add_callback("topic.2", callback_2)
|
||||
event_system.add_callback("topic.3", callback_3)
|
||||
|
||||
event_system.emit("topic.1", {}, None)
|
||||
|
||||
assert output == expected_output, (
|
||||
"Callbacks were not called in correct order")
|
||||
|
||||
|
||||
def test_base_event_system_queue():
|
||||
output = []
|
||||
expected_output = [1, 2, 3]
|
||||
event_system = QueuedEventSystem()
|
||||
|
||||
def callback_1():
|
||||
event_system.emit("topic.2", {}, None)
|
||||
output.append(1)
|
||||
|
||||
def callback_2():
|
||||
event_system.emit("topic.3", {}, None)
|
||||
output.append(2)
|
||||
|
||||
def callback_3():
|
||||
output.append(3)
|
||||
|
||||
event_system.add_callback("topic.1", callback_1)
|
||||
event_system.add_callback("topic.2", callback_2)
|
||||
event_system.add_callback("topic.3", callback_3)
|
||||
|
||||
event_system.emit("topic.1", {}, None)
|
||||
|
||||
assert output == expected_output, (
|
||||
"Callbacks were not called in correct order")
|
||||
|
||||
|
||||
def test_manual_event_system_queue():
|
||||
output = []
|
||||
expected_output = [1, 2, 3]
|
||||
event_system = QueuedEventSystem(auto_execute=False)
|
||||
|
||||
def callback_1():
|
||||
event_system.emit("topic.2", {}, None)
|
||||
output.append(1)
|
||||
|
||||
def callback_2():
|
||||
event_system.emit("topic.3", {}, None)
|
||||
output.append(2)
|
||||
|
||||
def callback_3():
|
||||
output.append(3)
|
||||
|
||||
event_system.add_callback("topic.1", callback_1)
|
||||
event_system.add_callback("topic.2", callback_2)
|
||||
event_system.add_callback("topic.3", callback_3)
|
||||
|
||||
event_system.emit("topic.1", {}, None)
|
||||
|
||||
while True:
|
||||
if event_system.process_next_event() is None:
|
||||
break
|
||||
|
||||
assert output == expected_output, (
|
||||
"Callbacks were not called in correct order")
|
||||
|
|
@ -12,16 +12,19 @@
|
|||
removes temporary databases (?)
|
||||
"""
|
||||
import pytest
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
from tests.lib.testing_classes import ModuleUnitTest
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
from openpype.modules.sync_server.utils import SiteAlreadyPresentError
|
||||
|
||||
|
||||
|
||||
class TestSiteOperation(ModuleUnitTest):
|
||||
|
||||
REPRESENTATION_ID = "60e578d0c987036c6a7b741d"
|
||||
|
||||
TEST_FILES = [("1eCwPljuJeOI8A3aisfOIBKKjcmIycTEt",
|
||||
TEST_FILES = [("1FHE70Hi7y05LLT_1O3Y6jGxwZGXKV9zX",
|
||||
"test_site_operations.zip", '')]
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
|
|
@ -71,7 +74,7 @@ class TestSiteOperation(ModuleUnitTest):
|
|||
@pytest.mark.usefixtures("setup_sync_server_module")
|
||||
def test_add_site_again(self, dbcon, setup_sync_server_module):
|
||||
"""Depends on test_add_site, must throw exception."""
|
||||
with pytest.raises(ValueError):
|
||||
with pytest.raises(SiteAlreadyPresentError):
|
||||
setup_sync_server_module.add_site(self.TEST_PROJECT_NAME,
|
||||
self.REPRESENTATION_ID,
|
||||
site_name='test_site')
|
||||
|
|
|
|||
98
tools/docker_build.ps1
Normal file
98
tools/docker_build.ps1
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
$current_dir = Get-Location
|
||||
$script_dir = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
|
||||
$repo_root = (Get-Item $script_dir).parent.FullName
|
||||
|
||||
$env:PSModulePath = $env:PSModulePath + ";$($repo_root)\tools\modules\powershell"
|
||||
|
||||
function Exit-WithCode($exitcode) {
|
||||
# Only exit this host process if it's a child of another PowerShell parent process...
|
||||
$parentPID = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$PID" | Select-Object -Property ParentProcessId).ParentProcessId
|
||||
$parentProcName = (Get-CimInstance -ClassName Win32_Process -Filter "ProcessId=$parentPID" | Select-Object -Property Name).Name
|
||||
if ('powershell.exe' -eq $parentProcName) { $host.SetShouldExit($exitcode) }
|
||||
|
||||
exit $exitcode
|
||||
}
|
||||
|
||||
function Restore-Cwd() {
|
||||
$tmp_current_dir = Get-Location
|
||||
if ("$tmp_current_dir" -ne "$current_dir") {
|
||||
Write-Color -Text ">>> ", "Restoring current directory" -Color Green, Gray
|
||||
Set-Location -Path $current_dir
|
||||
}
|
||||
}
|
||||
|
||||
function Get-Container {
|
||||
if (-not (Test-Path -PathType Leaf -Path "$($repo_root)\build\docker-image.id")) {
|
||||
Write-Color -Text "!!! ", "Docker command failed, cannot find image id." -Color Red, Yellow
|
||||
Restore-Cwd
|
||||
Exit-WithCode 1
|
||||
}
|
||||
$id = Get-Content "$($repo_root)\build\docker-image.id"
|
||||
Write-Color -Text ">>> ", "Creating container from image id ", "[", $id, "]" -Color Green, Gray, White, Cyan, White
|
||||
$cid = docker create $id bash
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Color -Text "!!! ", "Cannot create container." -Color Red, Yellow
|
||||
Restore-Cwd
|
||||
Exit-WithCode 1
|
||||
}
|
||||
return $cid
|
||||
}
|
||||
|
||||
function Change-Cwd() {
|
||||
Set-Location -Path $repo_root
|
||||
}
|
||||
|
||||
function New-DockerBuild {
|
||||
$version_file = Get-Content -Path "$($repo_root)\openpype\version.py"
|
||||
$result = [regex]::Matches($version_file, '__version__ = "(?<version>\d+\.\d+.\d+.*)"')
|
||||
$openpype_version = $result[0].Groups['version'].Value
|
||||
$startTime = [int][double]::Parse((Get-Date -UFormat %s))
|
||||
Write-Color -Text ">>> ", "Building OpenPype using Docker ..." -Color Green, Gray, White
|
||||
$variant = $args[0]
|
||||
if ($variant.Length -eq 0) {
|
||||
$dockerfile = "$($repo_root)\Dockerfile"
|
||||
} else {
|
||||
$dockerfile = "$( $repo_root )\Dockerfile.$variant"
|
||||
}
|
||||
if (-not (Test-Path -PathType Leaf -Path $dockerfile)) {
|
||||
Write-Color -Text "!!! ", "Dockerfile for specifed platform ", "[", $variant, "]", "doesn't exist." -Color Red, Yellow, Cyan, White, Cyan, Yellow
|
||||
Restore-Cwd
|
||||
Exit-WithCode 1
|
||||
}
|
||||
Write-Color -Text ">>> ", "Using Dockerfile for ", "[ ", $variant, " ]" -Color Green, Gray, White, Cyan, White
|
||||
|
||||
$build_dir = "$($repo_root)\build"
|
||||
if (-not(Test-Path $build_dir)) {
|
||||
New-Item -ItemType Directory -Path $build_dir
|
||||
}
|
||||
Write-Color -Text "--- ", "Cleaning build directory ..." -Color Yellow, Gray
|
||||
try {
|
||||
Remove-Item -Recurse -Force "$($build_dir)\*"
|
||||
} catch {
|
||||
Write-Color -Text "!!! ", "Cannot clean build directory, possibly because process is using it." -Color Red, Gray
|
||||
Write-Color -Text $_.Exception.Message -Color Red
|
||||
Exit-WithCode 1
|
||||
}
|
||||
|
||||
Write-Color -Text ">>> ", "Running Docker build ..." -Color Green, Gray, White
|
||||
docker build --pull --iidfile $repo_root/build/docker-image.id --build-arg BUILD_DATE=$(Get-Date -UFormat %Y-%m-%dT%H:%M:%SZ) --build-arg VERSION=$openpype_version -t pypeclub/openpype:$openpype_version -f $dockerfile .
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Color -Text "!!! ", "Docker command failed.", $LASTEXITCODE -Color Red, Yellow, Red
|
||||
Restore-Cwd
|
||||
Exit-WithCode 1
|
||||
}
|
||||
Write-Color -Text ">>> ", "Copying build from container ..." -Color Green, Gray, White
|
||||
$cid = Get-Container
|
||||
|
||||
docker cp "$($cid):/opt/openpype/build/exe.linux-x86_64-3.9" "$($repo_root)/build"
|
||||
docker cp "$($cid):/opt/openpype/build/build.log" "$($repo_root)/build"
|
||||
|
||||
$endTime = [int][double]::Parse((Get-Date -UFormat %s))
|
||||
try {
|
||||
New-BurntToastNotification -AppLogo "$openpype_root/openpype/resources/icons/openpype_icon.png" -Text "OpenPype build complete!", "All done in $( $endTime - $startTime ) secs. You will find OpenPype and build log in build directory."
|
||||
} catch {}
|
||||
Write-Color -Text "*** ", "All done in ", $($endTime - $startTime), " secs. You will find OpenPype and build log in ", "'.\build'", " directory." -Color Green, Gray, White, Gray, White, Gray
|
||||
}
|
||||
|
||||
Change-Cwd
|
||||
New-DockerBuild $ARGS
|
||||
Loading…
Add table
Add a link
Reference in a new issue