Mirror of https://github.com/ynput/ayon-core.git

Commit 76be40b3e9: Merge branch 'develop' into feature/OP-3292_Map-where-database-queries-and-changes

296 changed files with 87,880 additions and 784 deletions
.all-contributorsrc

@@ -309,7 +309,18 @@
       "contributions": [
         "code"
       ]
+    },
+    {
+      "login": "Tilix4",
+      "name": "Félix David",
+      "avatar_url": "https://avatars.githubusercontent.com/u/22875539?v=4",
+      "profile": "http://felixdavid.com/",
+      "contributions": [
+        "code",
+        "doc"
+      ]
     }
   ],
-  "contributorsPerLine": 7
-}
+  "contributorsPerLine": 7,
+  "skipCi": true
+}
CHANGELOG.md (42 changes)

@@ -1,19 +1,28 @@
# Changelog

-## [3.10.1-nightly.3](https://github.com/pypeclub/OpenPype/tree/HEAD)
+## [3.11.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.10.0...HEAD)

### 📖 Documentation

- doc: adding royal render and multiverse to the web site [\#3285](https://github.com/pypeclub/OpenPype/pull/3285)

**🚀 Enhancements**

- General: Updated windows oiio tool [\#3268](https://github.com/pypeclub/OpenPype/pull/3268)
- Unreal: add support for skeletalMesh and staticMesh to loaders [\#3267](https://github.com/pypeclub/OpenPype/pull/3267)
- Maya: reference loaders could store placeholder in referenced url [\#3264](https://github.com/pypeclub/OpenPype/pull/3264)
- TVPaint: Init file for TVPaint worker also handle guideline images [\#3250](https://github.com/pypeclub/OpenPype/pull/3250)
- Nuke: Change default icon path in settings [\#3247](https://github.com/pypeclub/OpenPype/pull/3247)

**🐛 Bug fixes**

- Global: extract review slate issues [\#3286](https://github.com/pypeclub/OpenPype/pull/3286)
- Webpublisher: return only active projects in ProjectsEndpoint [\#3281](https://github.com/pypeclub/OpenPype/pull/3281)
- Hiero: add support for task tags 3.10.x [\#3279](https://github.com/pypeclub/OpenPype/pull/3279)
- General: Fix Oiio tool path resolving [\#3278](https://github.com/pypeclub/OpenPype/pull/3278)
- Maya: Fix udim support for e.g. uppercase \<UDIM\> tag [\#3266](https://github.com/pypeclub/OpenPype/pull/3266)
- Nuke: bake reformat was failing on string type [\#3261](https://github.com/pypeclub/OpenPype/pull/3261)
- Maya: hotfix Pxr multitexture in looks [\#3260](https://github.com/pypeclub/OpenPype/pull/3260)
- Unreal: Fix Camera Loading if Layout is missing [\#3255](https://github.com/pypeclub/OpenPype/pull/3255)

@@ -21,13 +30,14 @@
- Unreal: Fixed Render creation in UE5 [\#3239](https://github.com/pypeclub/OpenPype/pull/3239)
- Unreal: Fixed Camera loading in UE5 [\#3238](https://github.com/pypeclub/OpenPype/pull/3238)
- Flame: debugging [\#3224](https://github.com/pypeclub/OpenPype/pull/3224)
- Ftrack: Push hierarchical attributes action works [\#3210](https://github.com/pypeclub/OpenPype/pull/3210)
- add silent audio to slate [\#3162](https://github.com/pypeclub/OpenPype/pull/3162)

**Merged pull requests:**

- Maya: better handling of legacy review subsets names [\#3269](https://github.com/pypeclub/OpenPype/pull/3269)
- Deadline: publishing of animation and pointcache on a farm [\#3225](https://github.com/pypeclub/OpenPype/pull/3225)
- Nuke: add pointcache and animation to loader [\#3186](https://github.com/pypeclub/OpenPype/pull/3186)
- Add a gizmo menu to nuke [\#3172](https://github.com/pypeclub/OpenPype/pull/3172)

## [3.10.0](https://github.com/pypeclub/OpenPype/tree/3.10.0) (2022-05-26)

@@ -37,7 +47,6 @@
- General: OpenPype modules publish plugins are registered in host [\#3180](https://github.com/pypeclub/OpenPype/pull/3180)
- General: Creator plugins from addons can be registered [\#3179](https://github.com/pypeclub/OpenPype/pull/3179)
- Ftrack: Single image reviewable [\#3157](https://github.com/pypeclub/OpenPype/pull/3157)

**🚀 Enhancements**

@@ -50,10 +59,6 @@
- Maya: added clean\_import option to Import loader [\#3181](https://github.com/pypeclub/OpenPype/pull/3181)
- Add the scripts menu definition to nuke [\#3168](https://github.com/pypeclub/OpenPype/pull/3168)
- Maya: add maya 2023 to default applications [\#3167](https://github.com/pypeclub/OpenPype/pull/3167)
- Compressed bgeo publishing in SAP and Houdini loader [\#3153](https://github.com/pypeclub/OpenPype/pull/3153)
- General: Add 'dataclasses' to required python modules [\#3149](https://github.com/pypeclub/OpenPype/pull/3149)
- Hooks: Tweak logging grammar [\#3147](https://github.com/pypeclub/OpenPype/pull/3147)
- Nuke: settings for reformat node in CreateWriteRender node [\#3143](https://github.com/pypeclub/OpenPype/pull/3143)

**🐛 Bug fixes**

@@ -66,6 +71,7 @@
- Hiero: debugging frame range and other 3.10 [\#3222](https://github.com/pypeclub/OpenPype/pull/3222)
- Project Manager: Fix persistent editors on project change [\#3218](https://github.com/pypeclub/OpenPype/pull/3218)
- Deadline: instance data overwrite fix [\#3214](https://github.com/pypeclub/OpenPype/pull/3214)
- Ftrack: Push hierarchical attributes action works [\#3210](https://github.com/pypeclub/OpenPype/pull/3210)
- Standalone Publisher: Always create new representation for thumbnail [\#3203](https://github.com/pypeclub/OpenPype/pull/3203)
- Photoshop: skip collector when automatic testing [\#3202](https://github.com/pypeclub/OpenPype/pull/3202)
- Nuke: render/workfile version sync doesn't work on farm [\#3185](https://github.com/pypeclub/OpenPype/pull/3185)

@@ -76,9 +82,6 @@
- General: Oiio conversion for ffmpeg checks for invalid characters [\#3166](https://github.com/pypeclub/OpenPype/pull/3166)
- Fix for attaching render to subset [\#3164](https://github.com/pypeclub/OpenPype/pull/3164)
- Harmony: fixed missing task name in render instance [\#3163](https://github.com/pypeclub/OpenPype/pull/3163)
- Ftrack: Action delete old versions formatting works [\#3152](https://github.com/pypeclub/OpenPype/pull/3152)
- Deadline: fix the output directory [\#3144](https://github.com/pypeclub/OpenPype/pull/3144)
- General: New Session schema [\#3141](https://github.com/pypeclub/OpenPype/pull/3141)

**🔀 Refactored code**

@@ -89,7 +92,6 @@
- Harmony: message length in 21.1 [\#3257](https://github.com/pypeclub/OpenPype/pull/3257)
- Harmony: 21.1 fix [\#3249](https://github.com/pypeclub/OpenPype/pull/3249)
- Maya: added jpg to filter for Image Plane Loader [\#3223](https://github.com/pypeclub/OpenPype/pull/3223)
- Maya: added jpg to filter for Image Plane Loader [\#3221](https://github.com/pypeclub/OpenPype/pull/3221)
- Webpublisher: replace space by underscore in subset names [\#3160](https://github.com/pypeclub/OpenPype/pull/3160)

## [3.9.8](https://github.com/pypeclub/OpenPype/tree/3.9.8) (2022-05-19)

@@ -119,24 +121,6 @@
[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.6...3.9.7)

**🆕 New features**

- Ftrack: Single image reviewable [\#3158](https://github.com/pypeclub/OpenPype/pull/3158)

**🚀 Enhancements**

- Deadline output dir issue to 3.9x [\#3155](https://github.com/pypeclub/OpenPype/pull/3155)
- nuke: removing redundant code from startup [\#3142](https://github.com/pypeclub/OpenPype/pull/3142)

**🐛 Bug fixes**

- Ftrack: Action delete old versions formatting works [\#3154](https://github.com/pypeclub/OpenPype/pull/3154)
- nuke: adding extract thumbnail settings [\#3148](https://github.com/pypeclub/OpenPype/pull/3148)

**Merged pull requests:**

- Webpublisher: replace space by underscore in subset names [\#3159](https://github.com/pypeclub/OpenPype/pull/3159)

## [3.9.6](https://github.com/pypeclub/OpenPype/tree/3.9.6) (2022-05-03)

[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.5...3.9.6)
README.md

@@ -1,6 +1,6 @@

 <!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
-[](#contributors-)
+[](#contributors-)
 <!-- ALL-CONTRIBUTORS-BADGE:END -->
 OpenPype
 ====

@@ -328,6 +328,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d
 <td align="center"><a href="https://github.com/Malthaldar"><img src="https://avatars.githubusercontent.com/u/33671694?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Malthaldar</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=Malthaldar" title="Code">💻</a></td>
 <td align="center"><a href="http://www.svenneve.com/"><img src="https://avatars.githubusercontent.com/u/2472863?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Sven Neve</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=svenneve" title="Code">💻</a></td>
 <td align="center"><a href="https://github.com/zafrs"><img src="https://avatars.githubusercontent.com/u/26890002?v=4?s=100" width="100px;" alt=""/><br /><sub><b>zafrs</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=zafrs" title="Code">💻</a></td>
+<td align="center"><a href="http://felixdavid.com/"><img src="https://avatars.githubusercontent.com/u/22875539?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Félix David</b></sub></a><br /><a href="https://github.com/pypeclub/OpenPype/commits?author=Tilix4" title="Code">💻</a> <a href="https://github.com/pypeclub/OpenPype/commits?author=Tilix4" title="Documentation">📖</a></td>
 </tr>
 </table>
@@ -44,6 +44,7 @@ from . import resources

 from .plugin import (
     Extractor,
+    Integrator,

     ValidatePipelineOrder,
     ValidateContentsOrder,

@@ -86,6 +87,7 @@ __all__ = [

     # plugin classes
     "Extractor",
+    "Integrator",
     # ordering
     "ValidatePipelineOrder",
     "ValidateContentsOrder",
@@ -29,6 +29,7 @@ from .lib import (
     get_current_sequence,
     get_timeline_selection,
     get_current_track,
+    get_track_item_tags,
     get_track_item_pype_tag,
     set_track_item_pype_tag,
     get_track_item_pype_data,

@@ -83,6 +84,7 @@ __all__ = [
     "get_current_sequence",
     "get_timeline_selection",
     "get_current_track",
+    "get_track_item_tags",
     "get_track_item_pype_tag",
     "set_track_item_pype_tag",
     "get_track_item_pype_data",
@@ -274,6 +274,31 @@ def _validate_all_atrributes(
     ])


+def get_track_item_tags(track_item):
+    """
+    Get track item tags excluding openpype tag
+
+    Attributes:
+        trackItem (hiero.core.TrackItem): hiero object
+
+    Returns:
+        hiero.core.Tag: hierarchy, orig clip attributes
+    """
+    returning_tag_data = []
+    # get all tags from track item
+    _tags = track_item.tags()
+    if not _tags:
+        return []
+
+    # collect all tags which are not openpype tag
+    returning_tag_data.extend(
+        tag for tag in _tags
+        if tag.name() != self.pype_tag_name
+    )
+
+    return returning_tag_data
+
+
 def get_track_item_pype_tag(track_item):
     """
     Get pype track item tag created by creator or loader plugin.
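As committed above, `get_track_item_tags` references `self.pype_tag_name` inside a module-level function where no `self` exists, so the first track item that actually carries tags would raise a `NameError`. A corrected sketch follows; the `OPENPYPE_TAG_NAME` constant is an assumption standing in for whatever tag name the module really uses, not something this diff confirms.

```python
# Hedged sketch of the apparent intent; OPENPYPE_TAG_NAME is hypothetical.
OPENPYPE_TAG_NAME = "openpypeData"  # assumed value, not taken from this diff


def get_track_item_tags_fixed(track_item):
    """Return all tags on the track item except the openpype pipeline tag."""
    tags = track_item.tags()
    if not tags:
        return []
    # Keep every tag whose name differs from the pipeline tag name.
    return [tag for tag in tags if tag.name() != OPENPYPE_TAG_NAME]
```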
@@ -4,16 +4,16 @@ from pyblish import api
 class CollectClipTagTasks(api.InstancePlugin):
     """Collect Tags from selected track items."""

-    order = api.CollectorOrder
+    order = api.CollectorOrder - 0.077
     label = "Collect Tag Tasks"
     hosts = ["hiero"]
-    families = ['clip']
+    families = ["shot"]

     def process(self, instance):
         # gets tags
         tags = instance.data["tags"]

-        tasks = dict()
+        tasks = {}
         for tag in tags:
             t_metadata = dict(tag.metadata())
             t_family = t_metadata.get("tag.family", "")
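The order change above moves this collector earlier: pyblish runs plugins sorted by their `order` float, so `CollectorOrder - 0.077` executes before plugins left at plain `CollectorOrder`. A minimal standalone sketch of that mechanism, with hypothetical plugin names:

```python
import pyblish.api


class CollectFirst(pyblish.api.InstancePlugin):
    # CollectorOrder - 0.077 sorts lower, so this plugin runs earlier.
    order = pyblish.api.CollectorOrder - 0.077
    label = "Collect First (sketch)"

    def process(self, instance):
        instance.data.setdefault("tags", [])


class CollectSecond(pyblish.api.InstancePlugin):
    # Plain CollectorOrder runs after the offset plugin above.
    order = pyblish.api.CollectorOrder
    label = "Collect Second (sketch)"

    def process(self, instance):
        # "tags" is guaranteed to exist because CollectFirst ran already.
        instance.data["tags"].append("example")
```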
@@ -106,7 +106,10 @@ class PrecollectInstances(pyblish.api.ContextPlugin):

             # clip's effect
             "clipEffectItems": subtracks,
-            "clipAnnotations": annotations
+            "clipAnnotations": annotations,
+
+            # add all additional tags
+            "tags": phiero.get_track_item_tags(track_item)
         })

         # otio clip data
@@ -1737,8 +1737,11 @@ def apply_shaders(relationships, shadernodes, nodes):
             log.warning("No nodes found for shading engine "
                         "'{0}'".format(id_shading_engines[0]))
             continue
+        try:
+            cmds.sets(filtered_nodes, forceElement=id_shading_engines[0])
+        except RuntimeError as rte:
+            log.error("Error during shader assignment: {}".format(rte))

-        cmds.sets(filtered_nodes, forceElement=id_shading_engines[0])
     # endregion

     apply_attributes(attributes, nodes_by_id)
@@ -66,13 +66,23 @@ def install():
     log.info("Installing callbacks ... ")
     register_event_callback("init", on_init)

+    # Callbacks below are not required for headless mode, the `init` however
+    # is important to load referenced Alembics correctly at rendertime.
+    if os.environ.get("HEADLESS_PUBLISH"):
+        # Maya launched on farm, lib.IS_HEADLESS might be triggered locally too
+        # target "farm" == rendering on farm, expects OPENPYPE_PUBLISH_DATA
+        # target "remote" == remote execution
+        print("Registering pyblish target: remote")
+        pyblish.api.register_target("remote")
+        return
+
     if lib.IS_HEADLESS:
         log.info(("Running in headless mode, skipping Maya "
                   "save/open/new callback installation.."))

         return

     print("Registering pyblish target: local")
     pyblish.api.register_target("local")

     _set_project()
     _register_callbacks()
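The branch above decides once, at host install time, which pyblish target is active: "remote" for headless farm publishes, "local" otherwise. Plugins then opt in through their `targets` attribute, as the extractors later in this commit do. A condensed sketch with a hypothetical plugin:

```python
import pyblish.api


class ExtractOnFarmOnly(pyblish.api.InstancePlugin):
    """Sketch: only runs when the "remote" target is registered."""
    order = pyblish.api.ExtractorOrder
    label = "Extract On Farm Only (sketch)"
    targets = ["remote"]

    def process(self, instance):
        self.log.info("running on the farm")


# Mirrors the HEADLESS_PUBLISH branch above; with only "local" registered,
# pyblish would skip ExtractOnFarmOnly entirely.
pyblish.api.register_target("remote")
```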
@@ -38,3 +38,7 @@ class CreateAnimation(plugin.Creator):

         # Default to exporting world-space
         self.data["worldSpace"] = True
+
+        # Default to not send to farm.
+        self.data["farm"] = False
+        self.data["priority"] = 50
openpype/hosts/maya/plugins/create/create_multiverse_look.py (new file, 15 lines)

@@ -0,0 +1,15 @@
from openpype.hosts.maya.api import plugin


class CreateMultiverseLook(plugin.Creator):
    """Create Multiverse Look"""

    name = "mvLook"
    label = "Multiverse Look"
    family = "mvLook"
    icon = "cubes"

    def __init__(self, *args, **kwargs):
        super(CreateMultiverseLook, self).__init__(*args, **kwargs)
        self.data["fileFormat"] = ["usda", "usd"]
        self.data["publishMipMap"] = True
@@ -2,11 +2,11 @@ from openpype.hosts.maya.api import plugin, lib


 class CreateMultiverseUsd(plugin.Creator):
-    """Multiverse USD data"""
+    """Create Multiverse USD Asset"""

-    name = "usdMain"
-    label = "Multiverse USD"
-    family = "usd"
+    name = "mvUsdMain"
+    label = "Multiverse USD Asset"
+    family = "mvUsd"
     icon = "cubes"

     def __init__(self, *args, **kwargs):

@@ -15,6 +15,7 @@ class CreateMultiverseUsd(plugin.Creator):
         # Add animation data first, since it maintains order.
         self.data.update(lib.collect_animation_data(True))

+        self.data["fileFormat"] = ["usd", "usda", "usdz"]
         self.data["stripNamespaces"] = False
         self.data["mergeTransformAndShape"] = False
         self.data["writeAncestors"] = True

@@ -45,6 +46,7 @@ class CreateMultiverseUsd(plugin.Creator):
         self.data["writeShadingNetworks"] = False
         self.data["writeTransformMatrix"] = True
         self.data["writeUsdAttributes"] = False
+        self.data["writeInstancesAsReferences"] = False
         self.data["timeVaryingTopology"] = False
         self.data["customMaterialNamespace"] = ''
         self.data["numTimeSamples"] = 1
@@ -4,9 +4,9 @@ from openpype.hosts.maya.api import plugin, lib
 class CreateMultiverseUsdComp(plugin.Creator):
     """Create Multiverse USD Composition"""

-    name = "usdCompositionMain"
+    name = "mvUsdCompositionMain"
     label = "Multiverse USD Composition"
-    family = "usdComposition"
+    family = "mvUsdComposition"
     icon = "cubes"

     def __init__(self, *args, **kwargs):

@@ -15,9 +15,12 @@ class CreateMultiverseUsdComp(plugin.Creator):
         # Add animation data first, since it maintains order.
         self.data.update(lib.collect_animation_data(True))

+        # Order of `fileFormat` must match extract_multiverse_usd_comp.py
+        self.data["fileFormat"] = ["usda", "usd"]
+        self.data["stripNamespaces"] = False
         self.data["mergeTransformAndShape"] = False
         self.data["flattenContent"] = False
         self.data["writeAsCompoundLayers"] = False
         self.data["writePendingOverrides"] = False
         self.data["numTimeSamples"] = 1
         self.data["timeSamplesSpan"] = 0.0
@@ -2,11 +2,11 @@ from openpype.hosts.maya.api import plugin, lib


 class CreateMultiverseUsdOver(plugin.Creator):
-    """Multiverse USD data"""
+    """Create Multiverse USD Override"""

-    name = "usdOverrideMain"
+    name = "mvUsdOverrideMain"
     label = "Multiverse USD Override"
-    family = "usdOverride"
+    family = "mvUsdOverride"
     icon = "cubes"

     def __init__(self, *args, **kwargs):

@@ -15,6 +15,8 @@ class CreateMultiverseUsdOver(plugin.Creator):
         # Add animation data first, since it maintains order.
         self.data.update(lib.collect_animation_data(True))

+        # Order of `fileFormat` must match extract_multiverse_usd_over.py
+        self.data["fileFormat"] = ["usda", "usd"]
         self.data["writeAll"] = False
         self.data["writeTransforms"] = True
         self.data["writeVisibility"] = True
@@ -28,3 +28,7 @@ class CreatePointCache(plugin.Creator):
         # Add options for custom attributes
         self.data["attr"] = ""
         self.data["attrPrefix"] = ""
+
+        # Default to not send to farm.
+        self.data["farm"] = False
+        self.data["priority"] = 50
@@ -14,13 +14,13 @@ from openpype.hosts.maya.api.pipeline import containerise


 class MultiverseUsdLoader(load.LoaderPlugin):
-    """Load the USD by Multiverse"""
+    """Read USD data in a Multiverse Compound"""

-    families = ["model", "usd", "usdComposition", "usdOverride",
+    families = ["model", "mvUsd", "mvUsdComposition", "mvUsdOverride",
                 "pointcache", "animation"]
     representations = ["usd", "usda", "usdc", "usdz", "abc"]

-    label = "Read USD by Multiverse"
+    label = "Load USD to Multiverse"
     order = -10
     icon = "code-fork"
     color = "orange"
@@ -55,3 +55,6 @@ class CollectAnimationOutputGeometry(pyblish.api.InstancePlugin):

         # Store data in the instance for the validator
         instance.data["out_hierarchy"] = hierarchy
+
+        if instance.data.get("farm"):
+            instance.data["families"].append("publish.farm")
@@ -109,16 +109,18 @@ def node_uses_image_sequence(node, node_path):
     """

     # useFrameExtension indicates an explicit image sequence
-    # The following tokens imply a sequence
-    patterns = ["<udim>", "<tile>", "<uvtile>",
-                "u<u>_v<v>", "<frame0", "<f4>"]
+    try:
+        use_frame_extension = cmds.getAttr('%s.useFrameExtension' % node)
+    except ValueError:
+        use_frame_extension = False
+    if use_frame_extension:
+        return True

-    return (use_frame_extension or
-            any(pattern in node_path for pattern in patterns))
+    # The following tokens imply a sequence
+    patterns = ["<udim>", "<tile>", "<uvtile>",
+                "u<u>_v<v>", "<frame0", "<f4>"]
+    node_path_lowered = node_path.lower()
+    return any(pattern in node_path_lowered for pattern in patterns)


 def seq_to_glob(path):
openpype/hosts/maya/plugins/publish/collect_multiverse_look.py (new file, 372 lines)

@@ -0,0 +1,372 @@
import glob
import os
import re

from maya import cmds
import pyblish.api
from openpype.hosts.maya.api import lib

SHAPE_ATTRS = ["castsShadows",
               "receiveShadows",
               "motionBlur",
               "primaryVisibility",
               "smoothShading",
               "visibleInReflections",
               "visibleInRefractions",
               "doubleSided",
               "opposite"]

SHAPE_ATTRS = set(SHAPE_ATTRS)
COLOUR_SPACES = ['sRGB', 'linear', 'auto']
MIPMAP_EXTENSIONS = ['tdl']


def get_look_attrs(node):
    """Returns attributes of a node that are important for the look.

    These are the "changed" attributes (those that have edits applied
    in the current scene).

    Returns:
        list: Attribute names to extract

    """
    # When referenced get only attributes that are "changed since file open"
    # which includes any reference edits, otherwise take *all* user defined
    # attributes
    is_referenced = cmds.referenceQuery(node, isNodeReferenced=True)
    result = cmds.listAttr(node, userDefined=True,
                           changedSinceFileOpen=is_referenced) or []

    # `cbId` is added when a scene is saved, ignore by default
    if "cbId" in result:
        result.remove("cbId")

    # For shapes allow render stat changes
    if cmds.objectType(node, isAType="shape"):
        attrs = cmds.listAttr(node, changedSinceFileOpen=True) or []
        for attr in attrs:
            if attr in SHAPE_ATTRS:
                result.append(attr)
            elif attr.startswith('ai'):
                result.append(attr)

    return result


def node_uses_image_sequence(node):
    """Return whether file node uses an image sequence or single image.

    Determine if a node uses an image sequence or just a single image,
    not always obvious from its file path alone.

    Args:
        node (str): Name of the Maya node

    Returns:
        bool: True if node uses an image sequence

    """

    # useFrameExtension indicates an explicit image sequence
    node_path = get_file_node_path(node).lower()

    # The following tokens imply a sequence
    patterns = ["<udim>", "<tile>", "<uvtile>", "u<u>_v<v>", "<frame0"]

    return (cmds.getAttr('%s.useFrameExtension' % node) or
            any(pattern in node_path for pattern in patterns))


def seq_to_glob(path):
    """Takes an image sequence path and returns it in glob format,
    with the frame number replaced by a '*'.

    Image sequences may be numerical sequences, e.g. /path/to/file.1001.exr
    will return as /path/to/file.*.exr.

    Image sequences may also use tokens to denote sequences, e.g.
    /path/to/texture.<UDIM>.tif will return as /path/to/texture.*.tif.

    Args:
        path (str): the image sequence path

    Returns:
        str: Return glob string that matches the filename pattern.

    """

    if path is None:
        return path

    # If any of the patterns, convert the pattern
    patterns = {
        "<udim>": "<udim>",
        "<tile>": "<tile>",
        "<uvtile>": "<uvtile>",
        "#": "#",
        "u<u>_v<v>": "<u>|<v>",
        "<frame0": "<frame0\d+>",  # noqa - copied from collect_look.py
        "<f>": "<f>"
    }

    lower = path.lower()
    has_pattern = False
    for pattern, regex_pattern in patterns.items():
        if pattern in lower:
            path = re.sub(regex_pattern, "*", path, flags=re.IGNORECASE)
            has_pattern = True

    if has_pattern:
        return path

    base = os.path.basename(path)
    matches = list(re.finditer(r'\d+', base))
    if matches:
        match = matches[-1]
        new_base = '{0}*{1}'.format(base[:match.start()],
                                    base[match.end():])
        head = os.path.dirname(path)
        return os.path.join(head, new_base)
    else:
        return path


def get_file_node_path(node):
    """Get the file path used by a Maya file node.

    Args:
        node (str): Name of the Maya file node

    Returns:
        str: the file path in use

    """
    # if the path appears to be sequence, use computedFileTextureNamePattern,
    # this preserves the <> tag
    if cmds.attributeQuery('computedFileTextureNamePattern',
                           node=node,
                           exists=True):
        plug = '{0}.computedFileTextureNamePattern'.format(node)
        texture_pattern = cmds.getAttr(plug)

        patterns = ["<udim>",
                    "<tile>",
                    "u<u>_v<v>",
                    "<f>",
                    "<frame0",
                    "<uvtile>"]
        lower = texture_pattern.lower()
        if any(pattern in lower for pattern in patterns):
            return texture_pattern

    if cmds.nodeType(node) == 'aiImage':
        return cmds.getAttr('{0}.filename'.format(node))
    if cmds.nodeType(node) == 'RedshiftNormalMap':
        return cmds.getAttr('{}.tex0'.format(node))

    # otherwise use fileTextureName
    return cmds.getAttr('{0}.fileTextureName'.format(node))


def get_file_node_files(node):
    """Return the file paths related to the file node

    Note:
        Will only return existing files. Returns an empty list
        if no valid existing files are linked.

    Returns:
        list: List of full file paths.

    """

    path = get_file_node_path(node)
    path = cmds.workspace(expandName=path)
    if node_uses_image_sequence(node):
        glob_pattern = seq_to_glob(path)
        return glob.glob(glob_pattern)
    elif os.path.exists(path):
        return [path]
    else:
        return []


def get_mipmap(fname):
    for colour_space in COLOUR_SPACES:
        for mipmap_ext in MIPMAP_EXTENSIONS:
            mipmap_fname = '.'.join([fname, colour_space, mipmap_ext])
            if os.path.exists(mipmap_fname):
                return mipmap_fname
    return None


def is_mipmap(fname):
    ext = os.path.splitext(fname)[1][1:]
    if ext in MIPMAP_EXTENSIONS:
        return True
    return False


class CollectMultiverseLookData(pyblish.api.InstancePlugin):
    """Collect Multiverse Look

    """

    order = pyblish.api.CollectorOrder + 0.2
    label = 'Collect Multiverse Look'
    families = ["mvLook"]

    def process(self, instance):
        # Load plugin first
        cmds.loadPlugin("MultiverseForMaya", quiet=True)
        import multiverse

        self.log.info("Processing mvLook for '{}'".format(instance))

        nodes = set()
        for node in instance:
            # We want only mvUsdCompoundShape nodes.
            nodes_of_interest = cmds.ls(node,
                                        dag=True,
                                        shapes=False,
                                        type="mvUsdCompoundShape",
                                        noIntermediate=True,
                                        long=True)
            nodes.update(nodes_of_interest)

        files = []
        sets = {}
        instance.data["resources"] = []
        publishMipMap = instance.data["publishMipMap"]

        for node in nodes:
            self.log.info("Getting resources for '{}'".format(node))

            # We know what nodes need to be collected, now we need to
            # extract the materials overrides.
            overrides = multiverse.ListMaterialOverridePrims(node)
            for override in overrides:
                matOver = multiverse.GetMaterialOverride(node, override)

                if isinstance(matOver, multiverse.MaterialSourceShadingGroup):
                    # We now need to grab the shadingGroup so add it to the
                    # sets we pass down the pipe.
                    shadingGroup = matOver.shadingGroupName
                    self.log.debug("ShadingGroup = '{}'".format(shadingGroup))
                    sets[shadingGroup] = {"uuid": lib.get_id(
                        shadingGroup), "members": list()}

                    # The SG may reference files, add those too!
                    history = cmds.listHistory(shadingGroup)
                    files = cmds.ls(history, type="file", long=True)

                    for f in files:
                        resources = self.collect_resource(f, publishMipMap)
                        instance.data["resources"].append(resources)

                elif isinstance(matOver, multiverse.MaterialSourceUsdPath):
                    # TODO: Handle this later.
                    pass

        # Store data on the instance for validators, extractors, etc.
        instance.data["lookData"] = {
            "attributes": [],
            "relationships": sets
        }

    def collect_resource(self, node, publishMipMap):
        """Collect the link to the file(s) used (resource)
        Args:
            node (str): name of the node

        Returns:
            dict
        """

        self.log.debug("processing: {}".format(node))
        if cmds.nodeType(node) not in ["file", "aiImage", "RedshiftNormalMap"]:
            self.log.error(
                "Unsupported file node: {}".format(cmds.nodeType(node)))
            raise AssertionError("Unsupported file node")

        if cmds.nodeType(node) == 'file':
            self.log.debug("  - file node")
            attribute = "{}.fileTextureName".format(node)
            computed_attribute = "{}.computedFileTextureNamePattern".format(
                node)
        elif cmds.nodeType(node) == 'aiImage':
            self.log.debug("aiImage node")
            attribute = "{}.filename".format(node)
            computed_attribute = attribute
        elif cmds.nodeType(node) == 'RedshiftNormalMap':
            self.log.debug("RedshiftNormalMap node")
            attribute = "{}.tex0".format(node)
            computed_attribute = attribute

        source = cmds.getAttr(attribute)
        self.log.info("  - file source: {}".format(source))
        color_space_attr = "{}.colorSpace".format(node)
        try:
            color_space = cmds.getAttr(color_space_attr)
        except ValueError:
            # node doesn't have colorspace attribute
            color_space = "Raw"
        # Compare with the computed file path, e.g. the one with the <UDIM>
        # pattern in it, to generate some logging information about this
        # difference
        # computed_attribute = "{}.computedFileTextureNamePattern".format(node)
        computed_source = cmds.getAttr(computed_attribute)
        if source != computed_source:
            self.log.debug("Detected computed file pattern difference "
                           "from original pattern: {0} "
                           "({1} -> {2})".format(node,
                                                 source,
                                                 computed_source))

        # We replace backslashes with forward slashes because V-Ray
        # can't handle the UDIM files with the backslashes in the
        # paths as the computed patterns
        source = source.replace("\\", "/")

        files = get_file_node_files(node)
        files = self.handle_files(files, publishMipMap)
        if len(files) == 0:
            self.log.error("No valid files found from node `%s`" % node)

        self.log.info("collection of resource done:")
        self.log.info("  - node: {}".format(node))
        self.log.info("  - attribute: {}".format(attribute))
        self.log.info("  - source: {}".format(source))
        self.log.info("  - file: {}".format(files))
        self.log.info("  - color space: {}".format(color_space))

        # Define the resource
        return {"node": node,
                "attribute": attribute,
                "source": source,  # required for resources
                "files": files,
                "color_space": color_space}  # required for resources

    def handle_files(self, files, publishMipMap):
        """This will go through all the files and make sure that they are
        either already mipmapped or have a corresponding mipmap sidecar and
        add that to the list."""
        if not publishMipMap:
            return files

        extra_files = []
        self.log.debug("Expecting MipMaps, going to look for them.")
        for fname in files:
            self.log.info("Checking '{}' for mipmaps".format(fname))
            if is_mipmap(fname):
                self.log.debug(" - file is already MipMap, skipping.")
                continue

            mipmap = get_mipmap(fname)
            if mipmap:
                self.log.info(" mipmap found for '{}'".format(fname))
                extra_files.append(mipmap)
            else:
                self.log.warning(" no mipmap found for '{}'".format(fname))
        return files + extra_files
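For orientation, `seq_to_glob` above behaves roughly like this (illustrative calls, not part of the commit):

```python
# Token-based sequences: the token is replaced case-insensitively with "*".
seq_to_glob("/path/to/texture.<UDIM>.tif")  # -> "/path/to/texture.*.tif"

# Numerical sequences: the last run of digits in the basename becomes "*".
seq_to_glob("/path/to/file.1001.exr")       # -> "/path/to/file.*.exr"

# No token and no digits: the path comes back unchanged.
seq_to_glob("/path/to/texture.tif")         # -> "/path/to/texture.tif"
```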
openpype/hosts/maya/plugins/publish/collect_pointcache.py (new file, 14 lines)

@@ -0,0 +1,14 @@
import pyblish.api


class CollectPointcache(pyblish.api.InstancePlugin):
    """Collect pointcache data for instance."""

    order = pyblish.api.CollectorOrder + 0.4
    families = ["pointcache"]
    label = "Collect Pointcache"
    hosts = ["maya"]

    def process(self, instance):
        if instance.data.get("farm"):
            instance.data["families"].append("publish.farm")
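This collector closes the loop on the `farm` flag that the creators above default to False: when an artist enables it, the instance gains a `publish.farm` family that Deadline submission plugins can match, while the local extractors skip the heavy export. A condensed sketch of that data flow (values hypothetical):

```python
# Creator defaults (see CreateAnimation / CreatePointCache above).
instance_data = {"farm": False, "priority": 50, "families": ["pointcache"]}

# Artist turns on farm publishing for this instance.
instance_data["farm"] = True

# Collector side, as in CollectPointcache above.
if instance_data.get("farm"):
    instance_data["families"].append("publish.farm")

assert instance_data["families"] == ["pointcache", "publish.farm"]
```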
@@ -340,10 +340,10 @@ class CollectMayaRender(pyblish.api.ContextPlugin):
             "expectedFiles": full_exp_files,
             "publishRenderMetadataFolder": common_publish_meta_path,
             "resolutionWidth": lib.get_attr_in_layer(
-                "defaultResolution.height", layer=layer_name
+                "defaultResolution.width", layer=layer_name
             ),
             "resolutionHeight": lib.get_attr_in_layer(
-                "defaultResolution.width", layer=layer_name
+                "defaultResolution.height", layer=layer_name
             ),
             "pixelAspect": lib.get_attr_in_layer(
                 "defaultResolution.pixelAspect", layer=layer_name
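The two-line fix above untransposes a copy-paste error: `resolutionWidth` had been reading `defaultResolution.height` and vice versa, which only cancels out on square resolutions. Illustrated with a hypothetical 1920x1080 layer:

```python
layer_attrs = {"defaultResolution.width": 1920,
               "defaultResolution.height": 1080}

# Before the fix the keys were transposed:
buggy = {"resolutionWidth": layer_attrs["defaultResolution.height"],   # 1080
         "resolutionHeight": layer_attrs["defaultResolution.width"]}   # 1920

# After the fix each field reads its matching attribute:
fixed = {"resolutionWidth": layer_attrs["defaultResolution.width"],    # 1920
         "resolutionHeight": layer_attrs["defaultResolution.height"]}  # 1080

assert fixed["resolutionWidth"] == 1920
```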
@@ -16,13 +16,19 @@ class ExtractAnimation(openpype.api.Extractor):
     Positions and normals, uvs, creases are preserved, but nothing more,
     for plain and predictable point caches.

+    Plugin can run locally or remotely (on a farm - if instance is marked with
+    "farm" it will be skipped in local processing, but processed on farm)
     """

     label = "Extract Animation"
     hosts = ["maya"]
     families = ["animation"]
+    targets = ["local", "remote"]

     def process(self, instance):
+        if instance.data.get("farm"):
+            self.log.debug("Should be processed on farm, skipping.")
+            return
+
         # Collect the out set nodes
         out_sets = [node for node in instance if node.endswith("out_SET")]

@@ -89,4 +95,6 @@ class ExtractAnimation(openpype.api.Extractor):
         }
         instance.data["representations"].append(representation)

+        instance.context.data["cleanupFullPaths"].append(path)
+
         self.log.info("Extracted {} to {}".format(instance, dirname))
@@ -146,7 +146,7 @@ class ExtractLook(openpype.api.Extractor):

     label = "Extract Look (Maya Scene + JSON)"
     hosts = ["maya"]
-    families = ["look"]
+    families = ["look", "mvLook"]
     order = pyblish.api.ExtractorOrder + 0.2
     scene_type = "ma"
     look_data_type = "json"
openpype/hosts/maya/plugins/publish/extract_multiverse_look.py (new file, 157 lines)

@@ -0,0 +1,157 @@
import os

from maya import cmds

import openpype.api
from openpype.hosts.maya.api.lib import maintained_selection


class ExtractMultiverseLook(openpype.api.Extractor):
    """Extractor for Multiverse USD look data.

    This will extract:

    - the shading networks that are assigned in MEOW as Maya material overrides
      to a Multiverse Compound
    - settings for a Multiverse Write Override operation.

    Relevant settings are visible in the Maya set node created by a Multiverse
    USD Look instance creator.

    The input data contained in the set is:

    - a single Multiverse Compound node with any number of Maya material
      overrides (typically set in MEOW)

    Upon publish two files will be written:

    - a .usda override file containing material assignment information
    - a .ma file containing shading networks

    Note: when layering the material assignment override on a loaded Compound,
    remember to set a matching attribute override with the namespace of
    the loaded compound in order for the material assignment to resolve.
    """

    label = "Extract Multiverse USD Look"
    hosts = ["maya"]
    families = ["mvLook"]
    scene_type = "usda"
    file_formats = ["usda", "usd"]

    @property
    def options(self):
        """Overridable options for Multiverse USD Export

        Given in the following format
        - {NAME: EXPECTED TYPE}

        If the overridden option's type does not match,
        the option is not included and a warning is logged.

        """

        return {
            "writeAll": bool,
            "writeTransforms": bool,
            "writeVisibility": bool,
            "writeAttributes": bool,
            "writeMaterials": bool,
            "writeVariants": bool,
            "writeVariantsDefinition": bool,
            "writeActiveState": bool,
            "writeNamespaces": bool,
            "numTimeSamples": int,
            "timeSamplesSpan": float
        }

    @property
    def default_options(self):
        """The default options for Multiverse USD extraction."""

        return {
            "writeAll": False,
            "writeTransforms": False,
            "writeVisibility": False,
            "writeAttributes": False,
            "writeMaterials": True,
            "writeVariants": False,
            "writeVariantsDefinition": False,
            "writeActiveState": False,
            "writeNamespaces": False,
            "numTimeSamples": 1,
            "timeSamplesSpan": 0.0
        }

    def get_file_format(self, instance):
        fileFormat = instance.data["fileFormat"]
        if fileFormat in range(len(self.file_formats)):
            self.scene_type = self.file_formats[fileFormat]

    def process(self, instance):
        # Load plugin first
        cmds.loadPlugin("MultiverseForMaya", quiet=True)

        # Define output file path
        staging_dir = self.staging_dir(instance)
        self.get_file_format(instance)
        file_name = "{0}.{1}".format(instance.name, self.scene_type)
        file_path = os.path.join(staging_dir, file_name)
        file_path = file_path.replace('\\', '/')

        # Parse export options
        options = self.default_options
        self.log.info("Export options: {0}".format(options))

        # Perform extraction
        self.log.info("Performing extraction ...")

        with maintained_selection():
            members = instance.data("setMembers")
            members = cmds.ls(members,
                              dag=True,
                              shapes=False,
                              type="mvUsdCompoundShape",
                              noIntermediate=True,
                              long=True)
            self.log.info('Collected object {}'.format(members))
            if len(members) > 1:
                self.log.error('More than one member: {}'.format(members))

            import multiverse

            over_write_opts = multiverse.OverridesWriteOptions()
            options_discard_keys = {
                "numTimeSamples",
                "timeSamplesSpan",
                "frameStart",
                "frameEnd",
                "handleStart",
                "handleEnd",
                "step",
                "fps"
            }
            for key, value in options.items():
                if key in options_discard_keys:
                    continue
                setattr(over_write_opts, key, value)

            for member in members:
                # @TODO: Make sure there is only one here.

                self.log.debug("Writing Override for '{}'".format(member))
                multiverse.WriteOverrides(file_path, member, over_write_opts)

        if "representations" not in instance.data:
            instance.data["representations"] = []

        representation = {
            'name': self.scene_type,
            'ext': self.scene_type,
            'files': file_name,
            'stagingDir': staging_dir
        }
        instance.data["representations"].append(representation)

        self.log.info("Extracted instance {} to {}".format(
            instance.name, file_path))
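The `options` / `default_options` pair above encodes a small type-checked override scheme: `options` declares the expected type per setting, `default_options` supplies the baseline, and the sibling USD extractors in this commit merge instance data over the defaults, dropping overrides whose type does not match. A standalone sketch of that merge (the helper name is hypothetical):

```python
def merge_options(expected_types, defaults, instance_data):
    """Sketch of the type-checked option merge these extractors use."""
    merged = dict(defaults)
    for name, expected_type in expected_types.items():
        if name not in instance_data:
            continue
        value = instance_data[name]
        if isinstance(value, expected_type):
            merged[name] = value
        else:
            # The plugins log a warning and skip mismatched overrides.
            print("warning: ignoring option '{}' of wrong type".format(name))
    return merged


merged = merge_options({"writeAll": bool},
                       {"writeAll": False},
                       {"writeAll": True})
assert merged["writeAll"] is True
```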
@@ -8,11 +8,27 @@ from openpype.hosts.maya.api.lib import maintained_selection


 class ExtractMultiverseUsd(openpype.api.Extractor):
-    """Extractor for USD by Multiverse."""
+    """Extractor for Multiverse USD Asset data.

-    label = "Extract Multiverse USD"
+    This will extract settings for a Multiverse Write Asset operation:
+    they are visible in the Maya set node created by a Multiverse USD
+    Asset instance creator.
+
+    The input data contained in the set is:
+
+    - a single hierarchy of Maya nodes. Multiverse supports a variety of Maya
+      nodes such as transforms, mesh, curves, particles, instances, particle
+      instancers, pfx, MASH, lights, cameras, joints, connected materials,
+      shading networks etc. including many of their attributes.
+
+    Upon publish a .usd (or .usdz) asset file will be typically written.
+    """
+
+    label = "Extract Multiverse USD Asset"
     hosts = ["maya"]
-    families = ["usd"]
+    families = ["mvUsd"]
     scene_type = "usd"
+    file_formats = ["usd", "usda", "usdz"]

     @property
     def options(self):

@@ -57,6 +73,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor):
             "writeShadingNetworks": bool,
             "writeTransformMatrix": bool,
             "writeUsdAttributes": bool,
+            "writeInstancesAsReferences": bool,
             "timeVaryingTopology": bool,
             "customMaterialNamespace": str,
             "numTimeSamples": int,

@@ -98,6 +115,7 @@ class ExtractMultiverseUsd(openpype.api.Extractor):
             "writeShadingNetworks": False,
             "writeTransformMatrix": True,
             "writeUsdAttributes": False,
+            "writeInstancesAsReferences": False,
             "timeVaryingTopology": False,
             "customMaterialNamespace": str(),
             "numTimeSamples": 1,

@@ -130,12 +148,15 @@ class ExtractMultiverseUsd(openpype.api.Extractor):
         return options

     def process(self, instance):
-        # Load plugin firstly
+        # Load plugin first
         cmds.loadPlugin("MultiverseForMaya", quiet=True)

         # Define output file path
         staging_dir = self.staging_dir(instance)
-        file_name = "{}.usd".format(instance.name)
+        file_format = instance.data.get("fileFormat", 0)
+        if file_format in range(len(self.file_formats)):
+            self.scene_type = self.file_formats[file_format]
+        file_name = "{0}.{1}".format(instance.name, self.scene_type)
         file_path = os.path.join(staging_dir, file_name)
         file_path = file_path.replace('\\', '/')

@@ -149,12 +170,6 @@ class ExtractMultiverseUsd(openpype.api.Extractor):

         with maintained_selection():
             members = instance.data("setMembers")
-            members = cmds.ls(members,
-                              dag=True,
-                              shapes=True,
-                              type=("mesh"),
-                              noIntermediate=True,
-                              long=True)
             self.log.info('Collected object {}'.format(members))

             import multiverse

@@ -199,10 +214,10 @@ class ExtractMultiverseUsd(openpype.api.Extractor):
             instance.data["representations"] = []

         representation = {
-            'name': 'usd',
-            'ext': 'usd',
+            'name': self.scene_type,
+            'ext': self.scene_type,
             'files': file_name,
-            "stagingDir": staging_dir
+            'stagingDir': staging_dir
         }
         instance.data["representations"].append(representation)
@@ -7,11 +7,28 @@ from openpype.hosts.maya.api.lib import maintained_selection


 class ExtractMultiverseUsdComposition(openpype.api.Extractor):
-    """Extractor of Multiverse USD Composition."""
+    """Extractor of Multiverse USD Composition data.
+
+    This will extract settings for a Multiverse Write Composition operation:
+    they are visible in the Maya set node created by a Multiverse USD
+    Composition instance creator.
+
+    The input data contained in the set is either:
+
+    - a single hierarchy consisting of several Multiverse Compound nodes, with
+      any number of layers, and Maya transform nodes
+    - a single Compound node with more than one layer (in this case the "Write
+      as Compound Layers" option should be set).
+
+    Upon publish a .usda composition file will be written.
+    """

     label = "Extract Multiverse USD Composition"
     hosts = ["maya"]
-    families = ["usdComposition"]
+    families = ["mvUsdComposition"]
     scene_type = "usd"
+    # Order of `fileFormat` must match create_multiverse_usd_comp.py
+    file_formats = ["usda", "usd"]

     @property
     def options(self):

@@ -29,6 +46,7 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor):
             "stripNamespaces": bool,
             "mergeTransformAndShape": bool,
             "flattenContent": bool,
+            "writeAsCompoundLayers": bool,
             "writePendingOverrides": bool,
             "numTimeSamples": int,
             "timeSamplesSpan": float

@@ -42,6 +60,7 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor):
             "stripNamespaces": True,
             "mergeTransformAndShape": False,
             "flattenContent": False,
+            "writeAsCompoundLayers": False,
             "writePendingOverrides": False,
             "numTimeSamples": 1,
             "timeSamplesSpan": 0.0

@@ -71,12 +90,15 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor):
         return options

     def process(self, instance):
-        # Load plugin firstly
+        # Load plugin first
         cmds.loadPlugin("MultiverseForMaya", quiet=True)

         # Define output file path
         staging_dir = self.staging_dir(instance)
-        file_name = "{}.usd".format(instance.name)
+        file_format = instance.data.get("fileFormat", 0)
+        if file_format in range(len(self.file_formats)):
+            self.scene_type = self.file_formats[file_format]
+        file_name = "{0}.{1}".format(instance.name, self.scene_type)
         file_path = os.path.join(staging_dir, file_name)
         file_path = file_path.replace('\\', '/')

@@ -90,12 +112,6 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor):

         with maintained_selection():
             members = instance.data("setMembers")
-            members = cmds.ls(members,
-                              dag=True,
-                              shapes=True,
-                              type="mvUsdCompoundShape",
-                              noIntermediate=True,
-                              long=True)
             self.log.info('Collected object {}'.format(members))

             import multiverse

@@ -119,6 +135,18 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor):
             time_opts.framePerSecond = fps

         comp_write_opts = multiverse.CompositionWriteOptions()

+        """
+        OP tells MV to write to a staging directory, and then moves the
+        file to its final publish directory. By default, MV writes relative
+        paths, but these paths will break when the referencing file moves.
+        This option forces writes to absolute paths, which is ok within OP
+        because all published assets have static paths, and MV can only
+        reference published assets. When a proper UsdAssetResolver is used,
+        this won't be needed.
+        """
+        comp_write_opts.forceAbsolutePaths = True
+
         options_discard_keys = {
             'numTimeSamples',
             'timeSamplesSpan',

@@ -140,10 +168,10 @@ class ExtractMultiverseUsdComposition(openpype.api.Extractor):
             instance.data["representations"] = []

         representation = {
-            'name': 'usd',
-            'ext': 'usd',
+            'name': self.scene_type,
+            'ext': self.scene_type,
             'files': file_name,
-            "stagingDir": staging_dir
+            'stagingDir': staging_dir
         }
         instance.data["representations"].append(representation)
@@ -7,11 +7,26 @@ from maya import cmds


 class ExtractMultiverseUsdOverride(openpype.api.Extractor):
-    """Extractor for USD Override by Multiverse."""
+    """Extractor for Multiverse USD Override data.
+
+    This will extract settings for a Multiverse Write Override operation:
+    they are visible in the Maya set node created by a Multiverse USD
+    Override instance creator.
+
+    The input data contained in the set is:
+
+    - a single Multiverse Compound node with any number of overrides (typically
+      set in MEOW)
+
+    Upon publish a .usda override file will be written.
+    """

     label = "Extract Multiverse USD Override"
     hosts = ["maya"]
-    families = ["usdOverride"]
+    families = ["mvUsdOverride"]
     scene_type = "usd"
+    # Order of `fileFormat` must match create_multiverse_usd_over.py
+    file_formats = ["usda", "usd"]

     @property
     def options(self):

@@ -58,12 +73,15 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor):
         }

     def process(self, instance):
-        # Load plugin firstly
+        # Load plugin first
         cmds.loadPlugin("MultiverseForMaya", quiet=True)

         # Define output file path
         staging_dir = self.staging_dir(instance)
-        file_name = "{}.usda".format(instance.name)
+        file_format = instance.data.get("fileFormat", 0)
+        if file_format in range(len(self.file_formats)):
+            self.scene_type = self.file_formats[file_format]
+        file_name = "{0}.{1}".format(instance.name, self.scene_type)
         file_path = os.path.join(staging_dir, file_name)
         file_path = file_path.replace("\\", "/")

@@ -78,7 +96,7 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor):
             members = instance.data("setMembers")
             members = cmds.ls(members,
                               dag=True,
-                              shapes=True,
+                              shapes=False,
                               type="mvUsdCompoundShape",
                               noIntermediate=True,
                               long=True)

@@ -128,10 +146,10 @@ class ExtractMultiverseUsdOverride(openpype.api.Extractor):
             instance.data["representations"] = []

         representation = {
-            "name": "usd",
-            "ext": "usd",
-            "files": file_name,
-            "stagingDir": staging_dir
+            'name': self.scene_type,
+            'ext': self.scene_type,
+            'files': file_name,
+            'stagingDir': staging_dir
         }
         instance.data["representations"].append(representation)
@@ -16,6 +16,8 @@ class ExtractAlembic(openpype.api.Extractor):
     Positions and normals, uvs, creases are preserved, but nothing more,
     for plain and predictable point caches.

+    Plugin can run locally or remotely (on a farm - if instance is marked with
+    "farm" it will be skipped in local processing, but processed on farm)
     """

     label = "Extract Pointcache (Alembic)"

@@ -23,8 +25,12 @@ class ExtractAlembic(openpype.api.Extractor):
     families = ["pointcache",
                 "model",
                 "vrayproxy"]
+    targets = ["local", "remote"]

     def process(self, instance):
+        if instance.data.get("farm"):
+            self.log.debug("Should be processed on farm, skipping.")
+            return
+
         nodes = instance[:]

@@ -92,4 +98,6 @@ class ExtractAlembic(openpype.api.Extractor):
         }
         instance.data["representations"].append(representation)

+        instance.context.data["cleanupFullPaths"].append(path)
+
         self.log.info("Extracted {} to {}".format(instance, dirname))
@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<root>
    <error id="main">
        <title>Errors found</title>
        <description>
## Publish process has errors

At least one plugin failed before this plugin, job won't be sent to Deadline for processing before all issues are fixed.

### How to repair?

Check all failing plugins (should be highlighted in red) and fix issues if possible.
        </description>

    </error>
</root>
@@ -30,6 +30,10 @@ class ValidateAnimationContent(pyblish.api.InstancePlugin):

         assert 'out_hierarchy' in instance.data, "Missing `out_hierarchy` data"

+        out_sets = [node for node in instance if node.endswith("out_SET")]
+        msg = "Couldn't find exactly one out_SET: {0}".format(out_sets)
+        assert len(out_sets) == 1, msg
+
         # All nodes in the `out_hierarchy` must be among the nodes that are
         # in the instance. The nodes in the instance are found from the top
         # group, as such this tests whether all nodes are under that top group.
@@ -0,0 +1,92 @@
import pyblish.api
import openpype.api
import openpype.hosts.maya.api.action

import os

COLOUR_SPACES = ['sRGB', 'linear', 'auto']
MIPMAP_EXTENSIONS = ['tdl']


class ValidateMvLookContents(pyblish.api.InstancePlugin):
    order = openpype.api.ValidateContentsOrder
    families = ['mvLook']
    hosts = ['maya']
    label = 'Validate mvLook Data'
    actions = [openpype.hosts.maya.api.action.SelectInvalidAction]

    # Allow this validation step to be skipped when you just need to
    # get things pushed through.
    optional = True

    # These intents get enforced checks, other ones get warnings.
    enforced_intents = ['-', 'Final']

    def process(self, instance):
        intent = instance.context.data['intent']['value']
        publishMipMap = instance.data["publishMipMap"]
        enforced = True
        if intent in self.enforced_intents:
            self.log.info("This validation will be enforced: '{}'"
                          .format(intent))
        else:
            enforced = False
            self.log.info("This validation will NOT be enforced: '{}'"
                          .format(intent))

        if not instance[:]:
            raise RuntimeError("Instance is empty")

        invalid = set()

        resources = instance.data.get("resources", [])
        for resource in resources:
            files = resource["files"]
            self.log.debug("Resource '{}', files: [{}]".format(resource, files))
            node = resource["node"]
            if len(files) == 0:
                self.log.error("File node '{}' uses no or non-existing "
                               "files".format(node))
                invalid.add(node)
                continue
            for fname in files:
                if not self.valid_file(fname):
                    self.log.error("File node '{}'/'{}' is not valid"
                                   .format(node, fname))
                    invalid.add(node)

                if publishMipMap and not self.is_or_has_mipmap(fname, files):
                    msg = "File node '{}'/'{}' does not have a mipmap".format(
                        node, fname)
                    if enforced:
                        invalid.add(node)
                        self.log.error(msg)
                        raise RuntimeError(msg)
                    else:
                        self.log.warning(msg)

        if invalid:
            raise RuntimeError("'{}' has invalid look "
                               "content".format(instance.name))

    def valid_file(self, fname):
        self.log.debug("Checking validity of '{}'".format(fname))
        if not os.path.exists(fname):
            return False
        if os.path.getsize(fname) == 0:
            return False
        return True

    def is_or_has_mipmap(self, fname, files):
        ext = os.path.splitext(fname)[1][1:]
        if ext in MIPMAP_EXTENSIONS:
            self.log.debug("Is a mipmap '{}'".format(fname))
            return True

        for colour_space in COLOUR_SPACES:
            for mipmap_ext in MIPMAP_EXTENSIONS:
                mipmap_fname = '.'.join([fname, colour_space, mipmap_ext])
                if mipmap_fname in files:
                    self.log.debug("Has a mipmap '{}'".format(fname))
                    return True
        return False
openpype/hosts/nuke/api/gizmo_menu.py (new file, 86 lines)
@@ -0,0 +1,86 @@
import os
import re
import nuke

from openpype.api import Logger

log = Logger.get_logger(__name__)


class GizmoMenu():
    def __init__(self, title, icon=None):

        self.toolbar = self._create_toolbar_menu(
            title,
            icon=icon
        )

        self._script_actions = []

    def _create_toolbar_menu(self, name, icon=None):
        nuke_node_menu = nuke.menu("Nodes")
        return nuke_node_menu.addMenu(
            name,
            icon=icon
        )

    def _make_menu_path(self, path, icon=None):
        parent = self.toolbar
        for folder in re.split(r"/|\\", path):
            if not folder:
                continue
            existing_menu = parent.findItem(folder)
            if existing_menu:
                parent = existing_menu
            else:
                parent = parent.addMenu(folder, icon=icon)

        return parent

    def build_from_configuration(self, configuration):
        for menu in configuration:
            # Construct parent path else parent is toolbar
            parent = self.toolbar
            gizmo_toolbar_path = menu.get("gizmo_toolbar_path")
            if gizmo_toolbar_path:
                parent = self._make_menu_path(gizmo_toolbar_path)

            for item in menu["sub_gizmo_list"]:
                assert isinstance(item, dict), "Configuration is wrong!"

                if not item.get("title"):
                    continue

                item_type = item.get("sourcetype")

                if item_type in ("python", "file"):
                    parent.addCommand(
                        item["title"],
                        command=str(item["command"]),
                        icon=item.get("icon"),
                        shortcut=item.get("hotkey")
                    )

                # Special behavior for separators
                elif item_type == "separator":
                    parent.addSeparator()

                # add submenu
                # items should hold a collection of submenu items (dict)
                elif item_type == "menu":
                    # assert "items" in item, "Menu is missing 'items' key"
                    parent.addMenu(
                        item['title'],
                        icon=item.get('icon')
                    )

    def add_gizmo_path(self, gizmo_paths):
        for gizmo_path in gizmo_paths:
            if os.path.isdir(gizmo_path):
                for folder in os.listdir(gizmo_path):
                    if os.path.isdir(os.path.join(gizmo_path, folder)):
                        nuke.pluginAddPath(os.path.join(gizmo_path, folder))
                nuke.pluginAddPath(gizmo_path)
            else:
                log.warning("This path doesn't exist: {}".format(gizmo_path))
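To make the expected configuration shape concrete, here is a hedged usage sketch of `GizmoMenu`; it only works inside a running Nuke session, and the toolbar name, paths, and gizmo titles are invented for illustration.

# Hypothetical configuration matching what build_from_configuration() expects.
# Only meaningful inside a running Nuke session.
from openpype.hosts.nuke.api.gizmo_menu import GizmoMenu

configuration = [
    {
        "gizmo_toolbar_path": "Studio/Comp",
        "sub_gizmo_list": [
            {"sourcetype": "python", "title": "My Gizmo",
             "command": "nuke.createNode('MyGizmo')", "hotkey": "F8"},
            {"sourcetype": "separator", "title": "sep"},
            {"sourcetype": "menu", "title": "Extras"},
        ],
    }
]

menu = GizmoMenu(title="Studio Gizmos")
menu.add_gizmo_path(["/studio/nuke/gizmos"])  # made-up path
menu.build_from_configuration(configuration)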
@@ -30,6 +30,8 @@ from openpype.pipeline import (
    legacy_io,
)

from . import gizmo_menu

from .workio import (
    save_file,
    open_file
@@ -2498,6 +2500,70 @@ def recreate_instance(origin_node, avalon_data=None):
    return new_node


def add_scripts_gizmo():

    # load configuration of custom menu
    project_settings = get_project_settings(os.getenv("AVALON_PROJECT"))
    platform_name = platform.system().lower()

    for gizmo_settings in project_settings["nuke"]["gizmo"]:
        gizmo_list_definition = gizmo_settings["gizmo_definition"]
        toolbar_name = gizmo_settings["toolbar_menu_name"]
        # gizmo_toolbar_path = gizmo_settings["gizmo_toolbar_path"]
        gizmo_source_dir = gizmo_settings.get(
            "gizmo_source_dir", {}).get(platform_name)
        toolbar_icon_path = gizmo_settings.get(
            "toolbar_icon_path", {}).get(platform_name)

        if not gizmo_source_dir:
            log.debug("Skipping studio gizmo `{}`, "
                      "no gizmo path found.".format(toolbar_name)
                      )
            return

        if not gizmo_list_definition:
            log.debug("Skipping studio gizmo `{}`, "
                      "no definition found.".format(toolbar_name)
                      )
            return

        if toolbar_icon_path:
            try:
                toolbar_icon_path = toolbar_icon_path.format(**os.environ)
            except KeyError as e:
                log.error(
                    "This environment variable doesn't exist: {}".format(e)
                )

        existing_gizmo_path = []
        for source_dir in gizmo_source_dir:
            try:
                resolve_source_dir = source_dir.format(**os.environ)
            except KeyError as e:
                log.error(
                    "This environment variable doesn't exist: {}".format(e)
                )
                continue
            if not os.path.exists(resolve_source_dir):
                log.warning(
                    "The source of gizmo `{}` does not exist".format(
                        resolve_source_dir
                    )
                )
                continue
            existing_gizmo_path.append(resolve_source_dir)

        # run the launcher for Nuke toolbar
        toolbar_menu = gizmo_menu.GizmoMenu(
            title=toolbar_name,
            icon=toolbar_icon_path
        )

        # apply configuration
        toolbar_menu.add_gizmo_path(existing_gizmo_path)
        toolbar_menu.build_from_configuration(gizmo_list_definition)


class NukeDirmap(HostDirmap):
    def __init__(self, host_name, project_settings, sync_module, file_name):
        """
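For orientation, the `gizmo` project-settings entry that `add_scripts_gizmo` consumes would look roughly like the following; the exact schema lives in OpenPype's settings definitions, so treat this as an inferred sketch rather than the authoritative shape.

# Inferred from how add_scripts_gizmo() reads the settings; values are
# illustrative, not a real studio configuration.
gizmo_settings_example = {
    "toolbar_menu_name": "Studio Gizmos",
    "gizmo_source_dir": {
        "windows": ["P:/nuke/gizmos"],
        "linux": ["/studio/nuke/gizmos"],
        "darwin": ["/Volumes/studio/nuke/gizmos"],
    },
    "toolbar_icon_path": {
        "windows": "{STUDIO_RESOURCES}/icons/gizmo.png",
        "linux": "{STUDIO_RESOURCES}/icons/gizmo.png",
        "darwin": "{STUDIO_RESOURCES}/icons/gizmo.png",
    },
    "gizmo_definition": [
        {
            "gizmo_toolbar_path": "Studio/Comp",
            "sub_gizmo_list": [
                {"sourcetype": "python", "title": "My Gizmo",
                 "command": "nuke.createNode('MyGizmo')"},
            ],
        }
    ],
}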
@@ -8,7 +8,8 @@ from openpype.hosts.nuke.api.lib import (
    on_script_load,
    check_inventory_versions,
    WorkfileSettings,
-    dirmap_file_name_filter
+    dirmap_file_name_filter,
+    add_scripts_gizmo
)
from openpype.settings import get_project_settings
@@ -59,3 +60,5 @@ def add_scripts_menu():


add_scripts_menu()

add_scripts_gizmo()
@@ -14,7 +14,7 @@ import unreal  # noqa
class SkeletalMeshFBXLoader(plugin.Loader):
    """Load Unreal SkeletalMesh from FBX."""

-    families = ["rig"]
+    families = ["rig", "skeletalMesh"]
    label = "Import FBX Skeletal Mesh"
    representations = ["fbx"]
    icon = "cube"
@@ -14,7 +14,7 @@ import unreal  # noqa
class StaticMeshFBXLoader(plugin.Loader):
    """Load Unreal StaticMesh from FBX."""

-    families = ["model", "unrealStaticMesh"]
+    families = ["model", "staticMesh"]
    label = "Import FBX Static Mesh"
    representations = ["fbx"]
    icon = "cube"
@@ -1282,7 +1282,13 @@ class EnvironmentPrepData(dict):


def get_app_environments_for_context(
-    project_name, asset_name, task_name, app_name, env_group=None, env=None
+    project_name,
+    asset_name,
+    task_name,
+    app_name,
+    env_group=None,
+    env=None,
+    modules_manager=None
):
    """Prepare environment variables by context.

    Args:
@@ -1293,10 +1299,12 @@
            by ApplicationManager.
        env (dict): Initial environment variables. `os.environ` is used when
            not passed.
        modules_manager (ModulesManager): Initialized modules manager.

    Returns:
        dict: Environments for passed context and application.
    """

    from openpype.pipeline import AvalonMongoDB

    # Avalon database connection
@@ -1311,6 +1319,11 @@
        "name": asset_name
    })

    if modules_manager is None:
        from openpype.modules import ModulesManager

        modules_manager = ModulesManager()

    # Prepare app object which can be obtained only from ApplicationManager
    app_manager = ApplicationManager()
    app = app_manager.applications[app_name]
@@ -1334,7 +1347,7 @@
        "env": env
    })

-    prepare_app_environments(data, env_group)
+    prepare_app_environments(data, env_group, modules_manager=modules_manager)
    prepare_context_environments(data, env_group)

    # Discard avalon connection
@@ -1355,9 +1368,12 @@ def _merge_env(env, current_env):
    return result


-def _add_python_version_paths(app, env, logger):
+def _add_python_version_paths(app, env, logger, modules_manager):
    """Add vendor packages specific for a Python version."""

    for module in modules_manager.get_enabled_modules():
        module.modify_application_launch_arguments(app, env)

    # Skip adding if host name is not set
    if not app.host_name:
        return
@@ -1390,7 +1406,9 @@ def _add_python_version_paths(app, env, logger):
    env["PYTHONPATH"] = os.pathsep.join(python_paths)


-def prepare_app_environments(data, env_group=None, implementation_envs=True):
+def prepare_app_environments(
+    data, env_group=None, implementation_envs=True, modules_manager=None
+):
    """Modify launch environments based on launched app and context.

    Args:
@@ -1403,7 +1421,12 @@ def prepare_app_environments(data, env_group=None, implementation_envs=True):
    log = data["log"]
    source_env = data["env"].copy()

-    _add_python_version_paths(app, source_env, log)
+    if modules_manager is None:
+        from openpype.modules import ModulesManager
+
+        modules_manager = ModulesManager()
+
+    _add_python_version_paths(app, source_env, log, modules_manager)

    # Use environments from local settings
    filtered_local_envs = {}
@@ -60,7 +60,7 @@ def start_webpublish_log(dbcon, batch_id, user):
    }).inserted_id


-def publish(log, close_plugin_name=None):
+def publish(log, close_plugin_name=None, raise_error=False):
    """Loops through all plugins, logs to console. Used for tests.

    Args:
@@ -79,10 +79,15 @@ def publish(log, close_plugin_name=None):
            result["plugin"].label, record.msg))

        if result["error"]:
-            log.error(error_format.format(**result))
+            error_message = error_format.format(**result)
+            log.error(error_message)
            if close_plugin:  # close host app explicitly after error
                context = pyblish.api.Context()
                close_plugin().process(context)
+            if raise_error:
+                # Fatal Error is because of Deadline
+                error_message = "Fatal Error: " + error_format.format(**result)
+                raise RuntimeError(error_message)


def publish_and_log(dbcon, _id, log, close_plugin_name=None, batch_id=None):
@@ -228,7 +233,7 @@ def _get_close_plugin(close_plugin_name, log):
        if plugin.__name__ == close_plugin_name:
            return plugin

-    log.warning("Close plugin not found, app might not close.")
+    log.debug("Close plugin not found, app might not close.")


def get_task_data(batch_dir):
@@ -116,7 +116,10 @@ def get_oiio_tools_path(tool="oiiotool"):
        tool (string): Tool name (oiiotool, maketx, ...).
            Default is "oiiotool".
    """

    oiio_dir = get_vendor_bin_path("oiio")
    if platform.system().lower() == "linux":
        oiio_dir = os.path.join(oiio_dir, "bin")
    return find_executable(os.path.join(oiio_dir, tool))
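A small usage sketch of the resolver above; `get_vendor_bin_path` and `find_executable` come from the same library module, and the resolved path will differ per platform and install, so treat the output as illustrative.

# Hedged usage sketch: resolve the bundled maketx binary, if present.
from openpype.lib import get_oiio_tools_path

maketx = get_oiio_tools_path("maketx")
if maketx:
    print("maketx resolved to:", maketx)
else:
    print("maketx not found in the vendored oiio directory")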
@@ -370,6 +370,7 @@ def _load_modules():

class _OpenPypeInterfaceMeta(ABCMeta):
    """OpenPypeInterface meta class to print proper string."""

    def __str__(self):
        return "<'OpenPypeInterface.{}'>".format(self.__name__)
@@ -388,6 +389,7 @@ class OpenPypeInterface:
    OpenPype modules which means they have to have implemented methods defined
    in the interface. By default interface does not have any abstract parts.
    """

    pass
@@ -432,10 +434,12 @@ class OpenPypeModule:
    It is not recommended to override __init__ that's why specific method
    was implemented.
    """

    pass

    def connect_with_modules(self, enabled_modules):
        """Connect with other enabled modules."""

        pass

    def get_global_environments(self):
@@ -443,8 +447,22 @@ class OpenPypeModule:

        Environment variables that can be retrieved only from system settings.
        """

        return {}

    def modify_application_launch_arguments(self, application, env):
        """Give option to modify launch environments before application launch.

        Implementation is optional. To change environments modify passed
        dictionary of environments.

        Args:
            application (Application): Application that is launched.
            env (dict): Current environment variables.
        """

        pass

    def cli(self, module_click_group):
        """Add commands to click group.
@@ -465,6 +483,7 @@ class OpenPypeModule:
        def mycommand():
            print("my_command")
        """

        pass
@@ -886,6 +905,7 @@ class TrayModulesManager(ModulesManager):
    modules_menu_order = (
        "user",
        "ftrack",
        "kitsu",
        "muster",
        "launcher_tool",
        "avalon",
@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
"""Collect instances that should be processed and published on DL.

"""
import os

import pyblish.api
from openpype.pipeline import PublishValidationError


class CollectDeadlinePublishableInstances(pyblish.api.InstancePlugin):
    """Collect instances that should be processed and published on DL.

    Some long running publishes (not just renders) could be offloaded to DL;
    this plugin compares their names against an env variable and marks only
    the matching one as publishable by the farm.

    Triggered only when running in headless mode, eg on a farm.
    """

    order = pyblish.api.CollectorOrder + 0.499
    label = "Collect Deadline Publishable Instance"
    targets = ["remote"]

    def process(self, instance):
        self.log.debug("CollectDeadlinePublishableInstances")
        publish_inst = os.environ.get("OPENPYPE_PUBLISH_SUBSET", '')
        if not publish_inst:
            raise PublishValidationError("OPENPYPE_PUBLISH_SUBSET env var "
                                         "required for remote publishing")

        subset_name = instance.data["subset"]
        if subset_name == publish_inst:
            self.log.debug("Publish {}".format(subset_name))
            instance.data["publish"] = True
            instance.data["farm"] = False
        else:
            self.log.debug("Skipping {}".format(subset_name))
            instance.data["publish"] = False
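Outside of pyblish, the gating rule above amounts to a string comparison against one environment variable; a toy sketch (subset names invented):

# Toy demo: the farm job sets OPENPYPE_PUBLISH_SUBSET to the one subset
# it should publish; everything else is skipped.
import os

os.environ["OPENPYPE_PUBLISH_SUBSET"] = "pointcacheMain"

for subset in ("pointcacheMain", "animationMain"):
    publish = subset == os.environ["OPENPYPE_PUBLISH_SUBSET"]
    print(subset, "->", "publish" if publish else "skip")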
@@ -0,0 +1,136 @@
import os
import requests

from maya import cmds

from openpype.pipeline import legacy_io, PublishXmlValidationError
from openpype.settings import get_project_settings
import openpype.api

import pyblish.api


class MayaSubmitRemotePublishDeadline(openpype.api.Integrator):
    """Submit Maya scene to perform a local publish in Deadline.

    Publishing in Deadline can be helpful for scenes that publish very slowly.
    This way it can process in the background on another machine without the
    Artist having to wait for the publish to finish on their local machine.

    Submission is done through the Deadline Web Service. DL then triggers
    `openpype/scripts/remote_publish.py`.

    Each publishable instance creates its own full publish job.

    Different from `ProcessSubmittedJobOnFarm` which creates publish job
    depending on metadata json containing context and instance data of
    rendered files.
    """

    label = "Submit Scene to Deadline"
    order = pyblish.api.IntegratorOrder
    hosts = ["maya"]
    families = ["publish.farm"]

    def process(self, instance):
        settings = get_project_settings(os.getenv("AVALON_PROJECT"))
        # use setting for publish job on farm, no reason to have it separately
        deadline_publish_job_sett = (settings["deadline"]
                                     ["publish"]
                                     ["ProcessSubmittedJobOnFarm"])

        # Ensure no errors so far
        if not (all(result["success"]
                    for result in instance.context.data["results"])):
            raise PublishXmlValidationError("Publish process has errors")

        if not instance.data["publish"]:
            self.log.warning("No active instances found. "
                             "Skipping submission..")
            return

        scene = instance.context.data["currentFile"]
        scenename = os.path.basename(scene)

        # Get project code
        project_name = legacy_io.Session["AVALON_PROJECT"]

        job_name = "{scene} [PUBLISH]".format(scene=scenename)
        batch_name = "{code} - {scene}".format(code=project_name,
                                               scene=scenename)

        # Generate the payload for Deadline submission
        payload = {
            "JobInfo": {
                "Plugin": "MayaBatch",
                "BatchName": batch_name,
                "Name": job_name,
                "UserName": instance.context.data["user"],
                "Comment": instance.context.data.get("comment", ""),
                # "InitialStatus": state
                "Department": deadline_publish_job_sett["deadline_department"],
                "ChunkSize": deadline_publish_job_sett["deadline_chunk_size"],
                "Priority": deadline_publish_job_sett["deadline_priority"],
                "Group": deadline_publish_job_sett["deadline_group"],
                "Pool": deadline_publish_job_sett["deadline_pool"],
            },
            "PluginInfo": {

                "Build": None,  # Don't force build
                "StrictErrorChecking": True,
                "ScriptJob": True,

                # Inputs
                "SceneFile": scene,
                "ScriptFilename": "{OPENPYPE_REPOS_ROOT}/openpype/scripts/remote_publish.py",  # noqa

                # Mandatory for Deadline
                "Version": cmds.about(version=True),

                # Resolve relative references
                "ProjectPath": cmds.workspace(query=True,
                                              rootDirectory=True),

            },

            # Mandatory for Deadline, may be empty
            "AuxFiles": []
        }

        # Include critical environment variables with submission + api.Session
        keys = [
            "FTRACK_API_USER",
            "FTRACK_API_KEY",
            "FTRACK_SERVER"
        ]
        environment = dict({key: os.environ[key] for key in keys
                            if key in os.environ}, **legacy_io.Session)

        # TODO replace legacy_io with context.data ?
        environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"]
        environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"]
        environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"]
        environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME")
        environment["OPENPYPE_LOG_NO_COLORS"] = "1"
        environment["OPENPYPE_REMOTE_JOB"] = "1"
        environment["OPENPYPE_USERNAME"] = instance.context.data["user"]
        environment["OPENPYPE_PUBLISH_SUBSET"] = instance.data["subset"]
        environment["HEADLESS_PUBLISH"] = "1"

        payload["JobInfo"].update({
            "EnvironmentKeyValue%d" % index: "{key}={value}".format(
                key=key,
                value=environment[key]
            ) for index, key in enumerate(environment)
        })

        self.log.info("Submitting Deadline job ...")
        deadline_url = instance.context.data["defaultDeadline"]
        # if custom one is set in instance, use that
        if instance.data.get("deadlineUrl"):
            deadline_url = instance.data.get("deadlineUrl")
        assert deadline_url, "Requires Deadline Webservice URL"
        url = "{}/api/jobs".format(deadline_url)
        response = requests.post(url, json=payload, timeout=10)
        if not response.ok:
            raise Exception(response.text)
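The `EnvironmentKeyValue%d` expansion above is how Deadline's JSON job submission carries per-job environment variables; a tiny standalone sketch of the same dict comprehension (values invented):

# Toy reproduction of the JobInfo environment expansion used above.
environment = {"AVALON_PROJECT": "demo", "HEADLESS_PUBLISH": "1"}

job_info = {
    "EnvironmentKeyValue%d" % index: "{key}={value}".format(
        key=key, value=environment[key]
    )
    for index, key in enumerate(environment)
}
print(job_info)
# e.g. {'EnvironmentKeyValue0': 'AVALON_PROJECT=demo',
#       'EnvironmentKeyValue1': 'HEADLESS_PUBLISH=1'}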
@@ -87,6 +87,13 @@ def inject_openpype_environment(deadlinePlugin):
    for key, value in contents.items():
        deadlinePlugin.SetProcessEnvironmentVariable(key, value)

    script_url = job.GetJobPluginInfoKeyValue("ScriptFilename")
    if script_url:

        script_url = script_url.format(**contents).replace("\\", "/")
        print(">>> Setting script path {}".format(script_url))
        job.SetJobPluginInfoKeyValue("ScriptFilename", script_url)

    print(">>> Removing temporary file")
    os.remove(export_url)
@@ -196,16 +203,19 @@ def __main__(deadlinePlugin):
        job.GetJobEnvironmentKeyValue('OPENPYPE_RENDER_JOB') or '0'
    openpype_publish_job = \
        job.GetJobEnvironmentKeyValue('OPENPYPE_PUBLISH_JOB') or '0'
+    openpype_remote_job = \
+        job.GetJobEnvironmentKeyValue('OPENPYPE_REMOTE_JOB') or '0'

    print("--- Job type - render {}".format(openpype_render_job))
    print("--- Job type - publish {}".format(openpype_publish_job))
+    print("--- Job type - remote {}".format(openpype_remote_job))
    if openpype_publish_job == '1' and openpype_render_job == '1':
        raise RuntimeError("Misconfiguration. Job couldn't be both " +
                           "render and publish.")

    if openpype_publish_job == '1':
        inject_render_job_id(deadlinePlugin)
-    elif openpype_render_job == '1':
+    elif openpype_render_job == '1' or openpype_remote_job == '1':
        inject_openpype_environment(deadlinePlugin)
    else:
        pype(deadlinePlugin)  # backward compatibility with Pype2
@@ -0,0 +1,346 @@
import copy
import json
import collections

import ftrack_api

from openpype_modules.ftrack.lib import (
    ServerAction,
    statics_icon,
)
from openpype_modules.ftrack.lib.avalon_sync import create_chunks


class TransferHierarchicalValues(ServerAction):
    """Transfer values across hierarchical attributes.

    Also gives the ability to convert types meanwhile. That is limited to
    conversions between numbers and strings
    - int <-> float
    - int, float -> string
    """

    identifier = "transfer.hierarchical.values"
    label = "OpenPype Admin"
    variant = "- Transfer values between 2 custom attributes"
    description = (
        "Move values from a hierarchical attribute to"
        " a second hierarchical attribute."
    )
    icon = statics_icon("ftrack", "action_icons", "OpenPypeAdmin.svg")

    all_project_entities_query = (
        "select id, name, parent_id, link"
        " from TypedContext where project_id is \"{}\""
    )
    cust_attr_query = (
        "select value, entity_id from CustomAttributeValue"
        " where entity_id in ({}) and configuration_id is \"{}\""
    )
    settings_key = "transfer_values_of_hierarchical_attributes"

    def discover(self, session, entities, event):
        """Show anywhere."""

        return self.valid_roles(session, entities, event)

    def _selection_interface(self, session, event_values=None):
        title = "Transfer hierarchical values"

        attr_confs = session.query(
            (
                "select id, key from CustomAttributeConfiguration"
                " where is_hierarchical is true"
            )
        ).all()
        attr_items = []
        for attr_conf in attr_confs:
            attr_items.append({
                "value": attr_conf["id"],
                "label": attr_conf["key"]
            })

        if len(attr_items) < 2:
            return {
                "title": title,
                "items": [{
                    "type": "label",
                    "value": (
                        "Didn't find custom attributes"
                        " that can be transferred."
                    )
                }]
            }

        attr_items = sorted(attr_items, key=lambda item: item["label"])
        items = []
        item_splitter = {"type": "label", "value": "---"}
        items.append({
            "type": "label",
            "value": (
                "<h2>Please select source and destination"
                " Custom attribute</h2>"
            )
        })
        items.append({
            "type": "label",
            "value": (
                "<b>WARNING:</b> This will take effect for all projects!"
            )
        })
        if event_values:
            items.append({
                "type": "label",
                "value": (
                    "<b>Note:</b> Please select 2 different custom attributes."
                )
            })

        items.append(item_splitter)

        src_item = {
            "type": "enumerator",
            "label": "Source",
            "name": "src_attr_id",
            "data": copy.deepcopy(attr_items)
        }
        dst_item = {
            "type": "enumerator",
            "label": "Destination",
            "name": "dst_attr_id",
            "data": copy.deepcopy(attr_items)
        }
        delete_item = {
            "type": "boolean",
            "name": "delete_dst_attr_first",
            "label": "Delete first",
            "value": False
        }
        if event_values:
            src_item["value"] = event_values["src_attr_id"]
            dst_item["value"] = event_values["dst_attr_id"]
            delete_item["value"] = event_values["delete_dst_attr_first"]

        items.append(src_item)
        items.append(dst_item)
        items.append(item_splitter)
        items.append({
            "type": "label",
            "value": (
                "<b>WARNING:</b> All values from destination"
                " Custom Attribute will be removed if this is enabled."
            )
        })
        items.append(delete_item)

        return {
            "title": title,
            "items": items
        }

    def interface(self, session, entities, event):
        if event["data"].get("values", {}):
            return None

        return self._selection_interface(session)

    def launch(self, session, entities, event):
        values = event["data"].get("values", {})
        if not values:
            return None
        src_attr_id = values["src_attr_id"]
        dst_attr_id = values["dst_attr_id"]
        delete_dst_values = values["delete_dst_attr_first"]

        if not src_attr_id or not dst_attr_id:
            self.log.info("Attributes were not filled. Nothing to do.")
            return {
                "success": True,
                "message": "Nothing to do"
            }

        if src_attr_id == dst_attr_id:
            self.log.info((
                "Same attributes were selected {}, {}."
                " Showing interface again."
            ).format(src_attr_id, dst_attr_id))
            return self._selection_interface(session, values)

        # Query custom attributes
        src_conf = session.query((
            "select id from CustomAttributeConfiguration where id is {}"
        ).format(src_attr_id)).one()
        dst_conf = session.query((
            "select id from CustomAttributeConfiguration where id is {}"
        ).format(dst_attr_id)).one()
        src_type_name = src_conf["type"]["name"]
        dst_type_name = dst_conf["type"]["name"]
        # Limit conversion to
        # - same type -> same type (there is no need to do conversion)
        # - number <Any> -> number <Any> (int to float and back)
        # - number <Any> -> str (any number can be converted to str)
        src_type = None
        dst_type = None
        if src_type_name == "number" or src_type_name != dst_type_name:
            src_type = self._get_attr_type(src_conf)
            dst_type = self._get_attr_type(dst_conf)
            valid = False
            # Can convert numbers
            if src_type in (int, float) and dst_type in (int, float):
                valid = True
            # Can convert numbers to string
            elif dst_type is str:
                valid = True

            if not valid:
                self.log.info((
                    "Don't know how to properly convert"
                    " custom attribute types {} > {}"
                ).format(src_type_name, dst_type_name))
                return {
                    "message": (
                        "Don't know how to properly convert"
                        " custom attribute types {} > {}"
                    ).format(src_type_name, dst_type_name),
                    "success": False
                }

        # Query source values
        src_attr_values = session.query(
            (
                "select value, entity_id"
                " from CustomAttributeValue"
                " where configuration_id is {}"
            ).format(src_attr_id)
        ).all()

        self.log.debug("Queried source values.")
        failed_entity_ids = []
        if dst_type is not None:
            self.log.debug("Converting source values to destination type")

        value_by_id = {}
        for attr_value in src_attr_values:
            entity_id = attr_value["entity_id"]
            value = attr_value["value"]
            if value is not None:
                try:
                    if dst_type is not None:
                        value = dst_type(value)
                    value_by_id[entity_id] = value
                except Exception:
                    failed_entity_ids.append(entity_id)

        if failed_entity_ids:
            self.log.info(
                "Couldn't convert some values to destination attribute"
            )
            return {
                "success": False,
                "message": (
                    "Couldn't convert some values to destination attribute"
                )
            }

        # Delete destination custom attributes first
        if delete_dst_values:
            self.log.info("Deleting destination custom attribute values first")
            self._delete_custom_attribute_values(session, dst_attr_id)

        self.log.info("Applying source values on destination custom attribute")
        self._apply_values(session, value_by_id, dst_attr_id)
        return True

    def _delete_custom_attribute_values(self, session, dst_attr_id):
        dst_attr_values = session.query(
            (
                "select configuration_id, entity_id"
                " from CustomAttributeValue"
                " where configuration_id is {}"
            ).format(dst_attr_id)
        ).all()
        delete_operations = []
        for attr_value in dst_attr_values:
            entity_id = attr_value["entity_id"]
            configuration_id = attr_value["configuration_id"]
            entity_key = collections.OrderedDict((
                ("configuration_id", configuration_id),
                ("entity_id", entity_id)
            ))
            delete_operations.append(
                ftrack_api.operation.DeleteEntityOperation(
                    "CustomAttributeValue",
                    entity_key
                )
            )

        if not delete_operations:
            return

        for chunk in create_chunks(delete_operations, 500):
            for operation in chunk:
                session.recorded_operations.push(operation)
            session.commit()

    def _apply_values(self, session, value_by_id, dst_attr_id):
        dst_attr_values = session.query(
            (
                "select configuration_id, entity_id"
                " from CustomAttributeValue"
                " where configuration_id is {}"
            ).format(dst_attr_id)
        ).all()

        dst_entity_ids_with_value = {
            item["entity_id"]
            for item in dst_attr_values
        }
        operations = []
        for entity_id, value in value_by_id.items():
            entity_key = collections.OrderedDict((
                ("configuration_id", dst_attr_id),
                ("entity_id", entity_id)
            ))
            if entity_id in dst_entity_ids_with_value:
                operations.append(
                    ftrack_api.operation.UpdateEntityOperation(
                        "CustomAttributeValue",
                        entity_key,
                        "value",
                        ftrack_api.symbol.NOT_SET,
                        value
                    )
                )
            else:
                operations.append(
                    ftrack_api.operation.CreateEntityOperation(
                        "CustomAttributeValue",
                        entity_key,
                        {"value": value}
                    )
                )

        if not operations:
            return

        for chunk in create_chunks(operations, 500):
            for operation in chunk:
                session.recorded_operations.push(operation)
            session.commit()

    def _get_attr_type(self, conf_def):
        type_name = conf_def["type"]["name"]
        if type_name == "text":
            return str

        if type_name == "number":
            config = json.loads(conf_def["config"])
            if config["isdecimal"]:
                return float
            return int
        return None


def register(session):
    '''Register plugin. Called when used as a plugin.'''

    TransferHierarchicalValues(session).register()
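The type mapping in `_get_attr_type` drives which conversions the action allows; a toy illustration of the permitted paths (values invented):

# Toy demo of the conversion rule: numbers convert between int/float,
# and anything numeric may become a string.
conversions = [
    (int, float, 3),      # int -> float
    (float, int, 2.0),    # float -> int
    (float, str, 1.5),    # float -> str
]
for src_type, dst_type, value in conversions:
    print(src_type.__name__, "->", dst_type.__name__, "=", dst_type(value))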
@@ -88,6 +88,40 @@ class FtrackModule(
        """Implementation of `ILaunchHookPaths`."""
        return os.path.join(FTRACK_MODULE_DIR, "launch_hooks")

    def modify_application_launch_arguments(self, application, env):
        if not application.use_python_2:
            return

        self.log.info("Adding Ftrack Python 2 packages to PYTHONPATH.")

        # Prepare vendor dir path
        python_2_vendor = os.path.join(FTRACK_MODULE_DIR, "python2_vendor")

        # Add Python 2 modules
        python_paths = [
            # `python-ftrack-api`
            os.path.join(python_2_vendor, "ftrack-python-api", "source"),
            # `arrow`
            os.path.join(python_2_vendor, "arrow"),
            # `builtins` from `python-future`
            # - `python-future` is a strict Python 2 module that causes
            #   crashes of Python 3 scripts executed through OpenPype
            #   (burnin script etc.)
            os.path.join(python_2_vendor, "builtins"),
            # `backports.functools_lru_cache`
            os.path.join(
                python_2_vendor, "backports.functools_lru_cache"
            )
        ]

        # Load PYTHONPATH from current launch context
        python_path = env.get("PYTHONPATH")
        if python_path:
            python_paths.append(python_path)

        # Set new PYTHONPATH to launch context environments
        env["PYTHONPATH"] = os.pathsep.join(python_paths)

    def connect_with_modules(self, enabled_modules):
        for module in enabled_modules:
            if not hasattr(module, "get_ftrack_event_handler_paths"):
@@ -1,43 +0,0 @@
import os
from openpype.lib import PreLaunchHook
from openpype_modules.ftrack import FTRACK_MODULE_DIR


class PrePython2Support(PreLaunchHook):
    """Add python ftrack api module for Python 2 to PYTHONPATH.

    Path to vendor modules is added to the beginning of PYTHONPATH.
    """

    def execute(self):
        if not self.application.use_python_2:
            return

        self.log.info("Adding Ftrack Python 2 packages to PYTHONPATH.")

        # Prepare vendor dir path
        python_2_vendor = os.path.join(FTRACK_MODULE_DIR, "python2_vendor")

        # Add Python 2 modules
        python_paths = [
            # `python-ftrack-api`
            os.path.join(python_2_vendor, "ftrack-python-api", "source"),
            # `arrow`
            os.path.join(python_2_vendor, "arrow"),
            # `builtins` from `python-future`
            # - `python-future` is a strict Python 2 module that causes
            #   crashes of Python 3 scripts executed through OpenPype
            #   (burnin script etc.)
            os.path.join(python_2_vendor, "builtins"),
            # `backports.functools_lru_cache`
            os.path.join(
                python_2_vendor, "backports.functools_lru_cache"
            )
        ]

        # Load PYTHONPATH from current launch context
        python_path = self.launch_context.env.get("PYTHONPATH")
        if python_path:
            python_paths.append(python_path)

        # Set new PYTHONPATH to launch context environments
        self.launch_context.env["PYTHONPATH"] = os.pathsep.join(python_paths)
@@ -2,7 +2,7 @@ import sys
import collections
import six
import pyblish.api

from copy import deepcopy
from openpype.pipeline import legacy_io

# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC`
@@ -72,7 +72,8 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        if "hierarchyContext" not in self.context.data:
            return

-        hierarchy_context = self.context.data["hierarchyContext"]
+        hierarchy_context = self._get_active_assets(context)
        self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))

        self.session = self.context.data["ftrackSession"]
        project_name = self.context.data["projectEntity"]["name"]
@@ -86,15 +87,13 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):

        self.ft_project = None

-        input_data = hierarchy_context
-
        # temporarily disable the ftrack project's autosync
        if auto_sync_state:
            self.auto_sync_off(project)

        try:
            # import ftrack hierarchy
-            self.import_to_ftrack(input_data)
+            self.import_to_ftrack(hierarchy_context)
        except Exception:
            raise
        finally:
@@ -355,3 +354,41 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin):
        self.session.rollback()
        self.session._configure_locations()
        six.reraise(tp, value, tb)

    def _get_active_assets(self, context):
        """Returns only the asset dictionary.

        Usually the last part of a deep dictionary which
        does not have any children.
        """
        def get_pure_hierarchy_data(input_dict):
            input_dict_copy = deepcopy(input_dict)
            for key in input_dict.keys():
                self.log.debug("__ key: {}".format(key))
                # check if child key is available
                if input_dict[key].get("childs"):
                    # loop deeper
                    input_dict_copy[
                        key]["childs"] = get_pure_hierarchy_data(
                            input_dict[key]["childs"])
                elif key not in active_assets:
                    input_dict_copy.pop(key, None)
            return input_dict_copy

        hierarchy_context = context.data["hierarchyContext"]

        active_assets = []
        # filter only the active publishing instances
        for instance in context:
            if instance.data.get("publish") is False:
                continue

            if not instance.data.get("asset"):
                continue

            active_assets.append(instance.data["asset"])

        # remove duplicates from the list
        active_assets = list(set(active_assets))
        self.log.debug("__ active_assets: {}".format(active_assets))

        return get_pure_hierarchy_data(hierarchy_context)
openpype/modules/kitsu/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
"""Addon class definition and Settings definition must be imported here.

If the addon class or settings definition is not here, it won't
be found by OpenPype discovery.
"""

from .kitsu_module import KitsuModule

__all__ = ("KitsuModule",)
openpype/modules/kitsu/kitsu_module.py (new file, 136 lines)
@@ -0,0 +1,136 @@
"""Kitsu module."""

import click
import os

from openpype.modules import OpenPypeModule
from openpype_interfaces import IPluginPaths, ITrayAction


class KitsuModule(OpenPypeModule, IPluginPaths, ITrayAction):
    """Kitsu module class."""

    label = "Kitsu Connect"
    name = "kitsu"

    def initialize(self, settings):
        """Initialization of module."""
        module_settings = settings[self.name]

        # Enabled by settings
        self.enabled = module_settings.get("enabled", False)

        # Add API URL schema
        kitsu_url = module_settings["server"].strip()
        if kitsu_url:
            # Ensure web url
            if not kitsu_url.startswith("http"):
                kitsu_url = "https://" + kitsu_url

            # Check for "/api" url validity
            if not kitsu_url.endswith("api"):
                kitsu_url = "{}{}api".format(
                    kitsu_url, "" if kitsu_url.endswith("/") else "/"
                )

        self.server_url = kitsu_url

        # UI which must not be created at this time
        self._dialog = None

    def tray_init(self):
        """Tray init."""

        self._create_dialog()

    def tray_start(self):
        """Tray start."""
        from .utils.credentials import (
            load_credentials,
            validate_credentials,
            set_credentials_envs,
        )

        login, password = load_credentials()

        # Check credentials, ask for them if needed
        if validate_credentials(login, password):
            set_credentials_envs(login, password)
        else:
            self.show_dialog()

    def get_global_environments(self):
        """Kitsu's global environments."""
        return {"KITSU_SERVER": self.server_url}

    def _create_dialog(self):
        # Don't recreate dialog if it already exists
        if self._dialog is not None:
            return

        from .kitsu_widgets import KitsuPasswordDialog

        self._dialog = KitsuPasswordDialog()

    def show_dialog(self):
        """Show dialog to log in."""

        # Make sure dialog is created
        self._create_dialog()

        # Show dialog
        self._dialog.open()

    def on_action_trigger(self):
        """Implementation of abstract method for `ITrayAction`."""
        self.show_dialog()

    def get_plugin_paths(self):
        """Implementation of abstract method for `IPluginPaths`."""
        current_dir = os.path.dirname(os.path.abspath(__file__))

        return {"publish": [os.path.join(current_dir, "plugins", "publish")]}

    def cli(self, click_group):
        click_group.add_command(cli_main)


@click.group(KitsuModule.name, help="Kitsu dynamic cli commands.")
def cli_main():
    pass


@cli_main.command()
@click.option("--login", envvar="KITSU_LOGIN", help="Kitsu login")
@click.option(
    "--password", envvar="KITSU_PWD", help="Password for kitsu username"
)
def push_to_zou(login, password):
    """Synchronize the Zou database (Kitsu backend) with the openpype database.

    Args:
        login (str): Kitsu user login
        password (str): Kitsu user password
    """
    from .utils.update_zou_with_op import sync_zou

    sync_zou(login, password)


@cli_main.command()
@click.option("-l", "--login", envvar="KITSU_LOGIN", help="Kitsu login")
@click.option(
    "-p", "--password", envvar="KITSU_PWD", help="Password for kitsu username"
)
def sync_service(login, password):
    """Synchronize the openpype database from the Zou server database.

    Args:
        login (str): Kitsu user login
        password (str): Kitsu user password
    """
    from .utils.update_op_with_zou import sync_all_project
    from .utils.sync_service import start_listeners

    sync_all_project(login, password)
    start_listeners(login, password)
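The URL normalization in `initialize` above is easy to sanity-check in isolation; a standalone sketch of the same rules (example hosts invented):

# Reproduces the normalization applied to the "server" setting above.
def normalize_kitsu_url(kitsu_url):
    kitsu_url = kitsu_url.strip()
    if not kitsu_url:
        return kitsu_url
    if not kitsu_url.startswith("http"):
        kitsu_url = "https://" + kitsu_url
    if not kitsu_url.endswith("api"):
        kitsu_url = "{}{}api".format(
            kitsu_url, "" if kitsu_url.endswith("/") else "/"
        )
    return kitsu_url

print(normalize_kitsu_url("kitsu.mystudio.com"))    # https://kitsu.mystudio.com/api
print(normalize_kitsu_url("https://kitsu.local/"))  # https://kitsu.local/api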
openpype/modules/kitsu/kitsu_widgets.py (new file, 188 lines)
@@ -0,0 +1,188 @@
from Qt import QtWidgets, QtCore, QtGui

from openpype import style
from openpype.modules.kitsu.utils.credentials import (
    clear_credentials,
    load_credentials,
    save_credentials,
    set_credentials_envs,
    validate_credentials,
)
from openpype.resources import get_resource
from openpype.settings.lib import (
    get_system_settings,
)

from openpype.widgets.password_dialog import PressHoverButton


class KitsuPasswordDialog(QtWidgets.QDialog):
    """Kitsu login dialog."""

    finished = QtCore.Signal(bool)

    def __init__(self, parent=None):
        super(KitsuPasswordDialog, self).__init__(parent)

        self.setWindowTitle("Kitsu Credentials")
        self.resize(300, 120)

        system_settings = get_system_settings()
        user_login, user_pwd = load_credentials()
        remembered = bool(user_login or user_pwd)

        self._final_result = None
        self._connectable = bool(
            system_settings["modules"].get("kitsu", {}).get("server")
        )

        # Server label
        server_message = (
            system_settings["modules"]["kitsu"]["server"]
            if self._connectable
            else "no server url set in Studio Settings..."
        )
        server_label = QtWidgets.QLabel(
            f"Server: {server_message}",
            self,
        )

        # Login input
        login_widget = QtWidgets.QWidget(self)

        login_label = QtWidgets.QLabel("Login:", login_widget)

        login_input = QtWidgets.QLineEdit(
            login_widget,
            text=user_login if remembered else None,
        )
        login_input.setPlaceholderText("Your Kitsu account login...")

        login_layout = QtWidgets.QHBoxLayout(login_widget)
        login_layout.setContentsMargins(0, 0, 0, 0)
        login_layout.addWidget(login_label)
        login_layout.addWidget(login_input)

        # Password input
        password_widget = QtWidgets.QWidget(self)

        password_label = QtWidgets.QLabel("Password:", password_widget)

        password_input = QtWidgets.QLineEdit(
            password_widget,
            text=user_pwd if remembered else None,
        )
        password_input.setPlaceholderText("Your password...")
        password_input.setEchoMode(QtWidgets.QLineEdit.Password)

        show_password_icon_path = get_resource("icons", "eye.png")
        show_password_icon = QtGui.QIcon(show_password_icon_path)
        show_password_btn = PressHoverButton(password_widget)
        show_password_btn.setObjectName("PasswordBtn")
        show_password_btn.setIcon(show_password_icon)
        show_password_btn.setFocusPolicy(QtCore.Qt.ClickFocus)

        password_layout = QtWidgets.QHBoxLayout(password_widget)
        password_layout.setContentsMargins(0, 0, 0, 0)
        password_layout.addWidget(password_label)
        password_layout.addWidget(password_input)
        password_layout.addWidget(show_password_btn)

        # Message label
        message_label = QtWidgets.QLabel("", self)

        # Buttons
        buttons_widget = QtWidgets.QWidget(self)

        remember_checkbox = QtWidgets.QCheckBox("Remember", buttons_widget)
        remember_checkbox.setObjectName("RememberCheckbox")
        remember_checkbox.setChecked(remembered)

        ok_btn = QtWidgets.QPushButton("Ok", buttons_widget)
        cancel_btn = QtWidgets.QPushButton("Cancel", buttons_widget)

        buttons_layout = QtWidgets.QHBoxLayout(buttons_widget)
        buttons_layout.setContentsMargins(0, 0, 0, 0)
        buttons_layout.addWidget(remember_checkbox)
        buttons_layout.addStretch(1)
        buttons_layout.addWidget(ok_btn)
        buttons_layout.addWidget(cancel_btn)

        # Main layout
        layout = QtWidgets.QVBoxLayout(self)
        layout.addSpacing(5)
        layout.addWidget(server_label, 0)
        layout.addSpacing(5)
        layout.addWidget(login_widget, 0)
        layout.addWidget(password_widget, 0)
        layout.addWidget(message_label, 0)
        layout.addStretch(1)
        layout.addWidget(buttons_widget, 0)

        ok_btn.clicked.connect(self._on_ok_click)
        cancel_btn.clicked.connect(self._on_cancel_click)
        show_password_btn.change_state.connect(self._on_show_password)

        self.login_input = login_input
        self.password_input = password_input
        self.remember_checkbox = remember_checkbox
        self.message_label = message_label

        self.setStyleSheet(style.load_stylesheet())

    def result(self):
        return self._final_result

    def keyPressEvent(self, event):
        if event.key() in (QtCore.Qt.Key_Return, QtCore.Qt.Key_Enter):
            self._on_ok_click()
            return event.accept()
        super(KitsuPasswordDialog, self).keyPressEvent(event)

    def closeEvent(self, event):
        super(KitsuPasswordDialog, self).closeEvent(event)
        self.finished.emit(self.result())

    def _on_ok_click(self):
        # Check if is connectable
        if not self._connectable:
            self.message_label.setText(
                "Please set server url in Studio Settings!"
            )
            return

        # Collect values
        login_value = self.login_input.text()
        pwd_value = self.password_input.text()
        remember = self.remember_checkbox.isChecked()

        # Authenticate
        if validate_credentials(login_value, pwd_value):
            set_credentials_envs(login_value, pwd_value)
        else:
            self.message_label.setText("Authentication failed...")
            return

        # Remember password cases
        if remember:
            save_credentials(login_value, pwd_value)
        else:
            # Clear local settings
            clear_credentials()

            # Clear input fields
            self.login_input.clear()
            self.password_input.clear()

        self._final_result = True
        self.close()

    def _on_show_password(self, show_password):
        if show_password:
            echo_mode = QtWidgets.QLineEdit.Normal
        else:
            echo_mode = QtWidgets.QLineEdit.Password
        self.password_input.setEchoMode(echo_mode)

    def _on_cancel_click(self):
        self.close()
@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
import os

import gazu
import pyblish.api


class CollectKitsuSession(pyblish.api.ContextPlugin):  # rename log in
    """Collect Kitsu session using user credentials"""

    order = pyblish.api.CollectorOrder
    label = "Kitsu user session"
    # families = ["kitsu"]

    def process(self, context):

        gazu.client.set_host(os.environ["KITSU_SERVER"])
        gazu.log_in(os.environ["KITSU_LOGIN"], os.environ["KITSU_PWD"])
@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
import os

import gazu
import pyblish.api


class CollectKitsuEntities(pyblish.api.ContextPlugin):
    """Collect Kitsu entities according to the current context"""

    order = pyblish.api.CollectorOrder + 0.499
    label = "Kitsu entities"

    def process(self, context):

        asset_data = context.data["assetEntity"]["data"]
        zou_asset_data = asset_data.get("zou")
        if not zou_asset_data:
            raise AssertionError("Zou asset data not found in OpenPype!")
        self.log.debug("Collected zou asset data: {}".format(zou_asset_data))

        zou_task_data = asset_data["tasks"][os.environ["AVALON_TASK"]].get(
            "zou"
        )
        if not zou_task_data:
            self.log.warning("Zou task data not found in OpenPype!")
        self.log.debug("Collected zou task data: {}".format(zou_task_data))

        kitsu_project = gazu.project.get_project(zou_asset_data["project_id"])
        if not kitsu_project:
            raise AssertionError("Project not found in kitsu!")
        context.data["kitsu_project"] = kitsu_project
        self.log.debug("Collect kitsu project: {}".format(kitsu_project))

        kitsu_asset = gazu.asset.get_asset(zou_asset_data["id"])
        if not kitsu_asset:
            raise AssertionError("Asset not found in kitsu!")
        context.data["kitsu_asset"] = kitsu_asset
        self.log.debug("Collect kitsu asset: {}".format(kitsu_asset))

        if zou_task_data:
            kitsu_task = gazu.task.get_task(zou_task_data["id"])
            if not kitsu_task:
                raise AssertionError("Task not found in kitsu!")
            context.data["kitsu_task"] = kitsu_task
            self.log.debug("Collect kitsu task: {}".format(kitsu_task))

        else:
            kitsu_task_type = gazu.task.get_task_type_by_name(
                os.environ["AVALON_TASK"]
            )
            if not kitsu_task_type:
                raise AssertionError(
                    "Task type {} not found in Kitsu!".format(
                        os.environ["AVALON_TASK"]
                    )
                )

            kitsu_task = gazu.task.get_task_by_name(
                kitsu_asset, kitsu_task_type
            )
            if not kitsu_task:
                raise AssertionError("Task not found in kitsu!")
            context.data["kitsu_task"] = kitsu_task
            self.log.debug("Collect kitsu task: {}".format(kitsu_task))
@@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-
import gazu
import pyblish.api


class IntegrateKitsuNote(pyblish.api.ContextPlugin):
    """Integrate Kitsu Note"""

    order = pyblish.api.IntegratorOrder
    label = "Kitsu Note and Status"
    # families = ["kitsu"]
    set_status_note = False
    note_status_shortname = "wfa"

    def process(self, context):

        # Get comment text body
        publish_comment = context.data.get("comment")
        if not publish_comment:
            self.log.info("Comment is not set.")

        self.log.debug("Comment is `{}`".format(publish_comment))

        # Get note status; by default the task status is used for the note
        # if it is not specified in the configuration
        note_status = context.data["kitsu_task"]["task_status_id"]
        if self.set_status_note:
            kitsu_status = gazu.task.get_task_status_by_short_name(
                self.note_status_shortname
            )
            if kitsu_status:
                note_status = kitsu_status
                self.log.info("Note Kitsu status: {}".format(note_status))
            else:
                self.log.info(
                    "Cannot find {} status. The status will not be "
                    "changed!".format(self.note_status_shortname)
                )

        # Add comment to kitsu task
        self.log.debug(
            "Add new note in task id {}".format(
                context.data["kitsu_task"]["id"]
            )
        )
        kitsu_comment = gazu.task.add_comment(
            context.data["kitsu_task"], note_status, comment=publish_comment
        )

        context.data["kitsu_comment"] = kitsu_comment
@@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
import gazu
import pyblish.api


class IntegrateKitsuReview(pyblish.api.InstancePlugin):
    """Integrate Kitsu Review"""

    order = pyblish.api.IntegratorOrder + 0.01
    label = "Kitsu Review"
    # families = ["kitsu"]
    optional = True

    def process(self, instance):

        context = instance.context
        task = context.data["kitsu_task"]
        comment = context.data.get("kitsu_comment")

        # Check comment has been created
        if not comment:
            self.log.debug(
                "Comment not created, review not pushed to preview."
            )
            return

        # Add review representations as previews of the comment
        for representation in instance.data.get("representations", []):
            # Skip if not tagged as review
            if "review" not in representation.get("tags", []):
                continue

            review_path = representation.get("published_path")

            self.log.debug("Found review at: {}".format(review_path))

            gazu.task.add_preview(
                task, comment, review_path, normalize_movie=True
            )
            self.log.info("Review uploaded on comment")
@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
import gazu
import pyblish.api


class KitsuLogOut(pyblish.api.ContextPlugin):
    """Log out from Kitsu API"""

    order = pyblish.api.IntegratorOrder + 10
    label = "Kitsu Log Out"

    def process(self, context):
        gazu.log_out()
openpype/modules/kitsu/utils/__init__.py (new file, empty)
openpype/modules/kitsu/utils/credentials.py (new file, 104 lines)
@@ -0,0 +1,104 @@
"""Kitsu credentials functions."""

import os
from typing import Tuple

import gazu

from openpype.lib.local_settings import OpenPypeSecureRegistry


def validate_credentials(
    login: str, password: str, kitsu_url: str = None
) -> bool:
    """Validate credentials by trying to connect to Kitsu host URL.

    Args:
        login (str): Kitsu user login
        password (str): Kitsu user password
        kitsu_url (str, optional): Kitsu host URL. Defaults to None.

    Returns:
        bool: Are credentials valid?
    """
    if kitsu_url is None:
        kitsu_url = os.environ.get("KITSU_SERVER")

    # Connect to server
    validate_host(kitsu_url)

    # Authenticate
    try:
        gazu.log_in(login, password)
    except gazu.exception.AuthFailedException:
        return False

    return True


def validate_host(kitsu_url: str) -> bool:
    """Validate host by trying to connect to Kitsu host URL.

    Args:
        kitsu_url (str): Kitsu host URL.

    Returns:
        bool: Is host valid?
    """
    # Connect to server
    gazu.set_host(kitsu_url)

    # Test host
    if gazu.client.host_is_valid():
        return True
    else:
        raise gazu.exception.HostException(f"Host '{kitsu_url}' is invalid.")


def clear_credentials():
    """Clear credentials in Secure Registry."""
    # Get user registry
    user_registry = OpenPypeSecureRegistry("kitsu_user")

    # Remove local settings
    user_registry.delete_item("login")
    user_registry.delete_item("password")


def save_credentials(login: str, password: str):
    """Save credentials in Secure Registry.

    Args:
        login (str): Kitsu user login
        password (str): Kitsu user password
    """
    # Get user registry
    user_registry = OpenPypeSecureRegistry("kitsu_user")

    # Set local settings
    user_registry.set_item("login", login)
    user_registry.set_item("password", password)


def load_credentials() -> Tuple[str, str]:
    """Load registered credentials.

    Returns:
        Tuple[str, str]: (Login, Password)
    """
    # Get user registry
    user_registry = OpenPypeSecureRegistry("kitsu_user")

    return user_registry.get_item("login", None), user_registry.get_item(
        "password", None
    )


def set_credentials_envs(login: str, password: str):
    """Set environment variables with Kitsu login and password.

    Args:
        login (str): Kitsu user login
        password (str): Kitsu user password
    """
    os.environ["KITSU_LOGIN"] = login
    os.environ["KITSU_PWD"] = password
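Taken together, these helpers support a simple login flow. A hedged usage sketch (the server URL and credentials are hypothetical; error handling is kept minimal):

    # Minimal login flow built on the helpers above (a sketch, not the module's own CLI)
    from openpype.modules.kitsu.utils.credentials import (
        validate_credentials,
        save_credentials,
        set_credentials_envs,
    )

    login, password = "user@studio.com", "secret"  # hypothetical values
    if validate_credentials(login, password, "https://kitsu.studio.com/api"):
        save_credentials(login, password)      # keep them in the secure registry
        set_credentials_envs(login, password)  # expose them to spawned processes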
384  openpype/modules/kitsu/utils/sync_service.py  Normal file
@@ -0,0 +1,384 @@
import os

import gazu

from openpype.pipeline import AvalonMongoDB
from .credentials import validate_credentials
from .update_op_with_zou import (
    create_op_asset,
    set_op_project,
    write_project_to_op,
    update_op_assets,
)


class Listener:
    """Host Kitsu listener."""

    def __init__(self, login, password):
        """Create client and add listeners to events without starting it.

        Run `listener.start()` to actually start the service.

        Args:
            login (str): Kitsu user login
            password (str): Kitsu user password

        Raises:
            AuthFailedException: Wrong user login and/or password
        """
        self.dbcon = AvalonMongoDB()
        self.dbcon.install()

        gazu.client.set_host(os.environ["KITSU_SERVER"])

        # Authenticate
        if not validate_credentials(login, password):
            raise gazu.exception.AuthFailedException(
                f"Kitsu authentication failed for login: '{login}'..."
            )

        gazu.set_event_host(
            os.environ["KITSU_SERVER"].replace("api", "socket.io")
        )
        self.event_client = gazu.events.init()

        gazu.events.add_listener(
            self.event_client, "project:new", self._new_project
        )
        gazu.events.add_listener(
            self.event_client, "project:update", self._update_project
        )
        gazu.events.add_listener(
            self.event_client, "project:delete", self._delete_project
        )

        gazu.events.add_listener(
            self.event_client, "asset:new", self._new_asset
        )
        gazu.events.add_listener(
            self.event_client, "asset:update", self._update_asset
        )
        gazu.events.add_listener(
            self.event_client, "asset:delete", self._delete_asset
        )

        gazu.events.add_listener(
            self.event_client, "episode:new", self._new_episode
        )
        gazu.events.add_listener(
            self.event_client, "episode:update", self._update_episode
        )
        gazu.events.add_listener(
            self.event_client, "episode:delete", self._delete_episode
        )

        gazu.events.add_listener(
            self.event_client, "sequence:new", self._new_sequence
        )
        gazu.events.add_listener(
            self.event_client, "sequence:update", self._update_sequence
        )
        gazu.events.add_listener(
            self.event_client, "sequence:delete", self._delete_sequence
        )

        gazu.events.add_listener(self.event_client, "shot:new", self._new_shot)
        gazu.events.add_listener(
            self.event_client, "shot:update", self._update_shot
        )
        gazu.events.add_listener(
            self.event_client, "shot:delete", self._delete_shot
        )

        gazu.events.add_listener(self.event_client, "task:new", self._new_task)
        gazu.events.add_listener(
            self.event_client, "task:update", self._update_task
        )
        gazu.events.add_listener(
            self.event_client, "task:delete", self._delete_task
        )

    def start(self):
        gazu.events.run_client(self.event_client)

    # == Project ==
    def _new_project(self, data):
        """Create new project into OP DB."""
        # Use update process to avoid duplicating code
        self._update_project(data)

    def _update_project(self, data):
        """Update project into OP DB."""
        # Get project entity
        project = gazu.project.get_project(data["project_id"])
        project_name = project["name"]

        update_project = write_project_to_op(project, self.dbcon)

        # Write into DB
        if update_project:
            self.dbcon = self.dbcon.database[project_name]
            self.dbcon.bulk_write([update_project])

    def _delete_project(self, data):
        """Delete project."""
        project_doc = self.dbcon.find_one(
            {"type": "project", "data.zou_id": data["project_id"]}
        )

        # Delete project collection
        self.dbcon.database[project_doc["name"]].drop()

    # == Asset ==
    def _new_asset(self, data):
        """Create new asset into OP DB."""
        # Get project entity
        set_op_project(self.dbcon, data["project_id"])

        # Get gazu entity
        asset = gazu.asset.get_asset(data["asset_id"])

        # Insert doc in DB
        self.dbcon.insert_one(create_op_asset(asset))

        # Update
        self._update_asset(data)

    def _update_asset(self, data):
        """Update asset into OP DB."""
        set_op_project(self.dbcon, data["project_id"])
        project_doc = self.dbcon.find_one({"type": "project"})

        # Get gazu entity
        asset = gazu.asset.get_asset(data["asset_id"])

        # Find asset doc
        # Query all assets of the local project
        zou_ids_and_asset_docs = {
            asset_doc["data"]["zou"]["id"]: asset_doc
            for asset_doc in self.dbcon.find({"type": "asset"})
            if asset_doc["data"].get("zou", {}).get("id")
        }
        zou_ids_and_asset_docs[asset["project_id"]] = project_doc

        # Update
        asset_doc_id, asset_update = update_op_assets(
            self.dbcon, project_doc, [asset], zou_ids_and_asset_docs
        )[0]
        self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

    def _delete_asset(self, data):
        """Delete asset of OP DB."""
        set_op_project(self.dbcon, data["project_id"])

        # Delete
        self.dbcon.delete_one(
            {"type": "asset", "data.zou.id": data["asset_id"]}
        )

    # == Episode ==
    def _new_episode(self, data):
        """Create new episode into OP DB."""
        # Get project entity
        set_op_project(self.dbcon, data["project_id"])

        # Get gazu entity
        episode = gazu.shot.get_episode(data["episode_id"])

        # Insert doc in DB
        self.dbcon.insert_one(create_op_asset(episode))

        # Update
        self._update_episode(data)

    def _update_episode(self, data):
        """Update episode into OP DB."""
        set_op_project(self.dbcon, data["project_id"])
        project_doc = self.dbcon.find_one({"type": "project"})

        # Get gazu entity
        episode = gazu.shot.get_episode(data["episode_id"])

        # Find asset doc
        # Query all assets of the local project
        zou_ids_and_asset_docs = {
            asset_doc["data"]["zou"]["id"]: asset_doc
            for asset_doc in self.dbcon.find({"type": "asset"})
            if asset_doc["data"].get("zou", {}).get("id")
        }
        zou_ids_and_asset_docs[episode["project_id"]] = project_doc

        # Update
        asset_doc_id, asset_update = update_op_assets(
            self.dbcon, project_doc, [episode], zou_ids_and_asset_docs
        )[0]
        self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

    def _delete_episode(self, data):
        """Delete episode of OP DB."""
        set_op_project(self.dbcon, data["project_id"])
        print("delete episode")  # TODO check bugfix

        # Delete
        self.dbcon.delete_one(
            {"type": "asset", "data.zou.id": data["episode_id"]}
        )

    # == Sequence ==
    def _new_sequence(self, data):
        """Create new sequence into OP DB."""
        # Get project entity
        set_op_project(self.dbcon, data["project_id"])

        # Get gazu entity
        sequence = gazu.shot.get_sequence(data["sequence_id"])

        # Insert doc in DB
        self.dbcon.insert_one(create_op_asset(sequence))

        # Update
        self._update_sequence(data)

    def _update_sequence(self, data):
        """Update sequence into OP DB."""
        set_op_project(self.dbcon, data["project_id"])
        project_doc = self.dbcon.find_one({"type": "project"})

        # Get gazu entity
        sequence = gazu.shot.get_sequence(data["sequence_id"])

        # Find asset doc
        # Query all assets of the local project
        zou_ids_and_asset_docs = {
            asset_doc["data"]["zou"]["id"]: asset_doc
            for asset_doc in self.dbcon.find({"type": "asset"})
            if asset_doc["data"].get("zou", {}).get("id")
        }
        zou_ids_and_asset_docs[sequence["project_id"]] = project_doc

        # Update
        asset_doc_id, asset_update = update_op_assets(
            self.dbcon, project_doc, [sequence], zou_ids_and_asset_docs
        )[0]
        self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

    def _delete_sequence(self, data):
        """Delete sequence of OP DB."""
        set_op_project(self.dbcon, data["project_id"])
        print("delete sequence")  # TODO check bugfix

        # Delete
        self.dbcon.delete_one(
            {"type": "asset", "data.zou.id": data["sequence_id"]}
        )

    # == Shot ==
    def _new_shot(self, data):
        """Create new shot into OP DB."""
        # Get project entity
        set_op_project(self.dbcon, data["project_id"])

        # Get gazu entity
        shot = gazu.shot.get_shot(data["shot_id"])

        # Insert doc in DB
        self.dbcon.insert_one(create_op_asset(shot))

        # Update
        self._update_shot(data)

    def _update_shot(self, data):
        """Update shot into OP DB."""
        set_op_project(self.dbcon, data["project_id"])
        project_doc = self.dbcon.find_one({"type": "project"})

        # Get gazu entity
        shot = gazu.shot.get_shot(data["shot_id"])

        # Find asset doc
        # Query all assets of the local project
        zou_ids_and_asset_docs = {
            asset_doc["data"]["zou"]["id"]: asset_doc
            for asset_doc in self.dbcon.find({"type": "asset"})
            if asset_doc["data"].get("zou", {}).get("id")
        }
        zou_ids_and_asset_docs[shot["project_id"]] = project_doc

        # Update
        asset_doc_id, asset_update = update_op_assets(
            self.dbcon, project_doc, [shot], zou_ids_and_asset_docs
        )[0]
        self.dbcon.update_one({"_id": asset_doc_id}, asset_update)

    def _delete_shot(self, data):
        """Delete shot of OP DB."""
        set_op_project(self.dbcon, data["project_id"])

        # Delete
        self.dbcon.delete_one(
            {"type": "asset", "data.zou.id": data["shot_id"]}
        )

    # == Task ==
    def _new_task(self, data):
        """Create new task into OP DB."""
        # Get project entity
        set_op_project(self.dbcon, data["project_id"])

        # Get gazu entity
        task = gazu.task.get_task(data["task_id"])

        # Find asset doc
        asset_doc = self.dbcon.find_one(
            {"type": "asset", "data.zou.id": task["entity"]["id"]}
        )

        # Update asset tasks with new one
        asset_tasks = asset_doc["data"].get("tasks")
        task_type_name = task["task_type"]["name"]
        asset_tasks[task_type_name] = {"type": task_type_name, "zou": task}
        self.dbcon.update_one(
            {"_id": asset_doc["_id"]}, {"$set": {"data.tasks": asset_tasks}}
        )

    def _update_task(self, data):
        """Update task into OP DB."""
        # TODO is it necessary?
        pass

    def _delete_task(self, data):
        """Delete task of OP DB."""
        set_op_project(self.dbcon, data["project_id"])

        # Find asset doc
        asset_docs = [doc for doc in self.dbcon.find({"type": "asset"})]
        for doc in asset_docs:
            # Match task
            for name, task in doc["data"]["tasks"].items():
                if task.get("zou") and data["task_id"] == task["zou"]["id"]:
                    # Pop task
                    asset_tasks = doc["data"].get("tasks", {})
                    asset_tasks.pop(name)

                    # Delete task in DB
                    self.dbcon.update_one(
                        {"_id": doc["_id"]},
                        {"$set": {"data.tasks": asset_tasks}},
                    )
                    return


def start_listeners(login: str, password: str):
    """Start listeners to keep OpenPype up-to-date with Kitsu.

    Args:
        login (str): Kitsu user login
        password (str): Kitsu user password
    """

    # Connect to server
    listener = Listener(login, password)
    listener.start()
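The service is started through the start_listeners wrapper above. A minimal sketch, assuming KITSU_SERVER is set in the environment (URL and credentials are hypothetical):

    import os

    from openpype.modules.kitsu.utils.sync_service import start_listeners

    os.environ.setdefault("KITSU_SERVER", "https://kitsu.studio.com/api")  # hypothetical URL
    # Blocks and keeps the OP database in sync with incoming Kitsu events
    start_listeners("user@studio.com", "secret")  # hypothetical credentials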
389  openpype/modules/kitsu/utils/update_op_with_zou.py  Normal file
@@ -0,0 +1,389 @@
"""Functions to update OpenPype data using Kitsu DB (a.k.a Zou)."""

from copy import deepcopy
import re
from typing import Dict, List

from pymongo import DeleteOne, UpdateOne
import gazu
from gazu.task import (
    all_tasks_for_asset,
    all_tasks_for_shot,
)

from openpype.pipeline import AvalonMongoDB
from openpype.api import get_project_settings
from openpype.lib import create_project
from openpype.modules.kitsu.utils.credentials import validate_credentials


# Accepted naming pattern for OP
naming_pattern = re.compile("^[a-zA-Z0-9_.]*$")


def create_op_asset(gazu_entity: dict) -> dict:
    """Create OP asset dict from gazu entity.

    Args:
        gazu_entity (dict): Gazu entity
    """
    return {
        "name": gazu_entity["name"],
        "type": "asset",
        "schema": "openpype:asset-3.0",
        "data": {"zou": gazu_entity, "tasks": {}},
    }


def set_op_project(dbcon: AvalonMongoDB, project_id: str):
    """Set project context.

    Args:
        dbcon (AvalonMongoDB): Connection to DB
        project_id (str): Project zou ID
    """
    project = gazu.project.get_project(project_id)
    project_name = project["name"]
    dbcon.Session["AVALON_PROJECT"] = project_name


def update_op_assets(
    dbcon: AvalonMongoDB,
    project_doc: dict,
    entities_list: List[dict],
    asset_doc_ids: Dict[str, dict],
) -> List[Dict[str, dict]]:
    """Update OpenPype assets.
    Set 'data' and 'parent' fields.

    Args:
        dbcon (AvalonMongoDB): Connection to DB
        project_doc (dict): Project doc
        entities_list (List[dict]): List of zou entities to update
        asset_doc_ids (Dict[str, dict]): Dicts of [{zou_id: asset_doc}, ...]

    Returns:
        List[Dict[str, dict]]: List of (doc_id, update_dict) tuples
    """
    project_name = project_doc["name"]
    project_module_settings = get_project_settings(project_name)["kitsu"]

    assets_with_update = []
    for item in entities_list:
        # Check asset exists
        item_doc = asset_doc_ids.get(item["id"])
        if not item_doc:  # Create asset
            op_asset = create_op_asset(item)
            insert_result = dbcon.insert_one(op_asset)
            item_doc = dbcon.find_one(
                {"type": "asset", "_id": insert_result.inserted_id}
            )

        # Update asset
        item_data = deepcopy(item_doc["data"])
        item_data.update(item.get("data") or {})
        item_data["zou"] = item

        # == Asset settings ==
        # Frame in, fallback on 0
        frame_in = int(item_data.get("frame_in") or 0)
        item_data["frameStart"] = frame_in
        item_data.pop("frame_in")
        # Frame out, fallback on frame_in + duration
        frames_duration = int(item.get("nb_frames") or 1)
        frame_out = (
            item_data["frame_out"]
            if item_data.get("frame_out")
            else frame_in + frames_duration
        )
        item_data["frameEnd"] = int(frame_out)
        item_data.pop("frame_out")
        # Fps, fallback to project's value when entity fps is deleted
        if not item_data.get("fps") and item_doc["data"].get("fps"):
            item_data["fps"] = project_doc["data"]["fps"]

        # Tasks
        tasks_list = []
        item_type = item["type"]
        if item_type == "Asset":
            tasks_list = all_tasks_for_asset(item)
        elif item_type == "Shot":
            tasks_list = all_tasks_for_shot(item)
            # TODO frame in and out
        item_data["tasks"] = {
            t["task_type_name"]: {"type": t["task_type_name"]}
            for t in tasks_list
        }

        # Get zou parent id for correct hierarchy
        # Use parent substitutes if existing
        substitute_parent_item = (
            item_data["parent_substitutes"][0]
            if item_data.get("parent_substitutes")
            else None
        )
        if substitute_parent_item:
            parent_zou_id = substitute_parent_item["parent_id"]
        else:
            parent_zou_id = (
                item.get("parent_id")
                or item.get("episode_id")
                or item.get("source_id")
            )  # TODO check consistency

        # Substitute Episode and Sequence by Shot
        substitute_item_type = (
            "shots"
            if item_type in ["Episode", "Sequence"]
            else f"{item_type.lower()}s"
        )
        entity_parent_folders = [
            f
            for f in project_module_settings["entities_root"]
            .get(substitute_item_type)
            .split("/")
            if f
        ]

        # Root parent folder if it exists
        visual_parent_doc_id = (
            asset_doc_ids[parent_zou_id]["_id"] if parent_zou_id else None
        )
        if visual_parent_doc_id is None:
            # Find root folder doc
            root_folder_doc = dbcon.find_one(
                {
                    "type": "asset",
                    "name": entity_parent_folders[-1],
                    "data.root_of": substitute_item_type,
                },
                ["_id"],
            )
            if root_folder_doc:
                visual_parent_doc_id = root_folder_doc["_id"]

        # Visual parent for hierarchy
        item_data["visualParent"] = visual_parent_doc_id

        # Add parents for hierarchy
        item_data["parents"] = []
        while parent_zou_id is not None:
            parent_doc = asset_doc_ids[parent_zou_id]
            item_data["parents"].insert(0, parent_doc["name"])

            # Get parent entity
            parent_entity = parent_doc["data"]["zou"]
            parent_zou_id = parent_entity["parent_id"]

        # Set root folders parents
        item_data["parents"] = entity_parent_folders + item_data["parents"]

        # Update 'data' fields that differ in the zou DB
        updated_data = {
            k: v for k, v in item_data.items() if item_doc["data"].get(k) != v
        }
        if updated_data or not item_doc.get("parent"):
            assets_with_update.append(
                (
                    item_doc["_id"],
                    {
                        "$set": {
                            "name": item["name"],
                            "data": item_data,
                            "parent": asset_doc_ids[item["project_id"]]["_id"],
                        }
                    },
                )
            )

    return assets_with_update


def write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne:
    """Write gazu project to OP database.
    Create project if it doesn't exist.

    Args:
        project (dict): Gazu project
        dbcon (AvalonMongoDB): DB to create project in

    Returns:
        UpdateOne: Update instance for the project
    """
    project_name = project["name"]
    project_doc = dbcon.database[project_name].find_one({"type": "project"})
    if not project_doc:
        print(f"Creating project '{project_name}'")
        project_doc = create_project(project_name, project_name, dbcon=dbcon)

    # Project data and tasks
    project_data = project["data"] or {}

    # Build project code and update Kitsu
    project_code = project.get("code")
    if not project_code:
        project_code = project["name"].replace(" ", "_").lower()
        project["code"] = project_code

        # Update Zou
        gazu.project.update_project(project)

    # Update data
    project_data.update(
        {
            "code": project_code,
            "fps": project["fps"],
            "resolutionWidth": project["resolution"].split("x")[0],
            "resolutionHeight": project["resolution"].split("x")[1],
            "zou_id": project["id"],
        }
    )

    return UpdateOne(
        {"_id": project_doc["_id"]},
        {
            "$set": {
                "config.tasks": {
                    t["name"]: {"short_name": t.get("short_name", t["name"])}
                    for t in gazu.task.all_task_types_for_project(project)
                },
                "data": project_data,
            }
        },
    )


def sync_all_project(login: str, password: str):
    """Update all OP projects in DB with Zou data.

    Args:
        login (str): Kitsu user login
        password (str): Kitsu user password

    Raises:
        gazu.exception.AuthFailedException: Wrong user login and/or password
    """

    # Authenticate
    if not validate_credentials(login, password):
        raise gazu.exception.AuthFailedException(
            f"Kitsu authentication failed for login: '{login}'..."
        )

    # Iterate projects
    dbcon = AvalonMongoDB()
    dbcon.install()
    all_projects = gazu.project.all_open_projects()
    for project in all_projects:
        sync_project_from_kitsu(dbcon, project)


def sync_project_from_kitsu(dbcon: AvalonMongoDB, project: dict):
    """Update OP project in DB with Zou data.

    Args:
        dbcon (AvalonMongoDB): MongoDB connection
        project (dict): Project dict got using gazu.
    """
    bulk_writes = []

    # Get project from zou
    if not project:
        project = gazu.project.get_project_by_name(project["name"])

    print(f"Synchronizing {project['name']}...")

    # Get all assets from zou
    all_assets = gazu.asset.all_assets_for_project(project)
    all_episodes = gazu.shot.all_episodes_for_project(project)
    all_seqs = gazu.shot.all_sequences_for_project(project)
    all_shots = gazu.shot.all_shots_for_project(project)
    all_entities = [
        item
        for item in all_assets + all_episodes + all_seqs + all_shots
        if naming_pattern.match(item["name"])
    ]

    # Sync project. Create it if it doesn't exist
    bulk_writes.append(write_project_to_op(project, dbcon))

    # Try to find project document
    dbcon.Session["AVALON_PROJECT"] = project["name"]
    project_doc = dbcon.find_one({"type": "project"})

    # Query all assets of the local project
    zou_ids_and_asset_docs = {
        asset_doc["data"]["zou"]["id"]: asset_doc
        for asset_doc in dbcon.find({"type": "asset"})
        if asset_doc["data"].get("zou", {}).get("id")
    }
    zou_ids_and_asset_docs[project["id"]] = project_doc

    # Create entities root folders
    project_module_settings = get_project_settings(project["name"])["kitsu"]
    for entity_type, root in project_module_settings["entities_root"].items():
        parent_folders = root.split("/")
        direct_parent_doc = None
        for i, folder in enumerate(parent_folders, 1):
            parent_doc = dbcon.find_one(
                {"type": "asset", "name": folder, "data.root_of": entity_type}
            )
            if not parent_doc:
                direct_parent_doc = dbcon.insert_one(
                    {
                        "name": folder,
                        "type": "asset",
                        "schema": "openpype:asset-3.0",
                        "data": {
                            "root_of": entity_type,
                            "parents": parent_folders[:i],
                            "visualParent": direct_parent_doc,
                            "tasks": {},
                        },
                    }
                )

    # Create
    to_insert = []
    to_insert.extend(
        [
            create_op_asset(item)
            for item in all_entities
            if item["id"] not in zou_ids_and_asset_docs.keys()
        ]
    )
    if to_insert:
        # Insert doc in DB
        dbcon.insert_many(to_insert)

        # Update existing docs
        zou_ids_and_asset_docs.update(
            {
                asset_doc["data"]["zou"]["id"]: asset_doc
                for asset_doc in dbcon.find({"type": "asset"})
                if asset_doc["data"].get("zou")
            }
        )

    # Update
    bulk_writes.extend(
        [
            UpdateOne({"_id": id}, update)
            for id, update in update_op_assets(
                dbcon, project_doc, all_entities, zou_ids_and_asset_docs
            )
        ]
    )

    # Delete
    diff_assets = set(zou_ids_and_asset_docs.keys()) - {
        e["id"] for e in all_entities + [project]
    }
    if diff_assets:
        bulk_writes.extend(
            [
                DeleteOne(zou_ids_and_asset_docs[asset_id])
                for asset_id in diff_assets
            ]
        )

    # Write into DB
    if bulk_writes:
        dbcon.bulk_write(bulk_writes)
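A one-shot synchronization can be run through sync_all_project. A hedged sketch of a full Kitsu-to-OpenPype sync (the credentials are hypothetical):

    from openpype.modules.kitsu.utils.update_op_with_zou import sync_all_project

    # Pulls every open Kitsu project into the OpenPype database in one pass
    sync_all_project("user@studio.com", "secret")  # hypothetical credentials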
262  openpype/modules/kitsu/utils/update_zou_with_op.py  Normal file
@@ -0,0 +1,262 @@
"""Functions to update Kitsu DB (a.k.a Zou) using OpenPype Data."""

import re
from typing import List

import gazu
from pymongo import UpdateOne

from openpype.pipeline import AvalonMongoDB
from openpype.api import get_project_settings
from openpype.modules.kitsu.utils.credentials import validate_credentials


def sync_zou(login: str, password: str):
    """Synchronize Zou database (Kitsu backend) with openpype database.

    This is a utility function to help update zou data with OP's; it may
    not handle all cases correctly and human intervention might be
    required afterwards.
    It works better if the OP DB has been previously synchronized from
    zou/kitsu.

    Args:
        login (str): Kitsu user login
        password (str): Kitsu user password

    Raises:
        gazu.exception.AuthFailedException: Wrong user login and/or password
    """

    # Authenticate
    if not validate_credentials(login, password):
        raise gazu.exception.AuthFailedException(
            f"Kitsu authentication failed for login: '{login}'..."
        )

    # Iterate projects
    dbcon = AvalonMongoDB()
    dbcon.install()

    op_projects = [p for p in dbcon.projects()]
    for project_doc in op_projects:
        sync_zou_from_op_project(project_doc["name"], dbcon, project_doc)


def sync_zou_from_op_project(
    project_name: str, dbcon: AvalonMongoDB, project_doc: dict = None
) -> List[UpdateOne]:
    """Update Zou project with OP data.

    Args:
        project_name (str): Name of project to sync
        dbcon (AvalonMongoDB): MongoDB connection
        project_doc (dict, optional): Project doc to sync
    """
    # Get project doc if not provided
    if not project_doc:
        project_doc = dbcon.database[project_name].find_one(
            {"type": "project"}
        )

    # Get all entities from zou
    print(f"Synchronizing {project_name}...")
    zou_project = gazu.project.get_project_by_name(project_name)

    # Create project
    if zou_project is None:
        raise RuntimeError(
            f"Project '{project_name}' doesn't exist in Zou database, "
            "please create it in Kitsu and add OpenPype user to it before "
            "running synchronization."
        )

    # Update project settings and data
    if project_doc["data"]:
        zou_project.update(
            {
                "code": project_doc["data"]["code"],
                "fps": project_doc["data"]["fps"],
                "resolution": f"{project_doc['data']['resolutionWidth']}"
                f"x{project_doc['data']['resolutionHeight']}",
            }
        )
        gazu.project.update_project_data(zou_project, data=project_doc["data"])
        gazu.project.update_project(zou_project)

    asset_types = gazu.asset.all_asset_types()
    all_assets = gazu.asset.all_assets_for_project(zou_project)
    all_episodes = gazu.shot.all_episodes_for_project(zou_project)
    all_seqs = gazu.shot.all_sequences_for_project(zou_project)
    all_shots = gazu.shot.all_shots_for_project(zou_project)
    all_entities_ids = {
        e["id"] for e in all_episodes + all_seqs + all_shots + all_assets
    }

    # Query all assets of the local project
    project_module_settings = get_project_settings(project_name)["kitsu"]
    dbcon.Session["AVALON_PROJECT"] = project_name
    asset_docs = {
        asset_doc["_id"]: asset_doc
        for asset_doc in dbcon.find({"type": "asset"})
    }

    # Create new assets
    new_assets_docs = [
        doc
        for doc in asset_docs.values()
        if doc["data"].get("zou", {}).get("id") not in all_entities_ids
    ]
    naming_pattern = project_module_settings["entities_naming_pattern"]
    regex_ep = re.compile(
        r"(.*{}.*)|(.*{}.*)|(.*{}.*)".format(
            naming_pattern["shot"].replace("#", ""),
            naming_pattern["sequence"].replace("#", ""),
            naming_pattern["episode"].replace("#", ""),
        ),
        re.IGNORECASE,
    )
    bulk_writes = []
    for doc in new_assets_docs:
        visual_parent_id = doc["data"]["visualParent"]
        parent_substitutes = []

        # Match asset type by its name
        match = regex_ep.match(doc["name"])
        if not match:  # Asset
            new_entity = gazu.asset.new_asset(
                zou_project, asset_types[0], doc["name"]
            )
        # Match case in shot < sequence < episode order to support
        # composed names like 'ep01_sq01_sh01'
        elif match.group(1):  # Shot
            # Match and check parent doc
            parent_doc = asset_docs[visual_parent_id]
            zou_parent_id = parent_doc["data"]["zou"]["id"]
            if parent_doc["data"].get("zou", {}).get("type") != "Sequence":
                # Substitute name
                digits_padding = naming_pattern["sequence"].count("#")
                episode_name = naming_pattern["episode"].replace(
                    "#" * digits_padding, "1".zfill(digits_padding)
                )
                sequence_name = naming_pattern["sequence"].replace(
                    "#" * digits_padding, "1".zfill(digits_padding)
                )
                substitute_sequence_name = f"{episode_name}_{sequence_name}"

                # Warn
                print(
                    f"Shot {doc['name']} must be parented to a Sequence "
                    "in Kitsu. "
                    f"Creating automatically one substitute sequence "
                    f"called {substitute_sequence_name} in Kitsu..."
                )

                # Create new sequence and set it as substitute
                created_sequence = gazu.shot.new_sequence(
                    zou_project,
                    substitute_sequence_name,
                    episode=zou_parent_id,
                )
                gazu.shot.update_sequence_data(
                    created_sequence, {"is_substitute": True}
                )
                parent_substitutes.append(created_sequence)

                # Update parent ID
                zou_parent_id = created_sequence["id"]

            # Create shot
            new_entity = gazu.shot.new_shot(
                zou_project,
                zou_parent_id,
                doc["name"],
                frame_in=doc["data"]["frameStart"],
                frame_out=doc["data"]["frameEnd"],
                nb_frames=doc["data"]["frameEnd"] - doc["data"]["frameStart"],
            )

        elif match.group(2):  # Sequence
            parent_doc = asset_docs[visual_parent_id]
            new_entity = gazu.shot.new_sequence(
                zou_project,
                doc["name"],
                episode=parent_doc["data"]["zou"]["id"],
            )

        elif match.group(3):  # Episode
            new_entity = gazu.shot.new_episode(zou_project, doc["name"])

        # Update doc with zou id
        doc["data"].update(
            {
                "visualParent": visual_parent_id,
                "zou": new_entity,
            }
        )
        bulk_writes.append(
            UpdateOne(
                {"_id": doc["_id"]},
                {
                    "$set": {
                        "data.visualParent": visual_parent_id,
                        "data.zou": new_entity,
                        "data.parent_substitutes": parent_substitutes,
                    }
                },
            )
        )

    # Update assets
    all_tasks_types = {t["name"]: t for t in gazu.task.all_task_types()}
    assets_docs_to_update = [
        doc
        for doc in asset_docs.values()
        if doc["data"].get("zou", {}).get("id") in all_entities_ids
    ]
    for doc in assets_docs_to_update:
        zou_id = doc["data"]["zou"]["id"]
        if zou_id:
            # Data
            entity_data = {}
            frame_in = doc["data"].get("frameStart")
            frame_out = doc["data"].get("frameEnd")
            if frame_in or frame_out:
                entity_data.update(
                    {
                        "data": {
                            "frame_in": frame_in,
                            "frame_out": frame_out,
                        },
                        "nb_frames": frame_out - frame_in,
                    }
                )
            entity = gazu.raw.update("entities", zou_id, entity_data)

            # Tasks
            all_tasks_func = getattr(
                gazu.task, f"all_tasks_for_{entity['type'].lower()}"
            )
            entity_tasks = {t["name"] for t in all_tasks_func(entity)}
            for task_name in doc["data"]["tasks"].keys():
                # Create only if new
                if task_name not in entity_tasks:
                    task_type = all_tasks_types.get(task_name)

                    # Create non existing task
                    if not task_type:
                        task_type = gazu.task.new_task_type(task_name)
                        all_tasks_types[task_name] = task_type

                    # New task for entity
                    gazu.task.new_task(entity, task_type)

    # Delete
    deleted_entities = all_entities_ids - {
        asset_doc["data"].get("zou", {}).get("id")
        for asset_doc in asset_docs.values()
    }
    for entity_id in deleted_entities:
        gazu.raw.delete(f"data/entities/{entity_id}")

    # Write into DB
    if bulk_writes:
        dbcon.bulk_write(bulk_writes)
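The reverse direction is covered by sync_zou above. A minimal sketch (credentials are hypothetical again):

    from openpype.modules.kitsu.utils.update_zou_with_op import sync_zou

    # Pushes OpenPype project structure back into the Kitsu (Zou) database
    sync_zou("user@studio.com", "secret")  # hypothetical credentials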
@@ -18,6 +18,16 @@ class InstancePlugin(pyblish.api.InstancePlugin):
        super(InstancePlugin, cls).process(cls, *args, **kwargs)


class Integrator(InstancePlugin):
    """Integrator base class.

    Wraps pyblish instance plugin. Targets are set to "local", which means
    all integrators run on "local" publishes by default.
    A "farm" target can be used for integrators that should run on a farm.
    """
    targets = ["local"]


class Extractor(InstancePlugin):
    """Extractor base class.

@@ -28,6 +38,8 @@ class Extractor(InstancePlugin):

    """

    targets = ["local"]

    order = 2.0

    def staging_dir(self, instance):
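To illustrate the targets mechanism introduced above: a subclass only needs to override the class attribute to opt into farm publishes. A hypothetical sketch (the class name, body, and import location are illustrative, not part of the commit):

    from openpype.pipeline.publish import Integrator  # assumed import location

    class IntegrateOnFarm(Integrator):
        """Hypothetical integrator that only runs on farm publishes."""
        label = "Integrate On Farm"
        targets = ["farm"]  # overrides the default ["local"]

        def process(self, instance):
            self.log.info("Running on a farm publish only.")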
@@ -33,10 +33,6 @@ class CollectHierarchy(pyblish.api.ContextPlugin):
            family = instance.data["family"]
            families = instance.data["families"]

            # filter out all inappropriate instances
            if not instance.data["publish"]:
                continue

            # exclude families other than self.families, with intersection
            if not set(self.families).intersection(set(families + [family])):
                continue
@@ -41,6 +41,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin):
        "rig",
        "plate",
        "look",
        "mvLook",
        "yetiRig",
        "yeticache",
        "nukenodes",
@@ -1,7 +1,5 @@
from copy import deepcopy

import pyblish.api

from openpype.pipeline import legacy_io
@@ -17,33 +15,16 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):
        if "hierarchyContext" not in context.data:
            self.log.info("skipping IntegrateHierarchyToAvalon")
            return
        hierarchy_context = deepcopy(context.data["hierarchyContext"])

        if not legacy_io.Session:
            legacy_io.install()

        active_assets = []
        # filter only the active publishing instances
        for instance in context:
            if instance.data.get("publish") is False:
                continue

            if not instance.data.get("asset"):
                continue

            active_assets.append(instance.data["asset"])

        # remove duplicates in list
        self.active_assets = list(set(active_assets))
        self.log.debug("__ self.active_assets: {}".format(self.active_assets))

        hierarchy_context = self._get_assets(hierarchy_context)

        hierarchy_context = self._get_active_assets(context)
        self.log.debug("__ hierarchy_context: {}".format(hierarchy_context))
        input_data = context.data["hierarchyContext"] = hierarchy_context

        self.project = None
        self.import_to_avalon(input_data)
        self.import_to_avalon(hierarchy_context)

    def import_to_avalon(self, input_data, parent=None):
        for name in input_data:
@@ -183,23 +164,40 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin):

        return legacy_io.find_one({"_id": entity_id})

    def _get_assets(self, input_dict):
    def _get_active_assets(self, context):
        """Returns only the asset dictionary.

        Usually the last part of a deep dictionary which
        does not have any children.
        """
        input_dict_copy = deepcopy(input_dict)

        for key in input_dict.keys():
            self.log.debug("__ key: {}".format(key))
            # check if child key is available
            if input_dict[key].get("childs"):
                # loop deeper
                input_dict_copy[key]["childs"] = self._get_assets(
                    input_dict[key]["childs"])
            else:
                # filter out unwanted assets
                if key not in self.active_assets:
        def get_pure_hierarchy_data(input_dict):
            input_dict_copy = deepcopy(input_dict)
            for key in input_dict.keys():
                self.log.debug("__ key: {}".format(key))
                # check if child key is available
                if input_dict[key].get("childs"):
                    # loop deeper
                    input_dict_copy[
                        key]["childs"] = get_pure_hierarchy_data(
                            input_dict[key]["childs"])
                elif key not in active_assets:
                    input_dict_copy.pop(key, None)
            return input_dict_copy

            return input_dict_copy
        hierarchy_context = context.data["hierarchyContext"]

        active_assets = []
        # filter only the active publishing instances
        for instance in context:
            if instance.data.get("publish") is False:
                continue

            if not instance.data.get("asset"):
                continue

            active_assets.append(instance.data["asset"])

        # remove duplicates in list
        active_assets = list(set(active_assets))
        self.log.debug("__ active_assets: {}".format(active_assets))

        return get_pure_hierarchy_data(hierarchy_context)
@@ -173,7 +173,6 @@ class ExtractReviewSlate(openpype.api.Extractor):
        self.log.debug("Slate Timecode: `{}`".format(
            offset_timecode
        ))
        input_args.extend(["-timecode", str(offset_timecode)])

        if use_legacy_code:
            format_args = []
@@ -189,7 +188,6 @@ class ExtractReviewSlate(openpype.api.Extractor):

        # make sure colors are correct
        output_args.extend([
            "-vf", "scale=out_color_matrix=bt709",
            "-color_primaries", "bt709",
            "-color_trc", "bt709",
            "-colorspace", "bt709",
@@ -230,6 +228,7 @@ class ExtractReviewSlate(openpype.api.Extractor):

        scaling_arg = (
            "scale={0}x{1}:flags=lanczos"
            ":out_color_matrix=bt709"
            ",pad={2}:{3}:{4}:{5}:black"
            ",setsar=1"
            ",fps={6}"
@@ -109,8 +109,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
        "usd",
        "staticMesh",
        "skeletalMesh",
        "usdComposition",
        "usdOverride",
        "mvLook",
        "mvUsd",
        "mvUsdComposition",
        "mvUsdOverride",
        "simpleUnrealTexture"
    ]
    exclude_families = ["render.farm"]
@@ -139,6 +141,10 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
                ef, instance.data["family"], instance.data["families"]))
            return

        # instance should be published on a farm
        if instance.data.get("farm"):
            return

        self.integrated_file_sizes = {}
        try:
            self.register(instance)
@@ -144,6 +144,7 @@ class PypeCommands:
        pyblish.api.register_target("farm")

        os.environ["OPENPYPE_PUBLISH_DATA"] = os.pathsep.join(paths)
        os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib

        log.info("Running publish ...")
@@ -173,9 +174,11 @@ class PypeCommands:
                      user_email, targets=None):
        """Opens installed variant of 'host' and runs remote publish there.

        Eventually should be yanked out to Webpublisher cli.

        Currently implemented and tested for Photoshop where customer
        wants to process uploaded .psd file and publish collected layers
        from there.
        from there. Triggered by Webpublisher.

        Checks if no other batches are running (status == 'in_progress').
        If so, it sleeps for SLEEP (this is a separate process),
@@ -273,7 +276,8 @@ class PypeCommands:
    def remotepublish(project, batch_path, user_email, targets=None):
        """Start headless publishing.

        Used to publish rendered assets, workfiles etc.
        Used to publish rendered assets, workfiles etc. via Webpublisher.
        Eventually should be yanked out to Webpublisher cli.

        Publish uses json from the passed paths argument.
@@ -309,6 +313,7 @@ class PypeCommands:
        os.environ["AVALON_PROJECT"] = project
        os.environ["AVALON_APP"] = host_name
        os.environ["USER_EMAIL"] = user_email
        os.environ["HEADLESS_PUBLISH"] = 'true'  # to use in app lib

        pyblish.api.register_host(host_name)
@@ -331,9 +336,12 @@ class PypeCommands:
        log.info("Publish finished.")

    @staticmethod
    def extractenvironments(
        output_json_path, project, asset, task, app, env_group
    ):
    def extractenvironments(output_json_path, project, asset, task, app,
                            env_group):
        """Produces json file with environment based on project and app.

        Called by Deadline plugin to propagate environment into render jobs.
        """
        if all((project, asset, task, app)):
            from openpype.api import get_app_environments_for_context
            env = get_app_environments_for_context(
11  openpype/scripts/remote_publish.py  Normal file
@@ -0,0 +1,11 @@
try:
    from openpype.api import Logger
    import openpype.lib.remote_publish
except ImportError as exc:
    # Ensure Deadline fails by outputting an error that contains "Fatal Error:"
    raise ImportError("Fatal Error: %s" % exc)

if __name__ == "__main__":
    # Perform remote publish with thorough error checking
    log = Logger.get_logger(__name__)
    openpype.lib.remote_publish.publish(log, raise_error=True)
@@ -109,6 +109,13 @@
                "Omitted"
            ],
            "name_sorting": false
        },
        "transfer_values_of_hierarchical_attributes": {
            "enabled": true,
            "role_list": [
                "Administrator",
                "Project manager"
            ]
        }
    },
    "user_handlers": {
17  openpype/settings/defaults/project_settings/kitsu.json  Normal file
@@ -0,0 +1,17 @@
{
    "entities_root": {
        "assets": "Assets",
        "shots": "Shots"
    },
    "entities_naming_pattern": {
        "episode": "E##",
        "sequence": "SQ##",
        "shot": "SH##"
    },
    "publish": {
        "IntegrateKitsuNote": {
            "set_status_note": false,
            "note_status_shortname": "wfa"
        }
    }
}
@@ -27,6 +27,34 @@
            }
        ]
    },
    "gizmo": [
        {
            "toolbar_menu_name": "OpenPype Gizmo",
            "gizmo_source_dir": {
                "windows": [],
                "darwin": [],
                "linux": []
            },
            "toolbar_icon_path": {
                "windows": "",
                "darwin": "",
                "linux": ""
            },
            "gizmo_definition": [
                {
                    "gizmo_toolbar_path": "/path/to/menu",
                    "sub_gizmo_list": [
                        {
                            "sourcetype": "python",
                            "title": "Gizmo Note",
                            "command": "nuke.nodes.StickyNote(label='You can create your own toolbar menu in the Nuke GizmoMenu of OpenPype')",
                            "shortcut": ""
                        }
                    ]
                }
            ]
        }
    ],
    "create": {
        "CreateWriteRender": {
            "fpath_template": "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}",
@@ -137,6 +137,10 @@
            }
        }
    },
    "kitsu": {
        "enabled": false,
        "server": ""
    },
    "timers_manager": {
        "enabled": true,
        "auto_stop": true,
@@ -62,6 +62,10 @@
    {
        "type": "schema",
        "name": "schema_project_ftrack"
    },
    {
        "type": "schema",
        "name": "schema_project_kitsu"
    },
    {
        "type": "schema",
        "name": "schema_project_deadline"
@@ -369,6 +369,25 @@
                    "key": "name_sorting"
                }
            ]
        },
        {
            "type": "dict",
            "key": "transfer_values_of_hierarchical_attributes",
            "label": "Action to transfer hierarchical attribute values",
            "checkbox_key": "enabled",
            "children": [
                {
                    "type": "boolean",
                    "key": "enabled",
                    "label": "Enabled"
                },
                {
                    "type": "list",
                    "key": "role_list",
                    "label": "Roles",
                    "object_type": "text"
                }
            ]
        }
    ]
},
@@ -0,0 +1,78 @@
{
    "type": "dict",
    "key": "kitsu",
    "label": "Kitsu",
    "collapsible": true,
    "is_file": true,
    "children": [
        {
            "type": "dict",
            "key": "entities_root",
            "label": "Entities root folder",
            "children": [
                {
                    "type": "text",
                    "key": "assets",
                    "label": "Assets:"
                },
                {
                    "type": "text",
                    "key": "shots",
                    "label": "Shots (includes Episodes & Sequences if any):"
                }
            ]
        },
        {
            "type": "dict",
            "key": "entities_naming_pattern",
            "label": "Entities naming pattern",
            "children": [
                {
                    "type": "text",
                    "key": "episode",
                    "label": "Episode:"
                },
                {
                    "type": "text",
                    "key": "sequence",
                    "label": "Sequence:"
                },
                {
                    "type": "text",
                    "key": "shot",
                    "label": "Shot:"
                }
            ]
        },
        {
            "type": "dict",
            "collapsible": true,
            "key": "publish",
            "label": "Publish plugins",
            "children": [
                {
                    "type": "label",
                    "label": "Integrator"
                },
                {
                    "type": "dict",
                    "collapsible": true,
                    "key": "IntegrateKitsuNote",
                    "label": "Integrate Kitsu Note",
                    "children": [
                        {
                            "type": "boolean",
                            "key": "set_status_note",
                            "label": "Set status on note"
                        },
                        {
                            "type": "text",
                            "key": "note_status_shortname",
                            "label": "Note shortname"
                        }
                    ]
                }
            ]
        }
    ]
}
@@ -83,6 +83,10 @@
    {
        "type": "schema",
        "name": "schema_scriptsmenu"
    },
    {
        "type": "schema",
        "name": "schema_nuke_scriptsgizmo"
    },
    {
        "type": "dict",
        "collapsible": true,
@@ -0,0 +1,124 @@
{
    "type": "list",
    "key": "gizmo",
    "label": "Gizmo Menu",
    "is_group": true,
    "use_label_wrap": true,
    "object_type": {
        "type": "dict",
        "children": [
            {
                "type": "text",
                "key": "toolbar_menu_name",
                "label": "Toolbar Menu Name"
            },
            {
                "type": "path",
                "key": "gizmo_source_dir",
                "label": "Gizmo directory path",
                "multipath": true,
                "multiplatform": true
            },
            {
                "type": "collapsible-wrap",
                "label": "Options",
                "collapsible": true,
                "collapsed": true,
                "children": [
                    {
                        "type": "path",
                        "key": "toolbar_icon_path",
                        "label": "Toolbar Icon Path",
                        "multipath": false,
                        "multiplatform": true
                    },
                    {
                        "type": "splitter"
                    },
                    {
                        "type": "list",
                        "key": "gizmo_definition",
                        "label": "Gizmo definitions",
                        "use_label_wrap": true,
                        "object_type": {
                            "type": "dict",
                            "children": [
                                {
                                    "type": "text",
                                    "key": "gizmo_toolbar_path",
                                    "label": "Gizmo Menu Path"
                                },
                                {
                                    "type": "list",
                                    "key": "sub_gizmo_list",
                                    "label": "Sub Gizmo List",
                                    "use_label_wrap": true,
                                    "object_type": {
                                        "type": "dict-conditional",
                                        "enum_key": "sourcetype",
                                        "enum_label": "Type of usage",
                                        "enum_children": [
                                            {
                                                "key": "python",
                                                "label": "Python",
                                                "children": [
                                                    {
                                                        "type": "text",
                                                        "key": "title",
                                                        "label": "Title"
                                                    },
                                                    {
                                                        "type": "text",
                                                        "key": "command",
                                                        "label": "Python command"
                                                    },
                                                    {
                                                        "type": "text",
                                                        "key": "shortcut",
                                                        "label": "Hotkey"
                                                    }
                                                ]
                                            },
                                            {
                                                "key": "file",
                                                "label": "File",
                                                "children": [
                                                    {
                                                        "type": "text",
                                                        "key": "title",
                                                        "label": "Title"
                                                    },
                                                    {
                                                        "type": "text",
                                                        "key": "file_name",
                                                        "label": "Gizmo file name"
                                                    },
                                                    {
                                                        "type": "text",
                                                        "key": "shortcut",
                                                        "label": "Hotkey"
                                                    }
                                                ]
                                            },
                                            {
                                                "key": "separator",
                                                "label": "Separator",
                                                "children": [
                                                    {
                                                        "type": "text",
                                                        "key": "gizmo_toolbar_path",
                                                        "label": "Toolbar path"
                                                    }
                                                ]
                                            }
                                        ]
                                    }
                                }
                            ]
                        }
                    }
                ]
            }
        ]
    }
}
@@ -0,0 +1,23 @@
{
    "type": "dict",
    "key": "kitsu",
    "label": "Kitsu",
    "collapsible": true,
    "require_restart": true,
    "checkbox_key": "enabled",
    "children": [
        {
            "type": "boolean",
            "key": "enabled",
            "label": "Enabled"
        },
        {
            "type": "text",
            "key": "server",
            "label": "Server"
        },
        {
            "type": "splitter"
        }
    ]
}
@@ -44,6 +44,10 @@
    {
        "type": "schema",
        "name": "schema_ftrack"
    },
    {
        "type": "schema",
        "name": "schema_kitsu"
    },
    {
        "type": "dict",
        "key": "timers_manager",
@@ -105,6 +105,7 @@ class HostToolsHelper:
        loader_tool.show()
        loader_tool.raise_()
        loader_tool.activateWindow()
        loader_tool.showNormal()

        if use_context is None:
            use_context = False
@@ -180,6 +181,7 @@ class HostToolsHelper:
        # Pull window to the front.
        scene_inventory_tool.raise_()
        scene_inventory_tool.activateWindow()
        scene_inventory_tool.showNormal()

    def get_library_loader_tool(self, parent):
        """Create, cache and return library loader tool window."""
@@ -200,8 +202,10 @@ class HostToolsHelper:
        library_loader_tool.show()
        library_loader_tool.raise_()
        library_loader_tool.activateWindow()
        library_loader_tool.showNormal()
        library_loader_tool.refresh()

    def show_publish(self, parent=None):
        """Try showing the most desirable publish GUI

@@ -243,6 +247,11 @@ class HostToolsHelper:
        look_assigner_tool = self.get_look_assigner_tool(parent)
        look_assigner_tool.show()

        # Pull window to the front.
        look_assigner_tool.raise_()
        look_assigner_tool.activateWindow()
        look_assigner_tool.showNormal()

    def get_experimental_tools_dialog(self, parent=None):
        """Dialog of experimental tools.

@@ -270,6 +279,7 @@ class HostToolsHelper:
        dialog.show()
        dialog.raise_()
        dialog.activateWindow()
        dialog.showNormal()

    def get_tool_by_name(self, tool_name, parent=None, *args, **kwargs):
        """Show tool by its name.
3  openpype/vendor/python/python_2/certifi/__init__.py  vendored  Normal file
@@ -0,0 +1,3 @@
from .core import contents, where

__version__ = "2021.10.08"
12  openpype/vendor/python/python_2/certifi/__main__.py  vendored  Normal file
@@ -0,0 +1,12 @@
import argparse

from certifi import contents, where

parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()

if args.contents:
    print(contents())
else:
    print(where())
4362  openpype/vendor/python/python_2/certifi/cacert.pem  vendored  Normal file
File diff suppressed because it is too large
60  openpype/vendor/python/python_2/certifi/core.py  vendored  Normal file
@ -0,0 +1,60 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
certifi.py
|
||||
~~~~~~~~~~
|
||||
|
||||
This module returns the installation location of cacert.pem or its contents.
|
||||
"""
|
||||
import os
|
||||
|
||||
try:
|
||||
from importlib.resources import path as get_path, read_text
|
||||
|
||||
_CACERT_CTX = None
|
||||
_CACERT_PATH = None
|
||||
|
||||
def where():
|
||||
# This is slightly terrible, but we want to delay extracting the file
|
||||
# in cases where we're inside of a zipimport situation until someone
|
||||
# actually calls where(), but we don't want to re-extract the file
|
||||
# on every call of where(), so we'll do it once then store it in a
|
||||
# global variable.
|
||||
global _CACERT_CTX
|
||||
global _CACERT_PATH
|
||||
if _CACERT_PATH is None:
|
||||
# This is slightly janky, the importlib.resources API wants you to
|
||||
# manage the cleanup of this file, so it doesn't actually return a
|
||||
# path, it returns a context manager that will give you the path
|
||||
# when you enter it and will do any cleanup when you leave it. In
|
||||
# the common case of not needing a temporary file, it will just
|
||||
# return the file system location and the __exit__() is a no-op.
|
||||
#
|
||||
# We also have to hold onto the actual context manager, because
|
||||
# it will do the cleanup whenever it gets garbage collected, so
|
||||
# we will also store that at the global level as well.
|
||||
_CACERT_CTX = get_path("certifi", "cacert.pem")
|
||||
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
||||
|
||||
return _CACERT_PATH
|
||||
|
||||
|
||||
except ImportError:
|
||||
# This fallback will work for Python versions prior to 3.7 that lack the
|
||||
# importlib.resources module but relies on the existing `where` function
|
||||
# so won't address issues with environments like PyOxidizer that don't set
|
||||
# __file__ on modules.
|
||||
def read_text(_module, _path, encoding="ascii"):
|
||||
with open(where(), "r", encoding=encoding) as data:
|
||||
return data.read()
|
||||
|
||||
# If we don't have importlib.resources, then we will just do the old logic
|
||||
# of assuming we're on the filesystem and munge the path directly.
|
||||
def where():
|
||||
f = os.path.dirname(__file__)
|
||||
|
||||
return os.path.join(f, "cacert.pem")
|
||||
|
||||
|
||||
def contents():
|
||||
return read_text("certifi", "cacert.pem", encoding="ascii")
|
||||
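The lazy `__enter__()` dance above keeps `where()` cheap until first use while still working from inside a zipimport bundle. A minimal consumer sketch using only the standard `certifi`/`ssl` surface (nothing specific to this vendored copy):

```python
import ssl

import certifi  # resolves to this vendored copy when the vendor dir is on sys.path

# where() returns a real filesystem path; under importlib.resources the file is
# extracted once on first call and cached in the module-level globals above.
ctx = ssl.create_default_context(cafile=certifi.where())
print(ctx.cert_store_stats())  # the bundled root certificates are now loaded
```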
83 openpype/vendor/python/python_2/chardet/__init__.py vendored Normal file
@@ -0,0 +1,83 @@
######################## BEGIN LICENSE BLOCK ########################
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################


from .universaldetector import UniversalDetector
from .enums import InputState
from .version import __version__, VERSION


__all__ = ['UniversalDetector', 'detect', 'detect_all', '__version__', 'VERSION']


def detect(byte_str):
    """
    Detect the encoding of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{}'.format(type(byte_str)))
        else:
            byte_str = bytearray(byte_str)
    detector = UniversalDetector()
    detector.feed(byte_str)
    return detector.close()


def detect_all(byte_str):
    """
    Detect all the possible encodings of the given byte string.

    :param byte_str: The byte sequence to examine.
    :type byte_str: ``bytes`` or ``bytearray``
    """
    if not isinstance(byte_str, bytearray):
        if not isinstance(byte_str, bytes):
            raise TypeError('Expected object of type bytes or bytearray, got: '
                            '{}'.format(type(byte_str)))
        else:
            byte_str = bytearray(byte_str)

    detector = UniversalDetector()
    detector.feed(byte_str)
    detector.close()

    if detector._input_state == InputState.HIGH_BYTE:
        results = []
        for prober in detector._charset_probers:
            if prober.get_confidence() > detector.MINIMUM_THRESHOLD:
                charset_name = prober.charset_name
                lower_charset_name = prober.charset_name.lower()
                # Use Windows encoding name instead of ISO-8859 if we saw any
                # extra Windows-specific bytes
                if lower_charset_name.startswith('iso-8859'):
                    if detector._has_win_bytes:
                        charset_name = detector.ISO_WIN_MAP.get(lower_charset_name,
                                                                charset_name)
                results.append({
                    'encoding': charset_name,
                    'confidence': prober.get_confidence(),
                    'language': prober.language,
                })
        if len(results) > 0:
            return sorted(results, key=lambda result: -result['confidence'])

    return [detector.result]
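`detect()` and `detect_all()` are the package's public entry points; `detect()` returns the single best guess, while `detect_all()` keeps every prober above `MINIMUM_THRESHOLD`, best first. A quick usage sketch:

```python
import chardet

sample = u"Příliš žluťoučký kůň".encode("utf-8")
print(chardet.detect(sample))
# -> a dict like {'encoding': 'utf-8', 'confidence': 0.99, 'language': ''}

for guess in chardet.detect_all(sample):
    print(guess["encoding"], guess["confidence"])
```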
386 openpype/vendor/python/python_2/chardet/big5freq.py vendored Normal file
@@ -0,0 +1,386 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################

# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128  --> 0.42261
# 256  --> 0.57851
# 512  --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ration = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR

BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75

#Char to FreqOrder table
BIG5_TABLE_SIZE = 5376

BIG5_CHAR_TO_FREQ_ORDER = (
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
)
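The ratios quoted in the table header can be re-derived from the cumulative coverage figures; a small sanity-check sketch of that arithmetic:

```python
# Re-deriving the numbers quoted in the big5freq.py header comment.
ideal = 0.74851 / (1 - 0.74851)      # "Ideal Distribution Ratio", ~2.98
random_ratio = 512.0 / (5401 - 512)  # "Random Distribution Ration", ~0.105
# The module constant is "about 25% of Ideal":
print(round(ideal, 2), round(random_ratio, 3), round(0.25 * ideal, 2))
# -> 2.98 0.105 0.74  (cf. BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75)
```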
47 openpype/vendor/python/python_2/chardet/big5prober.py vendored Normal file
@@ -0,0 +1,47 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################

from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import BIG5_SM_MODEL


class Big5Prober(MultiByteCharSetProber):
    def __init__(self):
        super(Big5Prober, self).__init__()
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "Big5"

    @property
    def language(self):
        return "Chinese"
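`Big5Prober` is pure composition: the coding state machine validates byte sequences while the distribution analyzer scores them. A hedged sketch driving it directly (this is internal chardet API, used here only for illustration):

```python
from chardet.big5prober import Big5Prober

prober = Big5Prober()
# feed() is inherited from MultiByteCharSetProber; bytearray keeps byte
# indexing integer-based on both Python 2 and 3.
prober.feed(bytearray(u"繁體中文".encode("big5")))
print(prober.charset_name, prober.language, prober.get_confidence())
```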
233 openpype/vendor/python/python_2/chardet/chardistribution.py vendored Normal file
@@ -0,0 +1,233 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################

from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE,
                        EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE,
                        EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE,
                         GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE,
                       BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE,
                      JIS_TYPICAL_DISTRIBUTION_RATIO)


class CharDistributionAnalysis(object):
    ENOUGH_DATA_THRESHOLD = 1024
    SURE_YES = 0.99
    SURE_NO = 0.01
    MINIMUM_DATA_THRESHOLD = 3

    def __init__(self):
        # Mapping table to get frequency order from char order (get from
        # GetOrder())
        self._char_to_freq_order = None
        self._table_size = None  # Size of above table
        # This is a constant value which varies from language to language,
        # used in calculating confidence. See
        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
        # for further detail.
        self.typical_distribution_ratio = None
        self._done = None
        self._total_chars = None
        self._freq_chars = None
        self.reset()

    def reset(self):
        """reset analyser, clear any state"""
        # If this flag is set to True, detection is done and conclusion has
        # been made
        self._done = False
        self._total_chars = 0  # Total characters encountered
        # The number of characters whose frequency order is less than 512
        self._freq_chars = 0

    def feed(self, char, char_len):
        """feed a character with known length"""
        if char_len == 2:
            # we only care about 2-bytes character in our distribution analysis
            order = self.get_order(char)
        else:
            order = -1
        if order >= 0:
            self._total_chars += 1
            # order is valid
            if order < self._table_size:
                if 512 > self._char_to_freq_order[order]:
                    self._freq_chars += 1

    def get_confidence(self):
        """return confidence based on existing data"""
        # if we didn't receive any character in our consideration range,
        # return negative answer
        if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
            return self.SURE_NO

        if self._total_chars != self._freq_chars:
            r = (self._freq_chars / ((self._total_chars - self._freq_chars)
                 * self.typical_distribution_ratio))
            if r < self.SURE_YES:
                return r

        # normalize confidence (we don't want to be 100% sure)
        return self.SURE_YES

    def got_enough_data(self):
        # It is not necessary to receive all data to draw conclusion.
        # For charset detection, certain amount of data is enough
        return self._total_chars > self.ENOUGH_DATA_THRESHOLD

    def get_order(self, byte_str):
        # We do not handle characters based on the original encoding string,
        # but convert this encoding string to a number, here called order.
        # This allows multiple encodings of a language to share one frequency
        # table.
        return -1


class EUCTWDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        super(EUCTWDistributionAnalysis, self).__init__()
        self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
        self._table_size = EUCTW_TABLE_SIZE
        self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # for euc-TW encoding, we are interested
        #   first byte range: 0xc4 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # no validation needed here. State machine has done that
        first_char = byte_str[0]
        if first_char >= 0xC4:
            return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
        else:
            return -1


class EUCKRDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        super(EUCKRDistributionAnalysis, self).__init__()
        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
        self._table_size = EUCKR_TABLE_SIZE
        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # for euc-KR encoding, we are interested
        #   first byte range: 0xb0 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # no validation needed here. State machine has done that
        first_char = byte_str[0]
        if first_char >= 0xB0:
            return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
        else:
            return -1


class GB2312DistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        super(GB2312DistributionAnalysis, self).__init__()
        self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
        self._table_size = GB2312_TABLE_SIZE
        self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # for GB2312 encoding, we are interested
        #   first byte range: 0xb0 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # no validation needed here. State machine has done that
        first_char, second_char = byte_str[0], byte_str[1]
        if (first_char >= 0xB0) and (second_char >= 0xA1):
            return 94 * (first_char - 0xB0) + second_char - 0xA1
        else:
            return -1


class Big5DistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        super(Big5DistributionAnalysis, self).__init__()
        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
        self._table_size = BIG5_TABLE_SIZE
        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # for big5 encoding, we are interested
        #   first byte range: 0xa4 -- 0xfe
        #   second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
        # no validation needed here. State machine has done that
        first_char, second_char = byte_str[0], byte_str[1]
        if first_char >= 0xA4:
            if second_char >= 0xA1:
                return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
            else:
                return 157 * (first_char - 0xA4) + second_char - 0x40
        else:
            return -1


class SJISDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        super(SJISDistributionAnalysis, self).__init__()
        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
        self._table_size = JIS_TABLE_SIZE
        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # for sjis encoding, we are interested
        #   first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
        #   second byte range: 0x40 -- 0x7e, 0x81 -- oxfe
        # no validation needed here. State machine has done that
        first_char, second_char = byte_str[0], byte_str[1]
        if (first_char >= 0x81) and (first_char <= 0x9F):
            order = 188 * (first_char - 0x81)
        elif (first_char >= 0xE0) and (first_char <= 0xEF):
            order = 188 * (first_char - 0xE0 + 31)
        else:
            return -1
        order = order + second_char - 0x40
        if second_char > 0x7F:
            order = -1
        return order


class EUCJPDistributionAnalysis(CharDistributionAnalysis):
    def __init__(self):
        super(EUCJPDistributionAnalysis, self).__init__()
        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
        self._table_size = JIS_TABLE_SIZE
        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, byte_str):
        # for euc-JP encoding, we are interested
        #   first byte range: 0xa0 -- 0xfe
        #   second byte range: 0xa1 -- 0xfe
        # no validation needed here. State machine has done that
        char = byte_str[0]
        if char >= 0xA0:
            return 94 * (char - 0xA1) + byte_str[1] - 0xa1
        else:
            return -1
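`get_confidence()` above is essentially the share of frequently used characters, scaled by the language's typical distribution ratio. A small sketch driving the Big5 analyzer directly (again internal API, for illustration only):

```python
from chardet.chardistribution import Big5DistributionAnalysis

analyzer = Big5DistributionAnalysis()
data = bytearray(u"統一碼萬國碼單一碼".encode("big5"))
# Big5 characters are two bytes each; feed() wants one char plus its length.
for i in range(0, len(data), 2):
    analyzer.feed(data[i:i + 2], 2)
print(analyzer.get_confidence())  # rises toward SURE_YES as frequent chars accumulate
```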
107 openpype/vendor/python/python_2/chardet/charsetgroupprober.py vendored Normal file
@@ -0,0 +1,107 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################

from .enums import ProbingState
from .charsetprober import CharSetProber


class CharSetGroupProber(CharSetProber):
    def __init__(self, lang_filter=None):
        super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
        self._active_num = 0
        self.probers = []
        self._best_guess_prober = None

    def reset(self):
        super(CharSetGroupProber, self).reset()
        self._active_num = 0
        for prober in self.probers:
            if prober:
                prober.reset()
                prober.active = True
                self._active_num += 1
        self._best_guess_prober = None

    @property
    def charset_name(self):
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.charset_name

    @property
    def language(self):
        if not self._best_guess_prober:
            self.get_confidence()
            if not self._best_guess_prober:
                return None
        return self._best_guess_prober.language

    def feed(self, byte_str):
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                continue
            state = prober.feed(byte_str)
            if not state:
                continue
            if state == ProbingState.FOUND_IT:
                self._best_guess_prober = prober
                self._state = ProbingState.FOUND_IT
                return self.state
            elif state == ProbingState.NOT_ME:
                prober.active = False
                self._active_num -= 1
                if self._active_num <= 0:
                    self._state = ProbingState.NOT_ME
                    return self.state
        return self.state

    def get_confidence(self):
        state = self.state
        if state == ProbingState.FOUND_IT:
            return 0.99
        elif state == ProbingState.NOT_ME:
            return 0.01
        best_conf = 0.0
        self._best_guess_prober = None
        for prober in self.probers:
            if not prober:
                continue
            if not prober.active:
                self.logger.debug('%s not active', prober.charset_name)
                continue
            conf = prober.get_confidence()
            self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf)
            if best_conf < conf:
                best_conf = conf
                self._best_guess_prober = prober
        if not self._best_guess_prober:
            return 0.0
        return best_conf
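The group prober simply lets its children compete and reports whichever is currently most confident. A sketch using `MBCSGroupProber`, the concrete subclass chardet builds on this class (it ships in the same package but is not part of this diff):

```python
from chardet.mbcsgroupprober import MBCSGroupProber  # not in this diff; same package

group = MBCSGroupProber()
group.feed(bytearray(u"これは日本語の文章です".encode("shift_jis")))
# charset_name/language proxy to whichever child prober scored best so far.
print(group.charset_name, group.get_confidence())
```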
145
openpype/vendor/python/python_2/chardet/charsetprober.py
vendored
Normal file
145
openpype/vendor/python/python_2/chardet/charsetprober.py
vendored
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import logging
import re

from .enums import ProbingState


class CharSetProber(object):

    SHORTCUT_THRESHOLD = 0.95

    def __init__(self, lang_filter=None):
        self._state = None
        self.lang_filter = lang_filter
        self.logger = logging.getLogger(__name__)

    def reset(self):
        self._state = ProbingState.DETECTING

    @property
    def charset_name(self):
        return None

    def feed(self, buf):
        pass

    @property
    def state(self):
        return self._state

    def get_confidence(self):
        return 0.0

    @staticmethod
    def filter_high_byte_only(buf):
        buf = re.sub(b'([\x00-\x7F])+', b' ', buf)
        return buf

    @staticmethod
    def filter_international_words(buf):
        """
        We define three types of bytes:
        alphabet: English letters [a-zA-Z]
        international: international characters [\x80-\xFF]
        marker: everything else [^a-zA-Z\x80-\xFF]

        The input buffer can be thought of as a series of words delimited
        by markers. This function keeps only the words that contain at
        least one international character. All contiguous sequences of
        markers are replaced by a single space ASCII character.

        This filter applies to all scripts which do not use English characters.
        """
        filtered = bytearray()

        # This regex filters out only words that have at least one
        # international character. The word may include one marker character
        # at the end.
        words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?',
                           buf)

        for word in words:
            filtered.extend(word[:-1])

            # If the last character in the word is a marker, replace it with a
            # space as markers shouldn't affect our analysis (they are used
            # similarly across all languages and may thus have similar
            # frequencies).
            last_char = word[-1:]
            if not last_char.isalpha() and last_char < b'\x80':
                last_char = b' '
            filtered.extend(last_char)

        return filtered

    @staticmethod
    def filter_with_english_letters(buf):
        """
        Returns a copy of ``buf`` that retains only the sequences of English
        alphabet and high byte characters that are not between <> characters.
        Also retains English alphabet and high byte characters immediately
        before occurrences of >.

        This filter can be applied to all scripts which contain both English
        characters and extended ASCII characters, but is currently only used
        by ``Latin1Prober``.
        """
        filtered = bytearray()
        in_tag = False
        prev = 0

        for curr in range(len(buf)):
            # Slice here to get bytes instead of an int with Python 3
            buf_char = buf[curr:curr + 1]
            # Check if we're coming out of or entering an HTML tag
            if buf_char == b'>':
                in_tag = False
            elif buf_char == b'<':
                in_tag = True

            # If current character is not extended-ASCII and not alphabetic...
            if buf_char < b'\x80' and not buf_char.isalpha():
                # ...and we're not in a tag
                if curr > prev and not in_tag:
                    # Keep everything after last non-extended-ASCII,
                    # non-alphabetic character
                    filtered.extend(buf[prev:curr])
                    # Output a space to delimit stretch we kept
                    filtered.extend(b' ')
                prev = curr + 1

        # If we're not in a tag...
        if not in_tag:
            # Keep everything after last non-extended-ASCII, non-alphabetic
            # character
            filtered.extend(buf[prev:])

        return filtered
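A minimal usage sketch (not part of the vendored diff) of the two static filters above, assuming the chardet 3.x sources mirrored here are importable as ``chardet``; the byte strings are made up for illustration and the expected outputs follow the docstring semantics:

# Illustrative sketch: exercising CharSetProber's static filters.
from chardet.charsetprober import CharSetProber

# Words with at least one high byte survive; pure-ASCII words drop out and
# trailing markers become spaces.
buf = b'caf\xc3\xa9 plain na\xc3\xafve!'
print(CharSetProber.filter_international_words(buf))
# expected: bytearray(b'caf\xc3\xa9 na\xc3\xafve ')

# Letters inside <...> are discarded; a letter immediately before '>' is kept.
print(CharSetProber.filter_with_english_letters(b'<p>Hello</p> world'))
# expected: bytearray(b'p p world')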
1 openpype/vendor/python/python_2/chardet/cli/__init__.py vendored Normal file
@@ -0,0 +1 @@
84 openpype/vendor/python/python_2/chardet/cli/chardetect.py vendored Normal file
@@ -0,0 +1,84 @@
"""
|
||||
Script which takes one or more file paths and reports on their detected
|
||||
encodings
|
||||
|
||||
Example::
|
||||
|
||||
% chardetect somefile someotherfile
|
||||
somefile: windows-1252 with confidence 0.5
|
||||
someotherfile: ascii with confidence 1.0
|
||||
|
||||
If no paths are provided, it takes its input from stdin.
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
from chardet import __version__
|
||||
from chardet.compat import PY2
|
||||
from chardet.universaldetector import UniversalDetector
|
||||
|
||||
|
||||
def description_of(lines, name='stdin'):
|
||||
"""
|
||||
Return a string describing the probable encoding of a file or
|
||||
list of strings.
|
||||
|
||||
:param lines: The lines to get the encoding of.
|
||||
:type lines: Iterable of bytes
|
||||
:param name: Name of file or collection of lines
|
||||
:type name: str
|
||||
"""
|
||||
u = UniversalDetector()
|
||||
for line in lines:
|
||||
line = bytearray(line)
|
||||
u.feed(line)
|
||||
# shortcut out of the loop to save reading further - particularly useful if we read a BOM.
|
||||
if u.done:
|
||||
break
|
||||
u.close()
|
||||
result = u.result
|
||||
if PY2:
|
||||
name = name.decode(sys.getfilesystemencoding(), 'ignore')
|
||||
if result['encoding']:
|
||||
return '{}: {} with confidence {}'.format(name, result['encoding'],
|
||||
result['confidence'])
|
||||
else:
|
||||
return '{}: no result'.format(name)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
"""
|
||||
Handles command line arguments and gets things started.
|
||||
|
||||
:param argv: List of arguments, as if specified on the command-line.
|
||||
If None, ``sys.argv[1:]`` is used instead.
|
||||
:type argv: list of str
|
||||
"""
|
||||
# Get command line arguments
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Takes one or more file paths and reports their detected \
|
||||
encodings")
|
||||
parser.add_argument('input',
|
||||
help='File whose encoding we would like to determine. \
|
||||
(default: stdin)',
|
||||
type=argparse.FileType('rb'), nargs='*',
|
||||
default=[sys.stdin if PY2 else sys.stdin.buffer])
|
||||
parser.add_argument('--version', action='version',
|
||||
version='%(prog)s {}'.format(__version__))
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
for f in args.input:
|
||||
if f.isatty():
|
||||
print("You are running chardetect interactively. Press " +
|
||||
"CTRL-D twice at the start of a blank line to signal the " +
|
||||
"end of your input. If you want help, run chardetect " +
|
||||
"--help\n", file=sys.stderr)
|
||||
print(description_of(f, f.name))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
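Besides the CLI entry point, ``description_of`` can be driven directly. A short sketch (not part of the diff), where ``notes.txt`` is just a made-up label for the report and the confidence shown is indicative only:

# Illustrative sketch: detecting the encoding of in-memory byte lines.
from chardet.cli.chardetect import description_of

lines = [b'caf\xc3\xa9\n', b'na\xc3\xafve\n']  # UTF-8 encoded text
print(description_of(lines, name='notes.txt'))
# e.g. "notes.txt: utf-8 with confidence 0.99" (confidence varies with input)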
88 openpype/vendor/python/python_2/chardet/codingstatemachine.py vendored Normal file
@@ -0,0 +1,88 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import logging

from .enums import MachineState


class CodingStateMachine(object):
    """
    A state machine to verify a byte sequence for a particular encoding. For
    each byte the detector receives, it will feed that byte to every active
    state machine available, one byte at a time. The state machine changes
    its state based on its previous state and the byte it receives. There are
    3 states in a state machine that are of interest to an auto-detector:

    START state: This is the state to start with, or the state after a legal
      byte sequence (i.e. a valid code point) for a character has been
      identified.

    ME state: This indicates that the state machine identified a byte
      sequence that is specific to the charset it is designed for and that
      there is no other possible encoding which can contain this byte
      sequence. This will lead to an immediate positive answer for the
      detector.

    ERROR state: This indicates the state machine identified an illegal byte
      sequence for that encoding. This will lead to an immediate negative
      answer for this encoding. The detector will exclude this encoding from
      consideration from here on.
    """

    def __init__(self, sm):
        self._model = sm
        self._curr_byte_pos = 0
        self._curr_char_len = 0
        self._curr_state = None
        self.logger = logging.getLogger(__name__)
        self.reset()

    def reset(self):
        self._curr_state = MachineState.START

    def next_state(self, c):
        # for each byte we get its class;
        # if it is the first byte, we also get the byte length
        byte_class = self._model['class_table'][c]
        if self._curr_state == MachineState.START:
            self._curr_byte_pos = 0
            self._curr_char_len = self._model['char_len_table'][byte_class]
        # from the byte's class and the state_table, we get its next state
        curr_state = (self._curr_state * self._model['class_factor']
                      + byte_class)
        self._curr_state = self._model['state_table'][curr_state]
        self._curr_byte_pos += 1
        return self._curr_state

    def get_current_charlen(self):
        return self._curr_char_len

    def get_coding_state_machine(self):
        return self._model['name']

    @property
    def language(self):
        return self._model['language']
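A minimal sketch (not part of the diff) of stepping one of these machines by hand, using the HZ-GB-2312 model from escsm.py, which is added later in this diff; the transitions below follow the HZ class and state tables:

# Illustrative sketch: driving CodingStateMachine with the HZ-GB-2312 model.
from chardet.codingstatemachine import CodingStateMachine
from chardet.enums import MachineState
from chardet.escsm import HZ_SM_MODEL

sm = CodingStateMachine(HZ_SM_MODEL)
state = None
for byte in bytearray(b'~{~}'):  # a complete HZ shift-in/shift-out pair
    state = sm.next_state(byte)
print(state == MachineState.ITS_ME)  # True: this sequence is unique to HZ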
36 openpype/vendor/python/python_2/chardet/compat.py vendored Normal file
@@ -0,0 +1,36 @@
######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
#   Dan Blanchard
#   Ian Cordasco
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

import sys


if sys.version_info < (3, 0):
    PY2 = True
    PY3 = False
    string_types = (str, unicode)
    text_type = unicode
    iteritems = dict.iteritems
else:
    PY2 = False
    PY3 = True
    string_types = (bytes, str)
    text_type = str
    iteritems = dict.items
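A short sketch (not part of the diff) of the shim in use; the dict contents are arbitrary:

# Illustrative sketch: version-agnostic dict iteration via the compat shim.
from chardet.compat import PY2, iteritems

for key, value in iteritems({'encoding': 'utf-8', 'confidence': 0.99}):
    print(key, value)
print('running under Python 2?', PY2)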
49 openpype/vendor/python/python_2/chardet/cp949prober.py vendored Normal file
@@ -0,0 +1,49 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .chardistribution import EUCKRDistributionAnalysis
from .codingstatemachine import CodingStateMachine
from .mbcharsetprober import MultiByteCharSetProber
from .mbcssm import CP949_SM_MODEL


class CP949Prober(MultiByteCharSetProber):
    def __init__(self):
        super(CP949Prober, self).__init__()
        self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
        # NOTE: CP949 is a superset of EUC-KR, so the distribution should
        #       not be different.
        self.distribution_analyzer = EUCKRDistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "CP949"

    @property
    def language(self):
        return "Korean"
76 openpype/vendor/python/python_2/chardet/enums.py vendored Normal file
@@ -0,0 +1,76 @@
"""
|
||||
All of the Enums that are used throughout the chardet package.
|
||||
|
||||
:author: Dan Blanchard (dan.blanchard@gmail.com)
|
||||
"""
|
||||
|
||||
|
||||
class InputState(object):
|
||||
"""
|
||||
This enum represents the different states a universal detector can be in.
|
||||
"""
|
||||
PURE_ASCII = 0
|
||||
ESC_ASCII = 1
|
||||
HIGH_BYTE = 2
|
||||
|
||||
|
||||
class LanguageFilter(object):
|
||||
"""
|
||||
This enum represents the different language filters we can apply to a
|
||||
``UniversalDetector``.
|
||||
"""
|
||||
CHINESE_SIMPLIFIED = 0x01
|
||||
CHINESE_TRADITIONAL = 0x02
|
||||
JAPANESE = 0x04
|
||||
KOREAN = 0x08
|
||||
NON_CJK = 0x10
|
||||
ALL = 0x1F
|
||||
CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
|
||||
CJK = CHINESE | JAPANESE | KOREAN
|
||||
|
||||
|
||||
class ProbingState(object):
|
||||
"""
|
||||
This enum represents the different states a prober can be in.
|
||||
"""
|
||||
DETECTING = 0
|
||||
FOUND_IT = 1
|
||||
NOT_ME = 2
|
||||
|
||||
|
||||
class MachineState(object):
|
||||
"""
|
||||
This enum represents the different states a state machine can be in.
|
||||
"""
|
||||
START = 0
|
||||
ERROR = 1
|
||||
ITS_ME = 2
|
||||
|
||||
|
||||
class SequenceLikelihood(object):
|
||||
"""
|
||||
This enum represents the likelihood of a character following the previous one.
|
||||
"""
|
||||
NEGATIVE = 0
|
||||
UNLIKELY = 1
|
||||
LIKELY = 2
|
||||
POSITIVE = 3
|
||||
|
||||
@classmethod
|
||||
def get_num_categories(cls):
|
||||
""":returns: The number of likelihood categories in the enum."""
|
||||
return 4
|
||||
|
||||
|
||||
class CharacterCategory(object):
|
||||
"""
|
||||
This enum represents the different categories language models for
|
||||
``SingleByteCharsetProber`` put characters into.
|
||||
|
||||
Anything less than CONTROL is considered a letter.
|
||||
"""
|
||||
UNDEFINED = 255
|
||||
LINE_BREAK = 254
|
||||
SYMBOL = 253
|
||||
DIGIT = 252
|
||||
CONTROL = 251
|
||||
101
openpype/vendor/python/python_2/chardet/escprober.py
vendored
Normal file
101
openpype/vendor/python/python_2/chardet/escprober.py
vendored
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
######################## BEGIN LICENSE BLOCK ########################
|
||||
# The Original Code is mozilla.org code.
|
||||
#
|
||||
# The Initial Developer of the Original Code is
|
||||
# Netscape Communications Corporation.
|
||||
# Portions created by the Initial Developer are Copyright (C) 1998
|
||||
# the Initial Developer. All Rights Reserved.
|
||||
#
|
||||
# Contributor(s):
|
||||
# Mark Pilgrim - port to Python
|
||||
#
|
||||
# This library is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU Lesser General Public
|
||||
# License as published by the Free Software Foundation; either
|
||||
# version 2.1 of the License, or (at your option) any later version.
|
||||
#
|
||||
# This library is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this library; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
######################### END LICENSE BLOCK #########################
|
||||
|
||||
from .charsetprober import CharSetProber
|
||||
from .codingstatemachine import CodingStateMachine
|
||||
from .enums import LanguageFilter, ProbingState, MachineState
|
||||
from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL,
|
||||
ISO2022KR_SM_MODEL)
|
||||
|
||||
|
||||
class EscCharSetProber(CharSetProber):
|
||||
"""
|
||||
This CharSetProber uses a "code scheme" approach for detecting encodings,
|
||||
whereby easily recognizable escape or shift sequences are relied on to
|
||||
identify these encodings.
|
||||
"""
|
||||
|
||||
def __init__(self, lang_filter=None):
|
||||
super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
|
||||
self.coding_sm = []
|
||||
if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
|
||||
self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
|
||||
self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
|
||||
if self.lang_filter & LanguageFilter.JAPANESE:
|
||||
self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
|
||||
if self.lang_filter & LanguageFilter.KOREAN:
|
||||
self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
|
||||
self.active_sm_count = None
|
||||
self._detected_charset = None
|
||||
self._detected_language = None
|
||||
self._state = None
|
||||
self.reset()
|
||||
|
||||
def reset(self):
|
||||
super(EscCharSetProber, self).reset()
|
||||
for coding_sm in self.coding_sm:
|
||||
if not coding_sm:
|
||||
continue
|
||||
coding_sm.active = True
|
||||
coding_sm.reset()
|
||||
self.active_sm_count = len(self.coding_sm)
|
||||
self._detected_charset = None
|
||||
self._detected_language = None
|
||||
|
||||
@property
|
||||
def charset_name(self):
|
||||
return self._detected_charset
|
||||
|
||||
@property
|
||||
def language(self):
|
||||
return self._detected_language
|
||||
|
||||
def get_confidence(self):
|
||||
if self._detected_charset:
|
||||
return 0.99
|
||||
else:
|
||||
return 0.00
|
||||
|
||||
def feed(self, byte_str):
|
||||
for c in byte_str:
|
||||
for coding_sm in self.coding_sm:
|
||||
if not coding_sm or not coding_sm.active:
|
||||
continue
|
||||
coding_state = coding_sm.next_state(c)
|
||||
if coding_state == MachineState.ERROR:
|
||||
coding_sm.active = False
|
||||
self.active_sm_count -= 1
|
||||
if self.active_sm_count <= 0:
|
||||
self._state = ProbingState.NOT_ME
|
||||
return self.state
|
||||
elif coding_state == MachineState.ITS_ME:
|
||||
self._state = ProbingState.FOUND_IT
|
||||
self._detected_charset = coding_sm.get_coding_state_machine()
|
||||
self._detected_language = coding_sm.language
|
||||
return self.state
|
||||
|
||||
return self.state
|
||||
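A minimal sketch (not part of the diff) of the escape-sequence prober in action: the ESC $ B designator that switches ISO-2022-JP into JIS X 0208 is enough, per the state tables in escsm.py below, for a positive identification:

# Illustrative sketch: recognizing ISO-2022-JP from its escape sequence.
from chardet.enums import LanguageFilter, ProbingState
from chardet.escprober import EscCharSetProber

prober = EscCharSetProber(lang_filter=LanguageFilter.ALL)
state = prober.feed(bytearray(b'\x1b$B'))  # ESC $ B designates JIS X 0208
print(state == ProbingState.FOUND_IT)      # True
print(prober.charset_name)                 # ISO-2022-JP
print(prober.get_confidence())             # 0.99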
246 openpype/vendor/python/python_2/chardet/escsm.py vendored Normal file
@@ -0,0 +1,246 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .enums import MachineState


HZ_CLS = (
1,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,0,0,0,0,  # 20 - 27
0,0,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,0,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,4,0,5,2,0,  # 78 - 7f
1,1,1,1,1,1,1,1,  # 80 - 87
1,1,1,1,1,1,1,1,  # 88 - 8f
1,1,1,1,1,1,1,1,  # 90 - 97
1,1,1,1,1,1,1,1,  # 98 - 9f
1,1,1,1,1,1,1,1,  # a0 - a7
1,1,1,1,1,1,1,1,  # a8 - af
1,1,1,1,1,1,1,1,  # b0 - b7
1,1,1,1,1,1,1,1,  # b8 - bf
1,1,1,1,1,1,1,1,  # c0 - c7
1,1,1,1,1,1,1,1,  # c8 - cf
1,1,1,1,1,1,1,1,  # d0 - d7
1,1,1,1,1,1,1,1,  # d8 - df
1,1,1,1,1,1,1,1,  # e0 - e7
1,1,1,1,1,1,1,1,  # e8 - ef
1,1,1,1,1,1,1,1,  # f0 - f7
1,1,1,1,1,1,1,1,  # f8 - ff
)

HZ_ST = (
MachineState.START,MachineState.ERROR,     3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,     4,MachineState.ERROR,# 10-17
     5,MachineState.ERROR,     6,MachineState.ERROR,     5,     5,     4,MachineState.ERROR,# 18-1f
     4,MachineState.ERROR,     4,     4,     4,MachineState.ERROR,     4,MachineState.ERROR,# 20-27
     4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f
)

HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)

HZ_SM_MODEL = {'class_table': HZ_CLS,
               'class_factor': 6,
               'state_table': HZ_ST,
               'char_len_table': HZ_CHAR_LEN_TABLE,
               'name': "HZ-GB-2312",
               'language': 'Chinese'}

ISO2022CN_CLS = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,0,0,0,0,  # 20 - 27
0,3,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,4,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

ISO2022CN_ST = (
MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,# 18-1f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27
     5,     6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f
)

ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)

ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS,
                      'class_factor': 9,
                      'state_table': ISO2022CN_ST,
                      'char_len_table': ISO2022CN_CHAR_LEN_TABLE,
                      'name': "ISO-2022-CN",
                      'language': 'Chinese'}

ISO2022JP_CLS = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,2,2,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,7,0,0,0,  # 20 - 27
3,0,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
6,0,4,0,8,0,0,0,  # 40 - 47
0,9,5,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

ISO2022JP_ST = (
MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f
MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,MachineState.ERROR,# 20-27
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47
)

ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)

ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS,
                      'class_factor': 10,
                      'state_table': ISO2022JP_ST,
                      'char_len_table': ISO2022JP_CHAR_LEN_TABLE,
                      'name': "ISO-2022-JP",
                      'language': 'Japanese'}

ISO2022KR_CLS = (
2,0,0,0,0,0,0,0,  # 00 - 07
0,0,0,0,0,0,0,0,  # 08 - 0f
0,0,0,0,0,0,0,0,  # 10 - 17
0,0,0,1,0,0,0,0,  # 18 - 1f
0,0,0,0,3,0,0,0,  # 20 - 27
0,4,0,0,0,0,0,0,  # 28 - 2f
0,0,0,0,0,0,0,0,  # 30 - 37
0,0,0,0,0,0,0,0,  # 38 - 3f
0,0,0,5,0,0,0,0,  # 40 - 47
0,0,0,0,0,0,0,0,  # 48 - 4f
0,0,0,0,0,0,0,0,  # 50 - 57
0,0,0,0,0,0,0,0,  # 58 - 5f
0,0,0,0,0,0,0,0,  # 60 - 67
0,0,0,0,0,0,0,0,  # 68 - 6f
0,0,0,0,0,0,0,0,  # 70 - 77
0,0,0,0,0,0,0,0,  # 78 - 7f
2,2,2,2,2,2,2,2,  # 80 - 87
2,2,2,2,2,2,2,2,  # 88 - 8f
2,2,2,2,2,2,2,2,  # 90 - 97
2,2,2,2,2,2,2,2,  # 98 - 9f
2,2,2,2,2,2,2,2,  # a0 - a7
2,2,2,2,2,2,2,2,  # a8 - af
2,2,2,2,2,2,2,2,  # b0 - b7
2,2,2,2,2,2,2,2,  # b8 - bf
2,2,2,2,2,2,2,2,  # c0 - c7
2,2,2,2,2,2,2,2,  # c8 - cf
2,2,2,2,2,2,2,2,  # d0 - d7
2,2,2,2,2,2,2,2,  # d8 - df
2,2,2,2,2,2,2,2,  # e0 - e7
2,2,2,2,2,2,2,2,  # e8 - ef
2,2,2,2,2,2,2,2,  # f0 - f7
2,2,2,2,2,2,2,2,  # f8 - ff
)

ISO2022KR_ST = (
MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,MachineState.ERROR,# 10-17
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27
)

ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)

ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS,
                      'class_factor': 6,
                      'state_table': ISO2022KR_ST,
                      'char_len_table': ISO2022KR_CHAR_LEN_TABLE,
                      'name': "ISO-2022-KR",
                      'language': 'Korean'}
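These tables are consumed through the ``state * class_factor + byte_class`` lookup shown in codingstatemachine.py above. A worked sketch (not part of the diff) on the ISO-2022-KR designator ESC $ ) C:

# Illustrative sketch: walking the ISO-2022-KR machine through its
# designator sequence, one byte at a time.
from chardet.codingstatemachine import CodingStateMachine
from chardet.enums import MachineState
from chardet.escsm import ISO2022KR_SM_MODEL

sm = CodingStateMachine(ISO2022KR_SM_MODEL)
final = None
for byte in bytearray(b'\x1b$)C'):  # ESC $ ) C
    final = sm.next_state(byte)
print(final == MachineState.ITS_ME)  # True: unique to ISO-2022-KR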
92 openpype/vendor/python/python_2/chardet/eucjpprober.py vendored Normal file
@@ -0,0 +1,92 @@
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .enums import ProbingState, MachineState
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCJPDistributionAnalysis
from .jpcntx import EUCJPContextAnalysis
from .mbcssm import EUCJP_SM_MODEL


class EUCJPProber(MultiByteCharSetProber):
    def __init__(self):
        super(EUCJPProber, self).__init__()
        self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
        self.distribution_analyzer = EUCJPDistributionAnalysis()
        self.context_analyzer = EUCJPContextAnalysis()
        self.reset()

    def reset(self):
        super(EUCJPProber, self).reset()
        self.context_analyzer.reset()

    @property
    def charset_name(self):
        return "EUC-JP"

    @property
    def language(self):
        return "Japanese"

    def feed(self, byte_str):
        for i in range(len(byte_str)):
            # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte
            coding_state = self.coding_sm.next_state(byte_str[i])
            if coding_state == MachineState.ERROR:
                self.logger.debug('%s %s prober hit error at byte %s',
                                  self.charset_name, self.language, i)
                self._state = ProbingState.NOT_ME
                break
            elif coding_state == MachineState.ITS_ME:
                self._state = ProbingState.FOUND_IT
                break
            elif coding_state == MachineState.START:
                char_len = self.coding_sm.get_current_charlen()
                if i == 0:
                    self._last_char[1] = byte_str[0]
                    self.context_analyzer.feed(self._last_char, char_len)
                    self.distribution_analyzer.feed(self._last_char, char_len)
                else:
                    self.context_analyzer.feed(byte_str[i - 1:i + 1],
                                               char_len)
                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
                                                    char_len)

        self._last_char[0] = byte_str[-1]

        if self.state == ProbingState.DETECTING:
            if (self.context_analyzer.got_enough_data() and
                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
                self._state = ProbingState.FOUND_IT

        return self.state

    def get_confidence(self):
        context_conf = self.context_analyzer.get_confidence()
        distrib_conf = self.distribution_analyzer.get_confidence()
        return max(context_conf, distrib_conf)
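In practice these probers are driven through the package's top-level helper rather than instantiated directly. A short sketch (not part of the diff); the sample text is arbitrary and the reported confidence is indicative, since very short inputs may yield a different CJK guess:

# Illustrative sketch: end-to-end detection over EUC-JP bytes.
import chardet

data = u'こんにちは世界'.encode('euc_jp')
print(chardet.detect(data))
# e.g. {'encoding': 'EUC-JP', 'confidence': 0.99, 'language': 'Japanese'}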
Some files were not shown because too many files have changed in this diff.