Merge branch '2.x/develop' into feature/102-_draft_Celaction_quick_integration

67  pype/hooks/resolve/prelaunch.py  Normal file
@@ -0,0 +1,67 @@
import os
import traceback
import importlib
from pype.lib import PypeHook
from pypeapp import Logger
from pype.hosts.resolve import utils


class ResolvePrelaunch(PypeHook):
    """
    This hook checks whether the current workfile path contains a Resolve
    project. If not, it initializes one and finally passes the path to the
    project via an environment variable to the Resolve launcher shell script.
    """

    def __init__(self, logger=None):
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:

        if not env:
            env = os.environ

        # make sure Python 3.6 is installed at the provided path
        py36_dir = os.path.normpath(env.get("PYTHON36_RESOLVE", ""))
        assert os.path.isdir(py36_dir), (
            "Python 3.6 is not installed at the provided folder path. Either "
            "make sure `environments\\resolve.json` has correctly set "
            "`PYTHON36_RESOLVE` or make sure Python 3.6 is installed "
            f"in the given path. \nPYTHON36_RESOLVE: `{py36_dir}`"
        )
        self.log.info(f"Path to Resolve Python folder: `{py36_dir}`...")
        env["PYTHON36_RESOLVE"] = py36_dir

        # set the utility scripts dir for script syncing
        us_dir = os.path.normpath(env.get("RESOLVE_UTILITY_SCRIPTS_DIR", ""))
        assert os.path.isdir(us_dir), (
            "Resolve utility script dir does not exist. Either make sure "
            "`environments\\resolve.json` has correctly set "
            "`RESOLVE_UTILITY_SCRIPTS_DIR` or reinstall DaVinci Resolve. \n"
            f"RESOLVE_UTILITY_SCRIPTS_DIR: `{us_dir}`"
        )

        # correctly format the path for the pre-launch python script
        pre_py_sc = os.path.normpath(env.get("PRE_PYTHON_SCRIPT", ""))
        env["PRE_PYTHON_SCRIPT"] = pre_py_sc

        try:
            __import__("pype.resolve")
            __import__("pyblish")

        except ImportError as e:
            print(traceback.format_exc())
            print("pyblish: Could not load integration: %s " % e)

        else:
            # Resolve setup integration
            importlib.reload(utils)
            utils.setup(env)

        return True

33  pype/hosts/photoshop/__init__.py  Normal file
@@ -0,0 +1,33 @@
import os

from avalon import api
import pyblish.api


def install():
    print("Installing Pype config...")

    plugins_directory = os.path.join(
        os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
        "plugins",
        "photoshop"
    )

    pyblish.api.register_plugin_path(
        os.path.join(plugins_directory, "publish")
    )
    api.register_plugin_path(
        api.Loader, os.path.join(plugins_directory, "load")
    )
    api.register_plugin_path(
        api.Creator, os.path.join(plugins_directory, "create")
    )

    pyblish.api.register_callback(
        "instanceToggled", on_pyblish_instance_toggled
    )


def on_pyblish_instance_toggled(instance, old_value, new_value):
    """Toggle layer visibility on instance toggles."""
    instance[0].Visible = new_value

1  pype/hosts/resolve/README.markdown  Normal file
@@ -0,0 +1 @@


189  pype/hosts/resolve/RESOLVE_API_README.txt  Normal file
@@ -0,0 +1,189 @@
Updated as of 08 March 2019

--------------------------
In this package, you will find a brief introduction to the Scripting API for DaVinci Resolve Studio. Apart from this README.txt file, this package contains folders containing the basic import modules for scripting access (DaVinciResolveScript.py) and some representative examples.

Overview
--------

As with Blackmagic Design Fusion scripts, user scripts written in Lua and Python programming languages are supported. By default, scripts can be invoked from the Console window in the Fusion page, or via command line. This permission can be changed in Resolve Preferences, to be only from Console, or to be invoked from the local network. Please be aware of the security implications when allowing scripting access from outside of the Resolve application.


Using a script
--------------
DaVinci Resolve needs to be running for a script to be invoked.

For a Resolve script to be executed from an external folder, the script needs to know of the API location.
You may need to set these environment variables to allow your Python installation to pick up the appropriate dependencies, as shown below:

Mac OS X:
RESOLVE_SCRIPT_API="/Library/Application Support/Blackmagic Design/DaVinci Resolve/Developer/Scripting/"
RESOLVE_SCRIPT_LIB="/Applications/DaVinci Resolve/DaVinci Resolve.app/Contents/Libraries/Fusion/fusionscript.so"
PYTHONPATH="$PYTHONPATH:$RESOLVE_SCRIPT_API/Modules/"

Windows:
RESOLVE_SCRIPT_API="%PROGRAMDATA%\\Blackmagic Design\\DaVinci Resolve\\Support\\Developer\\Scripting\\"
RESOLVE_SCRIPT_LIB="C:\\Program Files\\Blackmagic Design\\DaVinci Resolve\\fusionscript.dll"
PYTHONPATH="%PYTHONPATH%;%RESOLVE_SCRIPT_API%\\Modules\\"

Linux:
RESOLVE_SCRIPT_API="/opt/resolve/Developer/Scripting/"
RESOLVE_SCRIPT_LIB="/opt/resolve/libs/Fusion/fusionscript.so"
PYTHONPATH="$PYTHONPATH:$RESOLVE_SCRIPT_API/Modules/"
(Note: For standard ISO Linux installations, the path above may need to be modified to refer to /home/resolve instead of /opt/resolve)
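
If you prefer not to set PYTHONPATH globally, a script can extend sys.path itself before importing the module. This is only an illustrative sketch; it assumes RESOLVE_SCRIPT_API is already set to one of the per-platform values above and that Resolve is running:

#!/usr/bin/env python
import os
import sys

# assumption: RESOLVE_SCRIPT_API points at the Scripting folder shown above
script_api = os.environ["RESOLVE_SCRIPT_API"]
sys.path.append(os.path.join(script_api, "Modules"))

import DaVinciResolveScript as dvr_script
resolve = dvr_script.scriptapp("Resolve")
print(resolve.GetProjectManager().GetCurrentProject().GetName())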

As with Fusion scripts, Resolve scripts can also be invoked via the menu and the Console.

On startup, DaVinci Resolve scans the Utility Scripts directory and enumerates the scripts found in the Script application menu. Placing your script in this folder and invoking it from this menu is the easiest way to use scripts. The Utility Scripts folder is located in:
Mac OS X: /Library/Application Support/Blackmagic Design/DaVinci Resolve/Fusion/Scripts/Comp/
Windows: %APPDATA%\Blackmagic Design\DaVinci Resolve\Fusion\Scripts\Comp\
Linux: /opt/resolve/Fusion/Scripts/Comp/ (or /home/resolve/Fusion/Scripts/Comp/ depending on installation)
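
As an illustration, a minimal utility script dropped into that folder could look like the sketch below; it only uses calls documented later in this file and assumes Resolve executes it while running:

#!/usr/bin/env python
# minimal sketch of a utility script: switch to the Edit page and report the project
import DaVinciResolveScript as dvr_script

resolve = dvr_script.scriptapp("Resolve")
resolve.OpenPage("edit")
print("Current project:", resolve.GetProjectManager().GetCurrentProject().GetName())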

The interactive Console window allows for an easy way to execute simple scripting commands, to query or modify properties, and to test scripts. The console accepts commands in Python 2.7, Python 3.6 and Lua and evaluates and executes them immediately. For more information on how to use the Console, please refer to the DaVinci Resolve User Manual.

This example Python script creates a simple project:
#!/usr/bin/env python
import DaVinciResolveScript as dvr_script
resolve = dvr_script.scriptapp("Resolve")
fusion = resolve.Fusion()
projectManager = resolve.GetProjectManager()
projectManager.CreateProject("Hello World")

The resolve object is the fundamental starting point for scripting via Resolve. As a native object, it can be inspected for further scriptable properties - using table iteration and `getmetatable` in Lua and dir, help etc in Python (among other methods). A notable scriptable object above is fusion - it allows access to all existing Fusion scripting functionality.
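
For instance, from a Python console connected as above, such inspection might look like this (illustrative only):

print(dir(resolve))
help(resolve)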
|
||||
|
||||
Running DaVinci Resolve in headless mode
|
||||
----------------------------------------
|
||||
|
||||
DaVinci Resolve can be launched in a headless mode without the user interface using the -nogui command line option. When DaVinci Resolve is launched using this option, the user interface is disabled. However, the various scripting APIs will continue to work as expected.
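
A minimal sketch of driving a headless session is shown below; the executable path in the comment is an assumption that varies per installation, and only calls documented in this file are used:

#!/usr/bin/env python
# assumes Resolve was started separately in headless mode, e.g. "/opt/resolve/bin/resolve" -nogui,
# and that PYTHONPATH points at the scripting Modules folder as described above
import DaVinciResolveScript as dvr_script

resolve = dvr_script.scriptapp("Resolve")
if resolve is None:
    raise RuntimeError("No running DaVinci Resolve instance found")

pm = resolve.GetProjectManager()
project = pm.CreateProject("HeadlessDemo")
print("Created project:", project.GetName() if project else "name already taken")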


Basic Resolve API
-----------------

Some commonly used API functions are described below (*). As with the resolve object, each object is inspectable for properties and functions.


Resolve
  Fusion() --> Fusion # Returns the Fusion object. Starting point for Fusion scripts.
  GetMediaStorage() --> MediaStorage # Returns media storage object to query and act on media locations.
  GetProjectManager() --> ProjectManager # Returns project manager object for currently open database.
  OpenPage(pageName) --> None # Switches to indicated page in DaVinci Resolve. Input can be one of ("media", "edit", "fusion", "color", "fairlight", "deliver").
ProjectManager
  CreateProject(projectName) --> Project # Creates and returns a project if projectName (text) is unique, and None if it is not.
  LoadProject(projectName) --> Project # Loads and returns the project with name = projectName (text) if there is a match found, and None if there is no matching Project.
  GetCurrentProject() --> Project # Returns the currently loaded Resolve project.
  SaveProject() --> Bool # Saves the currently loaded project with its own name. Returns True if successful.
  CreateFolder(folderName) --> Bool # Creates a folder if folderName (text) is unique.
  GetProjectsInCurrentFolder() --> [project names...] # Returns an array of project names in current folder.
  GetFoldersInCurrentFolder() --> [folder names...] # Returns an array of folder names in current folder.
  GotoRootFolder() --> Bool # Opens root folder in database.
  GotoParentFolder() --> Bool # Opens parent folder of current folder in database if current folder has parent.
  OpenFolder(folderName) --> Bool # Opens folder under given name.
  ImportProject(filePath) --> Bool # Imports a project under given file path. Returns true in case of success.
  ExportProject(projectName, filePath) --> Bool # Exports a project based on given name into provided file path. Returns true in case of success.
  RestoreProject(filePath) --> Bool # Restores a project under given backup file path. Returns true in case of success.
Project
  GetMediaPool() --> MediaPool # Returns the Media Pool object.
  GetTimelineCount() --> int # Returns the number of timelines currently present in the project.
  GetTimelineByIndex(idx) --> Timeline # Returns timeline at the given index, 1 <= idx <= project.GetTimelineCount()
  GetCurrentTimeline() --> Timeline # Returns the currently loaded timeline.
  SetCurrentTimeline(timeline) --> Bool # Sets given timeline as current timeline for the project. Returns True if successful.
  GetName() --> string # Returns project name.
  SetName(projectName) --> Bool # Sets project name if given projectName (text) is unique.
  GetPresets() --> [presets...] # Returns a table of presets and their information.
  SetPreset(presetName) --> Bool # Sets preset by given presetName (string) into project.
  GetRenderJobs() --> [render jobs...] # Returns a table of render jobs and their information.
  GetRenderPresets() --> [presets...] # Returns a table of render presets and their information.
  StartRendering(index1, index2, ...) --> Bool # Starts rendering for given render jobs based on their indices. If no parameter is given, rendering starts for all render jobs.
  StartRendering([idxs...]) --> Bool # Starts rendering for given render jobs based on their indices. If no parameter is given, rendering starts for all render jobs.
  StopRendering() --> None # Stops rendering for all render jobs.
  IsRenderingInProgress() --> Bool # Returns true if rendering is in progress.
  AddRenderJob() --> Bool # Adds render job to render queue.
  DeleteRenderJobByIndex(idx) --> Bool # Deletes render job based on given job index (int).
  DeleteAllRenderJobs() --> Bool # Deletes all render jobs.
  LoadRenderPreset(presetName) --> Bool # Sets a preset as current preset for rendering if presetName (text) exists.
  SaveAsNewRenderPreset(presetName) --> Bool # Creates a new render preset by given name if presetName (text) is unique.
  SetRenderSettings([settings map]) --> Bool # Sets given settings for rendering. Settings map is a map; keys of the map are: "SelectAllFrames", "MarkIn", "MarkOut", "TargetDir", "CustomName".
  GetRenderJobStatus(idx) --> [status info] # Returns job status and completion rendering percentage of the job by given job index (int).
  GetSetting(settingName) --> string # Returns setting value by given settingName (string) if the setting exists. With empty settingName the function returns a full list of settings.
  SetSetting(settingName, settingValue) --> Bool # Sets project setting based on given name (string) and value (string).
  GetRenderFormats() --> [render formats...] # Returns a list of available render formats.
  GetRenderCodecs(renderFormat) --> [render codecs...] # Returns a list of available codecs for given render format (string).
  GetCurrentRenderFormatAndCodec() --> [format, codec] # Returns currently selected render format and render codec.
  SetCurrentRenderFormatAndCodec(format, codec) --> Bool # Sets given render format (string) and render codec (string) as options for rendering.
MediaStorage
  GetMountedVolumes() --> [paths...] # Returns an array of folder paths corresponding to mounted volumes displayed in Resolve’s Media Storage.
  GetSubFolders(folderPath) --> [paths...] # Returns an array of folder paths in the given absolute folder path.
  GetFiles(folderPath) --> [paths...] # Returns an array of media and file listings in the given absolute folder path. Note that media listings may be logically consolidated entries.
  RevealInStorage(path) --> None # Expands and displays a given file/folder path in Resolve’s Media Storage.
  AddItemsToMediaPool(item1, item2, ...) --> [clips...] # Adds specified file/folder paths from Media Storage into current Media Pool folder. Input is one or more file/folder paths.
  AddItemsToMediaPool([items...]) --> [clips...] # Adds specified file/folder paths from Media Storage into current Media Pool folder. Input is an array of file/folder paths.
MediaPool
  GetRootFolder() --> Folder # Returns the root Folder of the Media Pool.
  AddSubFolder(folder, name) --> Folder # Adds a new subfolder under specified Folder object with the given name.
  CreateEmptyTimeline(name) --> Timeline # Adds a new timeline with given name.
  AppendToTimeline(clip1, clip2, ...) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful.
  AppendToTimeline([clips]) --> Bool # Appends specified MediaPoolItem objects in the current timeline. Returns True if successful.
  CreateTimelineFromClips(name, clip1, clip2, ...) --> Timeline # Creates a new timeline with specified name, and appends the specified MediaPoolItem objects.
  CreateTimelineFromClips(name, [clips]) --> Timeline # Creates a new timeline with specified name, and appends the specified MediaPoolItem objects.
  ImportTimelineFromFile(filePath) --> Timeline # Creates timeline based on parameters within given file.
  GetCurrentFolder() --> Folder # Returns currently selected Folder.
  SetCurrentFolder(Folder) --> Bool # Sets current folder by given Folder.
Folder
  GetClips() --> [clips...] # Returns a list of clips (items) within the folder.
  GetName() --> string # Returns user-defined name of the folder.
  GetSubFolders() --> [folders...] # Returns a list of subfolders in the folder.
MediaPoolItem
  GetMetadata(metadataType) --> [[types],[values]] # Returns the value of metadataType. If no parameter is specified, returns all set metadata parameters.
  SetMetadata(metadataType, metadataValue) --> Bool # Sets metadata by given type and value. Returns True if successful.
  GetMediaId() --> string # Returns a unique ID name related to MediaPoolItem.
  AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information.
  GetMarkers() --> [markers...] # Returns a list of all markers and their information.
  AddFlag(color) --> Bool # Adds a flag with given color (text).
  GetFlags() --> [colors...] # Returns a list of flag colors assigned to the item.
  GetClipColor() --> string # Returns an item color as a string.
  GetClipProperty(propertyName) --> [[types],[values]] # Returns property value related to the item based on given propertyName (string). If propertyName is empty, it returns a full list of properties.
  SetClipProperty(propertyName, propertyValue) --> Bool # Sets the given propertyName (string) to propertyValue (string).
Timeline
  GetName() --> string # Returns user-defined name of the timeline.
  SetName(timelineName) --> Bool # Sets timeline name if timelineName (text) is unique.
  GetStartFrame() --> int # Returns frame number at the start of timeline.
  GetEndFrame() --> int # Returns frame number at the end of timeline.
  GetTrackCount(trackType) --> int # Returns the number of tracks for the specified track type ("audio", "video" or "subtitle").
  GetItemsInTrack(trackType, index) --> [items...] # Returns an array of Timeline items on the video or audio track (based on trackType) at specified index. 1 <= index <= GetTrackCount(trackType).
  AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information.
  GetMarkers() --> [markers...] # Returns a list of all markers and their information.
  ApplyGradeFromDRX(path, gradeMode, item1, item2, ...) --> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned".
  ApplyGradeFromDRX(path, gradeMode, [items]) --> Bool # Loads a still from given file path (string) and applies grade to Timeline Items with gradeMode (int): 0 - "No keyframes", 1 - "Source Timecode aligned", 2 - "Start Frames aligned".
  GetCurrentTimecode() --> string # Returns a string representing the timecode for the current position of the timeline, while on the Cut, Edit, Color and Deliver pages.
  GetCurrentVideoItem() --> item # Returns the current video timeline item.
  GetCurrentClipThumbnailImage() --> [width, height, format, data] # Returns raw thumbnail image data (this image data is encoded in base 64 format and the image format is RGB 8 bit) for the current media in the Color Page, in the format of a dictionary (in Python) or table (in Lua). Information returned is "width", "height", "format" and "data". An example is provided in 6_get_current_media_thumbnail.py in the Examples folder.
TimelineItem
  GetName() --> string # Returns the name of the item.
  GetDuration() --> int # Returns the duration of the item.
  GetEnd() --> int # Returns the position of the end frame.
  GetFusionCompCount() --> int # Returns the number of Fusion compositions associated with the timeline item.
  GetFusionCompByIndex(compIndex) --> fusionComp # Returns the Fusion composition object based on given index. 1 <= compIndex <= timelineItem.GetFusionCompCount()
  GetFusionCompNames() --> [names...] # Returns a list of Fusion composition names associated with the timeline item.
  GetFusionCompByName(compName) --> fusionComp # Returns the Fusion composition object based on given name.
  GetLeftOffset() --> int # Returns the maximum extension by frame for the clip from the left side.
  GetRightOffset() --> int # Returns the maximum extension by frame for the clip from the right side.
  GetStart() --> int # Returns the position of the first frame.
  AddMarker(frameId, color, name, note, duration) --> Bool # Creates a new marker at given frameId position and with given marker information.
  GetMarkers() --> [markers...] # Returns a list of all markers and their information.
  GetFlags() --> [colors...] # Returns a list of flag colors assigned to the item.
  GetClipColor() --> string # Returns an item color as a string.
  AddFusionComp() --> fusionComp # Adds a new Fusion composition associated with the timeline item.
  ImportFusionComp(path) --> fusionComp # Imports a Fusion composition from given file path by creating and adding a new composition for the item.
  ExportFusionComp(path, compIndex) --> Bool # Exports a Fusion composition based on given index into the provided file name path.
  DeleteFusionCompByName(compName) --> Bool # Deletes a Fusion composition by provided name.
  LoadFusionCompByName(compName) --> fusionComp # Loads a Fusion composition by provided name and sets it as the active composition.
  RenameFusionCompByName(oldName, newName) --> Bool # Renames the Fusion composition identified by oldName with the new given name.
  AddVersion(versionName, versionType) --> Bool # Adds a new Version associated with the timeline item. versionType: 0 - local, 1 - remote.
  DeleteVersionByName(versionName, versionType) --> Bool # Deletes a Version by provided name. versionType: 0 - local, 1 - remote.
  LoadVersionByName(versionName, versionType) --> Bool # Loads a Version by provided name and sets it as the active Version. versionType: 0 - local, 1 - remote.
  RenameVersionByName(oldName, newName, versionType) --> Bool # Renames the Version identified by oldName with the new given name. versionType: 0 - local, 1 - remote.
  GetMediaPoolItem() --> MediaPoolItem # Returns the media pool item corresponding to the timeline item, if it exists.
  GetVersionNames(versionType) --> [strings...] # Returns a list of version names by provided versionType: 0 - local, 1 - remote.
  GetStereoConvergenceValues() --> [offset, value] # Returns a table of keyframe offsets and respective convergence values.
  GetStereoLeftFloatingWindowParams() --> [offset, value] # For the LEFT eye -> returns a table of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values.
  GetStereoRightFloatingWindowParams() --> [offset, value] # For the RIGHT eye -> returns a table of keyframe offsets and respective floating window params. Value at particular offset includes the left, right, top and bottom floating window values.
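
Putting a few of the calls above together, an illustrative end-to-end sketch could look like this (the file paths, project name and render settings are placeholders, not part of the documented API):

#!/usr/bin/env python
import DaVinciResolveScript as dvr_script

resolve = dvr_script.scriptapp("Resolve")
pm = resolve.GetProjectManager()

# create the project, or re-open it if the name is already taken
project = pm.CreateProject("API Demo") or pm.LoadProject("API Demo")

# pull two clips into the Media Pool and cut them into a fresh timeline
media_storage = resolve.GetMediaStorage()
media_pool = project.GetMediaPool()
clips = media_storage.AddItemsToMediaPool("/path/to/clip01.mov", "/path/to/clip02.mov")
timeline = media_pool.CreateTimelineFromClips("demo_timeline", clips)

# queue a render of the whole timeline and start it
project.SetRenderSettings({"SelectAllFrames": True, "TargetDir": "/path/to/renders"})
project.AddRenderJob()
project.StartRendering()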

59  pype/hosts/resolve/__init__.py  Normal file
@@ -0,0 +1,59 @@
from .pipeline import (
    install,
    uninstall,
    ls,
    containerise,
    publish,
    launch_workfiles_app
)

from .utils import (
    setup,
    get_resolve_module
)

from .workio import (
    open_file,
    save_file,
    current_file,
    has_unsaved_changes,
    file_extensions,
    work_root
)

from .lib import (
    get_project_manager,
    set_project_manager_to_folder_name
)

from .menu import launch_pype_menu

__all__ = [
    # pipeline
    "install",
    "uninstall",
    "ls",
    "containerise",
    "reload_pipeline",  # not currently imported from .pipeline
    "publish",
    "launch_workfiles_app",

    # utils
    "setup",
    "get_resolve_module",

    # lib
    "get_project_manager",
    "set_project_manager_to_folder_name",

    # menu
    "launch_pype_menu",

    # workio
    "open_file",
    "save_file",
    "current_file",
    "has_unsaved_changes",
    "file_extensions",
    "work_root"
]

54  pype/hosts/resolve/action.py  Normal file
@@ -0,0 +1,54 @@
# absolute_import is needed to counter the `module has no cmds` error in Maya
from __future__ import absolute_import

import pyblish.api

from ...action import get_errored_instances_from_context


class SelectInvalidAction(pyblish.api.Action):
    """Select invalid clips in the Resolve timeline when a plug-in failed.

    To retrieve the invalid nodes this assumes a static `get_invalid()`
    method is available on the plugin.

    """
    label = "Select invalid"
    on = "failed"  # This action is only available on a failed plug-in
    icon = "search"  # Icon from Font Awesome

    def process(self, context, plugin):

        try:
            from pype.hosts.resolve.utils import get_resolve_module
            resolve = get_resolve_module()
            self.log.debug(resolve)
        except ImportError:
            raise ImportError("Current host is not Resolve")

        errored_instances = get_errored_instances_from_context(context)

        # Apply pyblish.logic to get the instances for the plug-in
        instances = pyblish.api.instances_by_plugin(errored_instances, plugin)

        # Get the invalid nodes for the plug-ins
        self.log.info("Finding invalid clips..")
        invalid = list()
        for instance in instances:
            invalid_nodes = plugin.get_invalid(instance)
            if invalid_nodes:
                if isinstance(invalid_nodes, (list, tuple)):
                    invalid.extend(invalid_nodes)
                else:
                    self.log.warning("Plug-in returned to be invalid, "
                                     "but has no selectable nodes.")

        # Ensure unique (process each node only once)
        invalid = list(set(invalid))

        if invalid:
            self.log.info("Selecting invalid nodes: %s" % ", ".join(invalid))
            # TODO: select Resolve timeline track items in the current timeline
        else:
            self.log.info("No invalid nodes found.")

78  pype/hosts/resolve/lib.py  Normal file
@@ -0,0 +1,78 @@
import sys
from .utils import get_resolve_module
from pypeapp import Logger

log = Logger().get_logger(__name__, "resolve")

self = sys.modules[__name__]
self.pm = None


def get_project_manager():
    if not self.pm:
        resolve = get_resolve_module()
        self.pm = resolve.GetProjectManager()
    return self.pm


def set_project_manager_to_folder_name(folder_name):
    """
    Sets the context of the Project Manager to the folder with the given name.

    Searches for the folder by name from the root folder downwards.
    If no folder with that name exists, it will be created in the root folder.

    Args:
        folder_name (str): name of the folder to search for

    Returns:
        bool: True if successful

    Raises:
        Exception: Cannot create folder in root

    """
    # initialize project manager
    get_project_manager()

    set_folder = False

    # go back to root folder
    if self.pm.GotoRootFolder():
        log.info(f"Testing existing folder: {folder_name}")
        folders = convert_resolve_list_type(
            self.pm.GetFoldersInCurrentFolder())
        log.info(f"Testing existing folders: {folders}")
        # open the first available folder with the same name
        # as `folder_name`, otherwise fall through to creation
        if next((f for f in folders if f == folder_name), False):
            log.info(f"Found existing folder: {folder_name}")
            set_folder = self.pm.OpenFolder(folder_name)

    if set_folder:
        return True

    # if no folder with that name exists, create one
    # go back to root folder first
    log.info(f"Folder `{folder_name}` not found and will be created")
    if self.pm.GotoRootFolder():
        try:
            # create folder by given name
            self.pm.CreateFolder(folder_name)
            self.pm.OpenFolder(folder_name)
            return True
        except NameError as e:
            log.error((f"Folder with name `{folder_name}` cannot be created! "
                       f"Error: {e}"))
            return False


def convert_resolve_list_type(resolve_list):
    """Resolve uses an indexed dictionary as its list type,
    e.g. `{1.0: 'value'}`.
    This converts it to a normal Python list.
    """
    assert isinstance(resolve_list, dict), (
        "Input argument should be dict() type")

    return [resolve_list[i] for i in sorted(resolve_list.keys())]

154  pype/hosts/resolve/menu.py  Normal file
@@ -0,0 +1,154 @@
import os
import sys

from Qt import QtWidgets, QtCore

from .pipeline import (
    publish,
    launch_workfiles_app
)

from avalon.tools import (
    creator,
    loader,
    sceneinventory,
    libraryloader
)


def load_stylesheet():
    path = os.path.join(os.path.dirname(__file__), "menu_style.qss")
    if not os.path.exists(path):
        print("Unable to load stylesheet, file not found in resources")
        return ""

    with open(path, "r") as file_stream:
        stylesheet = file_stream.read()
    return stylesheet


class Spacer(QtWidgets.QWidget):
    def __init__(self, height, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)

        self.setFixedHeight(height)

        real_spacer = QtWidgets.QWidget(self)
        real_spacer.setObjectName("Spacer")
        real_spacer.setFixedHeight(height)

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(real_spacer)

        self.setLayout(layout)


class PypeMenu(QtWidgets.QWidget):
    def __init__(self, *args, **kwargs):
        super(self.__class__, self).__init__(*args, **kwargs)

        self.setObjectName("PypeMenu")

        self.setWindowFlags(
            QtCore.Qt.Window
            | QtCore.Qt.CustomizeWindowHint
            | QtCore.Qt.WindowTitleHint
            | QtCore.Qt.WindowCloseButtonHint
            | QtCore.Qt.WindowStaysOnTopHint
        )

        self.setWindowTitle("Pype")
        workfiles_btn = QtWidgets.QPushButton("Workfiles", self)
        create_btn = QtWidgets.QPushButton("Create", self)
        publish_btn = QtWidgets.QPushButton("Publish", self)
        load_btn = QtWidgets.QPushButton("Load", self)
        inventory_btn = QtWidgets.QPushButton("Inventory", self)
        libload_btn = QtWidgets.QPushButton("Library", self)
        rename_btn = QtWidgets.QPushButton("Rename", self)
        set_colorspace_btn = QtWidgets.QPushButton(
            "Set colorspace from presets", self
        )
        reset_resolution_btn = QtWidgets.QPushButton(
            "Reset resolution from presets", self
        )

        layout = QtWidgets.QVBoxLayout(self)
        layout.setContentsMargins(10, 20, 10, 20)

        layout.addWidget(workfiles_btn)
        layout.addWidget(create_btn)
        layout.addWidget(publish_btn)
        layout.addWidget(load_btn)
        layout.addWidget(inventory_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(libload_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(rename_btn)

        layout.addWidget(Spacer(15, self))

        layout.addWidget(set_colorspace_btn)
        layout.addWidget(reset_resolution_btn)

        self.setLayout(layout)

        workfiles_btn.clicked.connect(self.on_workfile_clicked)
        create_btn.clicked.connect(self.on_create_clicked)
        publish_btn.clicked.connect(self.on_publish_clicked)
        load_btn.clicked.connect(self.on_load_clicked)
        inventory_btn.clicked.connect(self.on_inventory_clicked)
        libload_btn.clicked.connect(self.on_libload_clicked)
        rename_btn.clicked.connect(self.on_rename_clicked)
        set_colorspace_btn.clicked.connect(self.on_set_colorspace_clicked)
        reset_resolution_btn.clicked.connect(self.on_reset_resolution_clicked)

    def on_workfile_clicked(self):
        print("Clicked Workfile")
        launch_workfiles_app()

    def on_create_clicked(self):
        print("Clicked Create")
        creator.show()

    def on_publish_clicked(self):
        print("Clicked Publish")
        publish(None)

    def on_load_clicked(self):
        print("Clicked Load")
        loader.show(use_context=True)

    def on_inventory_clicked(self):
        print("Clicked Inventory")
        sceneinventory.show()

    def on_libload_clicked(self):
        print("Clicked Library")
        libraryloader.show()

    def on_rename_clicked(self):
        print("Clicked Rename")

    def on_set_colorspace_clicked(self):
        print("Clicked Set Colorspace")

    def on_reset_resolution_clicked(self):
        print("Clicked Reset Resolution")


def launch_pype_menu():
    app = QtWidgets.QApplication(sys.argv)

    pype_menu = PypeMenu()

    stylesheet = load_stylesheet()
    pype_menu.setStyleSheet(stylesheet)

    pype_menu.show()

    sys.exit(app.exec_())

29  pype/hosts/resolve/menu_style.qss  Normal file
@@ -0,0 +1,29 @@
QWidget {
    background-color: #282828;
    border-radius: 3px;
}

QPushButton {
    border: 1px solid #090909;
    background-color: #201f1f;
    color: #ffffff;
    padding: 5px;
}

QPushButton:focus {
    background-color: #171717;
    color: #d0d0d0;
}

QPushButton:hover {
    background-color: #171717;
    color: #e64b3d;
}

#PypeMenu {
    border: 1px solid #fef9ef;
}

#Spacer {
    background-color: #282828;
}

142  pype/hosts/resolve/pipeline.py  Normal file
@@ -0,0 +1,142 @@
"""
Basic avalon integration
"""
import os
# import sys
from avalon.tools import workfiles
from avalon import api as avalon
from pyblish import api as pyblish
from pypeapp import Logger

log = Logger().get_logger(__name__, "resolve")

# self = sys.modules[__name__]

AVALON_CONFIG = os.environ["AVALON_CONFIG"]
PARENT_DIR = os.path.dirname(__file__)
PACKAGE_DIR = os.path.dirname(PARENT_DIR)
PLUGINS_DIR = os.path.join(PACKAGE_DIR, "plugins")

LOAD_PATH = os.path.join(PLUGINS_DIR, "resolve", "load")
CREATE_PATH = os.path.join(PLUGINS_DIR, "resolve", "create")
INVENTORY_PATH = os.path.join(PLUGINS_DIR, "resolve", "inventory")

PUBLISH_PATH = os.path.join(
    PLUGINS_DIR, "resolve", "publish"
).replace("\\", "/")

AVALON_CONTAINERS = ":AVALON_CONTAINERS"
# IS_HEADLESS = not hasattr(cmds, "about") or cmds.about(batch=True)


def install():
    """Install Resolve-specific functionality of avalon-core.

    This is where you install menus and register families, data
    and loaders into Resolve.

    It is called automatically when installing via `api.install(resolve)`.

    See the Maya equivalent for inspiration on how to implement this.

    """

    # Disable all families except for the ones we explicitly want to see
    family_states = [
        "imagesequence",
        "mov"
    ]
    avalon.data["familiesStateDefault"] = False
    avalon.data["familiesStateToggled"] = family_states

    log.info("pype.hosts.resolve installed")

    pyblish.register_host("resolve")
    pyblish.register_plugin_path(PUBLISH_PATH)
    log.info("Registering DaVinci Resolve plug-ins..")

    avalon.register_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.register_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.register_plugin_path(avalon.InventoryAction, INVENTORY_PATH)


def uninstall():
    """Uninstall everything that was installed.

    This is where you undo everything that was done in `install()`.
    That means removing menus, deregistering families and data
    and everything else. It should be as though `install()` was never run,
    because odds are calling this function means the user is interested
    in re-installing shortly afterwards, for example after modifying
    the menu or registered families.

    """
    pyblish.deregister_host("resolve")
    pyblish.deregister_plugin_path(PUBLISH_PATH)
    log.info("Deregistering DaVinci Resolve plug-ins..")

    avalon.deregister_plugin_path(avalon.Loader, LOAD_PATH)
    avalon.deregister_plugin_path(avalon.Creator, CREATE_PATH)
    avalon.deregister_plugin_path(avalon.InventoryAction, INVENTORY_PATH)


def containerise(obj,
                 name,
                 namespace,
                 context,
                 loader=None,
                 data=None):
    """Bundle Resolve's object into an assembly and imprint it with metadata

    Containerisation enables tracking of version, author and origin
    for loaded assets.

    Arguments:
        obj (obj): Resolve's object to imprint as container
        name (str): Name of resulting assembly
        namespace (str): Namespace under which to host container
        context (dict): Asset information
        loader (str, optional): Name of node used to produce this container.

    Returns:
        obj (obj): containerised object

    """
    pass


def ls():
    """List available containers.

    This function is used by the Container Manager in Nuke. You'll
    need to implement a for-loop that then *yields* one Container at
    a time.

    See the `container.json` schema for details on how it should look,
    and the Maya equivalent, which is in `avalon.maya.pipeline`.
    """
    pass


def parse_container(container):
    """Return the container node's full container data.

    Args:
        container (str): A container node name.

    Returns:
        dict: The container schema data for this container node.

    """
    pass


def launch_workfiles_app(*args):
    workdir = os.environ["AVALON_WORKDIR"]
    workfiles.show(workdir)


def publish(parent):
    """Shorthand to publish from within the host."""
    from avalon.tools import publish
    return publish.show(parent)

75  pype/hosts/resolve/plugin.py  Normal file
@@ -0,0 +1,75 @@
from avalon import api
# from pype.hosts.resolve import lib as drlib
from avalon.vendor import qargparse


def get_reference_node_parents(ref):
    """Return all parent reference nodes of a reference node

    Args:
        ref (str): reference node.

    Returns:
        list: The upstream parent reference nodes.

    """
    parents = []
    return parents


class SequenceLoader(api.Loader):
    """A basic SequenceLoader for Resolve

    This will implement the basic behavior for a loader to inherit from that
    will containerize the reference and will implement the `remove` and
    `update` logic.

    """

    options = [
        qargparse.Toggle(
            "handles",
            label="Include handles",
            default=0,
            help="Load with handles or without?"
        ),
        qargparse.Choice(
            "load_to",
            label="Where to load clips",
            items=[
                "Current timeline",
                "New timeline"
            ],
            default=0,
            help="Where do you want clips to be loaded?"
        ),
        qargparse.Choice(
            "load_how",
            label="How to load clips",
            items=[
                "original timing",
                "sequential in order"
            ],
            default=0,
            help="Would you like to place it at the original timing?"
        )
    ]

    def load(
        self,
        context,
        name=None,
        namespace=None,
        options=None
    ):
        pass

    def update(self, container, representation):
        """Update an existing `container`
        """
        pass

    def remove(self, container):
        """Remove an existing `container`
        """
        pass

31  pype/hosts/resolve/preload_console.py  Normal file
@@ -0,0 +1,31 @@
#!/usr/bin/env python
import time
from pype.hosts.resolve.utils import get_resolve_module
from pypeapp import Logger

log = Logger().get_logger(__name__, "resolve")

wait_delay = 2.5
wait = 0.00
ready = None
while True:
    try:
        # try to get the Resolve module and its Project Manager
        resolve = get_resolve_module()
        pm = resolve.GetProjectManager()
        if pm:
            # Project Manager is available, so we are ready
            ready = True
        else:
            ready = None
    except AttributeError:
        pass

    if ready is None:
        time.sleep(wait_delay)
        log.info(f"Waiting {wait}s for Resolve to have opened Project Manager")
        wait += wait_delay
    else:
        print(f"Preloaded variables: \n\n\tResolve module: "
              f"`resolve` > {type(resolve)} \n\tProject manager: "
              f"`pm` > {type(pm)}")
        break

26  pype/hosts/resolve/utility_scripts/Pype_menu.py  Normal file
@@ -0,0 +1,26 @@
import os
import sys
import avalon.api as avalon
import pype

from pypeapp import Logger

log = Logger().get_logger(__name__)


def main(env):
    import pype.hosts.resolve as bmdvr
    # Register pype's global pyblish plugins
    pype.install()

    # activate resolve from pype
    avalon.install(bmdvr)

    log.info(f"Avalon registered hosts: {avalon.registered_host()}")

    bmdvr.launch_pype_menu()


if __name__ == "__main__":
    result = main(os.environ)
    sys.exit(not bool(result))

1  pype/hosts/resolve/utility_scripts/README.markdown  Normal file
@@ -0,0 +1 @@


65  pype/hosts/resolve/utility_scripts/__dev_compound_clip.py  Normal file
@@ -0,0 +1,65 @@
#! python3
# -*- coding: utf-8 -*-


# convert clip def
def convert_clip(timeline=None):
    """Convert timeline item (clip) into a compound clip pype container

    Args:
        timeline (MediaPool.Timeline): Object of timeline

    Returns:
        bool: `True` if success

    Raises:
        Exception: description

    """
    pass


# create_current_timeline_media_bin()
def create_current_timeline_media_bin(timeline=None):
    """Create a media bin for the current timeline

    Args:
        timeline (MediaPool.Timeline): Object of timeline

    Returns:
        bool: `True` if success

    Raises:
        Exception: description

    """
    pass


# get_selected_track_items()
def get_selected_track_items():
    """Get the selected track items from the current timeline

    Returns:
        bool: `True` if success

    Raises:
        Exception: description

    """
    print("testText")


# PypeCompoundClip() class
class PypeCompoundClip(object):
    """Development stub for pype compound clip handling."""

    def __init__(self, arg):
        super(PypeCompoundClip, self).__init__()
        self.arg = arg

    def create_compound_clip(self):
        pass

57  pype/hosts/resolve/utility_scripts/__test_pyblish.py  Normal file
@@ -0,0 +1,57 @@
import os
import sys
import pype
import importlib
import pyblish.api
import pyblish.util
import avalon.api
from avalon.tools import publish
from pypeapp import Logger

log = Logger().get_logger(__name__)


def main(env):
    # Register pype's global pyblish plugins
    pype.install()

    # Register host (and its pyblish plugins)
    host_name = env["AVALON_APP"]
    # TODO: not sure whether to use "pype." or "avalon." for the host import
    host_import_str = f"pype.{host_name}"

    try:
        host_module = importlib.import_module(host_import_str)
    except ModuleNotFoundError:
        log.error((
            f"Host \"{host_name}\" can't be imported."
            f" Import string \"{host_import_str}\" failed."
        ))
        return False

    avalon.api.install(host_module)

    # Register additional paths
    addition_paths_str = env.get("PUBLISH_PATHS") or ""
    addition_paths = addition_paths_str.split(os.pathsep)
    for path in addition_paths:
        path = os.path.normpath(path)
        if not os.path.exists(path):
            continue

        pyblish.api.register_plugin_path(path)

    # Register project-specific plugins
    project_name = os.environ["AVALON_PROJECT"]
    project_plugins_paths = env.get("PYPE_PROJECT_PLUGINS") or ""
    for path in project_plugins_paths.split(os.pathsep):
        plugin_path = os.path.join(path, project_name, "plugins")
        if os.path.exists(plugin_path):
            pyblish.api.register_plugin_path(plugin_path)

    return publish.show()


if __name__ == "__main__":
    result = main(os.environ)
    sys.exit(not bool(result))

35  pype/hosts/resolve/utility_scripts/__test_subprocess.py  Normal file
@@ -0,0 +1,35 @@
#! python3
# -*- coding: utf-8 -*-
import os
from pypeapp import execute, Logger
from pype.hosts.resolve.utils import get_resolve_module

log = Logger().get_logger("Resolve")

CURRENT_DIR = os.getenv("RESOLVE_UTILITY_SCRIPTS_DIR", "")
python_dir = os.getenv("PYTHON36_RESOLVE")
python_exe = os.path.normpath(
    os.path.join(python_dir, "python.exe")
)

resolve = get_resolve_module()
PM = resolve.GetProjectManager()
P = PM.GetCurrentProject()

log.info(P.GetName())


# ______________________________________________________
# testing subprocessing scripts
testing_py = os.path.join(CURRENT_DIR, "ResolvePageSwitcher.py")
testing_py = os.path.normpath(testing_py)
log.info(f"Testing path to script: `{testing_py}`")

returncode = execute(
    [python_exe, os.path.normpath(testing_py)],
    env=dict(os.environ)
)

# Check whether the subprocess succeeded
if returncode != 0:
    log.error("Executing failed!")

136  pype/hosts/resolve/utils.py  Normal file
@@ -0,0 +1,136 @@
#! python3

"""
Resolve's tools for setting up the environment
"""

import sys
import os
import shutil

from pypeapp import Logger

log = Logger().get_logger(__name__, "resolve")

self = sys.modules[__name__]
self.bmd = None


def get_resolve_module():
    # don't run if already loaded
    if self.bmd:
        return self.bmd

    try:
        """
        The PYTHONPATH needs to be set correctly for this import
        statement to work. An alternative is to import the
        DaVinciResolveScript by specifying absolute path
        (see ExceptionHandler logic)
        """
        import DaVinciResolveScript as bmd
    except ImportError:
        if sys.platform.startswith("darwin"):
            expected_path = ("/Library/Application Support/Blackmagic Design"
                             "/DaVinci Resolve/Developer/Scripting/Modules")
        elif sys.platform.startswith("win") \
                or sys.platform.startswith("cygwin"):
            expected_path = os.path.normpath(
                os.getenv('PROGRAMDATA') + (
                    "/Blackmagic Design/DaVinci Resolve/Support/Developer"
                    "/Scripting/Modules"
                )
            )
        elif sys.platform.startswith("linux"):
            expected_path = "/opt/resolve/libs/Fusion/Modules"

        # check if the default path has it...
        print(("Unable to find module DaVinciResolveScript from "
               "$PYTHONPATH - trying default locations"))

        module_path = os.path.normpath(
            os.path.join(
                expected_path,
                "DaVinciResolveScript.py"
            )
        )

        try:
            import imp
            bmd = imp.load_source('DaVinciResolveScript', module_path)
        except ImportError:
            # No fallbacks ... report error:
            log.error(
                ("Unable to find module DaVinciResolveScript - please "
                 "ensure that the module DaVinciResolveScript is "
                 "discoverable by python")
            )
            log.error(
                ("For a default DaVinci Resolve installation, the "
                 f"module is expected to be located in: {expected_path}")
            )
            sys.exit()
    # assign global var and return
    self.bmd = bmd.scriptapp("Resolve")
    return self.bmd


def _sync_utility_scripts(env=None):
    """Synchronize the basic utility scripts for Resolve.

    To be able to run scripts from inside the `Resolve/Workspace/Scripts`
    menu, all scripts have to be accessible from the defined folder.
    """
    if not env:
        env = os.environ

    # initiate inputs
    scripts = {}
    us_env = env.get("RESOLVE_UTILITY_SCRIPTS_SOURCE_DIR")
    us_dir = env.get("RESOLVE_UTILITY_SCRIPTS_DIR", "")
    us_paths = [os.path.join(
        os.path.dirname(__file__),
        "utility_scripts"
    )]

    # collect script dirs
    if us_env:
        log.info(f"Utility Scripts Env: `{us_env}`")
        us_paths = us_env.split(
            os.pathsep) + us_paths

    # collect scripts from dirs
    for path in us_paths:
        scripts.update({path: os.listdir(path)})

    log.info(f"Utility Scripts Dir: `{us_paths}`")
    log.info(f"Utility Scripts: `{scripts}`")

    # make sure no script file is left in the target folder
    if next((s for s in os.listdir(us_dir)), None):
        for s in os.listdir(us_dir):
            path = os.path.join(us_dir, s)
            log.info(f"Removing `{path}`...")
            os.remove(path)

    # copy scripts into Resolve's utility scripts dir
    for d, sl in scripts.items():
        # directory and scripts list
        for s in sl:
            # script in script list
            src = os.path.join(d, s)
            dst = os.path.join(us_dir, s)
            log.info(f"Copying `{src}` to `{dst}`...")
            shutil.copy2(src, dst)


def setup(env=None):
    """Wrapper installer started from pype.hooks.resolve.ResolvePrelaunch()
    """
    if not env:
        env = os.environ

    # synchronize resolve utility scripts
    _sync_utility_scripts(env)

    log.info("Resolve Pype wrapper has been installed")

92  pype/hosts/resolve/workio.py  Normal file
@@ -0,0 +1,92 @@
"""Host API required by the Work Files tool"""

import os
from pypeapp import Logger
from .lib import (
    get_project_manager,
    set_project_manager_to_folder_name
)


log = Logger().get_logger(__name__, "resolve")

exported_projet_ext = ".drp"


def file_extensions():
    return [exported_projet_ext]


def has_unsaved_changes():
    get_project_manager().SaveProject()
    return False


def save_file(filepath):
    pm = get_project_manager()
    file = os.path.basename(filepath)
    fname, _ = os.path.splitext(file)
    project = pm.GetCurrentProject()
    name = project.GetName()

    if "Untitled Project" not in name:
        log.info("Saving project: `{}` as '{}'".format(name, file))
        pm.ExportProject(name, filepath)
    else:
        log.info("Creating new project...")
        pm.CreateProject(fname)
        pm.ExportProject(fname, filepath)


def open_file(filepath):
    """
    Load a project
    """
    pm = get_project_manager()
    file = os.path.basename(filepath)
    fname, _ = os.path.splitext(file)
    dname, _ = fname.split("_v")

    # deal with the current project
    project = pm.GetCurrentProject()
    log.info(f"Test `pm`: {pm}")
    pm.SaveProject()

    try:
        log.info(f"Test `dname`: {dname}")
        if not set_project_manager_to_folder_name(dname):
            raise
        # load project from input path
        project = pm.LoadProject(fname)
        log.info(f"Project {project.GetName()} opened...")
        return True
    except AttributeError:
        log.warning((f"Project with name `{fname}` does not exist! It will "
                     f"be imported from {filepath} and then loaded..."))
        if pm.ImportProject(filepath):
            # load project from input path
            project = pm.LoadProject(fname)
            log.info(f"Project imported/loaded {project.GetName()}...")
            return True
        else:
            return False


def current_file():
    pm = get_project_manager()
    current_dir = os.getenv("AVALON_WORKDIR")
    project = pm.GetCurrentProject()
    name = project.GetName()
    fname = name + exported_projet_ext
    current_file = os.path.join(current_dir, fname)
    normalised = os.path.normpath(current_file)

    # Unsaved current file
    if normalised == "":
        return None

    return normalised


def work_root(session):
    return os.path.normpath(session["AVALON_WORKDIR"]).replace("\\", "/")

@@ -1,7 +1,7 @@
 import os
 import threading
 from pype.api import Logger
-from pypeapp import style
+from avalon import style
 from Qt import QtWidgets
 from . import ClockifySettings, ClockifyAPI, MessageWidget


@@ -1,5 +1,5 @@
 from Qt import QtCore, QtGui, QtWidgets
-from pypeapp import style
+from avalon import style


 class MessageWidget(QtWidgets.QWidget):

@@ -1,6 +1,6 @@
 import os
 from Qt import QtCore, QtGui, QtWidgets
-from pypeapp import style
+from avalon import style


 class ClockifySettings(QtWidgets.QWidget):

@@ -1,7 +1,7 @@
 import os
 import toml
 import time
-from pype.modules.ftrack import AppAction
+from pype.modules.ftrack.lib import AppAction
 from avalon import lib
 from pype.api import Logger
 from pype.lib import get_all_avalon_projects

@@ -72,7 +72,7 @@ def register(session, plugins_presets={}):
     for app in apps:
         try:
             registerApp(app, session, plugins_presets)
-            if app_counter%5 == 0:
+            if app_counter % 5 == 0:
                 time.sleep(0.1)
             app_counter += 1
         except Exception as exc:

@@ -1,284 +0,0 @@
import os

import ftrack_api
from pype.modules.ftrack import BaseAction
from pype.modules.ftrack.lib.io_nonsingleton import DbConnector


class AttributesRemapper(BaseAction):
    '''Edit meta data action.'''

    ignore_me = True
    #: Action identifier.
    identifier = 'attributes.remapper'
    #: Action label.
    label = "Pype Doctor"
    variant = '- Attributes Remapper'
    #: Action description.
    description = 'Remaps attributes in avalon DB'

    #: roles that are allowed to register this action
    role_list = ["Pypeclub", "Administrator"]
    icon = '{}/ftrack/action_icons/PypeDoctor.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )

    db_con = DbConnector()
    keys_to_change = {
        "fstart": "frameStart",
        "startFrame": "frameStart",
        "edit_in": "frameStart",

        "fend": "frameEnd",
        "endFrame": "frameEnd",
        "edit_out": "frameEnd",

        "handle_start": "handleStart",
        "handle_end": "handleEnd",
        "handles": ["handleEnd", "handleStart"],

        "frameRate": "fps",
        "framerate": "fps",
        "resolution_width": "resolutionWidth",
        "resolution_height": "resolutionHeight",
        "pixel_aspect": "pixelAspect"
    }

    def discover(self, session, entities, event):
        ''' Validation '''

        return True

    def interface(self, session, entities, event):
        if event['data'].get('values', {}):
            return

        title = 'Select Projects where attributes should be remapped'

        items = []

        selection_enum = {
            'label': 'Process type',
            'type': 'enumerator',
            'name': 'process_type',
            'data': [
                {
                    'label': 'Selection',
                    'value': 'selection'
                }, {
                    'label': 'Inverted selection',
                    'value': 'except'
                }
            ],
            'value': 'selection'
        }
        selection_label = {
            'type': 'label',
            'value': (
                'Selection based variants:<br/>'
                '- `Selection` - '
                'NOTHING is processed when nothing is selected<br/>'
                '- `Inverted selection` - '
                'ALL Projects are processed when nothing is selected'
            )
        }

        items.append(selection_enum)
        items.append(selection_label)

        item_splitter = {'type': 'label', 'value': '---'}

        all_projects = session.query('Project').all()
        for project in all_projects:
            item_label = {
                'type': 'label',
                'value': '{} (<i>{}</i>)'.format(
                    project['full_name'], project['name']
                )
            }
            item = {
                'name': project['id'],
                'type': 'boolean',
                'value': False
            }
            if len(items) > 0:
                items.append(item_splitter)
            items.append(item_label)
            items.append(item)

        if len(items) == 0:
            return {
                'success': False,
                'message': 'Didn\'t found any projects'
            }
        else:
            return {
                'items': items,
                'title': title
            }

    def launch(self, session, entities, event):
        if 'values' not in event['data']:
            return

        values = event['data']['values']
        process_type = values.pop('process_type')

        selection = True
        if process_type == 'except':
            selection = False

        interface_messages = {}

        projects_to_update = []
        for project_id, update_bool in values.items():
            if not update_bool and selection:
                continue

            if update_bool and not selection:
                continue

            project = session.query(
                'Project where id is "{}"'.format(project_id)
            ).one()
            projects_to_update.append(project)

        if not projects_to_update:
            self.log.debug('Nothing to update')
            return {
                'success': True,
                'message': 'Nothing to update'
            }

        self.db_con.install()

        relevant_types = ["project", "asset", "version"]

        for ft_project in projects_to_update:
            self.log.debug(
                "Processing project \"{}\"".format(ft_project["full_name"])
            )

            self.db_con.Session["AVALON_PROJECT"] = ft_project["full_name"]
            project = self.db_con.find_one({'type': 'project'})
            if not project:
                key = "Projects not synchronized to db"
                if key not in interface_messages:
                    interface_messages[key] = []
                interface_messages[key].append(ft_project["full_name"])
                continue

            # Get all entities in project collection from MongoDB
            _entities = self.db_con.find({})
            for _entity in _entities:
                ent_t = _entity.get("type", "*unknown type")
                name = _entity.get("name", "*unknown name")

                self.log.debug(
                    "- {} ({})".format(name, ent_t)
                )

                # Skip types that do not store keys to change
                if ent_t.lower() not in relevant_types:
                    self.log.debug("-- skipping - type is not relevant")
|
||||
continue
|
||||
|
||||
# Get data which will change
|
||||
updating_data = {}
|
||||
source_data = _entity["data"]
|
||||
|
||||
for key_from, key_to in self.keys_to_change.items():
|
||||
# continue if final key already exists
|
||||
if type(key_to) == list:
|
||||
for key in key_to:
|
||||
# continue if final key was set in update_data
|
||||
if key in updating_data:
|
||||
continue
|
||||
|
||||
# continue if source key not exist or value is None
|
||||
value = source_data.get(key_from)
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
self.log.debug(
|
||||
"-- changing key {} to {}".format(
|
||||
key_from,
|
||||
key
|
||||
)
|
||||
)
|
||||
|
||||
updating_data[key] = value
|
||||
else:
|
||||
if key_to in source_data:
|
||||
continue
|
||||
|
||||
# continue if final key was set in update_data
|
||||
if key_to in updating_data:
|
||||
continue
|
||||
|
||||
# continue if source key not exist or value is None
|
||||
value = source_data.get(key_from)
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
self.log.debug(
|
||||
"-- changing key {} to {}".format(key_from, key_to)
|
||||
)
|
||||
updating_data[key_to] = value
|
||||
|
||||
# Pop out old keys from entity
|
||||
is_obsolete = False
|
||||
for key in self.keys_to_change:
|
||||
if key not in source_data:
|
||||
continue
|
||||
is_obsolete = True
|
||||
source_data.pop(key)
|
||||
|
||||
# continue if there is nothing to change
|
||||
if not is_obsolete and not updating_data:
|
||||
self.log.debug("-- nothing to change")
|
||||
continue
|
||||
|
||||
source_data.update(updating_data)
|
||||
|
||||
self.db_con.update_many(
|
||||
{"_id": _entity["_id"]},
|
||||
{"$set": {"data": source_data}}
|
||||
)
|
||||
|
||||
self.db_con.uninstall()
|
||||
|
||||
if interface_messages:
|
||||
self.show_interface_from_dict(
|
||||
messages=interface_messages,
|
||||
title="Errors during remapping attributes",
|
||||
event=event
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def show_interface_from_dict(self, event, messages, title=""):
|
||||
items = []
|
||||
|
||||
for key, value in messages.items():
|
||||
if not value:
|
||||
continue
|
||||
subtitle = {'type': 'label', 'value': '# {}'.format(key)}
|
||||
items.append(subtitle)
|
||||
if isinstance(value, list):
|
||||
for item in value:
|
||||
message = {
|
||||
'type': 'label', 'value': '<p>{}</p>'.format(item)
|
||||
}
|
||||
items.append(message)
|
||||
else:
|
||||
message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
|
||||
items.append(message)
|
||||
|
||||
self.show_interface(items=items, title=title, event=event)
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
AttributesRemapper(session, plugins_presets).register()
|
||||
|
|
@ -1,7 +1,6 @@
|
|||
import os
|
||||
import collections
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.modules.ftrack.lib.avalon_sync import get_avalon_attr
|
||||
|
||||
|
||||
|
|
@ -11,9 +10,7 @@ class CleanHierarchicalAttrsAction(BaseAction):
|
|||
variant = "- Clean hierarchical custom attributes"
|
||||
description = "Unset empty hierarchical attribute values."
|
||||
role_list = ["Pypeclub", "Administrator", "Project Manager"]
|
||||
icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")
|
||||
|
||||
all_project_entities_query = (
|
||||
"select id, name, parent_id, link"
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction
|
||||
try:
|
||||
from functools import cmp_to_key
|
||||
except Exception:
|
||||
|
|
|
|||
|
|
@ -1,10 +1,7 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
import subprocess
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
|
||||
|
||||
class ComponentOpen(BaseAction):
|
||||
|
|
@ -15,9 +12,7 @@ class ComponentOpen(BaseAction):
|
|||
# Action label
|
||||
label = 'Open File'
|
||||
# Action icon
|
||||
icon = '{}/ftrack/action_icons/ComponentOpen.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "ComponentOpen.svg")
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
|
|
@ -69,42 +64,3 @@ def register(session, plugins_presets={}):
|
|||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
ComponentOpen(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
|
|||
|
|
@ -1,9 +1,8 @@
|
|||
import os
|
||||
import collections
|
||||
import json
|
||||
import arrow
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.modules.ftrack.lib.avalon_sync import CustAttrIdKey
|
||||
from pype.api import config
|
||||
|
||||
|
|
@ -114,9 +113,7 @@ class CustomAttributes(BaseAction):
|
|||
description = 'Creates Avalon/Mongo ID for double check'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")
|
||||
|
||||
required_keys = ['key', 'label', 'type']
|
||||
type_posibilities = [
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import os
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from avalon import lib as avalonlib
|
||||
from pype.api import config, Anatomy
|
||||
|
||||
|
|
@ -7,9 +7,7 @@ from pype.api import config, Anatomy
|
|||
class CreateFolders(BaseAction):
|
||||
identifier = "create.folders"
|
||||
label = "Create Folders"
|
||||
icon = "{}/ftrack/action_icons/CreateFolders.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "CreateFolders.svg")
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
if len(entities) != 1:
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import re
|
||||
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.api import config, Anatomy
|
||||
|
||||
|
||||
|
|
@ -52,9 +52,7 @@ class CreateProjectFolders(BaseAction):
|
|||
label = "Create Project Structure"
|
||||
description = "Creates folder structure"
|
||||
role_list = ["Pypeclub", "Administrator", "Project Manager"]
|
||||
icon = "{}/ftrack/action_icons/CreateProjectFolders.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "CreateProjectFolders.svg")
|
||||
|
||||
pattern_array = re.compile(r"\[.*\]")
|
||||
pattern_ftrack = re.compile(r".*\[[.]*ftrack[.]*")
|
||||
|
|
|
|||
|
|
@ -1,336 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
|
||||
|
||||
class CustomAttributeDoctor(BaseAction):
|
||||
|
||||
ignore_me = True
|
||||
#: Action identifier.
|
||||
identifier = 'custom.attributes.doctor'
|
||||
#: Action label.
|
||||
label = "Pype Doctor"
|
||||
variant = '- Custom Attributes Doctor'
|
||||
#: Action description.
|
||||
description = (
|
||||
'Fix hierarchical custom attributes mainly handles, fstart'
|
||||
' and fend'
|
||||
)
|
||||
|
||||
icon = '{}/ftrack/action_icons/PypeDoctor.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
hierarchical_ca = ['handleStart', 'handleEnd', 'frameStart', 'frameEnd']
|
||||
hierarchical_alternatives = {
|
||||
'handleStart': 'handles',
|
||||
'handleEnd': 'handles',
|
||||
"frameStart": "fstart",
|
||||
"frameEnd": "fend"
|
||||
}
|
||||
|
||||
# Roles for new custom attributes
|
||||
read_roles = ['ALL',]
|
||||
write_roles = ['ALL',]
|
||||
|
||||
data_ca = {
|
||||
'handleStart': {
|
||||
'label': 'Frame handles start',
|
||||
'type': 'number',
|
||||
'config': json.dumps({'isdecimal': False})
|
||||
},
|
||||
'handleEnd': {
|
||||
'label': 'Frame handles end',
|
||||
'type': 'number',
|
||||
'config': json.dumps({'isdecimal': False})
|
||||
},
|
||||
'frameStart': {
|
||||
'label': 'Frame start',
|
||||
'type': 'number',
|
||||
'config': json.dumps({'isdecimal': False})
|
||||
},
|
||||
'frameEnd': {
|
||||
'label': 'Frame end',
|
||||
'type': 'number',
|
||||
'config': json.dumps({'isdecimal': False})
|
||||
}
|
||||
}
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
|
||||
return True
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
if event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
title = 'Select Project to fix Custom attributes'
|
||||
|
||||
items = []
|
||||
item_splitter = {'type': 'label', 'value': '---'}
|
||||
|
||||
all_projects = session.query('Project').all()
|
||||
for project in all_projects:
|
||||
item_label = {
|
||||
'type': 'label',
|
||||
'value': '{} (<i>{}</i>)'.format(
|
||||
project['full_name'], project['name']
|
||||
)
|
||||
}
|
||||
item = {
|
||||
'name': project['id'],
|
||||
'type': 'boolean',
|
||||
'value': False
|
||||
}
|
||||
if len(items) > 0:
|
||||
items.append(item_splitter)
|
||||
items.append(item_label)
|
||||
items.append(item)
|
||||
|
||||
if len(items) == 0:
|
||||
return {
|
||||
'success': False,
|
||||
'message': 'Didn\'t found any projects'
|
||||
}
|
||||
else:
|
||||
return {
|
||||
'items': items,
|
||||
'title': title
|
||||
}
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
if 'values' not in event['data']:
|
||||
return
|
||||
|
||||
values = event['data']['values']
|
||||
projects_to_update = []
|
||||
for project_id, update_bool in values.items():
|
||||
if not update_bool:
|
||||
continue
|
||||
|
||||
project = session.query(
|
||||
'Project where id is "{}"'.format(project_id)
|
||||
).one()
|
||||
projects_to_update.append(project)
|
||||
|
||||
if not projects_to_update:
|
||||
self.log.debug('Nothing to update')
|
||||
return {
|
||||
'success': True,
|
||||
'message': 'Nothing to update'
|
||||
}
|
||||
|
||||
self.security_roles = {}
|
||||
self.to_process = {}
|
||||
# self.curent_default_values = {}
|
||||
existing_attrs = session.query('CustomAttributeConfiguration').all()
|
||||
self.prepare_custom_attributes(existing_attrs)
|
||||
|
||||
self.projects_data = {}
|
||||
for project in projects_to_update:
|
||||
self.process_data(project)
|
||||
|
||||
return True
|
||||
|
||||
def process_data(self, entity):
|
||||
cust_attrs = entity.get('custom_attributes')
|
||||
if not cust_attrs:
|
||||
return
|
||||
for dst_key, src_key in self.to_process.items():
|
||||
if src_key in cust_attrs:
|
||||
value = cust_attrs[src_key]
|
||||
entity['custom_attributes'][dst_key] = value
|
||||
self.session.commit()
|
||||
|
||||
for child in entity.get('children', []):
|
||||
self.process_data(child)
|
||||
|
||||
def prepare_custom_attributes(self, existing_attrs):
|
||||
to_process = {}
|
||||
to_create = []
|
||||
all_keys = {attr['key']: attr for attr in existing_attrs}
|
||||
for key in self.hierarchical_ca:
|
||||
if key not in all_keys:
|
||||
self.log.debug(
|
||||
'Custom attribute "{}" does not exist at all'.format(key)
|
||||
)
|
||||
to_create.append(key)
|
||||
if key in self.hierarchical_alternatives:
|
||||
alt_key = self.hierarchical_alternatives[key]
|
||||
if alt_key in all_keys:
|
||||
self.log.debug((
|
||||
'Custom attribute "{}" will use values from "{}"'
|
||||
).format(key, alt_key))
|
||||
|
||||
to_process[key] = alt_key
|
||||
|
||||
obj = all_keys[alt_key]
|
||||
# if alt_key not in self.curent_default_values:
|
||||
# self.curent_default_values[alt_key] = obj['default']
|
||||
obj['default'] = None
|
||||
self.session.commit()
|
||||
|
||||
else:
|
||||
obj = all_keys[key]
|
||||
new_key = key + '_old'
|
||||
|
||||
if obj['is_hierarchical']:
|
||||
if new_key not in all_keys:
|
||||
self.log.info((
|
||||
'Custom attribute "{}" is already hierarchical'
|
||||
' and can\'t find old one'
|
||||
).format(key)
|
||||
)
|
||||
continue
|
||||
|
||||
to_process[key] = new_key
|
||||
continue
|
||||
|
||||
# default_value = obj['default']
|
||||
# if new_key not in self.curent_default_values:
|
||||
# self.curent_default_values[new_key] = default_value
|
||||
|
||||
obj['key'] = new_key
|
||||
obj['label'] = obj['label'] + '(old)'
|
||||
obj['default'] = None
|
||||
|
||||
self.session.commit()
|
||||
|
||||
to_create.append(key)
|
||||
to_process[key] = new_key
|
||||
|
||||
self.to_process = to_process
|
||||
for key in to_create:
|
||||
data = {
|
||||
'key': key,
|
||||
'entity_type': 'show',
|
||||
'is_hierarchical': True,
|
||||
'default': None
|
||||
}
|
||||
for _key, _value in self.data_ca.get(key, {}).items():
|
||||
if _key == 'type':
|
||||
_value = self.session.query((
|
||||
'CustomAttributeType where name is "{}"'
|
||||
).format(_value)).first()
|
||||
|
||||
data[_key] = _value
|
||||
|
||||
avalon_group = self.session.query(
|
||||
'CustomAttributeGroup where name is "avalon"'
|
||||
).first()
|
||||
if avalon_group:
|
||||
data['group'] = avalon_group
|
||||
|
||||
read_roles = self.get_security_role(self.read_roles)
|
||||
write_roles = self.get_security_role(self.write_roles)
|
||||
data['read_security_roles'] = read_roles
|
||||
data['write_security_roles'] = write_roles
|
||||
|
||||
self.session.create('CustomAttributeConfiguration', data)
|
||||
self.session.commit()
|
||||
|
||||
# def return_back_defaults(self):
|
||||
# existing_attrs = self.session.query(
|
||||
# 'CustomAttributeConfiguration'
|
||||
# ).all()
|
||||
#
|
||||
# for attr_key, default in self.curent_default_values.items():
|
||||
# for attr in existing_attrs:
|
||||
# if attr['key'] != attr_key:
|
||||
# continue
|
||||
# attr['default'] = default
|
||||
# self.session.commit()
|
||||
# break
|
||||
|
||||
def get_security_role(self, security_roles):
|
||||
roles = []
|
||||
if len(security_roles) == 0 or security_roles[0] == 'ALL':
|
||||
roles = self.get_role_ALL()
|
||||
elif security_roles[0] == 'except':
|
||||
excepts = security_roles[1:]
|
||||
all = self.get_role_ALL()
|
||||
for role in all:
|
||||
if role['name'] not in excepts:
|
||||
roles.append(role)
|
||||
if role['name'] not in self.security_roles:
|
||||
self.security_roles[role['name']] = role
|
||||
else:
|
||||
for role_name in security_roles:
|
||||
if role_name in self.security_roles:
|
||||
roles.append(self.security_roles[role_name])
|
||||
continue
|
||||
|
||||
try:
|
||||
query = 'SecurityRole where name is "{}"'.format(role_name)
|
||||
role = self.session.query(query).one()
|
||||
self.security_roles[role_name] = role
|
||||
roles.append(role)
|
||||
except Exception:
|
||||
self.log.warning(
|
||||
'Securit role "{}" does not exist'.format(role_name)
|
||||
)
|
||||
continue
|
||||
|
||||
return roles
|
||||
|
||||
def get_role_ALL(self):
|
||||
role_name = 'ALL'
|
||||
if role_name in self.security_roles:
|
||||
all_roles = self.security_roles[role_name]
|
||||
else:
|
||||
all_roles = self.session.query('SecurityRole').all()
|
||||
self.security_roles[role_name] = all_roles
|
||||
for role in all_roles:
|
||||
if role['name'] not in self.security_roles:
|
||||
self.security_roles[role['name']] = role
|
||||
return all_roles
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as an plugin.'''
|
||||
|
||||
CustomAttributeDoctor(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
@ -1,11 +1,10 @@
|
|||
import os
|
||||
import collections
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from queue import Queue
|
||||
|
||||
from bson.objectid import ObjectId
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
|
|
@ -18,9 +17,7 @@ class DeleteAssetSubset(BaseAction):
|
|||
label = "Delete Asset/Subsets"
|
||||
#: Action description.
|
||||
description = "Removes from Avalon with all childs and asset from Ftrack"
|
||||
icon = "{}/ftrack/action_icons/DeleteAsset.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "DeleteAsset.svg")
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator", "Project Manager"]
|
||||
#: Db connection
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ import uuid
|
|||
import clique
|
||||
from pymongo import UpdateOne
|
||||
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
|
||||
from pype.api import Anatomy
|
||||
|
||||
|
|
@ -22,9 +22,7 @@ class DeleteOldVersions(BaseAction):
|
|||
" archived with only lates versions."
|
||||
)
|
||||
role_list = ["Pypeclub", "Project Manager", "Administrator"]
|
||||
icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")
|
||||
|
||||
dbcon = DbConnector()
|
||||
|
||||
|
|
|
|||
|
|
@ -8,11 +8,11 @@ from bson.objectid import ObjectId
|
|||
|
||||
from avalon import pipeline
|
||||
from avalon.vendor import filelink
|
||||
from avalon.tools.libraryloader.io_nonsingleton import DbConnector
|
||||
|
||||
from pype.api import Anatomy
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.modules.ftrack.lib.avalon_sync import CustAttrIdKey
|
||||
from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class Delivery(BaseAction):
|
||||
|
|
@ -21,9 +21,7 @@ class Delivery(BaseAction):
|
|||
label = "Delivery"
|
||||
description = "Deliver data to client"
|
||||
role_list = ["Pypeclub", "Administrator", "Project manager"]
|
||||
icon = "{}/ftrack/action_icons/Delivery.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "Delivery.svg")
|
||||
|
||||
db_con = DbConnector()
|
||||
|
||||
|
|
@ -508,6 +506,7 @@ class Delivery(BaseAction):
|
|||
"message": "Delivery Finished"
|
||||
}
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as a plugin.'''
|
||||
|
||||
|
|
|
|||
|
|
@ -1,11 +1,10 @@
|
|||
import os
|
||||
import sys
|
||||
import json
|
||||
import logging
|
||||
import subprocess
|
||||
from operator import itemgetter
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.api import Logger, config
|
||||
|
||||
log = Logger().get_logger(__name__)
|
||||
|
|
@ -16,9 +15,8 @@ class DJVViewAction(BaseAction):
|
|||
identifier = "djvview-launch-action"
|
||||
label = "DJV View"
|
||||
description = "DJV View Launcher"
|
||||
icon = '{}/app_icons/djvView.png'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("app_icons", "djvView.png")
|
||||
|
||||
type = 'Application'
|
||||
|
||||
def __init__(self, session, plugins_presets):
|
||||
|
|
|
|||
|
|
@ -1,11 +1,5 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
import json
|
||||
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
|
||||
|
||||
class JobKiller(BaseAction):
|
||||
|
|
@ -20,9 +14,7 @@ class JobKiller(BaseAction):
|
|||
description = 'Killing selected running jobs'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub', 'Administrator']
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
|
|
@ -124,43 +116,3 @@ def register(session, plugins_presets={}):
|
|||
'''Register plugin. Called when used as a plugin.'''
|
||||
|
||||
JobKiller(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
|
|||
|
|
@ -1,10 +1,4 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
import ftrack_api
|
||||
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
|
||||
|
||||
class MultipleNotes(BaseAction):
|
||||
|
|
@ -16,9 +10,7 @@ class MultipleNotes(BaseAction):
|
|||
label = 'Multiple Notes'
|
||||
#: Action description.
|
||||
description = 'Add same note to multiple Asset Versions'
|
||||
icon = '{}/ftrack/action_icons/MultipleNotes.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "MultipleNotes.svg")
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
|
|
@ -116,42 +108,3 @@ def register(session, plugins_presets={}):
|
|||
'''Register plugin. Called when used as a plugin.'''
|
||||
|
||||
MultipleNotes(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import json
|
||||
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.api import config, Anatomy, project_overrides_dir_path
|
||||
from pype.modules.ftrack.lib.avalon_sync import get_avalon_attr
|
||||
|
||||
|
|
@ -17,9 +17,7 @@ class PrepareProject(BaseAction):
|
|||
description = 'Set basic attributes on the project'
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator", "Project manager"]
|
||||
icon = '{}/ftrack/action_icons/PrepareProject.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "PrepareProject.svg")
|
||||
|
||||
# Key to store info about triggering folder structure creation
|
||||
create_project_structure_key = "create_folder_structure"
|
||||
|
|
|
|||
|
|
@ -1,17 +1,13 @@
|
|||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import logging
|
||||
import traceback
|
||||
import json
|
||||
|
||||
from pype.api import Logger, config
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.api import config
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
import ftrack_api
|
||||
from avalon import io, api
|
||||
|
||||
log = Logger().get_logger(__name__)
|
||||
|
||||
|
||||
class RVAction(BaseAction):
|
||||
""" Launch RV action """
|
||||
|
|
@ -19,9 +15,8 @@ class RVAction(BaseAction):
|
|||
identifier = "rv.launch.action"
|
||||
label = "rv"
|
||||
description = "rv Launcher"
|
||||
icon = '{}/ftrack/action_icons/RV.png'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "RV.png")
|
||||
|
||||
type = 'Application'
|
||||
|
||||
def __init__(self, session, plugins_presets):
|
||||
|
|
@ -144,7 +139,7 @@ class RVAction(BaseAction):
|
|||
try:
|
||||
items = self.get_interface_items(session, entities)
|
||||
except Exception:
|
||||
log.error(traceback.format_exc())
|
||||
self.log.error(traceback.format_exc())
|
||||
job["status"] = "failed"
|
||||
else:
|
||||
job["status"] = "done"
|
||||
|
|
@ -238,7 +233,7 @@ class RVAction(BaseAction):
|
|||
try:
|
||||
paths = self.get_file_paths(session, event)
|
||||
except Exception:
|
||||
log.error(traceback.format_exc())
|
||||
self.log.error(traceback.format_exc())
|
||||
job["status"] = "failed"
|
||||
else:
|
||||
job["status"] = "done"
|
||||
|
|
@ -254,7 +249,7 @@ class RVAction(BaseAction):
|
|||
|
||||
args.extend(paths)
|
||||
|
||||
log.info("Running rv: {}".format(args))
|
||||
self.log.info("Running rv: {}".format(args))
|
||||
|
||||
subprocess.Popen(args)
|
||||
|
||||
|
|
@ -332,43 +327,3 @@ def register(session, plugins_presets={}):
|
|||
"""Register hooks."""
|
||||
|
||||
RVAction(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
import argparse
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import os
|
||||
from operator import itemgetter
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
|
||||
|
||||
class SeedDebugProject(BaseAction):
|
||||
|
|
@ -16,9 +16,7 @@ class SeedDebugProject(BaseAction):
|
|||
priority = 100
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub"]
|
||||
icon = "{}/ftrack/action_icons/SeedProject.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "SeedProject.svg")
|
||||
|
||||
# Asset names which will be created in `Assets` entity
|
||||
assets = [
|
||||
|
|
@ -429,6 +427,7 @@ class SeedDebugProject(BaseAction):
|
|||
self.session.commit()
|
||||
return True
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as a plugin.'''
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction
|
||||
|
||||
|
||||
class StartTimer(BaseAction):
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import errno
|
|||
import json
|
||||
|
||||
from bson.objectid import ObjectId
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.api import Anatomy
|
||||
from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
|
@ -22,10 +22,7 @@ class StoreThumbnailsToAvalon(BaseAction):
|
|||
description = 'Test action'
|
||||
# roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator", "Project Manager"]
|
||||
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")
|
||||
|
||||
thumbnail_key = "AVALON_THUMBNAIL_ROOT"
|
||||
db_con = DbConnector()
|
||||
|
|
|
|||
|
|
@ -1,8 +1,7 @@
|
|||
import os
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
from pype.modules.ftrack.lib.avalon_sync import SyncEntitiesFactory
|
||||
|
||||
|
||||
|
|
@ -43,9 +42,7 @@ class SyncToAvalonLocal(BaseAction):
|
|||
priority = 200
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub"]
|
||||
icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "PypeAdmin.svg")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
|
|
|||
|
|
@ -1,37 +1,19 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
import collections
|
||||
import json
|
||||
import re
|
||||
|
||||
import ftrack_api
|
||||
from avalon import io, inventory, schema
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
|
||||
|
||||
class TestAction(BaseAction):
|
||||
'''Edit meta data action.'''
|
||||
"""Action for testing purpose or as base for new actions."""
|
||||
|
||||
ignore_me = True
|
||||
#: Action identifier.
|
||||
|
||||
identifier = 'test.action'
|
||||
#: Action label.
|
||||
label = 'Test action'
|
||||
#: Action description.
|
||||
description = 'Test action'
|
||||
#: priority
|
||||
priority = 10000
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ['Pypeclub']
|
||||
icon = '{}/ftrack/action_icons/TestAction.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "TestAction.svg")
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
|
||||
return True
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
|
|
@ -41,45 +23,4 @@ class TestAction(BaseAction):
|
|||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
'''Register plugin. Called when used as a plugin.'''
|
||||
|
||||
TestAction(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
|
|||
|
|
@ -1,11 +1,5 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
import json
|
||||
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
|
||||
|
||||
class ThumbToChildren(BaseAction):
|
||||
|
|
@ -18,9 +12,7 @@ class ThumbToChildren(BaseAction):
|
|||
# Action variant
|
||||
variant = " to Children"
|
||||
# Action icon
|
||||
icon = '{}/ftrack/action_icons/Thumbnail.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "Thumbnail.svg")
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
''' Validation '''
|
||||
|
|
@ -71,42 +63,3 @@ def register(session, plugins_presets={}):
|
|||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
ThumbToChildren(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
|
|||
|
|
@ -1,10 +1,5 @@
|
|||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import logging
|
||||
import json
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
|
||||
|
||||
class ThumbToParent(BaseAction):
|
||||
|
|
@ -17,9 +12,7 @@ class ThumbToParent(BaseAction):
|
|||
# Action variant
|
||||
variant = " to Parent"
|
||||
# Action icon
|
||||
icon = '{}/ftrack/action_icons/Thumbnail.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "Thumbnail.svg")
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
'''Return action config if triggered on asset versions.'''
|
||||
|
|
@ -93,42 +86,3 @@ def register(session, plugins_presets={}):
|
|||
'''Register action. Called when used as an event plugin.'''
|
||||
|
||||
ThumbToParent(session, plugins_presets).register()
|
||||
|
||||
|
||||
def main(arguments=None):
|
||||
'''Set up logging and register action.'''
|
||||
if arguments is None:
|
||||
arguments = []
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
# Allow setting of logging level from arguments.
|
||||
loggingLevels = {}
|
||||
for level in (
|
||||
logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
|
||||
logging.ERROR, logging.CRITICAL
|
||||
):
|
||||
loggingLevels[logging.getLevelName(level).lower()] = level
|
||||
|
||||
parser.add_argument(
|
||||
'-v', '--verbosity',
|
||||
help='Set the logging output verbosity.',
|
||||
choices=loggingLevels.keys(),
|
||||
default='info'
|
||||
)
|
||||
namespace = parser.parse_args(arguments)
|
||||
|
||||
# Set up basic logging
|
||||
logging.basicConfig(level=loggingLevels[namespace.verbosity])
|
||||
|
||||
session = ftrack_api.Session()
|
||||
register(session)
|
||||
|
||||
# Wait for events
|
||||
logging.info(
|
||||
'Registered actions and listening for events. Use Ctrl-C to abort.'
|
||||
)
|
||||
session.event_hub.wait()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
raise SystemExit(main(sys.argv[1:]))
|
||||
|
|
|
|||
|
|
@ -1,189 +0,0 @@
|
|||
import os
|
||||
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib.io_nonsingleton import DbConnector
|
||||
|
||||
|
||||
class PypeUpdateFromV2_2_0(BaseAction):
|
||||
"""This action is to remove silo field from database and changes asset
|
||||
schema to newer version
|
||||
|
||||
WARNING: it is NOT for situations when you want to switch from avalon-core
|
||||
to Pype's avalon-core!!!
|
||||
|
||||
"""
|
||||
#: Action identifier.
|
||||
identifier = "silos.doctor"
|
||||
#: Action label.
|
||||
label = "Pype Update"
|
||||
variant = "- v2.2.0 to v2.3.0 or higher"
|
||||
#: Action description.
|
||||
description = "Use when Pype was updated from v2.2.0 to v2.3.0 or higher"
|
||||
|
||||
#: roles that are allowed to register this action
|
||||
role_list = ["Pypeclub", "Administrator"]
|
||||
icon = "{}/ftrack/action_icons/PypeUpdate.svg".format(
|
||||
os.environ.get("PYPE_STATICS_SERVER", "")
|
||||
)
|
||||
# connector to MongoDB (Avalon mongo)
|
||||
db_con = DbConnector()
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
""" Validation """
|
||||
if len(entities) != 1:
|
||||
return False
|
||||
|
||||
if entities[0].entity_type.lower() != "project":
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def interface(self, session, entities, event):
|
||||
if event['data'].get('values', {}):
|
||||
return
|
||||
|
||||
items = []
|
||||
item_splitter = {'type': 'label', 'value': '---'}
|
||||
title = "Updated Pype from v 2.2.0 to v2.3.0 or higher"
|
||||
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"NOTE: This doctor action should be used ONLY when Pype"
|
||||
" was updated from v2.2.0 to v2.3.0 or higher.<br><br><br>"
|
||||
)
|
||||
})
|
||||
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"Select if want to process <b>all synchronized projects</b>"
|
||||
" or <b>selection</b>."
|
||||
)
|
||||
})
|
||||
|
||||
items.append({
|
||||
"type": "enumerator",
|
||||
"name": "__process_all__",
|
||||
"data": [{
|
||||
"label": "All synchronized projects",
|
||||
"value": True
|
||||
}, {
|
||||
"label": "Selection",
|
||||
"value": False
|
||||
}],
|
||||
"value": False
|
||||
})
|
||||
|
||||
items.append({
|
||||
"type": "label",
|
||||
"value": (
|
||||
"<br/><br/><h2>Synchronized projects:</h2>"
|
||||
"<i>(ignore if <strong>\"ALL projects\"</strong> selected)</i>"
|
||||
)
|
||||
})
|
||||
|
||||
self.log.debug("Getting all Ftrack projects")
|
||||
# Get all Ftrack projects
|
||||
all_ftrack_projects = [
|
||||
project["full_name"] for project in session.query("Project").all()
|
||||
]
|
||||
|
||||
self.log.debug("Getting Avalon projects that are also in the Ftrack")
|
||||
# Get Avalon projects that are in Ftrack
|
||||
self.db_con.install()
|
||||
possible_projects = [
|
||||
project["name"] for project in self.db_con.projects()
|
||||
if project["name"] in all_ftrack_projects
|
||||
]
|
||||
|
||||
for project in possible_projects:
|
||||
item_label = {
|
||||
"type": "label",
|
||||
"value": project
|
||||
}
|
||||
item = {
|
||||
"label": "- process",
|
||||
"name": project,
|
||||
"type": 'boolean',
|
||||
"value": False
|
||||
}
|
||||
items.append(item_splitter)
|
||||
items.append(item_label)
|
||||
items.append(item)
|
||||
|
||||
if len(possible_projects) == 0:
|
||||
return {
|
||||
"success": False,
|
||||
"message": (
|
||||
"Nothing to process."
|
||||
" There are not projects synchronized to avalon."
|
||||
)
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"items": items,
|
||||
"title": title
|
||||
}
|
||||
|
||||
def launch(self, session, entities, event):
|
||||
if 'values' not in event['data']:
|
||||
return
|
||||
|
||||
projects_selection = {
|
||||
True: [],
|
||||
False: []
|
||||
}
|
||||
process_all = None
|
||||
|
||||
values = event['data']['values']
|
||||
for key, value in values.items():
|
||||
if key == "__process_all__":
|
||||
process_all = value
|
||||
continue
|
||||
|
||||
projects_selection[value].append(key)
|
||||
|
||||
# Skip if process_all value is not boolean
|
||||
# - may happen when user delete string line in combobox
|
||||
if not isinstance(process_all, bool):
|
||||
self.log.warning(
|
||||
"Nothing was processed. User didn't select if want to process"
|
||||
" selection or all projects!"
|
||||
)
|
||||
return {
|
||||
"success": False,
|
||||
"message": (
|
||||
"Nothing was processed. You must select if want to process"
|
||||
" \"selection\" or \"all projects\"!"
|
||||
)
|
||||
}
|
||||
|
||||
projects_to_process = projects_selection[True]
|
||||
if process_all:
|
||||
projects_to_process.extend(projects_selection[False])
|
||||
|
||||
self.db_con.install()
|
||||
for project in projects_to_process:
|
||||
self.log.debug("Processing project \"{}\"".format(project))
|
||||
self.db_con.Session["AVALON_PROJECT"] = project
|
||||
|
||||
self.log.debug("- Unsetting silos on assets")
|
||||
self.db_con.update_many(
|
||||
{"type": "asset"},
|
||||
{"$unset": {"silo": ""}}
|
||||
)
|
||||
|
||||
self.log.debug("- setting schema of assets to v.3")
|
||||
self.db_con.update_many(
|
||||
{"type": "asset"},
|
||||
{"$set": {"schema": "avalon-core:asset-3.0"}}
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def register(session, plugins_presets={}):
|
||||
"""Register plugin. Called when used as an plugin."""
|
||||
|
||||
PypeUpdateFromV2_2_0(session, plugins_presets).register()
|
||||
|
|
@ -1,23 +1,15 @@
|
|||
import os
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction, statics_icon
|
||||
|
||||
|
||||
class ActionAskWhereIRun(BaseAction):
|
||||
""" Sometimes user forget where pipeline with his credentials is running.
|
||||
- this action triggers `ActionShowWhereIRun`
|
||||
"""
|
||||
# Action is ignored by default
|
||||
ignore_me = True
|
||||
#: Action identifier.
|
||||
identifier = 'ask.where.i.run'
|
||||
#: Action label.
|
||||
label = 'Ask where I run'
|
||||
#: Action description.
|
||||
description = 'Triggers PC info where user have running Pype'
|
||||
#: Action icon
|
||||
icon = '{}/ftrack/action_icons/ActionAskWhereIRun.svg'.format(
|
||||
os.environ.get('PYPE_STATICS_SERVER', '')
|
||||
)
|
||||
icon = statics_icon("ftrack", "action_icons", "ActionAskWhereIRun.svg")
|
||||
|
||||
def discover(self, session, entities, event):
|
||||
""" Hide by default - Should be enabled only if you want to run.
|
||||
|
|
|
|||
|
|
@ -1,8 +1,7 @@
|
|||
import platform
|
||||
import socket
|
||||
import getpass
|
||||
import ftrack_api
|
||||
from pype.modules.ftrack import BaseAction
|
||||
from pype.modules.ftrack.lib import BaseAction
|
||||
|
||||
|
||||
class ActionShowWhereIRun(BaseAction):
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ from . import avalon_sync
|
|||
from . import credentials
|
||||
from .ftrack_base_handler import BaseHandler
|
||||
from .ftrack_event_handler import BaseEvent
|
||||
from .ftrack_action_handler import BaseAction
|
||||
from .ftrack_action_handler import BaseAction, statics_icon
|
||||
from .ftrack_app_handler import AppAction
|
||||
|
||||
__all__ = [
|
||||
|
|
@ -11,5 +11,6 @@ __all__ = [
|
|||
"BaseHandler",
|
||||
"BaseEvent",
|
||||
"BaseAction",
|
||||
"statics_icon",
|
||||
"AppAction"
|
||||
]
|
||||
|
|
|
|||
|
|
@ -5,27 +5,20 @@ Copy of io module in avalon-core.
|
|||
- In this case not working as singleton with api.Session!
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import errno
|
||||
import shutil
|
||||
import logging
|
||||
import tempfile
|
||||
import functools
|
||||
import contextlib
|
||||
import atexit
|
||||
|
||||
import requests
|
||||
|
||||
# Third-party dependencies
|
||||
import pymongo
|
||||
from pymongo.client_session import ClientSession
|
||||
|
||||
|
||||
class NotActiveTable(Exception):
|
||||
def __init__(self, *args, **kwargs):
|
||||
msg = "Active table is not set. (This is bug)"
|
||||
if not (args or kwargs):
|
||||
args = (default_message,)
|
||||
args = [msg]
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
|
|
@ -120,7 +113,7 @@ class DbConnector:
|
|||
else:
|
||||
raise IOError(
|
||||
"ERROR: Couldn't connect to %s in "
|
||||
"less than %.3f ms" % (self._mongo_url, timeout)
|
||||
"less than %.3f ms" % (self._mongo_url, self.timeout)
|
||||
)
|
||||
|
||||
self.log.info("Connected to %s, delay %.3f s" % (
|
||||
|
|
|
|||
|
|
@ -1,6 +1,14 @@
import os
from .ftrack_base_handler import BaseHandler


def statics_icon(*icon_statics_file_parts):
    statics_server = os.environ.get("PYPE_STATICS_SERVER")
    if not statics_server:
        return None
    return "/".join((statics_server, *icon_statics_file_parts))


class BaseAction(BaseHandler):
    '''Custom Action base class
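The statics_icon helper above centralizes the icon URL construction that every action previously repeated by hand with PYPE_STATICS_SERVER. A minimal before/after sketch, assuming an action class with an icon attribute; the class name is illustrative only:

import os
from pype.modules.ftrack.lib import statics_icon


class SomeAction:  # illustrative only
    # old pattern, duplicated in every action:
    icon = '{}/ftrack/action_icons/SomeAction.svg'.format(
        os.environ.get('PYPE_STATICS_SERVER', '')
    )
    # new pattern, via the shared helper:
    icon = statics_icon("ftrack", "action_icons", "SomeAction.svg")

Note that statics_icon returns None when PYPE_STATICS_SERVER is unset, whereas the old formatting produced a path with an empty prefix.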
@ -177,7 +185,9 @@ class BaseAction(BaseHandler):
|
|||
else:
|
||||
for key in ('success', 'message'):
|
||||
if key not in result:
|
||||
raise KeyError('Missing required key: {0}.'.format(key))
|
||||
raise KeyError(
|
||||
"Missing required key: {0}.".format(key)
|
||||
)
|
||||
return result
|
||||
|
||||
self.log.warning((
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import os
|
||||
import requests
|
||||
from pypeapp import style
|
||||
from avalon import style
|
||||
from pype.modules.ftrack import credentials
|
||||
from . import login_tools
|
||||
from Qt import QtCore, QtGui, QtWidgets
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
from Qt import QtWidgets, QtCore
|
||||
from .widgets import LogsWidget, LogDetailWidget
|
||||
from pypeapp import style
|
||||
from .widgets import LogsWidget, OutputWidget
|
||||
from avalon import style
|
||||
|
||||
|
||||
class LogsWindow(QtWidgets.QWidget):
|
||||
|
|
@ -10,7 +10,7 @@ class LogsWindow(QtWidgets.QWidget):
|
|||
self.setStyleSheet(style.load_stylesheet())
|
||||
self.resize(1200, 800)
|
||||
logs_widget = LogsWidget(parent=self)
|
||||
log_detail = LogDetailWidget(parent=self)
|
||||
log_detail = OutputWidget(parent=self)
|
||||
|
||||
main_layout = QtWidgets.QHBoxLayout()
|
||||
|
||||
|
|
@ -33,7 +33,5 @@ class LogsWindow(QtWidgets.QWidget):
|
|||
|
||||
def on_selection_changed(self):
|
||||
index = self.logs_widget.selected_log()
|
||||
if not index or not index.isValid():
|
||||
return
|
||||
node = index.data(self.logs_widget.model.NodeRole)
|
||||
self.log_detail.set_detail(node)
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import os
|
||||
import collections
|
||||
from Qt import QtCore
|
||||
from pype.api import Logger
|
||||
from pypeapp.lib.log import _bootstrap_mongo_log
|
||||
|
|
@ -8,31 +9,32 @@ log = Logger().get_logger("LogModel", "LoggingModule")
|
|||
|
||||
class LogModel(QtCore.QAbstractItemModel):
|
||||
COLUMNS = [
|
||||
"user",
|
||||
"host",
|
||||
"lineNumber",
|
||||
"method",
|
||||
"module",
|
||||
"fileName",
|
||||
"loggerName",
|
||||
"message",
|
||||
"level",
|
||||
"timestamp",
|
||||
"process_name",
|
||||
"hostname",
|
||||
"hostip",
|
||||
"username",
|
||||
"system_name",
|
||||
"started"
|
||||
]
|
||||
|
||||
colums_mapping = {
|
||||
"user": "User",
|
||||
"host": "Host",
|
||||
"lineNumber": "Line n.",
|
||||
"method": "Method",
|
||||
"module": "Module",
|
||||
"fileName": "File name",
|
||||
"loggerName": "Logger name",
|
||||
"message": "Message",
|
||||
"level": "Level",
|
||||
"timestamp": "Timestamp",
|
||||
"process_name": "Process Name",
|
||||
"process_id": "Process Id",
|
||||
"hostname": "Hostname",
|
||||
"hostip": "Host IP",
|
||||
"username": "Username",
|
||||
"system_name": "System name",
|
||||
"started": "Started at"
|
||||
}
|
||||
|
||||
process_keys = [
|
||||
"process_id", "hostname", "hostip",
|
||||
"username", "system_name", "process_name"
|
||||
]
|
||||
log_keys = [
|
||||
"timestamp", "level", "thread", "threadName", "message", "loggerName",
|
||||
"fileName", "module", "method", "lineNumber"
|
||||
]
|
||||
default_value = "- Not set -"
|
||||
NodeRole = QtCore.Qt.UserRole + 1
|
||||
|
||||
def __init__(self, parent=None):
|
||||
|
|
@ -50,14 +52,47 @@ class LogModel(QtCore.QAbstractItemModel):
|
|||
self._root_node.add_child(node)
|
||||
|
||||
def refresh(self):
|
||||
self.log_by_process = collections.defaultdict(list)
|
||||
self.process_info = {}
|
||||
|
||||
self.clear()
|
||||
self.beginResetModel()
|
||||
if self.dbcon:
|
||||
result = self.dbcon.find({})
|
||||
for item in result:
|
||||
self.add_log(item)
|
||||
self.endResetModel()
|
||||
process_id = item.get("process_id")
|
||||
# backwards (in)compatibility
|
||||
if not process_id:
|
||||
continue
|
||||
|
||||
if process_id not in self.process_info:
|
||||
proc_dict = {}
|
||||
for key in self.process_keys:
|
||||
proc_dict[key] = (
|
||||
item.get(key) or self.default_value
|
||||
)
|
||||
self.process_info[process_id] = proc_dict
|
||||
|
||||
if "_logs" not in self.process_info[process_id]:
|
||||
self.process_info[process_id]["_logs"] = []
|
||||
|
||||
log_item = {}
|
||||
for key in self.log_keys:
|
||||
log_item[key] = item.get(key) or self.default_value
|
||||
|
||||
if "exception" in item:
|
||||
log_item["exception"] = item["exception"]
|
||||
|
||||
self.process_info[process_id]["_logs"].append(log_item)
|
||||
|
||||
for item in self.process_info.values():
|
||||
item["_logs"] = sorted(
|
||||
item["_logs"], key=lambda item: item["timestamp"]
|
||||
)
|
||||
item["started"] = item["_logs"][0]["timestamp"]
|
||||
self.add_log(item)
|
||||
|
||||
self.endResetModel()
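The refresh above collapses the flat Mongo log records into one node per process and sorts each group chronologically. A minimal, Qt-free sketch of that grouping step, assuming plain dictionaries in place of the Mongo documents:

    def group_logs_by_process(records, default="- Not set -"):
        """Group flat log records by process_id and sort each group by timestamp."""
        process_info = {}
        for item in records:
            process_id = item.get("process_id")
            if not process_id:
                # records written by older versions carry no process id; skip them
                continue
            proc = process_info.setdefault(process_id, {
                "process_name": item.get("process_name") or default,
                "_logs": [],
            })
            proc["_logs"].append({
                "timestamp": item.get("timestamp") or default,
                "level": item.get("level") or default,
                "message": item.get("message") or default,
            })
        for proc in process_info.values():
            proc["_logs"].sort(key=lambda log: log["timestamp"])
            proc["started"] = proc["_logs"][0]["timestamp"]
        return process_info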
|
||||
|
||||
def data(self, index, role):
|
||||
if not index.isValid():
|
||||
|
|
@ -68,7 +103,7 @@ class LogModel(QtCore.QAbstractItemModel):
|
|||
column = index.column()
|
||||
|
||||
key = self.COLUMNS[column]
-if key == "timestamp":
+if key == "started":
return str(node.get(key, None))
return node.get(key, None)
|
||||
|
||||
|
|
@ -86,8 +121,7 @@ class LogModel(QtCore.QAbstractItemModel):
|
|||
child_item = parent_node.child(row)
|
||||
if child_item:
|
||||
return self.createIndex(row, column, child_item)
|
||||
else:
|
||||
return QtCore.QModelIndex()
|
||||
return QtCore.QModelIndex()
|
||||
|
||||
def rowCount(self, parent):
|
||||
node = self._root_node
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
import getpass
from Qt import QtCore, QtWidgets, QtGui
from PyQt5.QtCore import QVariant
from .models import LogModel
|
||||
|
||||
|
||||
|
|
@ -97,7 +97,6 @@ class SelectableMenu(QtWidgets.QMenu):
|
|||
class CustomCombo(QtWidgets.QWidget):
|
||||
|
||||
selection_changed = QtCore.Signal()
|
||||
checked_changed = QtCore.Signal(bool)
|
||||
|
||||
def __init__(self, title, parent=None):
|
||||
super(CustomCombo, self).__init__(parent)
|
||||
|
|
@ -126,27 +125,12 @@ class CustomCombo(QtWidgets.QWidget):
|
|||
self.toolmenu.clear()
|
||||
self.addItems(items)
|
||||
|
||||
def select_items(self, items, ignore_input=False):
|
||||
if not isinstance(items, list):
|
||||
items = [items]
|
||||
|
||||
for action in self.toolmenu.actions():
|
||||
check = True
|
||||
if (
|
||||
action.text() in items and ignore_input or
|
||||
action.text() not in items and not ignore_input
|
||||
):
|
||||
check = False
|
||||
|
||||
action.setChecked(check)
|
||||
|
||||
def addItems(self, items):
|
||||
for item in items:
|
||||
action = self.toolmenu.addAction(item)
|
||||
action.setCheckable(True)
|
||||
self.toolmenu.addAction(action)
|
||||
action.setChecked(True)
|
||||
action.triggered.connect(self.checked_changed)
|
||||
self.toolmenu.addAction(action)
|
||||
|
||||
def items(self):
|
||||
for action in self.toolmenu.actions():
|
||||
|
|
@ -200,42 +184,15 @@ class CheckableComboBox(QtWidgets.QComboBox):
|
|||
for text, checked in items:
|
||||
text_item = QtGui.QStandardItem(text)
|
||||
checked_item = QtGui.QStandardItem()
|
||||
checked_item.setData(
|
||||
QtCore.QVariant(checked), QtCore.Qt.CheckStateRole
|
||||
)
|
||||
checked_item.setData(QVariant(checked), QtCore.Qt.CheckStateRole)
|
||||
self.model.appendRow([text_item, checked_item])
|
||||
|
||||
|
||||
class FilterLogModel(QtCore.QSortFilterProxyModel):
|
||||
sub_dict = ["$gt", "$lt", "$not"]
|
||||
def __init__(self, key_values, parent=None):
|
||||
super(FilterLogModel, self).__init__(parent)
|
||||
self.allowed_key_values = key_values
|
||||
|
||||
def filterAcceptsRow(self, row, parent):
|
||||
"""
|
||||
Reimplemented from base class.
|
||||
"""
|
||||
model = self.sourceModel()
|
||||
for key, values in self.allowed_key_values.items():
|
||||
col_indx = model.COLUMNS.index(key)
|
||||
value = model.index(row, col_indx, parent).data(
|
||||
QtCore.Qt.DisplayRole
|
||||
)
|
||||
if value not in values:
|
||||
return False
|
||||
return True
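A minimal usage sketch of the proxy-filtering approach, wiring a QSortFilterProxyModel subclass to a source model and re-applying the filter with invalidate(); it assumes the Qt.py binding wrapper used elsewhere in this code and keys the filter on column indices rather than COLUMNS names for brevity:

    from Qt import QtCore, QtGui, QtWidgets

    class ColumnValueFilterProxy(QtCore.QSortFilterProxyModel):
        """Accept a row only when every watched column holds an allowed value."""

        def __init__(self, allowed_by_column, parent=None):
            super(ColumnValueFilterProxy, self).__init__(parent)
            # {column index: set of allowed display values}
            self.allowed_by_column = allowed_by_column

        def filterAcceptsRow(self, row, parent):
            model = self.sourceModel()
            for column, values in self.allowed_by_column.items():
                value = model.index(row, column, parent).data(QtCore.Qt.DisplayRole)
                if value not in values:
                    return False
            return True

    app = QtWidgets.QApplication([])
    source = QtGui.QStandardItemModel()
    for user, level in (("alice", "INFO"), ("bob", "ERROR"), ("alice", "DEBUG")):
        source.appendRow([QtGui.QStandardItem(user), QtGui.QStandardItem(level)])

    proxy = ColumnValueFilterProxy({0: {"alice"}, 1: {"INFO", "DEBUG"}})
    proxy.setSourceModel(source)
    view = QtWidgets.QTableView()
    view.setModel(proxy)

    # widening or narrowing the filter only needs an update plus invalidate()
    proxy.allowed_by_column[1] = {"DEBUG"}
    proxy.invalidate()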
|
||||
|
||||
|
||||
class LogsWidget(QtWidgets.QWidget):
|
||||
"""A widget that lists the published subsets for an asset"""
|
||||
|
||||
active_changed = QtCore.Signal()
|
||||
|
||||
_level_order = [
|
||||
"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"
|
||||
]
|
||||
|
||||
def __init__(self, parent=None):
|
||||
super(LogsWidget, self).__init__(parent=parent)
|
||||
|
||||
|
|
@ -243,41 +200,47 @@ class LogsWidget(QtWidgets.QWidget):
|
|||
|
||||
filter_layout = QtWidgets.QHBoxLayout()
|
||||
|
||||
# user_filter = SearchComboBox(self, "Users")
|
||||
user_filter = CustomCombo("Users", self)
|
||||
users = model.dbcon.distinct("user")
|
||||
user_filter.populate(users)
|
||||
user_filter.checked_changed.connect(self.user_changed)
|
||||
user_filter.select_items(getpass.getuser())
|
||||
user_filter.selection_changed.connect(self.user_changed)
|
||||
|
||||
level_filter = CustomCombo("Levels", self)
|
||||
# levels = [(level, True) for level in model.dbcon.distinct("level")]
|
||||
levels = model.dbcon.distinct("level")
|
||||
_levels = []
|
||||
for level in self._level_order:
|
||||
if level in levels:
|
||||
_levels.append(level)
|
||||
level_filter.populate(_levels)
|
||||
level_filter.checked_changed.connect(self.level_changed)
|
||||
level_filter.addItems(levels)
|
||||
|
||||
# date_from_label = QtWidgets.QLabel("From:")
|
||||
# date_filter_from = QtWidgets.QDateTimeEdit()
|
||||
#
|
||||
# date_from_layout = QtWidgets.QVBoxLayout()
|
||||
# date_from_layout.addWidget(date_from_label)
|
||||
# date_from_layout.addWidget(date_filter_from)
|
||||
#
|
||||
# date_to_label = QtWidgets.QLabel("To:")
|
||||
# date_filter_to = QtWidgets.QDateTimeEdit()
|
||||
#
|
||||
# date_to_layout = QtWidgets.QVBoxLayout()
|
||||
# date_to_layout.addWidget(date_to_label)
|
||||
# date_to_layout.addWidget(date_filter_to)
|
||||
date_from_label = QtWidgets.QLabel("From:")
|
||||
date_filter_from = QtWidgets.QDateTimeEdit()
|
||||
|
||||
date_from_layout = QtWidgets.QVBoxLayout()
|
||||
date_from_layout.addWidget(date_from_label)
|
||||
date_from_layout.addWidget(date_filter_from)
|
||||
|
||||
# now = datetime.datetime.now()
|
||||
# QtCore.QDateTime(
|
||||
# now.year,
|
||||
# now.month,
|
||||
# now.day,
|
||||
# now.hour,
|
||||
# now.minute,
|
||||
# second=0,
|
||||
# msec=0,
|
||||
# timeSpec=0
|
||||
# )
|
||||
date_to_label = QtWidgets.QLabel("To:")
|
||||
date_filter_to = QtWidgets.QDateTimeEdit()
|
||||
|
||||
date_to_layout = QtWidgets.QVBoxLayout()
|
||||
date_to_layout.addWidget(date_to_label)
|
||||
date_to_layout.addWidget(date_filter_to)
|
||||
|
||||
filter_layout.addWidget(user_filter)
|
||||
filter_layout.addWidget(level_filter)
|
||||
filter_layout.setAlignment(QtCore.Qt.AlignLeft)
|
||||
|
||||
# filter_layout.addLayout(date_from_layout)
|
||||
# filter_layout.addLayout(date_to_layout)
|
||||
filter_layout.addLayout(date_from_layout)
|
||||
filter_layout.addLayout(date_to_layout)
|
||||
|
||||
view = QtWidgets.QTreeView(self)
|
||||
view.setAllColumnsShowFocus(True)
|
||||
|
|
@ -290,58 +253,28 @@ class LogsWidget(QtWidgets.QWidget):
|
|||
view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
|
||||
view.setSortingEnabled(True)
|
||||
view.sortByColumn(
-model.COLUMNS.index("timestamp"),
+model.COLUMNS.index("started"),
QtCore.Qt.AscendingOrder
)
|
||||
|
||||
key_val = {
|
||||
"user": users,
|
||||
"level": levels
|
||||
}
|
||||
proxy_model = FilterLogModel(key_val, view)
|
||||
proxy_model.setSourceModel(model)
|
||||
view.setModel(proxy_model)
|
||||
|
||||
view.customContextMenuRequested.connect(self.on_context_menu)
|
||||
view.selectionModel().selectionChanged.connect(self.active_changed)
|
||||
|
||||
# WARNING this is cool but slows down widget a lot
|
||||
# header = view.header()
|
||||
# # Enforce the columns to fit the data (purely cosmetic)
|
||||
# if Qt.__binding__ in ("PySide2", "PyQt5"):
|
||||
# header.setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents)
|
||||
# else:
|
||||
# header.setResizeMode(QtWidgets.QHeaderView.ResizeToContents)
|
||||
|
||||
view.setModel(model)
|
||||
view.pressed.connect(self._on_activated)
|
||||
# prepare
|
||||
model.refresh()
|
||||
|
||||
# Store to memory
|
||||
self.model = model
|
||||
self.proxy_model = proxy_model
|
||||
self.view = view
|
||||
|
||||
self.user_filter = user_filter
|
||||
self.level_filter = level_filter
|
||||
|
||||
def _on_activated(self, *args, **kwargs):
|
||||
self.active_changed.emit()
|
||||
|
||||
def user_changed(self):
|
||||
valid_actions = []
|
||||
for action in self.user_filter.items():
|
||||
if action.isChecked():
|
||||
valid_actions.append(action.text())
|
||||
|
||||
self.proxy_model.allowed_key_values["user"] = valid_actions
|
||||
self.proxy_model.invalidate()
|
||||
|
||||
def level_changed(self):
|
||||
valid_actions = []
|
||||
for action in self.level_filter.items():
|
||||
if action.isChecked():
|
||||
valid_actions.append(action.text())
|
||||
|
||||
self.proxy_model.allowed_key_values["level"] = valid_actions
|
||||
self.proxy_model.invalidate()
|
||||
|
||||
print(action)
|
||||
|
||||
def on_context_menu(self, point):
|
||||
# TODO will be any actions? it's ready
|
||||
|
|
@ -360,10 +293,74 @@ class LogsWidget(QtWidgets.QWidget):
|
|||
rows = selection.selectedRows(column=0)
|
||||
if len(rows) == 1:
|
||||
return rows[0]
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class OutputWidget(QtWidgets.QWidget):
|
||||
def __init__(self, parent=None):
|
||||
super(OutputWidget, self).__init__(parent=parent)
|
||||
layout = QtWidgets.QVBoxLayout(self)
|
||||
output_text = QtWidgets.QTextEdit()
|
||||
output_text.setReadOnly(True)
|
||||
# output_text.setLineWrapMode(QtWidgets.QTextEdit.FixedPixelWidth)
|
||||
|
||||
layout.addWidget(output_text)
|
||||
|
||||
self.setLayout(layout)
|
||||
self.output_text = output_text
|
||||
|
||||
def add_line(self, line):
|
||||
self.output_text.append(line)
|
||||
|
||||
def set_detail(self, node):
|
||||
self.output_text.clear()
|
||||
for log in node["_logs"]:
|
||||
level = log["level"].lower()
|
||||
|
||||
line_f = "<font color=\"White\">{message}"
|
||||
if level == "debug":
|
||||
line_f = (
|
||||
"<font color=\"Yellow\"> -"
|
||||
" <font color=\"Lime\">{{ {loggerName} }}: ["
|
||||
" <font color=\"White\">{message}"
|
||||
" <font color=\"Lime\">]"
|
||||
)
|
||||
elif level == "info":
|
||||
line_f = (
|
||||
"<font color=\"Lime\">>>> ["
|
||||
" <font color=\"White\">{message}"
|
||||
" <font color=\"Lime\">]"
|
||||
)
|
||||
elif level == "warning":
|
||||
line_f = (
|
||||
"<font color=\"Yellow\">*** WRN:"
|
||||
" <font color=\"Lime\"> >>> {{ {loggerName} }}: ["
|
||||
" <font color=\"White\">{message}"
|
||||
" <font color=\"Lime\">]"
|
||||
)
|
||||
elif level == "error":
|
||||
line_f = (
|
||||
"<font color=\"Red\">!!! ERR:"
|
||||
" <font color=\"White\">{timestamp}"
|
||||
" <font color=\"Lime\">>>> {{ {loggerName} }}: ["
|
||||
" <font color=\"White\">{message}"
|
||||
" <font color=\"Lime\">]"
|
||||
)
|
||||
|
||||
exc = log.get("exception")
|
||||
if exc:
|
||||
log["message"] = exc["message"]
|
||||
|
||||
line = line_f.format(**log)
|
||||
|
||||
self.add_line(line)
|
||||
|
||||
if not exc:
|
||||
continue
|
||||
for _line in exc["stackTrace"].split("\n"):
|
||||
self.add_line(_line)
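The level handling above is essentially a lookup from level name to an HTML template. A Qt-free distillation that can be exercised on its own (templates copied from the widget, record values hypothetical):

    LEVEL_FORMATS = {
        "debug": '<font color="Yellow"> - <font color="Lime">{{ {loggerName} }}: '
                 '[ <font color="White">{message} <font color="Lime">]',
        "info": '<font color="Lime">>>> [ <font color="White">{message} '
                '<font color="Lime">]',
        "warning": '<font color="Yellow">*** WRN: <font color="Lime"> >>> '
                   '{{ {loggerName} }}: [ <font color="White">{message} '
                   '<font color="Lime">]',
        "error": '<font color="Red">!!! ERR: <font color="White">{timestamp} '
                 '<font color="Lime">>>> {{ {loggerName} }}: '
                 '[ <font color="White">{message} <font color="Lime">]',
    }

    def format_log_line(log):
        """Pick markup by level and fill in the log record fields."""
        default = '<font color="White">{message}'
        line_f = LEVEL_FORMATS.get(log["level"].lower(), default)
        return line_f.format(**log)

    print(format_log_line({"level": "INFO", "loggerName": "LogModel",
                           "timestamp": "2020-04-01 12:00:00",
                           "message": "hello"}))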
|
||||
|
||||
|
||||
class LogDetailWidget(QtWidgets.QWidget):
|
||||
"""A Widget that display information about a specific version"""
|
||||
data_rows = [
|
||||
|
|
@ -418,5 +415,4 @@ class LogDetailWidget(QtWidgets.QWidget):
|
|||
value = detail_data.get(row) or "< Not set >"
|
||||
data[row] = value
|
||||
|
||||
|
||||
self.detail_widget.setHtml(self.html_text.format(**data))
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
import os
|
||||
from Qt import QtWidgets
|
||||
|
||||
from pype.api import Logger
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
import appdirs
-from pypeapp import style
+from avalon import style
from Qt import QtWidgets
import os
import json
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
import os
from Qt import QtCore, QtGui, QtWidgets
-from pypeapp import style
+from avalon import style


class MusterLogin(QtWidgets.QWidget):
|
||||
|
|
|
|||
|
|
@@ -1,7 +1,7 @@
import os
from . import QtCore, QtGui, QtWidgets
from . import get_resource
-from pypeapp import style
+from avalon import style


class ComponentItem(QtWidgets.QFrame):
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,5 @@
from pype.api import Logger
-from pypeapp import style
+from avalon import style
from Qt import QtCore, QtGui, QtWidgets
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
import os
from Qt import QtCore, QtGui, QtWidgets
-from pypeapp import style, resources
+from pype.resources import get_resource
+from avalon import style


class UserWidget(QtWidgets.QWidget):
|
||||
|
|
@@ -14,7 +14,7 @@ class UserWidget(QtWidgets.QWidget):
self.module = module

# Style
-icon = QtGui.QIcon(resources.get_resource("icon.png"))
+icon = QtGui.QIcon(get_resource("icon.png"))
self.setWindowIcon(icon)
self.setWindowTitle("Username Settings")
self.setMinimumWidth(self.MIN_WIDTH)
|
||||
|
|
|
|||
40
pype/plugins/blender/create/create_layout.py
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
"""Create a layout asset."""
|
||||
|
||||
import bpy
|
||||
|
||||
from avalon import api
|
||||
from avalon.blender import Creator, lib
|
||||
import pype.hosts.blender.plugin
|
||||
|
||||
|
||||
class CreateLayout(Creator):
|
||||
"""Layout output for character rigs"""
|
||||
|
||||
name = "layoutMain"
|
||||
label = "Layout"
|
||||
family = "layout"
|
||||
icon = "cubes"
|
||||
|
||||
def process(self):
|
||||
|
||||
asset = self.data["asset"]
|
||||
subset = self.data["subset"]
|
||||
name = pype.hosts.blender.plugin.asset_name(asset, subset)
|
||||
collection = bpy.data.collections.new(name=name)
|
||||
bpy.context.scene.collection.children.link(collection)
|
||||
self.data['task'] = api.Session.get('AVALON_TASK')
|
||||
lib.imprint(collection, self.data)
|
||||
|
||||
# Add the rig object and all the children meshes to
|
||||
# a set and link them all at the end to avoid duplicates.
|
||||
# Blender crashes if trying to link an object that is already linked.
|
||||
# This links automatically the children meshes if they were not
|
||||
# selected, and doesn't link them twice if they, instead,
|
||||
# were manually selected by the user.
|
||||
objects_to_link = set()
|
||||
|
||||
if (self.options or {}).get("useSelection"):
|
||||
for obj in lib.get_selection():
|
||||
collection.children.link(obj.users_collection[0])
|
||||
|
||||
return collection
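The comment block above describes collecting objects into a set first and linking them in a single pass so nothing is linked twice. A small sketch of that pattern, assuming it runs inside Blender where bpy is available:

    import bpy

    def link_selection_to(collection):
        """Link selected objects (and armature children) into ``collection`` once each."""
        objects_to_link = set()
        for obj in bpy.context.selected_objects:
            objects_to_link.add(obj)
            if obj.type == 'ARMATURE':
                # pull in the meshes driven by the rig even if they were not selected
                objects_to_link.update(obj.children)
        for obj in objects_to_link:
            if obj.name not in collection.objects:
                # linking the same object twice raises an error in Blender
                collection.objects.link(obj)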
|
||||
|
|
@ -31,22 +31,11 @@ class CreateRig(Creator):
|
|||
# This links automatically the children meshes if they were not
|
||||
# selected, and doesn't link them twice if they, instead,
|
||||
# were manually selected by the user.
|
||||
objects_to_link = set()
|
||||
|
||||
if (self.options or {}).get("useSelection"):
|
||||
|
||||
for obj in lib.get_selection():
|
||||
|
||||
objects_to_link.add(obj)
|
||||
|
||||
if obj.type == 'ARMATURE':
|
||||
|
||||
for subobj in obj.children:
|
||||
|
||||
objects_to_link.add(subobj)
|
||||
|
||||
for obj in objects_to_link:
|
||||
|
||||
collection.objects.link(obj)
|
||||
for child in obj.users_collection[0].children:
|
||||
collection.children.link(child)
|
||||
collection.objects.link(obj)
|
||||
|
||||
return collection
|
||||
|
|
|
|||
|
|
@ -29,7 +29,6 @@ class BlendAnimationLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
@staticmethod
|
||||
def _remove(self, objects, lib_container):
|
||||
|
||||
for obj in objects:
|
||||
|
|
@ -41,7 +40,6 @@ class BlendAnimationLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
|
||||
bpy.data.collections.remove(bpy.data.collections[lib_container])
|
||||
|
||||
@staticmethod
|
||||
def _process(self, libpath, lib_container, container_name):
|
||||
|
||||
relative = bpy.context.preferences.filepaths.use_relative_paths
|
||||
|
|
@ -131,7 +129,7 @@ class BlendAnimationLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
container_metadata["lib_container"] = lib_container
|
||||
|
||||
objects_list = self._process(
|
||||
self, libpath, lib_container, container_name)
|
||||
libpath, lib_container, container_name)
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
container_metadata["objects"] = objects_list
|
||||
|
|
@ -205,14 +203,10 @@ class BlendAnimationLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
objects = collection_metadata["objects"]
|
||||
lib_container = collection_metadata["lib_container"]
|
||||
|
||||
# Get the armature of the rig
|
||||
armatures = [obj for obj in objects if obj.type == 'ARMATURE']
|
||||
assert(len(armatures) == 1)
|
||||
|
||||
self._remove(self, objects, lib_container)
|
||||
self._remove(objects, lib_container)
|
||||
|
||||
objects_list = self._process(
|
||||
self, str(libpath), lib_container, collection.name)
|
||||
str(libpath), lib_container, collection.name)
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
collection_metadata["objects"] = objects_list
|
||||
|
|
@ -249,7 +243,7 @@ class BlendAnimationLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
objects = collection_metadata["objects"]
|
||||
lib_container = collection_metadata["lib_container"]
|
||||
|
||||
self._remove(self, objects, lib_container)
|
||||
self._remove(objects, lib_container)
|
||||
|
||||
bpy.data.collections.remove(collection)
|
||||
|
||||
|
|
|
|||
264
pype/plugins/blender/load/load_layout.py
Normal file
|
|
@ -0,0 +1,264 @@
|
|||
"""Load a layout in Blender."""
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from pprint import pformat
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from avalon import api, blender
|
||||
import bpy
|
||||
import pype.hosts.blender.plugin
|
||||
|
||||
|
||||
logger = logging.getLogger("pype").getChild(
|
||||
"blender").getChild("load_layout")
|
||||
|
||||
|
||||
class BlendLayoutLoader(pype.hosts.blender.plugin.AssetLoader):
|
||||
"""Load animations from a .blend file.
|
||||
|
||||
Warning:
|
||||
Loading the same asset more than once is not properly supported at the
|
||||
moment.
|
||||
"""
|
||||
|
||||
families = ["layout"]
|
||||
representations = ["blend"]
|
||||
|
||||
label = "Link Layout"
|
||||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
def _remove(self, objects, lib_container):
|
||||
|
||||
for obj in objects:
|
||||
|
||||
if obj.type == 'ARMATURE':
|
||||
bpy.data.armatures.remove(obj.data)
|
||||
elif obj.type == 'MESH':
|
||||
bpy.data.meshes.remove(obj.data)
|
||||
|
||||
for element_container in bpy.data.collections[lib_container].children:
|
||||
for child in element_container.children:
|
||||
bpy.data.collections.remove(child)
|
||||
bpy.data.collections.remove(element_container)
|
||||
|
||||
bpy.data.collections.remove(bpy.data.collections[lib_container])
|
||||
|
||||
def _process(self, libpath, lib_container, container_name, actions):
|
||||
|
||||
relative = bpy.context.preferences.filepaths.use_relative_paths
|
||||
with bpy.data.libraries.load(
|
||||
libpath, link=True, relative=relative
|
||||
) as (_, data_to):
|
||||
data_to.collections = [lib_container]
|
||||
|
||||
scene = bpy.context.scene
|
||||
|
||||
scene.collection.children.link(bpy.data.collections[lib_container])
|
||||
|
||||
layout_container = scene.collection.children[lib_container].make_local()
|
||||
|
||||
meshes = []
|
||||
armatures = []
|
||||
|
||||
objects_list = []
|
||||
|
||||
for element_container in layout_container.children:
|
||||
element_container.make_local()
|
||||
meshes.extend([obj for obj in element_container.objects if obj.type == 'MESH'])
|
||||
armatures.extend([obj for obj in element_container.objects if obj.type == 'ARMATURE'])
|
||||
for child in element_container.children:
|
||||
child.make_local()
|
||||
meshes.extend(child.objects)
|
||||
|
||||
# Link meshes first, then armatures.
|
||||
# The armature is unparented for all the non-local meshes,
|
||||
# when it is made local.
|
||||
for obj in meshes + armatures:
|
||||
obj = obj.make_local()
|
||||
obj.data.make_local()
|
||||
|
||||
if not obj.get(blender.pipeline.AVALON_PROPERTY):
|
||||
obj[blender.pipeline.AVALON_PROPERTY] = dict()
|
||||
|
||||
avalon_info = obj[blender.pipeline.AVALON_PROPERTY]
|
||||
avalon_info.update({"container_name": container_name})
|
||||
|
||||
action = actions.get(obj.name, None)
|
||||
|
||||
if obj.type == 'ARMATURE' and action is not None:
|
||||
obj.animation_data.action = action
|
||||
|
||||
objects_list.append(obj)
|
||||
|
||||
layout_container.pop(blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
|
||||
return objects_list
|
||||
|
||||
def process_asset(
|
||||
self, context: dict, name: str, namespace: Optional[str] = None,
|
||||
options: Optional[Dict] = None
|
||||
) -> Optional[List]:
|
||||
"""
|
||||
Arguments:
|
||||
name: Use pre-defined name
|
||||
namespace: Use pre-defined namespace
|
||||
context: Full parenthood of representation to load
|
||||
options: Additional settings dictionary
|
||||
"""
|
||||
|
||||
libpath = self.fname
|
||||
asset = context["asset"]["name"]
|
||||
subset = context["subset"]["name"]
|
||||
lib_container = pype.hosts.blender.plugin.asset_name(asset, subset)
|
||||
container_name = pype.hosts.blender.plugin.asset_name(
|
||||
asset, subset, namespace
|
||||
)
|
||||
|
||||
container = bpy.data.collections.new(lib_container)
|
||||
container.name = container_name
|
||||
blender.pipeline.containerise_existing(
|
||||
container,
|
||||
name,
|
||||
namespace,
|
||||
context,
|
||||
self.__class__.__name__,
|
||||
)
|
||||
|
||||
container_metadata = container.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
container_metadata["libpath"] = libpath
|
||||
container_metadata["lib_container"] = lib_container
|
||||
|
||||
objects_list = self._process(
|
||||
libpath, lib_container, container_name, {})
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
container_metadata["objects"] = objects_list
|
||||
|
||||
nodes = list(container.objects)
|
||||
nodes.append(container)
|
||||
self[:] = nodes
|
||||
return nodes
|
||||
|
||||
def update(self, container: Dict, representation: Dict):
|
||||
"""Update the loaded asset.
|
||||
|
||||
This will remove all objects of the current collection, load the new
|
||||
ones and add them to the collection.
|
||||
If the objects of the collection are used in another collection they
|
||||
will not be removed, only unlinked. Normally this should not be the
|
||||
case though.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
|
||||
collection = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
|
||||
libpath = Path(api.get_representation_path(representation))
|
||||
extension = libpath.suffix.lower()
|
||||
|
||||
logger.info(
|
||||
"Container: %s\nRepresentation: %s",
|
||||
pformat(container, indent=2),
|
||||
pformat(representation, indent=2),
|
||||
)
|
||||
|
||||
assert collection, (
|
||||
f"The asset is not loaded: {container['objectName']}"
|
||||
)
|
||||
assert not (collection.children), (
|
||||
"Nested collections are not supported."
|
||||
)
|
||||
assert libpath, (
|
||||
"No existing library file found for {container['objectName']}"
|
||||
)
|
||||
assert libpath.is_file(), (
|
||||
f"The file doesn't exist: {libpath}"
|
||||
)
|
||||
assert extension in pype.hosts.blender.plugin.VALID_EXTENSIONS, (
|
||||
f"Unsupported file: {libpath}"
|
||||
)
|
||||
|
||||
collection_metadata = collection.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
|
||||
collection_libpath = collection_metadata["libpath"]
|
||||
normalized_collection_libpath = (
|
||||
str(Path(bpy.path.abspath(collection_libpath)).resolve())
|
||||
)
|
||||
normalized_libpath = (
|
||||
str(Path(bpy.path.abspath(str(libpath))).resolve())
|
||||
)
|
||||
logger.debug(
|
||||
"normalized_collection_libpath:\n %s\nnormalized_libpath:\n %s",
|
||||
normalized_collection_libpath,
|
||||
normalized_libpath,
|
||||
)
|
||||
if normalized_collection_libpath == normalized_libpath:
|
||||
logger.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
objects = collection_metadata["objects"]
|
||||
lib_container = collection_metadata["lib_container"]
|
||||
|
||||
actions = {}
|
||||
|
||||
for obj in objects:
|
||||
|
||||
if obj.type == 'ARMATURE':
|
||||
|
||||
actions[obj.name] = obj.animation_data.action
|
||||
|
||||
self._remove(objects, lib_container)
|
||||
|
||||
objects_list = self._process(
|
||||
str(libpath), lib_container, collection.name, actions)
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
collection_metadata["objects"] = objects_list
|
||||
collection_metadata["libpath"] = str(libpath)
|
||||
collection_metadata["representation"] = str(representation["_id"])
|
||||
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
|
||||
def remove(self, container: Dict) -> bool:
|
||||
"""Remove an existing container from a Blender scene.
|
||||
|
||||
Arguments:
|
||||
container (avalon-core:container-1.0): Container to remove,
|
||||
from `host.ls()`.
|
||||
|
||||
Returns:
|
||||
bool: Whether the container was deleted.
|
||||
|
||||
Warning:
|
||||
No nested collections are supported at the moment!
|
||||
"""
|
||||
|
||||
collection = bpy.data.collections.get(
|
||||
container["objectName"]
|
||||
)
|
||||
if not collection:
|
||||
return False
|
||||
assert not (collection.children), (
|
||||
"Nested collections are not supported."
|
||||
)
|
||||
|
||||
collection_metadata = collection.get(
|
||||
blender.pipeline.AVALON_PROPERTY)
|
||||
objects = collection_metadata["objects"]
|
||||
lib_container = collection_metadata["lib_container"]
|
||||
|
||||
self._remove(objects, lib_container)
|
||||
|
||||
bpy.data.collections.remove(collection)
|
||||
|
||||
return True
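update() above first remembers the action assigned to every armature, reloads the library, then puts the actions back. A minimal sketch of those two bookkeeping steps, assuming it runs inside Blender and that object names survive the reload:

    import bpy

    def collect_actions(objects):
        """Remember the current action of every armature, keyed by object name."""
        actions = {}
        for obj in objects:
            if obj.type == 'ARMATURE' and obj.animation_data:
                actions[obj.name] = obj.animation_data.action
        return actions

    def restore_actions(objects, actions):
        """Re-assign the remembered actions to the freshly loaded armatures."""
        for obj in objects:
            action = actions.get(obj.name)
            if obj.type == 'ARMATURE' and action is not None:
                if obj.animation_data is None:
                    obj.animation_data_create()
                obj.animation_data.action = action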
|
||||
|
|
@ -30,7 +30,6 @@ class BlendModelLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
@staticmethod
|
||||
def _remove(self, objects, lib_container):
|
||||
|
||||
for obj in objects:
|
||||
|
|
@ -39,7 +38,6 @@ class BlendModelLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
|
||||
bpy.data.collections.remove(bpy.data.collections[lib_container])
|
||||
|
||||
@staticmethod
|
||||
def _process(self, libpath, lib_container, container_name):
|
||||
|
||||
relative = bpy.context.preferences.filepaths.use_relative_paths
|
||||
|
|
@ -118,7 +116,7 @@ class BlendModelLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
container_metadata["lib_container"] = lib_container
|
||||
|
||||
objects_list = self._process(
|
||||
self, libpath, lib_container, container_name)
|
||||
libpath, lib_container, container_name)
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
container_metadata["objects"] = objects_list
|
||||
|
|
@ -189,10 +187,10 @@ class BlendModelLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
logger.info("Library already loaded, not updating...")
|
||||
return
|
||||
|
||||
self._remove(self, objects, lib_container)
|
||||
self._remove(objects, lib_container)
|
||||
|
||||
objects_list = self._process(
|
||||
self, str(libpath), lib_container, collection.name)
|
||||
str(libpath), lib_container, collection.name)
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
collection_metadata["objects"] = objects_list
|
||||
|
|
@ -226,7 +224,7 @@ class BlendModelLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
objects = collection_metadata["objects"]
|
||||
lib_container = collection_metadata["lib_container"]
|
||||
|
||||
self._remove(self, objects, lib_container)
|
||||
self._remove(objects, lib_container)
|
||||
|
||||
bpy.data.collections.remove(collection)
|
||||
|
||||
|
|
|
|||
|
|
@ -30,7 +30,6 @@ class BlendRigLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
icon = "code-fork"
|
||||
color = "orange"
|
||||
|
||||
@staticmethod
|
||||
def _remove(self, objects, lib_container):
|
||||
|
||||
for obj in objects:
|
||||
|
|
@ -40,9 +39,11 @@ class BlendRigLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
elif obj.type == 'MESH':
|
||||
bpy.data.meshes.remove(obj.data)
|
||||
|
||||
for child in bpy.data.collections[lib_container].children:
|
||||
bpy.data.collections.remove(child)
|
||||
|
||||
bpy.data.collections.remove(bpy.data.collections[lib_container])
|
||||
|
||||
@staticmethod
|
||||
def _process(self, libpath, lib_container, container_name, action):
|
||||
|
||||
relative = bpy.context.preferences.filepaths.use_relative_paths
|
||||
|
|
@ -57,32 +58,30 @@ class BlendRigLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
|
||||
rig_container = scene.collection.children[lib_container].make_local()
|
||||
|
||||
meshes = [obj for obj in rig_container.objects if obj.type == 'MESH']
|
||||
meshes = []
|
||||
armatures = [
|
||||
obj for obj in rig_container.objects if obj.type == 'ARMATURE']
|
||||
|
||||
objects_list = []
|
||||
|
||||
assert(len(armatures) == 1)
|
||||
for child in rig_container.children:
|
||||
child.make_local()
|
||||
meshes.extend(child.objects)
|
||||
|
||||
# Link meshes first, then armatures.
|
||||
# The armature is unparented for all the non-local meshes,
|
||||
# when it is made local.
|
||||
for obj in meshes + armatures:
|
||||
|
||||
obj = obj.make_local()
|
||||
|
||||
obj.data.make_local()
|
||||
|
||||
if not obj.get(blender.pipeline.AVALON_PROPERTY):
|
||||
|
||||
obj[blender.pipeline.AVALON_PROPERTY] = dict()
|
||||
|
||||
avalon_info = obj[blender.pipeline.AVALON_PROPERTY]
|
||||
avalon_info.update({"container_name": container_name})
|
||||
|
||||
if obj.type == 'ARMATURE' and action is not None:
|
||||
|
||||
obj.animation_data.action = action
|
||||
|
||||
objects_list.append(obj)
|
||||
|
|
@ -130,7 +129,7 @@ class BlendRigLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
container_metadata["lib_container"] = lib_container
|
||||
|
||||
objects_list = self._process(
|
||||
self, libpath, lib_container, container_name, None)
|
||||
libpath, lib_container, container_name, None)
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
container_metadata["objects"] = objects_list
|
||||
|
|
@ -209,10 +208,10 @@ class BlendRigLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
|
||||
action = armatures[0].animation_data.action
|
||||
|
||||
self._remove(self, objects, lib_container)
|
||||
self._remove(objects, lib_container)
|
||||
|
||||
objects_list = self._process(
|
||||
self, str(libpath), lib_container, collection.name, action)
|
||||
str(libpath), lib_container, collection.name, action)
|
||||
|
||||
# Save the list of objects in the metadata container
|
||||
collection_metadata["objects"] = objects_list
|
||||
|
|
@ -249,7 +248,7 @@ class BlendRigLoader(pype.hosts.blender.plugin.AssetLoader):
|
|||
objects = collection_metadata["objects"]
|
||||
lib_container = collection_metadata["lib_container"]
|
||||
|
||||
self._remove(self, objects, lib_container)
|
||||
self._remove(objects, lib_container)
|
||||
|
||||
bpy.data.collections.remove(collection)
|
||||
|
||||
|
|
|
|||
|
|
@@ -9,7 +9,7 @@ class ExtractBlend(pype.api.Extractor):

label = "Extract Blend"
hosts = ["blender"]
-families = ["animation", "model", "rig", "action"]
+families = ["animation", "model", "rig", "action", "layout"]
optional = True

def process(self, instance):
|
||||
|
|
|
|||
25
pype/plugins/blender/publish/increment_workfile_version.py
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import pyblish.api
|
||||
import avalon.blender.workio
|
||||
|
||||
|
||||
class IncrementWorkfileVersion(pyblish.api.ContextPlugin):
|
||||
"""Increment current workfile version."""
|
||||
|
||||
order = pyblish.api.IntegratorOrder + 0.9
|
||||
label = "Increment Workfile Version"
|
||||
optional = True
|
||||
hosts = ["blender"]
|
||||
families = ["animation", "model", "rig", "action"]
|
||||
|
||||
def process(self, context):
|
||||
|
||||
assert all(result["success"] for result in context.data["results"]), (
|
||||
"Publishing not succesfull so version is not increased.")
|
||||
|
||||
from pype.lib import version_up
|
||||
path = context.data["currentFile"]
|
||||
filepath = version_up(path)
|
||||
|
||||
avalon.blender.workio.save_file(filepath, copy=False)
|
||||
|
||||
self.log.info('Incrementing script version')
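version_up() from pype.lib resolves the next available workfile name; a simplified, hypothetical re-implementation that only bumps a trailing _v### token illustrates the idea:

    import os
    import re

    def version_up(filepath):
        """Return ``filepath`` with its trailing _v### token incremented (simplified)."""
        dirname, filename = os.path.split(filepath)
        base, ext = os.path.splitext(filename)
        match = re.search(r"_v(\d+)$", base)
        if not match:
            # no version token yet; start at v002 next to the original
            return os.path.join(dirname, "{}_v002{}".format(base, ext))
        padding = len(match.group(1))
        new_version = int(match.group(1)) + 1
        new_base = base[:match.start()] + "_v{:0{}d}".format(new_version, padding)
        return os.path.join(dirname, new_base + ext)

    # version_up("shot010_anim_v003.blend") -> "shot010_anim_v004.blend"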
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
"""
|
||||
Requires:
|
||||
config_data -> ftrack.output_representation
|
||||
|
||||
Provides:
|
||||
context -> output_repre_config (str)
|
||||
"""
|
||||
|
||||
import pyblish.api
|
||||
from pype.api import config
|
||||
|
||||
|
||||
class CollectOutputRepreConfig(pyblish.api.ContextPlugin):
|
||||
"""Inject the current working file into context"""
|
||||
|
||||
order = pyblish.api.CollectorOrder
|
||||
label = "Collect Config for representation"
|
||||
hosts = ["shell", "standalonepublisher"]
|
||||
|
||||
def process(self, context):
|
||||
config_data = config.get_presets()["ftrack"]["output_representation"]
|
||||
context.data['output_repre_config'] = config_data
|
||||
|
|
@ -14,18 +14,28 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
|||
families = ["imagesequence", "render", "render2d", "source"]
|
||||
enabled = False
|
||||
|
||||
def process(self, instance):
|
||||
# presetable attribute
|
||||
ffmpeg_args = None
|
||||
|
||||
def process(self, instance):
|
||||
self.log.info("subset {}".format(instance.data['subset']))
|
||||
if 'crypto' in instance.data['subset']:
|
||||
return
|
||||
|
||||
# ffmpeg doesn't support multipart exrs
|
||||
if instance.data.get("multipartExr") is True:
|
||||
return
|
||||
|
||||
# get representation and loop them
|
||||
representations = instance.data["representations"]
|
||||
|
||||
# filter out mov and img sequences
|
||||
representations_new = representations[:]
|
||||
|
||||
if instance.data.get("multipartExr"):
|
||||
# ffmpeg doesn't support multipart exrs
|
||||
return
|
||||
|
||||
for repre in representations:
|
||||
tags = repre.get("tags", [])
|
||||
self.log.debug(repre)
|
||||
|
|
@ -33,11 +43,7 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
|||
if not valid:
|
||||
continue
|
||||
|
||||
if not isinstance(repre['files'], list):
|
||||
continue
|
||||
|
||||
if instance.data.get("multipartExr") is True:
|
||||
# ffmpeg doesn't support multipart exrs
|
||||
if not isinstance(repre['files'], (list, tuple)):
|
||||
continue
|
||||
|
||||
stagingdir = os.path.normpath(repre.get("stagingDir"))
|
||||
|
|
@ -57,21 +63,19 @@ class ExtractJpegEXR(pyblish.api.InstancePlugin):
|
|||
|
||||
self.log.info("output {}".format(full_output_path))
|
||||
|
||||
config_data = instance.context.data['output_repre_config']
|
||||
|
||||
proj_name = os.environ.get('AVALON_PROJECT', '__default__')
|
||||
profile = config_data.get(proj_name, config_data['__default__'])
|
||||
|
||||
ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
|
||||
ffmpeg_args = self.ffmpeg_args or {}
|
||||
|
||||
jpeg_items = []
|
||||
jpeg_items.append(ffmpeg_path)
|
||||
# override file if already exists
|
||||
jpeg_items.append("-y")
|
||||
# use same input args like with mov
|
||||
jpeg_items.extend(profile.get('input', []))
|
||||
jpeg_items.extend(ffmpeg_args.get("input") or [])
|
||||
# input file
|
||||
jpeg_items.append("-i {}".format(full_input_path))
|
||||
# output arguments from presets
|
||||
jpeg_items.extend(ffmpeg_args.get("output") or [])
|
||||
# output file
|
||||
jpeg_items.append(full_output_path)
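Put together, the items above become one ffmpeg command line. A runnable sketch with hypothetical paths and preset arguments (the real plugin hands the joined string to its own subprocess helper):

    import subprocess

    ffmpeg_args = {"input": ["-loglevel error"], "output": ["-qscale:v 2"]}
    full_input_path = "render/beauty.0001.exr"    # hypothetical
    full_output_path = "render/beauty_thumb.jpg"  # hypothetical

    jpeg_items = ["ffmpeg", "-y"]                 # overwrite output if it exists
    jpeg_items.extend(ffmpeg_args.get("input") or [])
    jpeg_items.append("-i {}".format(full_input_path))
    jpeg_items.extend(ffmpeg_args.get("output") or [])
    jpeg_items.append(full_output_path)

    # join the pieces into a single shell command, as the plugin does, and run it
    subprocess.check_call(" ".join(jpeg_items), shell=True)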
|
||||
|
||||
|
|
|
|||
|
|
@ -76,7 +76,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
|
|||
"gizmo",
|
||||
"source",
|
||||
"matchmove",
|
||||
"image"
|
||||
"image",
|
||||
"source",
|
||||
"assembly",
|
||||
"fbx",
|
||||
|
|
|
|||
|
|
@ -277,7 +277,13 @@ class CollectLook(pyblish.api.InstancePlugin):
|
|||
if looksets:
|
||||
for look in looksets:
|
||||
for at in shaderAttrs:
|
||||
-con = cmds.listConnections("{}.{}".format(look, at))
+try:
+    con = cmds.listConnections("{}.{}".format(look, at))
+except ValueError:
+    # skip attributes that are invalid in current
+    # context. For example in the case where
+    # Arnold is not enabled.
+    continue
if con:
    materials.extend(con)
|
||||
|
||||
|
|
|
|||
|
|
@ -57,11 +57,12 @@ R_SINGLE_FRAME = re.compile(r"^(-?)\d+$")
|
|||
R_FRAME_RANGE = re.compile(r"^(?P<sf>(-?)\d+)-(?P<ef>(-?)\d+)$")
|
||||
R_FRAME_NUMBER = re.compile(r".+\.(?P<frame>[0-9]+)\..+")
|
||||
R_LAYER_TOKEN = re.compile(
|
||||
r".*%l.*|.*<layer>.*|.*<renderlayer>.*", re.IGNORECASE
|
||||
r".*((?:%l)|(?:<layer>)|(?:<renderlayer>)).*", re.IGNORECASE
|
||||
)
|
||||
R_AOV_TOKEN = re.compile(r".*%a.*|.*<aov>.*|.*<renderpass>.*", re.IGNORECASE)
|
||||
R_SUBSTITUTE_AOV_TOKEN = re.compile(r"%a|<aov>|<renderpass>", re.IGNORECASE)
|
||||
R_REMOVE_AOV_TOKEN = re.compile(r"_%a|_<aov>|_<renderpass>", re.IGNORECASE)
|
||||
R_REMOVE_AOV_TOKEN = re.compile(r"(?:_|\.)((?:%a)|(?:<aov>)|(?:<renderpass>))",
|
||||
re.IGNORECASE)
|
||||
# to remove unused renderman tokens
|
||||
R_CLEAN_FRAME_TOKEN = re.compile(r"\.?<f\d>\.?", re.IGNORECASE)
|
||||
R_CLEAN_EXT_TOKEN = re.compile(r"\.?<ext>\.?", re.IGNORECASE)
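The two AOV patterns serve different cases: R_SUBSTITUTE_AOV_TOKEN fills a concrete AOV name into the prefix, while R_REMOVE_AOV_TOKEN drops the token together with its "_" or "." separator for the merged beauty sequence. A short illustration with a hypothetical Redshift-style prefix, using simplified copies of the expressions above:

    import re

    R_SUBSTITUTE_AOV_TOKEN = re.compile(r"%a|<aov>|<renderpass>", re.IGNORECASE)
    R_REMOVE_AOV_TOKEN = re.compile(
        r"(?:_|\.)((?:%a)|(?:<aov>)|(?:<renderpass>))", re.IGNORECASE)

    prefix = "maya/<Scene>/<RenderLayer>/<RenderLayer>.<aov>"  # hypothetical prefix

    # with a concrete AOV name the token is filled in ...
    print(R_SUBSTITUTE_AOV_TOKEN.sub("Cryptomatte", prefix))
    # maya/<Scene>/<RenderLayer>/<RenderLayer>.Cryptomatte

    # ... while for the merged "beauty" sequence the token and its separator go away
    print(R_REMOVE_AOV_TOKEN.sub("", prefix))
    # maya/<Scene>/<RenderLayer>/<RenderLayer>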
|
||||
|
|
@ -514,20 +515,23 @@ class AExpectedFiles:
|
|||
}
|
||||
return scene_data
|
||||
|
||||
def _generate_single_file_sequence(self, layer_data):
|
||||
def _generate_single_file_sequence(self, layer_data, aov_name=None):
|
||||
expected_files = []
|
||||
file_prefix = layer_data["filePrefix"]
|
||||
for cam in layer_data["cameras"]:
|
||||
mappings = (
|
||||
mappings = [
|
||||
(R_SUBSTITUTE_SCENE_TOKEN, layer_data["sceneName"]),
|
||||
(R_SUBSTITUTE_LAYER_TOKEN, layer_data["layerName"]),
|
||||
(R_SUBSTITUTE_CAMERA_TOKEN, cam),
|
||||
# this is required to remove unfilled aov token, for example
|
||||
# in Redshift
|
||||
(R_REMOVE_AOV_TOKEN, ""),
|
||||
(R_CLEAN_FRAME_TOKEN, ""),
|
||||
(R_CLEAN_EXT_TOKEN, ""),
|
||||
)
|
||||
]
|
||||
# this is required to remove unfilled aov token, for example
|
||||
# in Redshift
|
||||
if aov_name:
|
||||
mappings.append((R_SUBSTITUTE_AOV_TOKEN, aov_name))
|
||||
else:
|
||||
mappings.append((R_REMOVE_AOV_TOKEN, ""))
|
||||
|
||||
for regex, value in mappings:
|
||||
file_prefix = re.sub(regex, value, file_prefix)
|
||||
|
|
@ -837,13 +841,17 @@ class ExpectedFilesRedshift(AExpectedFiles):
|
|||
# mapping redshift extension dropdown values to strings
|
||||
ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"]
|
||||
|
||||
# Names of AOVs that are not merged into the resulting exr and need to be
# listed separately in the expectedFiles output.
|
||||
unmerged_aovs = ["Cryptomatte"]
|
||||
|
||||
def __init__(self, layer):
|
||||
super(ExpectedFilesRedshift, self).__init__(layer)
|
||||
self.renderer = "redshift"
|
||||
|
||||
def get_renderer_prefix(self):
|
||||
prefix = super(ExpectedFilesRedshift, self).get_renderer_prefix()
|
||||
prefix = "{}_<aov>".format(prefix)
|
||||
prefix = "{}.<aov>".format(prefix)
|
||||
return prefix
|
||||
|
||||
def get_files(self):
|
||||
|
|
@ -856,7 +864,17 @@ class ExpectedFilesRedshift(AExpectedFiles):
|
|||
if layer_data.get("enabledAOVs"):
|
||||
expected_files[0][u"beauty"] = self._generate_single_file_sequence(
|
||||
layer_data
|
||||
) # noqa: E501
|
||||
)
|
||||
|
||||
# Redshift doesn't merge Cryptomatte AOV to final exr. We need to check
|
||||
# for such condition and add it to list of expected files.
|
||||
|
||||
for aov in layer_data.get("enabledAOVs"):
|
||||
if aov[0].lower() == "cryptomatte":
|
||||
aov_name = aov[0]
|
||||
expected_files.append(
|
||||
{aov_name: self._generate_single_file_sequence(
|
||||
layer_data, aov_name=aov_name)})
|
||||
|
||||
return expected_files
|
||||
|
||||
|
|
@ -864,23 +882,15 @@ class ExpectedFilesRedshift(AExpectedFiles):
|
|||
enabled_aovs = []
|
||||
|
||||
try:
|
||||
if self.maya_is_true(
|
||||
cmds.getAttr("redshiftOptions.exrForceMultilayer")
|
||||
):
|
||||
# AOVs are merged in mutli-channel file
|
||||
self.multipart = True
|
||||
return enabled_aovs
|
||||
default_ext = self.ext_mapping[
|
||||
cmds.getAttr("redshiftOptions.imageFormat")
|
||||
]
|
||||
except ValueError:
|
||||
# this occurs when the Render Settings window was not opened yet. In
|
||||
# such case there are no Arnold options created so query for AOVs
|
||||
# will fail. We terminate here as there are no AOVs specified then.
|
||||
# This state will most probably fail later on some Validator
|
||||
# anyway.
|
||||
return enabled_aovs
|
||||
# such case there are no Redshift options created so query
|
||||
# will fail.
|
||||
raise ValueError("Render settings are not initialized")
|
||||
|
||||
default_ext = self.ext_mapping[
|
||||
cmds.getAttr("redshiftOptions.imageFormat")
|
||||
]
|
||||
rs_aovs = [n for n in cmds.ls(type="RedshiftAOV")]
|
||||
|
||||
# todo: find out how to detect multichannel exr for redshift
|
||||
|
|
@ -892,9 +902,26 @@ class ExpectedFilesRedshift(AExpectedFiles):
|
|||
enabled = self.maya_is_true(override)
|
||||
|
||||
if enabled:
|
||||
enabled_aovs.append(
|
||||
(cmds.getAttr("%s.name" % aov), default_ext)
|
||||
)
|
||||
# If AOVs are merged into multipart exr, append AOV only if it
|
||||
# is in the list of AOVs that renderer cannot (or will not)
|
||||
# merge into final exr.
|
||||
if self.maya_is_true(
|
||||
cmds.getAttr("redshiftOptions.exrForceMultilayer")
|
||||
):
|
||||
if cmds.getAttr("%s.name" % aov) in self.unmerged_aovs:
|
||||
enabled_aovs.append(
|
||||
(cmds.getAttr("%s.name" % aov), default_ext)
|
||||
)
|
||||
else:
|
||||
enabled_aovs.append(
|
||||
(cmds.getAttr("%s.name" % aov), default_ext)
|
||||
)
|
||||
|
||||
if self.maya_is_true(
|
||||
cmds.getAttr("redshiftOptions.exrForceMultilayer")
|
||||
):
|
||||
# AOVs are merged in multi-channel file
|
||||
self.multipart = True
|
||||
|
||||
return enabled_aovs
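The branch above boils down to one rule: with exrForceMultilayer on, only AOVs the renderer refuses to merge (Cryptomatte here) keep their own file sequence; otherwise every enabled AOV does. A compact sketch of that rule with hypothetical AOV names:

    def pick_expected_aovs(enabled_aov_names, force_multilayer,
                           unmerged_aovs=("Cryptomatte",)):
        """Return the AOV names that will produce their own file sequence."""
        if not force_multilayer:
            # every enabled AOV renders to its own sequence
            return list(enabled_aov_names)
        # merged multipart exr: only AOVs Redshift refuses to merge stay separate
        return [name for name in enabled_aov_names if name in unmerged_aovs]

    print(pick_expected_aovs(["Cryptomatte", "Depth", "Normals"], True))
    # ['Cryptomatte']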
|
||||
|
||||
|
|
|
|||
12
pype/plugins/photoshop/create/create_image.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from avalon import photoshop
|
||||
|
||||
|
||||
class CreateImage(photoshop.Creator):
|
||||
"""Image folder for publish."""
|
||||
|
||||
name = "imageDefault"
|
||||
label = "Image"
|
||||
family = "image"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(CreateImage, self).__init__(*args, **kwargs)
|
||||
43
pype/plugins/photoshop/load/load_image.py
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
from avalon import api, photoshop
|
||||
|
||||
|
||||
class ImageLoader(api.Loader):
|
||||
"""Load images
|
||||
|
||||
Stores the imported asset in a container named after the asset.
|
||||
"""
|
||||
|
||||
families = ["image"]
|
||||
representations = ["*"]
|
||||
|
||||
def load(self, context, name=None, namespace=None, data=None):
|
||||
with photoshop.maintained_selection():
|
||||
layer = photoshop.import_smart_object(self.fname)
|
||||
|
||||
self[:] = [layer]
|
||||
|
||||
return photoshop.containerise(
|
||||
name,
|
||||
namespace,
|
||||
layer,
|
||||
context,
|
||||
self.__class__.__name__
|
||||
)
|
||||
|
||||
def update(self, container, representation):
|
||||
layer = container.pop("layer")
|
||||
|
||||
with photoshop.maintained_selection():
|
||||
photoshop.replace_smart_object(
|
||||
layer, api.get_representation_path(representation)
|
||||
)
|
||||
|
||||
photoshop.imprint(
|
||||
layer, {"representation": str(representation["_id"])}
|
||||
)
|
||||
|
||||
def remove(self, container):
|
||||
container["layer"].Delete()
|
||||
|
||||
def switch(self, container, representation):
|
||||
self.update(container, representation)
|
||||
17
pype/plugins/photoshop/publish/collect_current_file.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
from avalon import photoshop
|
||||
|
||||
|
||||
class CollectCurrentFile(pyblish.api.ContextPlugin):
|
||||
"""Inject the current working file into context"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.5
|
||||
label = "Current File"
|
||||
hosts = ["photoshop"]
|
||||
|
||||
def process(self, context):
|
||||
context.data["currentFile"] = os.path.normpath(
|
||||
photoshop.app().ActiveDocument.FullName
|
||||
).replace("\\", "/")
|
||||
56
pype/plugins/photoshop/publish/collect_instances.py
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
import pythoncom
|
||||
|
||||
from avalon import photoshop
|
||||
|
||||
import pyblish.api
|
||||
|
||||
|
||||
class CollectInstances(pyblish.api.ContextPlugin):
|
||||
"""Gather instances by LayerSet and file metadata
|
||||
|
||||
This collector takes into account assets that are associated with
|
||||
a LayerSet and marked with a unique identifier;
|
||||
|
||||
Identifier:
|
||||
id (str): "pyblish.avalon.instance"
|
||||
"""
|
||||
|
||||
label = "Instances"
|
||||
order = pyblish.api.CollectorOrder
|
||||
hosts = ["photoshop"]
|
||||
families_mapping = {
|
||||
"image": []
|
||||
}
|
||||
|
||||
def process(self, context):
|
||||
# Necessary call when running in a different thread which pyblish-qml
|
||||
# can be.
|
||||
pythoncom.CoInitialize()
|
||||
|
||||
for layer in photoshop.get_layers_in_document():
|
||||
layer_data = photoshop.read(layer)
|
||||
|
||||
# Skip layers without metadata.
|
||||
if layer_data is None:
|
||||
continue
|
||||
|
||||
# Skip containers.
|
||||
if "container" in layer_data["id"]:
|
||||
continue
|
||||
|
||||
child_layers = [*layer.Layers]
|
||||
if not child_layers:
|
||||
self.log.info("%s skipped, it was empty." % layer.Name)
|
||||
continue
|
||||
|
||||
instance = context.create_instance(layer.Name)
|
||||
instance.append(layer)
|
||||
instance.data.update(layer_data)
|
||||
instance.data["families"] = self.families_mapping[
|
||||
layer_data["family"]
|
||||
]
|
||||
instance.data["publish"] = layer.Visible
|
||||
|
||||
# Produce diagnostic message for any graphical
|
||||
# user interface interested in visualising it.
|
||||
self.log.info("Found: \"%s\" " % instance.data["name"])
|
||||
39
pype/plugins/photoshop/publish/collect_workfile.py
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
import pyblish.api
|
||||
import os
|
||||
|
||||
|
||||
class CollectWorkfile(pyblish.api.ContextPlugin):
|
||||
"""Collect current script for publish."""
|
||||
|
||||
order = pyblish.api.CollectorOrder + 0.1
|
||||
label = "Collect Workfile"
|
||||
hosts = ["photoshop"]
|
||||
|
||||
def process(self, context):
|
||||
family = "workfile"
|
||||
task = os.getenv("AVALON_TASK", None)
|
||||
subset = family + task.capitalize()
|
||||
|
||||
file_path = context.data["currentFile"]
|
||||
staging_dir = os.path.dirname(file_path)
|
||||
base_name = os.path.basename(file_path)
|
||||
|
||||
# Create instance
|
||||
instance = context.create_instance(subset)
|
||||
instance.data.update({
|
||||
"subset": subset,
|
||||
"label": base_name,
|
||||
"name": base_name,
|
||||
"family": family,
|
||||
"families": [],
|
||||
"representations": [],
|
||||
"asset": os.environ["AVALON_ASSET"]
|
||||
})
|
||||
|
||||
# creating representation
|
||||
instance.data["representations"].append({
|
||||
"name": "psd",
|
||||
"ext": "psd",
|
||||
"files": base_name,
|
||||
"stagingDir": staging_dir,
|
||||
})
|
||||
62
pype/plugins/photoshop/publish/extract_image.py
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
import os
|
||||
|
||||
import pype.api
|
||||
from avalon import photoshop
|
||||
|
||||
|
||||
class ExtractImage(pype.api.Extractor):
|
||||
"""Produce a flattened image file from instance
|
||||
|
||||
This plug-in takes into account only the layers in the group.
|
||||
"""
|
||||
|
||||
label = "Extract Image"
|
||||
hosts = ["photoshop"]
|
||||
families = ["image"]
|
||||
|
||||
def process(self, instance):
|
||||
|
||||
staging_dir = self.staging_dir(instance)
|
||||
self.log.info("Outputting image to {}".format(staging_dir))
|
||||
|
||||
# Perform extraction
|
||||
files = {}
|
||||
with photoshop.maintained_selection():
|
||||
self.log.info("Extracting %s" % str(list(instance)))
|
||||
with photoshop.maintained_visibility():
|
||||
# Hide all other layers.
|
||||
extract_ids = [
|
||||
x.id for x in photoshop.get_layers_in_layers([instance[0]])
|
||||
]
|
||||
for layer in photoshop.get_layers_in_document():
|
||||
if layer.id not in extract_ids:
|
||||
layer.Visible = False
|
||||
|
||||
save_options = {
|
||||
"png": photoshop.com_objects.PNGSaveOptions(),
|
||||
"jpg": photoshop.com_objects.JPEGSaveOptions()
|
||||
}
|
||||
|
||||
for extension, save_option in save_options.items():
|
||||
photoshop.app().ActiveDocument.SaveAs(
|
||||
staging_dir, save_option, True
|
||||
)
|
||||
files[extension] = "{} copy.{}".format(
|
||||
os.path.splitext(
|
||||
photoshop.app().ActiveDocument.Name
|
||||
)[0],
|
||||
extension
|
||||
)
|
||||
|
||||
representations = []
|
||||
for extension, filename in files.items():
|
||||
representations.append({
|
||||
"name": extension,
|
||||
"ext": extension,
|
||||
"files": filename,
|
||||
"stagingDir": staging_dir
|
||||
})
|
||||
instance.data["representations"] = representations
|
||||
instance.data["stagingDir"] = staging_dir
|
||||
|
||||
self.log.info(f"Extracted {instance} to {staging_dir}")
|
||||
14
pype/plugins/photoshop/publish/extract_save_scene.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
import pype.api
|
||||
from avalon import photoshop
|
||||
|
||||
|
||||
class ExtractSaveScene(pype.api.Extractor):
|
||||
"""Save scene before extraction."""
|
||||
|
||||
order = pype.api.Extractor.order - 0.49
|
||||
label = "Extract Save Scene"
|
||||
hosts = ["photoshop"]
|
||||
families = ["workfile"]
|
||||
|
||||
def process(self, instance):
|
||||
photoshop.app().ActiveDocument.Save()
|
||||
48
pype/plugins/photoshop/publish/validate_instance_asset.py
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
import os
|
||||
|
||||
import pyblish.api
|
||||
import pype.api
|
||||
from avalon import photoshop
|
||||
|
||||
|
||||
class ValidateInstanceAssetRepair(pyblish.api.Action):
|
||||
"""Repair the instance asset."""
|
||||
|
||||
label = "Repair"
|
||||
icon = "wrench"
|
||||
on = "failed"
|
||||
|
||||
def process(self, context, plugin):
|
||||
|
||||
# Get the errored instances
|
||||
failed = []
|
||||
for result in context.data["results"]:
|
||||
if (result["error"] is not None and result["instance"] is not None
|
||||
and result["instance"] not in failed):
|
||||
failed.append(result["instance"])
|
||||
|
||||
# Apply pyblish.logic to get the instances for the plug-in
|
||||
instances = pyblish.api.instances_by_plugin(failed, plugin)
|
||||
|
||||
for instance in instances:
|
||||
data = photoshop.read(instance[0])
|
||||
data["asset"] = os.environ["AVALON_ASSET"]
|
||||
photoshop.imprint(instance[0], data)
|
||||
|
||||
|
||||
class ValidateInstanceAsset(pyblish.api.InstancePlugin):
|
||||
"""Validate the instance asset is the current asset."""
|
||||
|
||||
label = "Validate Instance Asset"
|
||||
hosts = ["photoshop"]
|
||||
actions = [ValidateInstanceAssetRepair]
|
||||
order = pype.api.ValidateContentsOrder
|
||||
|
||||
def process(self, instance):
|
||||
instance_asset = instance.data["asset"]
|
||||
current_asset = os.environ["AVALON_ASSET"]
|
||||
msg = (
|
||||
"Instance asset is not the same as current asset:"
|
||||
f"\nInstance: {instance_asset}\nCurrent: {current_asset}"
|
||||
)
|
||||
assert instance_asset == current_asset, msg
|
||||
17
pype/plugins/resolve/publish/collect_host.py
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import pyblish.api
|
||||
from pype.hosts.resolve.utils import get_resolve_module
|
||||
|
||||
|
||||
class CollectProject(pyblish.api.ContextPlugin):
|
||||
"""Collect Project object"""
|
||||
|
||||
order = pyblish.api.CollectorOrder - 0.1
|
||||
label = "Collect Project"
|
||||
hosts = ["resolve"]
|
||||
|
||||
def process(self, context):
|
||||
resolve = get_resolve_module()
|
||||
PM = resolve.GetProjectManager()
|
||||
P = PM.GetCurrentProject()
|
||||
|
||||
self.log.info(P.GetName())
|
||||
|
|
@ -18,6 +18,9 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
|
|||
hosts = ["standalonepublisher"]
|
||||
order = pyblish.api.ExtractorOrder
|
||||
|
||||
# Presetable attribute
|
||||
ffmpeg_args = None
|
||||
|
||||
def process(self, instance):
|
||||
repres = instance.data.get('representations')
|
||||
if not repres:
|
||||
|
|
@ -66,27 +69,23 @@ class ExtractThumbnailSP(pyblish.api.InstancePlugin):
|
|||
full_thumbnail_path = tempfile.mkstemp(suffix=".jpg")[1]
|
||||
self.log.info("output {}".format(full_thumbnail_path))
|
||||
|
||||
config_data = instance.context.data.get("output_repre_config", {})
|
||||
|
||||
proj_name = os.environ.get("AVALON_PROJECT", "__default__")
|
||||
profile = config_data.get(
|
||||
proj_name,
|
||||
config_data.get("__default__", {})
|
||||
)
|
||||
|
||||
ffmpeg_path = pype.lib.get_ffmpeg_tool_path("ffmpeg")
|
||||
|
||||
ffmpeg_args = self.ffmpeg_args or {}
|
||||
|
||||
jpeg_items = []
|
||||
jpeg_items.append(ffmpeg_path)
|
||||
# override file if already exists
|
||||
jpeg_items.append("-y")
|
||||
# add input filters from peresets
|
||||
if profile:
|
||||
jpeg_items.extend(profile.get('input', []))
|
||||
jpeg_items.extend(ffmpeg_args.get("input") or [])
|
||||
# input file
|
||||
jpeg_items.append("-i {}".format(full_input_path))
|
||||
# extract only single file
|
||||
jpeg_items.append("-vframes 1")
|
||||
|
||||
jpeg_items.extend(ffmpeg_args.get("output") or [])
|
||||
|
||||
# output file
|
||||
jpeg_items.append(full_thumbnail_path)
|
||||
|
||||
|
|
|
|||
16
pype/resources/__init__.py
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
import os
|
||||
|
||||
|
||||
def get_resource(*args):
|
||||
""" Serves to simple resources access
|
||||
|
||||
:param *args: should contain *subfolder* names and *filename* of
|
||||
resource from resources folder
|
||||
:type *args: list
|
||||
"""
|
||||
return os.path.normpath(
|
||||
os.path.join(
|
||||
os.path.dirname(__file__),
|
||||
*args
|
||||
)
|
||||
)
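Hypothetical usage, assuming the files listed below sit directly in pype/resources:

    from pype.resources import get_resource

    icon_path = get_resource("icon.png")
    # sub-folders are passed as extra positional arguments,
    # e.g. get_resource("icons", "circle_green.png") if the file were nested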
|
||||
BIN  pype/resources/circle_green.png   (new file, 35 KiB)
BIN  pype/resources/circle_orange.png  (new file, 37 KiB)
BIN  pype/resources/circle_red.png     (new file, 35 KiB)
BIN  pype/resources/icon.png           (new file, 3.7 KiB)
BIN  pype/resources/icon_dev.png       (new file, 6.7 KiB)
BIN  pype/resources/splash.png         (new file, 3.7 KiB)
BIN  pype/resources/splash_dev.png     (new file, 6.7 KiB)
17
pype/resources/working.svg
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
<svg version="1.1" id="loader-1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
width="312px" height="312px" viewBox="0 0 40 40" xml:space="preserve">
|
||||
<path opacity="0.2" fill="#ffa500" d="M20.201,5.169c-8.254,0-14.946,6.692-14.946,14.946c0,8.255,6.692,14.946,14.946,14.946
|
||||
s14.946-6.691,14.946-14.946C35.146,11.861,28.455,5.169,20.201,5.169z M20.201,31.749c-6.425,0-11.634-5.208-11.634-11.634
|
||||
c0-6.425,5.209-11.634,11.634-11.634c6.425,0,11.633,5.209,11.633,11.634C31.834,26.541,26.626,31.749,20.201,31.749z"/>
|
||||
<path fill="#ffa500" d="M26.013,10.047l1.654-2.866c-2.198-1.272-4.743-2.012-7.466-2.012h0v3.312h0
|
||||
C22.32,8.481,24.301,9.057,26.013,10.047z">
|
||||
<animateTransform attributeType="xml"
|
||||
attributeName="transform"
|
||||
type="rotate"
|
||||
from="00 20.2 20.1"
|
||||
to="360 20.2 20.1"
|
||||
dur="0.5s"
|
||||
repeatCount="indefinite"/>
|
||||
</path>
|
||||
<text x="3" y="23" fill="#ffa500" font-style="bold" font-size="7px" font-family="sans-serif">Working...</text>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1 KiB |
13
pype/tools/pyblish_pype/__init__.py
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
from .version import version, version_info, __version__
|
||||
|
||||
# This must be run prior to importing the application, due to the
|
||||
# application requiring a discovered copy of Qt bindings.
|
||||
|
||||
from .app import show
|
||||
|
||||
__all__ = [
|
||||
'show',
|
||||
'version',
|
||||
'version_info',
|
||||
'__version__'
|
||||
]
|
||||
19
pype/tools/pyblish_pype/__main__.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
from .app import show
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--debug", action="store_true")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.debug:
|
||||
from . import mock
|
||||
import pyblish.api
|
||||
|
||||
for Plugin in mock.plugins:
|
||||
pyblish.api.register_plugin(Plugin)
|
||||
|
||||
show()
|
||||
493
pype/tools/pyblish_pype/app.css
Normal file
|
|
@ -0,0 +1,493 @@
|
|||
/* Global CSS */
|
||||
|
||||
* {
|
||||
outline: none;
|
||||
color: #ddd;
|
||||
font-family: "Open Sans";
|
||||
font-style: normal;
|
||||
}
|
||||
|
||||
/* General CSS */
|
||||
|
||||
QWidget {
|
||||
background: #555;
|
||||
background-position: center center;
|
||||
background-repeat: no-repeat;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
QMenu {
|
||||
background-color: #555; /* sets background of the menu */
|
||||
border: 1px solid #222;
|
||||
}
|
||||
|
||||
QMenu::item {
|
||||
/* sets background of menu item. set this to something non-transparent
|
||||
if you want menu color and menu item color to be different */
|
||||
background-color: transparent;
|
||||
padding: 5px;
|
||||
padding-left: 30px;
|
||||
}
|
||||
|
||||
QMenu::item:selected { /* when user selects item using mouse or keyboard */
|
||||
background-color: #666;
|
||||
}
|
||||
|
||||
QDialog {
|
||||
min-width: 300;
|
||||
background: "#555";
|
||||
}
|
||||
|
||||
QListView {
|
||||
border: 0px;
|
||||
background: "transparent"
|
||||
}
|
||||
|
||||
QTreeView {
|
||||
border: 0px;
|
||||
background: "transparent"
|
||||
}
|
||||
|
||||
QPushButton {
|
||||
width: 27px;
|
||||
height: 27px;
|
||||
background: #555;
|
||||
border: 1px solid #aaa;
|
||||
border-radius: 4px;
|
||||
font-family: "FontAwesome";
|
||||
font-size: 11pt;
|
||||
color: white;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
QPushButton:pressed {
|
||||
background: "#777";
|
||||
}
|
||||
|
||||
QPushButton:hover {
|
||||
color: white;
|
||||
background: "#666";
|
||||
}
|
||||
|
||||
QPushButton:disabled {
|
||||
color: rgba(255, 255, 255, 50);
|
||||
}
|
||||
|
||||
QTextEdit, QLineEdit {
|
||||
background: #555;
|
||||
border: 1px solid #333;
|
||||
font-size: 9pt;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
QCheckBox {
|
||||
min-width: 17px;
|
||||
max-width: 17px;
|
||||
border: 1px solid #222;
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
QCheckBox::indicator {
|
||||
width: 15px;
|
||||
height: 15px;
|
||||
/*background: #444;*/
|
||||
background: transparent;
|
||||
border: 1px solid #555;
|
||||
}
|
||||
|
||||
QCheckBox::indicator:checked {
|
||||
background: #222;
|
||||
}
|
||||
|
||||
QComboBox {
|
||||
background: #444;
|
||||
color: #EEE;
|
||||
font-size: 8pt;
|
||||
border: 1px solid #333;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
QComboBox[combolist="true"]::drop-down {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
QComboBox[combolist="true"]::down-arrow {
|
||||
max-width: 0px;
|
||||
width: 1px;
|
||||
}
|
||||
|
||||
QComboBox[combolist="true"] QAbstractItemView {
|
||||
background: #555;
|
||||
}
|
||||
|
||||
QScrollBar:vertical {
|
||||
border: none;
|
||||
background: transparent;
|
||||
width: 6px;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
QScrollBar::handle:vertical {
|
||||
background: #333;
|
||||
border-radius: 3px;
|
||||
min-height: 20px;
|
||||
}
|
||||
|
||||
QScrollBar::add-line:vertical, QScrollBar::sub-line:vertical {
|
||||
height: 0px;
|
||||
}
|
||||
|
||||
QScrollBar::up-arrow:vertical, QScrollBar::down-arrow:vertical {
|
||||
border: 1px solid #444;
|
||||
width: 3px;
|
||||
height: 3px;
|
||||
background: white;
|
||||
}
|
||||
|
||||
QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical {
|
||||
background: none;
|
||||
}
|
||||
|
||||
QToolTip {
|
||||
color: #eee;
|
||||
background-color: #555;
|
||||
border: none;
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
QLabel {
|
||||
border-radius: 0px;
|
||||
}
|
||||
|
||||
QToolButton {
|
||||
background-color: transparent;
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
border-radius: 0px;
|
||||
border: none;
|
||||
}
|
||||
|
||||
/* Specific CSS */
|
||||
#PerspectiveToggleBtn {
|
||||
border-bottom: 3px solid lightblue;
|
||||
border-top: 0px;
|
||||
border-radius: 0px;
|
||||
border-right: 1px solid #232323;
|
||||
border-left: 0px;
|
||||
font-size: 26pt;
|
||||
font-family: "FontAwesome";
|
||||
}
|
||||
|
||||
#Terminal QComboBox::drop-down {
|
||||
width: 60px;
|
||||
}
|
||||
|
||||
#Header {
|
||||
background: #555;
|
||||
border: 1px solid #444;
|
||||
padding: 0px;
|
||||
margin: 0px;
|
||||
}
|
||||
|
||||
#Header QRadioButton {
|
||||
border: 3px solid "transparent";
|
||||
border-right: 1px solid #333;
|
||||
left: 2px;
|
||||
}
|
||||
|
||||
#Header QRadioButton::indicator {
|
||||
width: 65px;
|
||||
height: 40px;
|
||||
background-repeat: no-repeat;
|
||||
background-position: center center;
|
||||
image: none;
|
||||
}
|
||||
|
||||
#Header QRadioButton:hover {
|
||||
background-color: rgba(255, 255, 255, 10);
|
||||
}
|
||||
|
||||
#Header QRadioButton:checked {
|
||||
background-color: rgba(255, 255, 255, 20);
|
||||
border-bottom: 3px solid "lightblue";
|
||||
}
|
||||
|
||||
#Body {
|
||||
padding: 0px;
|
||||
border: 1px solid #333;
|
||||
background: #444;
|
||||
}
|
||||
|
||||
#Body QWidget {
|
||||
background: #444;
|
||||
}
|
||||
|
||||
#Header #ArtistTab {
|
||||
background-image: url("img/tab-home.png");
|
||||
}
|
||||
|
||||
#Header #TerminalTab {
|
||||
background-image: url("img/tab-terminal.png");
|
||||
}
|
||||
|
||||
#Header #OverviewTab {
|
||||
background-image: url("img/tab-overview.png");
|
||||
}
|
||||
|
||||
#ButtonWithMenu {
|
||||
background: #555;
|
||||
border: 1px solid #fff;
|
||||
border-radius: 4px;
|
||||
font-family: "FontAwesome";
|
||||
font-size: 11pt;
|
||||
color: white;
|
||||
}
|
||||
|
||||
#ButtonWithMenu:pressed {
|
||||
background: #777;
|
||||
}
|
||||
|
||||
#ButtonWithMenu:hover {
|
||||
color: white;
|
||||
background: #666;
|
||||
}
|
||||
#ButtonWithMenu:disabled {
|
||||
background: #666;
|
||||
color: #999;
|
||||
border: 1px solid #999;
|
||||
}
|
||||
|
||||
#FooterSpacer, #FooterInfo, #HeaderSpacer {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
#Footer {
|
||||
background: #555;
|
||||
min-height: 43px;
|
||||
}
|
||||
|
||||
#Footer[success="1"] {
|
||||
background: #458056
|
||||
}
|
||||
|
||||
#Footer[success="0"] {
|
||||
background-color: #AA5050
|
||||
}
|
||||
|
||||
#Footer QPushButton {
|
||||
background: #555;
|
||||
border: 1px solid #aaa;
|
||||
border-radius: 4px;
|
||||
font-family: "FontAwesome";
|
||||
font-size: 11pt;
|
||||
color: white;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
#Footer QPushButton:pressed:hover {
|
||||
color: #3784c5;
|
||||
background: #444;
|
||||
}
|
||||
|
||||
#Footer QPushButton:hover {
|
||||
background: #505050;
|
||||
border: 2px solid #3784c5;
|
||||
}
|
||||
|
||||
#Footer QPushButton:disabled {
|
||||
border: 1px solid #888;
|
||||
background: #666;
|
||||
color: #999;
|
||||
}
|
||||
|
||||
#ClosingPlaceholder {
|
||||
background: rgba(0, 0, 0, 50);
|
||||
}
|
||||
|
||||
#CommentIntentWidget {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
#CommentBox, #CommentPlaceholder {
|
||||
font-family: "Open Sans";
|
||||
font-size: 8pt;
|
||||
padding: 5px;
|
||||
background: #444;
|
||||
}
|
||||
|
||||
#CommentBox {
|
||||
selection-background-color: #222;
|
||||
}
|
||||
|
||||
#CommentBox:disabled, #CommentPlaceholder:disabled, #IntentBox:disabled {
|
||||
background: #555;
|
||||
}
|
||||
|
||||
#CommentPlaceholder {
|
||||
color: #888
|
||||
}
|
||||
|
||||
#IntentBox {
|
||||
background: #444;
|
||||
font-size: 8pt;
|
||||
padding: 5px;
|
||||
min-width: 75px;
|
||||
color: #EEE;
|
||||
}
|
||||
|
||||
#IntentBox::drop-down:button {
|
||||
border: 0px;
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
#IntentBox::down-arrow {
|
||||
image: url("/img/down_arrow.png");
|
||||
}
|
||||
|
||||
#IntentBox::down-arrow:disabled {
|
||||
image: url();
|
||||
}
|
||||
|
||||
#TerminalView {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
#TerminalView:item {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
#TerminalView:hover {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
#TerminalView:selected {
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
#TerminalView:item:hover {
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
#TerminalView:item:selected {
|
||||
color: #eeeeee;
|
||||
}
|
||||
|
||||
#TerminalView QTextEdit {
|
||||
padding:3px;
|
||||
color: #aaa;
|
||||
border-radius: 7px;
|
||||
border-color: #222;
|
||||
border-style: solid;
|
||||
border-width: 2px;
|
||||
background-color: #333;
|
||||
}
|
||||
|
||||
#TerminalView QTextEdit:hover {
|
||||
background-color: #353535;
|
||||
}
|
||||
|
||||
#TerminalView QTextEdit:selected {
|
||||
background-color: #303030;
|
||||
}
|
||||
|
||||
#ExpandableWidgetContent {
|
||||
border: none;
|
||||
background-color: #232323;
|
||||
color:#eeeeee;
|
||||
}
|
||||
|
||||
#EllidableLabel {
|
||||
font-size: 16pt;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
#PerspectiveScrollContent {
|
||||
border: 1px solid #333;
|
||||
border-radius: 0px;
|
||||
}
|
||||
|
||||
#PerspectiveWidgetContent{
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
#PerspectiveLabel {
|
||||
background-color: transparent;
|
||||
border: none;
|
||||
}
|
||||
|
||||
#PerspectiveIndicator {
|
||||
font-size: 16pt;
|
||||
font-weight: normal;
|
||||
padding: 5px;
|
||||
background-color: #ffffff;
|
||||
color: #333333;
|
||||
}
|
||||
|
||||
#PerspectiveIndicator[state="warning"] {
|
||||
background-color: #ff9900;
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
#PerspectiveIndicator[state="active"] {
|
||||
background-color: #99CEEE;
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
#PerspectiveIndicator[state="error"] {
|
||||
background-color: #cc4a4a;
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
#PerspectiveIndicator[state="ok"] {
|
||||
background-color: #69a567;
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
#ExpandableHeader {
|
||||
background-color: transparent;
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
border-radius: 0px;
|
||||
border: none;
|
||||
}
|
||||
|
||||
#ExpandableHeader QWidget {
|
||||
color: #ddd;
|
||||
}
|
||||
|
||||
#ExpandableHeader QWidget:hover {
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
#TerminalFilerBtn {
|
||||
/* font: %(font_size_pt)spt; */
|
||||
font-family: "FontAwesome";
|
||||
text-align: center;
|
||||
background-color: transparent;
|
||||
border-width: 1px;
|
||||
border-color: #777777;
|
||||
border-style: none;
|
||||
padding: 0px;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
#TerminalFilerBtn[type="info"]:checked {color: rgb(255, 255, 255);}
|
||||
#TerminalFilerBtn[type="info"] {color: rgba(255, 255, 255, 63);}
|
||||
|
||||
#TerminalFilerBtn[type="error"]:checked {color: rgb(255, 74, 74);}
|
||||
#TerminalFilerBtn[type="error"] {color: rgba(255, 74, 74, 63);}
|
||||
|
||||
#TerminalFilerBtn[type="log_debug"]:checked {color: rgb(255, 102, 232);}
|
||||
#TerminalFilerBtn[type="log_debug"] {color: rgba(255, 102, 232, 63);}
|
||||
|
||||
#TerminalFilerBtn[type="log_info"]:checked {color: rgb(102, 171, 255);}
|
||||
#TerminalFilerBtn[type="log_info"] {color: rgba(102, 171, 255, 63);}
|
||||
|
||||
#TerminalFilerBtn[type="log_warning"]:checked {color: rgb(255, 186, 102);}
|
||||
#TerminalFilerBtn[type="log_warning"] {color: rgba(255, 186, 102, 63);}
|
||||
|
||||
#TerminalFilerBtn[type="log_error"]:checked {color: rgb(255, 77, 88);}
|
||||
#TerminalFilerBtn[type="log_error"] {color: rgba(255, 77, 88, 63);}
|
||||
|
||||
#TerminalFilerBtn[type="log_critical"]:checked {color: rgb(255, 79, 117);}
|
||||
#TerminalFilerBtn[type="log_critical"] {color: rgba(255, 79, 117, 63);}
|
||||
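Several selectors above key off dynamic widget properties (`#Footer[success="1"]`, `#PerspectiveIndicator[state="warning"]`, `#TerminalFilerBtn[type="error"]`). A sketch of how such a property is typically toggled from Python so the stylesheet re-applies; the helper name is illustrative only:

```python
from Qt import QtWidgets  # vendored as .vendor.Qt inside the tool


def set_footer_success(footer: QtWidgets.QWidget, success: bool):
    """Flip the `success` property and re-polish so #Footer[success="..."] applies."""
    footer.setProperty("success", "1" if success else "0")
    footer.style().unpolish(footer)
    footer.style().polish(footer)
```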
104
pype/tools/pyblish_pype/app.py
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import sys
|
||||
|
||||
from . import compat, control, settings, util, window
|
||||
from .vendor.Qt import QtCore, QtGui, QtWidgets
|
||||
|
||||
self = sys.modules[__name__]
|
||||
|
||||
# Maintain reference to currently opened window
|
||||
self._window = None
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def application():
|
||||
app = QtWidgets.QApplication.instance()
|
||||
|
||||
if not app:
|
||||
print("Starting new QApplication..")
|
||||
app = QtWidgets.QApplication(sys.argv)
|
||||
yield app
|
||||
app.exec_()
|
||||
else:
|
||||
print("Using existing QApplication..")
|
||||
yield app
|
||||
if os.environ.get("PYBLISH_GUI_ALWAYS_EXEC"):
|
||||
app.exec_()
|
||||
|
||||
|
||||
def install_translator(app):
|
||||
translator = QtCore.QTranslator(app)
|
||||
translator.load(QtCore.QLocale.system(), "i18n/",
|
||||
directory=util.root)
|
||||
app.installTranslator(translator)
|
||||
print("Installed translator")
|
||||
|
||||
|
||||
def install_fonts():
|
||||
database = QtGui.QFontDatabase()
|
||||
fonts = [
|
||||
"opensans/OpenSans-Bold.ttf",
|
||||
"opensans/OpenSans-BoldItalic.ttf",
|
||||
"opensans/OpenSans-ExtraBold.ttf",
|
||||
"opensans/OpenSans-ExtraBoldItalic.ttf",
|
||||
"opensans/OpenSans-Italic.ttf",
|
||||
"opensans/OpenSans-Light.ttf",
|
||||
"opensans/OpenSans-LightItalic.ttf",
|
||||
"opensans/OpenSans-Regular.ttf",
|
||||
"opensans/OpenSans-Semibold.ttf",
|
||||
"opensans/OpenSans-SemiboldItalic.ttf",
|
||||
"fontawesome/fontawesome-webfont.ttf"
|
||||
]
|
||||
|
||||
for font in fonts:
|
||||
path = util.get_asset("font", font)
|
||||
|
||||
# TODO(marcus): Check if they are already installed first.
|
||||
# In hosts, this will be called each time the GUI is shown,
|
||||
# potentially installing a font each time.
|
||||
if database.addApplicationFont(path) < 0:
|
||||
sys.stderr.write("Could not install %s\n" % path)
|
||||
else:
|
||||
sys.stdout.write("Installed %s\n" % font)
|
||||
|
||||
|
||||
def on_destroyed():
|
||||
"""Remove internal reference to window on window destroyed"""
|
||||
self._window = None
|
||||
|
||||
|
||||
def show(parent=None):
|
||||
with open(util.get_asset("app.css")) as f:
|
||||
css = f.read()
|
||||
|
||||
# Make relative paths absolute
|
||||
root = util.get_asset("").replace("\\", "/")
|
||||
css = css.replace("url(\"", "url(\"%s" % root)
|
||||
|
||||
with application() as app:
|
||||
compat.init()
|
||||
|
||||
install_fonts()
|
||||
install_translator(app)
|
||||
|
||||
ctrl = control.Controller()
|
||||
|
||||
if self._window is None:
|
||||
self._window = window.Window(ctrl, parent)
|
||||
self._window.destroyed.connect(on_destroyed)
|
||||
|
||||
self._window.show()
|
||||
self._window.activateWindow()
|
||||
self._window.resize(*settings.WindowSize)
|
||||
self._window.setWindowTitle(settings.WindowTitle)
|
||||
|
||||
font = QtGui.QFont("Open Sans", 8, QtGui.QFont.Normal)
|
||||
self._window.setFont(font)
|
||||
self._window.setStyleSheet(css)
|
||||
|
||||
self._window.reset()
|
||||
|
||||
return self._window
|
||||
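One detail worth noting in `show()`: the stylesheet's relative `url("img/...")` references are rewritten to absolute paths before the sheet is applied. A minimal illustration of that rewrite (the root path is a made-up example standing in for `util.get_asset("")`):

```python
css = 'QRadioButton { background-image: url("img/tab-home.png"); }'
root = "C:/pype/tools/pyblish_pype/"  # util.get_asset("") in the real code
css = css.replace('url("', 'url("%s' % root)
print(css)
# QRadioButton { background-image: url("C:/pype/tools/pyblish_pype/img/tab-home.png"); }
```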
733
pype/tools/pyblish_pype/awesome.py
Normal file
|
|
@ -0,0 +1,733 @@
|
|||
|
||||
tags = {
|
||||
"500px": u"\uf26e",
|
||||
"adjust": u"\uf042",
|
||||
"adn": u"\uf170",
|
||||
"align-center": u"\uf037",
|
||||
"align-justify": u"\uf039",
|
||||
"align-left": u"\uf036",
|
||||
"align-right": u"\uf038",
|
||||
"amazon": u"\uf270",
|
||||
"ambulance": u"\uf0f9",
|
||||
"american-sign-language-interpreting": u"\uf2a3",
|
||||
"anchor": u"\uf13d",
|
||||
"android": u"\uf17b",
|
||||
"angellist": u"\uf209",
|
||||
"angle-double-down": u"\uf103",
|
||||
"angle-double-left": u"\uf100",
|
||||
"angle-double-right": u"\uf101",
|
||||
"angle-double-up": u"\uf102",
|
||||
"angle-down": u"\uf107",
|
||||
"angle-left": u"\uf104",
|
||||
"angle-right": u"\uf105",
|
||||
"angle-up": u"\uf106",
|
||||
"apple": u"\uf179",
|
||||
"archive": u"\uf187",
|
||||
"area-chart": u"\uf1fe",
|
||||
"arrow-circle-down": u"\uf0ab",
|
||||
"arrow-circle-left": u"\uf0a8",
|
||||
"arrow-circle-o-down": u"\uf01a",
|
||||
"arrow-circle-o-left": u"\uf190",
|
||||
"arrow-circle-o-right": u"\uf18e",
|
||||
"arrow-circle-o-up": u"\uf01b",
|
||||
"arrow-circle-right": u"\uf0a9",
|
||||
"arrow-circle-up": u"\uf0aa",
|
||||
"arrow-down": u"\uf063",
|
||||
"arrow-left": u"\uf060",
|
||||
"arrow-right": u"\uf061",
|
||||
"arrow-up": u"\uf062",
|
||||
"arrows": u"\uf047",
|
||||
"arrows-alt": u"\uf0b2",
|
||||
"arrows-h": u"\uf07e",
|
||||
"arrows-v": u"\uf07d",
|
||||
"asl-interpreting (alias)": u"\uf2a3",
|
||||
"assistive-listening-systems": u"\uf2a2",
|
||||
"asterisk": u"\uf069",
|
||||
"at": u"\uf1fa",
|
||||
"audio-description": u"\uf29e",
|
||||
"automobile (alias)": u"\uf1b9",
|
||||
"backward": u"\uf04a",
|
||||
"balance-scale": u"\uf24e",
|
||||
"ban": u"\uf05e",
|
||||
"bank (alias)": u"\uf19c",
|
||||
"bar-chart": u"\uf080",
|
||||
"bar-chart-o (alias)": u"\uf080",
|
||||
"barcode": u"\uf02a",
|
||||
"bars": u"\uf0c9",
|
||||
"battery-0 (alias)": u"\uf244",
|
||||
"battery-1 (alias)": u"\uf243",
|
||||
"battery-2 (alias)": u"\uf242",
|
||||
"battery-3 (alias)": u"\uf241",
|
||||
"battery-4 (alias)": u"\uf240",
|
||||
"battery-empty": u"\uf244",
|
||||
"battery-full": u"\uf240",
|
||||
"battery-half": u"\uf242",
|
||||
"battery-quarter": u"\uf243",
|
||||
"battery-three-quarters": u"\uf241",
|
||||
"bed": u"\uf236",
|
||||
"beer": u"\uf0fc",
|
||||
"behance": u"\uf1b4",
|
||||
"behance-square": u"\uf1b5",
|
||||
"bell": u"\uf0f3",
|
||||
"bell-o": u"\uf0a2",
|
||||
"bell-slash": u"\uf1f6",
|
||||
"bell-slash-o": u"\uf1f7",
|
||||
"bicycle": u"\uf206",
|
||||
"binoculars": u"\uf1e5",
|
||||
"birthday-cake": u"\uf1fd",
|
||||
"bitbucket": u"\uf171",
|
||||
"bitbucket-square": u"\uf172",
|
||||
"bitcoin (alias)": u"\uf15a",
|
||||
"black-tie": u"\uf27e",
|
||||
"blind": u"\uf29d",
|
||||
"bluetooth": u"\uf293",
|
||||
"bluetooth-b": u"\uf294",
|
||||
"bold": u"\uf032",
|
||||
"bolt": u"\uf0e7",
|
||||
"bomb": u"\uf1e2",
|
||||
"book": u"\uf02d",
|
||||
"bookmark": u"\uf02e",
|
||||
"bookmark-o": u"\uf097",
|
||||
"braille": u"\uf2a1",
|
||||
"briefcase": u"\uf0b1",
|
||||
"btc": u"\uf15a",
|
||||
"bug": u"\uf188",
|
||||
"building": u"\uf1ad",
|
||||
"building-o": u"\uf0f7",
|
||||
"bullhorn": u"\uf0a1",
|
||||
"bullseye": u"\uf140",
|
||||
"bus": u"\uf207",
|
||||
"buysellads": u"\uf20d",
|
||||
"cab (alias)": u"\uf1ba",
|
||||
"calculator": u"\uf1ec",
|
||||
"calendar": u"\uf073",
|
||||
"calendar-check-o": u"\uf274",
|
||||
"calendar-minus-o": u"\uf272",
|
||||
"calendar-o": u"\uf133",
|
||||
"calendar-plus-o": u"\uf271",
|
||||
"calendar-times-o": u"\uf273",
|
||||
"camera": u"\uf030",
|
||||
"camera-retro": u"\uf083",
|
||||
"car": u"\uf1b9",
|
||||
"caret-down": u"\uf0d7",
|
||||
"caret-left": u"\uf0d9",
|
||||
"caret-right": u"\uf0da",
|
||||
"caret-square-o-down": u"\uf150",
|
||||
"caret-square-o-left": u"\uf191",
|
||||
"caret-square-o-right": u"\uf152",
|
||||
"caret-square-o-up": u"\uf151",
|
||||
"caret-up": u"\uf0d8",
|
||||
"cart-arrow-down": u"\uf218",
|
||||
"cart-plus": u"\uf217",
|
||||
"cc": u"\uf20a",
|
||||
"cc-amex": u"\uf1f3",
|
||||
"cc-diners-club": u"\uf24c",
|
||||
"cc-discover": u"\uf1f2",
|
||||
"cc-jcb": u"\uf24b",
|
||||
"cc-mastercard": u"\uf1f1",
|
||||
"cc-paypal": u"\uf1f4",
|
||||
"cc-stripe": u"\uf1f5",
|
||||
"cc-visa": u"\uf1f0",
|
||||
"certificate": u"\uf0a3",
|
||||
"chain (alias)": u"\uf0c1",
|
||||
"chain-broken": u"\uf127",
|
||||
"check": u"\uf00c",
|
||||
"check-circle": u"\uf058",
|
||||
"check-circle-o": u"\uf05d",
|
||||
"check-square": u"\uf14a",
|
||||
"check-square-o": u"\uf046",
|
||||
"chevron-circle-down": u"\uf13a",
|
||||
"chevron-circle-left": u"\uf137",
|
||||
"chevron-circle-right": u"\uf138",
|
||||
"chevron-circle-up": u"\uf139",
|
||||
"chevron-down": u"\uf078",
|
||||
"chevron-left": u"\uf053",
|
||||
"chevron-right": u"\uf054",
|
||||
"chevron-up": u"\uf077",
|
||||
"child": u"\uf1ae",
|
||||
"chrome": u"\uf268",
|
||||
"circle": u"\uf111",
|
||||
"circle-o": u"\uf10c",
|
||||
"circle-o-notch": u"\uf1ce",
|
||||
"circle-thin": u"\uf1db",
|
||||
"clipboard": u"\uf0ea",
|
||||
"clock-o": u"\uf017",
|
||||
"clone": u"\uf24d",
|
||||
"close (alias)": u"\uf00d",
|
||||
"cloud": u"\uf0c2",
|
||||
"cloud-download": u"\uf0ed",
|
||||
"cloud-upload": u"\uf0ee",
|
||||
"cny (alias)": u"\uf157",
|
||||
"code": u"\uf121",
|
||||
"code-fork": u"\uf126",
|
||||
"codepen": u"\uf1cb",
|
||||
"codiepie": u"\uf284",
|
||||
"coffee": u"\uf0f4",
|
||||
"cog": u"\uf013",
|
||||
"cogs": u"\uf085",
|
||||
"columns": u"\uf0db",
|
||||
"comment": u"\uf075",
|
||||
"comment-o": u"\uf0e5",
|
||||
"commenting": u"\uf27a",
|
||||
"commenting-o": u"\uf27b",
|
||||
"comments": u"\uf086",
|
||||
"comments-o": u"\uf0e6",
|
||||
"compass": u"\uf14e",
|
||||
"compress": u"\uf066",
|
||||
"connectdevelop": u"\uf20e",
|
||||
"contao": u"\uf26d",
|
||||
"copy (alias)": u"\uf0c5",
|
||||
"copyright": u"\uf1f9",
|
||||
"creative-commons": u"\uf25e",
|
||||
"credit-card": u"\uf09d",
|
||||
"credit-card-alt": u"\uf283",
|
||||
"crop": u"\uf125",
|
||||
"crosshairs": u"\uf05b",
|
||||
"css3": u"\uf13c",
|
||||
"cube": u"\uf1b2",
|
||||
"cubes": u"\uf1b3",
|
||||
"cut (alias)": u"\uf0c4",
|
||||
"cutlery": u"\uf0f5",
|
||||
"dashboard (alias)": u"\uf0e4",
|
||||
"dashcube": u"\uf210",
|
||||
"database": u"\uf1c0",
|
||||
"deaf": u"\uf2a4",
|
||||
"deafness (alias)": u"\uf2a4",
|
||||
"dedent (alias)": u"\uf03b",
|
||||
"delicious": u"\uf1a5",
|
||||
"desktop": u"\uf108",
|
||||
"deviantart": u"\uf1bd",
|
||||
"diamond": u"\uf219",
|
||||
"digg": u"\uf1a6",
|
||||
"dollar (alias)": u"\uf155",
|
||||
"dot-circle-o": u"\uf192",
|
||||
"download": u"\uf019",
|
||||
"dribbble": u"\uf17d",
|
||||
"dropbox": u"\uf16b",
|
||||
"drupal": u"\uf1a9",
|
||||
"edge": u"\uf282",
|
||||
"edit (alias)": u"\uf044",
|
||||
"eject": u"\uf052",
|
||||
"ellipsis-h": u"\uf141",
|
||||
"ellipsis-v": u"\uf142",
|
||||
"empire": u"\uf1d1",
|
||||
"envelope": u"\uf0e0",
|
||||
"envelope-o": u"\uf003",
|
||||
"envelope-square": u"\uf199",
|
||||
"envira": u"\uf299",
|
||||
"eraser": u"\uf12d",
|
||||
"eur": u"\uf153",
|
||||
"euro (alias)": u"\uf153",
|
||||
"exchange": u"\uf0ec",
|
||||
"exclamation": u"\uf12a",
|
||||
"exclamation-circle": u"\uf06a",
|
||||
"exclamation-triangle": u"\uf071",
|
||||
"expand": u"\uf065",
|
||||
"expeditedssl": u"\uf23e",
|
||||
"external-link": u"\uf08e",
|
||||
"external-link-square": u"\uf14c",
|
||||
"eye": u"\uf06e",
|
||||
"eye-slash": u"\uf070",
|
||||
"eyedropper": u"\uf1fb",
|
||||
"fa (alias)": u"\uf2b4",
|
||||
"facebook": u"\uf09a",
|
||||
"facebook-f (alias)": u"\uf09a",
|
||||
"facebook-official": u"\uf230",
|
||||
"facebook-square": u"\uf082",
|
||||
"fast-backward": u"\uf049",
|
||||
"fast-forward": u"\uf050",
|
||||
"fax": u"\uf1ac",
|
||||
"feed (alias)": u"\uf09e",
|
||||
"female": u"\uf182",
|
||||
"fighter-jet": u"\uf0fb",
|
||||
"file": u"\uf15b",
|
||||
"file-archive-o": u"\uf1c6",
|
||||
"file-audio-o": u"\uf1c7",
|
||||
"file-code-o": u"\uf1c9",
|
||||
"file-excel-o": u"\uf1c3",
|
||||
"file-image-o": u"\uf1c5",
|
||||
"file-movie-o (alias)": u"\uf1c8",
|
||||
"file-o": u"\uf016",
|
||||
"file-pdf-o": u"\uf1c1",
|
||||
"file-photo-o (alias)": u"\uf1c5",
|
||||
"file-picture-o (alias)": u"\uf1c5",
|
||||
"file-powerpoint-o": u"\uf1c4",
|
||||
"file-sound-o (alias)": u"\uf1c7",
|
||||
"file-text": u"\uf15c",
|
||||
"file-text-o": u"\uf0f6",
|
||||
"file-video-o": u"\uf1c8",
|
||||
"file-word-o": u"\uf1c2",
|
||||
"file-zip-o (alias)": u"\uf1c6",
|
||||
"files-o": u"\uf0c5",
|
||||
"film": u"\uf008",
|
||||
"filter": u"\uf0b0",
|
||||
"fire": u"\uf06d",
|
||||
"fire-extinguisher": u"\uf134",
|
||||
"firefox": u"\uf269",
|
||||
"first-order": u"\uf2b0",
|
||||
"flag": u"\uf024",
|
||||
"flag-checkered": u"\uf11e",
|
||||
"flag-o": u"\uf11d",
|
||||
"flash (alias)": u"\uf0e7",
|
||||
"flask": u"\uf0c3",
|
||||
"flickr": u"\uf16e",
|
||||
"floppy-o": u"\uf0c7",
|
||||
"folder": u"\uf07b",
|
||||
"folder-o": u"\uf114",
|
||||
"folder-open": u"\uf07c",
|
||||
"folder-open-o": u"\uf115",
|
||||
"font": u"\uf031",
|
||||
"font-awesome": u"\uf2b4",
|
||||
"fonticons": u"\uf280",
|
||||
"fort-awesome": u"\uf286",
|
||||
"forumbee": u"\uf211",
|
||||
"forward": u"\uf04e",
|
||||
"foursquare": u"\uf180",
|
||||
"frown-o": u"\uf119",
|
||||
"futbol-o": u"\uf1e3",
|
||||
"gamepad": u"\uf11b",
|
||||
"gavel": u"\uf0e3",
|
||||
"gbp": u"\uf154",
|
||||
"ge (alias)": u"\uf1d1",
|
||||
"gear (alias)": u"\uf013",
|
||||
"gears (alias)": u"\uf085",
|
||||
"genderless": u"\uf22d",
|
||||
"get-pocket": u"\uf265",
|
||||
"gg": u"\uf260",
|
||||
"gg-circle": u"\uf261",
|
||||
"gift": u"\uf06b",
|
||||
"git": u"\uf1d3",
|
||||
"git-square": u"\uf1d2",
|
||||
"github": u"\uf09b",
|
||||
"github-alt": u"\uf113",
|
||||
"github-square": u"\uf092",
|
||||
"gitlab": u"\uf296",
|
||||
"gittip (alias)": u"\uf184",
|
||||
"glass": u"\uf000",
|
||||
"glide": u"\uf2a5",
|
||||
"glide-g": u"\uf2a6",
|
||||
"globe": u"\uf0ac",
|
||||
"google": u"\uf1a0",
|
||||
"google-plus": u"\uf0d5",
|
||||
"google-plus-circle (alias)": u"\uf2b3",
|
||||
"google-plus-official": u"\uf2b3",
|
||||
"google-plus-square": u"\uf0d4",
|
||||
"google-wallet": u"\uf1ee",
|
||||
"graduation-cap": u"\uf19d",
|
||||
"gratipay": u"\uf184",
|
||||
"group (alias)": u"\uf0c0",
|
||||
"h-square": u"\uf0fd",
|
||||
"hacker-news": u"\uf1d4",
|
||||
"hand-grab-o (alias)": u"\uf255",
|
||||
"hand-lizard-o": u"\uf258",
|
||||
"hand-o-down": u"\uf0a7",
|
||||
"hand-o-left": u"\uf0a5",
|
||||
"hand-o-right": u"\uf0a4",
|
||||
"hand-o-up": u"\uf0a6",
|
||||
"hand-paper-o": u"\uf256",
|
||||
"hand-peace-o": u"\uf25b",
|
||||
"hand-pointer-o": u"\uf25a",
|
||||
"hand-rock-o": u"\uf255",
|
||||
"hand-scissors-o": u"\uf257",
|
||||
"hand-spock-o": u"\uf259",
|
||||
"hand-stop-o (alias)": u"\uf256",
|
||||
"hard-of-hearing (alias)": u"\uf2a4",
|
||||
"hashtag": u"\uf292",
|
||||
"hdd-o": u"\uf0a0",
|
||||
"header": u"\uf1dc",
|
||||
"headphones": u"\uf025",
|
||||
"heart": u"\uf004",
|
||||
"heart-o": u"\uf08a",
|
||||
"heartbeat": u"\uf21e",
|
||||
"history": u"\uf1da",
|
||||
"home": u"\uf015",
|
||||
"hospital-o": u"\uf0f8",
|
||||
"hotel (alias)": u"\uf236",
|
||||
"hourglass": u"\uf254",
|
||||
"hourglass-1 (alias)": u"\uf251",
|
||||
"hourglass-2 (alias)": u"\uf252",
|
||||
"hourglass-3 (alias)": u"\uf253",
|
||||
"hourglass-end": u"\uf253",
|
||||
"hourglass-half": u"\uf252",
|
||||
"hourglass-o": u"\uf250",
|
||||
"hourglass-start": u"\uf251",
|
||||
"houzz": u"\uf27c",
|
||||
"html5": u"\uf13b",
|
||||
"i-cursor": u"\uf246",
|
||||
"ils": u"\uf20b",
|
||||
"image (alias)": u"\uf03e",
|
||||
"inbox": u"\uf01c",
|
||||
"indent": u"\uf03c",
|
||||
"industry": u"\uf275",
|
||||
"info": u"\uf129",
|
||||
"info-circle": u"\uf05a",
|
||||
"inr": u"\uf156",
|
||||
"instagram": u"\uf16d",
|
||||
"institution (alias)": u"\uf19c",
|
||||
"internet-explorer": u"\uf26b",
|
||||
"intersex (alias)": u"\uf224",
|
||||
"ioxhost": u"\uf208",
|
||||
"italic": u"\uf033",
|
||||
"joomla": u"\uf1aa",
|
||||
"jpy": u"\uf157",
|
||||
"jsfiddle": u"\uf1cc",
|
||||
"key": u"\uf084",
|
||||
"keyboard-o": u"\uf11c",
|
||||
"krw": u"\uf159",
|
||||
"language": u"\uf1ab",
|
||||
"laptop": u"\uf109",
|
||||
"lastfm": u"\uf202",
|
||||
"lastfm-square": u"\uf203",
|
||||
"leaf": u"\uf06c",
|
||||
"leanpub": u"\uf212",
|
||||
"legal (alias)": u"\uf0e3",
|
||||
"lemon-o": u"\uf094",
|
||||
"level-down": u"\uf149",
|
||||
"level-up": u"\uf148",
|
||||
"life-bouy (alias)": u"\uf1cd",
|
||||
"life-buoy (alias)": u"\uf1cd",
|
||||
"life-ring": u"\uf1cd",
|
||||
"life-saver (alias)": u"\uf1cd",
|
||||
"lightbulb-o": u"\uf0eb",
|
||||
"line-chart": u"\uf201",
|
||||
"link": u"\uf0c1",
|
||||
"linkedin": u"\uf0e1",
|
||||
"linkedin-square": u"\uf08c",
|
||||
"linux": u"\uf17c",
|
||||
"list": u"\uf03a",
|
||||
"list-alt": u"\uf022",
|
||||
"list-ol": u"\uf0cb",
|
||||
"list-ul": u"\uf0ca",
|
||||
"location-arrow": u"\uf124",
|
||||
"lock": u"\uf023",
|
||||
"long-arrow-down": u"\uf175",
|
||||
"long-arrow-left": u"\uf177",
|
||||
"long-arrow-right": u"\uf178",
|
||||
"long-arrow-up": u"\uf176",
|
||||
"low-vision": u"\uf2a8",
|
||||
"magic": u"\uf0d0",
|
||||
"magnet": u"\uf076",
|
||||
"mail-forward (alias)": u"\uf064",
|
||||
"mail-reply (alias)": u"\uf112",
|
||||
"mail-reply-all (alias)": u"\uf122",
|
||||
"male": u"\uf183",
|
||||
"map": u"\uf279",
|
||||
"map-marker": u"\uf041",
|
||||
"map-o": u"\uf278",
|
||||
"map-pin": u"\uf276",
|
||||
"map-signs": u"\uf277",
|
||||
"mars": u"\uf222",
|
||||
"mars-double": u"\uf227",
|
||||
"mars-stroke": u"\uf229",
|
||||
"mars-stroke-h": u"\uf22b",
|
||||
"mars-stroke-v": u"\uf22a",
|
||||
"maxcdn": u"\uf136",
|
||||
"meanpath": u"\uf20c",
|
||||
"medium": u"\uf23a",
|
||||
"medkit": u"\uf0fa",
|
||||
"meh-o": u"\uf11a",
|
||||
"mercury": u"\uf223",
|
||||
"microphone": u"\uf130",
|
||||
"microphone-slash": u"\uf131",
|
||||
"minus": u"\uf068",
|
||||
"minus-circle": u"\uf056",
|
||||
"minus-square": u"\uf146",
|
||||
"minus-square-o": u"\uf147",
|
||||
"mixcloud": u"\uf289",
|
||||
"mobile": u"\uf10b",
|
||||
"mobile-phone (alias)": u"\uf10b",
|
||||
"modx": u"\uf285",
|
||||
"money": u"\uf0d6",
|
||||
"moon-o": u"\uf186",
|
||||
"mortar-board (alias)": u"\uf19d",
|
||||
"motorcycle": u"\uf21c",
|
||||
"mouse-pointer": u"\uf245",
|
||||
"music": u"\uf001",
|
||||
"navicon (alias)": u"\uf0c9",
|
||||
"neuter": u"\uf22c",
|
||||
"newspaper-o": u"\uf1ea",
|
||||
"object-group": u"\uf247",
|
||||
"object-ungroup": u"\uf248",
|
||||
"odnoklassniki": u"\uf263",
|
||||
"odnoklassniki-square": u"\uf264",
|
||||
"opencart": u"\uf23d",
|
||||
"openid": u"\uf19b",
|
||||
"opera": u"\uf26a",
|
||||
"optin-monster": u"\uf23c",
|
||||
"outdent": u"\uf03b",
|
||||
"pagelines": u"\uf18c",
|
||||
"paint-brush": u"\uf1fc",
|
||||
"paper-plane": u"\uf1d8",
|
||||
"paper-plane-o": u"\uf1d9",
|
||||
"paperclip": u"\uf0c6",
|
||||
"paragraph": u"\uf1dd",
|
||||
"paste (alias)": u"\uf0ea",
|
||||
"pause": u"\uf04c",
|
||||
"pause-circle": u"\uf28b",
|
||||
"pause-circle-o": u"\uf28c",
|
||||
"paw": u"\uf1b0",
|
||||
"paypal": u"\uf1ed",
|
||||
"pencil": u"\uf040",
|
||||
"pencil-square": u"\uf14b",
|
||||
"pencil-square-o": u"\uf044",
|
||||
"percent": u"\uf295",
|
||||
"phone": u"\uf095",
|
||||
"phone-square": u"\uf098",
|
||||
"photo (alias)": u"\uf03e",
|
||||
"picture-o": u"\uf03e",
|
||||
"pie-chart": u"\uf200",
|
||||
"pied-piper": u"\uf2ae",
|
||||
"pied-piper-alt": u"\uf1a8",
|
||||
"pied-piper-pp": u"\uf1a7",
|
||||
"pinterest": u"\uf0d2",
|
||||
"pinterest-p": u"\uf231",
|
||||
"pinterest-square": u"\uf0d3",
|
||||
"plane": u"\uf072",
|
||||
"play": u"\uf04b",
|
||||
"play-circle": u"\uf144",
|
||||
"play-circle-o": u"\uf01d",
|
||||
"plug": u"\uf1e6",
|
||||
"plus": u"\uf067",
|
||||
"plus-circle": u"\uf055",
|
||||
"plus-square": u"\uf0fe",
|
||||
"plus-square-o": u"\uf196",
|
||||
"power-off": u"\uf011",
|
||||
"print": u"\uf02f",
|
||||
"product-hunt": u"\uf288",
|
||||
"puzzle-piece": u"\uf12e",
|
||||
"qq": u"\uf1d6",
|
||||
"qrcode": u"\uf029",
|
||||
"question": u"\uf128",
|
||||
"question-circle": u"\uf059",
|
||||
"question-circle-o": u"\uf29c",
|
||||
"quote-left": u"\uf10d",
|
||||
"quote-right": u"\uf10e",
|
||||
"ra (alias)": u"\uf1d0",
|
||||
"random": u"\uf074",
|
||||
"rebel": u"\uf1d0",
|
||||
"recycle": u"\uf1b8",
|
||||
"reddit": u"\uf1a1",
|
||||
"reddit-alien": u"\uf281",
|
||||
"reddit-square": u"\uf1a2",
|
||||
"refresh": u"\uf021",
|
||||
"registered": u"\uf25d",
|
||||
"remove (alias)": u"\uf00d",
|
||||
"renren": u"\uf18b",
|
||||
"reorder (alias)": u"\uf0c9",
|
||||
"repeat": u"\uf01e",
|
||||
"reply": u"\uf112",
|
||||
"reply-all": u"\uf122",
|
||||
"resistance (alias)": u"\uf1d0",
|
||||
"retweet": u"\uf079",
|
||||
"rmb (alias)": u"\uf157",
|
||||
"road": u"\uf018",
|
||||
"rocket": u"\uf135",
|
||||
"rotate-left (alias)": u"\uf0e2",
|
||||
"rotate-right (alias)": u"\uf01e",
|
||||
"rouble (alias)": u"\uf158",
|
||||
"rss": u"\uf09e",
|
||||
"rss-square": u"\uf143",
|
||||
"rub": u"\uf158",
|
||||
"ruble (alias)": u"\uf158",
|
||||
"rupee (alias)": u"\uf156",
|
||||
"safari": u"\uf267",
|
||||
"save (alias)": u"\uf0c7",
|
||||
"scissors": u"\uf0c4",
|
||||
"scribd": u"\uf28a",
|
||||
"search": u"\uf002",
|
||||
"search-minus": u"\uf010",
|
||||
"search-plus": u"\uf00e",
|
||||
"sellsy": u"\uf213",
|
||||
"send (alias)": u"\uf1d8",
|
||||
"send-o (alias)": u"\uf1d9",
|
||||
"server": u"\uf233",
|
||||
"share": u"\uf064",
|
||||
"share-alt": u"\uf1e0",
|
||||
"share-alt-square": u"\uf1e1",
|
||||
"share-square": u"\uf14d",
|
||||
"share-square-o": u"\uf045",
|
||||
"shekel (alias)": u"\uf20b",
|
||||
"sheqel (alias)": u"\uf20b",
|
||||
"shield": u"\uf132",
|
||||
"ship": u"\uf21a",
|
||||
"shirtsinbulk": u"\uf214",
|
||||
"shopping-bag": u"\uf290",
|
||||
"shopping-basket": u"\uf291",
|
||||
"shopping-cart": u"\uf07a",
|
||||
"sign-in": u"\uf090",
|
||||
"sign-language": u"\uf2a7",
|
||||
"sign-out": u"\uf08b",
|
||||
"signal": u"\uf012",
|
||||
"signing (alias)": u"\uf2a7",
|
||||
"simplybuilt": u"\uf215",
|
||||
"sitemap": u"\uf0e8",
|
||||
"skyatlas": u"\uf216",
|
||||
"skype": u"\uf17e",
|
||||
"slack": u"\uf198",
|
||||
"sliders": u"\uf1de",
|
||||
"slideshare": u"\uf1e7",
|
||||
"smile-o": u"\uf118",
|
||||
"snapchat": u"\uf2ab",
|
||||
"snapchat-ghost": u"\uf2ac",
|
||||
"snapchat-square": u"\uf2ad",
|
||||
"soccer-ball-o (alias)": u"\uf1e3",
|
||||
"sort": u"\uf0dc",
|
||||
"sort-alpha-asc": u"\uf15d",
|
||||
"sort-alpha-desc": u"\uf15e",
|
||||
"sort-amount-asc": u"\uf160",
|
||||
"sort-amount-desc": u"\uf161",
|
||||
"sort-asc": u"\uf0de",
|
||||
"sort-desc": u"\uf0dd",
|
||||
"sort-down (alias)": u"\uf0dd",
|
||||
"sort-numeric-asc": u"\uf162",
|
||||
"sort-numeric-desc": u"\uf163",
|
||||
"sort-up (alias)": u"\uf0de",
|
||||
"soundcloud": u"\uf1be",
|
||||
"space-shuttle": u"\uf197",
|
||||
"spinner": u"\uf110",
|
||||
"spoon": u"\uf1b1",
|
||||
"spotify": u"\uf1bc",
|
||||
"square": u"\uf0c8",
|
||||
"square-o": u"\uf096",
|
||||
"stack-exchange": u"\uf18d",
|
||||
"stack-overflow": u"\uf16c",
|
||||
"star": u"\uf005",
|
||||
"star-half": u"\uf089",
|
||||
"star-half-empty (alias)": u"\uf123",
|
||||
"star-half-full (alias)": u"\uf123",
|
||||
"star-half-o": u"\uf123",
|
||||
"star-o": u"\uf006",
|
||||
"steam": u"\uf1b6",
|
||||
"steam-square": u"\uf1b7",
|
||||
"step-backward": u"\uf048",
|
||||
"step-forward": u"\uf051",
|
||||
"stethoscope": u"\uf0f1",
|
||||
"sticky-note": u"\uf249",
|
||||
"sticky-note-o": u"\uf24a",
|
||||
"stop": u"\uf04d",
|
||||
"stop-circle": u"\uf28d",
|
||||
"stop-circle-o": u"\uf28e",
|
||||
"street-view": u"\uf21d",
|
||||
"strikethrough": u"\uf0cc",
|
||||
"stumbleupon": u"\uf1a4",
|
||||
"stumbleupon-circle": u"\uf1a3",
|
||||
"subscript": u"\uf12c",
|
||||
"subway": u"\uf239",
|
||||
"suitcase": u"\uf0f2",
|
||||
"sun-o": u"\uf185",
|
||||
"superscript": u"\uf12b",
|
||||
"support (alias)": u"\uf1cd",
|
||||
"table": u"\uf0ce",
|
||||
"tablet": u"\uf10a",
|
||||
"tachometer": u"\uf0e4",
|
||||
"tag": u"\uf02b",
|
||||
"tags": u"\uf02c",
|
||||
"tasks": u"\uf0ae",
|
||||
"taxi": u"\uf1ba",
|
||||
"television": u"\uf26c",
|
||||
"tencent-weibo": u"\uf1d5",
|
||||
"terminal": u"\uf120",
|
||||
"text-height": u"\uf034",
|
||||
"text-width": u"\uf035",
|
||||
"th": u"\uf00a",
|
||||
"th-large": u"\uf009",
|
||||
"th-list": u"\uf00b",
|
||||
"themeisle": u"\uf2b2",
|
||||
"thumb-tack": u"\uf08d",
|
||||
"thumbs-down": u"\uf165",
|
||||
"thumbs-o-down": u"\uf088",
|
||||
"thumbs-o-up": u"\uf087",
|
||||
"thumbs-up": u"\uf164",
|
||||
"ticket": u"\uf145",
|
||||
"times": u"\uf00d",
|
||||
"times-circle": u"\uf057",
|
||||
"times-circle-o": u"\uf05c",
|
||||
"tint": u"\uf043",
|
||||
"toggle-down (alias)": u"\uf150",
|
||||
"toggle-left (alias)": u"\uf191",
|
||||
"toggle-off": u"\uf204",
|
||||
"toggle-on": u"\uf205",
|
||||
"toggle-right (alias)": u"\uf152",
|
||||
"toggle-up (alias)": u"\uf151",
|
||||
"trademark": u"\uf25c",
|
||||
"train": u"\uf238",
|
||||
"transgender": u"\uf224",
|
||||
"transgender-alt": u"\uf225",
|
||||
"trash": u"\uf1f8",
|
||||
"trash-o": u"\uf014",
|
||||
"tree": u"\uf1bb",
|
||||
"trello": u"\uf181",
|
||||
"tripadvisor": u"\uf262",
|
||||
"trophy": u"\uf091",
|
||||
"truck": u"\uf0d1",
|
||||
"try": u"\uf195",
|
||||
"tty": u"\uf1e4",
|
||||
"tumblr": u"\uf173",
|
||||
"tumblr-square": u"\uf174",
|
||||
"turkish-lira (alias)": u"\uf195",
|
||||
"tv (alias)": u"\uf26c",
|
||||
"twitch": u"\uf1e8",
|
||||
"twitter": u"\uf099",
|
||||
"twitter-square": u"\uf081",
|
||||
"umbrella": u"\uf0e9",
|
||||
"underline": u"\uf0cd",
|
||||
"undo": u"\uf0e2",
|
||||
"universal-access": u"\uf29a",
|
||||
"university": u"\uf19c",
|
||||
"unlink (alias)": u"\uf127",
|
||||
"unlock": u"\uf09c",
|
||||
"unlock-alt": u"\uf13e",
|
||||
"unsorted (alias)": u"\uf0dc",
|
||||
"upload": u"\uf093",
|
||||
"usb": u"\uf287",
|
||||
"usd": u"\uf155",
|
||||
"user": u"\uf007",
|
||||
"user-md": u"\uf0f0",
|
||||
"user-plus": u"\uf234",
|
||||
"user-secret": u"\uf21b",
|
||||
"user-times": u"\uf235",
|
||||
"users": u"\uf0c0",
|
||||
"venus": u"\uf221",
|
||||
"venus-double": u"\uf226",
|
||||
"venus-mars": u"\uf228",
|
||||
"viacoin": u"\uf237",
|
||||
"viadeo": u"\uf2a9",
|
||||
"viadeo-square": u"\uf2aa",
|
||||
"video-camera": u"\uf03d",
|
||||
"vimeo": u"\uf27d",
|
||||
"vimeo-square": u"\uf194",
|
||||
"vine": u"\uf1ca",
|
||||
"vk": u"\uf189",
|
||||
"volume-control-phone": u"\uf2a0",
|
||||
"volume-down": u"\uf027",
|
||||
"volume-off": u"\uf026",
|
||||
"volume-up": u"\uf028",
|
||||
"warning (alias)": u"\uf071",
|
||||
"wechat (alias)": u"\uf1d7",
|
||||
"weibo": u"\uf18a",
|
||||
"weixin": u"\uf1d7",
|
||||
"whatsapp": u"\uf232",
|
||||
"wheelchair": u"\uf193",
|
||||
"wheelchair-alt": u"\uf29b",
|
||||
"wifi": u"\uf1eb",
|
||||
"wikipedia-w": u"\uf266",
|
||||
"windows": u"\uf17a",
|
||||
"won (alias)": u"\uf159",
|
||||
"wordpress": u"\uf19a",
|
||||
"wpbeginner": u"\uf297",
|
||||
"wpforms": u"\uf298",
|
||||
"wrench": u"\uf0ad",
|
||||
"xing": u"\uf168",
|
||||
"xing-square": u"\uf169",
|
||||
"y-combinator": u"\uf23b",
|
||||
"y-combinator-square (alias)": u"\uf1d4",
|
||||
"yahoo": u"\uf19e",
|
||||
"yc (alias)": u"\uf23b",
|
||||
"yc-square (alias)": u"\uf1d4",
|
||||
"yelp": u"\uf1e9",
|
||||
"yen (alias)": u"\uf157",
|
||||
"yoast": u"\uf2b1",
|
||||
"youtube": u"\uf167",
|
||||
"youtube-play": u"\uf16a",
|
||||
"youtube-square": u"\uf166"
|
||||
}
|
||||
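The `tags` mapping is simply icon name to unicode code point; combined with the FontAwesome webfont installed by `install_fonts` in `app.py`, any Qt widget can display a glyph. A small hedged sketch:

```python
import sys

from Qt import QtGui, QtWidgets  # vendored as .vendor.Qt inside the tool
from pype.tools.pyblish_pype import app, awesome

qt_app = QtWidgets.QApplication(sys.argv)
app.install_fonts()  # makes the "FontAwesome" family available to QFont

button = QtWidgets.QPushButton(awesome.tags["check"])
button.setFont(QtGui.QFont("FontAwesome"))
button.show()
qt_app.exec_()
```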
14
pype/tools/pyblish_pype/compat.py
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
import os
|
||||
|
||||
|
||||
def __windows_taskbar_compat():
|
||||
"""Enable icon and taskbar grouping for Windows 7+"""
|
||||
|
||||
import ctypes
|
||||
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(
|
||||
u"pyblish_pype")
|
||||
|
||||
|
||||
def init():
|
||||
if os.name == "nt":
|
||||
__windows_taskbar_compat()
|
||||
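`init()` is invoked from `app.show()` before the window is created; on Windows it gives the GUI its own taskbar icon and grouping instead of inheriting python.exe's. It is safe to call manually and is a no-op on other platforms:

```python
from pype.tools.pyblish_pype import compat

compat.init()  # only does something on Windows (os.name == "nt")
```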