Merge branch 'develop' into 3.0/poetry

This commit is contained in:
Ondrej Samohel 2021-02-05 19:49:39 +01:00
commit dda74bbf05
No known key found for this signature in database
GPG key ID: 02376E18990A97C6
35 changed files with 430 additions and 284 deletions

View file

@@ -31,6 +31,12 @@ def settings(dev=False):
PypeCommands().launch_settings_gui(dev)
@main.command()
def standalonepublisher():
"""Show Pype Standalone publisher UI."""
PypeCommands().launch_standalone_publisher()
@main.command()
@click.option("-d", "--debug",
is_flag=True, help=("Run pype tray in debug mode"))
@@ -88,43 +94,18 @@ def eventserver(debug,
"""
if debug:
os.environ['PYPE_DEBUG'] = "3"
# map eventserver options
# TODO: switch eventserver to click, normalize option names
args = []
if ftrack_url:
args.append('-ftrackurl')
args.append(ftrack_url)
if ftrack_user:
args.append('-ftrackuser')
args.append(ftrack_user)
if ftrack_api_key:
args.append('-ftrackapikey')
args.append(ftrack_api_key)
if ftrack_events_path:
args.append('-ftrackeventpaths')
args.append(ftrack_events_path)
if no_stored_credentials:
args.append('-noloadcred')
if store_credentials:
args.append('-storecred')
if legacy:
args.append('-legacy')
if clockify_api_key:
args.append('-clockifyapikey')
args.append(clockify_api_key)
if clockify_workspace:
args.append('-clockifyworkspace')
args.append(clockify_workspace)
PypeCommands().launch_eventservercli(args)
PypeCommands().launch_eventservercli(
ftrack_url,
ftrack_user,
ftrack_api_key,
ftrack_events_path,
no_stored_credentials,
store_credentials,
legacy,
clockify_api_key,
clockify_workspace
)
@main.command()

View file

@@ -12,6 +12,7 @@ from typing import Dict, List, Optional
from avalon import api, blender, pipeline
import bpy
import pype.hosts.blender.api.plugin as plugin
from pype.lib import get_creator_by_name
class BlendLayoutLoader(plugin.AssetLoader):
@ -24,6 +25,9 @@ class BlendLayoutLoader(plugin.AssetLoader):
icon = "code-fork"
color = "orange"
animation_creator_name = "CreateAnimation"
setdress_creator_name = "CreateSetDress"
def _remove(self, objects, obj_container):
for obj in list(objects):
if obj.type == 'ARMATURE':
@@ -320,7 +324,7 @@ class UnrealLayoutLoader(plugin.AssetLoader):
for c in bpy.data.collections:
metadata = c.get('avalon')
if metadata:
if metadata:
print("metadata.get('id')")
print(metadata.get('id'))
if metadata and metadata.get('id') == 'pyblish.avalon.instance':
@ -375,7 +379,7 @@ class UnrealLayoutLoader(plugin.AssetLoader):
)
def _process(
self, libpath, layout_container, container_name, representation,
self, libpath, layout_container, container_name, representation,
actions, parent
):
with open(libpath, "r") as fp:
@ -428,6 +432,12 @@ class UnrealLayoutLoader(plugin.AssetLoader):
objects_to_transform = []
creator_plugin = get_creator_by_name(self.animation_creator_name)
if not creator_plugin:
raise ValueError("Creator plugin \"{}\" was not found.".format(
self.animation_creator_name
))
if family == 'rig':
for o in objects:
if o.type == 'ARMATURE':
@ -436,9 +446,9 @@ class UnrealLayoutLoader(plugin.AssetLoader):
o.select_set(True)
asset = api.Session["AVALON_ASSET"]
c = api.create(
creator_plugin,
name="animation_" + element_collection.name,
asset=asset,
family="animation",
options={"useSelection": True},
data={"dependencies": representation})
scene.collection.children.unlink(c)
@ -505,15 +515,20 @@ class UnrealLayoutLoader(plugin.AssetLoader):
# Create a setdress subset to contain all the animation for all
# the rigs in the layout
creator_plugin = get_creator_by_name(self.setdress_creator_name)
if not creator_plugin:
raise ValueError("Creator plugin \"{}\" was not found.".format(
self.setdress_creator_name
))
parent = api.create(
creator_plugin,
name="animation",
asset=api.Session["AVALON_ASSET"],
family="setdress",
options={"useSelection": True},
data={"dependencies": str(context["representation"]["_id"])})
layout_collection = self._process(
libpath, layout_container, container_name,
libpath, layout_container, container_name,
str(context["representation"]["_id"]), None, parent)
container_metadata["obj_container"] = layout_collection
@ -606,15 +621,21 @@ class UnrealLayoutLoader(plugin.AssetLoader):
bpy.data.collections.remove(obj_container)
creator_plugin = get_creator_by_name(self.setdress_creator_name)
if not creator_plugin:
raise ValueError("Creator plugin \"{}\" was not found.".format(
self.setdress_creator_name
))
parent = api.create(
creator_plugin,
name="animation",
asset=api.Session["AVALON_ASSET"],
family="setdress",
options={"useSelection": True},
data={"dependencies": str(representation["_id"])})
layout_collection = self._process(
libpath, layout_container, container_name,
libpath, layout_container, container_name,
str(representation["_id"]), actions, parent)
layout_container_metadata["obj_container"] = layout_collection

View file

@@ -220,8 +220,7 @@ class ReferenceLoader(api.Loader):
"{}:*".format(members[0].split(":")[0]), type="AlembicNode"
)
if alembic_nodes:
for attr in alembic_attrs:
value = alembic_data[attr]
for attr, value in alembic_data.items():
cmds.setAttr("{}.{}".format(alembic_nodes[0], attr), value)
# Fix PLN-40 for older containers created with Avalon that had the

View file

@ -3,6 +3,7 @@ from avalon import api, maya
from maya import cmds
import os
from pype.api import get_project_settings
from pype.lib import get_creator_by_name
class ReferenceLoader(pype.hosts.maya.api.plugin.ReferenceLoader):
@ -25,6 +26,9 @@ class ReferenceLoader(pype.hosts.maya.api.plugin.ReferenceLoader):
icon = "code-fork"
color = "orange"
# Name of creator class that will be used to create animation instance
animation_creator_name = "CreateAnimation"
def process_reference(self, context, name, namespace, options):
import maya.cmds as cmds
from avalon import maya
@ -135,10 +139,13 @@ class ReferenceLoader(pype.hosts.maya.api.plugin.ReferenceLoader):
self.log.info("Creating subset: {}".format(namespace))
# Create the animation instance
creator_plugin = get_creator_by_name(self.animation_creator_name)
with maya.maintained_selection():
cmds.select([output, controls] + roots, noExpand=True)
api.create(name=namespace,
asset=asset,
family="animation",
options={"useSelection": True},
data={"dependencies": dependency})
api.create(
creator_plugin,
name=namespace,
asset=asset,
options={"useSelection": True},
data={"dependencies": dependency}
)

View file

@@ -43,32 +43,6 @@ def preserve_trim(node):
"{}".format(script_start))
def loader_shift(node, frame, relative=True):
"""Shift global in time by i preserving duration
This moves the loader by i frames preserving global duration. When relative
is False it will shift the global in to the start frame.
Args:
loader (tool): The fusion loader tool.
frame (int): The amount of frames to move.
relative (bool): When True the shift is relative, else the shift will
change the global in to frame.
Returns:
int: The resulting relative frame change (how much it moved)
"""
# working script frame range
script_start = nuke.root()["first_frame"].value()
if relative:
node['frame_mode'].setValue("start at")
node['frame'].setValue(str(frame))
return int(script_start)
def add_review_presets_config():
returning = {
"families": list(),
@ -114,6 +88,8 @@ class LoadMov(api.Loader):
icon = "code-fork"
color = "orange"
script_start = nuke.root()["first_frame"].value()
def load(self, context, name, namespace, data):
from avalon.nuke import (
containerise,
@ -142,8 +118,6 @@ class LoadMov(api.Loader):
context["representation"]["_id"]
# create handles offset (only to last, because of mov)
last += handle_start + handle_end
# offset should be with handles so it match orig frame range
offset_frame = orig_first - handle_start
# Fallback to asset name when namespace is None
if namespace is None:
@ -171,13 +145,14 @@ class LoadMov(api.Loader):
)
read_node["file"].setValue(file)
loader_shift(read_node, first, relative=True)
read_node["origfirst"].setValue(first)
read_node["first"].setValue(first)
read_node["origlast"].setValue(last)
read_node["last"].setValue(last)
read_node["frame_mode"].setValue("start at")
read_node["frame"].setValue(str(offset_frame))
# start at script start
read_node['frame_mode'].setValue("start at")
read_node['frame'].setValue(str(self.script_start))
if colorspace:
read_node["colorspace"].setValue(str(colorspace))
@ -233,9 +208,9 @@ class LoadMov(api.Loader):
update_container
)
node = nuke.toNode(container['objectName'])
read_node = nuke.toNode(container['objectName'])
assert node.Class() == "Read", "Must be Read"
assert read_node.Class() == "Read", "Must be Read"
file = self.fname
@ -280,7 +255,7 @@ class LoadMov(api.Loader):
"Missing start frame for updated version"
"assuming starts at frame 0 for: "
"{} ({})").format(
node['name'].value(), representation))
read_node['name'].value(), representation))
first = 0
# fix handle start and end if none are available
@ -290,30 +265,30 @@ class LoadMov(api.Loader):
# create handles offset (only to last, because of mov)
last += handle_start + handle_end
# offset should be with handles so it match orig frame range
offset_frame = orig_first - handle_start
# Update the loader's path whilst preserving some values
with preserve_trim(node):
node["file"].setValue(file)
self.log.info("__ node['file']: {}".format(node["file"].value()))
with preserve_trim(read_node):
read_node["file"].setValue(file)
self.log.info("__ node['file']: {}".format(
read_node["file"].value()))
# Set the global in to the start frame of the sequence
loader_shift(node, first, relative=True)
node["origfirst"].setValue(first)
node["first"].setValue(first)
node["origlast"].setValue(last)
node["last"].setValue(last)
node["frame_mode"].setValue("start at")
node["frame"].setValue(str(offset_frame))
# Set the global in to the start frame of the sequence
read_node["origfirst"].setValue(first)
read_node["first"].setValue(first)
read_node["origlast"].setValue(last)
read_node["last"].setValue(last)
if colorspace:
node["colorspace"].setValue(str(colorspace))
# start at script start
read_node['frame_mode'].setValue("start at")
read_node['frame'].setValue(str(self.script_start))
preset_clrsp = get_imageio_input_colorspace(file)
if colorspace:
read_node["colorspace"].setValue(str(colorspace))
if preset_clrsp is not None:
node["colorspace"].setValue(preset_clrsp)
preset_clrsp = get_imageio_input_colorspace(file)
if preset_clrsp is not None:
read_node["colorspace"].setValue(preset_clrsp)
updated_dict = {}
updated_dict.update({
@ -332,13 +307,13 @@ class LoadMov(api.Loader):
# change color of node
if version.get("name") not in [max_version]:
node["tile_color"].setValue(int("0xd84f20ff", 16))
read_node["tile_color"].setValue(int("0xd84f20ff", 16))
else:
node["tile_color"].setValue(int("0x4ecd25ff", 16))
read_node["tile_color"].setValue(int("0x4ecd25ff", 16))
# Update the imprinted representation
update_container(
node, updated_dict
read_node, updated_dict
)
self.log.info("udated to version: {}".format(version.get("name")))

View file

@ -18,6 +18,9 @@ class ExtractThumbnail(pype.api.Extractor):
families = ["review"]
hosts = ["nuke"]
# presets
nodes = {}
def process(self, instance):
if "render.farm" in instance.data["families"]:
return
@ -164,7 +167,8 @@ class ExtractThumbnail(pype.api.Extractor):
if ipn_orig:
nuke.nodeCopy('%clipboard%')
[n.setSelected(False) for n in nuke.selectedNodes()] # Deselect all
# Deselect all
[n.setSelected(False) for n in nuke.selectedNodes()]
nuke.nodePaste('%clipboard%')

View file

@ -14,6 +14,7 @@ site.addsitedir(
from .terminal import Terminal
from .execute import (
get_pype_execute_args,
execute,
run_subprocess
)
@ -58,7 +59,9 @@ from .avalon_context import (
save_workfile_data_to_doc,
get_workfile_doc,
BuildWorkfile
BuildWorkfile,
get_creator_by_name
)
from .applications import (
@ -112,6 +115,7 @@ from .editorial import (
terminal = Terminal
__all__ = [
"get_pype_execute_args",
"execute",
"run_subprocess",
@ -139,6 +143,8 @@ __all__ = [
"BuildWorkfile",
"get_creator_by_name",
"ApplicationLaunchFailed",
"ApplictionExecutableNotFound",
"ApplicationNotFound",

View file

@@ -1119,3 +1119,30 @@ class BuildWorkfile:
)
return output
def get_creator_by_name(creator_name, case_sensitive=False):
"""Find creator plugin by name.
Args:
creator_name (str): Name of creator class that should be returned.
case_sensitive (bool): Match of creator plugin name is case sensitive.
Set to `False` by default.
Returns:
Creator: Return first matching plugin or `None`.
"""
# Lower input creator name if is not case sensitive
if not case_sensitive:
creator_name = creator_name.lower()
for creator_plugin in avalon.api.discover(avalon.api.Creator):
_creator_name = creator_plugin.__name__
# Lower creator plugin name if is not case sensitive
if not case_sensitive:
_creator_name = _creator_name.lower()
if _creator_name == creator_name:
return creator_plugin
return None

View file

@@ -133,3 +133,33 @@ def run_subprocess(*args, **kwargs):
raise RuntimeError(exc_msg)
return full_output
def get_pype_execute_args(*args):
"""Arguments to run pype command.
Arguments for subprocess when need to spawn new pype process. Which may be
needed when new python process for pype scripts must be executed in build
pype.
## Why is this needed?
Pype executed from code has different executable set to virtual env python
and must have path to script as first argument which is not needed for
build pype.
It is possible to pass any arguments that will be added after pype
executables.
"""
pype_executable = os.environ["PYPE_EXECUTABLE"]
pype_args = [pype_executable]
executable_filename = os.path.basename(pype_executable)
if "python" in executable_filename.lower():
pype_args.append(
os.path.join(os.environ["PYPE_ROOT"], "start.py")
)
if args:
pype_args.extend(args)
return pype_args

View file

@@ -86,11 +86,11 @@ class CreateProjectFolders(BaseAction):
try:
# Get paths based on presets
basic_paths = self.get_path_items(project_folder_structure)
anatomy = Anatomy(project_entity["full_name"])
self.create_folders(basic_paths, project_entity, anatomy)
self.create_folders(basic_paths, project_entity)
self.create_ftrack_entities(basic_paths, project_entity)
except Exception as exc:
self.log.warning("Creating of structure crashed.", exc_info=True)
session.rollback()
return {
"success": False,
@ -220,10 +220,11 @@ class CreateProjectFolders(BaseAction):
output.append(os.path.normpath(os.path.sep.join(clean_items)))
return output
def create_folders(self, basic_paths, project, anatomy):
def create_folders(self, basic_paths, project):
anatomy = Anatomy(project["full_name"])
roots_paths = []
if isinstance(anatomy.roots, dict):
for root in anatomy.roots:
for root in anatomy.roots.values():
roots_paths.append(root.value)
else:
roots_paths.append(anatomy.roots.value)
@ -233,9 +234,14 @@ class CreateProjectFolders(BaseAction):
full_paths = self.compute_paths(basic_paths, project_root)
# Create folders
for path in full_paths:
if os.path.exists(path):
continue
os.makedirs(path.format(project_root=project_root))
full_path = path.format(project_root=project_root)
if os.path.exists(full_path):
self.log.debug(
"Folder already exists: {}".format(full_path)
)
else:
self.log.debug("Creating folder: {}".format(full_path))
os.makedirs(full_path)
def register(session):

View file

@@ -1815,7 +1815,7 @@ class SyncToAvalonEvent(BaseEvent):
# Ftrack's entity_type does not have defined custom attributes
if ent_cust_attrs is None:
continue
ent_cust_attrs = []
for key, values in ent_info["changes"].items():
if key in hier_attrs_keys:
@ -1967,11 +1967,20 @@ class SyncToAvalonEvent(BaseEvent):
cust_attrs, hier_attrs = self.avalon_cust_attrs
# Hierarchical custom attributes preparation ***
hier_attr_key_by_id = {
attr["id"]: attr["key"]
for attr in hier_attrs
}
hier_attr_id_by_key = {
key: attr_id
for attr_id, key in hier_attr_key_by_id.items()
}
if all_keys:
hier_cust_attrs_keys = [
attr["key"] for attr in hier_attrs if (
not attr["key"].startswith("avalon_")
)
key
for key in hier_attr_id_by_key.keys()
if not key.startswith("avalon_")
]
mongo_ftrack_mapping = {}
@ -2077,15 +2086,19 @@ class SyncToAvalonEvent(BaseEvent):
entity_ids_joined = ", ".join([
"\"{}\"".format(id) for id in cust_attrs_ftrack_ids
])
configuration_ids = set()
for key in hier_cust_attrs_keys:
configuration_ids.add(hier_attr_id_by_key[key])
attributes_joined = ", ".join([
"\"{}\"".format(name) for name in hier_cust_attrs_keys
"\"{}\"".format(conf_id) for conf_id in configuration_ids
])
queries = [{
"action": "query",
"expression": (
"select value, entity_id from CustomAttributeValue "
"where entity_id in ({}) and configuration.key in ({})"
"where entity_id in ({}) and configuration_id in ({})"
).format(entity_ids_joined, attributes_joined)
}]
@ -2110,7 +2123,7 @@ class SyncToAvalonEvent(BaseEvent):
if value["value"] is None:
continue
entity_id = value["entity_id"]
key = value["configuration"]["key"]
key = hier_attr_key_by_id[value["configuration_id"]]
entities_dict[entity_id]["hier_attrs"][key] = value["value"]
# Get dictionary with not None hierarchical values to pull to childs

View file

@@ -63,7 +63,7 @@ class FtrackModule(
def get_plugin_paths(self):
"""Ftrack plugin paths."""
return {
"publish": [os.path.join(pype.PLUGINS_DIR, "ftrack", "publish")]
"publish": [os.path.join(FTRACK_MODULE_DIR, "plugins", "publish")]
}
def get_launch_hook_paths(self):

View file

@ -14,6 +14,7 @@ import uuid
import ftrack_api
import pymongo
from pype.lib import get_pype_execute_args
from pype.modules.ftrack.lib import (
credentials,
get_ftrack_url_from_settings
@ -131,8 +132,9 @@ def legacy_server(ftrack_url):
if subproc is None:
if subproc_failed_count < max_fail_count:
args = get_pype_execute_args("run", subproc_path)
subproc = subprocess.Popen(
["python", subproc_path],
args,
stdout=subprocess.PIPE
)
elif subproc_failed_count == max_fail_count:
@ -414,6 +416,56 @@ def main_loop(ftrack_url):
time.sleep(1)
def run_event_server(
ftrack_url,
ftrack_user,
ftrack_api_key,
ftrack_events_path,
no_stored_credentials,
store_credentials,
legacy,
clockify_api_key,
clockify_workspace
):
if not no_stored_credentials:
cred = credentials.get_credentials(ftrack_url)
username = cred.get('username')
api_key = cred.get('api_key')
if clockify_workspace and clockify_api_key:
os.environ["CLOCKIFY_WORKSPACE"] = clockify_workspace
os.environ["CLOCKIFY_API_KEY"] = clockify_api_key
# Check url regex and accessibility
ftrack_url = check_ftrack_url(ftrack_url)
if not ftrack_url:
print('Exiting! < Please enter Ftrack server url >')
return 1
# Validate entered credentials
if not validate_credentials(ftrack_url, username, api_key):
print('Exiting! < Please enter valid credentials >')
return 1
if store_credentials:
credentials.save_credentials(username, api_key, ftrack_url)
# Set Ftrack environments
os.environ["FTRACK_SERVER"] = ftrack_url
os.environ["FTRACK_API_USER"] = username
os.environ["FTRACK_API_KEY"] = api_key
# TODO This won't work probably
if ftrack_events_path:
if isinstance(ftrack_events_path, (list, tuple)):
ftrack_events_path = os.pathsep.join(ftrack_events_path)
os.environ["FTRACK_EVENTS_PATH"] = ftrack_events_path
if legacy:
return legacy_server(ftrack_url)
return main_loop(ftrack_url)
def main(argv):
'''
There are 4 values neccessary for event server:

View file

@ -6,6 +6,7 @@ import threading
import traceback
import subprocess
from pype.api import Logger
from pype.lib import get_pype_execute_args
class SocketThread(threading.Thread):
@ -57,22 +58,15 @@ class SocketThread(threading.Thread):
env = os.environ.copy()
env["PYPE_PROCESS_MONGO_ID"] = str(Logger.mongo_process_id)
executable_args = [
sys.executable
]
if getattr(sys, "frozen", False):
executable_args.append("run")
self.subproc = subprocess.Popen(
[
*executable_args,
self.filepath,
*self.additional_args,
str(self.port)
],
env=env,
stdin=subprocess.PIPE
# Pype executable (with path to start script if not build)
args = get_pype_execute_args(
# Add `run` command
"run",
self.filepath,
*self.additional_args,
str(self.port)
)
self.subproc = subprocess.Popen(args, env=env, stdin=subprocess.PIPE)
# Listen for incoming connections
sock.listen(1)

View file

@@ -1674,9 +1674,18 @@ class SyncEntitiesFactory:
avalon_id
)
)
# Prepare task changes as they have to be stored as one key
final_doc = self.entities_dict[ftrack_id]["final_entity"]
final_doc_tasks = final_doc["data"].pop("tasks", None) or {}
current_doc_tasks = avalon_entity["data"].get("tasks") or {}
if not final_doc_tasks:
update_tasks = True
else:
update_tasks = final_doc_tasks != current_doc_tasks
# check rest of data
data_changes = self.compare_dict(
self.entities_dict[ftrack_id]["final_entity"],
final_doc,
avalon_entity,
ignore_keys[ftrack_id]
)
@ -1686,17 +1695,13 @@ class SyncEntitiesFactory:
self.updates[avalon_id]
)
# double check changes in tasks, some task could be renamed or
# deleted in Ftrack - not captured otherwise
final_entity = self.entities_dict[ftrack_id]["final_entity"]
if final_entity["data"].get("tasks", {}) != \
avalon_entity["data"].get("tasks", {}):
# Add tasks back to final doc object
final_doc["data"]["tasks"] = final_doc_tasks
# Add tasks to updates if there are different
if update_tasks:
if "data" not in self.updates[avalon_id]:
self.updates[avalon_id]["data"] = {}
self.updates[avalon_id]["data"]["tasks"] = (
final_entity["data"]["tasks"]
)
self.updates[avalon_id]["data"]["tasks"] = final_doc_tasks
def synchronize(self):
self.log.debug("* Synchronization begins")
@ -2124,10 +2129,27 @@ class SyncEntitiesFactory:
self.report_items["warning"][msg] = sub_msg
self.log.warning(sub_msg)
return self.compare_dict(
self.entities_dict[self.ft_project_id]["final_entity"],
self.avalon_project
)
# Compare tasks from current project schema and previous project schema
final_doc_data = self.entities_dict[self.ft_project_id]["final_entity"]
final_doc_tasks = final_doc_data["config"].pop("tasks")
current_doc_tasks = self.avalon_project.get("config", {}).get("tasks")
# Update project's tasks if tasks are empty or are not same
if not final_doc_tasks:
update_tasks = True
else:
update_tasks = final_doc_tasks != current_doc_tasks
changes = self.compare_dict(final_doc_data, self.avalon_project)
# Put back tasks data to final entity object
final_doc_data["config"]["tasks"] = final_doc_tasks
# Add tasks updates if tasks changed
if update_tasks:
if "config" not in changes:
changes["config"] = {}
changes["config"]["tasks"] = final_doc_tasks
return changes
def compare_dict(self, dict_new, dict_old, _ignore_keys=[]):
"""

View file

@@ -61,7 +61,7 @@ class ValidateFtrackAttributes(pyblish.api.InstancePlugin):
"Missing FTrack Task entity in context")
host = pyblish.api.current_host()
to_check = self.ftrack_custom_attributes.get(host, {}))
to_check = self.ftrack_custom_attributes.get(host, {})
if not to_check:
self.log.warning("ftrack_attributes preset not found")

View file

@ -1,6 +1,7 @@
import os
import sys
import subprocess
from pype.lib import get_pype_execute_args
from . import PypeModule, ITrayAction
@ -29,13 +30,5 @@ class StandAlonePublishAction(PypeModule, ITrayAction):
self.publish_paths.extend(publish_paths)
def run_standalone_publisher(self):
from pype import tools
standalone_publisher_tool_path = os.path.join(
os.path.dirname(os.path.abspath(tools.__file__)),
"standalonepublish"
)
subprocess.Popen([
sys.executable,
standalone_publisher_tool_path,
os.pathsep.join(self.publish_paths).replace("\\", "/")
])
args = get_pype_execute_args("standalonepublisher")
subprocess.Popen(args, creationflags=subprocess.DETACHED_PROCESS)

View file

@@ -4,10 +4,15 @@ import json
import copy
import tempfile
import pype
import pype.api
import pyblish
from pype.lib import should_decompress, \
get_decompress_dir, decompress
from pype.lib import (
get_pype_execute_args,
should_decompress,
get_decompress_dir,
decompress
)
import shutil
@ -125,7 +130,14 @@ class ExtractBurnin(pype.api.Extractor):
anatomy = instance.context.data["anatomy"]
scriptpath = self.burnin_script_path()
executable = self.python_executable_path()
# Executable args that will execute the script
# [pype executable, *pype script, "run"]
executable_args = get_pype_execute_args("run", scriptpath)
# Environments for script process
env = os.environ.copy()
# pop PYTHONPATH
env.pop("PYTHONPATH", None)
for idx, repre in enumerate(tuple(instance.data["representations"])):
self.log.debug("repre ({}): `{}`".format(idx + 1, repre["name"]))
@ -256,17 +268,13 @@ class ExtractBurnin(pype.api.Extractor):
)
# Prepare subprocess arguments
args = [
"\"{}\"".format(executable),
"\"{}\"".format(scriptpath),
"\"{}\"".format(temporary_json_filepath)
]
subprcs_cmd = " ".join(args)
self.log.debug("Executing: {}".format(subprcs_cmd))
args = list(executable_args)
args.append(temporary_json_filepath)
self.log.debug("Executing: {}".format(" ".join(args)))
# Run burnin script
pype.api.run_subprocess(
subprcs_cmd, shell=True, logger=self.log
args, shell=True, logger=self.log, env=env
)
# Remove the temporary json
@ -812,19 +820,9 @@ class ExtractBurnin(pype.api.Extractor):
def burnin_script_path(self):
"""Return path to python script for burnin processing."""
# TODO maybe convert to Plugin's attribute
# Get script path.
module_path = os.environ["PYPE_ROOT"]
# There can be multiple paths in PYPE_ROOT, in which case
# we just take first one.
if os.pathsep in module_path:
module_path = module_path.split(os.pathsep)[0]
scriptpath = os.path.normpath(
os.path.join(
module_path,
"pype",
pype.PACKAGE_DIR,
"scripts",
"otio_burnin.py"
)
@ -833,17 +831,3 @@ class ExtractBurnin(pype.api.Extractor):
self.log.debug("scriptpath: {}".format(scriptpath))
return scriptpath
def python_executable_path(self):
"""Return path to Python 3 executable."""
# TODO maybe convert to Plugin's attribute
# Get executable.
executable = os.getenv("PYPE_PYTHON_EXE")
# There can be multiple paths in PYPE_PYTHON_EXE, in which case
# we just take first one.
if os.pathsep in executable:
executable = executable.split(os.pathsep)[0]
self.log.debug("executable: {}".format(executable))
return executable

View file

@@ -28,19 +28,17 @@ class PypeCommands:
user_role = "developer"
settings.main(user_role)
def launch_eventservercli(self, args):
from pype.modules import ftrack
from pype.lib import execute
fname = os.path.join(
os.path.dirname(os.path.abspath(ftrack.__file__)),
"ftrack_server",
"event_server_cli.py"
@staticmethod
def launch_eventservercli(*args):
from pype.modules.ftrack.ftrack_server.event_server_cli import (
run_event_server
)
return run_event_server(*args)
return execute([
sys.executable, "-u", fname
])
@staticmethod
def launch_standalone_publisher():
from pype.tools import standalonepublish
standalonepublish.main()
def publish(self, gui, paths):
pass

View file

@ -1,8 +1,10 @@
from .app import (
show,
cli
main,
Window
)
__all__ = (
"main",
"Window"
)
__all__ = [
"show",
"cli"
]

View file

@@ -1,35 +0,0 @@
import os
import sys
import app
import ctypes
import signal
from Qt import QtWidgets, QtGui
from avalon import style
from pype.api import resources
if __name__ == "__main__":
# Allow to change icon of running process in windows taskbar
if os.name == "nt":
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(
u"standalonepublish"
)
qt_app = QtWidgets.QApplication([])
# app.setQuitOnLastWindowClosed(False)
qt_app.setStyleSheet(style.load_stylesheet())
icon = QtGui.QIcon(resources.pype_icon_filepath())
qt_app.setWindowIcon(icon)
def signal_handler(sig, frame):
print("You pressed Ctrl+C. Process ended.")
qt_app.quit()
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
window = app.Window(sys.argv[-1].split(os.pathsep))
window.show()
sys.exit(qt_app.exec_())

View file

@@ -1,7 +1,18 @@
import os
import sys
import ctypes
import signal
from bson.objectid import ObjectId
from Qt import QtWidgets, QtCore
from widgets import AssetWidget, FamilyWidget, ComponentsWidget, ShadowWidget
from Qt import QtWidgets, QtCore, QtGui
from .widgets import (
AssetWidget, FamilyWidget, ComponentsWidget, ShadowWidget
)
from avalon import style
from pype.api import resources
from avalon.api import AvalonMongoDB
from pype.modules import ModulesManager
class Window(QtWidgets.QDialog):
@ -194,3 +205,32 @@ class Window(QtWidgets.QDialog):
data.update(self.widget_components.collect_data())
return data
def main():
# Allow to change icon of running process in windows taskbar
if os.name == "nt":
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(
u"standalonepublish"
)
qt_app = QtWidgets.QApplication([])
# app.setQuitOnLastWindowClosed(False)
qt_app.setStyleSheet(style.load_stylesheet())
icon = QtGui.QIcon(resources.pype_icon_filepath())
qt_app.setWindowIcon(icon)
def signal_handler(sig, frame):
print("You pressed Ctrl+C. Process ended.")
qt_app.quit()
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
modules_manager = ModulesManager()
module = modules_manager.modules_by_name["standalonepublish_tool"]
window = Window(module.publish_paths)
window.show()
sys.exit(qt_app.exec_())

View file

@ -9,6 +9,7 @@ from Qt import QtWidgets, QtCore
from . import DropDataFrame
from avalon import io
from pype.api import execute, Logger
from pype.lib import get_pype_execute_args
log = Logger().get_logger("standalonepublisher")
@ -207,10 +208,8 @@ def cli_publish(data, publish_paths, gui=True):
if data.get("family", "").lower() == "editorial":
envcopy["PYBLISH_SUSPEND_LOGS"] = "1"
result = execute(
[sys.executable, PUBLISH_SCRIPT_PATH],
env=envcopy
)
args = get_pype_execute_args("run", PUBLISH_SCRIPT_PATH)
result = execute(args, env=envcopy)
result = {}
if os.path.exists(json_data_path):

View file

@@ -116,7 +116,7 @@ from igniter.tools import load_environments  # noqa: E402
from igniter.bootstrap_repos import PypeVersion # noqa: E402
bootstrap = BootstrapRepos()
silent_commands = ["run", "igniter"]
silent_commands = ["run", "igniter", "standalonepublisher"]
def set_environments() -> None:
@ -276,23 +276,36 @@ def _determine_mongodb() -> str:
def _initialize_environment(pype_version: PypeVersion) -> None:
version_path = pype_version.path
os.environ["PYPE_VERSION"] = pype_version.version
# set PYPE_ROOT to point to currently used Pype version.
os.environ["PYPE_ROOT"] = os.path.normpath(version_path.as_posix())
# inject version to Python environment (sys.path, ...)
print(">>> Injecting Pype version to running environment ...")
bootstrap.add_paths_from_directory(version_path)
# add venv 'site-packages' to PYTHONPATH
python_path = os.getenv("PYTHONPATH", "")
split_paths = python_path.split(os.pathsep)
# add pype tools
split_paths.append(os.path.join(os.environ["PYPE_ROOT"], "pype", "tools"))
# add common pype vendor
# (common for multiple Python interpreter versions)
split_paths.append(os.path.join(
os.environ["PYPE_ROOT"], "pype", "vendor", "python", "common"))
os.environ["PYTHONPATH"] = os.pathsep.join(split_paths)
# Additional sys paths related to PYPE_ROOT directory
# TODO move additional paths to `boot` part when PYPE_ROOT will point
# to same hierarchy from code and from frozen pype
additional_paths = [
# add pype tools
os.path.join(os.environ["PYPE_ROOT"], "pype", "pype", "tools"),
# add common pype vendor
# (common for multiple Python interpreter versions)
os.path.join(
os.environ["PYPE_ROOT"],
"pype",
"pype",
"vendor",
"python",
"common"
)
]
# set PYPE_ROOT to point to currently used Pype version.
os.environ["PYPE_ROOT"] = os.path.normpath(version_path.as_posix())
split_paths = os.getenv("PYTHONPATH", "").split(os.pathsep)
for path in additional_paths:
split_paths.insert(0, path)
sys.path.insert(0, path)
os.environ["PYTHONPATH"] = os.pathsep.join(split_paths)
def _find_frozen_pype(use_version: str = None,
@ -416,23 +429,33 @@ def _bootstrap_from_code(use_version):
# add self to python paths
repos.insert(0, pype_root)
for repo in repos:
sys.path.append(repo)
sys.path.insert(0, repo)
# add venv 'site-packages' to PYTHONPATH
python_path = os.getenv("PYTHONPATH", "")
split_paths = python_path.split(os.pathsep)
split_paths += repos
# add pype tools
split_paths.append(os.path.join(os.environ["PYPE_ROOT"], "pype", "tools"))
# Add repos as first in list
split_paths = repos + split_paths
# last one should be venv site-packages
# this is slightly convoluted as we can get here from frozen code too
# in case when we are running without any version installed.
if not getattr(sys, 'frozen', False):
split_paths.append(site.getsitepackages()[-1])
# add common pype vendor
# (common for multiple Python interpreter versions)
split_paths.append(os.path.join(
os.environ["PYPE_ROOT"], "pype", "vendor", "python", "common"))
# TODO move additional paths to `boot` part when PYPE_ROOT will point
# to same hierarchy from code and from frozen pype
additional_paths = [
# add pype tools
os.path.join(os.environ["PYPE_ROOT"], "pype", "tools"),
# add common pype vendor
# (common for multiple Python interpreter versions)
os.path.join(
os.environ["PYPE_ROOT"], "pype", "vendor", "python", "common"
)
]
for path in additional_paths:
split_paths.insert(0, path)
sys.path.insert(0, path)
os.environ["PYTHONPATH"] = os.pathsep.join(split_paths)
return Path(version_path)
@ -485,7 +508,7 @@ def boot():
# ------------------------------------------------------------------------
# Find Pype versions
# ------------------------------------------------------------------------
# WARNING Environment PYPE_ROOT may change if frozen pype is executed
if getattr(sys, 'frozen', False):
# find versions of Pype to be used with frozen code
try:
@ -507,10 +530,15 @@ def boot():
os.environ["PYPE_REPOS_ROOT"] = os.path.join(
os.environ["PYPE_ROOT"], "repos")
# delete Pype module from cache so it is used from specific version
# delete Pype module and it's submodules from cache so it is used from
# specific version
modules_to_del = []
for module_name in tuple(sys.modules):
if module_name == "pype" or module_name.startswith("pype."):
modules_to_del.append(sys.modules.pop(module_name))
try:
del sys.modules["pype"]
del sys.modules["pype.version"]
for module_name in modules_to_del:
del sys.modules[module_name]
except AttributeError:
pass
except KeyError:

View file

@@ -117,7 +117,7 @@ echo -e "${RST}"
detect_python || return 1
# Directories
pype_root=$(dirname $(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))))
pype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}")))
pushd "$pype_root" > /dev/null || return > /dev/null
version_command="import os;exec(open(os.path.join('$pype_root', 'pype', 'version.py')).read());print(__version__);"

View file

@@ -126,7 +126,7 @@ main () {
detect_python || return 1
# Directories
pype_root=$(dirname $(realpath $(dirname $(dirname "${BASH_SOURCE[0]}"))))
pype_root=$(realpath $(dirname $(dirname "${BASH_SOURCE[0]}")))
pushd "$pype_root" > /dev/null || return > /dev/null
echo -e "${BIGreen}>>>${RST} Reading Poetry ... \c"