diff --git a/docs/source/conf.py b/docs/source/conf.py
index 894425e56b..517c441ccd 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -15,14 +15,11 @@
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
-import sys
import os
-from pprint import pprint
from pypeapp.pypeLauncher import PypeLauncher
-from pypeapp.storage import Storage
from pypeapp.deployment import Deployment
-pype_setup = os.getenv('PYPE_ROOT')
+pype_setup = os.getenv('PYPE_SETUP_PATH')
d = Deployment(pype_setup)
launcher = PypeLauncher()
@@ -32,7 +29,6 @@ os.environ['PYPE_CONFIG'] = config_path
os.environ['TOOL_ENV'] = os.path.normpath(os.path.join(config_path,
'environments'))
launcher._add_modules()
-Storage().update_environment()
launcher._load_default_environments(tools=tools)
# -- Project information -----------------------------------------------------
diff --git a/make_docs.bat b/make_docs.bat
index f0011086e5..d2ea75562f 100644
--- a/make_docs.bat
+++ b/make_docs.bat
@@ -25,15 +25,15 @@ set PYTHONPATH=%%d;!PYTHONPATH!
echo [92m^>^>^>[0m Setting PYPE_CONFIG
call :ResolvePath pypeconfig "..\pype-config"
set PYPE_CONFIG=%pypeconfig%
-echo [92m^>^>^>[0m Setting PYPE_ROOT
+echo [92m^>^>^>[0m Setting PYPE_SETUP_PATH
call :ResolvePath pyperoot "..\..\"
-set PYPE_ROOT=%pyperoot%
-set PYTHONPATH=%PYPE_ROOT%;%PYTHONPATH%
+set PYPE_SETUP_PATH=%pyperoot%
+set PYTHONPATH=%PYPE_SETUP_PATH%;%PYTHONPATH%
echo [92m^>^>^>[0m Setting PYPE_ENV
set PYPE_ENV="C:\Users\Public\pype_env2"
call "docs\make.bat" clean
-sphinx-apidoc -M -f -d 6 --ext-autodoc --ext-intersphinx --ext-viewcode -o docs\source pype %PYPE_ROOT%\repos\pype\pype\vendor\*
+sphinx-apidoc -M -f -d 6 --ext-autodoc --ext-intersphinx --ext-viewcode -o docs\source pype %PYPE_SETUP_PATH%\repos\pype\pype\vendor\*
call "docs\make.bat" html
echo [92m^>^>^>[0m Doing cleanup ...
set PYTHONPATH=%_OLD_PYTHONPATH%
diff --git a/pype/__init__.py b/pype/__init__.py
index 0be016696c..505db4c57f 100644
--- a/pype/__init__.py
+++ b/pype/__init__.py
@@ -3,7 +3,7 @@ import os
from pyblish import api as pyblish
from avalon import api as avalon
from .lib import filter_pyblish_plugins
-from pypeapp import config
+from pypeapp import config, Roots
import logging
@@ -99,6 +99,9 @@ def install():
avalon.register_plugin_path(avalon.Creator, path)
avalon.register_plugin_path(avalon.InventoryAction, path)
+ if project_name:
+ root_obj = Roots(project_name)
+ avalon.register_root(root_obj.roots)
# apply monkey patched discover to original one
avalon.discover = patched_discover
diff --git a/pype/clockify/widget_settings.py b/pype/clockify/widget_settings.py
index 7142548fa6..027268834c 100644
--- a/pype/clockify/widget_settings.py
+++ b/pype/clockify/widget_settings.py
@@ -26,7 +26,7 @@ class ClockifySettings(QtWidgets.QWidget):
elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
self.setWindowIcon(self.parent.parent.icon)
else:
- pype_setup = os.getenv('PYPE_ROOT')
+ pype_setup = os.getenv('PYPE_SETUP_PATH')
items = [pype_setup, "app", "resources", "icon.png"]
fname = os.path.sep.join(items)
icon = QtGui.QIcon(fname)
diff --git a/pype/ftrack/actions/action_create_folders.py b/pype/ftrack/actions/action_create_folders.py
index 68cf837469..8f3358cf9c 100644
--- a/pype/ftrack/actions/action_create_folders.py
+++ b/pype/ftrack/actions/action_create_folders.py
@@ -1,369 +1,240 @@
import os
-import sys
-import logging
-import argparse
-import re
-
-import ftrack_api
from pype.ftrack import BaseAction
from avalon import lib as avalonlib
-from pype.ftrack.lib.io_nonsingleton import DbConnector
from pypeapp import config, Anatomy
class CreateFolders(BaseAction):
-
- '''Custom action.'''
-
- #: Action identifier.
- identifier = 'create.folders'
-
- #: Action label.
- label = 'Create Folders'
-
- #: Action Icon.
- icon = '{}/ftrack/action_icons/CreateFolders.svg'.format(
- os.environ.get('PYPE_STATICS_SERVER', '')
+ identifier = "create.folders"
+ label = "Create Folders"
+ icon = "{}/ftrack/action_icons/CreateFolders.svg".format(
+ os.environ.get("PYPE_STATICS_SERVER", "")
)
- db = DbConnector()
-
def discover(self, session, entities, event):
- ''' Validation '''
if len(entities) != 1:
return False
- not_allowed = ['assetversion', 'project']
+ not_allowed = ["assetversion", "project"]
if entities[0].entity_type.lower() in not_allowed:
return False
return True
def interface(self, session, entities, event):
- if event['data'].get('values', {}):
+ if event["data"].get("values", {}):
return
entity = entities[0]
without_interface = True
- for child in entity['children']:
- if child['object_type']['name'].lower() != 'task':
+ for child in entity["children"]:
+ if child["object_type"]["name"].lower() != "task":
without_interface = False
break
self.without_interface = without_interface
if without_interface:
return
- title = 'Create folders'
+ title = "Create folders"
- entity_name = entity['name']
+ entity_name = entity["name"]
msg = (
- '
Do you want create folders also'
- ' for all children of "{}"?
'
+ "Do you want create folders also"
+            " for all children of \"{}\"?<br/>"
)
- if entity.entity_type.lower() == 'project':
- entity_name = entity['full_name']
- msg = msg.replace(' also', '')
- msg += '(Project root won\'t be created if not checked)
'
+ if entity.entity_type.lower() == "project":
+ entity_name = entity["full_name"]
+ msg = msg.replace(" also", "")
+            msg += "(Project root won't be created if not checked)<br/>"
items = []
item_msg = {
- 'type': 'label',
- 'value': msg.format(entity_name)
+ "type": "label",
+ "value": msg.format(entity_name)
}
item_label = {
- 'type': 'label',
- 'value': 'With all chilren entities'
+ "type": "label",
+            "value": "With all children entities"
}
item = {
- 'name': 'children_included',
- 'type': 'boolean',
- 'value': False
+ "name": "children_included",
+ "type": "boolean",
+ "value": False
}
items.append(item_msg)
items.append(item_label)
items.append(item)
- if len(items) == 0:
- return {
- 'success': False,
- 'message': 'Didn\'t found any running jobs'
- }
- else:
- return {
- 'items': items,
- 'title': title
- }
+ return {
+ "items": items,
+ "title": title
+ }
def launch(self, session, entities, event):
'''Callback method for custom action.'''
with_childrens = True
if self.without_interface is False:
- if 'values' not in event['data']:
+ if "values" not in event["data"]:
return
- with_childrens = event['data']['values']['children_included']
+ with_childrens = event["data"]["values"]["children_included"]
+
entity = entities[0]
- if entity.entity_type.lower() == 'project':
+ if entity.entity_type.lower() == "project":
proj = entity
else:
- proj = entity['project']
- project_name = proj['full_name']
- project_code = proj['name']
- if entity.entity_type.lower() == 'project' and with_childrens == False:
+ proj = entity["project"]
+ project_name = proj["full_name"]
+ project_code = proj["name"]
+
+ if entity.entity_type.lower() == 'project' and with_childrens is False:
return {
'success': True,
'message': 'Nothing was created'
}
- data = {
- "root": os.environ["AVALON_PROJECTS"],
- "project": {
- "name": project_name,
- "code": project_code
- }
- }
+
all_entities = []
all_entities.append(entity)
if with_childrens:
all_entities = self.get_notask_children(entity)
- av_project = None
- try:
- self.db.install()
- self.db.Session['AVALON_PROJECT'] = project_name
- av_project = self.db.find_one({'type': 'project'})
- template_work = av_project['config']['template']['work']
- template_publish = av_project['config']['template']['publish']
- self.db.uninstall()
- except Exception:
- templates = Anatomy().templates
- template_work = templates["avalon"]["work"]
- template_publish = templates["avalon"]["publish"]
+ anatomy = Anatomy(project_name)
+
+ work_keys = ["work", "folder"]
+ work_template = anatomy.templates
+ for key in work_keys:
+ work_template = work_template[key]
+ work_has_apps = "{app" in work_template
+
+ publish_keys = ["publish", "folder"]
+ publish_template = anatomy.templates
+ for key in publish_keys:
+ publish_template = publish_template[key]
+ publish_has_apps = "{app" in publish_template
+
+ presets = config.get_presets()
+ app_presets = presets.get("tools", {}).get("sw_folders")
+ cached_apps = {}
collected_paths = []
- presets = config.get_presets()['tools']['sw_folders']
for entity in all_entities:
- if entity.entity_type.lower() == 'project':
+ if entity.entity_type.lower() == "project":
continue
- ent_data = data.copy()
+ ent_data = {
+ "project": {
+ "name": project_name,
+ "code": project_code
+ }
+ }
- asset_name = entity['name']
- ent_data['asset'] = asset_name
+ ent_data["asset"] = entity["name"]
- parents = entity['link']
- hierarchy_names = [p['name'] for p in parents[1:-1]]
- hierarchy = ''
+ parents = entity["link"][1:-1]
+ hierarchy_names = [p["name"] for p in parents]
+ hierarchy = ""
if hierarchy_names:
hierarchy = os.path.sep.join(hierarchy_names)
- ent_data['hierarchy'] = hierarchy
+ ent_data["hierarchy"] = hierarchy
tasks_created = False
- if entity['children']:
- for child in entity['children']:
- if child['object_type']['name'].lower() != 'task':
- continue
- tasks_created = True
- task_type_name = child['type']['name'].lower()
- task_data = ent_data.copy()
- task_data['task'] = child['name']
- possible_apps = presets.get(task_type_name, [])
- template_work_created = False
- template_publish_created = False
- apps = []
+ for child in entity["children"]:
+ if child["object_type"]["name"].lower() != "task":
+ continue
+ tasks_created = True
+ task_type_name = child["type"]["name"].lower()
+ task_data = ent_data.copy()
+ task_data["task"] = child["name"]
+
+ apps = []
+ if app_presets and (work_has_apps or publish_has_apps):
+ possible_apps = app_presets.get(task_type_name, [])
for app in possible_apps:
- try:
- app_data = avalonlib.get_application(app)
- app_dir = app_data['application_dir']
- except ValueError:
- app_dir = app
+ if app in cached_apps:
+ app_dir = cached_apps[app]
+ else:
+ try:
+ app_data = avalonlib.get_application(app)
+ app_dir = app_data["application_dir"]
+ except ValueError:
+ app_dir = app
+ cached_apps[app] = app_dir
apps.append(app_dir)
- # Template wok
- if '{app}' in template_work:
- for app in apps:
- template_work_created = True
- app_data = task_data.copy()
- app_data['app'] = app
- collected_paths.append(
- self.compute_template(
- template_work, app_data
- )
- )
- if template_work_created is False:
- collected_paths.append(
- self.compute_template(template_work, task_data)
- )
- # Template publish
- if '{app}' in template_publish:
- for app in apps:
- template_publish_created = True
- app_data = task_data.copy()
- app_data['app'] = app
- collected_paths.append(
- self.compute_template(
- template_publish, app_data, True
- )
- )
- if template_publish_created is False:
- collected_paths.append(
- self.compute_template(
- template_publish, task_data, True
- )
- )
+            # Template work
+ if work_has_apps:
+ app_data = task_data.copy()
+ for app in apps:
+ app_data["app"] = app
+ collected_paths.append(self.compute_template(
+ anatomy, app_data, work_keys
+ ))
+ else:
+ collected_paths.append(self.compute_template(
+ anatomy, task_data, work_keys
+ ))
+
+ # Template publish
+ if publish_has_apps:
+ app_data = task_data.copy()
+ for app in apps:
+ app_data["app"] = app
+ collected_paths.append(self.compute_template(
+ anatomy, app_data, publish_keys
+ ))
+ else:
+ collected_paths.append(self.compute_template(
+ anatomy, task_data, publish_keys
+ ))
if not tasks_created:
# create path for entity
- collected_paths.append(
- self.compute_template(template_work, ent_data)
- )
- collected_paths.append(
- self.compute_template(template_publish, ent_data)
- )
- if len(collected_paths) > 0:
- self.log.info('Creating folders:')
+ collected_paths.append(self.compute_template(
+ anatomy, ent_data, work_keys
+ ))
+ collected_paths.append(self.compute_template(
+ anatomy, ent_data, publish_keys
+ ))
+
+ if len(collected_paths) == 0:
+ return {
+ "success": True,
+ "message": "No project folders to create."
+ }
+
+ self.log.info("Creating folders:")
+
for path in set(collected_paths):
self.log.info(path)
if not os.path.exists(path):
os.makedirs(path)
return {
- 'success': True,
- 'message': 'Created Folders Successfully!'
+ "success": True,
+ "message": "Successfully created project folders."
}
def get_notask_children(self, entity):
output = []
- if entity.get('object_type', {}).get(
- 'name', entity.entity_type
- ).lower() == 'task':
+ if entity.entity_type.lower() == "task":
return output
- else:
- output.append(entity)
- if entity['children']:
- for child in entity['children']:
- output.extend(self.get_notask_children(child))
+
+ output.append(entity)
+ for child in entity["children"]:
+ output.extend(self.get_notask_children(child))
return output
- def template_format(self, template, data):
+ def compute_template(self, anatomy, data, anatomy_keys):
+ filled_template = anatomy.format_all(data)
+ for key in anatomy_keys:
+ filled_template = filled_template[key]
- partial_data = PartialDict(data)
+ if filled_template.solved:
+ return os.path.normpath(filled_template)
- # remove subdict items from string (like 'project[name]')
- subdict = PartialDict()
- count = 1
- store_pattern = 5*'_'+'{:0>3}'
- regex_patern = "\{\w*\[[^\}]*\]\}"
- matches = re.findall(regex_patern, template)
-
- for match in matches:
- key = store_pattern.format(count)
- subdict[key] = match
- template = template.replace(match, '{'+key+'}')
- count += 1
- # solve fillind keys with optional keys
- solved = self._solve_with_optional(template, partial_data)
- # try to solve subdict and replace them back to string
- for k, v in subdict.items():
- try:
- v = v.format_map(data)
- except (KeyError, TypeError):
- pass
- subdict[k] = v
-
- return solved.format_map(subdict)
-
- def _solve_with_optional(self, template, data):
- # Remove optional missing keys
- pattern = re.compile(r"(<.*?[^{0]*>)[^0-9]*?")
- invalid_optionals = []
- for group in pattern.findall(template):
- try:
- group.format(**data)
- except KeyError:
- invalid_optionals.append(group)
- for group in invalid_optionals:
- template = template.replace(group, "")
-
- solved = template.format_map(data)
-
- # solving after format optional in second round
- for catch in re.compile(r"(<.*?[^{0]*>)[^0-9]*?").findall(solved):
- if "{" in catch:
- # remove all optional
- solved = solved.replace(catch, "")
- else:
- # Remove optional symbols
- solved = solved.replace(catch, catch[1:-1])
-
- return solved
-
- def compute_template(self, str, data, task=False):
- first_result = self.template_format(str, data)
- if first_result == first_result.split('{')[0]:
- return os.path.normpath(first_result)
- if task:
- return os.path.normpath(first_result.split('{')[0])
-
- index = first_result.index('{')
-
- regex = '\{\w*[^\}]*\}'
- match = re.findall(regex, first_result[index:])[0]
- without_missing = str.split(match)[0].split('}')
- output_items = []
- for part in without_missing:
- if '{' in part:
- output_items.append(part + '}')
- return os.path.normpath(
- self.template_format(''.join(output_items), data)
+ self.log.warning(
+ "Template \"{}\" was not fully filled \"{}\"".format(
+ filled_template.template, filled_template
+ )
)
-
-
-class PartialDict(dict):
- def __getitem__(self, item):
- out = super().__getitem__(item)
- if isinstance(out, dict):
- return '{'+item+'}'
- return out
-
- def __missing__(self, key):
- return '{'+key+'}'
+ return os.path.normpath(filled_template.split("{")[0])
def register(session, plugins_presets={}):
- '''Register plugin. Called when used as an plugin.'''
-
+    """Register plugin. Called when used as a plugin."""
CreateFolders(session, plugins_presets).register()
-
-
-def main(arguments=None):
- '''Set up logging and register action.'''
- if arguments is None:
- arguments = []
-
- parser = argparse.ArgumentParser()
- # Allow setting of logging level from arguments.
- loggingLevels = {}
- for level in (
- logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
- logging.ERROR, logging.CRITICAL
- ):
- loggingLevels[logging.getLevelName(level).lower()] = level
-
- parser.add_argument(
- '-v', '--verbosity',
- help='Set the logging output verbosity.',
- choices=loggingLevels.keys(),
- default='info'
- )
- namespace = parser.parse_args(arguments)
-
- # Set up basic logging
- logging.basicConfig(level=loggingLevels[namespace.verbosity])
-
- session = ftrack_api.Session()
- register(session)
-
- # Wait for events
- logging.info(
- 'Registered actions and listening for events. Use Ctrl-C to abort.'
- )
- session.event_hub.wait()
-
-
-if __name__ == '__main__':
- raise SystemExit(main(sys.argv[1:]))
diff --git a/pype/ftrack/actions/action_create_project_structure.py b/pype/ftrack/actions/action_create_project_structure.py
index 6124ebe843..e1c5b6b837 100644
--- a/pype/ftrack/actions/action_create_project_structure.py
+++ b/pype/ftrack/actions/action_create_project_structure.py
@@ -1,36 +1,67 @@
import os
-import sys
import re
-import argparse
-import logging
-import ftrack_api
from pype.ftrack import BaseAction
-from pypeapp import config
+from pypeapp import config, Anatomy
class CreateProjectFolders(BaseAction):
- '''Edit meta data action.'''
+    """Action creates folder structure and may create hierarchy in Ftrack.
- #: Action identifier.
- identifier = 'create.project.structure'
- #: Action label.
- label = 'Create Project Structure'
- #: Action description.
- description = 'Creates folder structure'
- #: roles that are allowed to register this action
- role_list = ['Pypeclub', 'Administrator', 'Project Manager']
- icon = '{}/ftrack/action_icons/CreateProjectFolders.svg'.format(
- os.environ.get('PYPE_STATICS_SERVER', '')
+ Creation of folder structure and hierarchy in Ftrack is based on presets.
+ These presets are located in:
+ `~/pype-config/presets/tools/project_folder_structure.json`
+
+ Example of content:
+ ```json
+ {
+ "__project_root__": {
+ "prod" : {},
+ "resources" : {
+ "footage": {
+ "plates": {},
+ "offline": {}
+ },
+ "audio": {},
+ "art_dept": {}
+ },
+ "editorial" : {},
+ "assets[ftrack.Library]": {
+ "characters[ftrack]": {},
+ "locations[ftrack]": {}
+ },
+ "shots[ftrack.Sequence]": {
+ "scripts": {},
+ "editorial[ftrack.Folder]": {}
+ }
+ }
+ }
+ ```
+ Key "__project_root__" indicates root folder (or entity). Each key in
+ dictionary represents folder name. Value may contain another dictionary
+ with subfolders.
+
+ Identifier `[ftrack]` in name says that this should be also created in
+ Ftrack hierarchy. It is possible to specify entity type of item with "." .
+ If key is `assets[ftrack.Library]` then in ftrack will be created entity
+ with name "assets" and entity type "Library". It is expected Library entity
+ type exist in Ftrack.
+ """
+
+ identifier = "create.project.structure"
+ label = "Create Project Structure"
+ description = "Creates folder structure"
+ role_list = ["Pypeclub", "Administrator", "Project Manager"]
+ icon = "{}/ftrack/action_icons/CreateProjectFolders.svg".format(
+ os.environ.get("PYPE_STATICS_SERVER", "")
)
- pattern_array = re.compile('\[.*\]')
- pattern_ftrack = '.*\[[.]*ftrack[.]*'
- pattern_ent_ftrack = 'ftrack\.[^.,\],\s,]*'
- project_root_key = '__project_root__'
+ pattern_array = re.compile(r"\[.*\]")
+ pattern_ftrack = re.compile(r".*\[[.]*ftrack[.]*")
+ pattern_ent_ftrack = re.compile(r"ftrack\.[^.,\],\s,]*")
+ project_root_key = "__project_root__"
def discover(self, session, entities, event):
- ''' Validation '''
if len(entities) != 1:
return False
@@ -41,22 +72,30 @@ class CreateProjectFolders(BaseAction):
def launch(self, session, entities, event):
entity = entities[0]
- if entity.entity_type.lower() == 'project':
- project = entity
- else:
- project = entity['project']
+ project = self.get_project_from_entity(entity)
+ project_folder_presets = (
+ config.get_presets()
+ .get("tools", {})
+ .get("project_folder_structure")
+ )
+ if not project_folder_presets:
+ return {
+ "success": False,
+ "message": "Project structure presets are not set."
+ }
- presets = config.get_presets()['tools']['project_folder_structure']
try:
# Get paths based on presets
- basic_paths = self.get_path_items(presets)
- self.create_folders(basic_paths, entity)
+ basic_paths = self.get_path_items(project_folder_presets)
+ anatomy = Anatomy(project["full_name"])
+ self.create_folders(basic_paths, entity, project, anatomy)
self.create_ftrack_entities(basic_paths, project)
- except Exception as e:
+
+ except Exception as exc:
session.rollback()
return {
- 'success': False,
- 'message': str(e)
+ "success": False,
+ "message": str(exc)
}
return True
@@ -113,15 +152,15 @@ class CreateProjectFolders(BaseAction):
def trigger_creation(self, separation, parent):
for item, subvalues in separation.items():
matches = re.findall(self.pattern_array, item)
- ent_type = 'Folder'
+ ent_type = "Folder"
if len(matches) == 0:
name = item
else:
match = matches[0]
- name = item.replace(match, '')
+ name = item.replace(match, "")
ent_type_match = re.findall(self.pattern_ent_ftrack, match)
if len(ent_type_match) > 0:
- ent_type_split = ent_type_match[0].split('.')
+ ent_type_split = ent_type_match[0].split(".")
if len(ent_type_split) == 2:
ent_type = ent_type_split[1]
new_parent = self.create_ftrack_entity(name, ent_type, parent)
@@ -130,22 +169,22 @@ class CreateProjectFolders(BaseAction):
self.trigger_creation(subvalue, new_parent)
def create_ftrack_entity(self, name, ent_type, parent):
- for children in parent['children']:
- if children['name'] == name:
+ for children in parent["children"]:
+ if children["name"] == name:
return children
data = {
- 'name': name,
- 'parent_id': parent['id']
+ "name": name,
+ "parent_id": parent["id"]
}
- if parent.entity_type.lower() == 'project':
- data['project_id'] = parent['id']
+ if parent.entity_type.lower() == "project":
+ data["project_id"] = parent["id"]
else:
- data['project_id'] = parent['project']['id']
+ data["project_id"] = parent["project"]["id"]
existing_entity = self.session.query((
"TypedContext where name is \"{}\" and "
"parent_id is \"{}\" and project_id is \"{}\""
- ).format(name, data['parent_id'], data['project_id'])).first()
+ ).format(name, data["parent_id"], data["project_id"])).first()
if existing_entity:
return existing_entity
@@ -161,12 +200,11 @@ class CreateProjectFolders(BaseAction):
else:
paths = self.get_path_items(value)
for path in paths:
- if isinstance(path, str):
- output.append([key, path])
- else:
- p = [key]
- p.extend(path)
- output.append(p)
+ if not isinstance(path, (list, tuple)):
+ path = [path]
+
+ output.append([key, *path])
+
return output
def compute_paths(self, basic_paths_items, project_root):
@@ -176,72 +214,30 @@ class CreateProjectFolders(BaseAction):
for path_item in path_items:
matches = re.findall(self.pattern_array, path_item)
if len(matches) > 0:
- path_item = path_item.replace(matches[0], '')
+ path_item = path_item.replace(matches[0], "")
if path_item == self.project_root_key:
path_item = project_root
clean_items.append(path_item)
output.append(os.path.normpath(os.path.sep.join(clean_items)))
return output
- def create_folders(self, basic_paths, entity):
- # Set project root folder
- if entity.entity_type.lower() == 'project':
- project_name = entity['full_name']
+ def create_folders(self, basic_paths, entity, project, anatomy):
+ roots_paths = []
+ if isinstance(anatomy.roots, dict):
+ for root in anatomy.roots:
+ roots_paths.append(root.value)
else:
- project_name = entity['project']['full_name']
- project_root_items = [os.environ['AVALON_PROJECTS'], project_name]
- project_root = os.path.sep.join(project_root_items)
-
- full_paths = self.compute_paths(basic_paths, project_root)
- #Create folders
- for path in full_paths:
- if os.path.exists(path):
- continue
- os.makedirs(path.format(project_root=project_root))
-
+ roots_paths.append(anatomy.roots.value)
+ for root_path in roots_paths:
+ project_root = os.path.join(root_path, project["full_name"])
+ full_paths = self.compute_paths(basic_paths, project_root)
+ # Create folders
+ for path in full_paths:
+ if os.path.exists(path):
+ continue
+ os.makedirs(path.format(project_root=project_root))
def register(session, plugins_presets={}):
- '''Register plugin. Called when used as an plugin.'''
-
CreateProjectFolders(session, plugins_presets).register()
-
-
-def main(arguments=None):
- '''Set up logging and register action.'''
- if arguments is None:
- arguments = []
-
- parser = argparse.ArgumentParser()
- # Allow setting of logging level from arguments.
- loggingLevels = {}
- for level in (
- logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING,
- logging.ERROR, logging.CRITICAL
- ):
- loggingLevels[logging.getLevelName(level).lower()] = level
-
- parser.add_argument(
- '-v', '--verbosity',
- help='Set the logging output verbosity.',
- choices=loggingLevels.keys(),
- default='info'
- )
- namespace = parser.parse_args(arguments)
-
- # Set up basic logging
- logging.basicConfig(level=loggingLevels[namespace.verbosity])
-
- session = ftrack_api.Session()
- register(session)
-
- # Wait for events
- logging.info(
- 'Registered actions and listening for events. Use Ctrl-C to abort.'
- )
- session.event_hub.wait()
-
-
-if __name__ == '__main__':
- raise SystemExit(main(sys.argv[1:]))
diff --git a/pype/ftrack/actions/action_delete_old_versions.py b/pype/ftrack/actions/action_delete_old_versions.py
index f6a66318c9..c13845f58c 100644
--- a/pype/ftrack/actions/action_delete_old_versions.py
+++ b/pype/ftrack/actions/action_delete_old_versions.py
@@ -7,6 +7,7 @@ from pymongo import UpdateOne
from pype.ftrack import BaseAction
from pype.ftrack.lib.io_nonsingleton import DbConnector
+from pypeapp import Anatomy
import avalon.pipeline
@@ -21,8 +22,8 @@ class DeleteOldVersions(BaseAction):
" archived with only lates versions."
)
role_list = ["Pypeclub", "Project Manager", "Administrator"]
- icon = '{}/ftrack/action_icons/PypeAdmin.svg'.format(
- os.environ.get('PYPE_STATICS_SERVER', '')
+ icon = "{}/ftrack/action_icons/PypeAdmin.svg".format(
+ os.environ.get("PYPE_STATICS_SERVER", "")
)
dbcon = DbConnector()
@@ -194,6 +195,7 @@ class DeleteOldVersions(BaseAction):
# Set Mongo collection
project_name = project["full_name"]
+ anatomy = Anatomy(project_name)
self.dbcon.Session["AVALON_PROJECT"] = project_name
self.log.debug("Project is set to {}".format(project_name))
@@ -307,7 +309,7 @@ class DeleteOldVersions(BaseAction):
dir_paths = {}
file_paths_by_dir = collections.defaultdict(list)
for repre in repres:
- file_path, seq_path = self.path_from_represenation(repre)
+ file_path, seq_path = self.path_from_represenation(repre, anatomy)
if file_path is None:
self.log.warning((
"Could not format path for represenation \"{}\""
@@ -495,21 +497,17 @@ class DeleteOldVersions(BaseAction):
self.log.debug("Removed folder: {}".format(dir_path))
os.rmdir(dir_path)
- def path_from_represenation(self, representation):
+ def path_from_represenation(self, representation, anatomy):
try:
template = representation["data"]["template"]
except KeyError:
return (None, None)
- root = os.environ["AVALON_PROJECTS"]
- if not root:
- return (None, None)
-
sequence_path = None
try:
context = representation["context"]
- context["root"] = root
+ context["root"] = anatomy.roots
path = avalon.pipeline.format_template_with_optional_keys(
context, template
)
diff --git a/pype/ftrack/actions/action_delivery.py b/pype/ftrack/actions/action_delivery.py
index 29fdfe39ae..9d686929de 100644
--- a/pype/ftrack/actions/action_delivery.py
+++ b/pype/ftrack/actions/action_delivery.py
@@ -2,7 +2,6 @@ import os
import copy
import shutil
import collections
-import string
import clique
from bson.objectid import ObjectId
@@ -17,24 +16,18 @@ from pype.ftrack.lib.avalon_sync import CustAttrIdKey
class Delivery(BaseAction):
- '''Edit meta data action.'''
- #: Action identifier.
identifier = "delivery.action"
- #: Action label.
label = "Delivery"
- #: Action description.
description = "Deliver data to client"
- #: roles that are allowed to register this action
role_list = ["Pypeclub", "Administrator", "Project manager"]
- icon = '{}/ftrack/action_icons/Delivery.svg'.format(
- os.environ.get('PYPE_STATICS_SERVER', '')
+ icon = "{}/ftrack/action_icons/Delivery.svg".format(
+ os.environ.get("PYPE_STATICS_SERVER", "")
)
db_con = DbConnector()
def discover(self, session, entities, event):
- ''' Validation '''
for entity in entities:
if entity.entity_type.lower() == "assetversion":
return True
@@ -301,17 +294,10 @@ class Delivery(BaseAction):
repre = repres_by_name.get(comp_name)
repres_to_deliver.append(repre)
- if not location_path:
- location_path = os.environ.get("AVALON_PROJECTS") or ""
-
- print(location_path)
-
anatomy = Anatomy(project_name)
for repre in repres_to_deliver:
# Get destination repre path
anatomy_data = copy.deepcopy(repre["context"])
- anatomy_data["root"] = location_path
-
anatomy_filled = anatomy.format_all(anatomy_data)
test_path = anatomy_filled["delivery"][anatomy_name]
@@ -341,7 +327,7 @@ class Delivery(BaseAction):
self.report_items[msg].append(sub_msg)
self.log.warning(
"{} Representation: \"{}\" Filled: <{}>".format(
- msg, str(repre["_id"]), str(result)
+ msg, str(repre["_id"]), str(test_path)
)
)
continue
@@ -352,7 +338,7 @@ class Delivery(BaseAction):
if frame:
repre["context"]["frame"] = len(str(frame)) * "#"
- repre_path = self.path_from_represenation(repre)
+ repre_path = self.path_from_represenation(repre, anatomy)
# TODO add backup solution where root of path from component
# is repalced with AVALON_PROJECTS root
if not frame:
@@ -452,7 +438,7 @@ class Delivery(BaseAction):
self.copy_file(src, dst)
- def path_from_represenation(self, representation):
+ def path_from_represenation(self, representation, anatomy):
try:
template = representation["data"]["template"]
@@ -461,7 +447,7 @@ class Delivery(BaseAction):
try:
context = representation["context"]
- context["root"] = os.environ.get("AVALON_PROJECTS") or ""
+ context["root"] = anatomy.roots
path = pipeline.format_template_with_optional_keys(
context, template
)
diff --git a/pype/ftrack/actions/action_prepare_project.py b/pype/ftrack/actions/action_prepare_project.py
index 4cc6cfd8df..2693a5750b 100644
--- a/pype/ftrack/actions/action_prepare_project.py
+++ b/pype/ftrack/actions/action_prepare_project.py
@@ -1,9 +1,8 @@
import os
import json
-from ruamel import yaml
from pype.ftrack import BaseAction
-from pypeapp import config
+from pypeapp import config, Anatomy, project_overrides_dir_path
from pype.ftrack.lib.avalon_sync import get_avalon_attr
@@ -24,6 +23,7 @@ class PrepareProject(BaseAction):
# Key to store info about trigerring create folder structure
create_project_structure_key = "create_folder_structure"
+ item_splitter = {'type': 'label', 'value': '---'}
def discover(self, session, entities, event):
''' Validation '''
@@ -41,15 +41,190 @@ class PrepareProject(BaseAction):
# Inform user that this may take a while
self.show_message(event, "Preparing data... Please wait", True)
+ self.log.debug("Preparing data which will be shown")
self.log.debug("Loading custom attributes")
- cust_attrs, hier_cust_attrs = get_avalon_attr(session, True)
- project_defaults = config.get_presets(
- entities[0]["full_name"]
- ).get("ftrack", {}).get("project_defaults", {})
- self.log.debug("Preparing data which will be shown")
+ project_name = entities[0]["full_name"]
+
+ project_defaults = (
+ config.get_presets(project_name)
+ .get("ftrack", {})
+ .get("project_defaults", {})
+ )
+
+ anatomy = Anatomy(project_name)
+ if not anatomy.roots:
+ return {
+ "success": False,
+ "message": (
+ "Have issues with loading Roots for project \"{}\"."
+ ).format(anatomy.project_name)
+ }
+
+ root_items = self.prepare_root_items(anatomy)
+
+ ca_items, multiselect_enumerators = (
+ self.prepare_custom_attribute_items(project_defaults)
+ )
+
+ self.log.debug("Heavy items are ready. Preparing last items group.")
+
+ title = "Prepare Project"
+ items = []
+
+ # Add root items
+ items.extend(root_items)
+ items.append(self.item_splitter)
+
+ # Ask if want to trigger Action Create Folder Structure
+ items.append({
+ "type": "label",
+ "value": "Want to create basic Folder Structure?<br/>"
+ })
+ items.append({
+ "name": self.create_project_structure_key,
+ "type": "boolean",
+ "value": False,
+ "label": "Check if Yes"
+ })
+
+ items.append(self.item_splitter)
+ items.append({
+ "type": "label",
+ "value": "Set basic Attributes:<br/>"
+ })
+
+ items.extend(ca_items)
+
+ # This item will be last (before enumerators)
+ # - sets value of auto synchronization
+ auto_sync_name = "avalon_auto_sync"
+ auto_sync_item = {
+ "name": auto_sync_name,
+ "type": "boolean",
+ "value": project_defaults.get(auto_sync_name, False),
+ "label": "AutoSync to Avalon"
+ }
+ # Add autosync attribute
+ items.append(auto_sync_item)
+
+ # Add enumerator items at the end
+ for item in multiselect_enumerators:
+ items.append(item)
+
+ return {
+ "items": items,
+ "title": title
+ }
+
+ def prepare_root_items(self, anatomy):
+ root_items = []
+ self.log.debug("Root items preparation begins.")
+
+ root_names = anatomy.root_names()
+ roots = anatomy.roots
+
+ root_items.append({
+ "type": "label",
+ "value": "Check your Project root settings<br/>"
+ })
+ root_items.append({
+ "type": "label",
+ "value": (
+ "NOTE: Roots are crutial for path filling"
+ " (and creating folder structure).<br/>"
+ )
+ })
+ root_items.append({
+ "type": "label",
+ "value": (
+ "WARNING: Do not change roots on running project,"
+ " that will cause workflow issues.<br/>"
+ )
+ })
+
+ default_roots = anatomy.roots
+ while isinstance(default_roots, dict):
+ key = tuple(default_roots.keys())[0]
+ default_roots = default_roots[key]
+
+ empty_text = "Enter root path here..."
+
+ # Root names is None when anatomy templates contain "{root}"
+ all_platforms = ["windows", "linux", "darwin"]
+ if root_names is None:
+ root_items.append(self.item_splitter)
+ # find first possible key
+ for platform in all_platforms:
+ value = default_roots.raw_data.get(platform) or ""
+ root_items.append({
+ "label": platform,
+ "name": "__root__{}".format(platform),
+ "type": "text",
+ "value": value,
+ "empty_text": empty_text
+ })
+ return root_items
+
+ root_name_data = {}
+ missing_roots = []
+ for root_name in root_names:
+ root_name_data[root_name] = {}
+ if not isinstance(roots, dict):
+ missing_roots.append(root_name)
+ continue
+
+ root_item = roots.get(root_name)
+ if not root_item:
+ missing_roots.append(root_name)
+ continue
+
+ for platform in all_platforms:
+ root_name_data[root_name][platform] = (
+ root_item.raw_data.get(platform) or ""
+ )
+
+ if missing_roots:
+ default_values = {}
+ for platform in all_platforms:
+ default_values[platform] = (
+ default_roots.raw_data.get(platform) or ""
+ )
+
+ for root_name in missing_roots:
+ root_name_data[root_name] = default_values
+
+ root_names = list(root_name_data.keys())
+ root_items.append({
+ "type": "hidden",
+ "name": "__rootnames__",
+ "value": json.dumps(root_names)
+ })
+
+ for root_name, values in root_name_data.items():
+ root_items.append(self.item_splitter)
+ root_items.append({
+ "type": "label",
+ "value": "Root: \"{}\"".format(root_name)
+ })
+ for platform, value in values.items():
+ root_items.append({
+ "label": platform,
+ "name": "__root__{}{}".format(root_name, platform),
+ "type": "text",
+ "value": value,
+ "empty_text": empty_text
+ })
+
+ self.log.debug("Root items preparation ended.")
+ return root_items
+
+ def _attributes_to_set(self, project_defaults):
attributes_to_set = {}
+
+ cust_attrs, hier_cust_attrs = get_avalon_attr(self.session, True)
+
for attr in hier_cust_attrs:
key = attr["key"]
if key.startswith("avalon_"):
@@ -77,45 +252,17 @@ class PrepareProject(BaseAction):
attributes_to_set.items(),
key=lambda x: x[1]["label"]
))
+ return attributes_to_set
+
+ def prepare_custom_attribute_items(self, project_defaults):
+ items = []
+ multiselect_enumerators = []
+ attributes_to_set = self._attributes_to_set(project_defaults)
+
self.log.debug("Preparing interface for keys: \"{}\"".format(
str([key for key in attributes_to_set])
))
- item_splitter = {'type': 'label', 'value': '---'}
- title = "Prepare Project"
- items = []
-
- # Ask if want to trigger Action Create Folder Structure
- items.append({
- "type": "label",
- "value": "Want to create basic Folder Structure?<br/>"
- })
-
- items.append({
- "name": self.create_project_structure_key,
- "type": "boolean",
- "value": False,
- "label": "Check if Yes"
- })
-
- items.append(item_splitter)
- items.append({
- "type": "label",
- "value": "Set basic Attributes:<br/>"
- })
-
- multiselect_enumerators = []
-
- # This item will be last (before enumerators)
- # - sets value of auto synchronization
- auto_sync_name = "avalon_auto_sync"
- auto_sync_item = {
- "name": auto_sync_name,
- "type": "boolean",
- "value": project_defaults.get(auto_sync_name, False),
- "label": "AutoSync to Avalon"
- }
-
for key, in_data in attributes_to_set.items():
attr = in_data["object"]
@@ -139,8 +286,7 @@ class PrepareProject(BaseAction):
attr_config_data = json.loads(attr_config["data"])
if attr_config["multiSelect"] is True:
- multiselect_enumerators.append(item_splitter)
-
+ multiselect_enumerators.append(self.item_splitter)
multiselect_enumerators.append({
"type": "label",
"value": in_data["label"]
@@ -160,10 +306,7 @@ class PrepareProject(BaseAction):
"label": "- {}".format(option["menu"])
}
if default:
- if (
- isinstance(default, list) or
- isinstance(default, tuple)
- ):
+ if isinstance(default, (list, tuple)):
if name in default:
item["value"] = True
else:
@@ -204,17 +347,7 @@ class PrepareProject(BaseAction):
items.append(item)
- # Add autosync attribute
- items.append(auto_sync_item)
-
- # Add enumerator items at the end
- for item in multiselect_enumerators:
- items.append(item)
-
- return {
- 'items': items,
- 'title': title
- }
+ return items, multiselect_enumerators
def launch(self, session, entities, event):
if not event['data'].get('values', {}):
@@ -222,6 +355,35 @@ class PrepareProject(BaseAction):
in_data = event['data']['values']
+ root_values = {}
+ root_key = "__root__"
+ for key in tuple(in_data.keys()):
+ if key.startswith(root_key):
+ _key = key[len(root_key):]
+ root_values[_key] = in_data.pop(key)
+
+ root_names = in_data.pop("__rootnames__", None)
+ root_data = {}
+ if root_names:
+ for root_name in json.loads(root_names):
+ root_data[root_name] = {}
+ for key, value in tuple(root_values.items()):
+ if key.startswith(root_name):
+ _key = key[len(root_name):]
+ root_data[root_name][_key] = value
+
+ else:
+ for key, value in root_values.items():
+ root_data[key] = value
+
+ project_name = entities[0]["full_name"]
+ anatomy = Anatomy(project_name)
+ anatomy.templates_obj.save_project_overrides(project_name)
+ anatomy.roots_obj.save_project_overrides(
+ project_name, root_data, override=True
+ )
+ anatomy.reset()
+
# pop out info about creating project structure
create_proj_struct = in_data.pop(self.create_project_structure_key)
@@ -269,94 +431,22 @@ class PrepareProject(BaseAction):
def create_project_specific_config(self, project_name, json_data):
self.log.debug("*** Creating project specifig configs ***")
-
- path_proj_configs = os.environ.get('PYPE_PROJECT_CONFIGS', "")
-
- # Skip if PYPE_PROJECT_CONFIGS is not set
- # TODO show user OS message
- if not path_proj_configs:
- self.log.warning((
- "Environment variable \"PYPE_PROJECT_CONFIGS\" is not set."
- " Project specific config can't be set."
- ))
- return
-
- path_proj_configs = os.path.normpath(path_proj_configs)
- # Skip if path does not exist
- # TODO create if not exist?!!!
- if not os.path.exists(path_proj_configs):
- self.log.warning((
- "Path set in Environment variable \"PYPE_PROJECT_CONFIGS\""
- " Does not exist."
- ))
- return
-
- project_specific_path = os.path.normpath(
- os.path.join(path_proj_configs, project_name)
- )
+ project_specific_path = project_overrides_dir_path(project_name)
if not os.path.exists(project_specific_path):
os.makedirs(project_specific_path)
self.log.debug((
"Project specific config folder for project \"{}\" created."
).format(project_name))
- # Anatomy ####################################
- self.log.debug("--- Processing Anatomy Begins: ---")
-
- anatomy_dir = os.path.normpath(os.path.join(
- project_specific_path, "anatomy"
- ))
- anatomy_path = os.path.normpath(os.path.join(
- anatomy_dir, "default.yaml"
- ))
-
- anatomy = None
- if os.path.exists(anatomy_path):
- self.log.debug(
- "Anatomy file already exist. Trying to read: \"{}\"".format(
- anatomy_path
- )
- )
- # Try to load data
- with open(anatomy_path, 'r') as file_stream:
- try:
- anatomy = yaml.load(file_stream, Loader=yaml.loader.Loader)
- self.log.debug("Reading Anatomy file was successful")
- except yaml.YAMLError as exc:
- self.log.warning(
- "Reading Yaml file failed: \"{}\"".format(anatomy_path),
- exc_info=True
- )
-
- if not anatomy:
- self.log.debug("Anatomy is not set. Duplicating default.")
- # Create Anatomy folder
- if not os.path.exists(anatomy_dir):
- self.log.debug(
- "Creating Anatomy folder: \"{}\"".format(anatomy_dir)
- )
- os.makedirs(anatomy_dir)
-
- source_items = [
- os.environ["PYPE_CONFIG"], "anatomy", "default.yaml"
- ]
-
- source_path = os.path.normpath(os.path.join(*source_items))
- with open(source_path, 'r') as file_stream:
- source_data = file_stream.read()
-
- with open(anatomy_path, 'w') as file_stream:
- file_stream.write(source_data)
-
# Presets ####################################
self.log.debug("--- Processing Presets Begins: ---")
- project_defaults_dir = os.path.normpath(os.path.join(*[
+ project_defaults_dir = os.path.normpath(os.path.join(
project_specific_path, "presets", "ftrack"
- ]))
- project_defaults_path = os.path.normpath(os.path.join(*[
+ ))
+ project_defaults_path = os.path.normpath(os.path.join(
project_defaults_dir, "project_defaults.json"
- ]))
+ ))
# Create folder if not exist
if not os.path.exists(project_defaults_dir):
self.log.debug("Creating Ftrack Presets folder: \"{}\"".format(
@@ -372,5 +462,4 @@ class PrepareProject(BaseAction):
def register(session, plugins_presets={}):
'''Register plugin. Called when used as an plugin.'''
-
PrepareProject(session, plugins_presets).register()
diff --git a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
index 051156c2f8..c95010c5ce 100644
--- a/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
+++ b/pype/ftrack/actions/action_store_thumbnails_to_avalon.py
@@ -52,41 +52,6 @@ class StoreThumbnailsToAvalon(BaseAction):
})
session.commit()
- thumbnail_roots = os.environ.get(self.thumbnail_key)
- if not thumbnail_roots:
- msg = "`{}` environment is not set".format(self.thumbnail_key)
-
- action_job["status"] = "failed"
- session.commit()
-
- self.log.warning(msg)
-
- return {
- "success": False,
- "message": msg
- }
-
- existing_thumbnail_root = None
- for path in thumbnail_roots.split(os.pathsep):
- if os.path.exists(path):
- existing_thumbnail_root = path
- break
-
- if existing_thumbnail_root is None:
- msg = (
- "Can't access paths, set in `{}` ({})"
- ).format(self.thumbnail_key, thumbnail_roots)
-
- action_job["status"] = "failed"
- session.commit()
-
- self.log.warning(msg)
-
- return {
- "success": False,
- "message": msg
- }
-
project = self.get_project_from_entity(entities[0])
project_name = project["full_name"]
anatomy = Anatomy(project_name)
@@ -120,6 +85,44 @@ class StoreThumbnailsToAvalon(BaseAction):
"message": msg
}
+ thumbnail_roots = os.environ.get(self.thumbnail_key)
+ if (
+ "{thumbnail_root}" in anatomy.templates["publish"]["thumbnail"]
+ and not thumbnail_roots
+ ):
+ msg = "`{}` environment is not set".format(self.thumbnail_key)
+
+ action_job["status"] = "failed"
+ session.commit()
+
+ self.log.warning(msg)
+
+ return {
+ "success": False,
+ "message": msg
+ }
+
+ existing_thumbnail_root = None
+ for path in thumbnail_roots.split(os.pathsep):
+ if os.path.exists(path):
+ existing_thumbnail_root = path
+ break
+
+ if existing_thumbnail_root is None:
+ msg = (
+ "Can't access paths, set in `{}` ({})"
+ ).format(self.thumbnail_key, thumbnail_roots)
+
+ action_job["status"] = "failed"
+ session.commit()
+
+ self.log.warning(msg)
+
+ return {
+ "success": False,
+ "message": msg
+ }
+
example_template_data = {
"_id": "ID",
"thumbnail_root": "THUBMNAIL_ROOT",
diff --git a/pype/ftrack/events/event_user_assigment.py b/pype/ftrack/events/event_user_assigment.py
index eaacfd959a..bf3bec93be 100644
--- a/pype/ftrack/events/event_user_assigment.py
+++ b/pype/ftrack/events/event_user_assigment.py
@@ -158,20 +158,10 @@ class UserAssigmentEvent(BaseEvent):
"""
project_name = task['project']['full_name']
project_code = task['project']['name']
- try:
- root = os.environ['PYPE_STUDIO_PROJECTS_PATH']
- except KeyError:
- msg = 'Project ({}) root not set'.format(project_name)
- self.log.error(msg)
- return {
- 'success': False,
- 'message': msg
- }
# fill in template data
asset = self._get_asset(task)
t_data = {
- 'root': root,
'project': {
'name': project_name,
'code': project_code
@@ -204,11 +194,12 @@ class UserAssigmentEvent(BaseEvent):
data = self._get_template_data(task)
# format directories to pass to shell script
anatomy = Anatomy(data["project"]["name"])
+ anatomy_filled = anatomy.format(data)
# formatting work dir is easiest part as we can use whole path
- work_dir = anatomy.format(data)['avalon']['work']
+ work_dir = anatomy_filled["work"]["folder"]
# we also need publish but not whole
- filled_all = anatomy.format_all(data)
- publish = filled_all['avalon']['publish']
+ anatomy_filled.strict = False
+ publish = anatomy_filled["publish"]["folder"]
# now find path to {asset}
m = re.search("(^.+?{})".format(data['asset']),
diff --git a/pype/ftrack/lib/avalon_sync.py b/pype/ftrack/lib/avalon_sync.py
index 474c70bd26..469841d22c 100644
--- a/pype/ftrack/lib/avalon_sync.py
+++ b/pype/ftrack/lib/avalon_sync.py
@@ -1712,14 +1712,10 @@ class SyncEntitiesFactory:
except InvalidId:
new_id = ObjectId()
- project_name = self.entities_dict[self.ft_project_id]["name"]
project_item["_id"] = new_id
project_item["parent"] = None
project_item["schema"] = EntitySchemas["project"]
project_item["config"]["schema"] = EntitySchemas["config"]
- project_item["config"]["template"] = (
- get_avalon_project_template(project_name)
- )
self.ftrack_avalon_mapper[self.ft_project_id] = new_id
self.avalon_ftrack_mapper[new_id] = self.ft_project_id
diff --git a/pype/ftrack/lib/ftrack_app_handler.py b/pype/ftrack/lib/ftrack_app_handler.py
index 53a52b1ff9..21c49e7819 100644
--- a/pype/ftrack/lib/ftrack_app_handler.py
+++ b/pype/ftrack/lib/ftrack_app_handler.py
@@ -1,5 +1,6 @@
import os
import sys
+import copy
import platform
import avalon.lib
import acre
@@ -82,8 +83,8 @@ class AppAction(BaseAction):
"""
if (
- len(entities) != 1 or
- entities[0].entity_type.lower() != "task"
+ len(entities) != 1
+ or entities[0].entity_type.lower() != 'task'
):
return False
@@ -141,115 +142,88 @@ class AppAction(BaseAction):
"""
entity = entities[0]
- ft_project = self.get_project_from_entity(entity)
- project_name = ft_project["full_name"]
+ project_name = entity["project"]["full_name"]
database = pypelib.get_avalon_database()
- # Get current environments
- env_list = [
- "AVALON_PROJECT",
- "AVALON_SILO",
- "AVALON_ASSET",
- "AVALON_TASK",
- "AVALON_APP",
- "AVALON_APP_NAME"
- ]
- env_origin = {}
- for env in env_list:
- env_origin[env] = os.environ.get(env, None)
-
- # set environments for Avalon
- os.environ["AVALON_PROJECT"] = project_name
- os.environ["AVALON_SILO"] = entity["ancestors"][0]["name"]
- os.environ["AVALON_ASSET"] = entity["parent"]["name"]
- os.environ["AVALON_TASK"] = entity["name"]
- os.environ["AVALON_APP"] = self.identifier.split("_")[0]
- os.environ["AVALON_APP_NAME"] = self.identifier
-
- anatomy = Anatomy(project_name)
-
- asset_doc = database[project_name].find_one({
+ asset_name = entity["parent"]["name"]
+ asset_document = database[project_name].find_one({
"type": "asset",
- "name": entity["parent"]["name"]
+ "name": asset_name
})
- parents = asset_doc["data"]["parents"]
hierarchy = ""
- if parents:
- hierarchy = os.path.join(*parents)
-
- os.environ["AVALON_HIERARCHY"] = hierarchy
-
- application = avalon.lib.get_application(os.environ["AVALON_APP_NAME"])
+ asset_doc_parents = asset_document["data"].get("parents")
+ if len(asset_doc_parents) > 0:
+ hierarchy = os.path.join(*asset_doc_parents)
+ application = avalon.lib.get_application(self.identifier)
data = {
- "root": os.environ.get("PYPE_STUDIO_PROJECTS_MOUNT"),
"project": {
- "name": ft_project["full_name"],
- "code": ft_project["name"]
+ "name": entity["project"]["full_name"],
+ "code": entity["project"]["name"]
},
"task": entity["name"],
- "asset": entity["parent"]["name"],
+ "asset": asset_name,
"app": application["application_dir"],
"hierarchy": hierarchy
}
- av_project = database[project_name].find_one({"type": 'project'})
- templates = None
- if av_project:
- work_template = av_project.get('config', {}).get('template', {}).get(
- 'work', None
- )
- work_template = None
try:
- work_template = work_template.format(**data)
- except Exception:
- try:
- anatomy = anatomy.format(data)
- work_template = anatomy["work"]["folder"]
+ anatomy = Anatomy(project_name)
+ anatomy_filled = anatomy.format(data)
+ workdir = os.path.normpath(anatomy_filled["work"]["folder"])
- except Exception as exc:
- msg = "{} Error in anatomy.format: {}".format(
- __name__, str(exc)
- )
- self.log.error(msg, exc_info=True)
- return {
- 'success': False,
- 'message': msg
- }
+ except Exception as exc:
+ msg = "Error in anatomy.format: {}".format(
+ str(exc)
+ )
+ self.log.error(msg, exc_info=True)
+ return {
+ "success": False,
+ "message": msg
+ }
- workdir = os.path.normpath(work_template)
- os.environ["AVALON_WORKDIR"] = workdir
try:
os.makedirs(workdir)
except FileExistsError:
pass
+ # set environments for Avalon
+ prep_env = copy.deepcopy(os.environ)
+ prep_env.update({
+ "AVALON_PROJECT": project_name,
+ "AVALON_ASSET": asset_name,
+ "AVALON_TASK": entity["name"],
+ "AVALON_APP": self.identifier.split("_")[0],
+ "AVALON_APP_NAME": self.identifier,
+ "AVALON_HIERARCHY": hierarchy,
+ "AVALON_WORKDIR": workdir
+ })
+ prep_env.update(anatomy.roots_obj.root_environments())
+
# collect all parents from the task
parents = []
for item in entity['link']:
parents.append(session.get(item['type'], item['id']))
# collect all the 'environment' attributes from parents
- tools_attr = [os.environ["AVALON_APP"], os.environ["AVALON_APP_NAME"]]
- for parent in reversed(parents):
- # check if the attribute is empty, if not use it
- if parent['custom_attributes']['tools_env']:
- tools_attr.extend(parent['custom_attributes']['tools_env'])
- break
+ tools_attr = [prep_env["AVALON_APP"], prep_env["AVALON_APP_NAME"]]
+ tools_env = asset_document["data"].get("tools_env") or []
+ tools_attr.extend(tools_env)
tools_env = acre.get_tools(tools_attr)
env = acre.compute(tools_env)
- env = acre.merge(env, current_env=dict(os.environ))
- env = acre.append(dict(os.environ), env)
+ env = acre.merge(env, current_env=dict(prep_env))
+ env = acre.append(dict(prep_env), env)
# Get path to execute
- st_temp_path = os.environ['PYPE_CONFIG']
+ st_temp_path = os.environ["PYPE_CONFIG"]
os_plat = platform.system().lower()
# Path to folder with launchers
- path = os.path.join(st_temp_path, 'launchers', os_plat)
+ path = os.path.join(st_temp_path, "launchers", os_plat)
+
# Full path to executable launcher
execfile = None
@@ -266,94 +240,91 @@ class AppAction(BaseAction):
}
if sys.platform == "win32":
-
for ext in os.environ["PATHEXT"].split(os.pathsep):
fpath = os.path.join(path.strip('"'), self.executable + ext)
if os.path.isfile(fpath) and os.access(fpath, os.X_OK):
execfile = fpath
break
- pass
# Run SW if was found executable
- if execfile is not None:
- # Store subprocess to varaible. This is due to Blender launch
- # bug. Please make sure Blender >=2.81 can be launched before
- # remove `_popen` variable.
- _popen = avalon.lib.launch(
- executable=execfile, args=[], environment=env
- )
- else:
+ if execfile is None:
return {
- 'success': False,
- 'message': "We didn't found launcher for {0}"
- .format(self.label)
+ "success": False,
+ "message": "We didn't find launcher for {0}".format(
+ self.label
+ )
}
- if sys.platform.startswith('linux'):
+ popen = avalon.lib.launch(
+ executable=execfile, args=[], environment=env
+ )
+
+ elif sys.platform.startswith("linux"):
execfile = os.path.join(path.strip('"'), self.executable)
- if os.path.isfile(execfile):
- try:
- fp = open(execfile)
- except PermissionError as p:
- self.log.exception('Access denied on {0} - {1}'.format(
- execfile, p))
- return {
- 'success': False,
- 'message': "Access denied on launcher - {}".format(
- execfile)
- }
- fp.close()
- # check executable permission
- if not os.access(execfile, os.X_OK):
- self.log.error('No executable permission on {}'.format(
- execfile))
- return {
- 'success': False,
- 'message': "No executable permission - {}".format(
- execfile)
- }
+ if not os.path.isfile(execfile):
+ msg = "Launcher doesn't exist - {}".format(execfile)
- else:
- self.log.error('Launcher doesn\'t exist - {}'.format(
- execfile))
+ self.log.error(msg)
return {
- 'success': False,
- 'message': "Launcher doesn't exist - {}".format(execfile)
+ "success": False,
+ "message": msg
+ }
+
+ try:
+ fp = open(execfile)
+ except PermissionError as perm_exc:
+ msg = "Access denied on launcher {} - {}".format(
+ execfile, perm_exc
+ )
+
+ self.log.exception(msg, exc_info=True)
+ return {
+ "success": False,
+ "message": msg
+ }
+
+ fp.close()
+ # check executable permission
+ if not os.access(execfile, os.X_OK):
+ msg = "No executable permission - {}".format(execfile)
+
+ self.log.error(msg)
+ return {
+ "success": False,
+ "message": msg
}
# Run SW if was found executable
- if execfile is not None:
- # Store subprocess to varaible. This is due to Blender launch
- # bug. Please make sure Blender >=2.81 can be launched before
- # remove `_popen` variable.
- _popen = avalon.lib.launch(
- '/usr/bin/env', args=['bash', execfile], environment=env
- )
- else:
+ if execfile is None:
return {
- 'success': False,
- 'message': "We didn't found launcher for {0}"
- .format(self.label)
- }
+ "success": False,
+ "message": "We didn't found launcher for {0}".format(
+ self.label
+ )
+ }
+
+ popen = avalon.lib.launch(
+ "/usr/bin/env", args=["bash", execfile], environment=env
+ )
# Change status of task to In progress
presets = config.get_presets()["ftrack"]["ftrack_config"]
- if 'status_update' in presets:
- statuses = presets['status_update']
+ if "status_update" in presets:
+ statuses = presets["status_update"]
- actual_status = entity['status']['name'].lower()
+ actual_status = entity["status"]["name"].lower()
already_tested = []
ent_path = "/".join(
- [ent["name"] for ent in entity['link']]
+ [ent["name"] for ent in entity["link"]]
)
while True:
next_status_name = None
for key, value in statuses.items():
if key in already_tested:
continue
- if actual_status in value or '_any_' in value:
- if key != '_ignore_':
+ if actual_status in value or "_any_" in value:
+ if key != "_ignore_":
next_status_name = key
already_tested.append(key)
break
@@ -363,12 +334,12 @@ class AppAction(BaseAction):
break
try:
- query = 'Status where name is "{}"'.format(
+ query = "Status where name is \"{}\"".format(
next_status_name
)
status = session.query(query).one()
- entity['status'] = status
+ entity["status"] = status
session.commit()
self.log.debug("Changing status to \"{}\" <{}>".format(
next_status_name, ent_path
@@ -378,18 +349,12 @@ class AppAction(BaseAction):
except Exception:
session.rollback()
msg = (
- 'Status "{}" in presets wasn\'t found'
- ' on Ftrack entity type "{}"'
+ "Status \"{}\" in presets wasn't found"
+ " on Ftrack entity type \"{}\""
).format(next_status_name, entity.entity_type)
self.log.warning(msg)
- # Set origin avalon environments
- for key, value in env_origin.items():
- if value == None:
- value = ""
- os.environ[key] = value
-
return {
- 'success': True,
- 'message': "Launching {0}".format(self.label)
+ "success": True,
+ "message": "Launching {0}".format(self.label)
}
diff --git a/pype/ftrack/tray/login_dialog.py b/pype/ftrack/tray/login_dialog.py
index 5f3777f93e..88c4e90374 100644
--- a/pype/ftrack/tray/login_dialog.py
+++ b/pype/ftrack/tray/login_dialog.py
@@ -29,7 +29,7 @@ class Login_Dialog_ui(QtWidgets.QWidget):
elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
self.setWindowIcon(self.parent.parent.icon)
else:
- pype_setup = os.getenv('PYPE_ROOT')
+ pype_setup = os.getenv('PYPE_SETUP_PATH')
items = [pype_setup, "app", "resources", "icon.png"]
fname = os.path.sep.join(items)
icon = QtGui.QIcon(fname)
diff --git a/pype/lib.py b/pype/lib.py
index d3ccbc8589..9d19822974 100644
--- a/pype/lib.py
+++ b/pype/lib.py
@@ -659,7 +659,7 @@ def execute_hook(hook, *args, **kwargs):
This will load hook file, instantiate class and call `execute` method
on it. Hook must be in a form:
- `$PYPE_ROOT/repos/pype/path/to/hook.py/HookClass`
+ `$PYPE_SETUP_PATH/repos/pype/path/to/hook.py/HookClass`
This will load `hook.py`, instantiate HookClass and then execute_hook
`execute(*args, **kwargs)`
@@ -670,7 +670,7 @@ def execute_hook(hook, *args, **kwargs):
class_name = hook.split("/")[-1]
- abspath = os.path.join(os.getenv('PYPE_ROOT'),
+ abspath = os.path.join(os.getenv('PYPE_SETUP_PATH'),
'repos', 'pype', *hook.split("/")[:-1])
mod_name, mod_ext = os.path.splitext(os.path.basename(abspath))
diff --git a/pype/logging/gui/widgets.py b/pype/logging/gui/widgets.py
index 10aad3c282..1daaa28326 100644
--- a/pype/logging/gui/widgets.py
+++ b/pype/logging/gui/widgets.py
@@ -397,7 +397,7 @@ class LogDetailWidget(QtWidgets.QWidget):
layout = QtWidgets.QVBoxLayout(self)
label = QtWidgets.QLabel("Detail")
- detail_widget = LogDetailTextEdit()
+ detail_widget = QtWidgets.QTextEdit()
detail_widget.setReadOnly(True)
layout.addWidget(label)
layout.addWidget(detail_widget)
@@ -420,66 +420,3 @@ class LogDetailWidget(QtWidgets.QWidget):
self.detail_widget.setHtml(self.html_text.format(**data))
-
-
-class LogDetailTextEdit(QtWidgets.QTextEdit):
- """QTextEdit that displays version specific information.
-
- This also overrides the context menu to add actions like copying
- source path to clipboard or copying the raw data of the version
- to clipboard.
-
- """
- def __init__(self, parent=None):
- super(LogDetailTextEdit, self).__init__(parent=parent)
-
- # self.data = {
- # "source": None,
- # "raw": None
- # }
- #
- # def contextMenuEvent(self, event):
- # """Context menu with additional actions"""
- # menu = self.createStandardContextMenu()
- #
- # # Add additional actions when any text so we can assume
- # # the version is set.
- # if self.toPlainText().strip():
- #
- # menu.addSeparator()
- # action = QtWidgets.QAction("Copy source path to clipboard",
- # menu)
- # action.triggered.connect(self.on_copy_source)
- # menu.addAction(action)
- #
- # action = QtWidgets.QAction("Copy raw data to clipboard",
- # menu)
- # action.triggered.connect(self.on_copy_raw)
- # menu.addAction(action)
- #
- # menu.exec_(event.globalPos())
- # del menu
- #
- # def on_copy_source(self):
- # """Copy formatted source path to clipboard"""
- # source = self.data.get("source", None)
- # if not source:
- # return
- #
- # # path = source.format(root=api.registered_root())
- # # clipboard = QtWidgets.QApplication.clipboard()
- # # clipboard.setText(path)
- #
- # def on_copy_raw(self):
- # """Copy raw version data to clipboard
- #
- # The data is string formatted with `pprint.pformat`.
- #
- # """
- # raw = self.data.get("raw", None)
- # if not raw:
- # return
- #
- # raw_text = pprint.pformat(raw)
- # clipboard = QtWidgets.QApplication.clipboard()
- # clipboard.setText(raw_text)
diff --git a/pype/muster/widget_login.py b/pype/muster/widget_login.py
index 1d0dd29d59..88d769ef93 100644
--- a/pype/muster/widget_login.py
+++ b/pype/muster/widget_login.py
@@ -23,7 +23,7 @@ class MusterLogin(QtWidgets.QWidget):
elif hasattr(parent, 'parent') and hasattr(parent.parent, 'icon'):
self.setWindowIcon(parent.parent.icon)
else:
- pype_setup = os.getenv('PYPE_ROOT')
+ pype_setup = os.getenv('PYPE_SETUP_PATH')
items = [pype_setup, "app", "resources", "icon.png"]
fname = os.path.sep.join(items)
icon = QtGui.QIcon(fname)
diff --git a/pype/nuke/lib.py b/pype/nuke/lib.py
index 423738dd7f..621a464ac9 100644
--- a/pype/nuke/lib.py
+++ b/pype/nuke/lib.py
@@ -192,7 +192,6 @@ def format_anatomy(data):
data["version"] = pype.get_version_from_path(file)
project_document = pype.get_project()
data.update({
- "root": api.Session["AVALON_PROJECTS"],
"subset": data["avalon"]["subset"],
"asset": data["avalon"]["asset"],
"task": api.Session["AVALON_TASK"],
@@ -1111,15 +1110,15 @@ class BuildWorkfile(WorkfileSettings):
self.to_script = to_script
# collect data for formating
self.data_tmp = {
- "root": root_path or api.Session["AVALON_PROJECTS"],
"project": {"name": self._project["name"],
- "code": self._project["data"].get("code", '')},
+ "code": self._project["data"].get("code", "")},
"asset": self._asset or os.environ["AVALON_ASSET"],
"task": kwargs.get("task") or api.Session["AVALON_TASK"],
"hierarchy": kwargs.get("hierarchy") or pype.get_hierarchy(),
"version": kwargs.get("version", {}).get("name", 1),
"user": getpass.getuser(),
- "comment": "firstBuild"
+ "comment": "firstBuild",
+ "ext": "nk"
}
# get presets from anatomy
@@ -1128,8 +1127,8 @@ class BuildWorkfile(WorkfileSettings):
anatomy_filled = anatomy.format(self.data_tmp)
# get dir and file for workfile
- self.work_dir = anatomy_filled["avalon"]["work"]
- self.work_file = anatomy_filled["avalon"]["workfile"] + ".nk"
+ self.work_dir = anatomy_filled["work"]["folder"]
+ self.work_file = anatomy_filled["work"]["file"]
def save_script_as(self, path=None):
# first clear anything in open window
diff --git a/pype/plugins/global/publish/collect_anatomy.py b/pype/plugins/global/publish/collect_anatomy_context_data.py
similarity index 63%
rename from pype/plugins/global/publish/collect_anatomy.py
rename to pype/plugins/global/publish/collect_anatomy_context_data.py
index 73ae3bb024..e1e6c12ee9 100644
--- a/pype/plugins/global/publish/collect_anatomy.py
+++ b/pype/plugins/global/publish/collect_anatomy_context_data.py
@@ -1,13 +1,14 @@
-"""Collect Anatomy and global anatomy data.
+"""Collect global context Anatomy data.
Requires:
+ context -> anatomy
+ context -> projectEntity
+ context -> assetEntity
+ context -> username
+ context -> datetimeData
session -> AVALON_TASK
- projectEntity, assetEntity -> collect_avalon_entities *(pyblish.api.CollectorOrder)
- username -> collect_pype_user *(pyblish.api.CollectorOrder + 0.001)
- datetimeData -> collect_datetime_data *(pyblish.api.CollectorOrder)
Provides:
- context -> anatomy (pypeapp.Anatomy)
context -> anatomyData
"""
@@ -15,45 +16,51 @@ import os
import json
from avalon import api, lib
-from pypeapp import Anatomy
import pyblish.api
-class CollectAnatomy(pyblish.api.ContextPlugin):
- """Collect Anatomy into Context"""
+class CollectAnatomyContextData(pyblish.api.ContextPlugin):
+ """Collect Anatomy Context data.
+
+ Example:
+ context.data["anatomyData"] = {
+ "project": {
+ "name": "MyProject",
+ "code": "myproj"
+ },
+ "asset": "AssetName",
+ "hierarchy": "path/to/asset",
+ "task": "Working",
+ "username": "MeDespicable",
+
+ *** OPTIONAL ***
+ "app": "maya" # Current application base name
+ + multiple keys from `datetimeData` # see its collector
+ }
+ """
order = pyblish.api.CollectorOrder + 0.002
- label = "Collect Anatomy"
+ label = "Collect Anatomy Context Data"
def process(self, context):
- root_path = api.registered_root()
task_name = api.Session["AVALON_TASK"]
project_entity = context.data["projectEntity"]
asset_entity = context.data["assetEntity"]
- project_name = project_entity["name"]
-
- context.data["anatomy"] = Anatomy(project_name)
- self.log.info(
- "Anatomy object collected for project \"{}\".".format(project_name)
- )
-
hierarchy_items = asset_entity["data"]["parents"]
hierarchy = ""
if hierarchy_items:
hierarchy = os.path.join(*hierarchy_items)
context_data = {
- "root": root_path,
"project": {
- "name": project_name,
+ "name": project_entity["name"],
"code": project_entity["data"].get("code")
},
"asset": asset_entity["name"],
"hierarchy": hierarchy.replace("\\", "/"),
"task": task_name,
-
"username": context.data["user"]
}
diff --git a/pype/plugins/global/publish/collect_instance_anatomy_data.py b/pype/plugins/global/publish/collect_anatomy_instance_data.py
similarity index 96%
rename from pype/plugins/global/publish/collect_instance_anatomy_data.py
rename to pype/plugins/global/publish/collect_anatomy_instance_data.py
index 06a25b7c8a..6528bede2e 100644
--- a/pype/plugins/global/publish/collect_instance_anatomy_data.py
+++ b/pype/plugins/global/publish/collect_anatomy_instance_data.py
@@ -28,11 +28,11 @@ from avalon import io
import pyblish.api
-class CollectInstanceAnatomyData(pyblish.api.InstancePlugin):
- """Fill templates with data needed for publish"""
+class CollectAnatomyInstanceData(pyblish.api.InstancePlugin):
+ """Collect Instance specific Anatomy data."""
order = pyblish.api.CollectorOrder + 0.49
- label = "Collect instance anatomy data"
+ label = "Collect Anatomy Instance data"
def process(self, instance):
# get all the stuff from the database
diff --git a/pype/plugins/global/publish/collect_anatomy_object.py b/pype/plugins/global/publish/collect_anatomy_object.py
new file mode 100644
index 0000000000..8c01ea5c44
--- /dev/null
+++ b/pype/plugins/global/publish/collect_anatomy_object.py
@@ -0,0 +1,32 @@
+"""Collect Anatomy object.
+
+Requires:
+ os.environ -> AVALON_PROJECT
+
+Provides:
+ context -> anatomy (pypeapp.Anatomy)
+"""
+import os
+from pypeapp import Anatomy
+import pyblish.api
+
+
+class CollectAnatomyObject(pyblish.api.ContextPlugin):
+ """Collect Anatomy object into Context"""
+
+ order = pyblish.api.CollectorOrder - 0.4
+ label = "Collect Anatomy Object"
+
+ def process(self, context):
+ project_name = os.environ.get("AVALON_PROJECT")
+ if project_name is None:
+ raise AssertionError(
+ "Environment `AVALON_PROJECT` is not set. "
+ "Could not initialize project's Anatomy."
+ )
+
+ context.data["anatomy"] = Anatomy(project_name)
+
+ self.log.info(
+ "Anatomy object collected for project \"{}\".".format(project_name)
+ )
diff --git a/pype/plugins/global/publish/collect_avalon_entities.py b/pype/plugins/global/publish/collect_avalon_entities.py
index 9d16a05a78..51dd3d7b06 100644
--- a/pype/plugins/global/publish/collect_avalon_entities.py
+++ b/pype/plugins/global/publish/collect_avalon_entities.py
@@ -15,7 +15,7 @@ import pyblish.api
class CollectAvalonEntities(pyblish.api.ContextPlugin):
"""Collect Anatomy into Context"""
- order = pyblish.api.CollectorOrder - 0.02
+ order = pyblish.api.CollectorOrder - 0.1
label = "Collect Avalon Entities"
def process(self, context):
diff --git a/pype/plugins/global/publish/collect_rendered_files.py b/pype/plugins/global/publish/collect_rendered_files.py
index 8ecf7ba156..5229cd9705 100644
--- a/pype/plugins/global/publish/collect_rendered_files.py
+++ b/pype/plugins/global/publish/collect_rendered_files.py
@@ -1,11 +1,18 @@
+"""Loads publishing context from json and continues in publish process.
+
+Requires:
+ anatomy -> context["anatomy"] *(pyblish.api.CollectorOrder - 0.11)
+
+Provides:
+ context, instances -> All data from previous publishing process.
+"""
+
import os
import json
import pyblish.api
from avalon import api
-from pypeapp import PypeLauncher
-
class CollectRenderedFiles(pyblish.api.ContextPlugin):
"""
@@ -13,14 +20,17 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
`PYPE_PUBLISH_DATA`. Those files _MUST_ share same context.
"""
- order = pyblish.api.CollectorOrder - 0.1
+ order = pyblish.api.CollectorOrder - 0.2
targets = ["filesequence"]
label = "Collect rendered frames"
_context = None
def _load_json(self, path):
- assert os.path.isfile(path), ("path to json file doesn't exist")
+ path = path.strip('\"')
+ assert os.path.isfile(path), (
+ "Path to json file doesn't exist. \"{}\"".format(path)
+ )
data = None
with open(path, "r") as json_file:
try:
@@ -32,7 +42,12 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
)
return data
- def _process_path(self, data):
+ def _fill_staging_dir(self, data_object, anatomy):
+ staging_dir = data_object.get("stagingDir")
+ if staging_dir:
+ data_object["stagingDir"] = anatomy.fill_root(staging_dir)
+
+ def _process_path(self, data, anatomy):
# validate basic necessary data
data_err = "invalid json file - missing data"
required = ["asset", "user", "comment",
@@ -66,14 +81,23 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
os.environ["FTRACK_SERVER"] = ftrack["FTRACK_SERVER"]
# now we can just add instances from json file and we are done
- for instance in data.get("instances"):
+ for instance_data in data.get("instances"):
self.log.info(" - processing instance for {}".format(
- instance.get("subset")))
- i = self._context.create_instance(instance.get("subset"))
- self.log.info("remapping paths ...")
- i.data["representations"] = [PypeLauncher().path_remapper(
- data=r) for r in instance.get("representations")]
- i.data.update(instance)
+ instance_data.get("subset")))
+ instance = self._context.create_instance(
+ instance_data.get("subset")
+ )
+ self.log.info("Filling stagingDir...")
+
+ self._fill_staging_dir(instance_data, anatomy)
+ instance.data.update(instance_data)
+
+ representations = []
+ for repre_data in instance_data.get("representations") or []:
+ self._fill_staging_dir(repre_data, anatomy)
+ representations.append(repre_data)
+
+ instance.data["representations"] = representations
def process(self, context):
self._context = context
@@ -82,13 +106,39 @@ class CollectRenderedFiles(pyblish.api.ContextPlugin):
"Missing `PYPE_PUBLISH_DATA`")
paths = os.environ["PYPE_PUBLISH_DATA"].split(os.pathsep)
- session_set = False
- for path in paths:
- data = self._load_json(path)
- if not session_set:
- self.log.info("Setting session using data from file")
- api.Session.update(data.get("session"))
- os.environ.update(data.get("session"))
- session_set = True
- assert data, "failed to load json file"
- self._process_path(data)
+ project_name = os.environ.get("AVALON_PROJECT")
+ if project_name is None:
+ raise AssertionError(
+ "Environment `AVALON_PROJECT` was not found. "
+ "Could not set project `root` which may cause issues."
+ )
+
+ # TODO root filling should happen after collect Anatomy
+ self.log.info("Getting root setting for project \"{}\"".format(
+ project_name
+ ))
+
+ anatomy = context.data["anatomy"]
+ self.log.info("anatomy: {}".format(anatomy.roots))
+ try:
+ session_is_set = False
+ for path in paths:
+ path = anatomy.fill_root(path)
+ data = self._load_json(path)
+ assert data, "failed to load json file"
+ if not session_is_set:
+ session_data = data["session"]
+ remapped = anatomy.roots_obj.path_remapper(
+ session_data["AVALON_WORKDIR"]
+ )
+ if remapped:
+ session_data["AVALON_WORKDIR"] = remapped
+
+ self.log.info("Setting session using data from file")
+ api.Session.update(session_data)
+ os.environ.update(session_data)
+ session_is_set = True
+ self._process_path(data, anatomy)
+ except Exception as e:
+ self.log.error(e, exc_info=True)
+ raise Exception("Error") from e
diff --git a/pype/plugins/global/publish/integrate_master_version.py b/pype/plugins/global/publish/integrate_master_version.py
index af6e7707e4..e6e4247dd8 100644
--- a/pype/plugins/global/publish/integrate_master_version.py
+++ b/pype/plugins/global/publish/integrate_master_version.py
@@ -481,9 +481,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
def copy_file(self, src_path, dst_path):
# TODO check drives if are the same to check if cas hardlink
- dst_path = self.path_root_check(dst_path)
- src_path = self.path_root_check(src_path)
-
dirname = os.path.dirname(dst_path)
try:
@@ -513,75 +510,6 @@ class IntegrateMasterVersion(pyblish.api.InstancePlugin):
shutil.copy(src_path, dst_path)
- def path_root_check(self, path):
- normalized_path = os.path.normpath(path)
- forward_slash_path = normalized_path.replace("\\", "/")
-
- drive, _path = os.path.splitdrive(normalized_path)
- if os.path.exists(drive + "/"):
- key = "drive_check{}".format(drive)
- if key not in self.path_checks:
- self.log.debug(
- "Drive \"{}\" exist. Nothing to change.".format(drive)
- )
- self.path_checks.append(key)
-
- return normalized_path
-
- path_env_key = "PYPE_STUDIO_PROJECTS_PATH"
- mount_env_key = "PYPE_STUDIO_PROJECTS_MOUNT"
- missing_envs = []
- if path_env_key not in os.environ:
- missing_envs.append(path_env_key)
-
- if mount_env_key not in os.environ:
- missing_envs.append(mount_env_key)
-
- if missing_envs:
- key = "missing_envs"
- if key not in self.path_checks:
- self.path_checks.append(key)
- _add_s = ""
- if len(missing_envs) > 1:
- _add_s = "s"
-
- self.log.warning((
- "Can't replace MOUNT drive path to UNC path due to missing"
- " environment variable{}: `{}`. This may cause issues"
- " during publishing process."
- ).format(_add_s, ", ".join(missing_envs)))
-
- return normalized_path
-
- unc_root = os.environ[path_env_key].replace("\\", "/")
- mount_root = os.environ[mount_env_key].replace("\\", "/")
-
- # --- Remove slashes at the end of mount and unc roots ---
- while unc_root.endswith("/"):
- unc_root = unc_root[:-1]
-
- while mount_root.endswith("/"):
- mount_root = mount_root[:-1]
- # ---
-
- if forward_slash_path.startswith(unc_root):
- self.log.debug((
- "Path already starts with UNC root: \"{}\""
- ).format(unc_root))
- return normalized_path
-
- if not forward_slash_path.startswith(mount_root):
- self.log.warning((
- "Path do not start with MOUNT root \"{}\" "
- "set in environment variable \"{}\""
- ).format(unc_root, mount_env_key))
- return normalized_path
-
- # Replace Mount root with Unc root
- path = unc_root + forward_slash_path[len(mount_root):]
-
- return os.path.normpath(path)
-
def version_from_representations(self, repres):
for repre in repres:
version = io.find_one({"_id": repre["parent"]})
diff --git a/pype/plugins/global/publish/integrate_new.py b/pype/plugins/global/publish/integrate_new.py
index 5d3e70bf13..921a212a4d 100644
--- a/pype/plugins/global/publish/integrate_new.py
+++ b/pype/plugins/global/publish/integrate_new.py
@@ -332,6 +332,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
test_dest_files.append(
os.path.normpath(template_filled)
)
+ template_data["frame"] = repre_context["frame"]
self.log.debug(
"test_dest_files: {}".format(str(test_dest_files)))
@@ -395,7 +396,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
dst_start_frame,
dst_tail
).replace("..", ".")
- repre['published_path'] = self.unc_convert(dst)
+ repre['published_path'] = dst
else:
# Single file
@@ -423,7 +424,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
instance.data["transfers"].append([src, dst])
published_files.append(dst)
- repre['published_path'] = self.unc_convert(dst)
+ repre['published_path'] = dst
self.log.debug("__ dst: {}".format(dst))
repre["publishedFiles"] = published_files
@@ -527,23 +528,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
self.log.debug("Hardlinking file .. {} -> {}".format(src, dest))
self.hardlink_file(src, dest)
- def unc_convert(self, path):
- self.log.debug("> __ path: `{}`".format(path))
- drive, _path = os.path.splitdrive(path)
- self.log.debug("> __ drive, _path: `{}`, `{}`".format(drive, _path))
-
- if not os.path.exists(drive + "/"):
- self.log.info("Converting to unc from environments ..")
-
- path_replace = os.getenv("PYPE_STUDIO_PROJECTS_PATH")
- path_mount = os.getenv("PYPE_STUDIO_PROJECTS_MOUNT")
-
- if "/" in path_mount:
- path = path.replace(path_mount[0:-1], path_replace)
- else:
- path = path.replace(path_mount, path_replace)
- return path
-
def copy_file(self, src, dst):
""" Copy given source to destination
@@ -553,8 +537,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
Returns:
None
"""
- src = self.unc_convert(src)
- dst = self.unc_convert(dst)
src = os.path.normpath(src)
dst = os.path.normpath(dst)
self.log.debug("Copying file .. {} -> {}".format(src, dst))
@@ -582,9 +564,6 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
def hardlink_file(self, src, dst):
dirname = os.path.dirname(dst)
- src = self.unc_convert(src)
- dst = self.unc_convert(dst)
-
try:
os.makedirs(dirname)
except OSError as e:
@@ -669,30 +648,35 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
families.append(instance_family)
families += current_families
- self.log.debug("Registered root: {}".format(api.registered_root()))
-
# create relative source path for DB
- try:
- source = instance.data['source']
- except KeyError:
+ if "source" in instance.data:
+ source = instance.data["source"]
+ else:
source = context.data["currentFile"]
- self.log.debug("source: {}".format(source))
- source = str(source).replace(
- os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"),
- api.registered_root()
+ anatomy = instance.context.data["anatomy"]
+ success, rootless_path = (
+ anatomy.roots_obj.find_root_template_from_path(source)
)
- relative_path = os.path.relpath(source, api.registered_root())
- source = os.path.join("{root}", relative_path).replace("\\", "/")
+ if success:
+ source = rootless_path
+ else:
+ self.log.warning((
+ "Could not find root path for remapping \"{}\"."
+ " This may cause issues on farm."
+ ).format(source))
self.log.debug("Source: {}".format(source))
- version_data = {"families": families,
- "time": context.data["time"],
- "author": context.data["user"],
- "source": source,
- "comment": context.data.get("comment"),
- "machine": context.data.get("machine"),
- "fps": context.data.get(
- "fps", instance.data.get("fps"))}
+ version_data = {
+ "families": families,
+ "time": context.data["time"],
+ "author": context.data["user"],
+ "source": source,
+ "comment": context.data.get("comment"),
+ "machine": context.data.get("machine"),
+ "fps": context.data.get(
+ "fps", instance.data.get("fps")
+ )
+ }
intent_value = instance.context.data.get("intent")
if intent_value and isinstance(intent_value, dict):
diff --git a/pype/plugins/global/publish/submit_publish_job.py b/pype/plugins/global/publish/submit_publish_job.py
index df4889d578..717a72ea96 100644
--- a/pype/plugins/global/publish/submit_publish_job.py
+++ b/pype/plugins/global/publish/submit_publish_job.py
@@ -1,3 +1,6 @@
+# -*- coding: utf-8 -*-
+"""Submit publishing job to farm."""
+
import os
import json
import re
@@ -10,7 +13,7 @@ import pyblish.api
def _get_script():
- """Get path to the image sequence script"""
+ """Get path to the image sequence script."""
try:
from pype.scripts import publish_filesequence
except Exception:
@@ -20,17 +23,11 @@ def _get_script():
if module_path.endswith(".pyc"):
module_path = module_path[: -len(".pyc")] + ".py"
- module_path = os.path.normpath(module_path)
- mount_root = os.path.normpath(os.environ["PYPE_STUDIO_CORE_MOUNT"])
- network_root = os.path.normpath(os.environ["PYPE_STUDIO_CORE_PATH"])
-
- module_path = module_path.replace(mount_root, network_root)
-
- return module_path
+ return os.path.normpath(module_path)
-# Logic to retrieve latest files concerning extendFrames
def get_latest_version(asset_name, subset_name, family):
+ """Retrieve latest files concerning extendFrame feature."""
# Get asset
asset_name = io.find_one(
{"type": "asset", "name": asset_name}, projection={"name": True}
@@ -64,9 +61,7 @@ def get_latest_version(asset_name, subset_name, family):
def get_resources(version, extension=None):
- """
- Get the files from the specific version
- """
+ """Get the files from the specific version."""
query = {"type": "representation", "parent": version["_id"]}
if extension:
query["name"] = extension
@@ -86,14 +81,25 @@ def get_resources(version, extension=None):
return resources
-def get_resource_files(resources, frame_range, override=True):
+def get_resource_files(resources, frame_range=None):
+ """Get resource files at given path.
+ If `frame_range` is specified those outside will be removed.
+
+ Arguments:
+ resources (list): List of resources
+ frame_range (list): Frame range to apply override
+
+ Returns:
+ list of str: list of collected resources
+
+ """
res_collections, _ = clique.assemble(resources)
assert len(res_collections) == 1, "Multiple collections found"
res_collection = res_collections[0]
# Remove any frames
- if override:
+ if frame_range is not None:
for frame in frame_range:
if frame not in res_collection.indexes:
continue
@@ -146,16 +152,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
aov_filter = {"maya": ["beauty"]}
enviro_filter = [
- "PATH",
- "PYTHONPATH",
"FTRACK_API_USER",
"FTRACK_API_KEY",
"FTRACK_SERVER",
- "PYPE_ROOT",
"PYPE_METADATA_FILE",
- "PYPE_STUDIO_PROJECTS_PATH",
- "PYPE_STUDIO_PROJECTS_MOUNT",
- "AVALON_PROJECT"
+ "AVALON_PROJECT",
+ "PYPE_LOG_NO_COLORS"
]
# pool used to do the publishing job
@@ -177,10 +179,12 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
families_transfer = ["render3d", "render2d", "ftrack", "slate"]
def _submit_deadline_post_job(self, instance, job):
- """
+ """Submit publish job to Deadline.
+
Deadline specific code separated from :meth:`process` for sake of
more universal code. Muster post job is sent directly by Muster
submitter, so this type of code isn't necessary for it.
+
"""
data = instance.data.copy()
subset = data["subset"]
@@ -188,14 +192,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
batch=job["Props"]["Name"], subset=subset
)
- metadata_filename = "{}_metadata.json".format(subset)
output_dir = instance.data["outputDir"]
- metadata_path = os.path.join(output_dir, metadata_filename)
-
- metadata_path = os.path.normpath(metadata_path)
- mount_root = os.path.normpath(os.environ["PYPE_STUDIO_PROJECTS_MOUNT"])
- network_root = os.environ["PYPE_STUDIO_PROJECTS_PATH"]
- metadata_path = metadata_path.replace(mount_root, network_root)
+ # Convert output dir to `{root}/rest/of/path/...` with Anatomy
+ success, rootless_path = (
+ self.anatomy.roots_obj.find_root_template_from_path(output_dir)
+ )
+ if not success:
+ # `rootless_path` is not set to `output_dir` if none of roots match
+ self.log.warning((
+ "Could not find root path for remapping \"{}\"."
+ " This may cause issues on farm."
+ ).format(output_dir))
+ rootless_path = output_dir
# Generate the payload for Deadline submission
payload = {
@@ -222,6 +230,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# Transfer the environment from the original job to this dependent
# job so they use the same environment
+ metadata_filename = "{}_metadata.json".format(subset)
+ metadata_path = os.path.join(rootless_path, metadata_filename)
+
environment = job["Props"].get("Env", {})
environment["PYPE_METADATA_FILE"] = metadata_path
environment["AVALON_PROJECT"] = io.Session["AVALON_PROJECT"]
@@ -256,14 +267,17 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
raise Exception(response.text)
def _copy_extend_frames(self, instance, representation):
- """
+ """Copy existing frames from latest version.
+
This will copy all existing frames from subset's latest version back
to render directory and rename them to what renderer is expecting.
- :param instance: instance to get required data from
- :type instance: pyblish.plugin.Instance
- """
+ Arguments:
+ instance (pyblish.plugin.Instance): instance to get required
+ data from
+ representation (dict): representation to operate on
+ """
import speedcopy
self.log.info("Preparing to copy ...")
@@ -303,9 +317,11 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# type
assert fn is not None, "padding string wasn't found"
# list of tuples (source, destination)
+ staging = representation.get("stagingDir")
+ staging = self.anatomy.fill_roots(staging)
resource_files.append(
(frame,
- os.path.join(representation.get("stagingDir"),
+ os.path.join(staging,
"{}{}{}".format(pre,
fn.group("frame"),
post)))
@@ -325,19 +341,20 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
"Finished copying %i files" % len(resource_files))
def _create_instances_for_aov(self, instance_data, exp_files):
- """
+ """Create instance for each AOV found.
+
This will create new instance for every aov it can detect in expected
files list.
- :param instance_data: skeleton data for instance (those needed) later
- by collector
- :type instance_data: pyblish.plugin.Instance
- :param exp_files: list of expected files divided by aovs
- :type exp_files: list
- :returns: list of instances
- :rtype: list(publish.plugin.Instance)
- """
+ Arguments:
+ instance_data (pyblish.plugin.Instance): skeleton data for instance
+ (those needed) later by collector
+ exp_files (list): list of expected files divided by aovs
+ Returns:
+ list of instances
+
+ """
task = os.environ["AVALON_TASK"]
subset = instance_data["subset"]
instances = []
@@ -361,6 +378,16 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
subset_name = '{}_{}'.format(group_name, aov)
staging = os.path.dirname(list(cols[0])[0])
+ success, rootless_staging_dir = (
+ self.anatomy.roots_obj.find_root_template_from_path(staging)
+ )
+ if success:
+ staging = rootless_staging_dir
+ else:
+ self.log.warning((
+ "Could not find root path for remapping \"{}\"."
+ " This may cause issues on farm."
+ ).format(staging))
self.log.info("Creating data for: {}".format(subset_name))
@@ -404,26 +431,28 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
return instances
def _get_representations(self, instance, exp_files):
- """
+ """Create representations for file sequences.
+
This will return representations of expected files if they are not
in hierarchy of aovs. There should be only one sequence of files for
most cases, but if not - we create representation from each of them.
- :param instance: instance for which we are setting representations
- :type instance: pyblish.plugin.Instance
- :param exp_files: list of expected files
- :type exp_files: list
- :returns: list of representations
- :rtype: list(dict)
- """
+ Arguments:
+ instance (pyblish.plugin.Instance): instance for which we are
+ setting representations
+ exp_files (list): list of expected files
+ Returns:
+ list of representations
+
+ """
representations = []
- cols, rem = clique.assemble(exp_files)
+ collections, remainders = clique.assemble(exp_files)
bake_render_path = instance.get("bakeRenderPath")
# create representation for every collected sequence
- for c in cols:
- ext = c.tail.lstrip(".")
+ for collection in collections:
+ ext = collection.tail.lstrip(".")
preview = False
# if filtered aov name is found in filename, toggle it for
# preview video rendering
@@ -432,7 +461,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
for aov in self.aov_filter[app]:
if re.match(
r".+(?:\.|_)({})(?:\.|_).*".format(aov),
- list(c)[0]
+ list(collection)[0]
):
preview = True
break
@@ -441,14 +470,26 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
if bake_render_path:
preview = False
+ staging = os.path.dirname(list(collection)[0])
+ success, rootless_staging_dir = (
+ self.anatomy.roots_obj.find_root_template_from_path(staging)
+ )
+ if success:
+ staging = rootless_staging_dir
+ else:
+ self.log.warning((
+ "Could not find root path for remapping \"{}\"."
+ " This may cause issues on farm."
+ ).format(staging))
+
rep = {
"name": ext,
"ext": ext,
- "files": [os.path.basename(f) for f in list(c)],
+ "files": [os.path.basename(f) for f in list(collection)],
"frameStart": int(instance.get("frameStartHandle")),
"frameEnd": int(instance.get("frameEndHandle")),
# If expectedFile are absolute, we need only filenames
- "stagingDir": os.path.dirname(list(c)[0]),
+ "stagingDir": staging,
"anatomy_template": "render",
"fps": instance.get("fps"),
"tags": ["review", "preview"] if preview else [],
@@ -462,16 +503,29 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
self._solve_families(instance, preview)
# add reminders as representations
- for r in rem:
- ext = r.split(".")[-1]
+ for remainder in remainders:
+ ext = remainder.split(".")[-1]
+
+ staging = os.path.dirname(remainder)
+ success, rootless_staging_dir = (
+ self.anatomy.roots_obj.find_root_template_from_path(staging)
+ )
+ if success:
+ staging = rootless_staging_dir
+ else:
+ self.log.warning((
+ "Could not find root path for remapping \"{}\"."
+ " This may cause issues on farm."
+ ).format(staging))
+
rep = {
"name": ext,
"ext": ext,
- "files": os.path.basename(r),
- "stagingDir": os.path.dirname(r),
- "anatomy_template": "publish"
+ "files": os.path.basename(remainder),
+ "stagingDir": os.path.dirname(remainder),
+ "anatomy_template": "publish",
}
- if r in bake_render_path:
+ if remainder in bake_render_path:
rep.update({
"fps": instance.get("fps"),
"anatomy_template": "render",
@@ -496,7 +550,8 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
instance["families"] = families
def process(self, instance):
- """
+ """Process plugin.
+
Detect type of renderfarm submission and create and post dependend job
in case of Deadline. It creates json file with metadata needed for
publishing in directory of render.
@@ -507,6 +562,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
data = instance.data.copy()
context = instance.context
self.context = context
+ self.anatomy = instance.context.data["anatomy"]
if hasattr(instance, "_log"):
data['_log'] = instance._log
@@ -566,11 +622,18 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
except KeyError:
source = context.data["currentFile"]
- source = source.replace(
- os.getenv("PYPE_STUDIO_PROJECTS_MOUNT"), api.registered_root()
+ success, rootless_path = (
+ self.anatomy.roots_obj.find_root_template_from_path(source)
)
- relative_path = os.path.relpath(source, api.registered_root())
- source = os.path.join("{root}", relative_path).replace("\\", "/")
+ if success:
+ source = rootless_path
+
+ else:
+ # `rootless_path` is not set to `source` if none of roots match
+ self.log.warning((
+ "Could not find root path for remapping \"{}\"."
+ " This may cause issues."
+ ).format(source))
families = ["render"]
@@ -621,13 +684,29 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
# look into instance data if representations are not having any
# which are having tag `publish_on_farm` and include them
- for r in instance.data.get("representations", []):
- if "publish_on_farm" in r.get("tags"):
+ for repre in instance.data.get("representations", []):
+ staging_dir = repre.get("stagingDir")
+ if staging_dir:
+ success, rootless_staging_dir = (
+ self.anatomy.roots_obj.find_root_template_from_path(
+ staging_dir
+ )
+ )
+ if success:
+ repre["stagingDir"] = rootless_staging_dir
+ else:
+ self.log.warning((
+ "Could not find root path for remapping \"{}\"."
+ " This may cause issues on farm."
+ ).format(staging_dir))
+ repre["stagingDir"] = staging_dir
+
+ if "publish_on_farm" in repre.get("tags"):
# create representations attribute of not there
if "representations" not in instance_skeleton_data.keys():
instance_skeleton_data["representations"] = []
- instance_skeleton_data["representations"].append(r)
+ instance_skeleton_data["representations"].append(repre)
instances = None
assert data.get("expectedFiles"), ("Submission from old Pype version"
@@ -764,12 +843,21 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin):
with open(metadata_path, "w") as f:
json.dump(publish_job, f, indent=4, sort_keys=True)
- def _extend_frames(self, asset, subset, start, end, override):
- """
- This will get latest version of asset and update frame range based
- on minimum and maximuma values
- """
+ def _extend_frames(self, asset, subset, start, end):
+ """Get latest version of asset and update frame range.
+ Based on minimum and maximum values.
+
+ Arguments:
+ asset (str): asset name
+ subset (str): subset name
+ start (int): start frame
+ end (int): end frame
+
+ Returns:
+ (int, int): updated frame start/end
+
+ """
# Frame comparison
prev_start = None
prev_end = None
diff --git a/pype/plugins/maya/publish/submit_maya_deadline.py b/pype/plugins/maya/publish/submit_maya_deadline.py
index 7547f34ba1..7d6437b81d 100644
--- a/pype/plugins/maya/publish/submit_maya_deadline.py
+++ b/pype/plugins/maya/publish/submit_maya_deadline.py
@@ -1,6 +1,17 @@
+# -*- coding: utf-8 -*-
+"""Submitting render job to Deadline.
+
+This module is taking care of submitting job from Maya to Deadline. It
+creates job and set correct environments. Its behavior is controlled by
+`DEADLINE_REST_URL` environment variable - pointing to Deadline Web Service
+and `MayaSubmitDeadline.use_published (bool)` property telling Deadline to
+use published scene workfile or not.
+"""
+
import os
import json
import getpass
+import re
import clique
from maya import cmds
@@ -14,7 +25,7 @@ import pype.maya.lib as lib
def get_renderer_variables(renderlayer=None):
- """Retrieve the extension which has been set in the VRay settings
+ """Retrieve the extension which has been set in the VRay settings.
Will return None if the current renderer is not VRay
For Maya 2016.5 and up the renderSetup creates renderSetupLayer node which
@@ -25,16 +36,21 @@ def get_renderer_variables(renderlayer=None):
Returns:
dict
- """
+ """
renderer = lib.get_renderer(renderlayer or lib.get_current_renderlayer())
render_attrs = lib.RENDER_ATTRS.get(renderer, lib.RENDER_ATTRS["default"])
padding = cmds.getAttr("{}.{}".format(render_attrs["node"],
render_attrs["padding"]))
- filename_0 = cmds.renderSettings(fullPath=True, firstImageName=True)[0]
-
+ filename_0 = cmds.renderSettings(
+ fullPath=True,
+ gin="#" * int(padding),
+ lut=True,
+ layer=renderlayer or lib.get_current_renderlayer())[0]
+ filename_0 = filename_0.replace('_', '_beauty')
+ prefix_attr = "defaultRenderGlobals.imageFilePrefix"
if renderer == "vray":
# Maya's renderSettings function does not return V-Ray file extension
# so we get the extension from vraySettings
@@ -46,62 +62,33 @@ def get_renderer_variables(renderlayer=None):
if extension is None:
extension = "png"
- filename_prefix = "/_/"
+ if extension == "exr (multichannel)" or extension == "exr (deep)":
+ extension = "exr"
+
+ prefix_attr = "vraySettings.fileNamePrefix"
+ elif renderer == "renderman":
+ prefix_attr = "rmanGlobals.imageFileFormat"
+ elif renderer == "redshift":
+ # mapping redshift extension dropdown values to strings
+ ext_mapping = ["iff", "exr", "tif", "png", "tga", "jpg"]
+ extension = ext_mapping[
+ cmds.getAttr("redshiftOptions.imageFormat")
+ ]
else:
# Get the extension, getAttr defaultRenderGlobals.imageFormat
# returns an index number.
filename_base = os.path.basename(filename_0)
extension = os.path.splitext(filename_base)[-1].strip(".")
- filename_prefix = cmds.getAttr("defaultRenderGlobals.imageFilePrefix")
+ filename_prefix = cmds.getAttr(prefix_attr)
return {"ext": extension,
"filename_prefix": filename_prefix,
"padding": padding,
"filename_0": filename_0}
-def preview_fname(folder, scene, layer, padding, ext):
- """Return output file path with #### for padding.
-
- Deadline requires the path to be formatted with # in place of numbers.
- For example `/path/to/render.####.png`
-
- Args:
- folder (str): The root output folder (image path)
- scene (str): The scene name
- layer (str): The layer name to be rendered
- padding (int): The padding length
- ext(str): The output file extension
-
- Returns:
- str
-
- """
-
- fileprefix = cmds.getAttr("defaultRenderGlobals.imageFilePrefix")
- output = fileprefix + ".{number}.{ext}"
- # RenderPass is currently hardcoded to "beauty" because its not important
- # for the deadline submission, but we will need something to replace
- # "".
- mapping = {
- "": "{scene}",
- "": "{layer}",
- "RenderPass": "beauty"
- }
- for key, value in mapping.items():
- output = output.replace(key, value)
- output = output.format(
- scene=scene,
- layer=layer,
- number="#" * padding,
- ext=ext
- )
-
- return os.path.join(folder, output)
-
-
class MayaSubmitDeadline(pyblish.api.InstancePlugin):
- """Submit available render layers to Deadline
+ """Submit available render layers to Deadline.
Renders are submitted to a Deadline Web Service as
supplied via the environment variable DEADLINE_REST_URL
@@ -194,22 +181,22 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
filename = os.path.basename(filepath)
comment = context.data.get("comment", "")
- scene = os.path.splitext(filename)[0]
dirname = os.path.join(workspace, "renders")
renderlayer = instance.data['setMembers'] # rs_beauty
- renderlayer_name = instance.data['subset'] # beauty
- # renderlayer_globals = instance.data["renderGlobals"]
- # legacy_layers = renderlayer_globals["UseLegacyRenderLayers"]
deadline_user = context.data.get("deadlineUser", getpass.getuser())
jobname = "%s - %s" % (filename, instance.name)
# Get the variables depending on the renderer
render_variables = get_renderer_variables(renderlayer)
- output_filename_0 = preview_fname(folder=dirname,
- scene=scene,
- layer=renderlayer_name,
- padding=render_variables["padding"],
- ext=render_variables["ext"])
+ filename_0 = render_variables["filename_0"]
+ if self.use_published:
+ new_scene = os.path.splitext(filename)[0]
+ orig_scene = os.path.splitext(
+ os.path.basename(context.data["currentFile"]))[0]
+ filename_0 = render_variables["filename_0"].replace(
+ orig_scene, new_scene)
+
+ output_filename_0 = filename_0
try:
# Ensure render folder exists
@@ -284,7 +271,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
for aov, files in exp[0].items():
col = clique.assemble(files)[0][0]
outputFile = col.format('{head}{padding}{tail}')
- payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
+ payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile # noqa: E501
OutputFilenames[expIndex] = outputFile
expIndex += 1
else:
@@ -293,7 +280,6 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
payload['JobInfo']['OutputFilename' + str(expIndex)] = outputFile
# OutputFilenames[expIndex] = outputFile
-
# We need those to pass them to pype for it to set correct context
keys = [
"FTRACK_API_KEY",
@@ -334,7 +320,7 @@ class MayaSubmitDeadline(pyblish.api.InstancePlugin):
raise Exception(response.text)
# Store output dir for unified publisher (filesequence)
- instance.data["outputDir"] = os.path.dirname(output_filename_0)
+ instance.data["outputDir"] = os.path.dirname(filename_0)
instance.data["deadlineSubmissionJob"] = response.json()
def preflight_check(self, instance):
diff --git a/pype/plugins/maya/publish/submit_maya_muster.py b/pype/plugins/maya/publish/submit_maya_muster.py
index ac60c40bf7..c6660fe601 100644
--- a/pype/plugins/maya/publish/submit_maya_muster.py
+++ b/pype/plugins/maya/publish/submit_maya_muster.py
@@ -309,14 +309,7 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
output_dir = instance.data["outputDir"]
metadata_path = os.path.join(output_dir, metadata_filename)
- # replace path for UNC / network share paths, co PYPE is found
- # over network. It assumes PYPE is located somewhere in
- # PYPE_STUDIO_CORE_PATH
- pype_root = os.environ["PYPE_ROOT"].replace(
- os.path.normpath(
- os.environ['PYPE_STUDIO_CORE_MOUNT']), # noqa
- os.path.normpath(
- os.environ['PYPE_STUDIO_CORE_PATH'])) # noqa
+ pype_root = os.environ["PYPE_SETUP_PATH"]
# we must provide either full path to executable or use musters own
# python named MPython.exe, residing directly in muster bin
@@ -517,33 +510,25 @@ class MayaSubmitMuster(pyblish.api.InstancePlugin):
environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
- for key in environment:
+ for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
- to_process = environment[key]
- if key == "PYPE_STUDIO_CORE_MOUNT":
- clean_path = environment[key]
- elif "://" in environment[key]:
- clean_path = environment[key]
- elif os.pathsep not in to_process:
- try:
- path = environment[key]
- path.decode('UTF-8', 'strict')
- clean_path = os.path.normpath(path)
- except UnicodeDecodeError:
- print('path contains non UTF characters')
+ if "://" in value:
+ clean_path = value
else:
- for path in environment[key].split(os.pathsep):
+ valid_paths = []
+ for path in value.split(os.pathsep):
+ if not path:
+ continue
try:
path.decode('UTF-8', 'strict')
- clean_path += os.path.normpath(path) + os.pathsep
+ valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')
- # this should replace paths so they are pointing to network share
- clean_path = clean_path.replace(
- os.path.normpath(environment['PYPE_STUDIO_CORE_MOUNT']),
- os.path.normpath(environment['PYPE_STUDIO_CORE_PATH']))
+ if valid_paths:
+ clean_path = os.pathsep.join(valid_paths)
+
clean_environment[key] = clean_path
return clean_environment
diff --git a/pype/plugins/nuke/publish/submit_nuke_deadline.py b/pype/plugins/nuke/publish/submit_nuke_deadline.py
index 7990c20112..e41eba3ad7 100644
--- a/pype/plugins/nuke/publish/submit_nuke_deadline.py
+++ b/pype/plugins/nuke/publish/submit_nuke_deadline.py
@@ -201,40 +201,32 @@ class NukeSubmitDeadline(pyblish.api.InstancePlugin):
if path.lower().startswith('pype_'):
environment[path] = os.environ[path]
- environment["PATH"] = os.environ["PATH"]
+ # environment["PATH"] = os.environ["PATH"]
# self.log.debug("enviro: {}".format(environment['PYPE_SCRIPTS']))
clean_environment = {}
- for key in environment:
+ for key, value in environment.items():
clean_path = ""
self.log.debug("key: {}".format(key))
- to_process = environment[key]
- if key == "PYPE_STUDIO_CORE_MOUNT":
- clean_path = environment[key]
- elif "://" in environment[key]:
- clean_path = environment[key]
- elif os.pathsep not in to_process:
- try:
- path = environment[key]
- path.decode('UTF-8', 'strict')
- clean_path = os.path.normpath(path)
- except UnicodeDecodeError:
- print('path contains non UTF characters')
+ if "://" in value:
+ clean_path = value
else:
- for path in environment[key].split(os.pathsep):
+ valid_paths = []
+ for path in value.split(os.pathsep):
+ if not path:
+ continue
try:
path.decode('UTF-8', 'strict')
- clean_path += os.path.normpath(path) + os.pathsep
+ valid_paths.append(os.path.normpath(path))
except UnicodeDecodeError:
print('path contains non UTF characters')
+ if valid_paths:
+ clean_path = os.pathsep.join(valid_paths)
+
if key == "PYTHONPATH":
clean_path = clean_path.replace('python2', 'python3')
- clean_path = clean_path.replace(
- os.path.normpath(
- environment['PYPE_STUDIO_CORE_MOUNT']), # noqa
- os.path.normpath(
- environment['PYPE_STUDIO_CORE_PATH'])) # noqa
+ self.log.debug("clean path: {}".format(clean_path))
clean_environment[key] = clean_path
environment = clean_environment
diff --git a/pype/scripts/publish_deadline.py b/pype/scripts/publish_deadline.py
index e6052dbfd2..16d097a1ea 100644
--- a/pype/scripts/publish_deadline.py
+++ b/pype/scripts/publish_deadline.py
@@ -14,9 +14,9 @@ def __main__():
"configuration.")
kwargs, args = parser.parse_known_args()
- pype_root = os.environ.get("PYPE_ROOT")
+ pype_root = os.environ.get("PYPE_SETUP_PATH")
if not pype_root:
- raise Exception("PYPE_ROOT is not set")
+ raise Exception("PYPE_SETUP_PATH is not set")
# TODO: set correct path
pype_command = "pype.ps1"
diff --git a/pype/scripts/publish_filesequence.py b/pype/scripts/publish_filesequence.py
index a41d97668e..905c6b99ba 100644
--- a/pype/scripts/publish_filesequence.py
+++ b/pype/scripts/publish_filesequence.py
@@ -47,10 +47,10 @@ def __main__():
auto_pype_root = os.path.dirname(os.path.abspath(__file__))
auto_pype_root = os.path.abspath(auto_pype_root + "../../../../..")
- auto_pype_root = os.environ.get('PYPE_ROOT') or auto_pype_root
- if os.environ.get('PYPE_ROOT'):
+ auto_pype_root = os.environ.get('PYPE_SETUP_PATH') or auto_pype_root
+ if os.environ.get('PYPE_SETUP_PATH'):
print("Got Pype location from environment: {}".format(
- os.environ.get('PYPE_ROOT')))
+ os.environ.get('PYPE_SETUP_PATH')))
pype_command = "pype.ps1"
if platform.system().lower() == "linux":
@@ -77,11 +77,10 @@ def __main__():
print("Paths: {}".format(kwargs.paths or [os.getcwd()]))
paths = kwargs.paths or [os.environ.get("PYPE_METADATA_FILE")] or [os.getcwd()] # noqa
-
args = [
os.path.join(pype_root, pype_command),
"publish",
- " ".join(paths)
+ " ".join(['"{}"'.format(p) for p in paths])
]
print("Pype command: {}".format(" ".join(args)))
diff --git a/pype/tools/texture_copy/app.py b/pype/tools/texture_copy/app.py
index a59d30ec8b..5f89db53ff 100644
--- a/pype/tools/texture_copy/app.py
+++ b/pype/tools/texture_copy/app.py
@@ -46,25 +46,26 @@ class TextureCopy:
return asset
def _get_destination_path(self, asset, project):
- root = api.registered_root()
- PROJECT = api.Session["AVALON_PROJECT"]
+ project_name = project["name"]
hierarchy = ""
parents = asset['data']['parents']
if parents and len(parents) > 0:
hierarchy = os.path.join(*parents)
- template_data = {"root": root,
- "project": {"name": PROJECT,
- "code": project['data']['code']},
- "silo": asset.get('silo'),
- "asset": asset['name'],
- "family": 'texture',
- "subset": 'Main',
- "hierarchy": hierarchy}
- anatomy = Anatomy()
- anatomy_filled = os.path.normpath(
- anatomy.format(template_data)['texture']['path'])
- return anatomy_filled
+ template_data = {
+ "project": {
+ "name": project_name,
+ "code": project['data']['code']
+ },
+ "silo": asset.get('silo'),
+ "asset": asset['name'],
+ "family": 'texture',
+ "subset": 'Main',
+ "hierarchy": hierarchy
+ }
+ anatomy = Anatomy(project_name)
+ anatomy_filled = anatomy.format(template_data)
+ return anatomy_filled['texture']['path']
def _get_version(self, path):
versions = [0]
diff --git a/schema/config-1.0.json b/schema/config-1.0.json
index b3c4362f41..198f51e04d 100644
--- a/schema/config-1.0.json
+++ b/schema/config-1.0.json
@@ -8,7 +8,6 @@
"additionalProperties": false,
"required": [
- "template",
"tasks",
"apps"
],